summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSiyuan Zhou <siyuan.zhou@mongodb.com>2014-03-03 17:15:39 -0500
committerMatt Kangas <matt.kangas@mongodb.com>2014-03-03 22:54:14 -0500
commit93d8befdbac74fb965faa4d3a8ae3e60d5a7a5b9 (patch)
treeb1be042b0d074266fb417177b33aa0f266a3f38b
parent3660343e0b4627d2fee4afb89b74d32644d16d18 (diff)
downloadmongo-93d8befdbac74fb965faa4d3a8ae3e60d5a7a5b9.tar.gz
SERVER-12127 Temporarily put back jstest in order not to lose test coverage.
Signed-off-by: Matt Kangas <matt.kangas@mongodb.com>
-rw-r--r--jstests/js1.js12
-rw-r--r--jstests/js2.js23
-rw-r--r--jstests/js3.js76
-rw-r--r--jstests/js4.js49
-rw-r--r--jstests/js5.js10
-rw-r--r--jstests/js7.js5
-rw-r--r--jstests/js8.js14
-rw-r--r--jstests/js9.js24
-rw-r--r--jstests/json1.js28
-rw-r--r--jstests/killop.js62
-rw-r--r--jstests/loadserverscripts.js57
-rw-r--r--jstests/loglong.js32
-rw-r--r--jstests/logprocessdetails.js39
-rw-r--r--jstests/long_index_rename.js18
-rw-r--r--jstests/map1.js24
-rw-r--r--jstests/max_time_ms.js303
-rw-r--r--jstests/maxscan.js18
-rw-r--r--jstests/minmax.js54
-rw-r--r--jstests/mod1.js25
-rw-r--r--jstests/mr1.js184
-rw-r--r--jstests/mr2.js83
-rw-r--r--jstests/mr3.js73
-rw-r--r--jstests/mr4.js45
-rw-r--r--jstests/mr5.js58
-rw-r--r--jstests/mr_bigobject.js46
-rw-r--r--jstests/mr_comments.js28
-rw-r--r--jstests/mr_drop.js38
-rw-r--r--jstests/mr_errorhandling.js49
-rw-r--r--jstests/mr_index.js43
-rw-r--r--jstests/mr_index2.js22
-rw-r--r--jstests/mr_index3.js50
-rw-r--r--jstests/mr_killop.js123
-rw-r--r--jstests/mr_merge.js60
-rw-r--r--jstests/mr_merge2.js37
-rw-r--r--jstests/mr_mutable_properties.js62
-rw-r--r--jstests/mr_optim.js48
-rw-r--r--jstests/mr_outreduce.js49
-rw-r--r--jstests/mr_outreduce2.js27
-rw-r--r--jstests/mr_replaceIntoDB.js45
-rw-r--r--jstests/mr_sort.js44
-rw-r--r--jstests/mr_stored.js66
-rw-r--r--jstests/mr_undef.js22
-rw-r--r--jstests/multi.js24
-rw-r--r--jstests/multi2.js23
-rw-r--r--jstests/ne1.js11
-rw-r--r--jstests/ne2.js16
-rw-r--r--jstests/ne3.js12
-rw-r--r--jstests/nestedarr1.js30
-rw-r--r--jstests/nestedobj1.js30
-rw-r--r--jstests/nin.js58
-rw-r--r--jstests/nin2.js67
-rw-r--r--jstests/not1.js20
-rw-r--r--jstests/not2.js84
-rw-r--r--jstests/notablescan.js31
-rw-r--r--jstests/ns_length.js85
-rw-r--r--jstests/null.js26
-rw-r--r--jstests/null2.js45
-rw-r--r--jstests/null_field_name.js8
-rw-r--r--jstests/numberint.js92
-rw-r--r--jstests/numberlong.js55
-rw-r--r--jstests/numberlong2.js28
-rw-r--r--jstests/numberlong3.js25
-rw-r--r--jstests/numberlong4.js21
-rw-r--r--jstests/objid1.js16
-rw-r--r--jstests/objid2.js7
-rw-r--r--jstests/objid3.js9
-rw-r--r--jstests/objid4.js16
-rw-r--r--jstests/objid5.js19
-rw-r--r--jstests/objid6.js16
-rw-r--r--jstests/objid7.js13
-rw-r--r--jstests/or1.js57
-rw-r--r--jstests/or2.js69
-rw-r--r--jstests/or3.js62
-rw-r--r--jstests/or4.js99
-rw-r--r--jstests/or5.js70
-rw-r--r--jstests/or6.js23
-rw-r--r--jstests/or7.js41
-rw-r--r--jstests/or8.js28
-rw-r--r--jstests/or9.js64
-rw-r--r--jstests/ora.js17
-rw-r--r--jstests/orb.js17
-rw-r--r--jstests/orc.js29
-rw-r--r--jstests/ord.js35
-rw-r--r--jstests/ore.js13
-rw-r--r--jstests/orf.js27
-rw-r--r--jstests/org.js19
-rw-r--r--jstests/orh.js17
-rw-r--r--jstests/orj.js121
-rw-r--r--jstests/ork.js11
-rw-r--r--jstests/orl.js13
-rw-r--r--jstests/oro.js27
-rw-r--r--jstests/orp.js43
-rw-r--r--jstests/padding.js66
-rw-r--r--jstests/plan_cache_commands.js358
-rw-r--r--jstests/profile1.js170
-rw-r--r--jstests/profile2.js25
-rw-r--r--jstests/profile3.js54
-rw-r--r--jstests/profile4.js98
-rw-r--r--jstests/proj_key1.js28
-rw-r--r--jstests/pull.js33
-rw-r--r--jstests/pull2.js31
-rw-r--r--jstests/pull_or.js21
-rw-r--r--jstests/pull_remove1.js14
-rw-r--r--jstests/pullall.js31
-rw-r--r--jstests/pullall2.js20
-rw-r--r--jstests/push.js54
-rw-r--r--jstests/push2.js21
-rw-r--r--jstests/push_sort.js96
-rw-r--r--jstests/pushall.js20
-rw-r--r--jstests/query1.js26
-rw-r--r--jstests/queryoptimizer3.js33
-rw-r--r--jstests/queryoptimizer6.js16
-rw-r--r--jstests/queryoptimizera.js92
-rw-r--r--jstests/ref.js19
-rw-r--r--jstests/ref2.js14
-rw-r--r--jstests/ref3.js19
-rw-r--r--jstests/ref4.js20
-rw-r--r--jstests/regex.js24
-rw-r--r--jstests/regex2.js70
-rw-r--r--jstests/regex3.js36
-rw-r--r--jstests/regex4.js18
-rw-r--r--jstests/regex5.js53
-rw-r--r--jstests/regex6.js29
-rw-r--r--jstests/regex7.js26
-rw-r--r--jstests/regex8.js19
-rw-r--r--jstests/regex9.js11
-rw-r--r--jstests/regex_embed1.js25
-rw-r--r--jstests/regex_limit.js22
-rw-r--r--jstests/regex_options.js7
-rw-r--r--jstests/regex_util.js27
-rw-r--r--jstests/regexa.js19
-rw-r--r--jstests/regexb.js14
-rw-r--r--jstests/regexc.js28
-rw-r--r--jstests/remove.js27
-rw-r--r--jstests/remove2.js46
-rw-r--r--jstests/remove3.js18
-rw-r--r--jstests/remove4.js10
-rw-r--r--jstests/remove6.js38
-rw-r--r--jstests/remove7.js35
-rw-r--r--jstests/remove8.js21
-rw-r--r--jstests/remove9.js16
-rw-r--r--jstests/remove_justone.js16
-rw-r--r--jstests/remove_undefined.js28
-rw-r--r--jstests/removea.js23
-rw-r--r--jstests/removeb.js39
-rw-r--r--jstests/removec.js40
-rw-r--r--jstests/rename.js56
-rw-r--r--jstests/rename2.js19
-rw-r--r--jstests/rename3.js25
-rw-r--r--jstests/rename4.js145
-rw-r--r--jstests/rename5.js46
-rw-r--r--jstests/rename6.js24
-rw-r--r--jstests/rename7.js56
-rw-r--r--jstests/rename8.js25
-rw-r--r--jstests/rename_stayTemp.js24
-rw-r--r--jstests/repair.js28
-rw-r--r--jstests/reversecursor.js34
-rw-r--r--jstests/role_management_helpers.js137
-rw-r--r--jstests/run_program1.js19
-rw-r--r--jstests/server1470.js20
-rw-r--r--jstests/server5346.js15
-rw-r--r--jstests/server7756.js12
-rw-r--r--jstests/server9385.js16
-rw-r--r--jstests/server9547.js21
-rw-r--r--jstests/set1.js9
-rw-r--r--jstests/set2.js18
-rw-r--r--jstests/set3.js11
-rw-r--r--jstests/set4.js15
-rw-r--r--jstests/set5.js17
-rw-r--r--jstests/set6.js20
-rw-r--r--jstests/set7.js67
-rw-r--r--jstests/set_param1.js9
-rw-r--r--jstests/shell1.js15
-rw-r--r--jstests/shell_writeconcern.js72
-rw-r--r--jstests/shellkillop.js61
-rw-r--r--jstests/shellspawn.js33
-rw-r--r--jstests/shellstartparallel.js17
-rw-r--r--jstests/shelltypes.js53
-rw-r--r--jstests/showdiskloc.js25
-rw-r--r--jstests/skip1.js15
-rw-r--r--jstests/slice1.js68
-rw-r--r--jstests/sort1.js48
-rw-r--r--jstests/sort10.js48
-rw-r--r--jstests/sort2.js32
-rw-r--r--jstests/sort3.js16
-rw-r--r--jstests/sort4.js43
-rw-r--r--jstests/sort5.js21
-rw-r--r--jstests/sort6.js38
-rw-r--r--jstests/sort7.js25
-rw-r--r--jstests/sort8.js30
-rw-r--r--jstests/sort9.js26
-rw-r--r--jstests/sort_numeric.js35
-rw-r--r--jstests/sorta.js26
-rw-r--r--jstests/sortb.js27
-rw-r--r--jstests/sortc.js37
-rw-r--r--jstests/sortd.js70
-rw-r--r--jstests/sortf.js20
-rw-r--r--jstests/sortg.js64
-rw-r--r--jstests/sorth.js140
-rw-r--r--jstests/sorti.js25
-rw-r--r--jstests/sortj.js17
-rw-r--r--jstests/sortk.js140
-rw-r--r--jstests/splitvector.js309
-rw-r--r--jstests/stages_and_hash.js42
-rw-r--r--jstests/stages_and_sorted.js49
-rw-r--r--jstests/stages_collection_scan.js43
-rw-r--r--jstests/stages_fetch.js33
-rw-r--r--jstests/stages_ixscan.js76
-rw-r--r--jstests/stages_limit_skip.js34
-rw-r--r--jstests/stages_mergesort.js32
-rw-r--r--jstests/stages_or.js33
-rw-r--r--jstests/stages_sort.js36
-rw-r--r--jstests/stages_text.js17
-rw-r--r--jstests/stats.js23
-rw-r--r--jstests/storageDetailsCommand.js98
-rw-r--r--jstests/storefunc.js44
-rw-r--r--jstests/string_with_nul_bytes.js9
-rw-r--r--jstests/sub1.js14
-rw-r--r--jstests/temp_cleanup.js16
-rw-r--r--jstests/testminmax.js14
-rw-r--r--jstests/touch1.js15
-rw-r--r--jstests/ts1.js38
-rw-r--r--jstests/type1.js24
-rw-r--r--jstests/type2.js19
-rw-r--r--jstests/type3.js68
-rw-r--r--jstests/uniqueness.js58
-rw-r--r--jstests/unset.js19
-rw-r--r--jstests/unset2.js23
-rw-r--r--jstests/update.js40
-rw-r--r--jstests/update2.js18
-rw-r--r--jstests/update3.js28
-rw-r--r--jstests/update5.js41
-rw-r--r--jstests/update6.js46
-rw-r--r--jstests/update7.js138
-rw-r--r--jstests/update8.js11
-rw-r--r--jstests/update9.js19
-rw-r--r--jstests/update_addToSet.js58
-rw-r--r--jstests/update_addToSet2.js11
-rw-r--r--jstests/update_addToSet3.js18
-rw-r--r--jstests/update_arraymatch1.js16
-rw-r--r--jstests/update_arraymatch2.js16
-rw-r--r--jstests/update_arraymatch3.js17
-rw-r--r--jstests/update_arraymatch4.js18
-rw-r--r--jstests/update_arraymatch5.js15
-rw-r--r--jstests/update_arraymatch6.js14
-rw-r--r--jstests/update_arraymatch7.js19
-rw-r--r--jstests/update_arraymatch8.js158
-rw-r--r--jstests/update_bit_examples.js24
-rw-r--r--jstests/update_blank1.js10
-rw-r--r--jstests/update_currentdate_examples.js24
-rw-r--r--jstests/update_dbref.js36
-rw-r--r--jstests/update_invalid1.js6
-rw-r--r--jstests/update_min_max_examples.js31
-rw-r--r--jstests/update_mul_examples.js24
-rw-r--r--jstests/update_multi3.js25
-rw-r--r--jstests/update_multi4.js18
-rw-r--r--jstests/update_multi5.js17
-rw-r--r--jstests/update_multi6.js10
-rw-r--r--jstests/update_replace.js50
-rw-r--r--jstests/update_setOnInsert.js47
-rw-r--r--jstests/updatea.js67
-rw-r--r--jstests/updateb.js11
-rw-r--r--jstests/updatec.js14
-rw-r--r--jstests/updated.js20
-rw-r--r--jstests/updatee.js71
-rw-r--r--jstests/updatef.js24
-rw-r--r--jstests/updateg.js17
-rw-r--r--jstests/updateh.js39
-rw-r--r--jstests/updatei.js86
-rw-r--r--jstests/updatej.js12
-rw-r--r--jstests/updatek.js14
-rw-r--r--jstests/updatel.js48
-rw-r--r--jstests/updatem.js20
-rw-r--r--jstests/upsert1.js59
-rw-r--r--jstests/upsert2.js20
-rw-r--r--jstests/upsert3.js60
-rw-r--r--jstests/upsert4.js36
-rw-r--r--jstests/use_power_of_2.js86
-rwxr-xr-xjstests/useindexonobjgtlt.js15
-rw-r--r--jstests/user_management_helpers.js94
-rw-r--r--jstests/validate_cmd_ns.js25
-rw-r--r--jstests/validate_user_documents.js65
-rw-r--r--jstests/verify_update_mods.js82
-rw-r--r--jstests/where1.js28
-rw-r--r--jstests/where2.js10
-rw-r--r--jstests/where3.js10
-rw-r--r--jstests/where4.js27
287 files changed, 11572 insertions, 0 deletions
diff --git a/jstests/js1.js b/jstests/js1.js
new file mode 100644
index 00000000000..240d9f82fbb
--- /dev/null
+++ b/jstests/js1.js
@@ -0,0 +1,12 @@
+
+
+t = db.jstests_js1;
+t.remove( {} );
+
+t.save( { z : 1 } );
+t.save( { z : 2 } );
+assert( 2 == t.find().length() );
+assert( 2 == t.find( { $where : function(){ return 1; } } ).length() );
+assert( 1 == t.find( { $where : function(){ return obj.z == 2; } } ).length() );
+
+assert(t.validate().valid);
diff --git a/jstests/js2.js b/jstests/js2.js
new file mode 100644
index 00000000000..8753599887a
--- /dev/null
+++ b/jstests/js2.js
@@ -0,0 +1,23 @@
+
+t = db.jstests_js2;
+t.remove( {} );
+
+t2 = db.jstests_js2_2;
+t2.remove( {} );
+
+assert.eq( 0 , t2.find().length() , "A" );
+
+t.save( { z : 1 } );
+t.save( { z : 2 } );
+assert.throws( function(){
+ t.find( { $where :
+ function(){
+ db.jstests_js2_2.save( { y : 1 } );
+ return 1;
+ }
+ } ).forEach( printjson );
+} , null , "can't save from $where" );
+
+assert.eq( 0 , t2.find().length() , "B" )
+
+assert(t.validate().valid , "E");
diff --git a/jstests/js3.js b/jstests/js3.js
new file mode 100644
index 00000000000..4249ad6183d
--- /dev/null
+++ b/jstests/js3.js
@@ -0,0 +1,76 @@
+
+t = db.jstests_js3;
+
+debug = function( s ){
+ //printjson( s );
+}
+
+for( z = 0; z < 2; z++ ) {
+ debug(z);
+
+ t.drop();
+
+ if( z > 0 ) {
+ t.ensureIndex({_id:1});
+ t.ensureIndex({i:1});
+ }
+
+ for( i = 0; i < 1000; i++ )
+ t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert( 33 == db.dbEval(function() { return 33; } ) );
+
+ db.dbEval( function() { db.jstests_js3.save({i:-1, z:"server side"}) } );
+
+ assert( t.findOne({i:-1}) );
+
+ assert( 2 == t.find( { $where :
+ function(){
+ return obj.i == 7 || obj.i == 8;
+ }
+ } ).length() );
+
+
+ // NPE test
+ var ok = false;
+ try {
+ var x = t.find( { $where :
+ function(){
+ asdf.asdf.f.s.s();
+ }
+ } );
+ debug( x.length() );
+ debug( tojson( x ) );
+ }
+ catch(e) {
+ ok = true;
+ }
+ debug( ok );
+ assert(ok);
+
+ t.ensureIndex({z:1});
+ t.ensureIndex({q:1});
+
+ debug( "before indexed find" );
+
+ arr = t.find( { $where :
+ function(){
+ return obj.i == 7 || obj.i == 8;
+ }
+ } ).toArray();
+ debug( arr );
+ assert.eq( 2, arr.length );
+
+ debug( "after indexed find" );
+
+ for( i = 1000; i < 2000; i++ )
+ t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert( t.find().count() == 2001 );
+
+ assert( t.validate().valid );
+
+ debug( "done iter" );
+}
+
+t.drop(); \ No newline at end of file
diff --git a/jstests/js4.js b/jstests/js4.js
new file mode 100644
index 00000000000..38cadf355de
--- /dev/null
+++ b/jstests/js4.js
@@ -0,0 +1,49 @@
+t = db.jstests_js4;
+t.drop();
+
+real = { a : 1 ,
+ b : "abc" ,
+ c : /abc/i ,
+ d : new Date(111911100111) ,
+ e : null ,
+ f : true
+ };
+
+t.save( real );
+
+assert.eq( "/abc/i" , real.c.toString() , "regex 1" );
+
+var cursor = t.find( { $where :
+ function(){
+ fullObject;
+ assert.eq( 7 , Object.keySet( obj ).length , "A" )
+ assert.eq( 1 , obj.a , "B" );
+ assert.eq( "abc" , obj.b , "C" );
+ assert.eq( "/abc/i" , obj.c.toString() , "D" );
+ assert.eq( 111911100111 , obj.d.getTime() , "E" );
+ assert( obj.f , "F" );
+ assert( ! obj.e , "G" );
+
+ return true;
+ }
+ } );
+assert.eq( 1 , cursor.toArray().length );
+assert.eq( "abc" , cursor[0].b );
+
+// ---
+
+t.drop();
+t.save( { a : 2 , b : { c : 7 , d : "d is good" } } );
+var cursor = t.find( { $where :
+ function(){
+ fullObject;
+ assert.eq( 3 , Object.keySet( obj ).length )
+ assert.eq( 2 , obj.a );
+ assert.eq( 7 , obj.b.c );
+ assert.eq( "d is good" , obj.b.d );
+ return true;
+ }
+ } );
+assert.eq( 1 , cursor.toArray().length );
+
+assert(t.validate().valid);
diff --git a/jstests/js5.js b/jstests/js5.js
new file mode 100644
index 00000000000..84770d72da2
--- /dev/null
+++ b/jstests/js5.js
@@ -0,0 +1,10 @@
+
+t = db.jstests_js5
+t.drop();
+
+t.save( { a : 1 } )
+t.save( { a : 2 } )
+
+assert.eq( 2 , t.find( { "$where" : "this.a" } ).count() , "A" );
+assert.eq( 0 , t.find( { "$where" : "this.b" } ).count() , "B" );
+assert.eq( 0 , t.find( { "$where" : "this.b > 45" } ).count() , "C" );
diff --git a/jstests/js7.js b/jstests/js7.js
new file mode 100644
index 00000000000..d12e207379e
--- /dev/null
+++ b/jstests/js7.js
@@ -0,0 +1,5 @@
+t = db.jstests_js7;
+t.drop();
+
+assert.eq( 17 , db.eval( function( foo ){ return foo; } , 17 ) );
+
diff --git a/jstests/js8.js b/jstests/js8.js
new file mode 100644
index 00000000000..da2dcc619cd
--- /dev/null
+++ b/jstests/js8.js
@@ -0,0 +1,14 @@
+t = db.jstests_js8;
+t.drop();
+
+t.save( { a : 1 , b : [ 2 , 3 , 4 ] } );
+
+assert.eq( 1 , t.find().length() , "A" );
+assert.eq( 1 , t.find( function(){ return this.a == 1; } ).length() , "B" );
+assert.eq( 1 , t.find( function(){ if ( ! this.b.length ) return true; return this.b.length == 3; } ).length() , "B2" );
+assert.eq( 1 , t.find( function(){ return this.b[0] == 2; } ).length() , "C" );
+assert.eq( 0 , t.find( function(){ return this.b[0] == 3; } ).length() , "D" );
+assert.eq( 1 , t.find( function(){ return this.b[1] == 3; } ).length() , "E" );
+
+
+assert(t.validate().valid);
diff --git a/jstests/js9.js b/jstests/js9.js
new file mode 100644
index 00000000000..8748667f527
--- /dev/null
+++ b/jstests/js9.js
@@ -0,0 +1,24 @@
+c = db.jstests_js9;
+c.drop();
+
+c.save( { a : 1 } );
+c.save( { a : 2 } );
+
+
+assert.eq( 2 , c.find().length() );
+assert.eq( 2 , c.find().count() );
+
+
+assert.eq( 2 ,
+ db.eval(
+ function(){
+ num = 0;
+ db.jstests_js9.find().forEach(
+ function(z){
+ num++;
+ }
+ );
+ return num;
+ }
+ )
+ )
diff --git a/jstests/json1.js b/jstests/json1.js
new file mode 100644
index 00000000000..054a9b46047
--- /dev/null
+++ b/jstests/json1.js
@@ -0,0 +1,28 @@
+
+x = { quotes:"a\"b" , nulls:null };
+eval( "y = " + tojson( x ) );
+assert.eq( tojson( x ) , tojson( y ) , "A" );
+assert.eq( typeof( x.nulls ) , typeof( y.nulls ) , "B" );
+
+// each type is parsed properly
+x = {"x" : null, "y" : true, "z" : 123, "w" : "foo", "a": undefined};
+assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo",\n\t"a" : undefined\n}' , "C" );
+
+x = {"x" : [], "y" : {}};
+assert.eq(tojson(x,"",false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}' , "D" );
+
+// nested
+x = {"x" : [{"x" : [1,2,[]], "z" : "ok", "y" : [[]]}, {"foo" : "bar"}], "y" : null};
+assert.eq(tojson(x), '{\n\t"x" : [\n\t\t{\n\t\t\t"x" : [\n\t\t\t\t1,\n\t\t\t\t2,\n\t\t\t\t[ ]\n\t\t\t],\n\t\t\t"z" : "ok",\n\t\t\t"y" : [\n\t\t\t\t[ ]\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\t"foo" : "bar"\n\t\t}\n\t],\n\t"y" : null\n}' , "E" );
+
+// special types
+x = {"x" : ObjectId("4ad35a73d2e34eb4fc43579a"), 'z' : /xd?/ig};
+assert.eq(tojson(x,"",false), '{\n\t"x" : ObjectId("4ad35a73d2e34eb4fc43579a"),\n\t"z" : /xd?/gi\n}' , "F" );
+
+// Timestamp type
+x = {"x" : Timestamp()};
+assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(0, 0)\n}' , "G")
+
+// Timestamp type, second
+x = {"x" : Timestamp(10,2)};
+assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(10, 2)\n}' , "H")
diff --git a/jstests/killop.js b/jstests/killop.js
new file mode 100644
index 00000000000..9567391598d
--- /dev/null
+++ b/jstests/killop.js
@@ -0,0 +1,62 @@
+/**
+ * Basic test of killop functionality.
+ *
+ * Theory of operation: Creates two operations that will take a long time, sends killop for those
+ * operations, and then attempts to infer that the operations died because of killop, and not for
+ * some other reason.
+ *
+ * NOTES:
+ * The long operations are count({$where: function () { while (1) ; } }). These operations do not
+ * terminate until the server determines that they've spent too much time in JS execution, typically
+ * after 30 seconds of wall clock time have passed. For these operations to take a long time, the
+ * counted collection must not be empty; hence an initial write to the collection is required.
+ */
+t = db.jstests_killop
+t.drop();
+
+t.save( {} );
+db.getLastError();
+
+/**
+ * This function filters for the operations that we're looking for, based on their state and
+ * the contents of their query object.
+ */
+function ops() {
+ p = db.currentOp().inprog;
+ ids = [];
+ for ( var i in p ) {
+ var o = p[ i ];
+ // We *can't* check for ns, b/c it's not guaranteed to be there unless the query is active, which
+ // it may not be in our polling cycle - particularly b/c we sleep every second in both the query and
+ // the assert
+ if ( ( o.active || o.waitingForLock ) && o.query && o.query.query && o.query.query.$where && o.query.count == "jstests_killop" ) {
+ ids.push( o.opid );
+ }
+ }
+ return ids;
+}
+
+var s1 = null;
+var s2 = null;
+try {
+ s1 = startParallelShell( "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
+ s2 = startParallelShell( "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
+
+ o = [];
+ assert.soon(function() { o = ops(); return o.length == 2; },
+ { toString: function () { return tojson(db.currentOp().inprog); } },
+ 10000);
+ db.killOp( o[ 0 ] );
+ db.killOp( o[ 1 ] );
+ start = new Date();
+}
+finally {
+ if (s1) s1();
+ if (s2) s2();
+}
+
+// don't want to pass if timeout killed the js function NOTE: This test will sometimes pass when the
+// JS engine did actually kill the operation, because the JS timeout is 30 seconds of wall clock
+// time from the moment the operation starts, but "start" measures from shortly after the test sends
+// the killop message to the server.
+assert( ( new Date() ) - start < 30000 );
diff --git a/jstests/loadserverscripts.js b/jstests/loadserverscripts.js
new file mode 100644
index 00000000000..792e1c9228a
--- /dev/null
+++ b/jstests/loadserverscripts.js
@@ -0,0 +1,57 @@
+
+// Test db.loadServerScripts()
+
+var testdb = db.getSisterDB("loadserverscripts");
+
+jsTest.log("testing db.loadServerScripts()");
+var x;
+
+// assert._debug = true;
+
+// clear out any data from old tests
+testdb.system.js.remove({});
+delete myfunc;
+delete myfunc2;
+
+x = testdb.system.js.findOne();
+assert.isnull(x, "Test for empty collection");
+
+// User functions should not be defined yet
+assert.eq( typeof myfunc, "undefined", "Checking that myfunc() is undefined" );
+assert.eq( typeof myfunc2, "undefined", "Checking that myfunc2() is undefined" );
+
+// Insert a function in the context of this process: make sure it's in the collection
+testdb.system.js.insert( { _id: "myfunc", "value": function(){ return "myfunc"; } } );
+x = testdb.system.js.count();
+assert.eq( x, 1, "Should now be one function in the system.js collection");
+
+// Load that function
+testdb.loadServerScripts();
+assert.eq( typeof myfunc, "function", "Checking that myfunc() loaded correctly" );
+
+// Make sure it works
+x = myfunc();
+assert.eq(x, "myfunc", "Checking that myfunc() returns the correct value");
+
+// Insert value into collection from another process
+var coproc = startParallelShell(
+ 'db.getSisterDB("loadserverscripts").system.js.insert' +
+ ' ( {_id: "myfunc2", "value": function(){ return "myfunc2"; } } );' +
+ 'db.getLastError();'
+ );
+// wait for results
+coproc();
+
+// Make sure the collection's been updated
+x = testdb.system.js.count();
+assert.eq( x, 2, "Should now be two functions in the system.js collection");
+
+
+// Load the new functions: test them as above
+testdb.loadServerScripts();
+assert.eq( typeof myfunc2, "function", "Checking that myfunc2() loaded correctly" );
+x = myfunc2();
+assert.eq(x, "myfunc2", "Checking that myfunc2() returns the correct value");
+
+jsTest.log("completed test of db.loadServerScripts()");
+
diff --git a/jstests/loglong.js b/jstests/loglong.js
new file mode 100644
index 00000000000..06cbf296c09
--- /dev/null
+++ b/jstests/loglong.js
@@ -0,0 +1,32 @@
+// test for SERVER-5013
+// make sure very long long lines get truncated
+
+t = db.loglong;
+t.drop();
+
+t.insert( { x : 1 } );
+
+n = 0;
+query = { x : [] }
+while ( Object.bsonsize( query ) < 30000 ) {
+ query.x.push( n++ );
+}
+
+before = db.adminCommand( { setParameter : 1 , logLevel : 1 } )
+
+t.findOne( query )
+
+x = db.adminCommand( { setParameter : 1 , logLevel : before.was } )
+assert.eq( 1 , x.was , tojson( x ) )
+
+log = db.adminCommand( { getLog : "global" } ).log
+
+found = false
+for ( i=log.length - 1; i>= 0; i-- ) {
+ if ( log[i].indexOf( "warning: log line attempted (16k)" ) >= 0 ) {
+ found = true;
+ break;
+ }
+}
+
+assert( found )
diff --git a/jstests/logprocessdetails.js b/jstests/logprocessdetails.js
new file mode 100644
index 00000000000..607b1acb057
--- /dev/null
+++ b/jstests/logprocessdetails.js
@@ -0,0 +1,39 @@
+/**
+ * SERVER-7140 test. Checks that process info is re-logged on log rotation
+ */
+
+/**
+ * Checks an array for match against regex.
+ * Returns true if regex matches a string in the array
+ */
+doesLogMatchRegex = function(logArray, regex) {
+ for (var i = (logArray.length - 1); i >= 0; i--){
+ var regexInLine = regex.exec(logArray[i]);
+ if (regexInLine != null){
+ return true;
+ }
+ }
+ return false;
+}
+
+doTest = function() {
+ var log = db.adminCommand({ getLog: 'global'});
+ //this regex will need to change if output changes
+ var re = new RegExp(".*conn.*options.*");
+
+ assert.neq(null, log);
+ var lineCount = log.totalLinesWritten;
+ assert.neq(0, lineCount);
+
+ var result = db.adminCommand({ logRotate: 1});
+ assert.eq(1, result.ok);
+
+ var log2 = db.adminCommand({ getLog: 'global'});
+ assert.neq(null, log2);
+ assert.gte(log2.totalLinesWritten, lineCount);
+
+ var informationIsLogged = doesLogMatchRegex(log2.log, re);
+ assert.eq(informationIsLogged, true, "Process details not present in RAM log");
+}
+
+doTest();
diff --git a/jstests/long_index_rename.js b/jstests/long_index_rename.js
new file mode 100644
index 00000000000..41e1bfd4a3b
--- /dev/null
+++ b/jstests/long_index_rename.js
@@ -0,0 +1,18 @@
+// SERVER-7720 Building an index with a too-long name should always fail
+// Formerly, we would allow an index that already existed to be "created" with too long a name,
+// but this caused secondaries to crash when replicating what should be a bad createIndex command.
+// Here we test that the too-long name is rejected in this situation as well
+
+t = db.long_index_rename;
+t.drop();
+
+for (i = 1; i < 10; i++) {
+ t.save({a:i});
+}
+
+t.createIndex({a:1}, {name: "aaa"});
+t.createIndex({a:1}, {name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
+var result = db.getLastErrorObj();
+assert( result.code >= 0 );
+assert( result.err.indexOf( "too long" ) >= 0 );
diff --git a/jstests/map1.js b/jstests/map1.js
new file mode 100644
index 00000000000..1db53cd3848
--- /dev/null
+++ b/jstests/map1.js
@@ -0,0 +1,24 @@
+
+function basic1( key , lookup , shouldFail){
+ var m = new Map();
+ m.put( key , 17 );
+
+ var out = m.get( lookup || key );
+
+ if ( ! shouldFail ){
+ assert.eq( 17 , out , "basic1 missing: " + tojson( key ) );
+ }
+ else {
+ assert.isnull( out , "basic1 not missing: " + tojson( key ) );
+ }
+
+}
+
+basic1( 6 )
+basic1( new Date() )
+basic1( "eliot" )
+basic1( { a : 1 } );
+basic1( { a : 1 , b : 1 } )
+basic1( { a : 1 } , { b : 1 } , true )
+basic1( { a : 1 , b : 1 } , { b : 1 , a : 1 } , true )
+basic1( { a : 1 } , { a : 2 } , true );
diff --git a/jstests/max_time_ms.js b/jstests/max_time_ms.js
new file mode 100644
index 00000000000..1d0cca7949f
--- /dev/null
+++ b/jstests/max_time_ms.js
@@ -0,0 +1,303 @@
+// Tests query/command option $maxTimeMS.
+
+var t = db.max_time_ms;
+var exceededTimeLimit = 50; // ErrorCodes::ExceededTimeLimit
+var cursor;
+var res;
+
+//
+// Simple positive test for query: a ~300ms query with a 100ms time limit should be aborted.
+//
+
+t.drop();
+t.insert([{},{},{}]);
+cursor = t.find({$where: function() { sleep(100); return true; }});
+cursor.maxTimeMS(100);
+assert.throws(function() { cursor.itcount(); }, [], "expected query to abort due to time limit");
+
+//
+// Simple negative test for query: a ~300ms query with a 10s time limit should not hit the time
+// limit.
+//
+
+t.drop();
+t.insert([{},{},{}]);
+cursor = t.find({$where: function() { sleep(100); return true; }});
+cursor.maxTimeMS(10*1000);
+assert.doesNotThrow(function() { cursor.itcount(); },
+ [],
+ "expected query to not hit the time limit");
+
+//
+// Simple positive test for getmore:
+// - Issue a find() that returns 2 batches: a fast batch, then a slow batch.
+// - The find() has a 2-second time limit; the first batch should run "instantly", but the second
+// batch takes ~6 seconds, so the getmore should be aborted.
+//
+
+t.drop();
+t.insert([{},{},{}]); // fast batch
+t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
+cursor = t.find({$where: function() {
+ if (this.slow) {
+ sleep(2*1000);
+ }
+ return true;
+}});
+cursor.batchSize(3);
+cursor.maxTimeMS(2*1000);
+assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 1 (query) to not hit the time limit");
+assert.throws(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 2 (getmore) to abort due to time limit");
+
+//
+// Simple negative test for getmore:
+// - Issue a find() that returns 2 batches: a fast batch, then a slow batch.
+// - The find() has a 10-second time limit; the first batch should run "instantly", and the second
+// batch takes only ~2 seconds, so both the query and getmore should not hit the time limit.
+//
+
+t.drop();
+t.insert([{},{},{}]); // fast batch
+t.insert([{},{},{slow: true}]); // slow batch
+cursor = t.find({$where: function() {
+ if (this.slow) {
+ sleep(2*1000);
+ }
+ return true;
+}});
+cursor.batchSize(3);
+cursor.maxTimeMS(10*1000);
+assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 1 (query) to not hit the time limit");
+assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 2 (getmore) to not hit the time limit");
+
+//
+// Many-batch positive test for getmore:
+// - Issue a many-batch find() with a 6-second time limit where the results take 10 seconds to
+// generate; one of the later getmore ops should be aborted.
+//
+
+t.drop();
+for (var i=0; i<5; i++) {
+ t.insert([{},{},{slow:true}]);
+}
+cursor = t.find({$where: function() {
+ if (this.slow) {
+ sleep(2*1000);
+ }
+ return true;
+}});
+cursor.batchSize(3);
+cursor.maxTimeMS(6*1000);
+assert.throws(function() { cursor.itcount(); }, [], "expected find() to abort due to time limit");
+
+//
+// Many-batch negative test for getmore:
+// - Issue a many-batch find() with a 20-second time limit where the results take 10 seconds to
+// generate; the find() should not hit the time limit.
+//
+
+t.drop();
+for (var i=0; i<5; i++) {
+ t.insert([{},{},{slow:true}]);
+}
+cursor = t.find({$where: function() {
+ if (this.slow) {
+ sleep(2*1000);
+ }
+ return true;
+}});
+cursor.batchSize(3);
+cursor.maxTimeMS(20*1000);
+assert.doesNotThrow(function() { cursor.itcount(); },
+ [],
+ "expected find() to not hit the time limit");
+
+//
+// Simple positive test for commands: a ~300ms command with a 100ms time limit should be aborted.
+//
+
+t.drop();
+res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 100});
+assert(res.ok == 0 && res.code == exceededTimeLimit,
+ "expected sleep command to abort due to time limit, ok=" + res.ok + ", code=" + res.code);
+
+//
+// Simple negative test for commands: a ~300ms command with a 10s time limit should not hit the
+// time limit.
+//
+
+t.drop();
+res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 10*1000});
+assert(res.ok == 1,
+ "expected sleep command to not hit the time limit, ok=" + res.ok + ", code=" + res.code);
+
+//
+// Tests for input validation.
+//
+
+t.drop();
+t.insert({});
+
+// Verify lower boundary for acceptable input (0 is acceptable, 1 isn't).
+
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(0).itcount(); });
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(0)).itcount(); });
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(0)).itcount(); });
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: 0}).ok);
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(0)}).ok);
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(0)}).ok);
+
+assert.throws.automsg(function() { t.find().maxTimeMS(-1).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(-1)).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(-1)).itcount(); });
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -1}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(-1)}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(-1)}).ok);
+
+// Verify upper boundary for acceptable input (2^31-1 is acceptable, 2^31 isn't).
+
+var maxValue = Math.pow(2,31)-1;
+
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(maxValue).itcount(); });
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue)).itcount(); });
+assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue)).itcount(); });
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue}).ok);
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue)}).ok);
+assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue)}).ok);
+
+assert.throws.automsg(function() { t.find().maxTimeMS(maxValue+1).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue+1)).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue+1)).itcount(); });
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue+1}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue+1)}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue+1)}).ok);
+
+// Verify invalid values are rejected.
+assert.throws.automsg(function() { t.find().maxTimeMS(0.1).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(-0.1).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS().itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS("").itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS(true).itcount(); });
+assert.throws.automsg(function() { t.find().maxTimeMS({}).itcount(); });
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: 0.1}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -0.1}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: undefined}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: ""}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: true}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: {}}).ok);
+
+// Verify that the maxTimeMS command argument can be sent with $query-wrapped commands.
+cursor = t.getDB().$cmd.find({ping: 1, maxTimeMS: 0}).limit(-1);
+cursor._ensureSpecial();
+assert.eq(1, cursor.next().ok);
+
+// Verify that the server rejects invalid command argument $maxTimeMS.
+cursor = t.getDB().$cmd.find({ping: 1, $maxTimeMS: 0}).limit(-1);
+cursor._ensureSpecial();
+assert.eq(0, cursor.next().ok);
+
+// Verify that the $maxTimeMS query option can't be sent with $query-wrapped commands.
+cursor = t.getDB().$cmd.find({ping: 1}).limit(-1).maxTimeMS(0);
+cursor._ensureSpecial();
+assert.eq(0, cursor.next().ok);
+
+//
+// Tests for fail points maxTimeAlwaysTimeOut and maxTimeNeverTimeOut.
+//
+
+// maxTimeAlwaysTimeOut positive test for command.
+t.drop();
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
+ mode: "alwaysOn"}).ok);
+res = t.getDB().runCommand({ping: 1, maxTimeMS: 10*1000});
+assert(res.ok == 0 && res.code == exceededTimeLimit,
+ "expected command to trigger maxTimeAlwaysTimeOut fail point, ok=" + res.ok + ", code="
+ + res.code);
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
+
+// maxTimeNeverTimeOut positive test for command.
+t.drop();
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
+ mode: "alwaysOn"}).ok);
+res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 100});
+assert(res.ok == 1,
+ "expected command to trigger maxTimeNeverTimeOut fail point, ok=" + res.ok + ", code="
+ + res.code);
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
+
+// maxTimeAlwaysTimeOut positive test for query.
+t.drop();
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
+ mode: "alwaysOn"}).ok);
+assert.throws(function() { t.find().maxTimeMS(10*1000).itcount(); },
+ [],
+ "expected query to trigger maxTimeAlwaysTimeOut fail point");
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
+
+// maxTimeNeverTimeOut positive test for query.
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
+ mode: "alwaysOn"}).ok);
+t.drop();
+t.insert([{},{},{}]);
+cursor = t.find({$where: function() { sleep(100); return true; }});
+cursor.maxTimeMS(100);
+assert.doesNotThrow(function() { cursor.itcount(); },
+ [],
+ "expected query to trigger maxTimeNeverTimeOut fail point");
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
+
+// maxTimeAlwaysTimeOut positive test for getmore.
+t.drop();
+t.insert([{},{},{}]);
+cursor = t.find().maxTimeMS(10*1000).batchSize(2);
+assert.doesNotThrow.automsg(function() { cursor.next(); cursor.next(); });
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
+ mode: "alwaysOn"}).ok);
+assert.throws(function() { cursor.next(); },
+ [],
+ "expected getmore to trigger maxTimeAlwaysTimeOut fail point");
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
+
+// maxTimeNeverTimeOut positive test for getmore.
+t.drop();
+t.insert([{},{},{}]); // fast batch
+t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
+cursor = t.find({$where: function() {
+ if (this.slow) {
+ sleep(2*1000);
+ }
+ return true;
+}});
+cursor.batchSize(3);
+cursor.maxTimeMS(2*1000);
+assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 1 (query) to not hit the time limit");
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
+ mode: "alwaysOn"}).ok);
+assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
+ [],
+ "expected batch 2 (getmore) to trigger maxTimeNeverTimeOut fail point");
+assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
+
+//
+// Test that maxTimeMS is accepted by commands that have an option whitelist.
+//
+
+// "aggregate" command.
+res = t.runCommand("aggregate", {pipeline: [], maxTimeMS: 60*1000});
+assert(res.ok == 1,
+ "expected aggregate with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
+
+// "collMod" command.
+res = t.runCommand("collMod", {usePowerOf2Sizes: true, maxTimeMS: 60*1000});
+assert(res.ok == 1,
+ "expected collmod with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
diff --git a/jstests/maxscan.js b/jstests/maxscan.js
new file mode 100644
index 00000000000..3d15b26f638
--- /dev/null
+++ b/jstests/maxscan.js
@@ -0,0 +1,18 @@
+
+t = db.maxscan;
+t.drop();
+
+N = 100;
+for ( i=0; i<N; i++ ){
+ t.insert( { _id : i , x : i % 10 } );
+}
+
+assert.eq( N , t.find().itcount() , "A" )
+assert.eq( 50 , t.find()._addSpecial( "$maxScan" , 50 ).itcount() , "B" )
+
+assert.eq( 10 , t.find( { x : 2 } ).itcount() , "C" )
+assert.eq( 5 , t.find( { x : 2 } )._addSpecial( "$maxScan" , 50 ).itcount() , "D" )
+
+t.ensureIndex({x: 1});
+assert.eq( 10, t.find( { x : 2 } ).hint({x:1})._addSpecial( "$maxScan" , N ).itcount() , "E" )
+assert.eq( 0, t.find( { x : 2 } ).hint({x:1})._addSpecial( "$maxScan" , 1 ).itcount() , "E" )
diff --git a/jstests/minmax.js b/jstests/minmax.js
new file mode 100644
index 00000000000..d84a6e42855
--- /dev/null
+++ b/jstests/minmax.js
@@ -0,0 +1,54 @@
+// test min / max query parameters
+
+addData = function() {
+ t.save( { a: 1, b: 1 } );
+ t.save( { a: 1, b: 2 } );
+ t.save( { a: 2, b: 1 } );
+ t.save( { a: 2, b: 2 } );
+}
+
+t = db.jstests_minmax;
+t.drop();
+t.ensureIndex( { a: 1, b: 1 } );
+addData();
+
+printjson( t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray() );
+assert.eq( 1, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray().length );
+assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1.5 } ).toArray().length );
+assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 2 } ).toArray().length );
+
+// just one bound
+assert.eq( 3, t.find().min( { a: 1, b: 2 } ).toArray().length );
+assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).toArray().length );
+assert.eq( 3, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: 1 } ).toArray().length );
+assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).hint( { a: 1, b: 1 } ).toArray().length );
+
+t.drop();
+t.ensureIndex( { a: 1, b: -1 } );
+addData();
+assert.eq( 4, t.find().min( { a: 1, b: 2 } ).toArray().length );
+assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).toArray().length );
+assert.eq( 1, t.find().min( { a: 2, b: 1 } ).toArray().length );
+assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).toArray().length );
+assert.eq( 4, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 1, t.find().min( { a: 2, b: 1 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
+
+// hint doesn't match
+assert.throws( function() { t.find().min( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
+assert.throws( function() { t.find().min( { a: 1, b: 1 } ).max( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
+assert.throws( function() { t.find().min( { b: 1 } ).max( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray() } );
+assert.throws( function() { t.find().min( { a: 1 } ).hint( { $natural: 1 } ).toArray() } );
+assert.throws( function() { t.find().max( { a: 1 } ).hint( { $natural: 1 } ).toArray() } );
+
+// Reverse direction scan of the a:1 index between a:6 (inclusive) and a:3 (exclusive).
+t.drop();
+t.ensureIndex( { a:1 } );
+for( i = 0; i < 10; ++i ) {
+ t.save( { _id:i, a:i } );
+}
+if ( 0 ) { // SERVER-3766
+reverseResult = t.find().min( { a:6 } ).max( { a:3 } ).sort( { a:-1 } ).hint( { a:1 } ).toArray();
+assert.eq( [ { _id:6, a:6 }, { _id:5, a:5 }, { _id:4, a:4 } ], reverseResult );
+}
diff --git a/jstests/mod1.js b/jstests/mod1.js
new file mode 100644
index 00000000000..46e3482bc72
--- /dev/null
+++ b/jstests/mod1.js
@@ -0,0 +1,25 @@
+
+t = db.mod1;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 11 } );
+t.save( { a : 20 } );
+t.save( { a : "asd" } );
+t.save( { a : "adasdas" } );
+
+assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "A1" );
+assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "A2" );
+assert.eq( 6 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "A3" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "B1" );
+assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "B2" );
+
+assert.eq( 1 , t.find( "this.a % 10 == 0" ).itcount() , "B3" );
+assert.eq( 1 , t.find( { a : { $mod : [ 10 , 0 ] } } ).itcount() , "B4" );
+assert.eq( 4 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "B5" );
+
+assert.eq( 1, t.find( { a: { $gt: 5, $mod : [ 10, 1 ] } } ).itcount() ); \ No newline at end of file
diff --git a/jstests/mr1.js b/jstests/mr1.js
new file mode 100644
index 00000000000..33390a6187a
--- /dev/null
+++ b/jstests/mr1.js
@@ -0,0 +1,184 @@
+
+t = db.mr1;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+emit = printjson;
+
+function d( x ){
+ printjson( x );
+}
+
+ks = "_id";
+if ( db.version() == "1.1.1" )
+ ks = "key";
+
+
+m = function(){
+ this.tags.forEach(
+ function(z){
+ emit( z , { count : 1 } );
+ }
+ );
+};
+
+m2 = function(){
+ for ( var i=0; i<this.tags.length; i++ ){
+ emit( this.tags[i] , 1 );
+ }
+};
+
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+r2 = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i];
+ }
+ return total;
+};
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
+d( res );
+if ( ks == "_id" ) assert( res.ok , "not ok" );
+assert.eq( 4 , res.counts.input , "A" );
+x = db[res.result];
+
+assert.eq( 3 , x.find().count() , "B" );
+x.find().forEach( d );
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+d( z );
+assert.eq( 3 , Object.keySet( z ).length , "C" );
+assert.eq( 2 , z.a , "D" );
+assert.eq( 3 , z.b , "E" );
+assert.eq( 3 , z.c , "F" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
+d( res );
+assert.eq( 2 , res.counts.input , "B" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+assert.eq( 1 , z.a , "C1" );
+assert.eq( 1 , z.b , "C2" );
+assert.eq( 2 , z.c , "C3" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
+d( res );
+assert.eq( 2 , res.counts.input , "B" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value; } );
+assert.eq( 1 , z.a , "C1z" );
+assert.eq( 1 , z.b , "C2z" );
+assert.eq( 2 , z.c , "C3z" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r , query : { x : { "$gt" : 2 } } } );
+d( res );
+assert.eq( 2 , res.counts.input , "B2" );
+assert.eq( "mr1_foo" , res.result , "B2-c" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+assert.eq( 1 , z.a , "C1a" );
+assert.eq( 1 , z.b , "C2a" );
+assert.eq( 2 , z.c , "C3a" );
+x.drop();
+
+for ( i=5; i<1000; i++ ){
+ t.save( { x : i , tags : [ "b" , "d" ] } );
+}
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
+d( res );
+assert.eq( 999 , res.counts.input , "Z1" );
+x = db[res.result];
+x.find().forEach( d )
+assert.eq( 4 , x.find().count() , "Z2" );
+assert.eq( "a,b,c,d" , x.distinct( ks ) , "Z3" );
+
+function getk( k ){
+ var o = {};
+ o[ks] = k;
+ return x.findOne( o );
+}
+
+assert.eq( 2 , getk( "a" ).value.count , "ZA" );
+assert.eq( 998 , getk( "b" ).value.count , "ZB" );
+assert.eq( 3 , getk( "c" ).value.count , "ZC" );
+assert.eq( 995 , getk( "d" ).value.count , "ZD" );
+x.drop();
+
+if ( true ){
+ printjson( db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , verbose : true , out : "mr1_out" } ) );
+}
+
+print( "t1: " + Date.timeFunc(
+ function(){
+ var out = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
+ if ( ks == "_id" ) assert( out.ok , "XXX : " + tojson( out ) );
+ db[out.result].drop();
+ } , 10 ) + " (~500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } , 10 ) );
+
+
+
+// test doesn't exist
+res = db.runCommand( { mapreduce : "lasjdlasjdlasjdjasldjalsdj12e" , map : m , reduce : r , out : "mr1_out" } );
+assert( ! res.ok , "should be not ok" );
+
+if ( true ){
+ correct = {};
+
+ for ( i=0; i<20000; i++ ){
+ k = "Z" + i % 10000;
+ if ( correct[k] )
+ correct[k]++;
+ else
+ correct[k] = 1;
+ t.save( { x : i , tags : [ k ] } );
+ }
+
+ res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r } );
+ d( res );
+ print( "t2: " + res.timeMillis + " (~3500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } ) );
+ x = db[res.result];
+ z = {};
+ x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+ for ( zz in z ){
+ if ( zz.indexOf( "Z" ) == 0 ){
+ assert.eq( correct[zz] , z[zz] , "ZZ : " + zz );
+ }
+ }
+ x.drop();
+
+ res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m2 , reduce : r2 , out : "mr1_out" } );
+ d(res);
+ print( "t3: " + res.timeMillis + " (~3500 on 2.8ghz)" );
+
+ res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , out : { inline : true } } );
+ print( "t4: " + res.timeMillis );
+
+}
+
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
+assert( res.ok , "should be ok" );
+
+t.drop();
+t1 = db.mr1_out;
+t1.drop(); \ No newline at end of file
diff --git a/jstests/mr2.js b/jstests/mr2.js
new file mode 100644
index 00000000000..c15d8abdfae
--- /dev/null
+++ b/jstests/mr2.js
@@ -0,0 +1,83 @@
+
+
+t = db.mr2;
+t.drop();
+
+t.save( { comments : [ { who : "a" , txt : "asdasdasd" } ,
+ { who : "b" , txt : "asdasdasdasdasdasdas" } ] } );
+
+t.save( { comments : [ { who : "b" , txt : "asdasdasdaaa" } ,
+ { who : "c" , txt : "asdasdasdaasdasdas" } ] } );
+
+
+
+function m(){
+ for ( var i=0; i<this.comments.length; i++ ){
+ var c = this.comments[i];
+ emit( c.who , { totalSize : c.txt.length , num : 1 } );
+ }
+}
+
+function r( who , values ){
+ var n = { totalSize : 0 , num : 0 };
+ for ( var i=0; i<values.length; i++ ){
+ n.totalSize += values[i].totalSize;
+ n.num += values[i].num;
+ }
+ return n;
+}
+
+function reformat( r ){
+ var x = {};
+ var cursor;
+ if ( r.results )
+ cursor = r.results;
+ else
+ cursor = r.find();
+ cursor.forEach(
+ function(z){
+ x[z._id] = z.value;
+ }
+ );
+ return x;
+}
+
+function f( who , res ){
+ res.avg = res.totalSize / res.num;
+ return res;
+}
+
+res = t.mapReduce( m , r , { finalize : f , out : "mr2_out" } );
+printjson( res )
+x = reformat( res );
+assert.eq( 9 , x.a.avg , "A1" );
+assert.eq( 16 , x.b.avg , "A2" );
+assert.eq( 18 , x.c.avg , "A3" );
+res.drop();
+
+res = t.mapReduce( m , r , { finalize : f , out : { inline : 1 } } );
+printjson( res )
+x = reformat( res );
+assert.eq( 9 , x.a.avg , "B1" );
+assert.eq( 16 , x.b.avg , "B2" );
+assert.eq( 18 , x.c.avg , "B3" );
+res.drop();
+assert( ! ( "result" in res ) , "B4" )
+
+res = t.mapReduce( m , r , { finalize : f , out : "mr2_out", jsMode: true } );
+printjson( res )
+x = reformat( res );
+assert.eq( 9 , x.a.avg , "A1" );
+assert.eq( 16 , x.b.avg , "A2" );
+assert.eq( 18 , x.c.avg , "A3" );
+res.drop();
+
+res = t.mapReduce( m , r , { finalize : f , out : { inline : 5 }, jsMode: true } );
+printjson( res )
+x = reformat( res );
+assert.eq( 9 , x.a.avg , "B1" );
+assert.eq( 16 , x.b.avg , "B2" );
+assert.eq( 18 , x.c.avg , "B3" );
+res.drop();
+assert( ! ( "result" in res ) , "B4" )
+
diff --git a/jstests/mr3.js b/jstests/mr3.js
new file mode 100644
index 00000000000..3b0a918a4f3
--- /dev/null
+++ b/jstests/mr3.js
@@ -0,0 +1,73 @@
+
+t = db.mr3;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+m = function( n , x ){
+ x = x || 1;
+ this.tags.forEach(
+ function(z){
+ for ( var i=0; i<x; i++ )
+ emit( z , { count : n || 1 } );
+ }
+ );
+};
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+res = t.mapReduce( m , r , { out : "mr3_out" } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "A1" );
+assert.eq( 2 , z.a.count , "A2" );
+assert.eq( 3 , z.b.count , "A3" );
+assert.eq( 3 , z.c.count , "A4" );
+
+res.drop();
+
+res = t.mapReduce( m , r , { out : "mr3_out" , mapparams : [ 2 , 2 ] } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "B1" );
+assert.eq( 8 , z.a.count , "B2" );
+assert.eq( 12 , z.b.count , "B3" );
+assert.eq( 12 , z.c.count , "B4" );
+
+res.drop();
+
+// -- just some random tests
+
+realm = m;
+
+m = function(){
+ emit( this._id , 1 );
+}
+res = t.mapReduce( m , r , { out : "mr3_out" } );
+res.drop();
+
+m = function(){
+ emit( this._id , this.xzz.a );
+}
+
+before = db.getCollectionNames().length;
+assert.throws( function(){ t.mapReduce( m , r , { out : "mr3_out" } ); } );
+assert.eq( before , db.getCollectionNames().length , "after throw crap" );
+
+
+m = realm;
+r = function( k , v ){
+ return v.x.x.x;
+}
+before = db.getCollectionNames().length;
+assert.throws( function(){ t.mapReduce( m , r , "mr3_out" ) } )
+assert.eq( before , db.getCollectionNames().length , "after throw crap" );
diff --git a/jstests/mr4.js b/jstests/mr4.js
new file mode 100644
index 00000000000..78c8bce8953
--- /dev/null
+++ b/jstests/mr4.js
@@ -0,0 +1,45 @@
+
+t = db.mr4;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+m = function(){
+ this.tags.forEach(
+ function(z){
+ emit( z , { count : xx } );
+ }
+ );
+};
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+res = t.mapReduce( m , r , { out : "mr4_out" , scope : { xx : 1 } } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "A1" );
+assert.eq( 2 , z.a.count , "A2" );
+assert.eq( 3 , z.b.count , "A3" );
+assert.eq( 3 , z.c.count , "A4" );
+
+res.drop();
+
+
+res = t.mapReduce( m , r , { scope : { xx : 2 } , out : "mr4_out" } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "A1" );
+assert.eq( 4 , z.a.count , "A2" );
+assert.eq( 6 , z.b.count , "A3" );
+assert.eq( 6 , z.c.count , "A4" );
+
+res.drop();
diff --git a/jstests/mr5.js b/jstests/mr5.js
new file mode 100644
index 00000000000..50a63d1d55b
--- /dev/null
+++ b/jstests/mr5.js
@@ -0,0 +1,58 @@
+
+t = db.mr5;
+t.drop();
+
+t.save( { "partner" : 1, "visits" : 9 } )
+t.save( { "partner" : 2, "visits" : 9 } )
+t.save( { "partner" : 1, "visits" : 11 } )
+t.save( { "partner" : 1, "visits" : 30 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+
+m = function(){
+ emit( this.partner , { stats : [ this.visits ] } )
+}
+
+r = function( k , v ){
+ var stats = [];
+ var total = 0;
+ for ( var i=0; i<v.length; i++ ){
+ for ( var j in v[i].stats ) {
+ stats.push( v[i].stats[j] )
+ total += v[i].stats[j];
+ }
+ }
+ return { stats : stats , total : total }
+}
+
+res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
+//res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "A1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" )
+
+
+res.drop()
+
+m = function(){
+ var x = "partner";
+ var y = "visits";
+ emit( this[x] , { stats : [ this[y] ] } )
+}
+
+
+
+res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
+//res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "B1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" )
+
+
+res.drop()
+
+
diff --git a/jstests/mr_bigobject.js b/jstests/mr_bigobject.js
new file mode 100644
index 00000000000..97195e2542e
--- /dev/null
+++ b/jstests/mr_bigobject.js
@@ -0,0 +1,46 @@
+
+t = db.mr_bigobject
+t.drop()
+
+// v8 requires a large initial string; otherwise the string is stored as UTF-16
+var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+var s = large;
+while ( s.length < ( 6 * 1024 * 1024 ) ){
+ s += large;
+}
+
+for ( i=0; i<5; i++ )
+ t.insert( { _id : i , s : s } )
+
+m = function(){
+ emit( 1 , this.s + this.s );
+}
+
+r = function( k , v ){
+ return 1;
+}
+
+assert.throws( function(){ r = t.mapReduce( m , r , "mr_bigobject_out" ); } , null , "emit should fail" )
+
+
+m = function(){
+ emit( 1 , this.s );
+}
+
+assert.eq( { 1 : 1 } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" )
+
+r = function( k , v ){
+ total = 0;
+ for ( var i=0; i<v.length; i++ ){
+ var x = v[i];
+ if ( typeof( x ) == "number" )
+ total += x
+ else
+ total += x.length;
+ }
+ return total;
+}
+
+assert.eq( { 1 : t.count() * s.length } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" )
+
+t.drop()
diff --git a/jstests/mr_comments.js b/jstests/mr_comments.js
new file mode 100644
index 00000000000..f6a06994f55
--- /dev/null
+++ b/jstests/mr_comments.js
@@ -0,0 +1,28 @@
+
+t = db.mr_comments
+t.drop()
+
+t.insert( { foo : 1 } )
+t.insert( { foo : 1 } )
+t.insert( { foo : 2 } )
+
+res = db.runCommand(
+ { mapreduce : "mr_comments",
+ map : "// This will fail\n\n // Emit some stuff\n emit(this.foo, 1)\n",
+ reduce : function(key, values){
+ return Array.sum(values);
+ },
+ out: "mr_comments_out"
+ });
+assert.eq( 3 , res.counts.emit )
+
+res = db.runCommand(
+ { mapreduce : "mr_comments",
+ map : "// This will fail\nfunction(){\n // Emit some stuff\n emit(this.foo, 1)\n}\n",
+ reduce : function(key, values){
+ return Array.sum(values);
+ },
+ out: "mr_comments_out"
+ });
+
+assert.eq( 3 , res.counts.emit )
diff --git a/jstests/mr_drop.js b/jstests/mr_drop.js
new file mode 100644
index 00000000000..8c4f9f8846f
--- /dev/null
+++ b/jstests/mr_drop.js
@@ -0,0 +1,38 @@
+// Drop a collection while a map/reduce job is running against it. SERVER-6757
+
+t = db.jstests_mr_drop;
+t.drop();
+
+Random.setRandomSeed();
+
+// Set sleep times for different stages of the map/reduce job. The collection drop will occur
+// during different stages of map/reduce depending on these sleep values.
+mapSleep = Random.randInt( 4 );
+reduceSleep = Random.randInt( 4 );
+finalizeSleep = Random.randInt( 4 );
+
+// Insert some documents.
+for( i = 0; i < 10000; ++i ) {
+ t.save( { key:parseInt( i / 2 ),
+ mapSleep:mapSleep,
+ reduceSleep:reduceSleep,
+ finalizeSleep:finalizeSleep } );
+}
+db.getLastError();
+
+// Schedule a collection drop two seconds in the future.
+s = startParallelShell( "sleep( 2000 ); db.jstests_mr_drop.drop();" );
+
+// Run the map/reduce job. Check for command failure internally. The job succeeds even if the
+// source collection is dropped in progress.
+t.mapReduce( function() { sleep( this.mapSleep ); emit( this.key, this ); },
+ function( key, vals ) { sleep( vals[ 0 ].reduceSleep ); return vals[ 0 ]; },
+ { finalize:function( key, value ) { sleep( value.finalizeSleep ); return value; },
+ out:'jstests_mr_drop_out' }
+ );
+
+// Wait for the parallel shell to finish.
+s();
+
+// Ensure the server is still alive. Under SERVER-6757 the server can crash.
+assert( !db.getLastError() );
diff --git a/jstests/mr_errorhandling.js b/jstests/mr_errorhandling.js
new file mode 100644
index 00000000000..c4e1137b4c6
--- /dev/null
+++ b/jstests/mr_errorhandling.js
@@ -0,0 +1,49 @@
+
+t = db.mr_errorhandling;
+t.drop();
+
+t.save( { a : [ 1 , 2 , 3 ] } )
+t.save( { a : [ 2 , 3 , 4 ] } )
+
+m_good = function(){
+ for ( var i=0; i<this.a.length; i++ ){
+ emit( this.a[i] , 1 );
+ }
+}
+
+m_bad = function(){
+ for ( var i=0; i<this.a.length; i++ ){
+ emit( this.a[i] );
+ }
+}
+
+r = function( k , v ){
+ var total = 0;
+ for ( var i=0; i<v.length; i++ )
+ total += v[i];
+ return total;
+}
+
+res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
+assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
+res.drop()
+
+res = null;
+
+theerror = null;
+try {
+ res = t.mapReduce( m_bad , r , "mr_errorhandling_out" );
+}
+catch ( e ){
+ theerror = e.toString();
+}
+assert.isnull( res , "B1" );
+assert( theerror , "B2" );
+assert( theerror.indexOf( "emit" ) >= 0 , "B3" );
+
+// test things are still in an ok state
+res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
+assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
+res.drop()
+
+assert.throws( function(){ t.mapReduce( m_good , r , { out : "xxx" , query : "foo" } ); } )
diff --git a/jstests/mr_index.js b/jstests/mr_index.js
new file mode 100644
index 00000000000..521d44d29f0
--- /dev/null
+++ b/jstests/mr_index.js
@@ -0,0 +1,43 @@
+
+t = db.mr_index
+t.drop()
+
+outName = "mr_index_out"
+out = db[outName]
+out.drop()
+
+t.insert( { tags : [ 1 ] } )
+t.insert( { tags : [ 1 , 2 ] } )
+t.insert( { tags : [ 1 , 2 , 3 ] } )
+t.insert( { tags : [ 3 ] } )
+t.insert( { tags : [ 2 , 3 ] } )
+t.insert( { tags : [ 2 , 3 ] } )
+t.insert( { tags : [ 1 , 2 ] } )
+
+m = function(){
+ for ( i=0; i<this.tags.length; i++ )
+ emit( this.tags[i] , 1 );
+}
+
+r = function( k , vs ){
+ return Array.sum( vs );
+}
+
+ex = function(){
+ return out.find().sort( { value : 1 } ).explain()
+}
+
+res = t.mapReduce( m , r , { out : outName } )
+
+assert.eq( "BasicCursor" , ex().cursor , "A1" )
+out.ensureIndex( { value : 1 } )
+assert.eq( "BtreeCursor value_1" , ex().cursor , "A2" )
+assert.eq( 3 , ex().n , "A3" )
+
+res = t.mapReduce( m , r , { out : outName } )
+
+assert.eq( "BtreeCursor value_1" , ex().cursor , "B1" )
+assert.eq( 3 , ex().n , "B2" )
+res.drop()
+
+
diff --git a/jstests/mr_index2.js b/jstests/mr_index2.js
new file mode 100644
index 00000000000..a8d845ed69d
--- /dev/null
+++ b/jstests/mr_index2.js
@@ -0,0 +1,22 @@
+
+t = db.mr_index2;
+t.drop()
+
+t.save( { arr : [1, 2] } )
+
+map = function() { emit(this._id, 1) }
+reduce = function(k,vals) { return Array.sum( vals ); }
+
+res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : {} })
+assert.eq( 1 ,res.counts.input , "A" )
+res.drop()
+
+res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } })
+assert.eq( 1 ,res.counts.input , "B" )
+res.drop()
+
+t.ensureIndex({arr:1})
+res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } })
+assert.eq( 1 ,res.counts.input , "C" )
+res.drop();
+
diff --git a/jstests/mr_index3.js b/jstests/mr_index3.js
new file mode 100644
index 00000000000..0607cc8aa84
--- /dev/null
+++ b/jstests/mr_index3.js
@@ -0,0 +1,50 @@
+
+t = db.mr_index3
+t.drop();
+
+t.insert( { _id : 1, name : 'name1', tags : ['dog', 'cat'] } );
+t.insert( { _id : 2, name : 'name2', tags : ['cat'] } );
+t.insert( { _id : 3, name : 'name3', tags : ['mouse', 'cat', 'dog'] } );
+t.insert( { _id : 4, name : 'name4', tags : [] } );
+
+m = function(){
+ for ( var i=0; i<this.tags.length; i++ )
+ emit( this.tags[i] , 1 )
+};
+
+r = function( key , values ){
+ return Array.sum( values );
+};
+
+a1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results
+a2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results
+a3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results
+
+assert.eq( [
+ {
+ "_id" : "cat",
+ "value" : 3
+ },
+ {
+ "_id" : "dog",
+ "value" : 2
+ },
+ {
+ "_id" : "mouse",
+ "value" : 1
+ }
+] , a1 , "A1" );
+assert.eq( [ { "_id" : "cat", "value" : 1 }, { "_id" : "dog", "value" : 1 } ] , a2 , "A2" )
+assert.eq( a1 , a3 , "A3" )
+
+t.ensureIndex({name:1, tags:1});
+
+b1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results
+b2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results
+b3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results
+
+assert.eq( a1 , b1 , "AB1" )
+assert.eq( a2 , b2 , "AB2" )
+assert.eq( a3 , b3 , "AB3" )
+
+
diff --git a/jstests/mr_killop.js b/jstests/mr_killop.js
new file mode 100644
index 00000000000..fe2a9ac76fa
--- /dev/null
+++ b/jstests/mr_killop.js
@@ -0,0 +1,123 @@
+// Test killop applied to m/r operations and child ops of m/r operations.
+
+t = db.jstests_mr_killop;
+t.drop();
+t2 = db.jstests_mr_killop_out;
+t2.drop();
+
+function debug( x ) {
+// printjson( x );
+}
+
+/** @return op code for map reduce op created by spawned shell, or that op's child */
+function op( childLoop ) {
+ p = db.currentOp().inprog;
+ debug( p );
+ for ( var i in p ) {
+ var o = p[ i ];
+ // Identify a map/reduce or where distinct operation by its collection, whether or not
+ // it is currently active.
+ if ( childLoop ) {
+ if ( ( o.active || o.waitingForLock ) &&
+ o.query &&
+ o.query.query &&
+ o.query.query.$where &&
+ o.query.distinct == "jstests_mr_killop" ) {
+ return o.opid;
+ }
+ }
+ else {
+ if ( ( o.active || o.waitingForLock ) &&
+ o.query &&
+ o.query.mapreduce &&
+ o.query.mapreduce == "jstests_mr_killop" ) {
+ return o.opid;
+ }
+ }
+ }
+ return -1;
+}
+
+/**
+* Run one map reduce with the specified parameters in a parallel shell, kill the
+* map reduce op or its child op with killOp, and wait for the map reduce op to
+* terminate.
+* @param childLoop - if true, a distinct $where op is killed rather than the map reduce op.
+* This is necessary for a child distinct $where of a map reduce op because child
+* ops currently mask parent ops in currentOp.
+*/
+function testOne( map, reduce, finalize, scope, childLoop, wait ) {
+ t.drop();
+ t2.drop();
+ // Ensure we have 2 documents for the reduce to run
+ t.save( {a:1} );
+ t.save( {a:1} );
+ db.getLastError();
+
+ spec = {
+ mapreduce:"jstests_mr_killop",
+ out:"jstests_mr_killop_out",
+ map: map,
+ reduce: reduce
+ };
+ if ( finalize ) {
+ spec[ "finalize" ] = finalize;
+ }
+ if ( scope ) {
+ spec[ "scope" ] = scope;
+ }
+
+ // Windows shell strips all double quotes from command line, so use
+ // single quotes.
+ stringifiedSpec = tojson( spec ).toString().replace( /\n/g, ' ' ).replace( /\"/g, "\'" );
+
+ // The assert below won't be caught by this test script, but it will cause error messages
+ // to be printed.
+ s = startParallelShell( "assert.commandWorked( db.runCommand( " + stringifiedSpec + " ) );" );
+
+ if ( wait ) {
+ sleep( 2000 );
+ }
+
+ o = null;
+ assert.soon( function() { o = op( childLoop ); return o != -1 } );
+
+ res = db.killOp( o );
+ debug( "did kill : " + tojson( res ) );
+
+ // When the map reduce op is killed, the spawned shell will exit
+ s();
+ debug( "parallel shell completed" );
+
+ assert.eq( -1, op( childLoop ) );
+}
+
+/** Test using wait and non wait modes */
+function test( map, reduce, finalize, scope, childLoop ) {
+ testOne( map, reduce, finalize, scope, childLoop, false );
+ testOne( map, reduce, finalize, scope, childLoop, true );
+}
+
+/** Test looping in map and reduce functions */
+function runMRTests( loop, childLoop ) {
+ test( loop, function( k, v ) { return v[ 0 ]; }, null, null, childLoop );
+ test( function() { emit( this.a, 1 ); }, loop, null, null, childLoop );
+ test( function() { loop(); }, function( k, v ) { return v[ 0 ] },
+ null, { loop: loop }, childLoop );
+}
+
+/** Test looping in finalize function */
+function runFinalizeTests( loop, childLoop ) {
+ test( function() { emit( this.a, 1 ); }, function( k, v ) { return v[ 0 ] },
+ loop, null, childLoop );
+ test( function() { emit( this.a, 1 ); }, function( k, v ) { return v[ 0 ] },
+ function( a, b ) { loop() }, { loop: loop }, childLoop );
+}
+
+var loop = function() {
+ while( 1 ) {
+ ;
+ }
+}
+runMRTests( loop, false );
+runFinalizeTests( loop, false );
diff --git a/jstests/mr_merge.js b/jstests/mr_merge.js
new file mode 100644
index 00000000000..9350c45f773
--- /dev/null
+++ b/jstests/mr_merge.js
@@ -0,0 +1,60 @@
+
+t = db.mr_merge;
+t.drop();
+
+t.insert( { a : [ 1 , 2 ] } )
+t.insert( { a : [ 2 , 3 ] } )
+t.insert( { a : [ 3 , 4 ] } )
+
+outName = "mr_merge_out";
+out = db[outName];
+out.drop();
+
+m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
+r = function(k,vs){ return Array.sum( vs ); }
+
+function tos( o ){
+ var s = "";
+ for ( var i=0; i<100; i++ ){
+ if ( o[i] )
+ s += i + "_" + o[i];
+ }
+ return s;
+}
+
+
+res = t.mapReduce( m , r , { out : outName } )
+
+
+expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
+
+t.insert( { a : [ 4 , 5 ] } )
+out.insert( { _id : 10 , value : "5" } )
+res = t.mapReduce( m , r , { out : outName } )
+
+expected["4"]++;
+expected["5"] = 1
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
+
+t.insert( { a : [ 5 , 6 ] } )
+out.insert( { _id : 10 , value : "5" } )
+res = t.mapReduce( m , r , { out : { merge : outName } } )
+
+expected["5"]++;
+expected["10"] = 5
+expected["6"] = 1
+
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
+
+// test that the nonAtomic output gives valid result
+t.insert( { a : [ 6 , 7 ] } )
+out.insert( { _id : 20 , value : "10" } )
+res = t.mapReduce( m , r , { out : { merge : outName, nonAtomic: true } } )
+
+expected["6"]++;
+expected["20"] = 10
+expected["7"] = 1
+
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "D" );
+
diff --git a/jstests/mr_merge2.js b/jstests/mr_merge2.js
new file mode 100644
index 00000000000..520bbfdbc8e
--- /dev/null
+++ b/jstests/mr_merge2.js
@@ -0,0 +1,37 @@
+
+t = db.mr_merge2;
+t.drop();
+
+t.insert( { a : [ 1 , 2 ] } )
+t.insert( { a : [ 2 , 3 ] } )
+t.insert( { a : [ 3 , 4 ] } )
+
+outName = "mr_merge2_out";
+out = db[outName];
+out.drop();
+
+m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
+r = function(k,vs){ return Array.sum( vs ); }
+
+function tos( o ){
+ var s = "";
+ for ( var i=0; i<100; i++ ){
+ if ( o[i] )
+ s += i + "_" + o[i] + "|";
+ }
+ return s;
+}
+
+
+outOptions = { out : { merge : outName } }
+
+res = t.mapReduce( m , r , outOptions )
+expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
+
+t.insert( { a : [ 4 , 5 ] } )
+res = t.mapReduce( m , r , outOptions )
+expected["4"]++;
+expected["5"] = 1
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
+
diff --git a/jstests/mr_mutable_properties.js b/jstests/mr_mutable_properties.js
new file mode 100644
index 00000000000..7c4442aab9e
--- /dev/null
+++ b/jstests/mr_mutable_properties.js
@@ -0,0 +1,62 @@
+// See SERVER-9448
+// Test argument and receiver (aka 'this') objects and their children can be mutated
+// in Map, Reduce and Finalize functions
+
+var collection = db.mrMutableReceiver;
+collection.drop();
+collection.insert({a:1});
+
+var map = function() {
+ // set property on receiver
+ this.feed = {beef:1};
+
+ // modify property on receiver
+ this.a = {cake:1};
+ emit(this._id, this.feed);
+ emit(this._id, this.a);
+}
+
+var reduce = function(key, values) {
+ // set property on receiver
+ this.feed = {beat:1};
+
+ // set property on key arg
+ key.fed = {mochi:1};
+
+ // push properties onto values array arg
+ values.push(this.feed);
+ values.push(key.fed);
+
+ // modify each value in the (modified) array arg
+ values.forEach(function(val) { val.mod = 1; });
+ return {food:values};
+}
+
+var finalize = function(key, values) {
+ // set property on receiver
+ this.feed = {ice:1};
+
+ // set property on key arg
+ key.fed = {cream:1};
+
+ // push properties onto values array arg
+ printjson(values);
+ values.food.push(this.feed);
+ values.food.push(key.fed);
+
+ // modify each value in the (modified) array arg
+ values.food.forEach(function(val) { val.mod = 1; });
+ return values;
+}
+
+var mr = collection.mapReduce(map, reduce, {finalize: finalize, out: {inline: 1}});
+printjson(mr);
+
+// verify mutated properties exist (order dictated by emit sequence and properties added)
+assert.eq(mr.results[0].value.food[0].beef, 1);
+assert.eq(mr.results[0].value.food[1].cake, 1);
+assert.eq(mr.results[0].value.food[2].beat, 1);
+assert.eq(mr.results[0].value.food[3].mochi, 1);
+assert.eq(mr.results[0].value.food[4].ice, 1);
+assert.eq(mr.results[0].value.food[5].cream, 1);
+mr.results[0].value.food.forEach(function(val) { assert.eq(val.mod, 1); });
diff --git a/jstests/mr_optim.js b/jstests/mr_optim.js
new file mode 100644
index 00000000000..164839e2f2c
--- /dev/null
+++ b/jstests/mr_optim.js
@@ -0,0 +1,48 @@
+
+
+t = db.mr_optim;
+t.drop();
+
+for (var i = 0; i < 1000; ++i) {
+ t.save( {a: Math.random(1000), b: Math.random(10000)} );
+}
+
+function m(){
+ emit(this._id, 13);
+}
+
+function r( key , values ){
+ return "bad";
+}
+
+function reformat( r ){
+ var x = {};
+ var cursor;
+ if ( r.results )
+ cursor = r.results;
+ else
+ cursor = r.find();
+ cursor.forEach(
+ function(z){
+ x[z._id] = z.value;
+ }
+ );
+ return x;
+}
+
+res = t.mapReduce( m , r , { out : "mr_optim_out" } );
+printjson( res )
+x = reformat( res );
+for (var key in x) {
+ assert.eq(x[key], 13, "value is not equal to original, maybe reduce has run");
+}
+res.drop();
+
+res = t.mapReduce( m , r , { out : { inline : 1 } } );
+//printjson( res )
+x2 = reformat( res );
+res.drop();
+
+assert.eq(x, x2, "object from inline and collection are not equal")
+
+t.drop(); \ No newline at end of file
diff --git a/jstests/mr_outreduce.js b/jstests/mr_outreduce.js
new file mode 100644
index 00000000000..793ec252feb
--- /dev/null
+++ b/jstests/mr_outreduce.js
@@ -0,0 +1,49 @@
+
+t = db.mr_outreduce;
+t.drop();
+
+t.insert( { _id : 1 , a : [ 1 , 2 ] } )
+t.insert( { _id : 2 , a : [ 2 , 3 ] } )
+t.insert( { _id : 3 , a : [ 3 , 4 ] } )
+
+outName = "mr_outreduce_out";
+out = db[outName];
+out.drop();
+
+m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
+r = function(k,vs){ return Array.sum( vs ); }
+
+function tos( o ){
+ var s = "";
+ for ( var i=0; i<100; i++ ){
+ if ( o[i] )
+ s += i + "_" + o[i] + "|"
+ }
+ return s;
+}
+
+
+res = t.mapReduce( m , r , { out : outName } )
+
+
+expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
+
+t.insert( { _id : 4 , a : [ 4 , 5 ] } )
+out.insert( { _id : 10 , value : "5" } ) // this is a sentinel to make sure it wasn't killed
+res = t.mapReduce( m , r , { out : { reduce : outName } , query : { _id : { $gt : 3 } } } )
+
+expected["4"]++;
+expected["5"] = 1
+expected["10"] = 5
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
+
+t.insert( { _id : 5 , a : [ 5 , 6 ] } )
+out.insert( { _id : 20 , value : "10" } ) // this is a sentinel to make sure it wasn't killed
+res = t.mapReduce( m , r , { out : { reduce : outName, nonAtomic: true } , query : { _id : { $gt : 4 } } } )
+
+expected["5"]++;
+expected["6"] = 1
+expected["20"] = 10
+assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
+
diff --git a/jstests/mr_outreduce2.js b/jstests/mr_outreduce2.js
new file mode 100644
index 00000000000..fc273638577
--- /dev/null
+++ b/jstests/mr_outreduce2.js
@@ -0,0 +1,27 @@
+
+normal = "mr_outreduce2"
+out = normal + "_out"
+
+t = db[normal]
+t.drop();
+
+db[out].drop()
+
+t.insert( { _id : 1 , x : 1 } )
+t.insert( { _id : 2 , x : 1 } )
+t.insert( { _id : 3 , x : 2 } )
+
+m = function(){ emit( this.x , 1 ); }
+r = function(k,v){ return Array.sum( v ); }
+
+res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 0 } } } )
+
+assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "A1" )
+assert.eq( 1 , db[out].findOne( { _id : 2 } ).value , "A2" )
+
+
+t.insert( { _id : 4 , x : 2 } )
+res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 3 } } , finalize : null } )
+
+assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "B1" )
+assert.eq( 2 , db[out].findOne( { _id : 2 } ).value , "B2" )
diff --git a/jstests/mr_replaceIntoDB.js b/jstests/mr_replaceIntoDB.js
new file mode 100644
index 00000000000..217f40717e5
--- /dev/null
+++ b/jstests/mr_replaceIntoDB.js
@@ -0,0 +1,45 @@
+
+t = db.mr_replace;
+t.drop();
+
+t.insert( { a : [ 1 , 2 ] } )
+t.insert( { a : [ 2 , 3 ] } )
+t.insert( { a : [ 3 , 4 ] } )
+
+outCollStr = "mr_replace_col";
+outDbStr = "mr_db";
+
+m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
+r = function(k,vs){ return Array.sum( vs ); }
+
+function tos( o ){
+ var s = "";
+ for ( var i=0; i<100; i++ ){
+ if ( o[i] )
+ s += i + "_" + o[i];
+ }
+ return s;
+}
+
+print("Testing mr replace into other DB")
+res = t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } )
+printjson( res );
+expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
+outDb = db.getMongo().getDB(outDbStr);
+outColl = outDb[outCollStr];
+str = tos( outColl.convertToSingleObject("value") )
+print("Received result: " + str);
+assert.eq( tos( expected ) , str , "A Received wrong result " + str );
+
+print("checking result field");
+assert.eq(res.result.collection, outCollStr, "B1 Wrong collection " + res.result.collection)
+assert.eq(res.result.db, outDbStr, "B2 Wrong db " + res.result.db)
+
+print("Replace again and check");
+outColl.save({_id: "5", value : 1});
+t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } )
+str = tos( outColl.convertToSingleObject("value") )
+print("Received result: " + str);
+assert.eq( tos( expected ) , str , "C1 Received wrong result " + str );
+
+
diff --git a/jstests/mr_sort.js b/jstests/mr_sort.js
new file mode 100644
index 00000000000..cc8db18e174
--- /dev/null
+++ b/jstests/mr_sort.js
@@ -0,0 +1,44 @@
+
+t = db.mr_sort;
+t.drop()
+
+t.ensureIndex( { x : 1 } )
+
+t.insert( { x : 1 } )
+t.insert( { x : 10 } )
+t.insert( { x : 2 } )
+t.insert( { x : 9 } )
+t.insert( { x : 3 } )
+t.insert( { x : 8 } )
+t.insert( { x : 4 } )
+t.insert( { x : 7 } )
+t.insert( { x : 5 } )
+t.insert( { x : 6 } )
+
+m = function(){
+ emit( "a" , this.x )
+}
+
+r = function( k , v ){
+ return Array.sum( v )
+}
+
+
+res = t.mapReduce( m , r , "mr_sort_out " );
+x = res.convertToSingleObject();
+res.drop();
+assert.eq( { "a" : 55 } , x , "A1" )
+
+res = t.mapReduce( m , r , { out : "mr_sort_out" , query : { x : { $lt : 3 } } } )
+x = res.convertToSingleObject();
+res.drop();
+assert.eq( { "a" : 3 } , x , "A2" )
+
+res = t.mapReduce( m , r , { out : "mr_sort_out" , sort : { x : 1 } , limit : 2 } );
+x = res.convertToSingleObject();
+res.drop();
+assert.eq( { "a" : 3 } , x , "A3" )
+
+
+
+
diff --git a/jstests/mr_stored.js b/jstests/mr_stored.js
new file mode 100644
index 00000000000..7963d9892e1
--- /dev/null
+++ b/jstests/mr_stored.js
@@ -0,0 +1,66 @@
+
+t = db.mr_stored;
+t.drop();
+
+t.save( { "partner" : 1, "visits" : 9 } )
+t.save( { "partner" : 2, "visits" : 9 } )
+t.save( { "partner" : 1, "visits" : 11 } )
+t.save( { "partner" : 1, "visits" : 30 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+
+m = function(obj){
+ emit( obj.partner , { stats : [ obj.visits ] } )
+}
+
+r = function( k , v ){
+ var stats = [];
+ var total = 0;
+ for ( var i=0; i<v.length; i++ ){
+ for ( var j in v[i].stats ) {
+ stats.push( v[i].stats[j] )
+ total += v[i].stats[j];
+ }
+ }
+ return { stats : stats , total : total }
+}
+
+// Test that map reduce works with stored javascript
+db.system.js.save( { _id : "mr_stored_map" , value : m } )
+db.system.js.save( { _id : "mr_stored_reduce" , value : r } )
+
+res = t.mapReduce( function () { mr_stored_map(this) } ,
+ function ( k , v ) { return mr_stored_reduce( k , v ) } ,
+ { out : "mr_stored_out" , scope : { xx : 1 } } );
+//res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "A1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" )
+
+
+res.drop()
+
+m = function(obj){
+ var x = "partner";
+ var y = "visits";
+ emit( obj[x] , { stats : [ obj[y] ] } )
+}
+
+db.system.js.save( { _id : "mr_stored_map" , value : m } )
+
+res = t.mapReduce( function () { mr_stored_map(this) } ,
+ function ( k , v ) { return mr_stored_reduce( k , v ) } ,
+ { out : "mr_stored_out" , scope : { xx : 1 } } );
+//res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "B1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" )
+
+db.system.js.remove( { _id : "mr_stored_map" } )
+db.system.js.remove( { _id : "mr_stored_reduce" } )
+
+res.drop()
diff --git a/jstests/mr_undef.js b/jstests/mr_undef.js
new file mode 100644
index 00000000000..e162f99836b
--- /dev/null
+++ b/jstests/mr_undef.js
@@ -0,0 +1,22 @@
+
+t = db.mr_undef
+t.drop()
+
+outname = "mr_undef_out"
+out = db[outname]
+out.drop()
+
+t.insert({x : 0})
+
+var m = function() { emit(this.mod, this.x); }
+var r = function(k,v) { total = 0; for(i in v) { total+= v[i]; } return total; }
+
+res = t.mapReduce(m, r, {out : outname } )
+
+assert.eq( 0 , out.find( { _id : { $type : 6 } } ).itcount() , "A1" )
+assert.eq( 1 , out.find( { _id : { $type : 10 } } ).itcount() , "A2" )
+
+x = out.findOne()
+assert.eq( x , out.findOne( { _id : x["_id"] } ) , "A3" )
+
+
diff --git a/jstests/multi.js b/jstests/multi.js
new file mode 100644
index 00000000000..eb6cad348cd
--- /dev/null
+++ b/jstests/multi.js
@@ -0,0 +1,24 @@
+t = db.jstests_multi;
+t.drop();
+
+t.ensureIndex( { a: 1 } );
+t.save( { a: [ 1, 2 ] } );
+assert.eq( 1, t.find( { a: { $gt: 0 } } ).count() , "A" );
+assert.eq( 1, t.find( { a: { $gt: 0 } } ).toArray().length , "B" );
+
+t.drop();
+t.save( { a: [ [ [ 1 ] ] ] } );
+assert.eq( 0, t.find( { a:1 } ).count() , "C" );
+assert.eq( 0, t.find( { a: [ 1 ] } ).count() , "D" );
+assert.eq( 1, t.find( { a: [ [ 1 ] ] } ).count() , "E" );
+assert.eq( 1, t.find( { a: [ [ [ 1 ] ] ] } ).count() , "F" );
+
+t.drop();
+t.save( { a: [ 1, 2 ] } );
+assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , "G" );
+
+t.drop();
+t.save( { a: [ { b: 1 }, { b: 2 } ] } );
+assert.eq( 0, t.find( { 'a.b': { $ne: 1 } } ).count() , "H" );
+
+// TODO - run same tests with an index on a
diff --git a/jstests/multi2.js b/jstests/multi2.js
new file mode 100644
index 00000000000..7c72722fd34
--- /dev/null
+++ b/jstests/multi2.js
@@ -0,0 +1,23 @@
+
+t = db.multi2;
+t.drop();
+
+t.save( { x : 1 , a : [ 1 ] } );
+t.save( { x : 1 , a : [] } );
+t.save( { x : 1 , a : null } );
+t.save( {} );
+
+assert.eq( 3 , t.find( { x : 1 } ).count() , "A" );
+
+t.ensureIndex( { x : 1 } );
+assert.eq( 3 , t.find( { x : 1 } ).count() , "B" );
+assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s1" );
+assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "B2" );
+
+t.dropIndex( { x : 1 } );
+t.ensureIndex( { x : 1 , a : 1 } );
+assert.eq( 3 , t.find( { x : 1 } ).count() , "C" ); // SERVER-279
+assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s2" );
+assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "C2" );
+
+
diff --git a/jstests/ne1.js b/jstests/ne1.js
new file mode 100644
index 00000000000..e1c5656b5c8
--- /dev/null
+++ b/jstests/ne1.js
@@ -0,0 +1,11 @@
+
+t = db.ne1;
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : 2 } );
+t.save( { x : 3 } );
+
+assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "A" );
+t.ensureIndex( { x : 1 } );
+assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "B" );
diff --git a/jstests/ne2.js b/jstests/ne2.js
new file mode 100644
index 00000000000..a69bfd6a114
--- /dev/null
+++ b/jstests/ne2.js
@@ -0,0 +1,16 @@
+// check that we don't scan $ne values
+
+t = db.jstests_ne2;
+t.drop();
+t.ensureIndex( {a:1} );
+
+t.save( { a:-0.5 } );
+t.save( { a:0 } );
+t.save( { a:0 } );
+t.save( { a:0.5 } );
+
+e = t.find( { a: { $ne: 0 } } ).explain( true );
+assert.eq( 2, e.n, 'A' );
+
+e = t.find( { a: { $gt: -1, $lt: 1, $ne: 0 } } ).explain();
+assert.eq( 2, e.n, 'B' );
diff --git a/jstests/ne3.js b/jstests/ne3.js
new file mode 100644
index 00000000000..3260fd3c40f
--- /dev/null
+++ b/jstests/ne3.js
@@ -0,0 +1,12 @@
+// don't allow most operators with regex
+
+t = db.jstests_ne3;
+t.drop();
+
+assert.throws( function() { t.findOne( { t: { $ne: /a/ } } ); } );
+assert.throws( function() { t.findOne( { t: { $gt: /a/ } } ); } );
+assert.throws( function() { t.findOne( { t: { $gte: /a/ } } ); } );
+assert.throws( function() { t.findOne( { t: { $lt: /a/ } } ); } );
+assert.throws( function() { t.findOne( { t: { $lte: /a/ } } ); } );
+
+assert.eq( 0, t.count( { t: { $in: [ /a/ ] } } ) );
diff --git a/jstests/nestedarr1.js b/jstests/nestedarr1.js
new file mode 100644
index 00000000000..b3bc9b73156
--- /dev/null
+++ b/jstests/nestedarr1.js
@@ -0,0 +1,30 @@
+// make sure that we don't crash on large nested arrays but correctly do not index them
+// SERVER-5127, SERVER-5036
+
+function makeNestArr(depth){
+ if(depth == 1){
+ return {a : [depth]};
+ }
+ else{
+ return {a : [makeNestArr(depth - 1)] };
+ }
+}
+
+t = db.arrNestTest;
+t.drop();
+
+t.ensureIndex({a:1});
+
+n = 1;
+while ( true ) {
+ var before = t.count();
+ t.insert( { _id : n, a : makeNestArr(n) } );
+ var after = t.count();
+ if ( before == after )
+ break;
+ n++;
+}
+
+assert( n > 30, "not enough n: " + n );
+
+assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
diff --git a/jstests/nestedobj1.js b/jstests/nestedobj1.js
new file mode 100644
index 00000000000..45ef0c530d4
--- /dev/null
+++ b/jstests/nestedobj1.js
@@ -0,0 +1,30 @@
+//SERVER-5127, SERVER-5036
+
+function makeNestObj(depth){
+ toret = { a : 1};
+
+ for(i = 1; i < depth; i++){
+ toret = {a : toret};
+ }
+
+ return toret;
+}
+
+t = db.objNestTest;
+t.drop();
+
+t.ensureIndex({a:1});
+
+n = 1;
+while ( true ) {
+ var before = t.count();
+ t.insert( { _id : n, a : makeNestObj(n) } );
+ var after = t.count();
+ if ( before == after )
+ break;
+ n++;
+}
+
+assert( n > 30, "not enough n: " + n );
+
+assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
diff --git a/jstests/nin.js b/jstests/nin.js
new file mode 100644
index 00000000000..06582781591
--- /dev/null
+++ b/jstests/nin.js
@@ -0,0 +1,58 @@
+t = db.jstests_nin;
+t.drop();
+
+function checkEqual( name , key , value ){
+ var o = {};
+ o[key] = { $in : [ value ] };
+ var i = t.find( o ).count();
+ o[key] = { $nin : [ value ] };
+ var n = t.find( o ).count();
+
+ assert.eq( t.find().count() , i + n ,
+ "checkEqual " + name + " $in + $nin != total | " + i + " + " + n + " != " + t.find().count() );
+}
+
+doTest = function( n ) {
+
+ t.save( { a:[ 1,2,3 ] } );
+ t.save( { a:[ 1,2,4 ] } );
+ t.save( { a:[ 1,8,5 ] } );
+ t.save( { a:[ 1,8,6 ] } );
+ t.save( { a:[ 1,9,7 ] } );
+
+ assert.eq( 5, t.find( { a: { $nin: [ 10 ] } } ).count() , n + " A" );
+ assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , n + " B" );
+ assert.eq( 0, t.find( { a: { $nin: [ 1 ] } } ).count() , n + " C" );
+ assert.eq( 0, t.find( { a: { $nin: [ 1, 2 ] } } ).count() , n + " D" );
+ assert.eq( 3, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " E" );
+ assert.eq( 3, t.find( { a: { $nin: [ 8 ] } } ).count() , n + " F" );
+ assert.eq( 4, t.find( { a: { $nin: [ 9 ] } } ).count() , n + " G" );
+ assert.eq( 4, t.find( { a: { $nin: [ 3 ] } } ).count() , n + " H" );
+ assert.eq( 3, t.find( { a: { $nin: [ 2, 3 ] } } ).count() , n + " I" );
+ assert.eq( 1, t.find( { a: { $ne: 8, $nin: [ 2, 3 ] } } ).count() , n + " I2" );
+
+ checkEqual( n + " A" , "a" , 5 );
+
+ t.save( { a: [ 2, 2 ] } );
+ assert.eq( 3, t.find( { a: { $nin: [ 2, 2 ] } } ).count() , n + " J" );
+
+ t.save( { a: [ [ 2 ] ] } );
+ assert.eq( 4, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " K" );
+
+ t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
+ checkEqual( n + " B" , "a" , 5 );
+ checkEqual( n + " C" , "a.b" , 5 );
+
+ assert.eq( 7, t.find( { 'a.b': { $nin: [ 10 ] } } ).count() , n + " L" );
+ assert.eq( 7, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
+ assert.eq( 7, t.find( { a: { $nin: [ 11 ] } } ).count() , n + " N" );
+
+ t.save( { a: { b: [ 20, 30 ] } } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() , n + " O" );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() , n + " P" );
+}
+
+doTest( "no index" );
+t.drop();
+t.ensureIndex( {a:1} );
+doTest( "with index" );
diff --git a/jstests/nin2.js b/jstests/nin2.js
new file mode 100644
index 00000000000..afdbb0494da
--- /dev/null
+++ b/jstests/nin2.js
@@ -0,0 +1,67 @@
+// Check that $nin is the opposite of $in SERVER-3264
+
+t = db.jstests_nin2;
+t.drop();
+
+// Check various operator types.
+function checkOperators( array, inMatches ) {
+ inCount = inMatches ? 1 : 0;
+ notInCount = 1 - inCount;
+ assert.eq( inCount, t.count( {foo:{$in:array}} ) );
+ assert.eq( notInCount, t.count( {foo:{$not:{$in:array}}} ) );
+ assert.eq( notInCount, t.count( {foo:{$nin:array}} ) );
+ assert.eq( inCount, t.count( {foo:{$not:{$nin:array}}} ) );
+}
+
+t.save({});
+
+assert.eq( 1, t.count( {foo:null} ) );
+assert.eq( 0, t.count( {foo:{$ne:null}} ) );
+assert.eq( 0, t.count( {foo:1} ) );
+
+// Check matching null against missing field.
+checkOperators( [null], true );
+checkOperators( [null,1], true );
+checkOperators( [1,null], true );
+
+t.remove({});
+t.save({foo:null});
+
+assert.eq( 1, t.count( {foo:null} ) );
+assert.eq( 0, t.count( {foo:{$ne:null}} ) );
+assert.eq( 0, t.count( {foo:1} ) );
+
+// Check matching empty set.
+checkOperators( [], false );
+
+// Check matching null against missing null field.
+checkOperators( [null], true );
+checkOperators( [null,1], true );
+checkOperators( [1,null], true );
+
+t.remove({});
+t.save({foo:1});
+
+assert.eq( 0, t.count( {foo:null} ) );
+assert.eq( 1, t.count( {foo:{$ne:null}} ) );
+assert.eq( 1, t.count( {foo:1} ) );
+
+// Check matching null against 1.
+checkOperators( [null], false );
+checkOperators( [null,1], true );
+checkOperators( [1,null], true );
+
+t.remove({});
+t.save( {foo:[0,1]} );
+// Check exact match of embedded array.
+checkOperators( [[0,1]], true );
+
+t.remove({});
+t.save( {foo:[]} );
+// Check exact match of embedded empty array.
+checkOperators( [[]], true );
+
+t.remove({});
+t.save( {foo:'foo'} );
+// Check regex match.
+checkOperators( [/o/], true );
diff --git a/jstests/not1.js b/jstests/not1.js
new file mode 100644
index 00000000000..f99a8490170
--- /dev/null
+++ b/jstests/not1.js
@@ -0,0 +1,20 @@
+
+t = db.not1;
+t.drop();
+
+
+t.insert({a:1})
+t.insert({a:2})
+t.insert({})
+
+function test( name ){
+ assert.eq( 3 , t.find().count() , name + "A" );
+ assert.eq( 1 , t.find( { a : 1 } ).count() , name + "B" );
+ assert.eq( 2 , t.find( { a : { $ne : 1 } } ).count() , name + "C" ); // SERVER-198
+ assert.eq( 1 , t.find({a:{$in:[1]}}).count() , name + "D" );
+ assert.eq( 2 , t.find({a:{$nin:[1]}}).count() , name + "E" ); // SERVER-198
+}
+
+test( "no index" );
+t.ensureIndex( { a : 1 } );
+test( "with index" );
diff --git a/jstests/not2.js b/jstests/not2.js
new file mode 100644
index 00000000000..239ea89d226
--- /dev/null
+++ b/jstests/not2.js
@@ -0,0 +1,84 @@
+t = db.jstests_not2;
+t.drop();
+
+check = function( query, expected, size ) {
+ if ( size == null ) {
+ size = 1;
+ }
+ assert.eq( size, t.find( query ).itcount(), tojson( query ) );
+ if ( size > 0 ) {
+ assert.eq( expected, t.findOne( query ).i, tojson( query ) );
+ }
+}
+
+fail = function( query ) {
+ try {
+ t.find( query ).itcount();
+ assert( false, tojson( query ) );
+ } catch ( e ) {
+ // expected
+ }
+}
+
+doTest = function() {
+
+t.remove( {} );
+
+t.save( {i:"a"} );
+t.save( {i:"b"} );
+
+fail( {i:{$not:"a"}} );
+// SERVER-12735: We currently do not handle double negatives
+// during query canonicalization.
+//fail( {i:{$not:{$not:"a"}}} );
+//fail( {i:{$not:{$not:{$gt:"a"}}}} );
+fail( {i:{$not:{$ref:"foo"}}} );
+fail( {i:{$not:{}}} );
+check( {i:{$gt:"a"}}, "b" );
+check( {i:{$not:{$gt:"a"}}}, "a" );
+check( {i:{$not:{$ne:"a"}}}, "a" );
+check( {i:{$not:{$gte:"b"}}}, "a" );
+check( {i:{$exists:true}}, "a", 2 );
+check( {i:{$not:{$exists:true}}}, "", 0 );
+check( {j:{$not:{$exists:false}}}, "", 0 );
+check( {j:{$not:{$exists:true}}}, "a", 2 );
+check( {i:{$not:{$in:["a"]}}}, "b" );
+check( {i:{$not:{$in:["a", "b"]}}}, "", 0 );
+check( {i:{$not:{$in:["g"]}}}, "a", 2 );
+check( {i:{$not:{$nin:["a"]}}}, "a" );
+check( {i:{$not:/a/}}, "b" );
+check( {i:{$not:/(a|b)/}}, "", 0 );
+check( {i:{$not:/a/,$regex:"a"}}, "", 0 );
+check( {i:{$not:/aa/}}, "a", 2 );
+fail( {i:{$not:{$regex:"a"}}} );
+fail( {i:{$not:{$options:"a"}}} );
+check( {i:{$type:2}}, "a", 2 );
+check( {i:{$not:{$type:1}}}, "a", 2 );
+check( {i:{$not:{$type:2}}}, "", 0 );
+
+t.remove( {} );
+t.save( {i:1} );
+check( {i:{$not:{$mod:[5,1]}}}, null, 0 );
+check( {i:{$mod:[5,2]}}, null, 0 );
+check( {i:{$not:{$mod:[5,2]}}}, 1, 1 );
+
+t.remove( {} );
+t.save( {i:["a","b"]} );
+check( {i:{$not:{$size:2}}}, null, 0 );
+check( {i:{$not:{$size:3}}}, ["a","b"] );
+check( {i:{$not:{$gt:"a"}}}, null, 0 );
+check( {i:{$not:{$gt:"c"}}}, ["a","b"] );
+check( {i:{$not:{$all:["a","b"]}}}, null, 0 );
+check( {i:{$not:{$all:["c"]}}}, ["a","b"] );
+
+t.remove( {} );
+t.save( {i:[{j:"a"}]} );
+t.save( {i:[{j:"b"}]} );
+check( {i:{$not:{$elemMatch:{j:"a"}}}}, [{j:"b"}] );
+check( {i:{$not:{$elemMatch:{j:"f"}}}}, [{j:"a"}], 2 );
+
+}
+
+doTest();
+t.ensureIndex( {i:1} );
+doTest();
diff --git a/jstests/notablescan.js b/jstests/notablescan.js
new file mode 100644
index 00000000000..f2ca68d2912
--- /dev/null
+++ b/jstests/notablescan.js
@@ -0,0 +1,31 @@
+// check notablescan mode
+
+t = db.test_notablescan;
+t.drop();
+
+try {
+ assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
+ // commented lines are SERVER-2222
+ if ( 0 ) { // SERVER-2222
+ assert.throws( function() { t.find( {a:1} ).toArray(); } );
+ }
+ t.save( {a:1} );
+ if ( 0 ) { // SERVER-2222
+ assert.throws( function() { t.count( {a:1} ); } );
+ assert.throws( function() { t.find( {} ).toArray(); } );
+ }
+ assert.eq( 1, t.find( {} ).itcount() ); // SERVER-274
+ assert.throws( function() { t.find( {a:1} ).toArray(); } );
+ assert.throws( function() { t.find( {a:1} ).hint( {$natural:1} ).toArray(); } );
+ t.ensureIndex( {a:1} );
+ assert.eq( 0, t.find( {a:1,b:1} ).itcount() );
+ assert.eq( 1, t.find( {a:1,b:null} ).itcount() );
+
+ // SERVER-4327
+ assert.eq( 0, t.find( {a:{$in:[]}} ).itcount() );
+ assert.eq( 0, t.find( {a:{$in:[]},b:0} ).itcount() );
+} finally {
+ // We assume notablescan was false before this test started and restore that
+ // expected value.
+ assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false } ) );
+}
diff --git a/jstests/ns_length.js b/jstests/ns_length.js
new file mode 100644
index 00000000000..2e3fb02b0af
--- /dev/null
+++ b/jstests/ns_length.js
@@ -0,0 +1,85 @@
+// SERVER-7282 Faulty logic when testing maximum collection name length.
+
+// constants from server
+var maxNsLength = 127;
+var maxNsCollectionLength = 120;
+
+var myDb = db.getSiblingDB("ns_length");
+myDb.dropDatabase(); // start empty
+
+function mkStr(length) {
+ s = "";
+ while (s.length < length) {
+ s += "x";
+ }
+ return s;
+}
+
+function canMakeCollectionWithName(name) {
+ assert.eq(myDb.stats().fileSize, 0, "initial conditions");
+
+ myDb[name].insert({});
+ var success = myDb.getLastError() == null;
+ if (!success) {
+ assert.eq(myDb.stats().fileSize, 0, "no files should be created on error");
+ return false;
+ }
+
+ myDb.dropDatabase();
+ return true;
+}
+
+function canMakeIndexWithName(collection, name) {
+ var success = (collection.ensureIndex({x:1}, {name: name}) == undefined);
+ if (success) {
+ assert.commandWorked(collection.dropIndex(name));
+ }
+ return success;
+}
+
+function canRenameCollection(from, to) {
+ var success = myDb[from].renameCollection(to).ok;
+ if (success) {
+ // put it back
+ assert.commandWorked(myDb[to].renameCollection(from));
+ }
+ return success;
+}
+
+// test making collections around the name limit
+var prefixOverhead = (myDb.getName() + ".").length;
+var maxCollectionNameLength = maxNsCollectionLength - prefixOverhead;
+for (var i = maxCollectionNameLength - 3; i <= maxCollectionNameLength + 3; i++) {
+ assert.eq(canMakeCollectionWithName(mkStr(i)),
+ i <= maxCollectionNameLength,
+ "ns name length = " + (prefixOverhead + i));
+}
+
+// test making indexes around the name limit
+var collection = myDb.collection;
+collection.insert({});
+var maxIndexNameLength = maxNsLength - (collection.getFullName() + ".$").length;
+for (var i = maxIndexNameLength - 3; i <= maxIndexNameLength + 3; i++) {
+ assert.eq(canMakeIndexWithName(collection, mkStr(i)),
+ i <= maxIndexNameLength,
+ "index ns name length = " + ((collection.getFullName() + ".$").length + i));
+}
+
+// test renaming collections with the destination around the name limit
+myDb.from.insert({});
+for (var i = maxCollectionNameLength - 3; i <= maxCollectionNameLength + 3; i++) {
+ assert.eq(canRenameCollection("from", mkStr(i)),
+ i <= maxCollectionNameLength,
+ "new ns name length = " + (prefixOverhead + i));
+}
+
+// test renaming collections with the destination around the name limit due to long index names
+myDb.from.ensureIndex({a:1}, {name: mkStr(100)});
+var indexNsNameOverhead = (myDb.getName() + "..$").length + 100; // index ns name - collection name
+var maxCollectionNameWithIndex = maxNsLength - indexNsNameOverhead;
+for (var i = maxCollectionNameWithIndex - 3; i <= maxCollectionNameWithIndex + 3; i++) {
+ assert.eq(canRenameCollection("from", mkStr(i)),
+ i <= maxCollectionNameWithIndex,
+ "index ns name length = " + (indexNsNameOverhead + i));
+}
+
diff --git a/jstests/null.js b/jstests/null.js
new file mode 100644
index 00000000000..f4bdeb44a4d
--- /dev/null
+++ b/jstests/null.js
@@ -0,0 +1,26 @@
+
+t = db.null1;
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : null } );
+
+assert.eq( 1 , t.find( { x : null } ).count() , "A" );
+assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 1 , t.find( { x : null } ).count() , "C" );
+assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "D" );
+
+// -----
+
+assert.eq( 2, t.find( { y : null } ).count(), "E" );
+
+t.ensureIndex( { y : 1 } );
+assert.eq( 2, t.find( { y : null } ).count(), "E" );
+
+t.dropIndex( { y : 1 } );
+
+t.ensureIndex( { y : 1 }, { sparse : true } );
+assert.eq( 2, t.find( { y : null } ).count(), "E" );
diff --git a/jstests/null2.js b/jstests/null2.js
new file mode 100644
index 00000000000..17b1a392714
--- /dev/null
+++ b/jstests/null2.js
@@ -0,0 +1,45 @@
+
+t = db.null2;
+t.drop();
+
+t.insert( { _id : 1, a : [ { b : 5 } ] } );
+t.insert( { _id : 2, a : [ {} ] } );
+t.insert( { _id : 3, a : [] } );
+t.insert( { _id : 4, a : [ {}, { b : 5 } ] } );
+t.insert( { _id : 5, a : [ 5, { b : 5 } ] } );
+
+function doQuery( query ) {
+ printjson( query );
+ t.find( query ).forEach(
+ function(z) {
+ print( "\t" + tojson(z) );
+ }
+ );
+ return t.find( query ).count();
+}
+
+function getIds( query ) {
+ var ids = []
+ t.find( query ).forEach(
+ function(z) {
+ ids.push( z._id );
+ }
+ );
+ return ids;
+}
+
+theQueries = [ { "a.b" : null }, { "a.b" : { $in : [ null ] } } ];
+
+for ( var i=0; i < theQueries.length; i++ ) {
+ assert.eq( 2, doQuery( theQueries[i] ) );
+ assert.eq( [2,4], getIds( theQueries[i] ) );
+}
+
+t.ensureIndex( { "a.b" : 1 } )
+
+for ( var i=0; i < theQueries.length; i++ ) {
+ assert.eq( 2, doQuery( theQueries[i] ) );
+ assert.eq( [2,4], getIds( theQueries[i] ) );
+}
+
+
diff --git a/jstests/null_field_name.js b/jstests/null_field_name.js
new file mode 100644
index 00000000000..7fa14b0a1bc
--- /dev/null
+++ b/jstests/null_field_name.js
@@ -0,0 +1,8 @@
+// SERVER-10313: Test that null char in field name causes an error when converting to bson
+assert.throws( function () { Object.bsonsize({"a\0":1}); },
+ null,
+ "null char in field name");
+
+assert.throws( function () { Object.bsonsize({"\0asdf":1}); },
+ null,
+ "null char in field name"); \ No newline at end of file
diff --git a/jstests/numberint.js b/jstests/numberint.js
new file mode 100644
index 00000000000..258450f8e82
--- /dev/null
+++ b/jstests/numberint.js
@@ -0,0 +1,92 @@
+assert.eq.automsg( "0", "new NumberInt()" );
+
+n = new NumberInt( 4 );
+assert.eq.automsg( "4", "n" );
+assert.eq.automsg( "4", "n.toNumber()" );
+assert.eq.automsg( "8", "n + 4" );
+assert.eq.automsg( "'NumberInt(4)'", "n.toString()" );
+assert.eq.automsg( "'NumberInt(4)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberInt(4) }'", "p" );
+
+assert.eq.automsg( "NumberInt(4 )", "eval( tojson( NumberInt( 4 ) ) )" );
+assert.eq.automsg( "a", "eval( tojson( a ) )" );
+
+n = new NumberInt( -4 );
+assert.eq.automsg( "-4", "n" );
+assert.eq.automsg( "-4", "n.toNumber()" );
+assert.eq.automsg( "0", "n + 4" );
+assert.eq.automsg( "'NumberInt(-4)'", "n.toString()" );
+assert.eq.automsg( "'NumberInt(-4)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberInt(-4) }'", "p" );
+
+n = new NumberInt( "11111" );
+assert.eq.automsg( "'NumberInt(11111)'", "n.toString()" );
+assert.eq.automsg( "'NumberInt(11111)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberInt(11111) }'", "p" );
+
+assert.eq.automsg( "NumberInt('11111' )", "eval( tojson( NumberInt( '11111' ) ) )" );
+assert.eq.automsg( "a", "eval( tojson( a ) )" );
+
+n = new NumberInt( "-11111" );
+assert.eq.automsg( "-11111", "n.toNumber()" );
+assert.eq.automsg( "-11107", "n + 4" );
+assert.eq.automsg( "'NumberInt(-11111)'", "n.toString()" );
+assert.eq.automsg( "'NumberInt(-11111)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberInt(-11111) }'", "p" );
+
+// parsing: v8 coerces non-numeric strings to 0, which is acceptable here
+//assert.throws.automsg( function() { new NumberInt( "" ); } );
+//assert.throws.automsg( function() { new NumberInt( "y" ); } );
+
+// eq
+
+assert.eq( { x : 5 } , { x : new NumberInt( "5" ) } );
+
+assert( 5 == NumberInt( 5 ) , "eq" );
+assert( 5 < NumberInt( 6 ) , "lt" );
+assert( 5 > NumberInt( 4 ) , "lt" );
+assert( NumberInt( 1 ) , "to bool a" );
+
+// objects are always considered truthy
+//assert( ! NumberInt( 0 ) , "to bool b" );
+
+// create doc with int value in db
+t = db.getCollection( "numberint" );
+t.drop();
+
+o = { a : NumberInt(42) };
+t.save( o );
+
+assert.eq( 42 , t.findOne().a , "save doc 1" );
+assert.eq( 1 , t.find({a: {$type: 16}}).count() , "save doc 2" );
+assert.eq( 0 , t.find({a: {$type: 1}}).count() , "save doc 3" );
+
+// roundtripping
+mod = t.findOne({a: 42});
+mod.a += 10;
+mod.b = "foo";
+delete mod._id;
+t.save(mod);
+assert.eq( 2 , t.find({a: {$type: 16}}).count() , "roundtrip 1" );
+assert.eq( 0 , t.find({a: {$type: 1}}).count() , "roundtrip 2" );
+assert.eq( 1 , t.find({a: 52}).count() , "roundtrip 3" );
+
+// save regular number
+t.save({a: 42});
+assert.eq( 2 , t.find({a: {$type: 16}}).count() , "normal 1" );
+assert.eq( 1 , t.find({a: {$type: 1}}).count() , "normal 2" );
+assert.eq( 2 , t.find({a: 42}).count() , "normal 3" );
+
+
diff --git a/jstests/numberlong.js b/jstests/numberlong.js
new file mode 100644
index 00000000000..1cbbc7a798a
--- /dev/null
+++ b/jstests/numberlong.js
@@ -0,0 +1,55 @@
+assert.eq.automsg( "0", "new NumberLong()" );
+
+n = new NumberLong( 4 );
+assert.eq.automsg( "4", "n" );
+assert.eq.automsg( "4", "n.toNumber()" );
+assert.eq.automsg( "8", "n + 4" );
+assert.eq.automsg( "'NumberLong(4)'", "n.toString()" );
+assert.eq.automsg( "'NumberLong(4)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberLong(4) }'", "p" );
+
+assert.eq.automsg( "NumberLong(4 )", "eval( tojson( NumberLong( 4 ) ) )" );
+assert.eq.automsg( "a", "eval( tojson( a ) )" );
+
+n = new NumberLong( -4 );
+assert.eq.automsg( "-4", "n" );
+assert.eq.automsg( "-4", "n.toNumber()" );
+assert.eq.automsg( "0", "n + 4" );
+assert.eq.automsg( "'NumberLong(-4)'", "n.toString()" );
+assert.eq.automsg( "'NumberLong(-4)'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberLong(-4) }'", "p" );
+
+// too big to fit in double
+n = new NumberLong( "11111111111111111" );
+assert.eq.automsg( "11111111111111112", "n.toNumber()" );
+assert.eq.automsg( "11111111111111116", "n + 4" );
+assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "n.toString()" );
+assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberLong(\"11111111111111111\") }'", "p" );
+
+assert.eq.automsg( "NumberLong('11111111111111111' )", "eval( tojson( NumberLong( '11111111111111111' ) ) )" );
+assert.eq.automsg( "a", "eval( tojson( a ) )" );
+
+n = new NumberLong( "-11111111111111111" );
+assert.eq.automsg( "-11111111111111112", "n.toNumber()" );
+assert.eq.automsg( "-11111111111111108", "n + 4" );
+assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "n.toString()" );
+assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "tojson( n )" );
+a = {}
+a.a = n;
+p = tojson( a );
+assert.eq.automsg( "'{ \"a\" : NumberLong(\"-11111111111111111\") }'", "p" );
+
+// parsing
+assert.throws.automsg( function() { new NumberLong( "" ); } );
+assert.throws.automsg( function() { new NumberLong( "y" ); } );
+assert.throws.automsg( function() { new NumberLong( "11111111111111111111" ); } );
diff --git a/jstests/numberlong2.js b/jstests/numberlong2.js
new file mode 100644
index 00000000000..5d7529a9e21
--- /dev/null
+++ b/jstests/numberlong2.js
@@ -0,0 +1,28 @@
+// Test precision of NumberLong values with v1 index code SERVER-3717
+
+t = db.jstests_numberlong2;
+t.drop();
+
+t.ensureIndex( {x:1} );
+
+function chk(longNum) {
+ t.remove({});
+ t.save({ x: longNum });
+ assert.eq(longNum, t.find().hint({ x: 1 }).next().x);
+ assert.eq(longNum, t.find({}, { _id: 0, x: 1 }).hint({ x: 1 }).next().x);
+}
+
+chk( NumberLong("1123539983311657217") );
+chk(NumberLong("-1123539983311657217"));
+ chk(NumberLong("4503599627370495"));
+ chk(NumberLong("4503599627370496"));
+ chk(NumberLong("4503599627370497"));
+
+t.remove({});
+
+s = "11235399833116571";
+for( i = 99; i >= 0; --i ) {
+ t.save( {x:NumberLong( s + i )} );
+}
+
+assert.eq( t.find().sort( {x:1} ).hint( {$natural:1} ).toArray(), t.find().sort( {x:1} ).hint( {x:1} ).toArray() );
diff --git a/jstests/numberlong3.js b/jstests/numberlong3.js
new file mode 100644
index 00000000000..10036c0544e
--- /dev/null
+++ b/jstests/numberlong3.js
@@ -0,0 +1,25 @@
+// Test sorting with long longs and doubles - SERVER-3719
+
+t = db.jstests_numberlong3;
+t.drop();
+
+s = "11235399833116571";
+for( i = 10; i >= 0; --i ) {
+ n = NumberLong( s + i );
+ t.save( {x:n} );
+ if ( 0 ) { // SERVER-3719
+ t.save( {x:n.floatApprox} );
+ }
+}
+
+ret = t.find().sort({x:1}).toArray().filter( function( x ) { return typeof( x.x.floatApprox ) != 'undefined' } );
+
+//printjson( ret );
+
+for( i = 1; i < ret.length; ++i ) {
+ first = ret[i-1].x.toString();
+ second = ret[i].x.toString();
+ if ( first.length == second.length ) {
+ assert.lte( ret[i-1].x.toString(), ret[i].x.toString() );
+ }
+}
diff --git a/jstests/numberlong4.js b/jstests/numberlong4.js
new file mode 100644
index 00000000000..0924931efaf
--- /dev/null
+++ b/jstests/numberlong4.js
@@ -0,0 +1,21 @@
+// Test handling of comparison between long longs and their double approximations in btrees - SERVER-3719.
+
+t = db.jstests_numberlong4;
+t.drop();
+
+if ( 0 ) { // SERVER-3719
+
+t.ensureIndex({x:1});
+
+Random.setRandomSeed();
+
+s = "11235399833116571";
+for( i = 0; i < 10000; ++i ) {
+ n = NumberLong( s + Random.randInt( 10 ) );
+ t.insert( { x: ( Random.randInt( 2 ) ? n : n.floatApprox ) } );
+}
+
+// If this does not return, there is a problem with index structure.
+t.find().hint({x:1}).itcount();
+
+}
diff --git a/jstests/objid1.js b/jstests/objid1.js
new file mode 100644
index 00000000000..dea31eed0d8
--- /dev/null
+++ b/jstests/objid1.js
@@ -0,0 +1,16 @@
+t = db.objid1;
+t.drop();
+
+b = new ObjectId();
+assert( b.str , "A" );
+
+a = new ObjectId( b.str );
+assert.eq( a.str , b.str , "B" );
+
+t.save( { a : a } )
+assert( t.findOne().a.isObjectId , "C" );
+assert.eq( a.str , t.findOne().a.str , "D" );
+
+x = { a : new ObjectId() };
+eval( " y = " + tojson( x ) );
+assert.eq( x.a.str , y.a.str , "E" );
diff --git a/jstests/objid2.js b/jstests/objid2.js
new file mode 100644
index 00000000000..a28c18fca15
--- /dev/null
+++ b/jstests/objid2.js
@@ -0,0 +1,7 @@
+t = db.objid2;
+t.drop();
+
+t.save( { _id : 517 , a : "hello" } )
+
+assert.eq( t.findOne().a , "hello" );
+assert.eq( t.findOne()._id , 517 );
diff --git a/jstests/objid3.js b/jstests/objid3.js
new file mode 100644
index 00000000000..ddf20d9af27
--- /dev/null
+++ b/jstests/objid3.js
@@ -0,0 +1,9 @@
+t = db.objid3;
+t.drop();
+
+t.save( { a : "bob" , _id : 517 } );
+for ( var k in t.findOne() ){
+ assert.eq( k , "_id" , "keys out of order" );
+ break;
+}
+
diff --git a/jstests/objid4.js b/jstests/objid4.js
new file mode 100644
index 00000000000..23986b95c71
--- /dev/null
+++ b/jstests/objid4.js
@@ -0,0 +1,16 @@
+
+
+
+o = new ObjectId();
+assert( o.str );
+
+a = new ObjectId( o.str );
+assert.eq( o.str , a.str );
+assert.eq( a.str , a.str.toString() )
+
+b = ObjectId( o.str );
+assert.eq( o.str , b.str );
+assert.eq( b.str , b.str.toString() )
+
+assert.throws( function(z){ return new ObjectId( "a" ); } );
+assert.throws( function(z){ return new ObjectId( "12345678901234567890123z" ); } );
diff --git a/jstests/objid5.js b/jstests/objid5.js
new file mode 100644
index 00000000000..f85ebc8c71d
--- /dev/null
+++ b/jstests/objid5.js
@@ -0,0 +1,19 @@
+
+t = db.objid5;
+t.drop();
+
+t.save( { _id : 5.5 } );
+assert.eq( 18 , Object.bsonsize( t.findOne() ) , "A" );
+
+x = db.runCommand( { features : 1 } )
+y = db.runCommand( { features : 1 , oidReset : 1 } )
+
+if( !x.ok )
+ print("x: " + tojson(x));
+
+assert( x.oidMachine , "B1" )
+assert.neq( x.oidMachine , y.oidMachine , "B2" )
+assert.eq( x.oidMachine , y.oidMachineOld , "B3" )
+
+assert.eq( 18 , Object.bsonsize( { _id : 7.7 } ) , "C1" )
+assert.eq( 0 , Object.bsonsize( null ) , "C2" )
diff --git a/jstests/objid6.js b/jstests/objid6.js
new file mode 100644
index 00000000000..b90dc9e914e
--- /dev/null
+++ b/jstests/objid6.js
@@ -0,0 +1,16 @@
+o = new ObjectId();
+assert(o.getTimestamp);
+
+a = new ObjectId("4c17f616a707427266a2801a");
+b = new ObjectId("4c17f616a707428966a2801c");
+assert.eq(a.getTimestamp(), b.getTimestamp() , "A" );
+
+x = Math.floor( (new Date()).getTime() / 1000 );
+sleep(10/*ms*/)
+a = new ObjectId();
+sleep(10/*ms*/)
+z = Math.floor( (new Date()).getTime() / 1000 );
+y = a.getTimestamp().getTime() / 1000;
+
+assert.lte( x , y , "B" );
+assert.lte( y , z , "C" );
diff --git a/jstests/objid7.js b/jstests/objid7.js
new file mode 100644
index 00000000000..5a5ca728c7d
--- /dev/null
+++ b/jstests/objid7.js
@@ -0,0 +1,13 @@
+
+a = new ObjectId( "4c1a478603eba73620000000" )
+b = new ObjectId( "4c1a478603eba73620000000" )
+c = new ObjectId();
+
+assert.eq( a.toString() , b.toString() , "A" )
+assert.eq( a.toString() , "ObjectId(\"4c1a478603eba73620000000\")" , "B" );
+
+assert( a.equals( b ) , "C" )
+
+assert.neq( a.toString() , c.toString() , "D" );
+assert( ! a.equals( c ) , "E" );
+
diff --git a/jstests/or1.js b/jstests/or1.js
new file mode 100644
index 00000000000..66bbd2e6eea
--- /dev/null
+++ b/jstests/or1.js
@@ -0,0 +1,57 @@
+t = db.jstests_or1;
+t.drop();
+
+checkArrs = function( a, b, m ) {
+ assert.eq( a.length, b.length, m );
+ aStr = [];
+ bStr = [];
+ a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
+ b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
+ for ( i = 0; i < aStr.length; ++i ) {
+ assert( -1 != bStr.indexOf( aStr[ i ] ), m );
+ }
+}
+
+doTest = function() {
+
+t.save( {_id:0,a:1} );
+t.save( {_id:1,a:2} );
+t.save( {_id:2,b:1} );
+t.save( {_id:3,b:2} );
+t.save( {_id:4,a:1,b:1} );
+t.save( {_id:5,a:1,b:2} );
+t.save( {_id:6,a:2,b:1} );
+t.save( {_id:7,a:2,b:2} );
+
+assert.throws( function() { t.find( { $or:"a" } ).toArray(); } );
+assert.throws( function() { t.find( { $or:[] } ).toArray(); } );
+assert.throws( function() { t.find( { $or:[ "a" ] } ).toArray(); } );
+
+a1 = t.find( { $or: [ { a : 1 } ] } ).toArray();
+checkArrs( [ { _id:0, a:1 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 } ], a1 );
+
+a1b2 = t.find( { $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
+checkArrs( [ { _id:0, a:1 }, { _id:3, b:2 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 }, { _id:7, a:2, b:2 } ], a1b2 );
+
+t.drop();
+t.save( {a:[0,1],b:[0,1]} );
+assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}} ] } ).toArray().length );
+assert.eq( 1, t.find( { $or: [ { b: {$in:[0,1]}} ] } ).toArray().length );
+assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}}, { b: {$in:[0,1]}} ] } ).toArray().length );
+
+}
+
+doTest();
+
+// not part of SERVER-1003, but good check for subseq. implementations
+t.drop();
+t.ensureIndex( {a:1} );
+doTest();
+
+t.drop();
+t.ensureIndex( {b:1} );
+doTest();
+
+t.drop();
+t.ensureIndex( {a:1,b:1} );
+doTest(); \ No newline at end of file
diff --git a/jstests/or2.js b/jstests/or2.js
new file mode 100644
index 00000000000..00e9f68decf
--- /dev/null
+++ b/jstests/or2.js
@@ -0,0 +1,69 @@
+t = db.jstests_or2;
+t.drop();
+
+checkArrs = function( a, b, m ) {
+ assert.eq( a.length, b.length, m );
+ aStr = [];
+ bStr = [];
+ a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
+ b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
+ for ( i = 0; i < aStr.length; ++i ) {
+ assert( -1 != bStr.indexOf( aStr[ i ] ), m );
+ }
+}
+
+doTest = function( index ) {
+ if ( index == null ) {
+ index = true;
+ }
+
+ t.save( {_id:0,x:0,a:1} );
+ t.save( {_id:1,x:0,a:2} );
+ t.save( {_id:2,x:0,b:1} );
+ t.save( {_id:3,x:0,b:2} );
+ t.save( {_id:4,x:1,a:1,b:1} );
+ t.save( {_id:5,x:1,a:1,b:2} );
+ t.save( {_id:6,x:1,a:2,b:1} );
+ t.save( {_id:7,x:1,a:2,b:2} );
+
+ assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
+ assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
+ assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
+
+ a1 = t.find( { x:0, $or: [ { a : 1 } ] } ).toArray();
+ checkArrs( [ { _id:0, x:0, a:1 } ], a1 );
+ if ( index ) {
+ assert( t.find( { x:0,$or: [ { a : 1 } ] } ).explain().cursor.match( /Btree/ ) );
+ }
+
+ a1b2 = t.find( { x:1, $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
+ checkArrs( [ { _id:4, x:1, a:1, b:1 }, { _id:5, x:1, a:1, b:2 }, { _id:7, x:1, a:2, b:2 } ], a1b2 );
+ if ( index ) {
+ assert( t.find( { x:0,$or: [ { a : 1 } ] } ).explain().cursor.match( /Btree/ ) );
+ }
+
+ /*
+ t.drop();
+ obj = {_id:0,x:10,a:[1,2,3]};
+ t.save( obj );
+ t.update( {x:10,$or:[ {a:2} ]}, {$set:{'a.$':100}} );
+ assert.eq( obj, t.findOne() ); // no change
+ */
+}
+
+doTest( false );
+
+t.ensureIndex( { x:1 } );
+doTest();
+
+t.drop();
+t.ensureIndex( { x:1,a:1 } );
+doTest();
+
+t.drop();
+t.ensureIndex( {x:1,b:1} );
+doTest();
+
+t.drop();
+t.ensureIndex( {x:1,a:1,b:1} );
+doTest();
diff --git a/jstests/or3.js b/jstests/or3.js
new file mode 100644
index 00000000000..7759e689f84
--- /dev/null
+++ b/jstests/or3.js
@@ -0,0 +1,62 @@
+t = db.jstests_or3;
+t.drop();
+
+checkArrs = function( a, b, m ) {
+ assert.eq( a.length, b.length, m );
+ aStr = [];
+ bStr = [];
+ a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
+ b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
+ for ( i = 0; i < aStr.length; ++i ) {
+ assert( -1 != bStr.indexOf( aStr[ i ] ), m );
+ }
+}
+
+doTest = function( index ) {
+ if ( index == null ) {
+ index = true;
+ }
+
+ t.save( {_id:0,x:0,a:1} );
+ t.save( {_id:1,x:0,a:2} );
+ t.save( {_id:2,x:0,b:1} );
+ t.save( {_id:3,x:0,b:2} );
+ t.save( {_id:4,x:1,a:1,b:1} );
+ t.save( {_id:5,x:1,a:1,b:2} );
+ t.save( {_id:6,x:1,a:2,b:1} );
+ t.save( {_id:7,x:1,a:2,b:2} );
+
+ assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
+ assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
+ assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
+
+ an1 = t.find( { $nor: [ { a : 1 } ] } ).toArray();
+ checkArrs( t.find( {a:{$ne:1}} ).toArray(), an1 );
+
+ an1bn2 = t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).toArray();
+ checkArrs( [ { _id:6, x:1, a:2, b:1 } ], an1bn2 );
+ checkArrs( t.find( { x:1, a:{$ne:1}, b:{$ne:2} } ).toArray(), an1bn2 );
+ if ( index ) {
+ assert( t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).explain().cursor.match( /Btree/ ) );
+ }
+
+ an1b2 = t.find( { $nor: [ { a : 1 } ], $or: [ { b : 2 } ] } ).toArray();
+ checkArrs( t.find( {a:{$ne:1},b:2} ).toArray(), an1b2 );
+}
+
+doTest( false );
+
+t.ensureIndex( { x:1 } );
+doTest();
+
+t.drop();
+t.ensureIndex( { x:1,a:1 } );
+doTest();
+
+t.drop();
+t.ensureIndex( {x:1,b:1} );
+doTest();
+
+t.drop();
+t.ensureIndex( {x:1,a:1,b:1} );
+doTest();
diff --git a/jstests/or4.js b/jstests/or4.js
new file mode 100644
index 00000000000..23c10bba8e2
--- /dev/null
+++ b/jstests/or4.js
@@ -0,0 +1,99 @@
+t = db.jstests_or4;
+t.drop();
+
+// v8 does not provide a built-in static Array.sort helper
+if (!Array.sort) {
+ Array.sort = function(arr) {
+ return arr.sort();
+ };
+}
+
+checkArrs = function( a, b ) {
+ m = "[" + a + "] != [" + b + "]";
+ a = eval( a );
+ b = eval( b );
+ assert.eq( a.length, b.length, m );
+ aStr = [];
+ bStr = [];
+ a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
+ b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
+ for ( i = 0; i < aStr.length; ++i ) {
+ assert( -1 != bStr.indexOf( aStr[ i ] ), m );
+ }
+}
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+t.save( {a:2} );
+t.save( {b:3} );
+t.save( {b:3} );
+t.save( {a:2,b:3} );
+
+assert.eq.automsg( "4", "t.count( {$or:[{a:2},{b:3}]} )" );
+assert.eq.automsg( "2", "t.count( {$or:[{a:2},{a:2}]} )" );
+
+assert.eq.automsg( "2", "t.find( {} ).skip( 2 ).count( true )" );
+assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip( 2 ).count( true )" );
+assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).skip( 3 ).count( true )" );
+
+assert.eq.automsg( "2", "t.find( {} ).limit( 2 ).count( true )" );
+assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).limit( 1 ).count( true )" );
+assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).limit( 2 ).count( true )" );
+assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).limit( 3 ).count( true )" );
+assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).limit( 4 ).count( true )" );
+
+t.remove({ $or: [{ a: 2 }, { b: 3}] });
+assert.eq.automsg( "0", "t.count()" );
+
+t.save( {b:3} );
+t.remove({ $or: [{ a: 2 }, { b: 3}] });
+assert.eq.automsg( "0", "t.count()" );
+
+t.save( {a:2} );
+t.save( {b:3} );
+t.save( {a:2,b:3} );
+
+t.update( {$or:[{a:2},{b:3}]}, {$set:{z:1}}, false, true );
+assert.eq.automsg( "3", "t.count( {z:1} )" );
+
+assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).toArray().length" );
+checkArrs( "t.find().toArray()", "t.find( {$or:[{a:2},{b:3}]} ).toArray()" );
+assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip(1).toArray().length" );
+
+assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
+
+t.save( {a:1} );
+t.save( {b:4} );
+t.save( {a:2} );
+
+assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
+assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).snapshot().toArray().length" );
+
+t.save( {a:1,b:3} );
+assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize(-4).toArray().length" );
+
+assert.eq.automsg( "[1,2]", "Array.sort( t.distinct( 'a', {$or:[{a:2},{b:3}]} ) )" );
+
+assert.eq.automsg( "[{a:2},{a:null},{a:1}]", "t.group( {key:{a:1}, cond:{$or:[{a:2},{b:3}]}, reduce:function( x, y ) { }, initial:{} } )" );
+assert.eq.automsg( "5", "t.mapReduce( function() { emit( 'a', this.a ); }, function( key, vals ) { return vals.length; }, {out:{inline:true},query:{$or:[{a:2},{b:3}]}} ).counts.input" );
+
+explain = t.find( {$or:[{a:2},{b:3}]} ).explain();
+
+t.remove( {} );
+
+t.save( {a:[1,2]} );
+assert.eq.automsg( "1", "t.find( {$or:[{a:1},{a:2}]} ).toArray().length" );
+assert.eq.automsg( "1", "t.count( {$or:[{a:1},{a:2}]} )" );
+assert.eq.automsg( "1", "t.find( {$or:[{a:2},{a:1}]} ).toArray().length" );
+assert.eq.automsg( "1", "t.count( {$or:[{a:2},{a:1}]} )" );
+
+t.remove({});
+
+assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{a:1}]} ).sort( {b:1} ).explain().cursor" );
+assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{}]} ).sort( {b:1} ).explain().cursor" );
+assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{b:1}]} ).sort( {b:1} ).explain().cursor" );
+
+assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{a:1}]} ).hint( {b:1} ).explain().cursor" );
+assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{}]} ).hint( {b:1} ).explain().cursor" );
+assert.eq.automsg( "1", "t.find( {$or:[{b:1}]} ).hint( {b:1} ).explain().indexBounds.b[ 0 ][ 0 ]" );
diff --git a/jstests/or5.js b/jstests/or5.js
new file mode 100644
index 00000000000..6a7316787d4
--- /dev/null
+++ b/jstests/or5.js
@@ -0,0 +1,70 @@
+t = db.jstests_or5;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:2},{b:3},{}]} ).explain().cursor" );
+assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).explain().cursor" );
+
+t.ensureIndex( {c:1} );
+
+t.save( {a:2} );
+t.save( {b:3} );
+t.save( {c:4} );
+t.save( {a:2,b:3} );
+t.save( {a:2,c:4} );
+t.save( {b:3,c:4} );
+t.save( {a:2,b:3,c:4} );
+
+assert.eq.automsg( "7", "t.count( {$or:[{a:2},{b:3},{c:4}]} )" );
+assert.eq.automsg( "6", "t.count( {$or:[{a:6},{b:3},{c:4}]} )" );
+assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:6},{c:4}]} )" );
+assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:3},{c:6}]} )" );
+
+assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).toArray().length" );
+
+for( i = 2; i <= 7; ++i ) {
+assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).batchSize( i ).toArray().length" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).batchSize( i ).toArray().length" );
+}
+
+t.ensureIndex( {z:"2d"} );
+
+assert.eq.automsg( "'GeoSearchCursor'", "t.find( {z:{$near:[50,50]},a:2} ).explain().cursor" );
+assert.eq.automsg( "'GeoSearchCursor'", "t.find( {z:{$near:[50,50]},$or:[{a:2}]} ).explain().cursor" );
+assert.eq.automsg( "'GeoSearchCursor'", "t.find( {$or:[{a:2}],z:{$near:[50,50]}} ).explain().cursor" );
+assert.eq.automsg( "'GeoSearchCursor'", "t.find( {$or:[{a:2},{b:3}],z:{$near:[50,50]}} ).explain().cursor" );
+assert.throws.automsg( function() { return t.find( {$or:[{z:{$near:[50,50]}},{a:2}]} ).toArray(); } );
+
+function reset() {
+ t.drop();
+
+ t.ensureIndex( {a:1} );
+ t.ensureIndex( {b:1} );
+ t.ensureIndex( {c:1} );
+
+ t.save( {a:2} );
+ t.save( {a:2} );
+ t.save( {b:3} );
+ t.save( {b:3} );
+ t.save( {c:4} );
+ t.save( {c:4} );
+}
+
+reset();
+
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 1 ).itcount()" );
+assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 2 ).itcount()" );
+
+t.drop();
+
+t.save( {a:[1,2]} );
+assert.eq.automsg( "1", "t.find( {$or:[{a:[1,2]}]} ).itcount()" );
+assert.eq.automsg( "1", "t.find( {$or:[{a:{$all:[1,2]}}]} ).itcount()" );
+assert.eq.automsg( "0", "t.find( {$or:[{a:{$all:[1,3]}}]} ).itcount()" );
diff --git a/jstests/or6.js b/jstests/or6.js
new file mode 100644
index 00000000000..43b75f467aa
--- /dev/null
+++ b/jstests/or6.js
@@ -0,0 +1,23 @@
+t = db.jstests_or6;
+t.drop();
+
+t.ensureIndex( {a:1} );
+
+assert.eq.automsg( "null", "t.find( {$or:[{a:1},{b:2}]} ).hint( {a:1} ).explain().clauses" );
+
+assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:1},{a:3}]} ).hint( {$natural:1} ).explain().cursor" );
+
+t.ensureIndex( {b:1} );
+assert.eq.automsg( "2", "t.find( {$or:[{a:1,b:5},{a:3,b:5}]} ).hint( {a:1} ).explain().clauses.length" );
+
+t.drop();
+
+t.ensureIndex( {a:1,b:1} );
+assert.eq.automsg( "2", "t.find( {$or:[{a:{$in:[1,2]},b:5}, {a:2,b:6}]} )" +
+ ".hint({a:1,b:1}).explain().clauses.length" );
+assert.eq.automsg( "2", "t.find( {$or:[{a:{$gt:1,$lte:2},b:5}, {a:2,b:6}]} )" +
+ ".hint({a:1,b:1}).explain().clauses.length" );
+assert.eq.automsg( "2", "t.find( {$or:[{a:{$gt:1,$lte:3},b:5}, {a:2,b:6}]} )" +
+ ".hint({a:1,b:1}).explain().clauses.length" );
+assert.eq.automsg( "null", "t.find( {$or:[{a:{$in:[1,2]}}, {a:2}]} )" +
+ ".hint({a:1,b:1}).explain().clauses" );
diff --git a/jstests/or7.js b/jstests/or7.js
new file mode 100644
index 00000000000..916158047d8
--- /dev/null
+++ b/jstests/or7.js
@@ -0,0 +1,41 @@
+t = db.jstests_or7;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:2} );
+
+assert.eq.automsg( "1", "t.count( {$or:[{a:{$in:[1,3]}},{a:2}]} )" );
+
+//SERVER-1201 ...
+
+t.remove({});
+
+t.save( {a:"aa"} );
+t.save( {a:"ab"} );
+t.save( {a:"ad"} );
+
+assert.eq.automsg( "3", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+
+t.remove({});
+
+t.save( {a:"aa"} );
+t.save( {a:"ad"} );
+
+assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+
+t.remove({});
+
+t.save( {a:"aa"} );
+t.save( {a:"ac"} );
+
+assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+
+assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+
+t.save( {a:"ab"} );
+assert.eq.automsg( "3", "t.count( {$or:[{a:{$in:[/^ab/],$gte:'abc'}},{a:/^a/}]} )" );
+
+t.remove({});
+t.save( {a:"a"} );
+t.save( {a:"b"} );
+assert.eq.automsg( "2", "t.count( {$or:[{a:{$gt:'a',$lt:'b'}},{a:{$gte:'a',$lte:'b'}}]} )" );
diff --git a/jstests/or8.js b/jstests/or8.js
new file mode 100644
index 00000000000..40d5b38cede
--- /dev/null
+++ b/jstests/or8.js
@@ -0,0 +1,28 @@
+// missing collection
+
+t = db.jstests_or8;
+t.drop();
+
+t.find({ "$or": [ { "PropA": { "$lt": "b" } }, { "PropA": { "$lt": "b", "$gt": "a" } } ] }).toArray();
+
+// empty $in
+
+t.save( {a:1} );
+t.save( {a:3} );
+t.ensureIndex( {a:1} );
+t.find({ $or: [ { a: {$in:[]} } ] } ).toArray();
+assert.eq.automsg( "2", "t.find({ $or: [ { a: {$in:[]} }, {a:1}, {a:3} ] } ).toArray().length" );
+assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, { a: {$in:[]} }, {a:3} ] } ).toArray().length" );
+assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, {a:3}, { a: {$in:[]} } ] } ).toArray().length" );
+
+// nested negate field
+
+t.drop();
+t.save( {a:{b:1,c:1}} );
+t.ensureIndex( { 'a.b':1 } );
+t.ensureIndex( { 'a.c':1 } );
+assert.eq( 1, t.find( {$or: [ { 'a.b':1 }, { 'a.c':1 } ] } ).itcount() );
+
+t.remove({});
+t.save( {a:[{b:1,c:1},{b:2,c:1}]} );
+assert.eq( 1, t.find( {$or: [ { 'a.b':2 }, { 'a.c':1 } ] } ).itcount() );
diff --git a/jstests/or9.js b/jstests/or9.js
new file mode 100644
index 00000000000..7318a532af4
--- /dev/null
+++ b/jstests/or9.js
@@ -0,0 +1,64 @@
+// index skipping and previous index range negation
+
+t = db.jstests_or9;
+t.drop();
+
+t.ensureIndex( {a:1,b:1} );
+
+t.save( {a:2,b:2} );
+
+function check( a, b, q ) {
+ count = a;
+ clauses = b;
+ query = q;
+ assert.eq.automsg( "count", "t.count( query )" );
+ if ( clauses == 1 ) {
+ assert.eq.automsg( "undefined", "t.find( query ).explain().clauses" );
+ } else {
+ assert.eq.automsg( "clauses", "t.find( query ).hint({a:1, b:1}).explain().clauses.length" );
+ }
+}
+
+// SERVER-12594: there are two clauses in this case, because we do
+// not yet collapse OR of ANDs to a single ixscan.
+check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2 } ] } );
+
+check( 1, 2, { $or: [ { a: { $gt:2,$lte:3 } }, { a: 2 } ] } );
+
+check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2 } ] } );
+check( 1, 1, { $or: [ { b: { $gte:2,$lte:3 } }, { b: 2 } ] } );
+check( 1, 1, { $or: [ { b: { $gt:2,$lte:3 } }, { b: 2 } ] } );
+
+// SERVER-12594: there are two clauses in this case, because we do
+// not yet collapse OR of ANDs to a single ixscan.
+check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2, b: 2 } ] } );
+
+check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b:3 }, { a: 2 } ] } );
+
+check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2, a: 2 } ] } );
+
+check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 }, a:3 }, { b: 2 } ] } );
+
+check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
+check( 1, 2, { $or: [ { a: { $gte:2,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
+// SERVER-12594: there are two clauses in this case, because we do
+// not yet collapse OR of ANDs to a single ixscan.
+check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b: 2 }, { a: 2, b: 2 } ] } );
+
+check( 1, 2, { $or: [ { b: { $gte:1,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
+check( 1, 2, { $or: [ { b: { $gte:2,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
+// SERVER-12594: there are two clauses in this case, because we do
+// not yet collapse OR of ANDs to a single ixscan.
+check( 1, 2, { $or: [ { b: { $gte:1,$lte:3 }, a: 2 }, { a: 2, b: 2 } ] } );
+
+t.remove({});
+
+t.save( {a:1,b:5} );
+t.save( {a:5,b:1} );
+
+// SERVER-12594: there are two clauses in the case below, because we do
+// not yet collapse OR of ANDs to a single ixscan.
+check( 2, 2, { $or: [ { a: { $in:[1,5] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
+
+check( 2, 2, { $or: [ { a: { $in:[1] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
+check( 2, 2, { $or: [ { a: { $in:[1] }, b: { $in:[1] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
diff --git a/jstests/ora.js b/jstests/ora.js
new file mode 100644
index 00000000000..67af4c191ec
--- /dev/null
+++ b/jstests/ora.js
@@ -0,0 +1,17 @@
+var t = db.jstests_ora;
+
+// $where
+t.drop();
+for (var i = 0; i < 10; i += 1) {
+ t.save({x: i, y: 10 - i});
+}
+assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}]}).count()");
+assert.eq.automsg("2", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 2'}]}).count()");
+assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 8'}]}).count()");
+assert.eq.automsg("10", "t.find({$or: [{$where: 'this.x === 2'}, {x: {$ne: 2}}]}).count()");
+
+// geo
+t.drop();
+t.ensureIndex({loc: "2d"});
+
+assert.throws(function () {t.find({$or: [{loc: {$near: [11, 11]}}]}).limit(1).next()['_id'];});
diff --git a/jstests/orb.js b/jstests/orb.js
new file mode 100644
index 00000000000..a4abdeecabf
--- /dev/null
+++ b/jstests/orb.js
@@ -0,0 +1,17 @@
+// check neg direction index and negation
+
+var t = db.jstests_orb;
+t.drop();
+
+t.save( {a:1} );
+t.ensureIndex( {a:-1} );
+
+assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
+
+t.drop();
+
+t.save( {a:1,b:1} );
+t.ensureIndex( {a:1,b:-1} );
+
+assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
+assert.eq.automsg( "1", "t.count( {$or: [ { a:1, b: { $gt:0,$lt:2 } }, { a:1, b: { $gt:-1,$lt:3 } } ] } )" ); \ No newline at end of file
diff --git a/jstests/orc.js b/jstests/orc.js
new file mode 100644
index 00000000000..dec6a7b920d
--- /dev/null
+++ b/jstests/orc.js
@@ -0,0 +1,29 @@
+// test that or duplicates are dropped in certain special cases
+t = db.jstests_orc;
+t.drop();
+
+// The goal here will be to ensure the full range of valid values is scanned for each or clause, in order to ensure that
+// duplicates are eliminated properly in the cases below when field range elimination is not employed. The deduplication
+// of interest will occur on field a. The range specifications for fields b and c are such that (in the current
+// implementation) field range elimination will not occur between the or clauses, meaning that the full range of valid values
+// will be scanned for each clause and deduplication will be forced.
+
+// NOTE This test uses some tricks to avoid or range elimination, but in future implementations these tricks may not apply.
+// Perhaps it would be worthwhile to create a mode where range elimination is disabled so it will be possible to write a more
+// robust test.
+
+t.ensureIndex( {a:-1,b:1,c:1} );
+
+// sanity test
+t.save( {a:null,b:4,c:4} );
+assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
+
+// from here on is SERVER-2245
+t.remove({});
+t.save( {b:4,c:4} );
+assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
+
+//t.remove({});
+//t.save( {a:[],b:4,c:4} );
+//printjson( t.find( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ).explain() );
+//assert.eq( 1, t.count( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
diff --git a/jstests/ord.js b/jstests/ord.js
new file mode 100644
index 00000000000..1ab0c1258a9
--- /dev/null
+++ b/jstests/ord.js
@@ -0,0 +1,35 @@
+// check that we don't crash if an index used by an earlier or clause is dropped
+
+// Dropping an index kills all cursors on the indexed namespace, not just those
+// cursors using the dropped index. This test is to serve as a reminder that
+// the $or implementation may need minor adjustments (memory ownership) if this
+// behavior is changed.
+
+t = db.jstests_ord;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+for( i = 0; i < 80; ++i ) {
+ t.save( {a:1} );
+}
+
+for( i = 0; i < 100; ++i ) {
+ t.save( {b:1} );
+}
+
+c = t.find( { $or: [ {a:1}, {b:1} ] } ).batchSize( 100 );
+for( i = 0; i < 90; ++i ) {
+ c.next();
+}
+// At this point, our initial query has ended and there is a client cursor waiting
+// to read additional documents from index {b:1}. Deduping is performed against
+// the index key {a:1}.
+
+t.dropIndex( {a:1} );
+db.getLastError();
+
+// Dropping an index kills all cursors on the indexed namespace, not just those
+// cursors using the dropped index.
+assert.throws( c.next() );
diff --git a/jstests/ore.js b/jstests/ore.js
new file mode 100644
index 00000000000..f938f635d41
--- /dev/null
+++ b/jstests/ore.js
@@ -0,0 +1,13 @@
+// verify that index direction is considered when deduping based on an earlier
+// index
+
+t = db.jstests_ore;
+t.drop();
+
+t.ensureIndex( {a:-1} )
+t.ensureIndex( {b:1} );
+
+t.save( {a:1,b:1} );
+t.save( {a:2,b:1} );
+
+assert.eq( 2, t.count( {$or:[{a:{$in:[1,2]}},{b:1}]} ) );
diff --git a/jstests/orf.js b/jstests/orf.js
new file mode 100644
index 00000000000..720b5b31f0c
--- /dev/null
+++ b/jstests/orf.js
@@ -0,0 +1,27 @@
+// Test a query with 200 $or clauses
+
+t = db.jstests_orf;
+t.drop();
+
+var a = [];
+var expectBounds = [];
+for( var i = 0; i < 200; ++i ) {
+ a.push( {_id:i} );
+ expectBounds.push([i, i]);
+}
+a.forEach( function( x ) { t.save( x ); } );
+
+// This $or query is answered as an index scan over
+// a series of _id index point intervals.
+explain = t.find( {$or:a} ).hint( {_id: 1} ).explain( true );
+printjson( explain );
+assert.eq( 'BtreeCursor _id_', explain.cursor, 'cursor' );
+assert.eq( expectBounds, explain.indexBounds['_id'], 'indexBounds' );
+assert.eq( 200, explain.n, 'n' );
+assert.eq( 200, explain.nscanned, 'nscanned' );
+assert.eq( 200, explain.nscannedObjects, 'nscannedObjects' );
+assert.eq( false, explain.isMultiKey, 'isMultiKey' );
+assert.eq( false, explain.scanAndOrder, 'scanAndOrder' );
+assert.eq( false, explain.indexOnly, 'indexOnly' );
+
+assert.eq( 200, t.count( {$or:a} ) );
diff --git a/jstests/org.js b/jstests/org.js
new file mode 100644
index 00000000000..19239f96c10
--- /dev/null
+++ b/jstests/org.js
@@ -0,0 +1,19 @@
+// SERVER-2282 $or de-duping with sparse indexes
+
+t = db.jstests_org;
+t.drop();
+
+t.ensureIndex( {a:1}, {sparse:true} );
+t.ensureIndex( {b:1} );
+
+t.remove({});
+t.save( {a:1,b:2} );
+assert.eq( 1, t.count( {$or:[{a:1},{b:2}]} ) );
+
+t.remove({});
+t.save( {a:null,b:2} );
+assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
+
+t.remove({});
+t.save( {b:2} );
+assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
diff --git a/jstests/orh.js b/jstests/orh.js
new file mode 100644
index 00000000000..5fb845fd01c
--- /dev/null
+++ b/jstests/orh.js
@@ -0,0 +1,17 @@
+// SERVER-2831 Demonstration of sparse index matching semantics in a multi index $or query.
+
+t = db.jstests_orh;
+t.drop();
+
+t.ensureIndex( {a:1}, {sparse:true} );
+t.ensureIndex( {b:1,a:1} );
+
+t.remove({});
+t.save( {b:2} );
+assert.eq( 1, t.count( {a:null} ) );
+assert.eq( 1, t.count( {b:2,a:null} ) );
+
+assert.eq( 1, t.count( {$or:[{b:2,a:null},{a:null}]} ) );
+
+// Is this desired?
+assert.eq( 1, t.count( {$or:[{a:null},{b:2,a:null}]} ) );
diff --git a/jstests/orj.js b/jstests/orj.js
new file mode 100644
index 00000000000..fa234f36cb5
--- /dev/null
+++ b/jstests/orj.js
@@ -0,0 +1,121 @@
+// Test nested $or clauses SERVER-2585 SERVER-3192
+
+t = db.jstests_orj;
+t.drop();
+
+t.save( {a:1,b:2} );
+
+function check() {
+
+assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
+
+assert.throws( function() { t.find( { x:0,$or:[{$or:"a"}] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$or:[{$or:[]}] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$or:[{$or:[ "a" ]}] } ).toArray(); } );
+
+assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
+
+assert.throws( function() { t.find( { x:0,$nor:[{$nor:"a"}] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$nor:[{$nor:[]}] } ).toArray(); } );
+assert.throws( function() { t.find( { x:0,$nor:[{$nor:[ "a" ]}] } ).toArray(); } );
+
+assert.eq( 1, t.find( {a:1,b:2} ).itcount() );
+
+assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).itcount() );
+assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).itcount() );
+
+assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
+assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
+assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).itcount() );
+
+assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).itcount() );
+assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).itcount() );
+assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).itcount() );
+
+assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
+assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).itcount() );
+assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
+
+assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).itcount() );
+assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).itcount() );
+
+assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
+assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
+assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).itcount() );
+
+assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
+assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
+assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).itcount() );
+
+assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
+assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
+assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
+
+}
+
+check();
+
+t.ensureIndex( {a:1} );
+check();
+t.dropIndexes();
+
+t.ensureIndex( {b:1} );
+check();
+t.dropIndexes();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+check();
+t.dropIndexes();
+
+t.ensureIndex( {a:1,b:1} );
+check();
+t.dropIndexes();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+t.ensureIndex( {a:1,b:1} );
+check();
+
+function checkHinted( hint ) {
+ assert.eq( 1, t.find( {a:1,b:2} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
+ assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+ assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
+
+ assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
+ assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+}
+
+checkHinted( {$natural:1} );
+checkHinted( {a:1} );
+checkHinted( {b:1} );
+checkHinted( {a:1,b:1} ); \ No newline at end of file
diff --git a/jstests/ork.js b/jstests/ork.js
new file mode 100644
index 00000000000..d6d40161e69
--- /dev/null
+++ b/jstests/ork.js
@@ -0,0 +1,11 @@
+// SERVER-2585 Test $or clauses within indexed top level $or clauses.
+
+t = db.jstests_ork;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:[1,2],b:5} );
+t.save( {a:[2,4],b:5} );
+
+assert.eq( 2, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:5}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
+assert.eq( 1, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:6}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
diff --git a/jstests/orl.js b/jstests/orl.js
new file mode 100644
index 00000000000..2726975d5aa
--- /dev/null
+++ b/jstests/orl.js
@@ -0,0 +1,13 @@
+// SERVER-3445 Test using coarse multikey bounds for or range elimination.
+
+t = db.jstests_orl;
+t.drop();
+
+t.ensureIndex( {'a.b':1,'a.c':1} );
+// make the index multikey
+t.save( {a:{b:[1,2]}} );
+
+// SERVER-3445
+if ( 0 ) {
+assert( !t.find( {$or:[{'a.b':2,'a.c':3},{'a.b':2,'a.c':4}]} ).explain().clauses );
+} \ No newline at end of file
diff --git a/jstests/oro.js b/jstests/oro.js
new file mode 100644
index 00000000000..ae1b6f53552
--- /dev/null
+++ b/jstests/oro.js
@@ -0,0 +1,27 @@
+// Test $or query with several clauses on separate indexes.
+
+t = db.jstests_oro;
+t.drop();
+
+orClauses = [];
+for( idxKey = 'a'; idxKey <= 'aaaaaaaaaa'; idxKey += 'a' ) {
+ idx = {}
+ idx[ idxKey ] = 1;
+ t.ensureIndex( idx );
+ for( i = 0; i < 200; ++i ) {
+ t.insert( idx );
+ }
+ orClauses.push( idx );
+}
+
+printjson( t.find({$or:orClauses}).explain() );
+c = t.find({$or:orClauses}).batchSize( 100 );
+count = 0;
+
+while( c.hasNext() ) {
+ for( i = 0; i < 50 && c.hasNext(); ++i, c.next(), ++count );
+ // Interleave with another operation.
+ t.stats();
+}
+
+assert.eq( 10 * 200, count );
diff --git a/jstests/orp.js b/jstests/orp.js
new file mode 100644
index 00000000000..18abdfbc63a
--- /dev/null
+++ b/jstests/orp.js
@@ -0,0 +1,43 @@
+// $or clause deduping with result set sizes > 101 (smaller result sets are now also deduped by the
+// query optimizer cursor).
+
+t = db.jstests_orp;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+t.ensureIndex( { b:1 } );
+t.ensureIndex( { c:1 } );
+
+for( i = 0; i < 200; ++i ) {
+ t.save( { a:1, b:1 } );
+}
+
+// Deduping results from the previous clause.
+assert.eq( 200, t.count( { $or:[ { a:1 }, { b:1 } ] } ) );
+
+// Deduping results from a prior clause.
+assert.eq( 200, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
+t.save( { c:1 } );
+assert.eq( 201, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
+
+// Deduping results that would normally be index only matches on overlapping and double scanned $or
+// field regions.
+t.drop();
+t.ensureIndex( { a:1, b:1 } );
+for( i = 0; i < 16; ++i ) {
+ for( j = 0; j < 16; ++j ) {
+ t.save( { a:i, b:j } );
+ }
+}
+assert.eq( 16 * 16,
+ t.count( { $or:[ { a:{ $gte:0 }, b:{ $gte:0 } }, { a:{ $lte:16 }, b:{ $lte:16 } } ] } ) );
+
+// Deduping results from a clause that completed before the multi cursor takeover.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.ensureIndex( { b:1 } );
+t.save( { a:1,b:200 } );
+for( i = 0; i < 200; ++i ) {
+ t.save( { b:i } );
+}
+assert.eq( 201, t.count( { $or:[ { a:1 }, { b:{ $gte:0 } } ] } ) );
diff --git a/jstests/padding.js b/jstests/padding.js
new file mode 100644
index 00000000000..1872574d80f
--- /dev/null
+++ b/jstests/padding.js
@@ -0,0 +1,66 @@
+p = db.getCollection("padding");
+p.drop();
+
+// this test requires usePowerOf2Sizes to be off
+db.createCollection( p.getName(), { "usePowerOf2Sizes" : false } );
+assert.eq(0, p.stats().userFlags);
+
+for (var i = 0; i < 1000; i++) {
+ p.insert({ x: 1, y: "aaaaaaaaaaaaaaa" });
+}
+
+assert.eq(p.stats().paddingFactor, 1, "Padding Not 1");
+
+for (var i = 0; i < 1000; i++) {
+ var x = p.findOne();
+ x.y = x.y + "aaaaaaaaaaaaaaaa";
+ p.update({}, x);
+ if (i % 100 == 0)
+
+ print(p.stats().paddingFactor);
+}
+
+assert.gt(p.stats().paddingFactor, 1.9, "Padding not > 1.9");
+
+// this should make it go down
+for (var i = 0; i < 1000; i++) {
+ p.update({}, { $inc: { x: 1} });
+ if (i % 100 == 0)
+ print(p.stats().paddingFactor);
+}
+assert.lt(p.stats().paddingFactor, 1.7, "Padding not < 1.7");
+
+for (var i = 0; i < 1000; i++) {
+ if (i % 2 == 0) {
+ p.update({}, { $inc: { x: 1} });
+ }
+ else {
+ var x = p.findOne();
+ x.y = x.y + "aaaaaaaaaaaaaaaa";
+ p.update({}, x);
+ }
+ if( i % 100 == 0 )
+ print(p.stats().paddingFactor);
+}
+var ps = p.stats().paddingFactor;
+assert.gt(ps, 1.7, "Padding not greater than 1.7");
+assert.lt(ps, 1.9, "Padding not less than 1.9");
+
+// 50/50 inserts and nonfitting updates
+for (var i = 0; i < 1000; i++) {
+ if (i % 2 == 0) {
+ p.insert({});
+ }
+ else {
+ var x = p.findOne();
+ x.y = x.y + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ p.update({}, x);
+ }
+ if (i % 100 == 0)
+ print(p.stats().paddingFactor);
+}
+
+// should have trended somewhat higher over the above.
+// speed of increase would be higher with more indexes.
+assert.gt(p.stats().paddingFactor, ps + 0.02 , "padding factor not greater than value (+.02)");
+p.drop();
diff --git a/jstests/plan_cache_commands.js b/jstests/plan_cache_commands.js
new file mode 100644
index 00000000000..9554e3017b2
--- /dev/null
+++ b/jstests/plan_cache_commands.js
@@ -0,0 +1,358 @@
+/**
+ * Plan cache commands
+ *
+ * Cache-wide Commands
+ * - planCacheListQueryShapes
+ * - planCacheClear
+ * Removes plans for one or all query shapes.
+ * - planCacheListPlans
+ */
+
+var t = db.jstests_plan_cache_commands;
+t.drop();
+
+// Insert some data so we don't go to EOF.
+t.save({a: 1, b: 1});
+t.save({a: 2, b: 2});
+
+// We need two indices so that the MultiPlanRunner is executed.
+t.ensureIndex({a: 1});
+t.ensureIndex({a: 1, b:1});
+
+// Run the query.
+var queryA1 = {a: 1, b:1};
+var projectionA1 = {_id: 0, a: 1};
+var sortA1 = {a: -1};
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+// We now expect the two indices to be compared and a cache entry to exist.
+
+
+//
+// tests for planCacheListQueryShapes
+// Returns a list of query shapes for the queries currently cached in the collection.
+//
+
+// Utility function to list query shapes in cache.
+function getShapes() {
+ var res = t.runCommand('planCacheListQueryShapes');
+ print('planCacheListQueryShapes() = ' + tojson(res));
+ assert.commandWorked(res, 'planCacheListQueryShapes failed');
+ assert(res.hasOwnProperty('shapes'), 'shapes missing from planCacheListQueryShapes result');
+ return res.shapes;
+
+}
+
+// Attempting to retrieve cache information on non-existent collection is an error.
+var missingCollection = db.jstests_query_cache_missing;
+missingCollection.drop();
+assert.commandFailed(missingCollection.runCommand('planCacheListQueryShapes'));
+
+// Retrieve query shapes from the test collection
+// Number of shapes should match queries executed by multi-plan runner.
+var shapes = getShapes();
+assert.eq(1, shapes.length, 'unexpected number of shapes in planCacheListQueryShapes result');
+assert.eq({query: queryA1, sort: sortA1, projection: projectionA1}, shapes[0],
+ 'unexpected query shape returned from planCacheListQueryShapes');
+
+
+
+//
+// Tests for planCacheClear (one query shape)
+//
+
+// Invalid key should be an error.
+assert.commandFailed(t.runCommand('planCacheClear', {query: {unknownfield: 1}}));
+
+// Run a new query shape and drop it from the cache
+assert.eq(1, t.find({a: 2, b: 2}).itcount(), 'unexpected document count');
+assert.eq(2, getShapes().length, 'unexpected cache size after running 2nd query');
+assert.commandWorked(t.runCommand('planCacheClear', {query: {a: 1, b: 1}}));
+assert.eq(1, getShapes().length, 'unexpected cache size after dropping 2nd query from cache');
+
+
+
+//
+// Tests for planCacheListPlans
+//
+
+// Utility function to list plans for a query.
+function getPlans(query, sort, projection) {
+ var key = {query: query, sort: sort, projection: projection};
+ var res = t.runCommand('planCacheListPlans', key);
+ assert.commandWorked(res, 'planCacheListPlans(' + tojson(key, '', true) + ' failed');
+ assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
+ tojson(key, '', true) + ') result');
+ return res.plans;
+}
+
+// Invalid key should be an error.
+assert.commandFailed(t.runCommand('planCacheListPlans', {query: {unknownfield: 1}}));
+
+// Retrieve plans for valid cache entry.
+var plans = getPlans(queryA1, sortA1, projectionA1);
+assert.eq(2, plans.length, 'unexpected number of plans cached for query');
+
+// Print every plan
+// Plan details/feedback verified separately in section after Query Plan Revision tests.
+print('planCacheListPlans result:');
+for (var i = 0; i < plans.length; i++) {
+ print('plan ' + i + ': ' + tojson(plans[i]));
+}
+
+
+
+//
+// Tests for planCacheClear
+//
+
+// Drop query cache. This clears all cached queries in the collection.
+res = t.runCommand('planCacheClear');
+print('planCacheClear() = ' + tojson(res));
+assert.commandWorked(res, 'planCacheClear failed');
+assert.eq(0, getShapes().length, 'plan cache should be empty after successful planCacheClear()');
+
+
+
+//
+// Query Plan Revision
+// http://docs.mongodb.org/manual/core/query-plans/#query-plan-revision
+// As collections change over time, the query optimizer deletes the query plan and re-evaluates
+// after any of the following events:
+// - The collection receives 1,000 write operations.
+// - The reIndex rebuilds the index.
+// - You add or drop an index.
+// - The mongod process restarts.
+//
+
+// Case 1: The collection receives 1,000 write operations.
+// Steps:
+// Populate cache. Cache should contain 1 key after running query.
+// Insert 1000 documents.
+// Cache should be cleared.
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
+for (var i = 0; i < 1000; i++) {
+ t.save({b: i});
+}
+assert.eq(0, getShapes().length, 'plan cache should be empty after adding 1000 documents.');
+
+// Case 2: The reIndex rebuilds the index.
+// Steps:
+// Populate the cache with 1 entry.
+// Run reIndex on the collection.
+// Confirm that cache is empty.
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
+res = t.reIndex();
+print('reIndex result = ' + tojson(res));
+assert.eq(0, getShapes().length, 'plan cache should be empty after reIndex operation');
+
+// Case 3: You add or drop an index.
+// Steps:
+// Populate the cache with 1 entry.
+// Add an index.
+// Confirm that cache is empty.
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
+t.ensureIndex({b: 1});
+assert.eq(0, getShapes().length, 'plan cache should be empty after adding index');
+
+// Case 4: The mongod process restarts
+// Not applicable.
+
+
+
+//
+// Tests for plan reason and feedback in planCacheListPlans
+//
+
+// Generate more plans for test query by adding indexes (compound and sparse).
+// This will also clear the plan cache.
+t.ensureIndex({a: -1}, {sparse: true});
+t.ensureIndex({a: 1, b: 1});
+
+// Implementation note: feedback stats is calculated after 20 executions.
+// See PlanCacheEntry::kMaxFeedback.
+var numExecutions = 100;
+for (var i = 0; i < numExecutions; i++) {
+ assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'query failed');
+}
+
+plans = getPlans(queryA1, sortA1, projectionA1);
+
+// This should be obvious but feedback is available only for the first (winning) plan.
+print('planCacheListPlans result (after adding indexes and completing 20 executions):');
+for (var i = 0; i < plans.length; i++) {
+ print('plan ' + i + ': ' + tojson(plans[i]));
+ assert.gt(plans[i].reason.score, 0, 'plan ' + i + ' score is invalid');
+ if (i > 0) {
+ assert.lte(plans[i].reason.score, plans[i-1].reason.score,
+ 'plans not sorted by score in descending order. ' +
+ 'plan ' + i + ' has a score that is greater than that of the previous plan');
+ }
+ assert(plans[i].reason.stats.hasOwnProperty('type'), 'no stats inserted for plan ' + i);
+}
+
+// feedback meaningful only for plan 0
+// feedback is capped at 20
+assert.eq(20, plans[0].feedback.nfeedback, 'incorrect nfeedback');
+assert.gt(plans[0].feedback.averageScore, 0, 'invalid average score');
+
+
+
+//
+// Tests for shell helpers
+//
+
+// Reset collection data and indexes.
+t.drop();
+var n = 200;
+for (var i = 0; i < n; i++) {
+ t.save({a:i, b: i});
+}
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+t.ensureIndex({a: 1, b: 1});
+
+// Repopulate plan cache with 3 query shapes.
+var queryB = {a: {$gte: 0}, b: {$gte: 0}};
+var projectionB = {_id: 0, b: 1};
+var sortB = {b: -1};
+assert.eq(n, t.find(queryB, projectionB).sort(sortB).itcount(), 'unexpected document count');
+assert.eq(n, t.find(queryB, projectionB).itcount(), 'unexpected document count');
+assert.eq(n, t.find(queryB).sort(sortB).itcount(), 'unexpected document count');
+assert.eq(n, t.find(queryB).itcount(), 'unexpected document count');
+assert.eq(4, getShapes().length, 'unexpected number of query shapes in plan cache');
+
+//
+// PlanCache.getName
+//
+
+var planCache = t.getPlanCache();
+assert.eq(t.getName(), planCache.getName(), 'name of plan cache should match collection');
+
+//
+// PlanCache.help
+//
+planCache.help();
+
+//
+// shellPrint
+//
+
+print('plan cache:');
+print(planCache);
+
+//
+// collection.getPlanCache().listQueryShapes
+//
+
+missingCollection.drop();
+assert.throws(function() { missingCollection.getPlanCache().listQueryShapes() });
+assert.eq(getShapes(), planCache.listQueryShapes(),
+ 'unexpected collection.getPlanCache().listQueryShapes() shell helper result');
+
+//
+// collection.getPlanCache().getPlansByQuery
+//
+
+// should error on non-existent collection.
+assert.throws(function() { planCache.getPlansByQuery({unknownfield: 1}) });
+// should error on missing required field query.
+assert.throws(function() { planCache.getPlansByQuery() });
+
+// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
+assert.eq(getPlans(queryB, sortB, projectionB), planCache.getPlansByQuery(queryB, projectionB,
+ sortB),
+ 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
+assert.eq(getPlans(queryB, {}, projectionB), planCache.getPlansByQuery(queryB, projectionB),
+ 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
+assert.eq(getPlans(queryB, sortB, {}), planCache.getPlansByQuery(queryB, undefined, sortB),
+ 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
+assert.eq(getPlans(queryB, {}, {}), planCache.getPlansByQuery(queryB),
+ 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
+
+//
+// collection.getPlanCache().clearPlansByQuery
+//
+
+// should error on non-existent collection.
+assert.throws(function() { planCache.clearPlansByQuery({unknownfield: 1}) });
+// should error on missing required field query.
+assert.throws(function() { planCache.clearPlansByQuery() });
+
+// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
+planCache.clearPlansByQuery(queryB, projectionB, sortB);
+assert.eq(3, getShapes().length,
+ 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
+
+planCache.clearPlansByQuery(queryB, projectionB);
+assert.eq(2, getShapes().length,
+ 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
+
+planCache.clearPlansByQuery(queryB, undefined, sortB);
+assert.eq(1, getShapes().length,
+ 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
+
+planCache.clearPlansByQuery(queryB);
+assert.eq(0, getShapes().length,
+ 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
+
+
+//
+// collection.getPlanCache().clear
+//
+
+assert.throws(function() { missingCollection.getPlanCache().clear() });
+// Re-populate plan cache with 1 query shape.
+assert.eq(n, t.find(queryB, projectionB).sort(sortB).itcount(), 'unexpected document count');
+assert.eq(1, getShapes().length, 'plan cache should not be empty after running cacheable query');
+// Clear cache.
+planCache.clear();
+assert.eq(0, getShapes().length, 'plan cache not empty after clearing');
+
+
+
+//
+// explain and plan cache
+// Running explain should not mutate the plan cache.
+//
+
+planCache.clear();
+
+// MultiPlanRunner explain
+var multiPlanRunnerExplain = t.find(queryB, projectionB).sort(sortB).explain(true);
+
+print('multi plan runner explain = ' + tojson(multiPlanRunnerExplain));
+
+assert.eq(0, getShapes().length, 'explain should not mutate plan cache');
+
+
+
+
+//
+// SERVER-12796: Plans for queries that return zero
+// results should not be cached.
+//
+
+t.drop();
+
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+
+for (var i = 0; i < 200; i++) {
+ t.save({a: 1, b: 1});
+}
+t.save({a: 2, b: 2});
+
+// A query with zero results that does not hit EOF should not be cached...
+assert.eq(0, t.find({c: 0}).itcount(), 'unexpected count');
+assert.eq(0, getShapes().length, 'unexpected number of query shapes in plan cache');
+
+// ...but a query with zero results that hits EOF will be cached.
+assert.eq(1, t.find({a: 2, b: 2}).itcount(), 'unexpected count');
+assert.eq(1, getShapes().length, 'unexpected number of query shapes in plan cache');
+
+// A query that returns results but does not hit EOF will also be cached.
+assert.eq(200, t.find({a: {$gte: 0}, b:1}).itcount(), 'unexpected count');
+assert.eq(2, getShapes().length, 'unexpected number of query shapes in plan cache');
diff --git a/jstests/profile1.js b/jstests/profile1.js
new file mode 100644
index 00000000000..7c168dea0ab
--- /dev/null
+++ b/jstests/profile1.js
@@ -0,0 +1,170 @@
+// This test is inherently a race between the client and the server, and the test is unreliable.
+// We compare the duration of a query as seen by the server with the duration as seen by the
+// client, and if the client is delayed by a few milliseconds, or, in extreme cases, by even
+// 1 millisecond, it may think that there is a problem when in fact it's just a race, and the
+// client lost the race.
+// Windows seems to experience this more than the other platforms, so, to "fix" SERVER-5373,
+// disable the test for Windows.
+
+if (!_isWindows()) {
+
+print("profile1.js BEGIN");
+
+// special db so that it can be run in parallel tests
+var stddb = db;
+var db = db.getSisterDB("profile1");
+var username = "jstests_profile1_user";
+
+db.dropUser(username)
+db.dropDatabase();
+
+try {
+
+ db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
+ db.auth( username, "password" );
+
+ function profileCursor( query ) {
+ query = query || {};
+ Object.extend( query, { user:username + "@" + db.getName() } );
+ return db.system.profile.find( query );
+ }
+
+ function getProfileAString() {
+ var s = "\n";
+ profileCursor().forEach( function(z){
+ s += tojson( z ) + " ,\n" ;
+ } );
+ return s;
+ }
+
+ /* With pre-created system.profile (capped) */
+ db.runCommand({profile: 0});
+ db.getCollection("system.profile").drop();
+ assert(!db.getLastError(), "Z");
+ assert.eq(0, db.runCommand({profile: -1}).was, "A");
+
+ // Create 32MB profile (capped) collection
+ db.system.profile.drop();
+ db.createCollection("system.profile", {capped: true, size: 32 * 1024 * 1024});
+ db.runCommand({profile: 2});
+ assert.eq(2, db.runCommand({profile: -1}).was, "B");
+ assert.eq(1, db.system.profile.stats().capped, "C");
+ var capped_size = db.system.profile.storageSize();
+ assert.gt(capped_size, 31 * 1024 * 1024, "D");
+ assert.lt(capped_size, 65 * 1024 * 1024, "E");
+
+ db.foo.findOne()
+
+ var profileItems = profileCursor().toArray();
+
+ // create a msg for later if there is a failure.
+ var msg = "";
+ profileItems.forEach(function(d) {msg += "profile doc: " + d.ns + " " + d.op + " " + tojson(d.query ? d.query : d.command)});
+ msg += tojson(db.system.profile.stats());
+
+    // If these numbers don't match, it is possible the collection has rolled over (set to 32MB above in the hope this doesn't happen)
+ assert.eq( 4 , profileItems.length , "E2 -- " + msg );
+
+ /* Make sure we can't drop if profiling is still on */
+ assert.throws( function(z){ db.getCollection("system.profile").drop(); } )
+
+ /* With pre-created system.profile (un-capped) */
+ db.runCommand({profile: 0});
+ db.getCollection("system.profile").drop();
+ assert.eq(0, db.runCommand({profile: -1}).was, "F");
+
+ db.createCollection("system.profile");
+ assert.eq( 0, db.runCommand({profile: 2}).ok );
+ assert.eq( 0, db.runCommand({profile: -1}).was, "G");
+ assert.eq(null, db.system.profile.stats().capped, "G1");
+
+ /* With no system.profile collection */
+ db.runCommand({profile: 0});
+ db.getCollection("system.profile").drop();
+ assert.eq(0, db.runCommand({profile: -1}).was, "H");
+
+ db.runCommand({profile: 2});
+ assert.eq(2, db.runCommand({profile: -1}).was, "I");
+ assert.eq(1, db.system.profile.stats().capped, "J");
+ var auto_size = db.system.profile.storageSize();
+ assert.lt(auto_size, capped_size, "K");
+
+
+ db.eval("sleep(1)") // pre-load system.js
+
+ function resetProfile( level , slowms ) {
+ db.setProfilingLevel(0);
+ db.system.profile.drop();
+ db.setProfilingLevel(level,slowms);
+ }
+
+ resetProfile(2);
+
+ db.eval( "sleep(25)" )
+ db.eval( "sleep(120)" )
+
+ assert.eq( 2 , profileCursor( { "command.$eval" : /^sleep/ } ).count() );
+
+ assert.lte( 119 , profileCursor( { "command.$eval" : "sleep(120)" } )[0].millis );
+ assert.lte( 24 , profileCursor( { "command.$eval" : "sleep(25)" } )[0].millis );
+
+ /* sleep() could be inaccurate on certain platforms. let's check */
+ print("\nsleep 2 time actual:");
+ for (var i = 0; i < 4; i++) {
+ print(db.eval("var x = new Date(); sleep(2); return new Date() - x;"));
+ }
+ print();
+ print("\nsleep 20 times actual:");
+ for (var i = 0; i < 4; i++) {
+ print(db.eval("var x = new Date(); sleep(20); return new Date() - x;"));
+ }
+ print();
+ print("\nsleep 120 times actual:");
+ for (var i = 0; i < 4; i++) {
+ print(db.eval("var x = new Date(); sleep(120); return new Date() - x;"));
+ }
+ print();
+
+ function evalSleepMoreThan(millis,max){
+ var start = new Date();
+ db.eval("sleep("+millis+")");
+ var end = new Date();
+ var actual = end.getTime() - start.getTime();
+ if ( actual > ( millis + 5 ) ) {
+ print( "warning wanted to sleep for: " + millis + " but took: " + actual );
+ }
+ return actual >= max ? 1 : 0;
+ }
+
+ resetProfile(1,100);
+ var delta = 0;
+ delta += evalSleepMoreThan( 15 , 100 );
+ delta += evalSleepMoreThan( 120 , 100 );
+ assert.eq( delta , profileCursor( { "command.$eval" : /^sleep/ } ).count() , "X2 : " + getProfileAString() )
+
+ resetProfile(1,20);
+ delta = 0;
+ delta += evalSleepMoreThan( 5 , 20 );
+ delta += evalSleepMoreThan( 120 , 20 );
+ assert.eq( delta , profileCursor( { "command.$eval" : /^sleep/ } ).count() , "X3 : " + getProfileAString() )
+
+ resetProfile(2);
+ db.profile1.drop();
+ var q = { _id : 5 };
+ var u = { $inc : { x : 1 } };
+ db.profile1.update( q , u );
+ var r = profileCursor( { ns : db.profile1.getFullName() } ).sort( { $natural : -1 } )[0]
+ assert.eq( q , r.query , "Y1: " + tojson(r) );
+ assert.eq( u , r.updateobj , "Y2" );
+ assert.eq( "update" , r.op , "Y3" );
+ assert.eq("profile1.profile1", r.ns, "Y4");
+
+ print("profile1.js SUCCESS OK");
+
+} finally {
+ // disable profiling for subsequent tests
+ assert.commandWorked( db.runCommand( {profile:0} ) );
+ db = stddb;
+}
+
+} // !_isWindows()
diff --git a/jstests/profile2.js b/jstests/profile2.js
new file mode 100644
index 00000000000..1006c03a40d
--- /dev/null
+++ b/jstests/profile2.js
@@ -0,0 +1,25 @@
+print("profile2.js BEGIN");
+
+// special db so that it can be run in parallel tests
+var stddb = db;
+var db = db.getSisterDB("profile2");
+
+try {
+
+ assert.commandWorked( db.runCommand( {profile:2} ) );
+
+ var str = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ huge = str;
+ while (huge.length < 2*1024*1024){
+ huge += str;
+ }
+
+ db.profile2.count({huge:huge}) // would make a huge entry in db.system.profile
+
+ print("profile2.js SUCCESS OK");
+
+} finally {
+ // disable profiling for subsequent tests
+ assert.commandWorked( db.runCommand( {profile:0} ) );
+ db = stddb;
+}
diff --git a/jstests/profile3.js b/jstests/profile3.js
new file mode 100644
index 00000000000..89fa0a33269
--- /dev/null
+++ b/jstests/profile3.js
@@ -0,0 +1,54 @@
+
+// special db so that it can be run in parallel tests
+var stddb = db;
+var db = db.getSisterDB("profile3");
+
+db.dropAllUsers();
+t = db.profile3;
+t.drop();
+
+profileCursor = function( query ) {
+ print( "----" );
+ query = query || {};
+ Object.extend( query, { user: username + "@" + db.getName() } );
+ return db.system.profile.find( query );
+}
+
+try {
+ username = "jstests_profile3_user";
+ db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
+ db.auth( username, "password" );
+
+ db.setProfilingLevel(0);
+
+ db.system.profile.drop();
+ assert.eq( 0 , profileCursor().count() )
+
+ db.setProfilingLevel(2);
+
+ db.createCollection(t.getName(), {usePowerOf2Sizes: false});
+ t.insert( { x : 1 } );
+ t.findOne( { x : 1 } );
+ t.find( { x : 1 } ).count();
+ t.update( { x : 1 }, {$inc:{a:1}} );
+ t.update( { x : 1 }, {$inc:{a:1}} );
+ t.update( { x : 0 }, {$inc:{a:1}} );
+
+ profileCursor().forEach( printjson )
+
+ db.setProfilingLevel(0);
+
+
+ assert.eq(profileCursor({nMatched: {$exists:1}}).count(), 3)
+ assert.eq(profileCursor({nMatched: 1}).count(), 2)
+ assert.eq(profileCursor({nMatched: 0}).count(), 1)
+ assert.eq(profileCursor({nmoved: 1}).count(), 1)
+
+ db.system.profile.drop();
+
+}
+finally {
+ db.setProfilingLevel(0);
+ db = stddb;
+}
+
diff --git a/jstests/profile4.js b/jstests/profile4.js
new file mode 100644
index 00000000000..5b9a0a66be2
--- /dev/null
+++ b/jstests/profile4.js
@@ -0,0 +1,98 @@
+// Check debug information recorded for a query.
+
+// special db so that it can be run in parallel tests
+var stddb = db;
+var db = db.getSisterDB("profile4");
+
+db.dropAllUsers();
+t = db.profile4;
+t.drop();
+
+function profileCursor() {
+ return db.system.profile.find( { user:username + "@" + db.getName() } );
+}
+
+function lastOp() {
+ p = profileCursor().sort( { $natural:-1 } ).next();
+// printjson( p );
+ return p;
+}
+
+function checkLastOp( spec ) {
+ p = lastOp();
+ for( i in spec ) {
+ s = spec[ i ];
+ assert.eq( s[ 1 ], p[ s[ 0 ] ], s[ 0 ] );
+ }
+}
+
+try {
+ username = "jstests_profile4_user";
+ db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
+ db.auth( username, "password" );
+
+ db.setProfilingLevel(0);
+
+ db.system.profile.drop();
+ assert.eq( 0 , profileCursor().count() )
+
+ db.setProfilingLevel(2);
+
+ t.find().itcount();
+ checkLastOp( [ [ "op", "query" ],
+ [ "ns", "profile4.profile4" ],
+ [ "query", {} ],
+ [ "ntoreturn", 0 ],
+ [ "ntoskip", 0 ],
+ [ "nscanned", 0 ],
+ [ "keyUpdates", 0 ],
+ [ "nreturned", 0 ],
+ [ "responseLength", 20 ] ] );
+
+ t.save( {} );
+
+ // check write lock stats are set
+ o = lastOp();
+ assert.eq('insert', o.op);
+ assert.eq( 0, o.lockStats.timeLockedMicros.r );
+ assert.lt( 0, o.lockStats.timeLockedMicros.w );
+ assert.eq( 0, o.lockStats.timeAcquiringMicros.r );
+ //assert.lt( 0, o.lockStats.timeAcquiringMicros.w ); // Removed due to SERVER-8331
+
+ // check read lock stats are set
+ t.find();
+ o = lastOp();
+ assert.eq('query', o.op);
+ assert.lt( 0, o.lockStats.timeLockedMicros.r );
+ assert.eq( 0, o.lockStats.timeLockedMicros.w );
+ //assert.lt( 0, o.lockStats.timeAcquiringMicros.r ); // Removed due to SERVER-8331
+ //assert.lt( 0, o.lockStats.timeAcquiringMicros.w ); // Removed due to SERVER-8331
+
+ t.save( {} );
+ t.save( {} );
+ t.find().skip( 1 ).limit( 4 ).itcount();
+ checkLastOp( [ [ "ntoreturn", 4 ],
+ [ "ntoskip", 1 ],
+ [ "nscanned", 3 ],
+ [ "nreturned", 2 ] ] );
+
+ t.find().batchSize( 2 ).next();
+ o = lastOp();
+ assert.lt( 0, o.cursorid );
+
+ t.find( {a:1} ).itcount();
+ checkLastOp( [ [ "query", {a:1} ] ] );
+
+ t.find( {_id:0} ).itcount();
+ checkLastOp( [ [ "idhack", true ] ] );
+
+ t.find().sort( {a:1} ).itcount();
+ checkLastOp( [ [ "scanAndOrder", true ] ] );
+
+ db.setProfilingLevel(0);
+ db.system.profile.drop();
+}
+finally {
+ db.setProfilingLevel(0);
+ db = stddb;
+}
diff --git a/jstests/proj_key1.js b/jstests/proj_key1.js
new file mode 100644
index 00000000000..ad944f71827
--- /dev/null
+++ b/jstests/proj_key1.js
@@ -0,0 +1,28 @@
+
+t = db.proj_key1;
+t.drop();
+
+as = []
+
+for ( i=0; i<10; i++ ){
+ as.push( { a : i } )
+ t.insert( { a : i , b : i } );
+}
+
+assert( ! t.find( {} , { a : 1 } ).explain().indexOnly , "A1" )
+
+t.ensureIndex( { a : 1 } )
+
+assert( t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).explain().indexOnly , "A2" )
+
+assert( ! t.find( { a : { $gte : 0 } } , { a : 1 } ).explain().indexOnly , "A3" ) // because of _id
+
+// assert( t.find( {} , { a : 1 , _id : 0 } ).explain().indexOnly , "A4" ); // TODO: need to modify query optimizer SERVER-2109
+
+assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).toArray() , "B1" )
+assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).batchSize(2).toArray() , "B1" )
+
+
+
+
+
diff --git a/jstests/pull.js b/jstests/pull.js
new file mode 100644
index 00000000000..3cb6328e2de
--- /dev/null
+++ b/jstests/pull.js
@@ -0,0 +1,33 @@
+t = db.jstests_pull;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 2 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 2 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [], t.findOne().a );
+
+// SERVER-6047: $pull creates empty nested docs for dotted fields
+// that don't exist.
+t.drop()
+t.save({ m : 1 } );
+t.update( { m : 1 }, { $pull : { 'a.b' : [ 1 ] } } );
+assert( ('a' in t.findOne()) == false );
+// Non-obvious bit: the implementation of non-in-place update
+// might do different things depending on whether the "new" field
+// comes before or after existing fields in the document.
+// So for now it's worth testing that too. Sorry, future; blame the past.
+t.update( { m : 1 }, { $pull : { 'x.y' : [ 1 ] } } );
+assert( ('z' in t.findOne()) == false );
+// End SERVER-6047
diff --git a/jstests/pull2.js b/jstests/pull2.js
new file mode 100644
index 00000000000..ca13fc2e726
--- /dev/null
+++ b/jstests/pull2.js
@@ -0,0 +1,31 @@
+
+t = db.pull2;
+t.drop();
+
+t.save( { a : [ { x : 1 } , { x : 1 , b : 2 } ] } );
+assert.eq( 2 , t.findOne().a.length , "A" );
+
+t.update( {} , { $pull : { a : { x : 1 } } } );
+assert.eq( 0 , t.findOne().a.length , "B" );
+
+assert.eq( 1 , t.find().count() , "C1" )
+
+t.update( {} , { $push : { a : { x : 1 } } } )
+t.update( {} , { $push : { a : { x : 1 , b : 2 } } } )
+assert.eq( 2 , t.findOne().a.length , "C" );
+
+t.update( {} , { $pullAll : { a : [ { x : 1 } ] } } );
+assert.eq( 1 , t.findOne().a.length , "D" );
+
+t.update( {} , { $push : { a : { x : 2 , b : 2 } } } )
+t.update( {} , { $push : { a : { x : 3 , b : 2 } } } )
+t.update( {} , { $push : { a : { x : 4 , b : 2 } } } )
+assert.eq( 4 , t.findOne().a.length , "E" );
+
+assert.eq( 1 , t.find().count() , "C2" )
+
+
+t.update( {} , { $pull : { a : { x : { $lt : 3 } } } } );
+assert.eq( 2 , t.findOne().a.length , "F" );
+assert.eq( [ 3 , 4 ] , t.findOne().a.map( function(z){ return z.x; } ) , "G" )
+
diff --git a/jstests/pull_or.js b/jstests/pull_or.js
new file mode 100644
index 00000000000..905c7a87060
--- /dev/null
+++ b/jstests/pull_or.js
@@ -0,0 +1,21 @@
+
+t = db.pull_or;
+t.drop();
+
+doc = { _id : 1 , a : { b : [ { x : 1 },
+ { y : 'y' },
+ { x : 2 },
+ { z : 'z' } ] } };
+
+t.insert( doc );
+
+t.update({}, { $pull : { 'a.b' : { 'y' : { $exists : true } } } } );
+
+assert.eq( [ { x : 1 }, { x : 2 }, { z : 'z' } ], t.findOne().a.b );
+
+t.drop();
+t.insert( doc );
+t.update({}, { $pull : { 'a.b' : { $or : [ { 'y' : { $exists : true } },
+ { 'z' : { $exists : true } } ] } } } );
+
+assert.eq( [ { x : 1 }, { x : 2 } ], t.findOne().a.b );
diff --git a/jstests/pull_remove1.js b/jstests/pull_remove1.js
new file mode 100644
index 00000000000..379f3f2832b
--- /dev/null
+++ b/jstests/pull_remove1.js
@@ -0,0 +1,14 @@
+
+t = db.pull_remove1
+t.drop()
+
+o = { _id : 1 , a : [ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ] }
+t.insert( o )
+
+assert.eq( o , t.findOne() , "A1" )
+
+o.a = o.a.filter( function(z){ return z >= 6; } )
+t.update( {} , { $pull : { a : { $lt : 6 } } } )
+
+assert.eq( o.a , t.findOne().a , "A2" )
+
diff --git a/jstests/pullall.js b/jstests/pullall.js
new file mode 100644
index 00000000000..7dd932c4bbf
--- /dev/null
+++ b/jstests/pullall.js
@@ -0,0 +1,31 @@
+t = db.jstests_pullall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 2, 3 ] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 1, 5 ] } } );
+assert.eq( [], t.findOne().a );
+
+// SERVER-6047: $pullAll creates empty nested docs for dotted fields
+// that don't exist.
+t.drop()
+t.save({ m : 1 } );
+t.update( { m : 1 }, { $pullAll : { 'a.b' : [ 1 ] } } );
+assert( ('a' in t.findOne()) == false );
+// Non-obvious bit: the implementation of non-in-place update
+// might do different things depending on whether the "new" field
+// comes before or after existing fields in the document.
+// So for now it's worth testing that too. Sorry, future; blame the past.
+t.update( { m : 1 }, { $pullAll : { 'x.y' : [ 1 ] } } );
+assert( ('z' in t.findOne()) == false );
+// End SERVER-6047
diff --git a/jstests/pullall2.js b/jstests/pullall2.js
new file mode 100644
index 00000000000..61369badaa4
--- /dev/null
+++ b/jstests/pullall2.js
@@ -0,0 +1,20 @@
+
+t = db.pullall2
+t.drop()
+
+o = { _id : 1 , a : [] }
+for ( i=0; i<5; i++ )
+ o.a.push( { x : i , y : i } )
+
+t.insert( o )
+
+assert.eq( o , t.findOne() , "A" );
+
+t.update( {} , { $pull : { a : { x : 3 } } } )
+o.a = o.a.filter( function(z){ return z.x != 3 } )
+assert.eq( o , t.findOne() , "B" );
+
+t.update( {} , { $pull : { a : { x : { $in : [ 1 , 4 ] } } } } );
+o.a = o.a.filter( function(z){ return z.x != 1 } )
+o.a = o.a.filter( function(z){ return z.x != 4 } )
+assert.eq( o , t.findOne() , "C" );
diff --git a/jstests/push.js b/jstests/push.js
new file mode 100644
index 00000000000..9bcaa2ffb6b
--- /dev/null
+++ b/jstests/push.js
@@ -0,0 +1,54 @@
+
+t = db.push
+t.drop();
+
+t.save( { _id : 2 , a : [ 1 ] } );
+t.update( { _id : 2 } , { $push : { a : 2 } } );
+assert.eq( "1,2" , t.findOne().a.toString() , "A" );
+t.update( { _id : 2 } , { $push : { a : 3 } } );
+assert.eq( "1,2,3" , t.findOne().a.toString() , "B" );
+
+t.update( { _id : 2 } , { $pop : { a : 1 } } );
+assert.eq( "1,2" , t.findOne().a.toString() , "C" );
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.eq( "2" , t.findOne().a.toString() , "D" );
+
+
+t.update( { _id : 2 } , { $push : { a : 3 } } );
+t.update( { _id : 2 } , { $push : { a : 4 } } );
+t.update( { _id : 2 } , { $push : { a : 5 } } );
+assert.eq( "2,3,4,5" , t.findOne().a.toString() , "E1" );
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.eq( "3,4,5" , t.findOne().a.toString() , "E2" );
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.eq( "4,5" , t.findOne().a.toString() , "E3" );
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.isnull( db.getLastError() , "E4a" )
+assert.eq( "5" , t.findOne().a.toString() , "E4" );
+
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.isnull( db.getLastError() , "E5a")
+assert.eq( "" , t.findOne().a.toString() , "E5" );
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.isnull( db.getLastError() , "E6a" )
+assert.eq( "" , t.findOne().a.toString() , "E6" );
+
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.isnull( db.getLastError() , "E7a" )
+assert.eq( "" , t.findOne().a.toString() , "E7" );
+
+t.update( { _id : 2 } , { $pop : { a : 1 } } );
+assert.isnull( db.getLastError() , "E8a" )
+assert.eq( "" , t.findOne().a.toString() , "E8" );
+
+t.update( { _id : 2 } , { $pop : { b : -1 } } );
+assert.isnull( db.getLastError() , "E4a" )
+
+t.update( { _id : 2 } , { $pop : { b : 1 } } );
+assert.isnull( db.getLastError() , "E4a" )
+
diff --git a/jstests/push2.js b/jstests/push2.js
new file mode 100644
index 00000000000..e8bcff6760c
--- /dev/null
+++ b/jstests/push2.js
@@ -0,0 +1,21 @@
+
+t = db.push2
+t.drop()
+
+t.save( { _id : 1 , a : [] } )
+
+s = new Array(700000).toString();
+
+gotError = null;
+
+for ( x=0; x<100; x++ ){
+ print (x + " pushes");
+ t.update( {} , { $push : { a : s } } );
+ gotError = db.getLastError();
+ if ( gotError )
+ break;
+}
+
+assert( gotError , "should have gotten error" );
+
+t.drop();
diff --git a/jstests/push_sort.js b/jstests/push_sort.js
new file mode 100644
index 00000000000..87916d5ea6b
--- /dev/null
+++ b/jstests/push_sort.js
@@ -0,0 +1,96 @@
+//
+// $push acquired the possibility of sorting the resulting array as part of SERVER-8008. This
+// test exercises such $sort clause from the shell user's perspective.
+//
+
+t = db.push_sort;
+t.drop();
+
+//
+// Valid Cases
+//
+
+// $slice amount is too large to kick in.
+t.save( { _id: 1, x: [ {a:1}, {a:2} ] } );
+t.update( {_id:1}, { $push: { x: { $each: [ {a:3} ], $slice:-5, $sort: {a:1} } } } )
+assert.eq( [{a:1}, {a:2}, {a:3}] , t.findOne( {_id:1} ).x );
+
+// $slice amount kicks in using values of both the base doc and of the $each clause.
+t.save({ _id: 2, x: [ {a:1}, {a:3} ] } );
+t.update( {_id:2}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:1} } } } )
+assert.eq( [{a:2}, {a:3}], t.findOne( {_id:2} ).x );
+
+// $sort is descending and $slice is too large to kick in.
+t.save({ _id: 3, x: [ {a:1}, {a:3} ] } );
+t.update( {_id:3}, { $push: { x: { $each: [ {a:2} ], $slice:-5, $sort: {a:-1} } } } )
+assert.eq( [{a:3}, {a:2}, {a:1}], t.findOne( {_id:3} ).x );
+
+// $sort is descending and $slice kicks in using values of both the base doc and of
+// the $each clause.
+t.save({ _id: 4, x: [ {a:1}, {a:3} ] } );
+t.update( {_id:4}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:-1} } } } )
+assert.eq( [{a:2}, {a:1}], t.findOne( {_id:4} ).x );
+
+// $sort over only a portion of the array's elements objects and #slice kicking in
+// using values of both the base doc and of the $each clause.
+t.save({ _id: 5, x: [ {a:1,b:2}, {a:3,b:1} ] } );
+t.update( {_id:5}, { $push: { x: { $each: [ {a:2,b:3} ], $slice:-2, $sort: {b:1} } } } )
+assert.eq( [{a:1, b:2}, {a:2,b:3}], t.findOne( {_id:5} ).x );
+
+// $sort over an array of nested objects and $slice too large to kick in.
+t.save({ _id: 6, x: [ {a:{b:2}}, {a:{b:1}} ] } );
+t.update( {_id:6}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-5, $sort: {'a.b':1} } } } )
+assert.eq( [{a:{b:1}}, {a:{b:2}}, {a:{b:3}}], t.findOne( {_id:6} ).x );
+
+// $sort over an array of nested objects and $slice kicking in using values of both the
+// base doc and of the $each clause.
+t.save({ _id: 7, x: [ {a:{b:2}}, {a:{b:1}} ] } );
+t.update( {_id:7}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-2, $sort: {'a.b':1} } } } )
+assert.eq( [{a:{b:2}}, {a:{b:3}}], t.findOne( {_id:7} ).x );
+
+//
+// Invalid Cases
+//
+
+t.save({ _id: 100, x: [ {a:1} ] } );
+
+// For now, elements of the $each vector need to be objects. In here, '2' is an invalid $each.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [ 2 ], $slice:-2, $sort:{a:1} } } } ) )
+
+// For the same reason as above, '1' is an invalid $each element.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2},1], $slice:-2, $sort:{a:1} } } }))
+
+// The sort key pattern cannot be empty.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort:{} } } } ) )
+
+// For now, we do not support positive $slice's (ie, trimming from the array's front).
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:2, $sort: {a:1} } } }))
+
+// A $slice cannot be a fractional value.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2.1, $sort: {a:1} } }}))
+
+// The sort key pattern's value must be either 1 or -1. In here, {a:-2} is an invalid value.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:-2} } } }))
+
+// For now, we are not supporting sorting of basic elements (non-object, non-arrays). In here,
+// the $sort clause would need to have a key pattern value rather than 1.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: 1 } } } ) )
+
+// The key pattern 'a.' is an invalid value for $sort.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'a.':1} }}}))
+
+// An empty key pattern is not a valid $sort value.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'':1} } } }))
+
+// If a $slice is used, the only other clause that's accepted alongside it is $sort. In here,
+// $xxx is not a valid clause.
+assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $xxx: {s:1} } } } ) )
+
+t.remove({})
+
+// Ensure that existing values are validated in the array as objects during a $sort with $each,
+// not only the elements in the $each array.
+t.save({ _id: 100, x: [ 1, "foo" ] } );
+assert.throws(t.update(
+ {_id: 100},
+ { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:1} } } } ) )
diff --git a/jstests/pushall.js b/jstests/pushall.js
new file mode 100644
index 00000000000..eda68203ed3
--- /dev/null
+++ b/jstests/pushall.js
@@ -0,0 +1,20 @@
+t = db.jstests_pushall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4, 4 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4, 5 ] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+
+t.drop();
+t.save( {} );
+t.update( {}, { $pushAll: { a: [ 1, 2 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
diff --git a/jstests/query1.js b/jstests/query1.js
new file mode 100644
index 00000000000..8fa402cda65
--- /dev/null
+++ b/jstests/query1.js
@@ -0,0 +1,26 @@
+
+t = db.query1;
+t.drop();
+
+t.save( { num : 1 } );
+t.save( { num : 3 } )
+t.save( { num : 4 } );
+
+num = 0;
+total = 0;
+
+t.find().forEach(
+ function(z){
+ num++;
+ total += z.num;
+ }
+);
+
+assert.eq( num , 3 , "num" )
+assert.eq( total , 8 , "total" )
+
+assert.eq( 3 , t.find()._addSpecial( "$comment" , "this is a test" ).itcount() , "B1" )
+assert.eq( 3 , t.find()._addSpecial( "$comment" , "this is a test" ).count() , "B2" )
+
+assert.eq( 3 , t.find( { "$comment" : "yo ho ho" } ).itcount() , "C1" )
+assert.eq( 3 , t.find( { "$comment" : "this is a test" } ).count() , "C2" )
diff --git a/jstests/queryoptimizer3.js b/jstests/queryoptimizer3.js
new file mode 100644
index 00000000000..a90c7985839
--- /dev/null
+++ b/jstests/queryoptimizer3.js
@@ -0,0 +1,33 @@
+// Check cases where index scans are aborted due to the collection being dropped. SERVER-4400
+
+t = db.jstests_queryoptimizer3;
+t.drop();
+
+p = startParallelShell( 'for( i = 0; i < 400; ++i ) { sleep( 50 ); db.jstests_queryoptimizer3.drop(); }' );
+
+for( i = 0; i < 100; ++i ) {
+ t.drop();
+ t.ensureIndex({a:1});
+ t.ensureIndex({b:1});
+ for( j = 0; j < 100; ++j ) {
+ t.save({a:j,b:j});
+ }
+ m = i % 5;
+ if ( m == 0 ) {
+ t.count({a:{$gte:0},b:{$gte:0}});
+ }
+ else if ( m == 1 ) {
+ t.find({a:{$gte:0},b:{$gte:0}}).itcount();
+ }
+ else if ( m == 2 ) {
+ t.remove({a:{$gte:0},b:{$gte:0}});
+ }
+ else if ( m == 3 ) {
+ t.update({a:{$gte:0},b:{$gte:0}},{});
+ }
+ else if ( m == 4 ) {
+ t.distinct('x',{a:{$gte:0},b:{$gte:0}});
+ }
+}
+
+p();
diff --git a/jstests/queryoptimizer6.js b/jstests/queryoptimizer6.js
new file mode 100644
index 00000000000..32efccbdb0b
--- /dev/null
+++ b/jstests/queryoptimizer6.js
@@ -0,0 +1,16 @@
+// Test that $ne constraints are accounted for in QueryPattern. SERVER-4665
+
+t = db.jstests_queryoptimizer6;
+t.drop();
+
+t.save( {a:1} );
+
+// There is a bug in the 2.4.x indexing where the first query below returns 0 results with this
+// index, but 1 result without it.
+//
+// t.ensureIndex( {b:1}, {sparse:true} );
+
+// The sparse index will be used, and recorded for this query pattern.
+assert.eq( 1, t.find( {a:1,b:{$ne:1}} ).itcount() );
+// The query pattern should be different, and the sparse index should not be used.
+assert.eq( 1, t.find( {a:1} ).itcount() );
diff --git a/jstests/queryoptimizera.js b/jstests/queryoptimizera.js
new file mode 100644
index 00000000000..f26c2b0978c
--- /dev/null
+++ b/jstests/queryoptimizera.js
@@ -0,0 +1,92 @@
+// Check that a warning message about doing a capped collection scan for a query with an _id
+// constraint is printed at appropriate times. SERVER-5353
+
+function numWarnings() {
+ logs = db.adminCommand( { getLog:"global" } ).log
+ ret = 0;
+ logs.forEach( function( x ) {
+ if ( x.match( warningMatchRegexp ) ) {
+ ++ret;
+ }
+ } );
+ return ret;
+}
+
+collectionNameIndex = 0;
+
+// Generate a collection name not already present in the log.
+do {
+ testCollectionName = 'jstests_queryoptimizera__' + collectionNameIndex++;
+ warningMatchString = 'unindexed _id query on capped collection.*collection: test.' +
+ testCollectionName;
+ warningMatchRegexp = new RegExp( warningMatchString );
+
+} while( numWarnings() > 0 );
+
+t = db[ testCollectionName ];
+t.drop();
+
+notCappedCollectionName = testCollectionName + '_notCapped';
+
+notCapped = db[ notCappedCollectionName ];
+notCapped.drop();
+
+db.createCollection( testCollectionName, { capped:true, size:1000 } );
+db.createCollection( notCappedCollectionName, { autoIndexId:false } );
+
+t.insert( {} );
+notCapped.insert( {} );
+
+oldNumWarnings = 0;
+
+function assertNoNewWarnings() {
+ assert.eq( oldNumWarnings, numWarnings() );
+}
+
+function assertNewWarning() {
+ newNumWarnings = numWarnings();
+ // Ensure that newNumWarnings > oldNumWarnings. It's not safe to test that oldNumWarnings + 1
+ // == newNumWarnings, because a (simulated) page fault exception may cause multiple messages to
+ // be logged instead of only one.
+ assert.lt( oldNumWarnings, newNumWarnings );
+ oldNumWarnings = newNumWarnings;
+}
+
+// Simple _id query
+t.find( { _id:0 } ).itcount();
+assertNoNewWarnings();
+
+// Simple _id query without an _id index, on a non capped collection.
+notCapped.find( { _id:0 } ).itcount();
+assertNoNewWarnings();
+
+// A multi field query, including _id.
+t.find( { _id:0, a:0 } ).itcount();
+assertNoNewWarnings();
+
+// An unsatisfiable query.
+t.find( { _id:0, a:{$in:[]} } ).itcount();
+assertNoNewWarnings();
+
+// An hinted query.
+t.find( { _id:0 } ).hint( { $natural:1 } ).itcount();
+assertNoNewWarnings();
+
+// Retry a multi field query.
+t.find( { _id:0, a:0 } ).itcount();
+assertNoNewWarnings();
+
+// Warnings should not be printed when an index is added on _id.
+t.ensureIndex( { _id:1 } );
+
+t.find( { _id:0 } ).itcount();
+assertNoNewWarnings();
+
+t.find( { _id:0, a:0 } ).itcount();
+assertNoNewWarnings();
+
+t.find( { _id:0, a:0 } ).itcount();
+assertNoNewWarnings();
+
+t.drop(); // cleanup
+notCapped.drop(); \ No newline at end of file
diff --git a/jstests/ref.js b/jstests/ref.js
new file mode 100644
index 00000000000..20fd6ca94f0
--- /dev/null
+++ b/jstests/ref.js
@@ -0,0 +1,19 @@
+// to run:
+// ./mongo jstests/ref.js
+
+db.otherthings.drop();
+db.things.drop();
+
+var other = { s : "other thing", n : 1};
+db.otherthings.save(other);
+
+db.things.save( { name : "abc" } );
+x = db.things.findOne();
+x.o = new DBPointer( "otherthings" , other._id );
+db.things.save(x);
+
+assert( db.things.findOne().o.fetch().n == 1, "dbref broken 2" );
+
+other.n++;
+db.otherthings.save(other);
+assert( db.things.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref2.js b/jstests/ref2.js
new file mode 100644
index 00000000000..29640cd5da0
--- /dev/null
+++ b/jstests/ref2.js
@@ -0,0 +1,14 @@
+
+t = db.ref2;
+t.drop();
+
+a = { $ref : "foo" , $id : 1 };
+b = { $ref : "foo" , $id : 2 };
+
+
+t.save( { name : "a" , r : a } );
+t.save( { name : "b" , r : b } );
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { r : a } ).count() , "B" );
+assert.eq( 1 , t.find( { r : b } ).count() , "C" );
diff --git a/jstests/ref3.js b/jstests/ref3.js
new file mode 100644
index 00000000000..14037ee4cc8
--- /dev/null
+++ b/jstests/ref3.js
@@ -0,0 +1,19 @@
+// to run:
+// ./mongo jstests/ref3.js
+
+db.otherthings3.drop();
+db.things3.drop();
+
+var other = { s : "other thing", n : 1};
+db.otherthings3.save(other);
+
+db.things3.save( { name : "abc" } );
+x = db.things3.findOne();
+x.o = new DBRef( "otherthings3" , other._id );
+db.things3.save(x);
+
+assert( db.things3.findOne().o.fetch().n == 1, "dbref broken 2" );
+
+other.n++;
+db.otherthings3.save(other);
+assert( db.things3.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref4.js b/jstests/ref4.js
new file mode 100644
index 00000000000..1c105ef2795
--- /dev/null
+++ b/jstests/ref4.js
@@ -0,0 +1,20 @@
+
+a = db.ref4a;
+b = db.ref4b;
+
+a.drop();
+b.drop();
+
+var other = { s : "other thing", n : 17 };
+b.save(other);
+
+a.save( { name : "abc" , others : [ new DBRef( "ref4b" , other._id ) , new DBPointer( "ref4b" , other._id ) ] } );
+assert( a.findOne().others[0].fetch().n == 17 , "dbref broken 1" );
+
+x = Array.fetchRefs( a.findOne().others );
+assert.eq( 2 , x.length , "A" );
+assert.eq( 17 , x[0].n , "B" );
+assert.eq( 17 , x[1].n , "C" );
+
+
+assert.eq( 0 , Array.fetchRefs( a.findOne().others , "z" ).length , "D" );
diff --git a/jstests/regex.js b/jstests/regex.js
new file mode 100644
index 00000000000..f431d506ea6
--- /dev/null
+++ b/jstests/regex.js
@@ -0,0 +1,24 @@
+t = db.jstests_regex;
+
+t.drop();
+t.save( { a: "bcd" } );
+assert.eq( 1, t.count( { a: /b/ } ) , "A" );
+assert.eq( 1, t.count( { a: /bc/ } ) , "B" );
+assert.eq( 1, t.count( { a: /bcd/ } ) , "C" );
+assert.eq( 0, t.count( { a: /bcde/ } ) , "D" );
+
+t.drop();
+t.save( { a: { b: "cde" } } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "E" );
+
+t.drop();
+t.save( { a: { b: [ "cde" ] } } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "F" );
+
+t.drop();
+t.save( { a: [ { b: "cde" } ] } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "G" );
+
+t.drop();
+t.save( { a: [ { b: [ "cde" ] } ] } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "H" );
diff --git a/jstests/regex2.js b/jstests/regex2.js
new file mode 100644
index 00000000000..87d5cb47c05
--- /dev/null
+++ b/jstests/regex2.js
@@ -0,0 +1,70 @@
+
+t = db.regex2;
+t.drop();
+
+t.save( { a : "test" } );
+t.save( { a : "Test" } );
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { a : "Test" } ).count() , "B" );
+assert.eq( 1 , t.find( { a : "test" } ).count() , "C" );
+assert.eq( 1 , t.find( { a : /Test/ } ).count() , "D" );
+assert.eq( 1 , t.find( { a : /test/ } ).count() , "E" );
+assert.eq( 2 , t.find( { a : /test/i } ).count() , "F" );
+
+
+t.drop();
+
+a = "\u0442\u0435\u0441\u0442";
+b = "\u0422\u0435\u0441\u0442";
+
+assert( ( new RegExp( a ) ).test( a ) , "B 1" );
+assert( ! ( new RegExp( a ) ).test( b ) , "B 2" );
+assert( ( new RegExp( a , "i" ) ).test( b ) , "B 3 " );
+
+t.save( { a : a } );
+t.save( { a : b } );
+
+
+assert.eq( 2 , t.find().count() , "C A" );
+assert.eq( 1 , t.find( { a : a } ).count() , "C B" );
+assert.eq( 1 , t.find( { a : b } ).count() , "C C" );
+assert.eq( 1 , t.find( { a : new RegExp( a ) } ).count() , "C D" );
+assert.eq( 1 , t.find( { a : new RegExp( b ) } ).count() , "C E" );
+assert.eq( 2 , t.find( { a : new RegExp( a , "i" ) } ).count() , "C F is spidermonkey built with UTF-8 support?" );
+
+
+// same tests as above but using {$regex: "a|b", $options: "imx"} syntax.
+t.drop();
+
+t.save( { a : "test" } );
+t.save( { a : "Test" } );
+
+assert.eq( 2 , t.find().count() , "obj A" );
+assert.eq( 1 , t.find( { a : {$regex:"Test"} } ).count() , "obj D" );
+assert.eq( 1 , t.find( { a : {$regex:"test"} } ).count() , "obj E" );
+assert.eq( 2 , t.find( { a : {$regex:"test", $options:"i"} } ).count() , "obj F" );
+assert.eq( 2 , t.find( { a : {$options:"i", $regex:"test"} } ).count() , "obj F rev" ); // both orders should work
+
+
+t.drop();
+
+a = "\u0442\u0435\u0441\u0442";
+b = "\u0422\u0435\u0441\u0442";
+
+t.save( { a : a } );
+t.save( { a : b } );
+
+
+assert.eq( 1 , t.find( { a : {$regex: a} } ).count() , "obj C D" );
+assert.eq( 1 , t.find( { a : {$regex: b} } ).count() , "obj C E" );
+assert.eq( 2 , t.find( { a : {$regex: a , $options: "i" } } ).count() , "obj C F is spidermonkey built with UTF-8 support?" );
+
+// Test s (DOT_ALL) option. Not supported with /regex/opts syntax
+t.drop();
+t.save({a:'1 2'})
+t.save({a:'1\n2'})
+assert.eq( 1 , t.find( { a : {$regex: '1.*2'} } ).count() );
+assert.eq( 2 , t.find( { a : {$regex: '1.*2', $options: 's'} } ).count() );
+
+
diff --git a/jstests/regex3.js b/jstests/regex3.js
new file mode 100644
index 00000000000..5ac8fab4c40
--- /dev/null
+++ b/jstests/regex3.js
@@ -0,0 +1,36 @@
+
+t = db.regex3;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "no index count" );
+assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
+t.ensureIndex( { name : 1 } );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "index count" );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
+
+t.drop();
+
+t.save( { name : "aa" } );
+t.save( { name : "ab" } );
+t.save( { name : "ac" } );
+t.save( { name : "c" } );
+
+assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B ni" );
+t.ensureIndex( { name : 1 } );
+assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B i 1" );
+assert.eq( 4 , t.find( { name : /^aa*/ } ).explain().nscanned , "B i 1 e" );
+
+assert.eq( 2 , t.find( { name : /^a[ab]/ } ).itcount() , "B i 2" );
+assert.eq( 2 , t.find( { name : /^a[bc]/ } ).itcount() , "B i 3" );
+
+t.drop();
+
+t.save( { name: "" } );
+assert.eq( 1, t.find( { name: /^a?/ } ).itcount() , "C 1" );
+t.ensureIndex( { name: 1 } );
+assert.eq( 1, t.find( { name: /^a?/ } ).itcount(), "C 2");
diff --git a/jstests/regex4.js b/jstests/regex4.js
new file mode 100644
index 00000000000..fc26d691c91
--- /dev/null
+++ b/jstests/regex4.js
@@ -0,0 +1,18 @@
+
+t = db.regex4;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "no index count" );
+assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
+//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "no index count ne" ); // SERVER-251
+
+t.ensureIndex( { name : 1 } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "index count" );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
+//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "index count ne" ); // SERVER-251
diff --git a/jstests/regex5.js b/jstests/regex5.js
new file mode 100644
index 00000000000..9f2549d7146
--- /dev/null
+++ b/jstests/regex5.js
@@ -0,0 +1,53 @@
+
+t = db.regex5
+t.drop()
+
+// Add filler data to make sure that indexed solutions are
+// chosen over collection scans.
+for (var i = 0; i < 10; i++) {
+ t.save({filler: "filler"});
+}
+
+t.save( { x : [ "abc" , "xyz1" ] } )
+t.save( { x : [ "ac" , "xyz2" ] } )
+
+a = /.*b.*c/
+x = /.*y.*/
+
+doit = function() {
+
+ assert.eq( 1 , t.find( { x : a } ).count() , "A" );
+ assert.eq( 2 , t.find( { x : x } ).count() , "B" );
+ assert.eq( 2 , t.find( { x : { $in: [ x ] } } ).count() , "C" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $in: [ a, "xyz1" ] } } ).count() , "D" ); // SERVER-322
+ assert.eq( 2 , t.find( { x : { $in: [ a, "xyz2" ] } } ).count() , "E" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "F" ); // SERVER-505
+ assert.eq( 1 , t.find( { x : { $all : [ a , "abc" ] } } ).count() , "G" ); // SERVER-505
+ assert.eq( 0 , t.find( { x : { $all : [ a , "ac" ] } } ).count() , "H" ); // SERVER-505
+ assert.eq( 10 , t.find( { x : { $nin: [ x ] } } ).count() , "I" ); // SERVER-322
+ assert.eq( 11 , t.find( { x : { $nin: [ a, "xyz1" ] } } ).count() , "J" ); // SERVER-322
+ assert.eq( 10 , t.find( { x : { $nin: [ a, "xyz2" ] } } ).count() , "K" ); // SERVER-322
+ assert.eq( 2 , t.find( { x : { $not: { $nin: [ x ] } } } ).count() , "L" ); // SERVER-322
+ assert.eq( 11 , t.find( { x : { $nin: [ /^a.c/ ] } } ).count() , "M" ) // SERVER-322
+}
+
+doit();
+t.ensureIndex( {x:1} );
+print( "now indexed" );
+doit();
+
+// check bound unions SERVER-322
+assert.eq( {
+ x:[[1,1],
+ [2.5,2.5],
+ ["a","a"],
+ ["b","e"],
+ [/^b/,/^b/],
+ [/^c/,/^c/],
+ [/^d/,/^d/]]
+ },
+ t.find( { x : { $in: [ 1, 2.5, "a", "b", /^b/, /^c/, /^d/ ] } } ).explain().indexBounds );
+
+// SERVER-505
+assert.eq( 0, t.find( { x : { $all: [ "a", /^a/ ] } } ).itcount());
+assert.eq( 2, t.find( { x : { $all: [ /^a/ ] } } ).itcount());
diff --git a/jstests/regex6.js b/jstests/regex6.js
new file mode 100644
index 00000000000..54143248398
--- /dev/null
+++ b/jstests/regex6.js
@@ -0,0 +1,29 @@
+// contributed by Andrew Kempe
+t = db.regex6;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+t.save( { name : "[with]some?symbols" } );
+
+t.ensureIndex( { name : 1 } );
+
+assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
+assert.eq( 1 , t.find( { name : /^\// } ).explain().nscanned , "index explain 1" );
+assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain 2" );
+assert.eq( 0 , t.find( { name : /^\é/ } ).explain().nscanned , "index explain 3" );
+assert.eq( 1 , t.find( { name : /^\./ } ).explain().nscanned , "index explain 4" );
+assert.eq( 5 , t.find( { name : /^./ } ).explain().nscanned , "index explain 5" );
+
+// SERVER-2862
+assert.eq( 0 , t.find( { name : /^\Qblah\E/ } ).count() , "index explain 6" );
+assert.eq( 1 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain 6" );
+assert.eq( 1 , t.find( { name : /^blah/ } ).explain().nscanned , "index explain 6" );
+assert.eq( 1 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).count() , "index explain 6" );
+assert.eq( 2 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).explain().nscanned , "index explain 6" );
+assert.eq( 2 , t.find( { name : /^bob/ } ).explain().nscanned , "index explain 6" ); // proof nscanned == count+1
+
+assert.eq( 1, t.find( { name : { $regex : "^e", $gte: "emily" } } ).explain().nscanned , "ie7" );
+assert.eq( 1, t.find( { name : { $gt : "a", $regex: "^emily" } } ).explain().nscanned , "ie7" );
diff --git a/jstests/regex7.js b/jstests/regex7.js
new file mode 100644
index 00000000000..ab4f6089f9b
--- /dev/null
+++ b/jstests/regex7.js
@@ -0,0 +1,26 @@
+t = db.regex_matches_self;
+t.drop();
+
+t.insert({r:/^a/});
+t.insert({r:/^a/i});
+t.insert({r:/^b/});
+
+// no index
+assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 a')
+assert.eq( 1, t.count({r:/^a/}), '1 2')
+assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 a')
+assert.eq( 1, t.count({r:/^a/i}), '2 2 a')
+assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 a')
+assert.eq( 1, t.count({r:/^b/}), '3 2 a')
+
+// with index
+t.ensureIndex({r:1})
+assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 b')
+assert.eq( 1, t.count({r:/^a/}), '1 2 b')
+assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 b')
+assert.eq( 1, t.count({r:/^a/i}), '2 2 b')
+assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 b')
+assert.eq( 1, t.count({r:/^b/}), '3 2 b')
+
+t.insert( {r:"a"} );
+assert.eq( 2, t.count({r:/^a/}), 'c' ); \ No newline at end of file
diff --git a/jstests/regex8.js b/jstests/regex8.js
new file mode 100644
index 00000000000..33dd74fb812
--- /dev/null
+++ b/jstests/regex8.js
@@ -0,0 +1,19 @@
+
+t = db.regex8;
+t.drop()
+
+t.insert( { _id : 1 , a : "abc" } )
+t.insert( { _ud : 2 , a : "abc" } )
+t.insert( { _id : 3 , a : "bdc" } )
+
+function test( msg ){
+ assert.eq( 3 , t.find().itcount() , msg + "1" )
+ assert.eq( 2 , t.find( { a : /a.*/ } ).itcount() , msg + "2" )
+ assert.eq( 3 , t.find( { a : /[ab].*/ } ).itcount() , msg + "3" )
+ assert.eq( 3 , t.find( { a : /[a|b].*/ } ).itcount() , msg + "4" )
+}
+
+test( "A" );
+
+t.ensureIndex( { a : 1 } )
+test( "B" )
diff --git a/jstests/regex9.js b/jstests/regex9.js
new file mode 100644
index 00000000000..896855c6dfb
--- /dev/null
+++ b/jstests/regex9.js
@@ -0,0 +1,11 @@
+
+t = db.regex9;
+t.drop();
+
+t.insert( { _id : 1 , a : [ "a" , "b" , "c" ] } )
+t.insert( { _id : 2 , a : [ "a" , "b" , "c" , "d" ] } )
+t.insert( { _id : 3 , a : [ "b" , "c" , "d" ] } )
+
+assert.eq( 2 , t.find( { a : /a/ } ).itcount() , "A1" )
+assert.eq( 2 , t.find( { a : { $regex : "a" } } ).itcount() , "A2" )
+assert.eq( 2 , t.find( { a : { $regex : /a/ } } ).itcount() , "A3" )
diff --git a/jstests/regex_embed1.js b/jstests/regex_embed1.js
new file mode 100644
index 00000000000..61b1b9a14f6
--- /dev/null
+++ b/jstests/regex_embed1.js
@@ -0,0 +1,25 @@
+
+t = db.regex_embed1
+
+t.drop()
+
+t.insert( { _id : 1 , a : [ { x : "abc" } , { x : "def" } ] } )
+t.insert( { _id : 2 , a : [ { x : "ab" } , { x : "de" } ] } )
+t.insert( { _id : 3 , a : [ { x : "ab" } , { x : "de" } , { x : "abc" } ] } )
+
+function test( m ){
+ assert.eq( 3 , t.find().itcount() , m + "1" );
+ assert.eq( 2 , t.find( { "a.x" : "abc" } ).itcount() , m + "2" );
+ assert.eq( 2 , t.find( { "a.x" : /.*abc.*/ } ).itcount() , m + "3" );
+
+ assert.eq( 1 , t.find( { "a.0.x" : "abc" } ).itcount() , m + "4" );
+ assert.eq( 1 , t.find( { "a.0.x" : /abc/ } ).itcount() , m + "5" );
+}
+
+test( "A" );
+
+t.ensureIndex( { "a.x" : 1 } )
+test( "B" );
+
+
+
diff --git a/jstests/regex_limit.js b/jstests/regex_limit.js
new file mode 100644
index 00000000000..e05dae8ab8b
--- /dev/null
+++ b/jstests/regex_limit.js
@@ -0,0 +1,22 @@
+var t = db.regex_limit;
+t.drop();
+
+var repeatStr = function(str, n){
+ return new Array(n + 1).join(str);
+};
+
+t.insert({ z: repeatStr('c', 100000) });
+
+var maxOkStrLen = repeatStr('c', 32764);
+var strTooLong = maxOkStrLen + 'c';
+
+assert(t.findOne({ z: { $regex: maxOkStrLen }}) != null);
+assert.throws(function() {
+ t.findOne({ z: { $regex: strTooLong }});
+});
+
+assert(t.findOne({ z: { $in: [ new RegExp(maxOkStrLen) ]}}) != null);
+assert.throws(function() {
+ t.findOne({ z: { $in: [ new RegExp(strTooLong) ]}});
+});
+
diff --git a/jstests/regex_options.js b/jstests/regex_options.js
new file mode 100644
index 00000000000..3febe2575ab
--- /dev/null
+++ b/jstests/regex_options.js
@@ -0,0 +1,7 @@
+t = db.jstests_regex_options;
+
+t.drop();
+t.save( { a: "foo" } );
+assert.eq( 1, t.count( { a: { "$regex": /O/i } } ) );
+assert.eq( 1, t.count( { a: /O/i } ) );
+assert.eq( 1, t.count( { a: { "$regex": "O", "$options": "i" } } ) );
diff --git a/jstests/regex_util.js b/jstests/regex_util.js
new file mode 100644
index 00000000000..86ba8036516
--- /dev/null
+++ b/jstests/regex_util.js
@@ -0,0 +1,27 @@
+// Tests for RegExp.escape
+
+(function() {
+ var TEST_STRINGS = [
+ "[db]",
+ "{ab}",
+ "<c2>",
+ "(abc)",
+ "^first^",
+ "&addr",
+ "k@10gen.com",
+ "#4",
+ "!b",
+ "<>3",
+ "****word+",
+ "\t| |\n\r",
+ "Mongo-db",
+ "[{(<>)}]!@#%^&*+\\"
+ ];
+
+ TEST_STRINGS.forEach(function (str) {
+ var escaped = RegExp.escape(str);
+ var regex = new RegExp(escaped);
+ assert(regex.test(str), "Wrong escape for " + str);
+ });
+})();
+
diff --git a/jstests/regexa.js b/jstests/regexa.js
new file mode 100644
index 00000000000..b0d47190e77
--- /dev/null
+++ b/jstests/regexa.js
@@ -0,0 +1,19 @@
+// Test simple regex optimization with a regex | (bar) present - SERVER-3298
+
+t = db.jstests_regexa;
+t.drop();
+
+function check() {
+ assert.eq( 1, t.count( {a:/^(z|.)/} ) );
+ assert.eq( 1, t.count( {a:/^z|./} ) );
+ assert.eq( 0, t.count( {a:/^z(z|.)/} ) );
+ assert.eq( 1, t.count( {a:/^zz|./} ) );
+}
+
+t.save( {a:'a'} );
+
+check();
+t.ensureIndex( {a:1} );
+if ( 1 ) { // SERVER-3298
+check();
+}
diff --git a/jstests/regexb.js b/jstests/regexb.js
new file mode 100644
index 00000000000..169841239c8
--- /dev/null
+++ b/jstests/regexb.js
@@ -0,0 +1,14 @@
+// Test more than four regex expressions in a query -- SERVER-969
+
+t = db.jstests_regexb;
+t.drop();
+
+t.save( {a:'a',b:'b',c:'c',d:'d',e:'e'} );
+
+assert.eq( 1, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/e/} ) );
+assert.eq( 0, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/barf/} ) );
+
+
+
+
+
diff --git a/jstests/regexc.js b/jstests/regexc.js
new file mode 100644
index 00000000000..f7690c96496
--- /dev/null
+++ b/jstests/regexc.js
@@ -0,0 +1,28 @@
+// Multiple regular expressions using the same index
+
+var t = db.jstests_regexc;
+
+// $and using same index twice
+t.drop();
+t.ensureIndex({a: 1});
+t.save({a: "0"});
+t.save({a: "1"});
+t.save({a: "10"});
+assert.eq( 1, t.find({$and: [{a: /0/}, {a: /1/}]}).itcount() );
+
+// implicit $and using compound index twice
+t.drop();
+t.ensureIndex({a: 1, b: 1});
+t.save({a: "0", b: "1"});
+t.save({a: "10", b: "10"});
+t.save({a: "10", b: "2"});
+assert.eq( 2, t.find({a: /0/, b: /1/}).itcount() );
+
+// $or using same index twice
+t.drop();
+t.ensureIndex({a: 1});
+t.save({a: "0"});
+t.save({a: "1"});
+t.save({a: "2"});
+t.save({a: "10"});
+assert.eq( 3, t.find({$or: [{a: /0/}, {a: /1/}]}).itcount() );
diff --git a/jstests/remove.js b/jstests/remove.js
new file mode 100644
index 00000000000..6800a41fedc
--- /dev/null
+++ b/jstests/remove.js
@@ -0,0 +1,27 @@
+// remove.js
+// unit test for db remove
+
+t = db.removetest;
+
+function f(n,dir) {
+ t.ensureIndex({x:dir||1});
+ for( i = 0; i < n; i++ ) t.save( { x:3, z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert.eq( n , t.find().count() );
+ t.remove({x:3});
+
+ assert.eq( 0 , t.find().count() );
+
+ assert( t.findOne() == null , "A:" + tojson( t.findOne() ) );
+ assert( t.validate().valid , "B" );
+}
+
+t.drop();
+f(300, 1);
+
+f(500, -1);
+
+assert(t.validate().valid , "C" );
+
+// no query for remove() throws starting in 2.6
+assert.throws(function() { db.t.remove() });
diff --git a/jstests/remove2.js b/jstests/remove2.js
new file mode 100644
index 00000000000..2b222d7ecac
--- /dev/null
+++ b/jstests/remove2.js
@@ -0,0 +1,46 @@
+// remove2.js
+// a unit test for db remove
+
+t = db.removetest2;
+
+function f() {
+ t.save( { x:[3,3,3,3,3,3,3,3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+ t.save( { x: 9 } );
+ t.save( { x: 1 } );
+
+ t.remove({x:3});
+
+ assert( t.findOne({x:3}) == null );
+ assert( t.validate().valid );
+}
+
+x = 0;
+
+function g() {
+ t.save( { x:[3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+ t.save( { x:[7,8,9], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ t.remove( {x : {$gte:3}, $atomic:x++ } );
+
+ assert( !db.getLastError() );
+ // $atomic within $and is not allowed.
+ //t.remove( {x : {$gte:3}, $and:[{$atomic:true}] } );
+ //assert( db.getLastError() );
+
+ assert( t.findOne({x:3}) == null );
+ assert( t.findOne({x:8}) == null );
+ assert( t.validate().valid );
+}
+
+t.drop();
+f();
+t.drop();
+g();
+
+t.ensureIndex({x:1});
+t.remove({});
+f();
+t.drop();
+t.ensureIndex({x:1});
+g();
+
diff --git a/jstests/remove3.js b/jstests/remove3.js
new file mode 100644
index 00000000000..2a51a6e0fd4
--- /dev/null
+++ b/jstests/remove3.js
@@ -0,0 +1,18 @@
+
+t = db.remove3;
+t.drop();
+
+for ( i=1; i<=8; i++){
+ t.save( { _id : i , x : i } );
+}
+
+assert.eq( 8 , t.count() , "A" );
+
+t.remove( { x : { $lt : 5 } } );
+assert.eq( 4 , t.count() , "B" );
+
+t.remove( { _id : 5 } );
+assert.eq( 3 , t.count() , "C" );
+
+t.remove( { _id : { $lt : 8 } } );
+assert.eq( 1 , t.count() , "D" );
diff --git a/jstests/remove4.js b/jstests/remove4.js
new file mode 100644
index 00000000000..bd007ed4d27
--- /dev/null
+++ b/jstests/remove4.js
@@ -0,0 +1,10 @@
+t = db.remove4;
+t.drop();
+
+t.save ( { a : 1 , b : 1 } );
+t.save ( { a : 2 , b : 1 } );
+t.save ( { a : 3 , b : 1 } );
+
+assert.eq( 3 , t.find().length() );
+t.remove( { b : 1 } );
+assert.eq( 0 , t.find().length() );
diff --git a/jstests/remove6.js b/jstests/remove6.js
new file mode 100644
index 00000000000..d843aeeec0f
--- /dev/null
+++ b/jstests/remove6.js
@@ -0,0 +1,38 @@
+
+t = db.remove6;
+t.drop();
+
+N = 1000;
+
+function pop(){
+ t.drop();
+ for ( var i=0; i<N; i++ ){
+ t.save( { x : 1 , tags : [ "a" , "b" , "c" ] } );
+ }
+}
+
+function del(){
+ t.remove( { tags : { $in : [ "a" , "c" ] } } );
+}
+
+function test( n , idx ){
+ pop();
+ assert.eq( N , t.count() , n + " A " + idx );
+ if ( idx )
+ t.ensureIndex( idx );
+ del();
+ var e = db.getLastError();
+ assert( e == null , "error deleting: " + e );
+ assert.eq( 0 , t.count() , n + " B " + idx );
+}
+
+test( "a" );
+test( "b" , { x : 1 } );
+test( "c" , { tags : 1 } );
+
+N = 5000
+
+test( "a2" );
+test( "b2" , { x : 1 } );
+test( "c2" , { tags : 1 } );
+
diff --git a/jstests/remove7.js b/jstests/remove7.js
new file mode 100644
index 00000000000..50c6ac189bc
--- /dev/null
+++ b/jstests/remove7.js
@@ -0,0 +1,35 @@
+
+t = db.remove7
+t.drop();
+
+
+
+function getTags( n ){
+ n = n || 5;
+ var a = [];
+ for ( var i=0; i<n; i++ ){
+ var v = Math.ceil( 20 * Math.random() );
+ a.push( v );
+ }
+
+ return a;
+}
+
+for ( i=0; i<1000; i++ ){
+ t.save( { tags : getTags() } );
+}
+
+t.ensureIndex( { tags : 1 } );
+
+for ( i=0; i<200; i++ ){
+ for ( var j=0; j<10; j++ )
+ t.save( { tags : getTags( 100 ) } );
+ var q = { tags : { $in : getTags( 10 ) } };
+ var before = t.find( q ).count();
+ t.remove( q );
+ var o = db.getLastErrorObj();
+ var after = t.find( q ).count();
+ assert.eq( 0 , after , "not zero after!" );
+ assert.isnull( o.err , "error: " + tojson( o ) );
+}
+
diff --git a/jstests/remove8.js b/jstests/remove8.js
new file mode 100644
index 00000000000..3ab53f3289a
--- /dev/null
+++ b/jstests/remove8.js
@@ -0,0 +1,21 @@
+
+t = db.remove8;
+t.drop();
+
+N = 1000;
+
+function fill(){
+ for ( var i=0; i<N; i++ ){
+ t.save( { x : i } );
+ }
+}
+
+fill();
+assert.eq( N , t.count() , "A" );
+t.remove( {} )
+assert.eq( 0 , t.count() , "B" );
+
+fill();
+assert.eq( N , t.count() , "C" );
+db.eval( function(){ db.remove8.remove( {} ); } )
+assert.eq( 0 , t.count() , "D" );
diff --git a/jstests/remove9.js b/jstests/remove9.js
new file mode 100644
index 00000000000..655594afe8b
--- /dev/null
+++ b/jstests/remove9.js
@@ -0,0 +1,16 @@
+// SERVER-2009 Count odd numbered entries while updating and deleting even numbered entries.
+
+t = db.jstests_remove9;
+t.drop();
+t.ensureIndex( {i:1} );
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+
+s = startParallelShell( 't = db.jstests_remove9; for( j = 0; j < 5000; ++j ) { i = Random.randInt( 499 ) * 2; t.update( {i:i}, {$set:{i:2000}} ); t.remove( {i:2000} ); t.save( {i:i} ); }' );
+
+for( i = 0; i < 1000; ++i ) {
+ assert.eq( 500, t.find( {i:{$gte:0,$mod:[2,1]}} ).hint( {i:1} ).itcount() );
+}
+
+s();
diff --git a/jstests/remove_justone.js b/jstests/remove_justone.js
new file mode 100644
index 00000000000..e412a13483c
--- /dev/null
+++ b/jstests/remove_justone.js
@@ -0,0 +1,16 @@
+
+t = db.remove_justone
+t.drop()
+
+t.insert( { x : 1 } )
+t.insert( { x : 1 } )
+t.insert( { x : 1 } )
+t.insert( { x : 1 } )
+
+assert.eq( 4 , t.count() )
+
+t.remove( { x : 1 } , true )
+assert.eq( 3 , t.count() )
+
+t.remove( { x : 1 } )
+assert.eq( 0 , t.count() )
diff --git a/jstests/remove_undefined.js b/jstests/remove_undefined.js
new file mode 100644
index 00000000000..d5344a3a562
--- /dev/null
+++ b/jstests/remove_undefined.js
@@ -0,0 +1,28 @@
+
+t = db.drop_undefined.js
+
+t.insert( { _id : 1 } )
+t.insert( { _id : 2 } )
+t.insert( { _id : null } )
+
+z = { foo : 1 , x : null }
+
+t.remove( { x : z.bar } )
+assert.eq( 3 , t.count() , "A1" )
+
+t.remove( { x : undefined } )
+assert.eq( 3 , t.count() , "A2" )
+
+assert.throws( function(){ t.remove( { _id : z.bar } ) } , null , "B1" )
+assert.throws( function(){ t.remove( { _id : undefined } ) } , null , "B2" )
+
+
+t.remove( { _id : z.x } )
+assert.eq( 2 , t.count() , "C1" )
+
+t.insert( { _id : null } )
+assert.eq( 3 , t.count() , "C2" )
+
+assert.throws( function(){ t.remove( { _id : undefined } ) } , null, "C3" )
+assert.eq( 3 , t.count() , "C4" )
+
diff --git a/jstests/removea.js b/jstests/removea.js
new file mode 100644
index 00000000000..703d8c4cf92
--- /dev/null
+++ b/jstests/removea.js
@@ -0,0 +1,23 @@
+// Test removal of a substantial proportion of inserted documents. SERVER-3803
+// A complete test will only be performed against a DEBUG build.
+
+t = db.jstests_removea;
+
+Random.setRandomSeed();
+
+for( v = 0; v < 2; ++v ) { // Try each index version.
+ t.drop();
+ t.ensureIndex( { a:1 }, { v:v } );
+ for( i = 0; i < 10000; ++i ) {
+ t.save( { a:i } );
+ }
+
+ toDrop = [];
+ for( i = 0; i < 10000; ++i ) {
+ toDrop.push( Random.randInt( 10000 ) ); // Dups in the query will be ignored.
+ }
+ // Remove many of the documents; $atomic prevents use of a ClientCursor, which would invoke a
+ // different bucket deallocation procedure than the one to be tested (see SERVER-4575).
+ t.remove( { a:{ $in:toDrop }, $atomic:true } );
+ assert( !db.getLastError() );
+}
diff --git a/jstests/removeb.js b/jstests/removeb.js
new file mode 100644
index 00000000000..b6634140081
--- /dev/null
+++ b/jstests/removeb.js
@@ -0,0 +1,39 @@
+// Test removal of Records that have been reused since the remove operation began. SERVER-5198
+
+t = db.jstests_removeb;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+
+// Make the index multikey to trigger cursor dedup checking.
+t.insert( { a:[ -1, -2 ] } );
+t.remove({});
+
+// Insert some data.
+for( i = 0; i < 20000; ++i ) {
+ t.insert( { a:i } );
+}
+db.getLastError();
+
+p = startParallelShell(
+ // Wait until the remove operation (below) begins running.
+ 'while( db.jstests_removeb.count() == 20000 );' +
+ // Insert documents with increasing 'a' values. These inserted documents may
+ // reuse Records freed by the remove operation in progress and will be
+ // visited by the remove operation if it has not completed.
+ 'for( i = 20000; i < 40000; ++i ) {' +
+ ' db.jstests_removeb.insert( { a:i } );' +
+ ' db.getLastError();' +
+ ' if (i % 1000 == 0) {' +
+ ' print( i-20000 + \" of 20000 documents inserted\" );' +
+ ' }' +
+ '}'
+ );
+
+// Remove using the a:1 index in ascending direction.
+t.remove( { a:{ $gte:0 } } );
+assert( !db.getLastError(), 'The remove operation failed.' );
+
+p();
+
+t.drop();
diff --git a/jstests/removec.js b/jstests/removec.js
new file mode 100644
index 00000000000..539647c502e
--- /dev/null
+++ b/jstests/removec.js
@@ -0,0 +1,40 @@
+// Sanity test for removing documents with adjacent index keys. SERVER-2008
+
+t = db.jstests_removec;
+t.drop();
+t.ensureIndex( { a:1 } );
+
+/** @return an array containing a sequence of numbers from i to i + 10. */
+function runStartingWith( i ) {
+ ret = [];
+ for( j = 0; j < 11; ++j ) {
+ ret.push( i + j );
+ }
+ return ret;
+}
+
+// Insert some documents with adjacent index keys.
+for( i = 0; i < 1100; i += 11 ) {
+ t.save( { a:runStartingWith( i ) } );
+}
+db.getLastError();
+
+// Remove and then reinsert random documents in the background.
+s = startParallelShell(
+ 't = db.jstests_removec;' +
+ 'for( j = 0; j < 1000; ++j ) {' +
+ ' o = t.findOne( { a:Random.randInt( 1100 ) } );' +
+ ' t.remove( { _id:o._id } );' +
+ ' t.insert( o );' +
+ '}'
+ );
+
+// Find operations are error free. Note that the cursor throws if it detects the $err
+// field in the returned document.
+for( i = 0; i < 200; ++i ) {
+ t.find( { a:{ $gte:0 } } ).hint( { a:1 } ).itcount();
+}
+
+s();
+
+t.drop();
diff --git a/jstests/rename.js b/jstests/rename.js
new file mode 100644
index 00000000000..51b74047288
--- /dev/null
+++ b/jstests/rename.js
@@ -0,0 +1,56 @@
+admin = db.getMongo().getDB( "admin" );
+
+a = db.jstests_rename_a;
+b = db.jstests_rename_b;
+c = db.jstests_rename_c;
+
+a.drop();
+b.drop();
+c.drop();
+
+a.save( {a: 1} );
+a.save( {a: 2} );
+a.ensureIndex( {a:1} );
+a.ensureIndex( {b:1} );
+
+c.save( {a: 100} );
+assert.commandFailed( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_c"} ) );
+
+assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
+assert.eq( 0, a.find().count() );
+
+assert.eq( 2, b.find().count() );
+assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
+assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
+assert.eq( 3, db.system.indexes.find( {ns:"test.jstests_rename_b"} ).count() );
+assert.eq( 0, db.system.indexes.find( {ns:"test.jstests_rename_a"} ).count() );
+assert( b.find( {a:1} ).explain().cursor.match( /^BtreeCursor/ ) );
+
+// now try renaming a capped collection
+
+a.drop();
+b.drop();
+c.drop();
+
+// TODO: too many numbers hard coded here
+// this test depends precisely on record size and hence may not be very reliable
+// note we use floats to make sure numbers are represented as doubles for both SM and v8, since test relies on record size
+db.createCollection( "jstests_rename_a", {capped:true,size:10000} );
+for( i = 0.1; i < 10; ++i ) {
+ a.save( { i: i } );
+}
+assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
+assert.eq( 1, b.count( {i:9.1} ) );
+for( i = 10.1; i < 250; ++i ) {
+ b.save( { i: i } );
+}
+
+//res = b.find().sort({i:1});
+//while (res.hasNext()) printjson(res.next());
+
+assert.eq( 0, b.count( {i:9.1} ) );
+assert.eq( 1, b.count( {i:19.1} ) );
+
+assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
+assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
+assert.eq( true, db.system.namespaces.findOne( {name:"test.jstests_rename_b"} ).options.capped );
diff --git a/jstests/rename2.js b/jstests/rename2.js
new file mode 100644
index 00000000000..a06268f1bfb
--- /dev/null
+++ b/jstests/rename2.js
@@ -0,0 +1,19 @@
+
+
+a = db.rename2a;
+b = db.rename2b;
+
+a.drop();
+b.drop();
+
+a.save( { x : 1 } )
+a.save( { x : 2 } )
+a.save( { x : 3 } )
+
+assert.eq( 3 , a.count() , "A" )
+assert.eq( 0 , b.count() , "B" )
+
+assert( a.renameCollection( "rename2b" ) , "the command" );
+
+assert.eq( 0 , a.count() , "C" )
+assert.eq( 3 , b.count() , "D" )
diff --git a/jstests/rename3.js b/jstests/rename3.js
new file mode 100644
index 00000000000..5e1005f8176
--- /dev/null
+++ b/jstests/rename3.js
@@ -0,0 +1,25 @@
+
+
+a = db.rename3a
+b = db.rename3b
+
+a.drop();
+b.drop()
+
+a.save( { x : 1 } );
+b.save( { x : 2 } );
+
+assert.eq( 1 , a.findOne().x , "before 1a" );
+assert.eq( 2 , b.findOne().x , "before 2a" );
+
+res = b.renameCollection( a._shortName );
+assert.eq( 0 , res.ok , "should fail: " + tojson( res ) );
+
+assert.eq( 1 , a.findOne().x , "before 1b" );
+assert.eq( 2 , b.findOne().x , "before 2b" );
+
+res = b.renameCollection( a._shortName , true )
+assert.eq( 1 , res.ok , "should succeed:" + tojson( res ) );
+
+assert.eq( 2 , a.findOne().x , "after 1" );
+assert.isnull( b.findOne() , "after 2" );
diff --git a/jstests/rename4.js b/jstests/rename4.js
new file mode 100644
index 00000000000..508b8b9321b
--- /dev/null
+++ b/jstests/rename4.js
@@ -0,0 +1,145 @@
+t = db.jstests_rename4;
+t.drop();
+
+function bad( f ) {
+ //Ensure no error to start with
+ var lstError = db.getLastError();
+ if (lstError)
+ assert( false, "Unexpected error : " + lstError );
+
+ var docsBeforeUpdate = t.find().toArray();
+ eval( f );
+
+ //Ensure error
+ var lstError = db.getLastErrorObj();
+ if (!lstError.err) {
+ print("Error:" + tojson(lstError));
+ print("Existing docs (before)")
+ printjson(docsBeforeUpdate);
+ print("Existing docs (after)")
+ printjson(t.find().toArray());
+ assert( false, "Expected error but didn't get one for: " + f );
+ }
+
+ db.resetError();
+}
+
+bad( "t.update( {}, {$rename:{'a':'a'}} )" );
+bad( "t.update( {}, {$rename:{'':'a'}} )" );
+bad( "t.update( {}, {$rename:{'a':''}} )" );
+bad( "t.update( {}, {$rename:{'.a':'b'}} )" );
+bad( "t.update( {}, {$rename:{'a':'.b'}} )" );
+bad( "t.update( {}, {$rename:{'a.':'b'}} )" );
+bad( "t.update( {}, {$rename:{'a':'b.'}} )" );
+bad( "t.update( {}, {$rename:{'a.b':'a'}} )" );
+bad( "t.update( {}, {$rename:{'a.$':'b'}} )" );
+bad( "t.update( {}, {$rename:{'a':'b.$'}} )" );
+
+// Only bad if input doc has field resulting in conflict
+t.save( {_id:1, a:2} );
+bad( "t.update( {}, {$rename:{'_id':'a'}} )" );
+bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b'}} )" );
+bad( "t.update( {}, {$rename:{'a':'b'},$set:{b:1}} )" );
+bad( "t.update( {}, {$rename:{'a':'b'},$set:{a:1}} )" );
+bad( "t.update( {}, {$set:{'b.c':1},$rename:{'a':'b'}} )" );
+bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b.c'}} )" );
+bad( "t.update( {}, {$rename:{'a':'b'},$set:{'b.c':1}} )" );
+bad( "t.update( {}, {$rename:{'a':'b.c'},$set:{b:1}} )" );
+
+
+t.remove({});
+t.save( {a:[1],b:{c:[2]},d:[{e:3}],f:4} );
+bad( "t.update( {}, {$rename:{'a.0':'f'}} )" );
+bad( "t.update( {}, {$rename:{'a.0':'g'}} )" );
+bad( "t.update( {}, {$rename:{'f':'a.0'}} )" );
+bad( "t.update( {}, {$rename:{'b.c.0':'f'}} )" );
+bad( "t.update( {}, {$rename:{'f':'b.c.0'}} )" );
+bad( "t.update( {}, {$rename:{'d.e':'d.f'}} )" );
+bad( "t.update( {}, {$rename:{'d.e':'f'}} )" );
+bad( "t.update( {}, {$rename:{'d.f':'d.e'}} )" );
+bad( "t.update( {}, {$rename:{'f':'d.e'}} )" );
+bad( "t.update( {}, {$rename:{'d.0.e':'d.f'}} )" );
+bad( "t.update( {}, {$rename:{'d.0.e':'f'}} )" );
+bad( "t.update( {}, {$rename:{'d.f':'d.0.e'}} )" );
+bad( "t.update( {}, {$rename:{'f':'d.0.e'}} )" );
+bad( "t.update( {}, {$rename:{'f.g':'a'}} )" );
+bad( "t.update( {}, {$rename:{'a':'f.g'}} )" );
+
+function good( start, mod, expected ) {
+ t.remove({});
+ t.save( start );
+ t.update( {}, mod );
+ assert( !db.getLastError() );
+ var got = t.findOne();
+ delete got._id;
+ assert.docEq( expected, got );
+}
+
+good( {a:1}, {$rename:{a:'b'}}, {b:1} );
+good( {a:1}, {$rename:{a:'bb'}}, {bb:1} );
+good( {b:1}, {$rename:{b:'a'}}, {a:1} );
+good( {bb:1}, {$rename:{bb:'a'}}, {a:1} );
+good( {a:{y:1}}, {$rename:{'a.y':'a.z'}}, {a:{z:1}} );
+good( {a:{yy:1}}, {$rename:{'a.yy':'a.z'}}, {a:{z:1}} );
+good( {a:{z:1}}, {$rename:{'a.z':'a.y'}}, {a:{y:1}} );
+good( {a:{zz:1}}, {$rename:{'a.zz':'a.y'}}, {a:{y:1}} );
+good( {a:{c:1}}, {$rename:{a:'b'}}, {b:{c:1}} );
+good( {aa:{c:1}}, {$rename:{aa:'b'}}, {b:{c:1}} );
+good( {a:1,b:2}, {$rename:{a:'b'}}, {b:1} );
+good( {aa:1,b:2}, {$rename:{aa:'b'}}, {b:1} );
+good( {a:1,bb:2}, {$rename:{a:'bb'}}, {bb:1} );
+good( {a:1}, {$rename:{a:'b.c'}}, {b:{c:1}} );
+good( {aa:1}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
+good( {a:1,b:{}}, {$rename:{a:'b.c'}}, {b:{c:1}} );
+good( {aa:1,b:{}}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
+good( {a:1}, {$rename:{b:'c'}}, {a:1} );
+good( {aa:1}, {$rename:{b:'c'}}, {aa:1} );
+good( {}, {$rename:{b:'c'}}, {} );
+good( {a:{b:1,c:2}}, {$rename:{'a.b':'d'}}, {a:{c:2},d:1} );
+good( {a:{bb:1,c:2}}, {$rename:{'a.bb':'d'}}, {a:{c:2},d:1} );
+good( {a:{b:1}}, {$rename:{'a.b':'d'}}, {a:{},d:1} );
+good( {a:[5]}, {$rename:{a:'b'}}, {b:[5]} );
+good( {aa:[5]}, {$rename:{aa:'b'}}, {b:[5]} );
+good( {'0':1}, {$rename:{'0':'5'}}, {'5':1} );
+good( {a:1,b:2}, {$rename:{a:'c'},$set:{b:5}}, {b:5,c:1} );
+good( {aa:1,b:2}, {$rename:{aa:'c'},$set:{b:5}}, {b:5,c:1} );
+good( {a:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {a:1,b:5} );
+good( {aa:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {aa:1,b:5} );
+
+// (formerly) rewriting single field
+good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
+good( {a:{z:1,tomato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
+good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
+good( {a:{z:1,tomato:1,potato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
+good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.cc'}}, {a:{cc:1,z:1}} );
+good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'aa.c'}}, {a:{c:1,z:1},aa:{c:1}} );
+
+// invalid target, but missing source
+good( {a:1,c:4}, {$rename:{b:'c.d'}}, {a:1,c:4} );
+
+// TODO: This should be supported, and it is with the new update framework, but not with the
+// old, and we currently don't have a good way to check which mode we are in. When we do have
+// that, add this test guarded under that condition. Or, when we remove the old update path
+// just enable this test.
+
+// valid to rename away from an invalid name
+// good( {x:1}, {$rename:{'$a.b':'a.b'}}, {x:1} );
+
+// check index
+t.drop();
+t.ensureIndex( {a:1} );
+
+function l( start, mod, query, expected ) {
+ t.remove({});
+ t.save( start );
+ t.update( {}, mod );
+ assert( !db.getLastError() );
+ var got = t.find( query ).hint( {a:1} ).next();
+ delete got._id;
+ assert.docEq( expected, got );
+}
+
+l( {a:1}, {$rename:{a:'b'}}, {a:null}, {b:1} );
+l( {a:1}, {$rename:{a:'bb'}}, {a:null}, {bb:1} );
+l( {b:1}, {$rename:{b:'a'}}, {a:1}, {a:1} );
+l( {bb:1}, {$rename:{bb:'a'}}, {a:1}, {a:1} );
diff --git a/jstests/rename5.js b/jstests/rename5.js
new file mode 100644
index 00000000000..927c767b981
--- /dev/null
+++ b/jstests/rename5.js
@@ -0,0 +1,46 @@
+// Check some $rename cases with a missing source. SERVER-4845
+
+t = db.jstests_rename5;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+t.save( { b:1 } );
+
+t.update( {}, { $rename:{ a:'b' } } );
+assert.eq( 1, t.findOne().b );
+
+// Test with another modifier.
+t.update( {}, { $rename:{ a:'b' }, $set:{ x:1 } } );
+assert.eq( 1, t.findOne().b );
+assert.eq( 1, t.findOne().x );
+
+// Test with an in place modifier.
+t.update( {}, { $rename:{ a:'b' }, $inc:{ x:1 } } );
+assert.eq( 1, t.findOne().b );
+assert.eq( 2, t.findOne().x );
+
+// Check similar cases with upserts.
+t.drop();
+
+t.remove({});
+t.update( { b:1 }, { $rename:{ a:'b' } }, true );
+assert.eq( 1, t.findOne().b );
+
+t.remove({});
+t.update( { b:1 }, { $rename:{ a:'b' }, $set:{ c:1 } }, true );
+assert.eq( 1, t.findOne().b );
+assert.eq( 1, t.findOne().c );
+
+t.remove({});
+t.update( { b:1, c:2 }, { $rename:{ a:'b' }, $inc:{ c:1 } }, true );
+assert.eq( 1, t.findOne().b );
+assert.eq( 3, t.findOne().c );
+
+// Check a similar case with multiple renames of an unindexed document.
+t.drop();
+
+t.save( { b:1, x:1 } );
+t.update( {}, { $rename: { a:'b', x:'y' } } );
+assert.eq( 1, t.findOne().b );
+assert.eq( 1, t.findOne().y );
+assert( !t.findOne().x );
diff --git a/jstests/rename6.js b/jstests/rename6.js
new file mode 100644
index 00000000000..17cbf4b80b1
--- /dev/null
+++ b/jstests/rename6.js
@@ -0,0 +1,24 @@
+// Test for SERVER-7017
+// We shouldn't rename a collection when one of its indexes will generate a namespace
+// that is greater than 120 chars. To do this we create a long index name and try
+// and rename the collection to one with a much longer name. We use the test database
+// by default and we add this here to ensure we are using it
+testDB = db.getSiblingDB("test")
+c = "rename2c";
+dbc = testDB.getCollection(c);
+d = "dest4567890123456789012345678901234567890123456789012345678901234567890"
+dbd = testDB.getCollection(d);
+dbc.ensureIndex({ "name" : 1,
+ "date" : 1,
+ "time" : 1,
+ "renameCollection" : 1,
+ "mongodb" : 1,
+ "testing" : 1,
+ "data" : 1});
+//Checking for the newly created index and the _id index in original collection
+assert.eq(2, testDB.system.indexes.find( { "ns" : "test." + c } ).count(), "Long Rename Init");
+//Should fail to rename collection as the index namespace is too long
+assert.commandFailed( dbc.renameCollection( dbd ) , "Long Rename Exec" );
+//Since we failed we should have the 2 indexes unmoved and no indexes under the new collection name
+assert.eq(2, testDB.system.indexes.find( { "ns" : "test." + c } ).count(), "Long Rename Result 1");
+assert.eq(0, testDB.system.indexes.find( { "ns" : "test." + d } ).count(), "Long Rename Result 2");
diff --git a/jstests/rename7.js b/jstests/rename7.js
new file mode 100644
index 00000000000..33899957755
--- /dev/null
+++ b/jstests/rename7.js
@@ -0,0 +1,56 @@
+// ***************************************************************
+// rename7.js
+// Test renameCollection functionality across different databases.
+// ***************************************************************
+
+// Set up namespaces a and b.
+admin = db.getMongo().getDB( "admin" );
+db_a = db.getMongo().getDB( "db_a" );
+db_b = db.getMongo().getDB( "db_b" );
+a = db_a.rename7;
+b = db_b.rename7;
+
+a.drop();
+b.drop();
+
+// Put some documents and indexes in a.
+a.save( {a: 1} );
+a.save( {a: 2} );
+a.save( {a: 3} );
+a.ensureIndex( {a: 1} );
+a.ensureIndex( {b: 1} );
+
+assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7", to: "db_b.rename7"} ) );
+
+assert.eq( 0, a.find().count() );
+assert( !db_a.system.namespaces.findOne( {name: "db_a.rename7"} ) );
+
+assert.eq( 3, b.find().count() );
+assert( db_b.system.namespaces.findOne( {name: "db_b.rename7"} ) );
+assert( b.find( {a: 1} ).explain().cursor.match( /^BtreeCursor/ ) );
+
+a.drop();
+b.drop();
+
+// Capped collection testing.
+db_a.createCollection( "rename7_capped", {capped:true, size:10000} );
+a = db_a.rename7_capped;
+b = db_b.rename7_capped;
+
+a.save( {a: 1} );
+a.save( {a: 2} );
+a.save( {a: 3} );
+
+assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7_capped",
+ to: "db_b.rename7_capped"} ) );
+
+assert.eq( 0, a.find().count() );
+assert( !db_a.system.namespaces.findOne( {name: "db_a.rename7_capped"} ) );
+
+assert.eq( 3, b.find().count() );
+assert( db_b.system.namespaces.findOne( {name: "db_b.rename7_capped"} ) );
+assert.eq( true, db_b.system.namespaces.findOne( {name:"db_b.rename7_capped"} ).options.capped );
+assert.eq( 12288, b.stats().storageSize );
+
+a.drop();
+b.drop();
diff --git a/jstests/rename8.js b/jstests/rename8.js
new file mode 100644
index 00000000000..8b955824ea8
--- /dev/null
+++ b/jstests/rename8.js
@@ -0,0 +1,25 @@
+// SERVER-12591: prevent renaming to arbitrary system collections.
+
+var testdb = db.getSiblingDB("rename8"); // to avoid breaking other tests when we touch system.users
+var coll = testdb.rename8;
+var systemNamespaces = testdb.system.namespaces;
+var systemFoo = testdb.system.foo;
+var systemUsers = testdb.system.users;
+
+systemFoo.drop();
+systemUsers.drop();
+coll.drop();
+coll.insert({});
+
+// system.foo isn't in the whitelist so it can't be renamed to or from
+assert.commandFailed(coll.renameCollection(systemFoo.getName()));
+assert.commandFailed(systemFoo.renameCollection(coll.getName()));
+
+// same with system.namespaces, even though it does exist
+assert.commandFailed(coll.renameCollection(systemNamespaces.getName()));
+assert.commandFailed(coll.renameCollection(systemNamespaces.getName(), /*dropTarget*/true));
+assert.commandFailed(systemNamespaces.renameCollection(coll.getName()));
+
+// system.users is whitelisted so these should work
+assert.commandWorked(coll.renameCollection(systemUsers.getName()));
+assert.commandWorked(systemUsers.renameCollection(coll.getName()));
diff --git a/jstests/rename_stayTemp.js b/jstests/rename_stayTemp.js
new file mode 100644
index 00000000000..afd77d1289c
--- /dev/null
+++ b/jstests/rename_stayTemp.js
@@ -0,0 +1,24 @@
+orig = 'rename_stayTemp_orig'
+dest = 'rename_stayTemp_dest'
+
+db[orig].drop()
+db[dest].drop()
+
+function ns(coll){ return db[coll].getFullName() }
+
+db.runCommand({create: orig, temp:1})
+assert.eq(db.system.namespaces.findOne({name:ns(orig)}).options.temp, 1)
+
+db.adminCommand({renameCollection: ns(orig), to: ns(dest)});
+var options = db.system.namespaces.findOne({name:ns(dest)}).options || {};
+assert.eq(options.temp, undefined);
+
+db[dest].drop();
+
+db.runCommand({create: orig, temp:1})
+assert.eq(db.system.namespaces.findOne({name:ns(orig)}).options.temp, 1)
+
+db.adminCommand({renameCollection: ns(orig), to: ns(dest), stayTemp: true});
+assert.eq(db.system.namespaces.findOne({name:ns(dest)}).options.temp, 1)
+
+
diff --git a/jstests/repair.js b/jstests/repair.js
new file mode 100644
index 00000000000..5026ec3bcbb
--- /dev/null
+++ b/jstests/repair.js
@@ -0,0 +1,28 @@
+mydb = db.getSisterDB( "repair_test1" )
+
+t = mydb.jstests_repair;
+t.drop();
+
+t.save( { i:1 } );
+doc = t.findOne();
+t.ensureIndex( { i : 1 } );
+assert.eq( 2, t.getIndexes().length );
+ex = t.find( { i : 1 } ).explain();
+
+assert.commandWorked( mydb.repairDatabase() );
+
+v = t.validate();
+assert( v.valid , "not valid! " + tojson( v ) );
+
+assert.eq( 1, t.count() );
+assert.eq( doc, t.findOne() );
+
+assert.eq( 2, t.getIndexes().length, tojson( t.getIndexes() ) );
+var explainAfterRepair = t.find( { i : 1 } ).explain();
+
+// Remove "millis" field. We're interested in the other fields.
+// It's not relevant for both explain() operations to have
+// the same execution time.
+delete ex[ "millis" ];
+delete explainAfterRepair[ "millis" ];
+assert.eq( ex, explainAfterRepair );
diff --git a/jstests/reversecursor.js b/jstests/reversecursor.js
new file mode 100644
index 00000000000..bb661952fc9
--- /dev/null
+++ b/jstests/reversecursor.js
@@ -0,0 +1,34 @@
+// Test to make sure that a reverse cursor can correctly handle empty extents (SERVER-6980)
+
+// Create a collection with three small extents
+db.jstests_reversecursor.drop();
+db.runCommand({"create":"jstests_reversecursor", $nExtents: [4096,4096,4096]});
+
+// Function to check whether all three extents are non empty
+function extentsSpanned() {
+ var extents = db.jstests_reversecursor.validate(true).extents;
+ return (extents[0].firstRecord != "null" &&
+ extents[1].firstRecord != "null" &&
+ extents[2].firstRecord != "null");
+}
+
+// Insert enough documents to span all three extents
+a = 0;
+while (!extentsSpanned()) {
+ db.jstests_reversecursor.insert({a:a++});
+}
+
+// Delete all the elements in the middle
+db.jstests_reversecursor.remove({a:{$gt:0,$lt:a-1}});
+
+// Make sure the middle extent is empty and that both end extents are not empty
+assert.eq(db.jstests_reversecursor.validate(true).extents[1].firstRecord, "null");
+assert.eq(db.jstests_reversecursor.validate(true).extents[1].lastRecord, "null");
+assert.neq(db.jstests_reversecursor.validate(true).extents[0].firstRecord, "null");
+assert.neq(db.jstests_reversecursor.validate(true).extents[0].lastRecord, "null");
+assert.neq(db.jstests_reversecursor.validate(true).extents[2].firstRecord, "null");
+assert.neq(db.jstests_reversecursor.validate(true).extents[2].lastRecord, "null");
+
+// Make sure that we get the same number of elements for both the forward and reverse cursors
+assert.eq(db.jstests_reversecursor.find().sort({$natural:1}).toArray().length, 2);
+assert.eq(db.jstests_reversecursor.find().sort({$natural:-1}).toArray().length, 2);
diff --git a/jstests/role_management_helpers.js b/jstests/role_management_helpers.js
new file mode 100644
index 00000000000..1cb821975ef
--- /dev/null
+++ b/jstests/role_management_helpers.js
@@ -0,0 +1,137 @@
+// This test is a basic sanity check of the shell helpers for manipulating role objects
+// It is not a comprehensive test of the functionality of the role manipulation commands
+
+function assertHasRole(rolesArray, roleName, roleDB) {
+ for (i in rolesArray) {
+ var curRole = rolesArray[i];
+ if (curRole.role == roleName && curRole.db == roleDB) {
+ return;
+ }
+ }
+ assert(false, "role " + roleName + "@" + roleDB + " not found in array: " + tojson(rolesArray));
+}
+
+function assertHasPrivilege(privilegeArray, privilege) {
+ for (i in privilegeArray) {
+ var curPriv = privilegeArray[i];
+ if (curPriv.resource.cluster == privilege.resource.cluster &&
+ curPriv.resource.anyResource == privilege.resource.anyResource &&
+ curPriv.resource.db == privilege.resource.db &&
+ curPriv.resource.collection == privilege.resource.collection) {
+ // Same resource
+ assert.eq(curPriv.actions.length, privilege.actions.length);
+ for (k in curPriv.actions) {
+ assert.eq(curPriv.actions[k], privilege.actions[k]);
+ }
+ return;
+ }
+ }
+ assert(false, "Privilege " + tojson(privilege) + " not found in privilege array: " +
+ tojson(privilegeArray));
+}
+
+(function(db) {
+ var db = db.getSiblingDB("role_management_helpers");
+ db.dropDatabase();
+ db.dropAllRoles();
+
+ db.createRole({role:'roleA',
+ roles: [],
+ privileges: [{resource: {db:db.getName(), collection: "foo"},
+ actions: ['find']}]});
+ db.createRole({role:'roleB', privileges: [], roles: ["roleA"]});
+ db.createRole({role:'roleC', privileges: [], roles: []});
+
+ // Test getRole
+ var roleObj = db.getRole("roleA");
+ assert.eq(0, roleObj.roles.length);
+ assert.eq(null, roleObj.privileges);
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(1, roleObj.privileges.length);
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(1, roleObj.inheritedPrivileges.length); // inherited from roleA
+ assertHasPrivilege(roleObj.inheritedPrivileges,
+ {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+
+ // Test getRoles
+ var roles = db.getRoles();
+ assert.eq(3, roles.length);
+ printjson(roles);
+ assert(roles[0].role == 'roleA' || roles[1].role == 'roleA' || roles[2].role == 'roleA');
+ assert(roles[0].role == 'roleB' || roles[1].role == 'roleB' || roles[2].role == 'roleB');
+ assert(roles[0].role == 'roleC' || roles[1].role == 'roleC' || roles[2].role == 'roleC');
+ assert.eq(null, roles[0].inheritedPrivileges);
+ var roles = db.getRoles({showPrivileges: true, showBuiltinRoles: true});
+ assert.eq(8, roles.length);
+ assert.neq(null, roles[0].inheritedPrivileges);
+
+
+ // Granting roles to nonexistent role fails
+ assert.throws(function() { db.grantRolesToRole("fakeRole", ['dbAdmin']); });
+ // Granting roles to built-in role fails
+ assert.throws(function() { db.grantRolesToRole("readWrite", ['dbAdmin']); });
+    // Granting non-existent role fails
+ assert.throws(function() { db.grantRolesToRole("roleB", ['dbAdmin', 'fakeRole']); });
+
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(1, roleObj.inheritedPrivileges.length);
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+
+ // Granting a role you already have is no problem
+ db.grantRolesToRole("roleB", ['readWrite', 'roleC']);
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.gt(roleObj.inheritedPrivileges.length, 1); // Got privileges from readWrite role
+ assert.eq(3, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "readWrite", db.getName());
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+ assertHasRole(roleObj.roles, "roleC", db.getName());
+
+ // Revoking roles the role doesn't have is fine
+ db.revokeRolesFromRole("roleB", ['roleA', 'readWrite', 'dbAdmin']);
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(0, roleObj.inheritedPrivileges.length);
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleC", db.getName());
+
+ // Privileges on the same resource get collapsed
+ db.grantPrivilegesToRole("roleA",
+ [{resource: {db:db.getName(), collection:""}, actions:['dropDatabase']},
+ {resource: {db:db.getName(), collection:"foo"}, actions:['insert']}]);
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(0, roleObj.roles.length);
+ assert.eq(2, roleObj.privileges.length);
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db:db.getName(), collection:"foo"}, actions:['find', 'insert']});
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db:db.getName(), collection:""}, actions:['dropDatabase']});
+
+ // Update role
+ db.updateRole("roleA", {roles:['roleB'],
+ privileges:[{resource: {db: db.getName(), collection:"foo"},
+ actions:['find']}]});
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleB", db.getName());
+ assert.eq(1, roleObj.privileges.length);
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
+
+ // Test dropRole
+ db.dropRole('roleC');
+ assert.throws(function() {db.getRole('roleC')});
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(0, roleObj.privileges.length);
+ assert.eq(0, roleObj.roles.length);
+
+ // Test dropAllRoles
+ db.dropAllRoles();
+ assert.throws(function() {db.getRole('roleA')});
+ assert.throws(function() {db.getRole('roleB')});
+ assert.throws(function() {db.getRole('roleC')});
+
+}(db)); \ No newline at end of file
diff --git a/jstests/run_program1.js b/jstests/run_program1.js
new file mode 100644
index 00000000000..7a994b2171a
--- /dev/null
+++ b/jstests/run_program1.js
@@ -0,0 +1,19 @@
+if ( ! _isWindows() ) {
+
+ // note that normal program exit returns 0
+ assert.eq (0, runProgram('true'))
+ assert.neq(0, runProgram('false'))
+ assert.neq(0, runProgram('this_program_doesnt_exit'));
+
+ //verify output visually
+ runProgram('echo', 'Hello', 'World.', 'How are you?');
+ runProgram('bash', '-c', 'echo Hello World. "How are you?"'); // only one space is printed between Hello and World
+
+ // numbers can be passed as numbers or strings
+ runProgram('sleep', 0.5);
+ runProgram('sleep', '0.5');
+
+} else {
+
+ runProgram('cmd', '/c', 'echo hello windows');
+}
diff --git a/jstests/server1470.js b/jstests/server1470.js
new file mode 100644
index 00000000000..0bb4d02c933
--- /dev/null
+++ b/jstests/server1470.js
@@ -0,0 +1,20 @@
+
+t = db.server1470;
+t.drop();
+
+q = { "name" : "first" , "pic" : { "$ref" : "foo", "$id" : ObjectId("4c48d04cd33a5a92628c9af6") } };
+t.update( q , {$set:{ x : 1 } } , true, true );
+ref = t.findOne().pic
+assert.eq( "object", typeof( ref ) );
+assert.eq( q.pic["$ref"] , ref["$ref"] )
+assert.eq( q.pic["$id"] , ref["$id"] )
+
+// just make sure we haven't broken other update operators
+t.drop();
+t.update( { _id : 1 , x : { $gt : 5 } } , { $set : { y : 1 } } , true );
+assert.eq( { _id : 1 , y : 1 } , t.findOne() );
+
+
+
+
+
diff --git a/jstests/server5346.js b/jstests/server5346.js
new file mode 100644
index 00000000000..f4a692bd16a
--- /dev/null
+++ b/jstests/server5346.js
@@ -0,0 +1,15 @@
+
+t = db.server5346;
+t.drop();
+
+x = { _id : 1 , versions : {} }
+t.insert( x )
+
+t.update({ _id : 1 }, { $inc : { "versions.2_01" : 1 } } )
+t.update({ _id : 1 }, { $inc : { "versions.2_1" : 2 } } )
+t.update({ _id : 1 }, { $inc : { "versions.01" : 3 } } )
+t.update({ _id : 1 }, { $inc : { "versions.1" : 4 } } )
+
+// Make sure the correct fields are set, without duplicates.
+assert.docEq( { "_id" : 1, "versions" : { "01" : 3, "1" : 4, "2_01" : 1, "2_1" : 2 } },
+ t.findOne())
diff --git a/jstests/server7756.js b/jstests/server7756.js
new file mode 100644
index 00000000000..5a7177ebcc9
--- /dev/null
+++ b/jstests/server7756.js
@@ -0,0 +1,12 @@
+
+t = db.server7756;
+t.drop();
+
+t.save( { a:[ { 1:'x' }, 'y' ] } );
+
+assert.eq( 1, t.count( { 'a.1':'x' } ) );
+assert.eq( 1, t.count( { 'a.1':'y' } ) );
+
+assert.eq( 1, t.count( { 'a.1':/x/ } ) );
+assert.eq( 1, t.count( { 'a.1':/y/ } ) );
+
diff --git a/jstests/server9385.js b/jstests/server9385.js
new file mode 100644
index 00000000000..ee86891ce2a
--- /dev/null
+++ b/jstests/server9385.js
@@ -0,0 +1,16 @@
+// SERVER-9385 ensure saving a document derived from bson->js conversion doesn't lose its _id
+t = db.server9385;
+t.drop();
+
+t.insert( { _id : 1, x : 1 } );
+x = t.findOne();
+x._id = 2;
+t.save( x );
+
+t.find().forEach( printjson );
+
+assert.eq( 2, t.find().count() );
+assert.eq( 2, t.find().itcount() );
+
+assert( t.findOne( { _id : 1 } ), "original insert missing" );
+assert( t.findOne( { _id : 2 } ), "save didn't work?" );
diff --git a/jstests/server9547.js b/jstests/server9547.js
new file mode 100644
index 00000000000..67cacfc22a7
--- /dev/null
+++ b/jstests/server9547.js
@@ -0,0 +1,21 @@
+// SERVER-9547
+// Test that sorting with .max() and .min() doesn't crash.
+
+var t = db.server9547;
+t.drop();
+
+for (var i=0; i<10; i++) {
+ t.save({a: i});
+}
+
+t.ensureIndex({a: 1});
+
+// note: max() value is exclusive upper bound
+assert.eq(4, t.find({}).max({a: 4}).toArray().length, "no order");
+
+// Ascending order is fine.
+assert.eq(4, t.find({}).max({a: 4}).sort({a: 1}).toArray().length, "ascending");
+
+// Descending order is still broken.
+// This should really return the same # of results but doesn't.
+assert.eq(5, t.find({}).max({a: 4}).sort({a: -1}).toArray().length, "descending");
diff --git a/jstests/set1.js b/jstests/set1.js
new file mode 100644
index 00000000000..d741387af58
--- /dev/null
+++ b/jstests/set1.js
@@ -0,0 +1,9 @@
+
+t = db.set1;
+t.drop();
+
+t.insert( { _id : 1, emb : {} });
+t.update( { _id : 1 }, { $set : { emb : { 'a.dot' : 'data'} }});
+assert.eq( { _id : 1 , emb : {} } , t.findOne() , "A" );
+
+
diff --git a/jstests/set2.js b/jstests/set2.js
new file mode 100644
index 00000000000..221ee407759
--- /dev/null
+++ b/jstests/set2.js
@@ -0,0 +1,18 @@
+
+t = db.set2;
+t.drop();
+
+t.save( { _id : 1 , x : true , y : { x : true } } );
+assert.eq( true , t.findOne().x );
+
+t.update( { _id : 1 } , { $set : { x : 17 } } );
+assert.eq( 17 , t.findOne().x );
+
+assert.eq( true , t.findOne().y.x );
+t.update( { _id : 1 } , { $set : { "y.x" : 17 } } );
+assert.eq( 17 , t.findOne().y.x );
+
+t.update( { _id : 1 } , { $set : { a : 2 , b : 3 } } );
+assert.eq( 2 , t.findOne().a );
+assert.eq( 3 , t.findOne().b );
+
diff --git a/jstests/set3.js b/jstests/set3.js
new file mode 100644
index 00000000000..611abc4e6bf
--- /dev/null
+++ b/jstests/set3.js
@@ -0,0 +1,11 @@
+
+t = db.set3;
+t.drop();
+
+t.insert( { "test1" : { "test2" : { "abcdefghijklmnopqrstu" : {"id":1} } } } );
+t.update( {}, {"$set":{"test1.test2.abcdefghijklmnopqrstuvwxyz":{"id":2}}})
+
+x = t.findOne();
+assert.eq( 1 , x.test1.test2.abcdefghijklmnopqrstu.id , "A" );
+assert.eq( 2 , x.test1.test2.abcdefghijklmnopqrstuvwxyz.id , "B" );
+
diff --git a/jstests/set4.js b/jstests/set4.js
new file mode 100644
index 00000000000..b37366cdb81
--- /dev/null
+++ b/jstests/set4.js
@@ -0,0 +1,15 @@
+
+t = db.set4;
+t.drop();
+
+orig = { _id:1 , a : [ { x : 1 } ]}
+t.insert( orig );
+
+t.update( {}, { $set : { 'a.0.x' : 2, 'foo.bar' : 3 } } );
+orig.a[0].x = 2; orig.foo = { bar : 3 }
+assert.eq( orig , t.findOne() , "A" );
+
+t.update( {}, { $set : { 'a.0.x' : 4, 'foo.bar' : 5 } } );
+orig.a[0].x = 4; orig.foo.bar = 5;
+assert.eq( orig , t.findOne() , "B" );
+
diff --git a/jstests/set5.js b/jstests/set5.js
new file mode 100644
index 00000000000..afa0d014bde
--- /dev/null
+++ b/jstests/set5.js
@@ -0,0 +1,17 @@
+
+t = db.set5;
+t.drop();
+
+function check( want , err ){
+ var x = t.findOne();
+ delete x._id;
+ assert.docEq( want , x , err );
+}
+
+t.update( { a : 5 } , { $set : { a : 6 , b : null } } , true );
+check( { a : 6 , b : null } , "A" )
+
+t.drop();
+
+t.update( { z : 5 } , { $set : { z : 6 , b : null } } , true );
+check( { b : null , z : 6 } , "B" )
diff --git a/jstests/set6.js b/jstests/set6.js
new file mode 100644
index 00000000000..d41e7aba971
--- /dev/null
+++ b/jstests/set6.js
@@ -0,0 +1,20 @@
+
+t = db.set6;
+t.drop();
+
+x = { _id : 1 , r : new DBRef( "foo" , new ObjectId() ) }
+t.insert( x )
+assert.eq( x , t.findOne() , "A" );
+
+x.r.$id = new ObjectId()
+t.update({}, { $set : { r : x.r } } );
+assert.eq( x , t.findOne() , "B");
+
+x.r2 = new DBRef( "foo2" , 5 )
+t.update( {} , { $set : { "r2" : x.r2 } } );
+assert.eq( x , t.findOne() , "C" )
+
+x.r.$id = 2;
+t.update( {} , { $set : { "r.$id" : 2 } } )
+assert.eq( x.r.$id , t.findOne().r.$id , "D");
+
diff --git a/jstests/set7.js b/jstests/set7.js
new file mode 100644
index 00000000000..68c4d471f58
--- /dev/null
+++ b/jstests/set7.js
@@ -0,0 +1,67 @@
+// test $set with array indices
+
+t = db.jstests_set7;
+
+t.drop();
+
+t.save( {a:[0,1,2,3]} );
+t.update( {}, {$set:{"a.0":2}} );
+assert.eq( [2,1,2,3], t.findOne().a );
+
+t.update( {}, {$set:{"a.4":5}} );
+assert.eq( [2,1,2,3,5], t.findOne().a );
+
+t.update( {}, {$set:{"a.9":9}} );
+assert.eq( [2,1,2,3,5,null,null,null,null,9], t.findOne().a );
+
+t.drop();
+t.save( {a:[0,1,2,3]} );
+t.update( {}, {$set:{"a.9":9,"a.7":7}} );
+assert.eq( [0,1,2,3,null,null,null,7,null,9], t.findOne().a );
+
+t.drop();
+t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
+t.update( {}, {$set:{"a.11":11} } );
+assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+
+t.drop();
+t.save( {} );
+t.update( {}, {$set:{"a.0":4}} );
+assert.eq( {"0":4}, t.findOne().a );
+
+t.drop();
+t.update( {"a.0":4}, {$set:{b:1}}, true );
+assert.eq( {"0":4}, t.findOne().a );
+
+t.drop();
+t.save( {a:[]} );
+t.update( {}, {$set:{"a.f":1}} );
+assert( db.getLastError() );
+assert.eq( [], t.findOne().a );
+
+// Test requiring proper ordering of multiple mods.
+t.drop();
+t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
+t.update( {}, {$set:{"a.11":11,"a.2":-2}} );
+assert.eq( [0,1,-2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+
+// Test upsert case
+t.drop();
+t.update( {a:[0,1,2,3,4,5,6,7,8,9,10]}, {$set:{"a.11":11} }, true );
+assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+
+// SERVER-3750
+t.drop();
+t.save( {a:[]} );
+t.update( {}, {$set:{"a.1500000":1}} ); // current limit
+assert( db.getLastError() == null );
+
+t.drop();
+t.save( {a:[]} );
+t.update( {}, {$set:{"a.1500001":1}} ); // 1 over limit
+assert.neq( db.getLastErrorObj(), null );
+
+t.drop();
+t.save( {a:[]} );
+t.update( {}, {$set:{"a.1000000000":1}} ); // way over limit
+assert.neq( db.getLastErrorObj(), null );
diff --git a/jstests/set_param1.js b/jstests/set_param1.js
new file mode 100644
index 00000000000..555cb520306
--- /dev/null
+++ b/jstests/set_param1.js
@@ -0,0 +1,9 @@
+
+old = db.adminCommand( { "getParameter" : "*" } )
+tmp1 = db.adminCommand( { "setParameter" : 1 , "logLevel" : 5 } )
+tmp2 = db.adminCommand( { "setParameter" : 1 , "logLevel" : old.logLevel } )
+now = db.adminCommand( { "getParameter" : "*" } )
+
+assert.eq( old , now , "A" )
+assert.eq( old.logLevel , tmp1.was , "B" )
+assert.eq( 5 , tmp2.was , "C" )
diff --git a/jstests/shell1.js b/jstests/shell1.js
new file mode 100644
index 00000000000..2e6c7292374
--- /dev/null
+++ b/jstests/shell1.js
@@ -0,0 +1,15 @@
+x = 1;
+
+shellHelper( "show", "tables;" )
+shellHelper( "show", "tables" )
+shellHelper( "show", "tables ;" )
+
+// test slaveOk levels
+assert(!db.getSlaveOk() && !db.test.getSlaveOk() && !db.getMongo().getSlaveOk(), "slaveOk 1");
+db.getMongo().setSlaveOk();
+assert(db.getSlaveOk() && db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 2");
+db.setSlaveOk(false);
+assert(!db.getSlaveOk() && !db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 3");
+db.test.setSlaveOk(true);
+assert(!db.getSlaveOk() && db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 4");
+
diff --git a/jstests/shell_writeconcern.js b/jstests/shell_writeconcern.js
new file mode 100644
index 00000000000..74247026b31
--- /dev/null
+++ b/jstests/shell_writeconcern.js
@@ -0,0 +1,72 @@
+"use strict"
+// check that shell writeconcern work correctly
+// 1.) tests that it can be set on each level and is inherited
+// 2.) tests that each operation (update/insert/remove/save) take and ensure a write concern
+
+var collA = db.shell_wc_a;
+var collB = db.shell_wc_b;
+collA.drop()
+collB.drop()
+
+// test inheritance
+db.setWriteConcern({w:1})
+assert.eq(1, db.getWriteConcern().toJSON().w)
+assert.eq(1, collB.getWriteConcern().toJSON().w)
+
+collA.setWriteConcern({w:2})
+assert.eq(2, collA.getWriteConcern().toJSON().w)
+collA.unsetWriteConcern()
+assert.eq(1, collA.getWriteConcern().toJSON().w)
+
+db.unsetWriteConcern()
+assert.eq(undefined, collA.getWriteConcern())
+assert.eq(undefined, collB.getWriteConcern())
+assert.eq(undefined, db.getWriteConcern())
+
+// test methods, by generating an error
+var res = assert.writeOK(collA.save({_id:1}, {writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(1, res.n, tojson(res));
+ assert.eq(1, res.upserted, tojson(res));
+} else {
+ assert.eq(1, res.nUpserted, tojson(res));
+}
+
+var res = assert.writeOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(1, res.n, tojson(res));
+} else {
+ assert.eq(1, res.nMatched, tojson(res));
+}
+var res = assert.writeOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(1, res.n, tojson(res));
+} else {
+ assert.eq(1, res.nMatched, tojson(res));
+}
+
+var res = assert.writeOK(collA.insert({_id:2}, {writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(0, res.n, tojson(res));
+} else {
+ assert.eq(1, res.nInserted, tojson(res));
+}
+
+var res = assert.writeOK(collA.remove({_id:3}, {writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(0, res.n, tojson(res));
+} else {
+ assert.eq(0, res.nRemoved, tojson(res));
+}
+
+var res = assert.writeOK(collA.remove({}, {justOne:true, writeConcern:{w:1}}));
+if (!db.getMongo().useWriteCommands() ) {
+ assert.eq(1, res.n, tojson(res));
+} else {
+ assert.eq(1, res.nRemoved, tojson(res));
+}
+
+assert.writeError(collA.insert([{_id:1}, {_id:1}], {ordered:true, writeConcern:{w:1}}));
+assert.writeError(collA.insert([{_id:1}, {_id:1}], {ordered:false, writeConcern:{w:1}}));
+
+
diff --git a/jstests/shellkillop.js b/jstests/shellkillop.js
new file mode 100644
index 00000000000..d903f251f13
--- /dev/null
+++ b/jstests/shellkillop.js
@@ -0,0 +1,61 @@
+baseName = "jstests_shellkillop";
+
+// 'retry' should be set to true in contexts where an exception should cause the test to be retried rather than to fail.
+retry = false;
+
+function testShellAutokillop() {
+
+if (true) { // toggle to disable test
+ db[baseName].drop();
+
+ print("shellkillop.js insert data");
+ for (i = 0; i < 100000; ++i) {
+ db[baseName].insert({ i: 1 });
+ }
+ assert.eq(100000, db[baseName].count());
+
+ // mongo --autokillop suppressed the ctrl-c "do you want to kill current operation" message
+ // it's just for testing purposes and thus not in the shell help
+ var evalStr = "print('SKO subtask started'); db." + baseName + ".update( {}, {$set:{i:'abcdefghijkl'}}, false, true ); db." + baseName + ".count();";
+ print("shellkillop.js evalStr:" + evalStr);
+ spawn = startMongoProgramNoConnect("mongo", "--autokillop", "--port", myPort(), "--eval", evalStr);
+
+ sleep(100);
+ retry = true;
+ assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test won't be valid");
+ retry = false;
+
+ stopMongoProgramByPid(spawn);
+
+ sleep(100);
+
+ print("count abcdefghijkl:" + db[baseName].find({ i: 'abcdefghijkl' }).count());
+
+ var inprog = db.currentOp().inprog;
+ for (i in inprog) {
+ if (inprog[i].ns == "test." + baseName)
+ throw "shellkillop.js op is still running: " + tojson( inprog[i] );
+ }
+
+ retry = true;
+ assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test was not valid");
+ retry = false;
+}
+
+}
+
+for( var nTries = 0; nTries < 10 && retry; ++nTries ) {
+ try {
+ testShellAutokillop();
+ } catch (e) {
+ if ( !retry ) {
+ throw e;
+ }
+ printjson( e );
+ print( "retrying..." );
+ }
+}
+
+assert( !retry, "retried too many times" );
+
+print("shellkillop.js SUCCESS");
diff --git a/jstests/shellspawn.js b/jstests/shellspawn.js
new file mode 100644
index 00000000000..f43e40e9e62
--- /dev/null
+++ b/jstests/shellspawn.js
@@ -0,0 +1,33 @@
+#!/usr/bin/mongod
+
+baseName = "jstests_shellspawn";
+t = db.getSiblingDB('test').getCollection( baseName );
+t.drop();
+
+if ( typeof( _startMongoProgram ) == "undefined" ){
+ print( "no fork support" );
+}
+else {
+ var evalString = "sleep( 2000 ); db.getSiblingDB('test').getCollection( '" + baseName + "' ).save( {a:1} );";
+ spawn = startMongoProgramNoConnect( "mongo", "admin", "--port", myPort(), "--eval", evalString );
+
+// assert.soon( function() { return 1 == t.count(); } );
+ // SERVER-2784 debugging - error message overwritten to indicate last count value.
+ assert.soon( "count = t.count(); msg = 'did not reach expected count, last value: ' + t.count(); 1 == count;" );
+
+ stopMongoProgramByPid( spawn );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "print( 'I am a shell' );" );
+
+ stopMongoProgramByPid( spawn );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort() );
+
+ stopMongoProgramByPid( spawn );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort() );
+
+ stopMongoProgramByPid( spawn );
+
+ // all these shells should be killed
+}
diff --git a/jstests/shellstartparallel.js b/jstests/shellstartparallel.js
new file mode 100644
index 00000000000..59110296b26
--- /dev/null
+++ b/jstests/shellstartparallel.js
@@ -0,0 +1,17 @@
+function f() {
+ throw "intentional_throw_to_test_assert_throws";
+}
+assert.throws(f);
+
+// verify that join works
+db.sps.drop();
+join = startParallelShell("sleep(1000); db.sps.insert({x:1}); db.getLastError();");
+join();
+assert.eq(1, db.sps.count(), "join problem?");
+
+// test with a throw
+join = startParallelShell("db.sps.insert({x:1}); db.getLastError(); throw 'intentionally_uncaught';");
+join();
+assert.eq(2, db.sps.count(), "join2 problem?");
+
+print("shellstartparallel.js SUCCESS");
diff --git a/jstests/shelltypes.js b/jstests/shelltypes.js
new file mode 100644
index 00000000000..3f109269b39
--- /dev/null
+++ b/jstests/shelltypes.js
@@ -0,0 +1,53 @@
+// check that constructor also works without "new"
+var a;
+var b;
+a = new ObjectId();
+b = ObjectId(a.valueOf());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "oid");
+
+a = new DBRef("test", "theid");
+b = DBRef(a.getRef(), a.getId());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "dbref");
+
+a = new DBPointer("test", new ObjectId());
+b = DBPointer(a.getCollection(), a.getId());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "dbpointer");
+
+a = new Timestamp(10, 20);
+b = Timestamp(a.t, a.i);
+printjson(a);
+assert.eq(tojson(a), tojson(b), "timestamp");
+
+a = new BinData(3,"VQ6EAOKbQdSnFkRmVUQAAA==");
+b = BinData(a.type, a.base64());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "bindata");
+
+a = new UUID("550e8400e29b41d4a716446655440000");
+b = UUID(a.hex());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "uuid");
+
+a = new MD5("550e8400e29b41d4a716446655440000");
+b = MD5(a.hex());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "md5");
+
+a = new HexData(4, "550e8400e29b41d4a716446655440000");
+b = HexData(a.type, a.hex());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "hexdata");
+
+a = new NumberLong(100);
+b = NumberLong(a.toNumber());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "long");
+
+a = new NumberInt(100);
+b = NumberInt(a.toNumber());
+printjson(a);
+assert.eq(tojson(a), tojson(b), "int");
+
diff --git a/jstests/showdiskloc.js b/jstests/showdiskloc.js
new file mode 100644
index 00000000000..d1339c6d238
--- /dev/null
+++ b/jstests/showdiskloc.js
@@ -0,0 +1,25 @@
+// Sanity check for the $showDiskLoc option.
+
+t = db.jstests_showdiskloc;
+t.drop();
+
+function checkResults( arr ) {
+ for( i in arr ) {
+ a = arr[ i ];
+ assert( a['$diskLoc'] );
+ }
+}
+
+// Check query.
+t.save( {} );
+checkResults( t.find()._addSpecial("$showDiskLoc" , true).toArray() );
+
+// Check query and get more.
+t.save( {} );
+t.save( {} );
+checkResults( t.find().batchSize( 2 )._addSpecial("$showDiskLoc" , true).toArray() );
+
+// Check with a covered index.
+t.ensureIndex( { a:1 } );
+checkResults
+( t.find( {}, { _id:0, a:1 } ).hint( { a:1 } )._addSpecial("$showDiskLoc" , true).toArray() );
diff --git a/jstests/skip1.js b/jstests/skip1.js
new file mode 100644
index 00000000000..c620fb01bca
--- /dev/null
+++ b/jstests/skip1.js
@@ -0,0 +1,15 @@
+// SERVER-2845 When skipping objects without loading them, they shouldn't be
+// included in the nscannedObjects count.
+
+if ( 0 ) { // SERVER-2845
+t = db.jstests_skip1;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:5} );
+t.save( {a:5} );
+t.save( {a:5} );
+
+assert.eq( 3, t.find( {a:5} ).skip( 2 ).explain().nscanned );
+assert.eq( 1, t.find( {a:5} ).skip( 2 ).explain().nscannedObjects );
+} \ No newline at end of file
diff --git a/jstests/slice1.js b/jstests/slice1.js
new file mode 100644
index 00000000000..b20e7e48b14
--- /dev/null
+++ b/jstests/slice1.js
@@ -0,0 +1,68 @@
+t = db.slice1;
+t.drop();
+
+t.insert({_id:1, a:[0,1,2,3,4,5,-5,-4,-3,-2,-1], b:1, c:1});
+
+// first three
+out = t.findOne({}, {a:{$slice:3}});
+assert.eq(out.a , [0,1,2], '1');
+
+// last three
+out = t.findOne({}, {a:{$slice:-3}});
+assert.eq(out.a , [-3, -2, -1], '2');
+
+// skip 2, limit 3
+out = t.findOne({}, {a:{$slice:[2, 3]}});
+assert.eq(out.a , [2,3,4], '3');
+
+// skip to fifth from last, limit 4
+out = t.findOne({}, {a:{$slice:[-5, 4]}});
+assert.eq(out.a , [-5, -4, -3, -2], '4');
+
+// skip to fifth from last, limit 10
+out = t.findOne({}, {a:{$slice:[-5, 10]}});
+assert.eq(out.a , [-5, -4, -3, -2, -1], '5');
+
+
+// interaction with other fields
+
+out = t.findOne({}, {a:{$slice:3}});
+assert.eq(out.a , [0,1,2], 'A 1');
+assert.eq(out.b , 1, 'A 2');
+assert.eq(out.c , 1, 'A 3');
+
+out = t.findOne({}, {a:{$slice:3}, b:true});
+assert.eq(out.a , [0,1,2], 'B 1');
+assert.eq(out.b , 1, 'B 2');
+assert.eq(out.c , undefined);
+
+out = t.findOne({}, {a:{$slice:3}, b:false});
+assert.eq(out.a , [0,1,2]);
+assert.eq(out.b , undefined);
+assert.eq(out.c , 1);
+
+t.drop()
+t.insert({comments: [{id:0, text:'a'},{id:1, text:'b'},{id:2, text:'c'},{id:3, text:'d'}], title:'foo'})
+
+
+out = t.findOne({}, {comments:{$slice:2}, 'comments.id':true});
+assert.eq(out.comments , [{id:0}, {id:1}]);
+assert.eq(out.title , undefined);
+
+out = t.findOne({}, {comments:{$slice:2}, 'comments.id':false});
+assert.eq(out.comments , [{text: 'a'}, {text: 'b'}]);
+assert.eq(out.title , 'foo');
+
+//nested arrays
+t.drop();
+t.insert({_id:1, a:[[1,1,1], [2,2,2], [3,3,3]], b:1, c:1});
+
+out = t.findOne({}, {a:{$slice:1}});
+assert.eq(out.a , [[1,1,1]], 'n 1');
+
+out = t.findOne({}, {a:{$slice:-1}});
+assert.eq(out.a , [[3,3,3]], 'n 2');
+
+out = t.findOne({}, {a:{$slice:[0,2]}});
+assert.eq(out.a , [[1,1,1],[2,2,2]], 'n 2');
+
diff --git a/jstests/sort1.js b/jstests/sort1.js
new file mode 100644
index 00000000000..12b97728e90
--- /dev/null
+++ b/jstests/sort1.js
@@ -0,0 +1,48 @@
+debug = function( s ){
+ //print( s );
+}
+
+t = db.sort1;
+t.drop();
+
+t.save({x:3,z:33});
+t.save({x:5,z:33});
+t.save({x:2,z:33});
+t.save({x:3,z:33});
+t.save({x:1,z:33});
+
+debug( "a" )
+for( var pass = 0; pass < 2; pass++ ) {
+ assert( t.find().sort({x:1})[0].x == 1 );
+ assert( t.find().sort({x:1}).skip(1)[0].x == 2 );
+ assert( t.find().sort({x:-1})[0].x == 5 );
+ assert( t.find().sort({x:-1})[1].x == 3 );
+ assert.eq( t.find().sort({x:-1}).skip(0)[0].x , 5 );
+ assert.eq( t.find().sort({x:-1}).skip(1)[0].x , 3 );
+ t.ensureIndex({x:1});
+
+}
+
+debug( "b" )
+assert(t.validate().valid);
+
+t.drop();
+t.save({x:'a'});
+t.save({x:'aba'});
+t.save({x:'zed'});
+t.save({x:'foo'});
+
+debug( "c" )
+
+for( var pass = 0; pass < 2; pass++ ) {
+ debug( tojson( t.find().sort( { "x" : 1 } ).limit(1).next() ) );
+ assert.eq( "a" , t.find().sort({'x': 1}).limit(1).next().x , "c.1" );
+ assert.eq( "a" , t.find().sort({'x': 1}).next().x , "c.2" );
+ assert.eq( "zed" , t.find().sort({'x': -1}).limit(1).next().x , "c.3" );
+ assert.eq( "zed" , t.find().sort({'x': -1}).next().x , "c.4" );
+ t.ensureIndex({x:1});
+}
+
+debug( "d" )
+
+assert(t.validate().valid);
diff --git a/jstests/sort10.js b/jstests/sort10.js
new file mode 100644
index 00000000000..e9663f4a55d
--- /dev/null
+++ b/jstests/sort10.js
@@ -0,0 +1,48 @@
+// signed dates check
+t = db.sort10;
+
+function checkSorting1(opts) {
+ t.drop();
+ t.insert({ x: new Date(50000) });
+ t.insert({ x: new Date(-50) });
+ var d = new Date(-50);
+ for (var pass = 0; pass < 2; pass++) {
+ assert(t.find().sort({x:1})[0].x.valueOf() == d.valueOf());
+ t.ensureIndex({ x: 1 }, opts);
+ t.insert({ x: new Date() });
+ }
+}
+
+checkSorting1({})
+checkSorting1({"background":true})
+
+
+
+function checkSorting2(dates, sortOrder) {
+ cur = t.find().sort({x:sortOrder});
+ assert.eq(dates.length, cur.count(), "Incorrect number of results returned");
+ index = 0;
+ while (cur.hasNext()) {
+ date = cur.next().x;
+ assert.eq(dates[index].valueOf(), date.valueOf());
+ index++;
+ }
+}
+
+t.drop();
+dates = [new Date(-5000000000000), new Date(5000000000000), new Date(0), new Date(5), new Date(-5)];
+for (var i = 0; i < dates.length; i++) {
+ t.insert({x:dates[i]});
+}
+dates.sort(function(a,b){return a - b});
+reverseDates = dates.slice(0).reverse()
+
+checkSorting2(dates, 1)
+checkSorting2(reverseDates, -1)
+t.ensureIndex({x:1})
+checkSorting2(dates, 1)
+checkSorting2(reverseDates, -1)
+t.dropIndexes()
+t.ensureIndex({x:-1})
+checkSorting2(dates, 1)
+checkSorting2(reverseDates, -1)
diff --git a/jstests/sort2.js b/jstests/sort2.js
new file mode 100644
index 00000000000..6dfa8486201
--- /dev/null
+++ b/jstests/sort2.js
@@ -0,0 +1,32 @@
+// test sorting, mainly a very simple test with no index
+
+t = db.sort2;
+
+t.drop();
+t.save({x:1, y:{a:5,b:4}});
+t.save({x:1, y:{a:7,b:3}});
+t.save({x:1, y:{a:2,b:3}});
+t.save({x:1, y:{a:9,b:3}});
+for( var pass = 0; pass < 2; pass++ ) {
+ var res = t.find().sort({'y.a':1}).toArray();
+ assert( res[0].y.a == 2 );
+ assert( res[1].y.a == 5 );
+ assert( res.length == 4 );
+ t.ensureIndex({"y.a":1});
+}
+assert(t.validate().valid);
+
+t.drop();
+t.insert({ x: 1 })
+t.insert({ x: 5000000000 })
+t.insert({ x: NaN });
+t.insert({ x: Infinity });
+t.insert({ x: -Infinity });
+var good = [NaN, -Infinity, 1, 5000000000, Infinity];
+for (var pass = 0; pass < 2; pass++) {
+ var res = t.find({}, { _id: 0 }).sort({ x: 1 }).toArray();
+ for (var i = 0; i < good.length; i++) {
+ assert(good[i].toString() == res[i].x.toString());
+ }
+ t.ensureIndex({ x : 1 });
+}
diff --git a/jstests/sort3.js b/jstests/sort3.js
new file mode 100644
index 00000000000..b79f1f60381
--- /dev/null
+++ b/jstests/sort3.js
@@ -0,0 +1,16 @@
+
+t = db.sort3;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 5 } );
+t.save( { a : 3 } );
+
+assert.eq( "1,5,3" , t.find().toArray().map( function(z){ return z.a; } ) );
+
+assert.eq( "1,3,5" , t.find().sort( { a : 1 } ).toArray().map( function(z){ return z.a; } ) );
+assert.eq( "5,3,1" , t.find().sort( { a : -1 } ).toArray().map( function(z){ return z.a; } ) );
+
+assert.eq( "1,3,5" , t.find( { query : {} , orderby : { a : 1 } } ).toArray().map( function(z){ return z.a; } ) );
+assert.eq( "5,3,1" , t.find( { query : {} , orderby : { a : -1 } } ).toArray().map( function(z){ return z.a; } ) );
+
diff --git a/jstests/sort4.js b/jstests/sort4.js
new file mode 100644
index 00000000000..5174b46f41f
--- /dev/null
+++ b/jstests/sort4.js
@@ -0,0 +1,43 @@
+t = db.sort4;
+t.drop();
+
+
+function nice( sort , correct , extra ){
+ var c = t.find().sort( sort );
+ var s = "";
+ c.forEach(
+ function(z){
+ if ( s.length )
+ s += ",";
+ s += z.name;
+ if ( z.prename )
+ s += z.prename;
+ }
+ );
+ print( tojson( sort ) + "\t" + s );
+ if ( correct )
+ assert.eq( correct , s , tojson( sort ) + "(" + extra + ")" );
+ return s;
+}
+
+t.save({name: 'A', prename: 'B'})
+t.save({name: 'A', prename: 'C'})
+t.save({name: 'B', prename: 'B'})
+t.save({name: 'B', prename: 'D'})
+
+nice( { name:1 } , "AB,AC,BB,BD" , "s1" );
+nice( { prename : 1 } , "AB,BB,AC,BD" , "s2" );
+nice( {name:1, prename:1} , "AB,AC,BB,BD" , "s3" );
+
+t.save({name: 'A'})
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD" , "e1" );
+
+t.save({name: 'C'})
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2" ); // SERVER-282
+
+t.ensureIndex( { name : 1 , prename : 1 } );
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ia" ); // SERVER-282
+
+t.dropIndexes();
+t.ensureIndex( { name : 1 } );
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ib" ); // SERVER-282
diff --git a/jstests/sort5.js b/jstests/sort5.js
new file mode 100644
index 00000000000..b90256ef79d
--- /dev/null
+++ b/jstests/sort5.js
@@ -0,0 +1,21 @@
+var t = db.sort5;
+t.drop();
+
+t.save({_id: 5, x: 1, y: {a: 5, b: 4}});
+t.save({_id: 7, x: 2, y: {a: 7, b: 3}});
+t.save({_id: 2, x: 3, y: {a: 2, b: 3}});
+t.save({_id: 9, x: 4, y: {a: 9, b: 3}});
+
+// test compound sorting
+
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A no index" );
+t.ensureIndex({"y.b": 1, "y.a": -1});
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A index" );
+assert(t.validate().valid, "A valid");
+
+// test sorting on compound key involving _id
+
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
+t.ensureIndex({"y.b": 1, "_id": -1});
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
+assert(t.validate().valid, "B valid");
diff --git a/jstests/sort6.js b/jstests/sort6.js
new file mode 100644
index 00000000000..027ba7a01f5
--- /dev/null
+++ b/jstests/sort6.js
@@ -0,0 +1,38 @@
+
+t = db.sort6;
+
+function get( x ){
+ return t.find().sort( { c : x } ).map( function(z){ return z._id; } );
+}
+
+// part 1
+t.drop();
+
+t.insert({_id:1,c:null})
+t.insert({_id:2,c:1})
+t.insert({_id:3,c:2})
+
+
+assert.eq( [3,2,1] , get( -1 ) , "A1" ) // SERVER-635
+assert.eq( [1,2,3] , get( 1 ) , "A2" )
+
+t.ensureIndex( { c : 1 } );
+
+assert.eq( [3,2,1] , get( -1 ) , "B1" )
+assert.eq( [1,2,3] , get( 1 ) , "B2" )
+
+
+// part 2
+t.drop();
+
+t.insert({_id:1})
+t.insert({_id:2,c:1})
+t.insert({_id:3,c:2})
+
+assert.eq( [3,2,1] , get( -1 ) , "C1" ) // SERVER-635
+assert.eq( [1,2,3] , get( 1 ) , "C2" )
+
+t.ensureIndex( { c : 1 } );
+
+assert.eq( [3,2,1] , get( -1 ) , "D1" )
+assert.eq( [1,2,3] , get( 1 ) , "X2" )
diff --git a/jstests/sort7.js b/jstests/sort7.js
new file mode 100644
index 00000000000..0b98734e5ff
--- /dev/null
+++ b/jstests/sort7.js
@@ -0,0 +1,25 @@
+// Check sorting of array sub field SERVER-480.
+
+t = db.jstests_sort7;
+t.drop();
+
+// Compare indexed and unindexed sort order for an array embedded field.
+
+t.save( { a : [ { x : 2 } ] } );
+t.save( { a : [ { x : 1 } ] } );
+t.save( { a : [ { x : 3 } ] } );
+unindexed = t.find().sort( {"a.x":1} ).toArray();
+t.ensureIndex( { "a.x" : 1 } );
+indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
+assert.eq( unindexed, indexed );
+
+// Now check when there are two objects in the array.
+
+t.remove({});
+t.save( { a : [ { x : 2 }, { x : 3 } ] } );
+t.save( { a : [ { x : 1 }, { x : 4 } ] } );
+t.save( { a : [ { x : 3 }, { x : 2 } ] } );
+unindexed = t.find().sort( {"a.x":1} ).toArray();
+t.ensureIndex( { "a.x" : 1 } );
+indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
+assert.eq( unindexed, indexed );
diff --git a/jstests/sort8.js b/jstests/sort8.js
new file mode 100644
index 00000000000..916075502d7
--- /dev/null
+++ b/jstests/sort8.js
@@ -0,0 +1,30 @@
+// Check sorting of arrays indexed by key SERVER-2884
+
+t = db.jstests_sort8;
+t.drop();
+
+t.save( {a:[1,10]} );
+t.save( {a:5} );
+unindexedForward = t.find().sort( {a:1} ).toArray();
+unindexedReverse = t.find().sort( {a:-1} ).toArray();
+t.ensureIndex( {a:1} );
+indexedForward = t.find().sort( {a:1} ).hint( {a:1} ).toArray();
+indexedReverse = t.find().sort( {a:-1} ).hint( {a:1} ).toArray();
+
+assert.eq( unindexedForward, indexedForward );
+assert.eq( unindexedReverse, indexedReverse );
+
+// Sorting is based on array members, not the array itself.
+assert.eq( [1,10], unindexedForward[ 0 ].a );
+assert.eq( [1,10], unindexedReverse[ 0 ].a );
+
+// Now try with a bounds constraint.
+t.dropIndexes();
+unindexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).toArray();
+unindexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).toArray();
+t.ensureIndex( {a:1} );
+indexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).hint( {a:1} ).toArray();
+indexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).hint( {a:1} ).toArray();
+
+assert.eq( unindexedForward, indexedForward );
+assert.eq( unindexedReverse, indexedReverse );
diff --git a/jstests/sort9.js b/jstests/sort9.js
new file mode 100644
index 00000000000..62407d6e96d
--- /dev/null
+++ b/jstests/sort9.js
@@ -0,0 +1,26 @@
+// Unindexed array sorting SERVER-2884
+
+t = db.jstests_sort9;
+t.drop();
+
+t.save( {a:[]} );
+t.save( {a:[[]]} );
+assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
+assert.eq( 2, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+
+t.drop();
+t.save( {} );
+assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
+assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {a:1} ).itcount() );
+assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {'a.b':1} ).itcount() );
+
+t.drop();
+t.save( {a:{}} );
+assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
+assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {a:1} ).itcount() );
+assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {'a.b':1} ).itcount() );
diff --git a/jstests/sort_numeric.js b/jstests/sort_numeric.js
new file mode 100644
index 00000000000..807f23dfe8d
--- /dev/null
+++ b/jstests/sort_numeric.js
@@ -0,0 +1,35 @@
+
+t = db.sort_numeric;
+t.drop();
+
+// there are two numeric types in the db; make sure it handles them right
+// for comparisons.
+
+t.save( { a : 3 } );
+t.save( { a : 3.1 } );
+t.save( { a : 2.9 } );
+t.save( { a : 1 } );
+t.save( { a : 1.9 } );
+t.save( { a : 5 } );
+t.save( { a : 4.9 } );
+t.save( { a : 2.91 } );
+
+for( var pass = 0; pass < 2; pass++ ) {
+
+ var c = t.find().sort({a:1});
+ var last = 0;
+ while( c.hasNext() ) {
+ current = c.next();
+ assert( current.a > last );
+ last = current.a;
+ }
+
+ assert( t.find({a:3}).count() == 1 );
+ assert( t.find({a:3.0}).count() == 1 );
+ assert( t.find({a:3.0}).length() == 1 );
+
+ t.ensureIndex({a:1});
+}
+
+assert(t.validate().valid);
+
diff --git a/jstests/sorta.js b/jstests/sorta.js
new file mode 100644
index 00000000000..7c82778a186
--- /dev/null
+++ b/jstests/sorta.js
@@ -0,0 +1,26 @@
+// SERVER-2905 sorting with missing fields
+
+t = db.jstests_sorta;
+t.drop();
+
+// Enable _allow_dot to try and bypass v8 field name checking.
+t.insert( {_id:0,a:MinKey}, true );
+t.save( {_id:3,a:null} );
+t.save( {_id:1,a:[]} );
+t.save( {_id:7,a:[2]} );
+t.save( {_id:4} );
+t.save( {_id:5,a:null} );
+t.save( {_id:2,a:[]} );
+t.save( {_id:6,a:1} );
+t.insert( {_id:8,a:MaxKey}, true );
+
+function sorted( arr ) {
+ assert.eq( 9, arr.length );
+ for( i = 1; i < arr.length; ++i ) {
+ assert.lte( arr[ i-1 ]._id, arr[ i ]._id );
+ }
+}
+
+sorted( t.find().sort( {a:1} ).toArray() );
+t.ensureIndex( {a:1} );
+sorted( t.find().sort( {a:1} ).hint( {a:1} ).toArray() );
diff --git a/jstests/sortb.js b/jstests/sortb.js
new file mode 100644
index 00000000000..e16c7d650e6
--- /dev/null
+++ b/jstests/sortb.js
@@ -0,0 +1,27 @@
+// Test that the in memory sort capacity limit is checked for all "top N" sort candidates.
+// SERVER-4716
+
+t = db.jstests_sortb;
+t.drop();
+
+t.ensureIndex({b:1});
+
+for( i = 0; i < 100; ++i ) {
+ t.save( {a:i,b:i} );
+}
+
+// These large documents will not be part of the initial set of "top 100" matches, and they will
+// not be part of the final set of "top 100" matches returned to the client. However, they are an
+// intermediate set of "top 100" matches and should trigger an in memory sort capacity exception.
+big = new Array( 1024 * 1024 ).toString();
+for( i = 100; i < 200; ++i ) {
+ t.save( {a:i,b:i,big:big} );
+}
+
+for( i = 200; i < 300; ++i ) {
+ t.save( {a:i,b:i} );
+}
+
+assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).limit( 100 ).itcount(); } );
+assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).showDiskLoc().limit( 100 ).itcount(); } );
+t.drop(); \ No newline at end of file
diff --git a/jstests/sortc.js b/jstests/sortc.js
new file mode 100644
index 00000000000..f9aa202508b
--- /dev/null
+++ b/jstests/sortc.js
@@ -0,0 +1,37 @@
+// Test sorting with skipping and multiple candidate query plans.
+
+t = db.jstests_sortc;
+t.drop();
+
+t.save( {a:1} );
+t.save( {a:2} );
+
+function checkA( a, sort, skip, query ) {
+ query = query || {};
+ assert.eq( a, t.find( query ).sort( sort ).skip( skip )[ 0 ].a );
+}
+
+function checkSortAndSkip() {
+ checkA( 1, {a:1}, 0 );
+ checkA( 2, {a:1}, 1 );
+
+ checkA( 1, {a:1}, 0, {a:{$gt:0},b:null} );
+ checkA( 2, {a:1}, 1, {a:{$gt:0},b:null} );
+
+ checkA( 2, {a:-1}, 0 );
+ checkA( 1, {a:-1}, 1 );
+
+ checkA( 2, {a:-1}, 0, {a:{$gt:0},b:null} );
+ checkA( 1, {a:-1}, 1, {a:{$gt:0},b:null} );
+
+ checkA( 1, {$natural:1}, 0 );
+ checkA( 2, {$natural:1}, 1 );
+
+ checkA( 2, {$natural:-1}, 0 );
+ checkA( 1, {$natural:-1}, 1 );
+}
+
+checkSortAndSkip();
+
+t.ensureIndex( {a:1} );
+checkSortAndSkip();
diff --git a/jstests/sortd.js b/jstests/sortd.js
new file mode 100644
index 00000000000..963d32b0ca4
--- /dev/null
+++ b/jstests/sortd.js
@@ -0,0 +1,70 @@
+// Test sorting with dups and multiple candidate query plans.
+
+t = db.jstests_sortd;
+
+function checkNumSorted( n, query ) {
+ docs = query.toArray();
+ assert.eq( n, docs.length );
+ for( i = 1; i < docs.length; ++i ) {
+ assert.lte( docs[ i-1 ].a, docs[ i ].a );
+ }
+}
+
+
+// Test results added by ordered and unordered plans, unordered plan finishes.
+
+t.drop();
+
+t.save( {a:[1,2,3,4,5]} );
+t.save( {a:10} );
+t.ensureIndex( {a:1} );
+
+assert.eq( 2, t.find( {a:{$gt:0}} ).sort( {a:1} ).itcount() );
+assert.eq( 2, t.find( {a:{$gt:0},b:null} ).sort( {a:1} ).itcount() );
+
+// Test results added by ordered and unordered plans, ordered plan finishes.
+
+t.drop();
+
+t.save( {a:1} );
+t.save( {a:10} );
+for( i = 2; i <= 9; ++i ) {
+ t.save( {a:i} );
+}
+for( i = 0; i < 30; ++i ) {
+ t.save( {a:100} );
+}
+t.ensureIndex( {a:1} );
+
+checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10}} ).sort( {a:1} ) );
+checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10},b:null} ).sort( {a:1} ) );
+
+// Test results added by ordered and unordered plans, ordered plan finishes and continues with getmore.
+
+t.drop();
+
+t.save( {a:1} );
+t.save( {a:200} );
+for( i = 2; i <= 199; ++i ) {
+ t.save( {a:i} );
+}
+for( i = 0; i < 30; ++i ) {
+ t.save( {a:2000} );
+}
+t.ensureIndex( {a:1} );
+
+checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200}} ).sort( {a:1} ) );
+checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200},b:null} ).sort( {a:1} ) );
+
+// Test results added by ordered and unordered plans, with unordered results excluded during
+// getmore.
+
+t.drop();
+
+for( i = 399; i >= 0; --i ) {
+ t.save( {a:i} );
+}
+t.ensureIndex( {a:1} );
+
+checkNumSorted( 400, t.find( {a:{$gte:0,$lte:400},b:null} ).batchSize( 50 ).sort( {a:1} ) );
+
diff --git a/jstests/sortf.js b/jstests/sortf.js
new file mode 100644
index 00000000000..615791e25a5
--- /dev/null
+++ b/jstests/sortf.js
@@ -0,0 +1,20 @@
+// Unsorted plan on {a:1}, sorted plan on {b:1}. The unsorted plan exhausts its memory limit before
+// the sorted plan is chosen by the query optimizer.
+
+t = db.jstests_sortf;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+for( i = 0; i < 100; ++i ) {
+ t.save( {a:0,b:0} );
+}
+
+big = new Array( 10 * 1000 * 1000 ).toString();
+for( i = 0; i < 5; ++i ) {
+ t.save( {a:1,b:1,big:big} );
+}
+
+assert.eq( 5, t.find( {a:1} ).sort( {b:1} ).itcount() );
+t.drop(); \ No newline at end of file
diff --git a/jstests/sortg.js b/jstests/sortg.js
new file mode 100644
index 00000000000..bde4ad70061
--- /dev/null
+++ b/jstests/sortg.js
@@ -0,0 +1,64 @@
+// Test that a memory exception is triggered for in memory sorts, but not for indexed sorts.
+
+t = db.jstests_sortg;
+t.drop();
+
+big = new Array( 1000000 ).toString()
+
+for( i = 0; i < 100; ++i ) {
+ t.save( {b:0} );
+}
+
+for( i = 0; i < 40; ++i ) {
+ t.save( {a:0,x:big} );
+}
+
+function memoryException( sortSpec, querySpec ) {
+ querySpec = querySpec || {};
+ var ex = assert.throws( function() {
+ t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount()
+ } );
+ assert( ex.toString().match( /sort/ ) );
+ assert.throws( function() {
+ t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).explain( true )
+ } );
+ assert( ex.toString().match( /sort/ ) );
+}
+
+function noMemoryException( sortSpec, querySpec ) {
+ querySpec = querySpec || {};
+ t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount();
+ t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).explain( true );
+}
+
+// Unindexed sorts.
+memoryException( {a:1} );
+memoryException( {b:1} );
+
+// Indexed sorts.
+noMemoryException( {_id:1} );
+noMemoryException( {$natural:1} );
+
+assert.eq( 1, t.getIndexes().length );
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+t.ensureIndex( {c:1} );
+
+assert.eq( 4, t.getIndexes().length );
+
+// These sorts are now indexed.
+noMemoryException( {a:1} );
+noMemoryException( {b:1} );
+
+// A memory exception is triggered for an unindexed sort involving multiple plans.
+memoryException( {d:1}, {b:null,c:null} );
+
+// With an indexed plan on _id:1 and an unindexed plan on b:1, the indexed plan
+// should succeed even if the unindexed one would exhaust its memory limit.
+noMemoryException( {_id:1}, {b:null} );
+
+// With an unindexed plan on b:1 recorded for a query, the query should be
+// retried when the unindexed plan exhausts its memory limit.
+noMemoryException( {_id:1}, {b:null} );
+t.drop();
diff --git a/jstests/sorth.js b/jstests/sorth.js
new file mode 100644
index 00000000000..1072975a3ec
--- /dev/null
+++ b/jstests/sorth.js
@@ -0,0 +1,140 @@
+// Tests for the $in/sort/limit optimization combined with inequality bounds. SERVER-5777
+
+
+t = db.jstests_sorth;
+t.drop();
+
+/** Assert that the 'a' and 'b' fields of the documents match. */
+function assertMatch( expectedMatch, match ) {
+ if (undefined !== expectedMatch.a) {
+ assert.eq( expectedMatch.a, match.a );
+ }
+ if (undefined !== expectedMatch.b) {
+ assert.eq( expectedMatch.b, match.b );
+ }
+}
+
+/** Assert an expected document or array of documents matches the 'matches' array. */
+function assertMatches( expectedMatches, matches ) {
+ if ( expectedMatches.length == null ) {
+ assertMatch( expectedMatches, matches[ 0 ] );
+ }
+ for( i = 0; i < expectedMatches.length; ++i ) {
+ assertMatch( expectedMatches[ i ], matches[ i ] );
+ }
+}
+
+/** Generate a cursor using global parameters. */
+function find( query ) {
+ return t.find( query ).sort( _sort ).limit( _limit ).hint( _hint );
+}
+
+/** Check the expected matches for a query. */
+function checkMatches( expectedMatch, query ) {
+ result = find( query ).toArray();
+ assertMatches( expectedMatch, result );
+ explain = find( query ).explain();
+ assert.eq( expectedMatch.length || 1, explain.n );
+}
+
+/** Reset data, index, and _sort and _hint globals. */
+function reset( sort, index ) {
+ t.drop();
+ t.save( { a:1, b:1 } );
+ t.save( { a:1, b:2 } );
+ t.save( { a:1, b:3 } );
+ t.save( { a:2, b:0 } );
+ t.save( { a:2, b:3 } );
+ t.save( { a:2, b:5 } );
+ t.ensureIndex( index );
+ _sort = sort;
+ _hint = index;
+}
+
+function checkForwardDirection( sort, index ) {
+ reset( sort, index );
+
+ _limit = -1;
+
+ // Lower bound checks.
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+ checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0 } } );
+ checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:1 } } );
+ checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
+ checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:3 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:3 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:4 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
+
+ // Upper bound checks.
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:0 } } );
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:1 } } );
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:1 } } );
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
+
+ // Lower and upper bounds checks.
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:0 } } );
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lt:1 } } );
+ checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:1 } } );
+ checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0, $lte:1 } } );
+ checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2, $lt:3 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2.5, $lte:3 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2.5, $lte:3 } } );
+
+ // Limit is -2.
+ _limit = -2;
+ checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
+ { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+ // We omit 'a' here because it's not defined whether or not we will see
+ // {a:2, b:3} or {a:1, b:3} first as our sort is over 'b'.
+ checkMatches( [ { a:1, b:2 }, { b:3 } ],
+ { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
+
+ // With an additional document between the $in values.
+ t.save( { a:1.5, b:3 } );
+ checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
+ { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+}
+
+// Basic test with an index suffix order.
+checkForwardDirection( { b:1 }, { a:1, b:1 } );
+// With an additional index field.
+checkForwardDirection( { b:1 }, { a:1, b:1, c:1 } );
+// With an additional reverse direction index field.
+checkForwardDirection( { b:1 }, { a:1, b:1, c:-1 } );
+// With an additional ordered index field.
+checkForwardDirection( { b:1, c:1 }, { a:1, b:1, c:1 } );
+// With an additional reverse direction ordered index field.
+checkForwardDirection( { b:1, c:-1 }, { a:1, b:1, c:-1 } );
+
+function checkReverseDirection( sort, index ) {
+ reset( sort, index );
+ _limit = -1;
+
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
+
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:5 } } );
+ checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.1 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3 } } );
+
+ checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5, $gte:5 } } );
+ checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:2, $gte:1 } } );
+ checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3, $gt:1 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5, $gte:3 } } );
+ checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3, $gt:0 } } );
+}
+
+// With a descending order index.
+checkReverseDirection( { b:-1 }, { a:1, b:-1 } );
+checkReverseDirection( { b:-1 }, { a:1, b:-1, c:1 } );
+checkReverseDirection( { b:-1 }, { a:1, b:-1, c:-1 } );
+checkReverseDirection( { b:-1, c:1 }, { a:1, b:-1, c:1 } );
+checkReverseDirection( { b:-1, c:-1 }, { a:1, b:-1, c:-1 } );
diff --git a/jstests/sorti.js b/jstests/sorti.js
new file mode 100644
index 00000000000..2e5cfe110d7
--- /dev/null
+++ b/jstests/sorti.js
@@ -0,0 +1,25 @@
+// Check that a projection is applied after an in memory sort.
+
+t = db.jstests_sorti;
+t.drop();
+
+t.save( { a:1, b:0 } );
+t.save( { a:3, b:1 } );
+t.save( { a:2, b:2 } );
+t.save( { a:4, b:3 } );
+
+function checkBOrder( query ) {
+ arr = query.toArray();
+ order = [];
+ for( i in arr ) {
+ a = arr[ i ];
+ order.push( a.b );
+ }
+ assert.eq( [ 0, 2, 1, 3 ], order );
+}
+
+checkBOrder( t.find().sort( { a:1 } ) );
+checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
+t.ensureIndex( { b:1 } );
+checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
+checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ).hint( { b:1 } ) );
diff --git a/jstests/sortj.js b/jstests/sortj.js
new file mode 100644
index 00000000000..7a73829b94e
--- /dev/null
+++ b/jstests/sortj.js
@@ -0,0 +1,17 @@
+// Test an in memory sort memory assertion after a plan has "taken over" in the query optimizer
+// cursor.
+
+t = db.jstests_sortj;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+
+big = new Array( 100000 ).toString();
+for( i = 0; i < 1000; ++i ) {
+ t.save( { a:1, b:big } );
+}
+
+assert.throws( function() {
+ t.find( { a:{ $gte:0 }, c:null } ).sort( { d:1 } ).itcount();
+ } );
+t.drop(); \ No newline at end of file
diff --git a/jstests/sortk.js b/jstests/sortk.js
new file mode 100644
index 00000000000..3895a34c3ac
--- /dev/null
+++ b/jstests/sortk.js
@@ -0,0 +1,140 @@
+// End-to-end testing for index scan explosion + merge sort.
+// SERVER-5063 and SERVER-1205.
+t = db.jstests_sortk;
+t.drop();
+
+function resetCollection() {
+ t.drop();
+ t.save( { a:1, b:1 } );
+ t.save( { a:1, b:2 } );
+ t.save( { a:1, b:3 } );
+ t.save( { a:2, b:4 } );
+ t.save( { a:2, b:5 } );
+ t.save( { a:2, b:0 } );
+}
+
+resetCollection();
+t.ensureIndex( { a:1, b:1 } );
+
+function simpleQuery( extraFields, sort, hint ) {
+ query = { a:{ $in:[ 1, 2 ] } };
+ Object.extend( query, extraFields );
+ sort = sort || { b:1 };
+ hint = hint || { a:1, b:1 };
+ return t.find( query ).sort( sort ).hint( hint );
+}
+
+function simpleQueryWithLimit( limit ) {
+ return simpleQuery().limit( limit );
+}
+
+// The limit is -1.
+assert.eq( 0, simpleQueryWithLimit( -1 )[ 0 ].b );
+
+// The limit is -2.
+assert.eq( 0, simpleQueryWithLimit( -2 )[ 0 ].b );
+assert.eq( 1, simpleQueryWithLimit( -2 )[ 1 ].b );
+
+// A skip is applied.
+assert.eq( 1, simpleQueryWithLimit( -1 ).skip( 1 )[ 0 ].b );
+
+// No limit is applied.
+assert.eq( 6, simpleQueryWithLimit( 0 ).itcount() );
+assert.eq( 6, simpleQueryWithLimit( 0 ).explain().nscanned );
+assert.eq( 5, simpleQueryWithLimit( 0 ).skip( 1 ).itcount() );
+
+// The query has additional constraints, preventing limit optimization.
+assert.eq( 2, simpleQuery( { $where:'this.b>=2' } ).limit( -1 )[ 0 ].b );
+
+// The sort order is the reverse of the index order.
+assert.eq( 5, simpleQuery( {}, { b:-1 } ).limit( -1 )[ 0 ].b );
+
+// The sort order is the reverse of the index order on a constrained field.
+assert.eq( 0, simpleQuery( {}, { a:-1, b:1 } ).limit( -1 )[ 0 ].b );
+
+// Without a hint, multiple cursors are attempted.
+assert.eq( 0, t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 )[ 0 ].b );
+explain = t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 ).explain( true );
+assert.eq( 1, explain.n );
+
+// The expected first result now comes from the first interval.
+t.remove( { b:0 } );
+assert.eq( 1, simpleQueryWithLimit( -1 )[ 0 ].b );
+
+// With three intervals.
+
+function inThreeIntervalQueryWithLimit( limit ) {
+ return t.find( { a:{ $in: [ 1, 2, 3 ] } } ).sort( { b:1 } ).hint( { a:1, b:1 } ).limit( limit );
+}
+
+assert.eq( 1, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
+assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
+assert.eq( 2, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
+t.save( { a:3, b:0 } );
+assert.eq( 0, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
+assert.eq( 0, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
+assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
+
+// The index is multikey.
+t.remove({});
+t.save( { a:1, b:[ 0, 1, 2 ] } );
+t.save( { a:2, b:[ 0, 1, 2 ] } );
+t.save( { a:1, b:5 } );
+assert.eq( 3, simpleQueryWithLimit( -3 ).itcount() );
+
+// The index ordering is reversed.
+resetCollection();
+t.ensureIndex( { a:1, b:-1 } );
+
+// The sort order is consistent with the index order.
+assert.eq( 5, simpleQuery( {}, { b:-1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
+
+// The sort order is the reverse of the index order.
+assert.eq( 0, simpleQuery( {}, { b:1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
+
+// An equality constraint precedes the $in constraint.
+t.drop();
+t.ensureIndex( { a:1, b:1, c:1 } );
+t.save( { a:0, b:0, c:-1 } );
+t.save( { a:0, b:2, c:1 } );
+t.save( { a:1, b:1, c:1 } );
+t.save( { a:1, b:1, c:2 } );
+t.save( { a:1, b:1, c:3 } );
+t.save( { a:1, b:2, c:4 } );
+t.save( { a:1, b:2, c:5 } );
+t.save( { a:1, b:2, c:0 } );
+
+function eqInQueryWithLimit( limit ) {
+ return t.find( { a:1, b:{ $in:[ 1, 2 ] } } ).sort( { c: 1 } ).hint( { a:1, b:1, c:1 } ).
+ limit( limit );
+}
+
+function andEqInQueryWithLimit( limit ) {
+ return t.find( { $and:[ { a:1 }, { b:{ $in:[ 1, 2 ] } } ] } ).sort( { c: 1 } ).
+ hint( { a:1, b:1, c:1 } ).limit( limit );
+}
+
+// The limit is -1.
+assert.eq( 0, eqInQueryWithLimit( -1 )[ 0 ].c );
+assert.eq( 0, andEqInQueryWithLimit( -1 )[ 0 ].c );
+
+// The limit is -2.
+assert.eq( 0, eqInQueryWithLimit( -2 )[ 0 ].c );
+assert.eq( 1, eqInQueryWithLimit( -2 )[ 1 ].c );
+assert.eq( 0, andEqInQueryWithLimit( -2 )[ 0 ].c );
+assert.eq( 1, andEqInQueryWithLimit( -2 )[ 1 ].c );
+
+function inQueryWithLimit( limit, sort ) {
+ sort = sort || { b:1 };
+ return t.find( { a:{ $in:[ 0, 1 ] } } ).sort( sort ).hint( { a:1, b:1, c:1 } ).limit( limit );
+}
+
+// The index has two suffix fields unconstrained by the query.
+assert.eq( 0, inQueryWithLimit( -1 )[ 0 ].b );
+
+// The index has two ordered suffix fields unconstrained by the query.
+assert.eq( 0, inQueryWithLimit( -1, { b:1, c:1 } )[ 0 ].b );
+
+// The index has two ordered suffix fields unconstrained by the query and the limit is -2.
+assert.eq( 0, inQueryWithLimit( -2, { b:1, c:1 } )[ 0 ].b );
+assert.eq( 1, inQueryWithLimit( -2, { b:1, c:1 } )[ 1 ].b );
diff --git a/jstests/splitvector.js b/jstests/splitvector.js
new file mode 100644
index 00000000000..d239625de67
--- /dev/null
+++ b/jstests/splitvector.js
@@ -0,0 +1,309 @@
+// -------------------------
+// SPLITVECTOR TEST UTILS
+// -------------------------
+
+// -------------------------
+// assertChunkSizes verifies that a given 'splitVec' divides the 'test.jstest_splitvector'
+// collection in 'maxChunkSize' approximately-sized chunks. Its asserts fail otherwise.
+// @param splitVec: an array with keys for field 'x'
+// e.g. [ { x : 1927 }, { x : 3855 }, ...
+// @param numDocs: domain of 'x' field
+// e.g. 20000
+// @param maxChunkSize is in MBs.
+//
+assertChunkSizes = function ( splitVec , numDocs , maxChunkSize , msg ){
+ splitVec = [{ x: -1 }].concat( splitVec );
+ splitVec.push( { x: numDocs+1 } );
+ for ( i=0; i<splitVec.length-1; i++) {
+ min = splitVec[i];
+ max = splitVec[i+1];
+ size = db.runCommand( { datasize: "test.jstests_splitvector" , min: min , max: max } ).size;
+
+ // It is okay for the last chunk to be smaller. A collection's size does not
+ // need to be exactly a multiple of maxChunkSize.
+ if ( i < splitVec.length - 2 )
+ assert.close( maxChunkSize , size , "A"+i , -3 );
+ else
+ assert.gt( maxChunkSize , size , "A"+i , msg + "b" );
+ }
+}
+
+// Takes two documents and asserts that both contain exactly the same set of field names.
+// This is useful for checking that splitPoints have the same format as the original key pattern,
+// even when sharding on a prefix key.
+// Not very efficient, so only call when # of field names is small
+var assertFieldNamesMatch = function( splitPoint , keyPattern ){
+ for ( var p in splitPoint ) {
+ if( splitPoint.hasOwnProperty( p ) ) {
+ assert( keyPattern.hasOwnProperty( p ) , "property " + p + " not in keyPattern" );
+ }
+ }
+ for ( var p in keyPattern ) {
+ if( keyPattern.hasOwnProperty( p ) ){
+ assert( splitPoint.hasOwnProperty( p ) , "property " + p + " not in splitPoint" );
+ }
+ }
+}
+
+// turn off powerOf2, this test checks regular allocation
+var resetCollection = function() {
+ f.drop();
+ db.createCollection(f.getName(), {usePowerOf2Sizes: false});
+}
+
+// -------------------------
+// TESTS START HERE
+// -------------------------
+f = db.jstests_splitvector;
+resetCollection();
+
+// -------------------------
+// Case 1: missing parameters
+
+assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" } ).ok , "1a" );
+assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , maxChunkSize: 1} ).ok , "1b" );
+
+
+// -------------------------
+// Case 2: missing index
+
+assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).ok , "2");
+
+
+// -------------------------
+// Case 3: empty collection
+
+f.ensureIndex( { x: 1} );
+assert.eq( [], db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).splitKeys , "3");
+
+
+// -------------------------
+// Case 4: uniform collection
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case4 = function() {
+ // Get baseline document size
+ filler = "";
+ while( filler.length < 500 ) filler += "a";
+ f.save( { x: 0, y: filler } );
+ docSize = db.runCommand( { datasize: "test.jstests_splitvector" } ).size;
+ assert.gt( docSize, 500 , "4a" );
+
+ // Fill collection and get split vector for 1MB maxChunkSize
+ numDocs = 20000;
+ for( i=1; i<numDocs; i++ ){
+ f.save( { x: i, y: filler } );
+ }
+ db.getLastError();
+ res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+
+ // splitVector aims at getting half-full chunks after split
+ factor = 0.5;
+
+ assert.eq( true , res.ok , "4b" );
+ assert.close( numDocs*docSize / ((1<<20) * factor), res.splitKeys.length , "num split keys" , -1 );
+ assertChunkSizes( res.splitKeys , numDocs, (1<<20) * factor , "4d" );
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+}
+case4();
+
+// -------------------------
+// Case 5: limit number of split points
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case5 = function() {
+ // Fill collection and get split vector for 1MB maxChunkSize
+ numDocs = 10000;
+ for( i=1; i<numDocs; i++ ){
+ f.save( { x: i, y: filler } );
+ }
+ db.getLastError();
+ res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxSplitPoints: 1} );
+
+ assert.eq( true , res.ok , "5a" );
+ assert.eq( 1 , res.splitKeys.length , "5b" );
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+}
+case5();
+
+// -------------------------
+// Case 6: limit number of objects in a chunk
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case6 = function() {
+ // Fill collection and get split vector for 1MB maxChunkSize
+ numDocs = 10000;
+ for( i=1; i<numDocs; i++ ){
+ f.save( { x: i, y: filler } );
+ }
+ db.getLastError();
+ res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxChunkObjects: 500} );
+
+ assert.eq( true , res.ok , "6a" );
+ assert.eq( 19 , res.splitKeys.length , "6b" );
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+}
+case6();
+
+// -------------------------
+// Case 7: enough occurrences of min key documents to pass the chunk limit
+// [1111111111111111,2,3)
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case7 = function() {
+ // Fill collection and get split vector for 1MB maxChunkSize
+ numDocs = 2100;
+ for( i=1; i<numDocs; i++ ){
+ f.save( { x: 1, y: filler } );
+ }
+
+ for( i=1; i<10; i++ ){
+ f.save( { x: 2, y: filler } );
+ }
+ db.getLastError();
+ res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+
+ assert.eq( true , res.ok , "7a" );
+ assert.eq( 2 , res.splitKeys[0].x, "7b");
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+}
+case7();
+
+// -------------------------
+// Case 8: few occurrences of min key, and enough of some other that we cannot split it
+// [1, 22222222222222, 3)
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case8 = function() {
+ for( i=1; i<10; i++ ){
+ f.save( { x: 1, y: filler } );
+ }
+
+ numDocs = 2100;
+ for( i=1; i<numDocs; i++ ){
+ f.save( { x: 2, y: filler } );
+ }
+
+ for( i=1; i<10; i++ ){
+ f.save( { x: 3, y: filler } );
+ }
+
+ db.getLastError();
+ res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+
+ assert.eq( true , res.ok , "8a" );
+ assert.eq( 2 , res.splitKeys.length , "8b" );
+ assert.eq( 2 , res.splitKeys[0].x , "8c" );
+ assert.eq( 3 , res.splitKeys[1].x , "8d" );
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+}
+case8();
+
+// -------------------------
+// Case 9: splitVector "force" mode, where we split (possibly small) chunks in the middle
+//
+
+resetCollection();
+f.ensureIndex( { x: 1 } );
+
+var case9 = function() {
+ f.save( { x: 1 } );
+ f.save( { x: 2 } );
+ f.save( { x: 3 } );
+ db.getLastError();
+
+ assert.eq( 3 , f.count() );
+ print( f.getFullName() )
+
+ res = db.runCommand( { splitVector: f.getFullName() , keyPattern: {x:1} , force : true } );
+
+ assert.eq( true , res.ok , "9a" );
+ assert.eq( 1 , res.splitKeys.length , "9b" );
+ assert.eq( 2 , res.splitKeys[0].x , "9c" );
+
+ if ( db.runCommand( "isMaster" ).msg != "isdbgrid" ) {
+ res = db.adminCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , force : true } );
+
+ assert.eq( true , res.ok , "9a: " + tojson(res) );
+ assert.eq( 1 , res.splitKeys.length , "9b: " + tojson(res) );
+ assert.eq( 2 , res.splitKeys[0].x , "9c: " + tojson(res) );
+ for( i=0; i < res.splitKeys.length; i++ ){
+ assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ }
+ }
+}
+case9();
+
+// -------------------------
+// Repeat all cases using prefix shard key.
+//
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case4();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case4();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case5();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case5();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case6();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case6();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case7();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case7();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case8();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case8();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: 1 } );
+case9();
+
+resetCollection();
+f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+case9();
+
+print("PASSED");
diff --git a/jstests/stages_and_hash.js b/jstests/stages_and_hash.js
new file mode 100644
index 00000000000..42ae0c8e34d
--- /dev/null
+++ b/jstests/stages_and_hash.js
@@ -0,0 +1,42 @@
+t = db.stages_and_hashed;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i, baz: i});
+}
+
+t.ensureIndex({foo: 1})
+t.ensureIndex({bar: 1})
+t.ensureIndex({baz: 1})
+
+// Scan foo <= 20
+ixscan1 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{foo: 1},
+ startKey: {"": 20}, endKey: {},
+ endKeyInclusive: true, direction: -1}}};
+
+// Scan bar >= 40
+ixscan2 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{bar: 1},
+ startKey: {"": 40}, endKey: {},
+ endKeyInclusive: true, direction: 1}}};
+
+// bar = 50 - foo
+// Intersection is (foo=0 bar=50, foo=1 bar=49, ..., foo=10 bar=40)
+andix1ix2 = {andHash: {args: { nodes: [ixscan1, ixscan2]}}}
+res = db.runCommand({stageDebug: andix1ix2});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 11);
+
+// This should raise an error as we can't filter on baz since we haven't done a fetch and it's not
+// in the index data.
+andix1ix2badfilter = {andHash: {filter: {baz: 5}, args: {nodes: [ixscan1, ixscan2]}}};
+res = db.runCommand({stageDebug: andix1ix2badfilter});
+assert.eq(res.ok, 0);
+
+// Filter predicates from 2 indices. Tests that we union the idx info.
+andix1ix2filter = {andHash: {filter: {bar: {$in: [45, 46, 48]},
+ foo: {$in: [4,5,6]}},
+ args: {nodes: [ixscan1, ixscan2]}}};
+res = db.runCommand({stageDebug: andix1ix2filter});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 2);
diff --git a/jstests/stages_and_sorted.js b/jstests/stages_and_sorted.js
new file mode 100644
index 00000000000..fd96ab24153
--- /dev/null
+++ b/jstests/stages_and_sorted.js
@@ -0,0 +1,49 @@
+t = db.stages_and_sorted;
+t.drop();
+
+var N = 10;
+for (var i = 0; i < N; ++i) {
+ // These will show up in the index scans below but must not be outputted in the and.
+ t.insert({foo: 1});
+ t.insert({foo: 1, bar: 1});
+ t.insert({baz: 12});
+ t.insert({bar: 1});
+ // This is the only thing that should be outputted in the and.
+ t.insert({foo: 1, bar:1, baz: 12});
+ t.insert({bar: 1});
+ t.insert({bar:1, baz: 12})
+ t.insert({baz: 12});
+ t.insert({foo: 1, baz: 12});
+ t.insert({baz: 12});
+}
+
+t.ensureIndex({foo: 1});
+t.ensureIndex({bar: 1});
+t.ensureIndex({baz: 1});
+
+// Scan foo == 1
+ixscan1 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{foo: 1},
+ startKey: {"": 1}, endKey: {"": 1},
+ endKeyInclusive: true, direction: 1}}};
+
+// Scan bar == 1
+ixscan2 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{bar: 1},
+ startKey: {"": 1}, endKey: {"": 1},
+ endKeyInclusive: true, direction: 1}}};
+
+// Scan baz == 12
+ixscan3 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{baz: 1},
+ startKey: {"": 12}, endKey: {"": 12},
+ endKeyInclusive: true, direction: 1}}};
+
+// Intersect foo==1 with bar==1 with baz==12.
+andix1ix2 = {andSorted: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
+res = db.runCommand({stageDebug: andix1ix2});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, N);
+
+// Might as well make sure that hashed does the same thing.
+andix1ix2hash = {andHash: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
+res = db.runCommand({stageDebug: andix1ix2hash});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, N);
diff --git a/jstests/stages_collection_scan.js b/jstests/stages_collection_scan.js
new file mode 100644
index 00000000000..d7de30cf8e7
--- /dev/null
+++ b/jstests/stages_collection_scan.js
@@ -0,0 +1,43 @@
+// Test basic query stage collection scan functionality.
+t = db.stages_collection_scan;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i});
+}
+
+forward = {cscan: {args: {name: "stages_collection_scan", direction: 1}}}
+res = db.runCommand({stageDebug: forward});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, N);
+assert.eq(res.results[0].foo, 0);
+assert.eq(res.results[49].foo, 49);
+
+// And, backwards.
+backward = {cscan: {args: {name: "stages_collection_scan", direction: -1}}}
+res = db.runCommand({stageDebug: backward});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, N);
+assert.eq(res.results[0].foo, 49);
+assert.eq(res.results[49].foo, 0);
+
+forwardFiltered = {cscan: {args: {name: "stages_collection_scan", direction: 1},
+ filter: {foo: {$lt: 25}}}}
+res = db.runCommand({stageDebug: forwardFiltered});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 25);
+assert.eq(res.results[0].foo, 0);
+assert.eq(res.results[24].foo, 24);
+
+backwardFiltered = {cscan: {args: {name: "stages_collection_scan", direction: -1},
+ filter: {foo: {$lt: 25}}}}
+res = db.runCommand({stageDebug: backwardFiltered});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 25);
+assert.eq(res.results[0].foo, 24);
+assert.eq(res.results[24].foo, 0);
diff --git a/jstests/stages_fetch.js b/jstests/stages_fetch.js
new file mode 100644
index 00000000000..3e2c01df91a
--- /dev/null
+++ b/jstests/stages_fetch.js
@@ -0,0 +1,33 @@
+// Test basic fetch functionality.
+t = db.stages_fetch;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i, baz: i});
+}
+
+t.ensureIndex({foo: 1});
+
+// 20 <= foo <= 30
+// bar == 25 (not covered, should error.)
+ixscan1 = {ixscan: {args:{name: "stages_fetch", keyPattern:{foo:1},
+ startKey: {"": 20},
+ endKey: {"" : 30}, endKeyInclusive: true,
+ direction: 1},
+ filter: {bar: 25}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(db.getLastError());
+assert.eq(res.ok, 0);
+
+// Now, add a fetch. We should be able to filter on the non-covered field since we fetched the obj.
+ixscan2 = {ixscan: {args:{name: "stages_fetch", keyPattern:{foo:1},
+ startKey: {"": 20},
+ endKey: {"" : 30}, endKeyInclusive: true,
+ direction: 1}}}
+fetch = {fetch: {args: {node: ixscan2}, filter: {bar: 25}}}
+res = db.runCommand({stageDebug: fetch});
+printjson(res);
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 1);
diff --git a/jstests/stages_ixscan.js b/jstests/stages_ixscan.js
new file mode 100644
index 00000000000..a7cd6bedc3a
--- /dev/null
+++ b/jstests/stages_ixscan.js
@@ -0,0 +1,76 @@
+// Test basic query stage index scan functionality.
+t = db.stages_ixscan;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i, baz: i});
+}
+
+t.ensureIndex({foo: 1})
+t.ensureIndex({foo: 1, baz: 1});
+
+// foo <= 20
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {}, endKeyInclusive: true,
+ direction: -1}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 21);
+
+// 20 <= foo < 30
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {"" : 30}, endKeyInclusive: false,
+ direction: 1}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 10);
+
+// 20 <= foo <= 30
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {"" : 30}, endKeyInclusive: true,
+ direction: 1}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 11);
+
+// 20 <= foo <= 30
+// foo == 25
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {"" : 30}, endKeyInclusive: true,
+ direction: 1},
+ filter: {foo: 25}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 1);
+
+// 20 <= foo <= 30
+// baz == 25 (in index so we can match against it.)
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo:1, baz: 1},
+ startKey: {"": 20, "":MinKey},
+ endKey: {"" : 30, "":MaxKey}, endKeyInclusive: true,
+ direction: 1},
+ filter: {baz: 25}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 1);
+
+// 20 <= foo <= 30
+// bar == 25 (not covered, should error.)
+ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo:1, baz: 1},
+ startKey: {"": 20, "":MinKey},
+ endKey: {"" : 30, "":MaxKey}, endKeyInclusive: true,
+ direction: 1},
+ filter: {bar: 25}}};
+res = db.runCommand({stageDebug: ixscan1});
+assert(db.getLastError());
+assert.eq(res.ok, 0);
diff --git a/jstests/stages_limit_skip.js b/jstests/stages_limit_skip.js
new file mode 100644
index 00000000000..9441e4cd65b
--- /dev/null
+++ b/jstests/stages_limit_skip.js
@@ -0,0 +1,34 @@
+// Test limit and skip
+t = db.stages_limit_skip;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i, baz: i});
+}
+
+t.ensureIndex({foo: 1})
+
+// foo <= 20, decreasing
+// Limit of 5 results.
+ixscan1 = {ixscan: {args:{name: "stages_limit_skip", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {}, endKeyInclusive: true,
+ direction: -1}}};
+limit1 = {limit: {args: {node: ixscan1, num: 5}}}
+res = db.runCommand({stageDebug: limit1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 5);
+assert.eq(res.results[0].foo, 20);
+assert.eq(res.results[4].foo, 16);
+
+// foo <= 20, decreasing
+// Skip 5 results.
+skip1 = {skip: {args: {node: ixscan1, num: 5}}}
+res = db.runCommand({stageDebug: skip1});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 16);
+assert.eq(res.results[0].foo, 15);
+assert.eq(res.results[res.results.length - 1].foo, 0);
diff --git a/jstests/stages_mergesort.js b/jstests/stages_mergesort.js
new file mode 100644
index 00000000000..394d60b5b20
--- /dev/null
+++ b/jstests/stages_mergesort.js
@@ -0,0 +1,32 @@
+// Test query stage merge sorting.
+t = db.stages_mergesort;
+t.drop();
+
+var N = 10;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: 1, bar: N - i - 1});
+ t.insert({baz: 1, bar: i})
+}
+
+t.ensureIndex({foo: 1, bar:1})
+t.ensureIndex({baz: 1, bar:1})
+
+// foo == 1
+// We would (internally) use "": MinKey and "": MaxKey for the bar index bounds.
+ixscan1 = {ixscan: {args:{name: "stages_mergesort", keyPattern:{foo: 1, bar:1},
+ startKey: {"": 1, "": 0},
+ endKey: {"": 1, "": 100000}, endKeyInclusive: true,
+ direction: 1}}};
+// baz == 1
+ixscan2 = {ixscan: {args:{name: "stages_mergesort", keyPattern:{baz: 1, bar:1},
+ startKey: {"": 1, "": 0},
+ endKey: {"": 1, "": 100000}, endKeyInclusive: true,
+ direction: 1}}};
+
+mergesort = {mergeSort: {args: {nodes: [ixscan1, ixscan2], pattern: {bar: 1}}}};
+res = db.runCommand({stageDebug: mergesort});
+assert(!db.getLastError());
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 2 * N);
+assert.eq(res.results[0].bar, 0);
+assert.eq(res.results[2 * N - 1].bar, N - 1);
diff --git a/jstests/stages_or.js b/jstests/stages_or.js
new file mode 100644
index 00000000000..bb0e02b11d4
--- /dev/null
+++ b/jstests/stages_or.js
@@ -0,0 +1,33 @@
+// Test basic OR functionality
+t = db.stages_or;
+t.drop();
+
+var N = 50;
+for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i, baz: i});
+}
+
+t.ensureIndex({foo: 1})
+t.ensureIndex({bar: 1})
+t.ensureIndex({baz: 1})
+
+// baz >= 40
+ixscan1 = {ixscan: {args:{name: "stages_or", keyPattern:{baz: 1},
+ startKey: {"": 40}, endKey: {},
+ endKeyInclusive: true, direction: 1}}};
+// foo >= 40
+ixscan2 = {ixscan: {args:{name: "stages_or", keyPattern:{foo: 1},
+ startKey: {"": 40}, endKey: {},
+ endKeyInclusive: true, direction: 1}}};
+
+// OR of baz and foo. Baz == foo and we dedup.
+orix1ix2 = {or: {args: {nodes: [ixscan1, ixscan2], dedup:true}}};
+res = db.runCommand({stageDebug: orix1ix2});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 10);
+
+// No deduping, 2x the results.
+orix1ix2nodd = {or: {args: {nodes: [ixscan1, ixscan2], dedup:false}}};
+res = db.runCommand({stageDebug: orix1ix2nodd});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 20);
diff --git a/jstests/stages_sort.js b/jstests/stages_sort.js
new file mode 100644
index 00000000000..f7200cbac03
--- /dev/null
+++ b/jstests/stages_sort.js
@@ -0,0 +1,36 @@
+// Test query stage sorting.
+if (false) {
+ t = db.stages_sort;
+ t.drop();
+
+ var N = 50;
+ for (var i = 0; i < N; ++i) {
+ t.insert({foo: i, bar: N - i});
+ }
+
+ t.ensureIndex({foo: 1})
+
+ // Foo <= 20, descending.
+ ixscan1 = {ixscan: {args:{name: "stages_sort", keyPattern:{foo: 1},
+ startKey: {"": 20},
+ endKey: {}, endKeyInclusive: true,
+ direction: -1}}};
+
+ // Sort with foo ascending.
+ sort1 = {sort: {args: {node: ixscan1, pattern: {foo: 1}}}};
+ res = db.runCommand({stageDebug: sort1});
+ assert(!db.getLastError());
+ assert.eq(res.ok, 1);
+ assert.eq(res.results.length, 21);
+ assert.eq(res.results[0].foo, 0);
+ assert.eq(res.results[20].foo, 20);
+
+ // Sort with a limit.
+ //sort2 = {sort: {args: {node: ixscan1, pattern: {foo: 1}, limit: 2}}};
+ //res = db.runCommand({stageDebug: sort2});
+ //assert(!db.getLastError());
+ //assert.eq(res.ok, 1);
+ //assert.eq(res.results.length, 2);
+ //assert.eq(res.results[0].foo, 0);
+ //assert.eq(res.results[1].foo, 1);
+}
diff --git a/jstests/stages_text.js b/jstests/stages_text.js
new file mode 100644
index 00000000000..8407ffe1e14
--- /dev/null
+++ b/jstests/stages_text.js
@@ -0,0 +1,17 @@
+// Test very basic functionality of text stage
+
+t = db.stages_text;
+t.drop();
+t.save({x: "az b x"})
+
+t.ensureIndex({x: "text"})
+
+// We expect to retrieve 'b'
+res = db.runCommand({stageDebug: {text: {args: {name: "test.stages_text", search: "b"}}}});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 1);
+
+// I have not been indexed yet.
+res = db.runCommand({stageDebug: {text: {args: {name: "test.stages_text", search: "hari"}}}});
+assert.eq(res.ok, 1);
+assert.eq(res.results.length, 0);
diff --git a/jstests/stats.js b/jstests/stats.js
new file mode 100644
index 00000000000..08a74a00fb7
--- /dev/null
+++ b/jstests/stats.js
@@ -0,0 +1,23 @@
+
+var statsDB = db.getSiblingDB( "stats" );
+statsDB.dropDatabase();
+var t = statsDB.stats1;
+
+t.save( { a : 1 } );
+
+assert.lt( 0 , t.dataSize() , "A" );
+assert.lt( t.dataSize() , t.storageSize() , "B" );
+assert.lt( 0 , t.totalIndexSize() , "C" );
+
+var stats = statsDB.stats();
+assert.gt( stats.fileSize, 0 );
+assert.eq( stats.dataFileVersion.major, 4 );
+assert.eq( stats.dataFileVersion.minor, 5 );
+
+// test empty database; should be no dataFileVersion
+statsDB.dropDatabase();
+var statsEmptyDB = statsDB.stats();
+assert.eq( statsEmptyDB.fileSize, 0 );
+assert.eq( {}, statsEmptyDB.dataFileVersion );
+
+statsDB.dropDatabase();
diff --git a/jstests/storageDetailsCommand.js b/jstests/storageDetailsCommand.js
new file mode 100644
index 00000000000..1340a1038d5
--- /dev/null
+++ b/jstests/storageDetailsCommand.js
@@ -0,0 +1,98 @@
+db.jstests_commands.drop();
+db.createCollection("jstests_commands");
+
+t = db.jstests_commands;
+
+for (var i = 0; i < 3000; ++i) {
+ t.insert({i: i, d: i % 13});
+}
+
+function test() {
+ var result = t.diskStorageStats({numberOfSlices: 100});
+ if (result["bad cmd"]) {
+ print("storageDetails command not available: skipping");
+ return;
+ }
+
+ assert.commandWorked(result);
+
+ function checkDiskStats(data) {
+ assert(isNumber(data.extentHeaderBytes));
+ assert(isNumber(data.recordHeaderBytes));
+ assert(isNumber(data.numEntries));
+ assert(data.bsonBytes instanceof NumberLong);
+ assert(data.recBytes instanceof NumberLong);
+ assert(data.onDiskBytes instanceof NumberLong);
+ assert(isNumber(data.outOfOrderRecs));
+ assert(isNumber(data.characteristicCount));
+ assert(isNumber(data.characteristicAvg));
+ assert(data.freeRecsPerBucket instanceof Array);
+ }
+
+ assert(result.extents && result.extents instanceof Array);
+
+ var extents = result.extents;
+
+ for (var i = 0; i < extents.length; ++i) {
+ assert(isObject(extents[i]));
+ assert.neq(extents[i], null);
+ assert(extents[i].range instanceof Array);
+ assert.eq(extents[i].range.length, 2);
+ assert.eq(extents[i].isCapped, false);
+ checkDiskStats(extents[i]);
+ assert(extents[i].slices instanceof Array);
+ for (var c = 0; c < extents[i].slices[c]; ++c) {
+ assert(isObject(extents[i].slices[c]));
+ assert.neq(extents[i].slices[c], null);
+ checkStats(extents[i].slices[c]);
+ }
+ }
+
+ result = t.pagesInRAM({numberOfSlices: 100});
+ assert(result.ok);
+
+ assert(result.extents instanceof Array);
+ var extents = result.extents;
+
+ for (var i = 0; i < result.extents.length; ++i) {
+ assert(isObject(extents[i]));
+ assert.neq(extents[i], null);
+ assert(isNumber(extents[i].pageBytes));
+ assert(isNumber(extents[i].onDiskBytes));
+ assert(isNumber(extents[i].inMem));
+
+ assert(extents[i].slices instanceof Array);
+ for (var c = 0; c < extents[i].slices.length; ++c) {
+ assert(isNumber(extents[i].slices[c]));
+ }
+ }
+
+ function checkErrorConditions(helper) {
+ var result = helper.apply(t, [{extent: 'a'}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/extent.*must be a number/));
+
+ result = helper.apply(t, [{range: [2, 4]}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/range is only allowed.*extent/));
+
+ result = helper.apply(t, [{extent: 3, range: [3, 'a']}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/must be an array.*numeric elements/));
+
+ result = helper.apply(t, [{granularity: 'a'}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/granularity.*number/));
+
+ result = helper.apply(t, [{numberOfSlices: 'a'}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/numberOfSlices.*number/));
+
+ result = helper.apply(t, [{extent: 100}]);
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/extent.*does not exist/));
+ }
+
+ checkErrorConditions(t.diskStorageStats);
+ checkErrorConditions(t.pagesInRAM);
+}
diff --git a/jstests/storefunc.js b/jstests/storefunc.js
new file mode 100644
index 00000000000..f5d1c3be48a
--- /dev/null
+++ b/jstests/storefunc.js
@@ -0,0 +1,44 @@
+// Use a private sister database to avoid conflicts with other tests that use system.js
+var testdb = db.getSisterDB("storefunc");
+
+s = testdb.system.js;
+s.remove({});
+assert.eq( 0 , s.count() , "setup - A" );
+
+s.save( { _id : "x" , value : "3" } );
+assert.isnull( testdb.getLastError() , "setup - B" );
+assert.eq( 1 , s.count() , "setup - C" );
+
+s.remove( { _id : "x" } );
+assert.eq( 0 , s.count() , "setup - D" );
+s.save( { _id : "x" , value : "4" } );
+assert.eq( 1 , s.count() , "setup - E" );
+
+assert.eq( 4 , s.findOne( { _id : "x" } ).value , "E2 " );
+
+assert.eq( 4 , s.findOne().value , "setup - F" );
+s.update( { _id : "x" } , { $set : { value : 5 } } );
+assert.eq( 1 , s.count() , "setup - G" );
+assert.eq( 5 , s.findOne().value , "setup - H" );
+
+assert.eq( 5 , testdb.eval( "return x" ) , "exec - 1 " );
+
+s.update( { _id : "x" } , { $set : { value : 6 } } );
+assert.eq( 1 , s.count() , "setup2 - A" );
+assert.eq( 6 , s.findOne().value , "setup - B" );
+
+assert.eq( 6 , testdb.eval( "return x" ) , "exec - 2 " );
+
+
+
+s.insert( { _id : "bar" , value : function( z ){ return 17 + z; } } );
+assert.eq( 22 , testdb.eval( "return bar(5);" ) , "exec - 3 " );
+
+assert( s.getIndexKeys().length > 0 , "no indexes" );
+assert( s.getIndexKeys()[0]._id , "no _id index" );
+
+assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C1" );
+s.save( { _id : "zzz" , value : 5 } )
+assert.eq( "number" , testdb.eval( function(){ return typeof(zzz); } ) , "C2" );
+s.remove( { _id : "zzz" } );
+assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C3" );
diff --git a/jstests/string_with_nul_bytes.js b/jstests/string_with_nul_bytes.js
new file mode 100644
index 00000000000..a1f6e395dd2
--- /dev/null
+++ b/jstests/string_with_nul_bytes.js
@@ -0,0 +1,9 @@
+// SERVER-6649 - issues round-tripping strings with embedded NUL bytes
+
+t = db.string_with_nul_bytes.js;
+t.drop();
+
+string = "string with a NUL (\0) byte";
+t.insert({str:string});
+assert.eq(t.findOne().str, string);
+assert.eq(t.findOne().str.length, string.length); // just to be sure
diff --git a/jstests/sub1.js b/jstests/sub1.js
new file mode 100644
index 00000000000..9e643f811fd
--- /dev/null
+++ b/jstests/sub1.js
@@ -0,0 +1,14 @@
+// sub1.js
+
+t = db.sub1;
+t.drop();
+
+x = { a : 1 , b : { c : { d : 2 } } }
+
+t.save( x );
+
+y = t.findOne();
+
+assert.eq( 1 , y.a );
+assert.eq( 2 , y.b.c.d );
+print( tojson( y ) );
diff --git a/jstests/temp_cleanup.js b/jstests/temp_cleanup.js
new file mode 100644
index 00000000000..e827083d605
--- /dev/null
+++ b/jstests/temp_cleanup.js
@@ -0,0 +1,16 @@
+
+mydb = db.getSisterDB( "temp_cleanup_test" )
+
+t = mydb.tempCleanup
+t.drop()
+
+t.insert( { x : 1 } )
+
+res = t.mapReduce( function(){ emit(1,1); } , function(){ return 1; } , "xyz" );
+printjson( res );
+
+assert.eq( 1 , t.count() , "A1" )
+assert.eq( 1 , mydb[res.result].count() , "A2" )
+
+mydb.dropDatabase()
+
diff --git a/jstests/testminmax.js b/jstests/testminmax.js
new file mode 100644
index 00000000000..803f1b48a0b
--- /dev/null
+++ b/jstests/testminmax.js
@@ -0,0 +1,14 @@
+t = db.minmaxtest;
+t.drop();
+t.insert({"_id" : "IBM.N|00001264779918428889", "DESCRIPTION" : { "n" : "IBMSTK2", "o" : "IBM STK", "s" : "changed" } });
+t.insert({ "_id" : "VOD.N|00001264779918433344", "COMPANYNAME" : { "n" : "Vodafone Group PLC 2", "o" : "Vodafone Group PLC", "s" : "changed" } });
+t.insert({ "_id" : "IBM.N|00001264779918437075", "DESCRIPTION" : { "n" : "IBMSTK3", "o" : "IBM STK2", "s" : "changed" } });
+t.insert({ "_id" : "VOD.N|00001264779918441426", "COMPANYNAME" : { "n" : "Vodafone Group PLC 3", "o" : "Vodafone Group PLC 2", "s" : "changed" } });
+
+// temp:
+printjson( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).toArray() );
+
+// this should be 2!! add assertion when fixed
+// http://jira.mongodb.org/browse/SERVER-675
+print( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).count() );
+
diff --git a/jstests/touch1.js b/jstests/touch1.js
new file mode 100644
index 00000000000..f7a0878f2e6
--- /dev/null
+++ b/jstests/touch1.js
@@ -0,0 +1,15 @@
+
+t = db.touch1;
+t.drop();
+
+t.insert( { x : 1 } );
+t.ensureIndex( { x : 1 } );
+
+res = t.runCommand( "touch" );
+assert( !res.ok, tojson( res ) );
+
+res = t.runCommand( "touch", { data : true, index : true } );
+assert.eq( 1, res.data.numRanges, tojson( res ) );
+assert.eq( 2, res.indexes.numRanges, tojson( res ) );
+
+
diff --git a/jstests/ts1.js b/jstests/ts1.js
new file mode 100644
index 00000000000..30f7882e863
--- /dev/null
+++ b/jstests/ts1.js
@@ -0,0 +1,38 @@
+t = db.ts1
+t.drop()
+
+N = 20
+
+for ( i=0; i<N; i++ ){
+ t.insert( { _id : i , x : new Timestamp() } )
+ sleep( 100 )
+}
+
+function get(i){
+ return t.findOne( { _id : i } ).x;
+}
+
+function cmp( a , b ){
+ if ( a.t < b.t )
+ return -1;
+ if ( a.t > b.t )
+ return 1;
+
+ return a.i - b.i;
+}
+
+for ( i=0; i<N-1; i++ ){
+ a = get(i);
+ b = get(i+1);
+ //print( tojson(a) + "\t" + tojson(b) + "\t" + cmp(a,b) );
+ assert.gt( 0 , cmp( a , b ) , "cmp " + i )
+}
+
+assert.eq( N , t.find( { x : { $type : 17 } } ).itcount() , "B1" )
+assert.eq( 0 , t.find( { x : { $type : 3 } } ).itcount() , "B2" )
+
+t.insert( { _id : 100 , x : new Timestamp( 123456 , 50 ) } )
+x = t.findOne( { _id : 100 } ).x
+assert.eq( 123456 , x.t , "C1" )
+assert.eq( 50 , x.i , "C2" )
+
diff --git a/jstests/type1.js b/jstests/type1.js
new file mode 100644
index 00000000000..518e36728e7
--- /dev/null
+++ b/jstests/type1.js
@@ -0,0 +1,24 @@
+
+t = db.type1;
+t.drop();
+
+t.save( { x : 1.1 } );
+t.save( { x : "3" } );
+t.save( { x : "asd" } );
+t.save( { x : "foo" } );
+
+assert.eq( 4 , t.find().count() , "A1" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "A2" );
+assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "A3" );
+assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "A4" );
+assert.eq( 4 , t.find( { x : { $type : 1 } } ).explain().nscanned , "A5" );
+
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 4 , t.find().count() , "B1" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "B2" );
+assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "B3" );
+assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "B4" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).explain().nscanned , "B5" );
+assert.eq( 1 , t.find( { x : { $regex:"f", $type : 2 } } ).count() , "B3" ); \ No newline at end of file
diff --git a/jstests/type2.js b/jstests/type2.js
new file mode 100644
index 00000000000..820607e0b30
--- /dev/null
+++ b/jstests/type2.js
@@ -0,0 +1,19 @@
+// SERVER-1735 $type:10 matches null value, not missing value.
+
+t = db.jstests_type2;
+t.drop();
+
+t.save( {a:null} );
+t.save( {} );
+t.save( {a:'a'} );
+
+function test() {
+ assert.eq( 2, t.count( {a:null} ) );
+ assert.eq( 1, t.count( {a:{$type:10}} ) );
+ assert.eq( 2, t.count( {a:{$exists:true}} ) );
+ assert.eq( 1, t.count( {a:{$exists:false}} ) );
+}
+
+test();
+t.ensureIndex( {a:1} );
+test(); \ No newline at end of file
diff --git a/jstests/type3.js b/jstests/type3.js
new file mode 100644
index 00000000000..82a8b8ae7fc
--- /dev/null
+++ b/jstests/type3.js
@@ -0,0 +1,68 @@
+// Check query type bracketing SERVER-3222
+
+t = db.jstests_type3;
+t.drop();
+
+t.ensureIndex( {a:1} );
+
+// Type Object
+t.save( {a:{'':''}} );
+assert.eq( 1, t.find( {a:{$type:3}} ).hint( {a:1} ).itcount() );
+
+// Type Array
+t.remove({});
+t.save( {a:[['c']]} );
+assert.eq( 1, t.find( {a:{$type:4}} ).hint( {a:1} ).itcount() );
+
+// Type RegEx
+t.remove({});
+t.save( {a:/r/} );
+assert.eq( 1, t.find( {a:{$type:11}} ).hint( {a:1} ).itcount() );
+
+// Type jstNULL
+t.remove({});
+assert.eq( [[null,null]], t.find( {a:{$type:10}} ).hint( {a:1} ).explain().indexBounds.a );
+
+// Type Undefined
+t.remove({});
+// 'null' is the client friendly version of undefined.
+assert.eq( [[null,null]], t.find( {a:{$type:6}} ).hint( {a:1} ).explain().indexBounds.a );
+
+t.save( {a:undefined} );
+assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
+
+// This one won't be returned.
+t.save( {a:null} );
+assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
+
+t.remove({});
+// Type MinKey
+assert.eq( [[{$minElement:1},{$minElement:1}]], t.find( {a:{$type:-1}} ).hint( {a:1} ).explain().indexBounds.a );
+// Type MaxKey
+assert.eq( [[{$maxElement:1},{$maxElement:1}]], t.find( {a:{$type:127}} ).hint( {a:1} ).explain().indexBounds.a );
+
+// Type Timestamp
+t.remove({});
+t.save( {a:new Timestamp()} );
+assert.eq( 1, t.find( {a:{$type:17}} ).itcount() );
+if ( 0 ) { // SERVER-3304
+assert.eq( 0, t.find( {a:{$type:9}} ).itcount() );
+}
+
+// Type Date
+t.remove({});
+t.save( {a:new Date()} );
+if ( 0 ) { // SERVER-3304
+assert.eq( 0, t.find( {a:{$type:17}} ).itcount() );
+}
+assert.eq( 1, t.find( {a:{$type:9}} ).itcount() );
+
+// Type Code
+t.remove({});
+t.save( {a:function(){var a = 0;}} );
+assert.eq( 1, t.find( {a:{$type:13}} ).itcount() );
+
+// Type BinData
+t.remove({});
+t.save( {a:new BinData(0,'')} );
+assert.eq( 1, t.find( {a:{$type:5}} ).itcount() );
diff --git a/jstests/uniqueness.js b/jstests/uniqueness.js
new file mode 100644
index 00000000000..ce19ad08d82
--- /dev/null
+++ b/jstests/uniqueness.js
@@ -0,0 +1,58 @@
+
+t = db.jstests_uniqueness;
+
+t.drop();
+
+// test uniqueness of _id
+
+t.save( { _id : 3 } );
+assert( !db.getLastError(), 1 );
+
+// this should yield an error
+t.insert( { _id : 3 } );
+assert( db.getLastError() , 2);
+assert( t.count() == 1, "hmmm");
+
+t.insert( { _id : 4, x : 99 } );
+assert( !db.getLastError() , 3);
+
+// this should yield an error
+t.update( { _id : 4 } , { _id : 3, x : 99 } );
+assert( db.getLastError() , 4);
+assert( t.findOne( {_id:4} ), 5 );
+
+// Check for an error message when we index and there are dups
+db.jstests_uniqueness2.drop();
+db.jstests_uniqueness2.insert({a:3});
+db.jstests_uniqueness2.insert({a:3});
+assert( db.jstests_uniqueness2.count() == 2 , 6) ;
+db.resetError();
+db.jstests_uniqueness2.ensureIndex({a:1}, true);
+assert( db.getLastError() , 7);
+assert( db.getLastError().match( /E11000/ ) );
+
+// Check for an error message when we index in the background and there are dups
+db.jstests_uniqueness2.drop();
+db.jstests_uniqueness2.insert({a:3});
+db.jstests_uniqueness2.insert({a:3});
+assert( db.jstests_uniqueness2.count() == 2 , 6) ;
+assert( !db.getLastError() );
+db.resetError();
+db.jstests_uniqueness2.ensureIndex({a:1}, {unique:true,background:true});
+assert( db.getLastError() , 7);
+assert( db.getLastError().match( /E11000/ ) );
+
+/* Check that if we update and remove _id, it gets added back by the DB */
+
+/* - test when object grows */
+t.drop();
+t.save( { _id : 'Z' } );
+t.update( {}, { k : 2 } );
+assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id" );
+
+/* - test when doesn't grow */
+t.drop();
+t.save( { _id : 'Z', k : 3 } );
+t.update( {}, { k : 2 } );
+assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id (2)" );
+
diff --git a/jstests/unset.js b/jstests/unset.js
new file mode 100644
index 00000000000..f3cdcf03deb
--- /dev/null
+++ b/jstests/unset.js
@@ -0,0 +1,19 @@
+t = db.unset;
+t.drop();
+
+orig = { _id : 1, emb : {} };
+t.insert(orig);
+
+t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
+assert.eq( orig , t.findOne() , "A" );
+
+t.update( { _id : 1 }, { $set : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $set : { 'z' : 1 }});
+
+t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
+assert.eq( orig , t.findOne() , "B" ); // note that emb isn't removed
+
+t.update( { _id : 1 }, { $unset : { 'emb' : 1 }});
+assert.eq( {_id :1} , t.findOne() , "C" );
diff --git a/jstests/unset2.js b/jstests/unset2.js
new file mode 100644
index 00000000000..e1dc445fcb8
--- /dev/null
+++ b/jstests/unset2.js
@@ -0,0 +1,23 @@
+t = db.unset2;
+t.drop();
+
+t.save( {a:["a","b","c","d"]} );
+t.update( {}, {$unset:{"a.3":1}} );
+assert.eq( ["a","b","c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.1":1}} );
+assert.eq( ["a",null,"c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.0":1}} );
+assert.eq( [null,null,"c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.4":1}} );
+assert.eq( [null,null,"c",null], t.findOne().a ); // no change
+
+t.drop();
+t.save( {a:["a","b","c","d","e"]} );
+t.update( {}, {$unset:{"a.2":1},$set:{"a.3":3,"a.4":4,"a.5":5}} );
+assert.eq( ["a","b",null,3,4,5], t.findOne().a );
+
+t.drop();
+t.save( {a:["a","b","c","d","e"]} );
+t.update( {}, {$unset:{"a.2":1},$set:{"a.2":4}} );
+assert( db.getLastError() );
+assert.eq( ["a","b","c","d","e"], t.findOne().a ); \ No newline at end of file
diff --git a/jstests/update.js b/jstests/update.js
new file mode 100644
index 00000000000..37bf6378c64
--- /dev/null
+++ b/jstests/update.js
@@ -0,0 +1,40 @@
+
+asdf = db.getCollection( "asdf" );
+asdf.drop();
+
+var txt = "asdf";
+for(var i=0; i<10; i++) {
+ txt = txt + txt;
+}
+
+var iterations = _isWindows() ? 2500 : 5000
+
+// fill db
+for(var i=1; i<=iterations; i++) {
+ var obj = {txt : txt};
+ asdf.save(obj);
+
+ var obj2 = {txt: txt, comments: [{num: i, txt: txt}, {num: [], txt: txt}, {num: true, txt: txt}]};
+ asdf.update(obj, obj2);
+
+ if(i%100 == 0) {
+ var c = asdf.count();
+ assert.eq(c , i);
+ }
+}
+
+assert(asdf.validate().valid);
+
+var stats = db.runCommand({ collstats: "asdf" });
+
+// some checks. want to check that padding factor is working; in addition this lets us do a little basic
+// testing of the collstats command at the same time
+assert(stats.count == iterations);
+assert(stats.size < 140433012 * 5 && stats.size > 1000000);
+assert(stats.numExtents < 20);
+assert(stats.nindexes == 1);
+var pf = stats.paddingFactor;
+print("update.js padding factor: " + pf);
+assert(pf > 1.7 && pf <= 2);
+
+asdf.drop();
diff --git a/jstests/update2.js b/jstests/update2.js
new file mode 100644
index 00000000000..654914c1f45
--- /dev/null
+++ b/jstests/update2.js
@@ -0,0 +1,18 @@
+f = db.ed_db_update2;
+
+f.drop();
+f.save( { a: 4 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
+
+f.drop();
+f.save( { a: 4 } );
+f.ensureIndex( { a: 1 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
+
+// Verify that drop clears the index
+f.drop();
+f.save( { a: 4 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
diff --git a/jstests/update3.js b/jstests/update3.js
new file mode 100644
index 00000000000..995c6e67b45
--- /dev/null
+++ b/jstests/update3.js
@@ -0,0 +1,28 @@
+// Update with mods corner cases.
+
+f = db.jstests_update3;
+
+f.drop();
+f.save( { a:1 } );
+f.update( {}, {$inc:{ a:1 }} );
+assert.eq( 2, f.findOne().a , "A" );
+
+f.drop();
+f.save( { a:{ b: 1 } } );
+f.update( {}, {$inc:{ "a.b":1 }} );
+assert.eq( 2, f.findOne().a.b , "B" );
+
+f.drop();
+f.save( { a:{ b: 1 } } );
+f.update( {}, {$set:{ "a.b":5 }} );
+assert.eq( 5, f.findOne().a.b , "C" );
+
+f.drop();
+f.save( {'_id':0} );
+f.update( {}, {$set:{'_id':5}} );
+assert.eq( 0, f.findOne()._id , "D" );
+
+f.drop();
+f.save({_id:1, a:1})
+f.update({}, {$unset:{"a":1, "b.c":1}})
+assert.docEq(f.findOne(), {_id:1}, "E") \ No newline at end of file
diff --git a/jstests/update5.js b/jstests/update5.js
new file mode 100644
index 00000000000..2728000f2d4
--- /dev/null
+++ b/jstests/update5.js
@@ -0,0 +1,41 @@
+
+t = db.update5;
+
+function go( key ){
+
+ t.drop();
+
+ function check( num , name ){
+ assert.eq( 1 , t.find().count() , tojson( key ) + " count " + name );
+ assert.eq( num , t.findOne().n , tojson( key ) + " value " + name );
+ }
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 1 , "A" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 2 , "B" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 3 , "C" );
+
+ var ik = {};
+ for ( k in key )
+ ik[k] = 1;
+ t.ensureIndex( ik );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 4 , "D" );
+
+}
+
+go( { a : 5 } );
+go( { a : 5 } );
+
+go( { a : 5 , b : 7 } );
+go( { a : null , b : 7 } );
+
+go( { referer: 'blah' } );
+go( { referer: 'blah', lame: 'bar' } );
+go( { referer: 'blah', name: 'bar' } );
+go( { date: null, referer: 'blah', name: 'bar' } );
diff --git a/jstests/update6.js b/jstests/update6.js
new file mode 100644
index 00000000000..05fc5b223d9
--- /dev/null
+++ b/jstests/update6.js
@@ -0,0 +1,46 @@
+
+t = db.update6;
+t.drop();
+
+t.save( { a : 1 , b : { c : 1 , d : 1 } } );
+
+t.update( { a : 1 } , { $inc : { "b.c" : 1 } } );
+assert.eq( 2 , t.findOne().b.c , "A" );
+assert.eq( "c,d" , Object.keySet( t.findOne().b ).toString() , "B" );
+
+t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
+assert.eq( 1 , t.findOne().b["0e"] , "C" );
+assert.docEq( { "c" : 2, "d" : 1, "0e" : 1 }, t.findOne().b, "D" );
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ //"b323" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks before" );
+t.update({_id:2},{$inc: { 'b3.0719' : 1}},true)
+assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks after" );
+
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ "b324" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks before" );
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+t.update({_id:2},{$inc: { 'b3.0719' : 1}} )
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks after" );
diff --git a/jstests/update7.js b/jstests/update7.js
new file mode 100644
index 00000000000..b893121080f
--- /dev/null
+++ b/jstests/update7.js
@@ -0,0 +1,138 @@
+
+t = db.update7;
+t.drop();
+
+function s(){
+ return t.find().sort( { _id : 1 } ).map( function(z){ return z.x; } );
+}
+
+t.save( { _id : 1 , x : 1 } );
+t.save( { _id : 2 , x : 5 } );
+
+assert.eq( "1,5" , s() , "A" );
+
+t.update( {} , { $inc : { x : 1 } } );
+assert.eq( "2,5" , s() , "B" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+assert.eq( "3,5" , s() , "C" );
+
+t.update( { _id : 2 } , { $inc : { x : 1 } } );
+assert.eq( "3,6" , s() , "D" );
+
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "E" );
+
+t.update( {} , { $set : { x : 2 } } , false , true );
+assert.eq( "2,2" , s() , "F" );
+
+// non-matching in cursor
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : 1 , b : 1 } );
+t.save( { _id : 2 , x : 5 , a : 1 , b : 2 } );
+assert.eq( "1,5" , s() , "B1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,6" , s() , "B2" );
+
+t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "B3" );
+
+t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "B4" );
+
+t.ensureIndex( { a : 1 } );
+t.ensureIndex( { b : 1 } );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "B5" );
+
+t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,7" , s() , "B6" );
+
+t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,7" , s() , "B7" );
+
+t.update( { b : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "B7" );
+
+
+// multi-key
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
+t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
+assert.eq( "1,5" , s() , "C1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,5" , s() , "C2" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,5" , s() , "C3" );
+
+t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "C4" );
+
+t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "C5" );
+
+t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "C6" );
+
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
+t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
+t.ensureIndex( { a : 1 } );
+assert.eq( "1,5" , s() , "D1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,5" , s() , "D2" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,5" , s() , "D3" );
+
+t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "D4" );
+
+t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "D5" );
+
+t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "D6" );
+
+t.update( { a : { $lt : 10 } } , { $inc : { x : -1 } } , false , true );
+assert.eq( "4,7" , s() , "D7" );
+
+// ---
+
+t.save( { _id : 3 } );
+assert.eq( "4,7," , s() , "E1" );
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8,1" , s() , "E2" );
+
+for ( i = 4; i<8; i++ )
+ t.save( { _id : i } );
+t.save( { _id : i , x : 1 } );
+assert.eq( "5,8,1,,,,,1" , s() , "E4" );
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "6,9,2,1,1,1,1,2" , s() , "E5" );
+
+
+// --- $inc indexed field
+
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : 2 } );
+t.save( { x : 3 } );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( "1,2,3" , s() , "F1" )
+t.update( { x : { $gt : 0 } } , { $inc : { x : 5 } } , false , true );
+assert.eq( "6,7,8" , s() , "F1" )
diff --git a/jstests/update8.js b/jstests/update8.js
new file mode 100644
index 00000000000..2388ff85c9d
--- /dev/null
+++ b/jstests/update8.js
@@ -0,0 +1,11 @@
+
+t = db.update8;
+t.drop();
+
+t.update( { _id : 1 , tags: {"$ne": "a"}}, {"$push": { tags : "a" } } , true )
+assert.eq( { _id : 1 , tags : [ "a" ] } , t.findOne() , "A" );
+
+t.drop()
+//SERVER-390
+//t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true );
+//printjson( t.findOne() );
diff --git a/jstests/update9.js b/jstests/update9.js
new file mode 100644
index 00000000000..45b9e2d0e26
--- /dev/null
+++ b/jstests/update9.js
@@ -0,0 +1,19 @@
+
+t = db.update9;
+t.drop()
+
+orig = { "_id" : 1 ,
+ "question" : "a",
+ "choices" : { "1" : { "choice" : "b" },
+ "0" : { "choice" : "c" } } ,
+
+ }
+
+t.save( orig );
+assert.eq( orig , t.findOne() , "A" );
+
+t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}})
+
+orig.choices["0"].votes = [ 1 ] ;
+assert.eq( orig.choices["0"] , t.findOne().choices["0"] , "B" );
+
diff --git a/jstests/update_addToSet.js b/jstests/update_addToSet.js
new file mode 100644
index 00000000000..da930555267
--- /dev/null
+++ b/jstests/update_addToSet.js
@@ -0,0 +1,58 @@
+
+t = db.update_addToSet1;
+t.drop();
+
+o = { _id : 1 , a : [ 2 , 1 ] }
+t.insert( o );
+
+assert.eq( o , t.findOne() , "A1" );
+
+t.update( {} , { $addToSet : { a : 3 } } );
+o.a.push( 3 );
+assert.eq( o , t.findOne() , "A2" );
+
+t.update( {} , { $addToSet : { a : 3 } } );
+assert.eq( o , t.findOne() , "A3" );
+
+// SERVER-628
+t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
+o.a.push( 5 )
+o.a.push( 6 )
+assert.eq( o , t.findOne() , "B1" )
+
+t.drop()
+o = { _id : 1 , a : [ 3 , 5 , 6 ] }
+t.insert( o );
+t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
+assert.eq( o , t.findOne() , "B2" );
+
+t.drop();
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
+assert.eq( o , t.findOne() , "B3" );
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
+assert.eq( o , t.findOne() , "B4" );
+
+// SERVER-630
+t.drop();
+t.update( { _id : 2 } , { $addToSet : { a : 3 } } , true );
+assert.eq( 1 , t.count() , "C1" );
+assert.eq( { _id : 2 , a : [ 3 ] } , t.findOne() , "C2" );
+
+// SERVER-3245
+o = {_id: 1, a: [1,2]};
+t.drop();
+t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2]}}}, true );
+assert.eq( o, t.findOne(), "D1" );
+
+t.drop();
+t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,1,2]}}}, true );
+assert.eq( o, t.findOne(), "D2" );
+
+t.drop();
+t.insert( {_id: 1} );
+t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,2,1]}}} );
+assert.eq( o, t.findOne(), "D3" );
+
+t.update( {_id: 1}, {$addToSet: {a: {$each: [3,2,2,3,3]}}} );
+o.a.push( 3 );
+assert.eq( o, t.findOne(), "D4" );
diff --git a/jstests/update_addToSet2.js b/jstests/update_addToSet2.js
new file mode 100644
index 00000000000..cb168f8d15e
--- /dev/null
+++ b/jstests/update_addToSet2.js
@@ -0,0 +1,11 @@
+
+t = db.update_addToSet2
+t.drop();
+
+o = { _id : 1 }
+t.insert( { _id : 1 } );
+
+t.update({},{$addToSet : {'kids' :{ 'name' : 'Bob', 'age': '4'}}})
+t.update({},{$addToSet : {'kids' :{ 'name' : 'Dan', 'age': '2'}}})
+
+printjson( t.findOne() );
diff --git a/jstests/update_addToSet3.js b/jstests/update_addToSet3.js
new file mode 100644
index 00000000000..e9da58eb6e0
--- /dev/null
+++ b/jstests/update_addToSet3.js
@@ -0,0 +1,18 @@
+
+t = db.update_addToSet3
+t.drop()
+
+t.insert( { _id : 1 } )
+
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 6 , 5 , 4 ] } } } )
+assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 ] } , "A1" )
+
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 2 , 1 ] } } } )
+assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 ] } , "A2" )
+
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 4 , 7 , 9 , 2 ] } } } )
+assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 ] } , "A3" )
+
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 12 , 13 , 12 ] } } } )
+assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 , 12 , 13 ] } , "A4" )
+
diff --git a/jstests/update_arraymatch1.js b/jstests/update_arraymatch1.js
new file mode 100644
index 00000000000..521271d7f85
--- /dev/null
+++ b/jstests/update_arraymatch1.js
@@ -0,0 +1,16 @@
+
+t = db.update_arraymatch1
+t.drop();
+
+o = { _id : 1 , a : [ { x : 1 , y : 1 } , { x : 2 , y : 2 } , { x : 3 , y : 3 } ] }
+t.insert( o );
+assert.eq( o , t.findOne() , "A1" );
+
+q = { "a.x" : 2 }
+t.update( q , { $set : { b : 5 } } )
+o.b = 5
+assert.eq( o , t.findOne() , "A2" )
+
+t.update( { "a.x" : 2 } , { $inc : { "a.$.y" : 1 } } )
+o.a[1].y++;
+assert.eq( o , t.findOne() , "A3" );
diff --git a/jstests/update_arraymatch2.js b/jstests/update_arraymatch2.js
new file mode 100644
index 00000000000..c07a61c378c
--- /dev/null
+++ b/jstests/update_arraymatch2.js
@@ -0,0 +1,16 @@
+t = db.update_arraymatch2;
+t.drop();
+
+t.insert( { } );
+t.insert( { x : [1,2,3] } );
+t.insert( { x : 99 } );
+t.update( {x : 2}, { $inc : { "x.$" : 1 } } , false, true );
+assert( t.findOne({x:1}).x[1] == 3, "A1" );
+
+t.insert( { x : { y : [8,7,6] } } )
+t.update( {'x.y' : 7}, { $inc : { "x.y.$" : 1 } } , false, true )
+assert.eq( 8 , t.findOne({"x.y" : 8}).x.y[1] , "B1" );
+
+t.insert( { x : [90,91,92], y : ['a', 'b', 'c'] } );
+t.update( { x : 92} , { $set : { 'y.$' : 'z' } }, false, true );
+assert.eq( 'z', t.findOne({x:92}).y[2], "B2" );
diff --git a/jstests/update_arraymatch3.js b/jstests/update_arraymatch3.js
new file mode 100644
index 00000000000..116ac6be2e3
--- /dev/null
+++ b/jstests/update_arraymatch3.js
@@ -0,0 +1,17 @@
+
+t = db.update_arraymatch3;
+t.drop();
+
+o = { _id : 1 ,
+ title : "ABC",
+ comments : [ { "by" : "joe", "votes" : 3 },
+ { "by" : "jane", "votes" : 7 }
+ ]
+ }
+
+t.save( o );
+assert.eq( o , t.findOne() , "A1" );
+
+t.update( {'comments.by':'joe'}, {$inc:{'comments.$.votes':1}}, false, true )
+o.comments[0].votes++;
+assert.eq( o , t.findOne() , "A2" );
diff --git a/jstests/update_arraymatch4.js b/jstests/update_arraymatch4.js
new file mode 100644
index 00000000000..5abd0aa3bf0
--- /dev/null
+++ b/jstests/update_arraymatch4.js
@@ -0,0 +1,18 @@
+
+t = db.update_arraymatch4
+t.drop()
+
+x = { _id : 1 , arr : ["A1","B1","C1"] }
+t.insert( x )
+assert.eq( x , t.findOne() , "A1" )
+
+x.arr[0] = "A2"
+t.update( { arr : "A1" } , { $set : { "arr.$" : "A2" } } )
+assert.eq( x , t.findOne() , "A2" )
+
+t.ensureIndex( { arr : 1 } )
+x.arr[0] = "A3"
+t.update( { arr : "A2" } , { $set : { "arr.$" : "A3" } } )
+assert.eq( x , t.findOne() , "A3" ); // SERVER-1055
+
+
diff --git a/jstests/update_arraymatch5.js b/jstests/update_arraymatch5.js
new file mode 100644
index 00000000000..aff1a0323ef
--- /dev/null
+++ b/jstests/update_arraymatch5.js
@@ -0,0 +1,15 @@
+
+t = db.update_arraymatch5
+t.drop();
+
+t.insert({abc:{visible:true}, testarray:[{foobar_id:316, visible:true, xxx: 1}]});
+t.ensureIndex({'abc.visible':1, 'testarray.visible':1 , 'testarray.xxx': 1});
+assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A1" )
+assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A2" );
+
+t.update({'testarray.foobar_id':316}, {'$set': {'testarray.$.visible': true, 'testarray.$.xxx': 2}}, false, true);
+
+assert( t.findOne() , "B1" );
+assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B2" )
+assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B3" );
+assert.eq( 1 , t.find().count() , "B4" );
diff --git a/jstests/update_arraymatch6.js b/jstests/update_arraymatch6.js
new file mode 100644
index 00000000000..8892e6fcc68
--- /dev/null
+++ b/jstests/update_arraymatch6.js
@@ -0,0 +1,14 @@
+t = db.jstests_update_arraymatch6;
+t.drop();
+
+function doTest() {
+ t.save( {a: [{id: 1, x: [5,6,7]}, {id: 2, x: [8,9,10]}]} );
+ t.update({'a.id': 1}, {$set: {'a.$.x': [1,1,1]}});
+ assert.automsg( "!db.getLastError()" );
+ assert.eq.automsg( "1", "t.findOne().a[ 0 ].x[ 0 ]" );
+}
+
+doTest();
+t.drop();
+t.ensureIndex( { 'a.id':1 } );
+doTest(); \ No newline at end of file
diff --git a/jstests/update_arraymatch7.js b/jstests/update_arraymatch7.js
new file mode 100644
index 00000000000..5621f60c39e
--- /dev/null
+++ b/jstests/update_arraymatch7.js
@@ -0,0 +1,19 @@
+// Check that the positional operator works properly when an index only match is used for the update
+// query spec. SERVER-5067
+
+t = db.jstests_update_arraymatch7;
+t.drop();
+
+function testPositionalInc() {
+ t.remove({});
+ t.save( { a:[ { b:'match', count:0 } ] } );
+ t.update( { 'a.b':'match' }, { $inc:{ 'a.$.count':1 } } );
+ // Check that the positional $inc succeeded.
+ assert( t.findOne( { 'a.count':1 } ) );
+}
+
+testPositionalInc();
+
+// Now check with a non multikey index.
+t.ensureIndex( { 'a.b' : 1 } );
+testPositionalInc();
diff --git a/jstests/update_arraymatch8.js b/jstests/update_arraymatch8.js
new file mode 100644
index 00000000000..1e8ce377862
--- /dev/null
+++ b/jstests/update_arraymatch8.js
@@ -0,0 +1,158 @@
+// Checking for positional array updates with either .$ or .0 at the end
+// SERVER-7511
+
+// array.$.name
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'name': 'old'}]} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {'array.name': 'old'}, {$set: {'array.$.name': 'new'}} );
+assert( t.findOne({'array.name': 'new'}) );
+assert( !t.findOne({'array.name': 'old'}) );
+
+// array.$ (failed in 2.2.2)
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'name': 'old'}]} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {'array.name': 'old'}, {$set: {'array.$': {'name':'new'}}} );
+assert( t.findOne({'array.name': 'new'}) );
+assert( !t.findOne({'array.name': 'old'}) );
+
+// array.0.name
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'name': 'old'}]} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {'array.name': 'old'}, {$set: {'array.0.name': 'new'}} );
+assert( t.findOne({'array.name': 'new'}) );
+assert( !t.findOne({'array.name': 'old'}) );
+
+// array.0 (failed in 2.2.2)
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'name': 'old'}]} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {'array.name': 'old'}, {$set: {'array.0': {'name':'new'}}} );
+assert( t.findOne({'array.name': 'new'}) );
+assert( !t.findOne({'array.name': 'old'}) );
+
+// array.12.name
+t = db.jstests_update_arraymatch8;
+t.drop();
+arr = new Array();
+for (var i=0; i<20; i++) {
+ arr.push({'name': 'old'});
+}
+t.ensureIndex( {'array.name': 1} );
+t.insert( {_id:0, 'array': arr} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {_id:0}, {$set: {'array.12.name': 'new'}} );
+// note: the array must now contain both 'old' and 'new' name values
+assert( t.findOne({'array.name': 'new'}) );
+assert( t.findOne({'array.name': 'old'}) );
+
+// array.12 (failed in 2.2.2)
+t = db.jstests_update_arraymatch8;
+t.drop();
+arr = new Array();
+for (var i=0; i<20; i++) {
+ arr.push({'name': 'old'});
+}
+t.ensureIndex( {'array.name': 1} );
+t.insert( {_id:0, 'array': arr} );
+assert( t.findOne({'array.name': 'old'}) );
+t.update( {_id:0}, {$set: {'array.12': {'name':'new'}}} );
+// note: the array must now contain both 'old' and 'new' name values
+assert( t.findOne({'array.name': 'new'}) );
+assert( t.findOne({'array.name': 'old'}) );
+
+// array.$.123a.name
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.123a.name': 1} );
+t.insert( {'array': [{'123a':{'name': 'old'}}]} );
+assert( t.findOne({'array.123a.name': 'old'}) );
+t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a.name': 'new'}} );
+assert( t.findOne({'array.123a.name': 'new'}) );
+assert( !t.findOne({'array.123a.name': 'old'}) );
+
+// array.$.123a
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'123a':{'name': 'old'}}]} );
+assert( t.findOne({'array.123a.name': 'old'}) );
+t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a': {'name': 'new'}}} );
+assert( t.findOne({'array.123a.name': 'new'}) );
+assert( !t.findOne({'array.123a.name': 'old'}) );
+
+// array.0.123a.name
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.123a.name': 1} );
+t.insert( {'array': [{'123a':{'name': 'old'}}]} );
+assert( t.findOne({'array.123a.name': 'old'}) );
+t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a.name': 'new'}} );
+assert( t.findOne({'array.123a.name': 'new'}) );
+assert( !t.findOne({'array.123a.name': 'old'}) );
+
+// array.0.123a
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'array.name': 1} );
+t.insert( {'array': [{'123a':{'name': 'old'}}]} );
+assert( t.findOne({'array.123a.name': 'old'}) );
+t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a': {'name': 'new'}}} );
+assert( t.findOne({'array.123a.name': 'new'}) );
+assert( !t.findOne({'array.123a.name': 'old'}) );
+
+// a.0.b
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'a.0.b': 1} );
+t.insert( {'a': [ [ { b:'old' } ] ] } );
+assert( t.findOne({'a.0.0.b': 'old'}) );
+assert( t.findOne({'a.0.b': 'old'}) );
+t.update( {}, {$set: {'a.0.0.b': 'new'}} );
+assert( t.findOne({'a.0.b': 'new'}) );
+assert( !t.findOne({'a.0.b': 'old'}) );
+
+// a.0.b.c
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'a.0.b.c': 1} );
+t.insert( {'a': [ { b:[ { c:'old' } ] } ] } );
+assert( t.findOne({'a.0.b.0.c': 'old'}) );
+assert( t.findOne({'a.b.0.c': 'old'}) );
+assert( t.findOne({'a.0.b.c': 'old'}) );
+assert( t.findOne({'a.b.c': 'old'}) );
+t.update( {}, {$set: {'a.0.b.0.c': 'new'}} );
+assert( t.findOne({'a.0.b.c': 'new'}) );
+assert( !t.findOne({'a.0.b.c': 'old'}) );
+
+// a.b.$ref
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'a.b.$ref': 1} );
+t.insert( {'a': [ { 'b':{ '$ref':'old', '$id':0 } } ] } );
+assert( t.findOne({'a.b.$ref': 'old'}) );
+assert( t.findOne({'a.0.b.$ref': 'old'}) );
+t.update( {}, {$set: {'a.0.b.$ref': 'new'}} );
+assert( t.findOne({'a.b.$ref': 'new'}) );
+assert( !t.findOne({'a.b.$ref': 'old'}) );
+
+// a.b and a-b
+t = db.jstests_update_arraymatch8;
+t.drop();
+t.ensureIndex( {'a.b': 1} );
+t.ensureIndex( {'a-b': 1} );
+t.insert( {'a':{'b':'old'}} );
+assert( t.findOne({'a.b': 'old'}) );
+t.update( {}, {$set: {'a': {'b': 'new'}}} );
+assert( t.findOne({'a.b': 'new'}) );
+assert( !t.findOne({'a.b': 'old'}) );
diff --git a/jstests/update_bit_examples.js b/jstests/update_bit_examples.js
new file mode 100644
index 00000000000..f277630a7da
--- /dev/null
+++ b/jstests/update_bit_examples.js
@@ -0,0 +1,24 @@
+// Basic examples for $bit
+var coll = db.update_bit;
+coll.drop();
+
+// $bit and
+coll.remove({})
+coll.save({_id:1, a:NumberInt(2)});
+coll.update({}, {$bit: {a: {and: NumberInt(4)}}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, 0)
+
+// $bit or
+coll.remove({})
+coll.save({_id:1, a:NumberInt(2)});
+coll.update({}, {$bit: {a: {or: NumberInt(4)}}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, 6)
+
+// $bit xor
+coll.remove({})
+coll.save({_id:1, a:NumberInt(0)});
+coll.update({}, {$bit: {a: {xor: NumberInt(4)}}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, 4)
diff --git a/jstests/update_blank1.js b/jstests/update_blank1.js
new file mode 100644
index 00000000000..a2344035dc3
--- /dev/null
+++ b/jstests/update_blank1.js
@@ -0,0 +1,10 @@
+
+t = db.update_blank1
+t.drop();
+
+orig = { "" : 1 , _id : 2 , "a" : 3 , "b" : 4 };
+t.insert( orig );
+t.update( {} , { $set : { "c" : 5 } } );
+print( db.getLastError() );
+orig["c"] = 5;
+assert.docEq( orig , t.findOne() , "after $set" ); // SERVER-2651 \ No newline at end of file
diff --git a/jstests/update_currentdate_examples.js b/jstests/update_currentdate_examples.js
new file mode 100644
index 00000000000..055bd3089da
--- /dev/null
+++ b/jstests/update_currentdate_examples.js
@@ -0,0 +1,24 @@
+// Basic examples for $currentDate
+var coll = db.update_currentdate;
+coll.drop();
+
+// $currentDate default
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$currentDate: {a: true}})
+assert.gleSuccess(coll.getDB())
+assert(coll.findOne().a.constructor == Date)
+
+// $currentDate type = date
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$currentDate: {a: {$type: "date"}}})
+assert.gleSuccess(coll.getDB())
+assert(coll.findOne().a.constructor == Date)
+
+// $currentDate type = timestamp
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$currentDate: {a: {$type: "timestamp"}}})
+assert.gleSuccess(coll.getDB())
+assert(coll.findOne().a.constructor == Timestamp)
diff --git a/jstests/update_dbref.js b/jstests/update_dbref.js
new file mode 100644
index 00000000000..bf31566fc28
--- /dev/null
+++ b/jstests/update_dbref.js
@@ -0,0 +1,36 @@
+// Test that we can update DBRefs, but not dbref fields outside a DBRef
+
+t = db.jstests_update_dbref;
+t.drop();
+
+t.save({_id:1, a: new DBRef("a", "b")});
+assert.gleSuccess(db, "failed to save dbref");
+assert.docEq({_id:1, a: new DBRef("a", "b")}, t.findOne());
+
+t.update({}, {$set: {"a.$id": 2}});
+assert.gleSuccess(db, "a.$id update");
+assert.docEq({_id:1, a: new DBRef("a", 2)}, t.findOne());
+
+t.update({}, {$set: {"a.$ref": "b"}});
+assert.gleSuccess(db, "a.$ref update");
+
+assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+
+// Bad updates
+t.update({}, {$set: {"$id": 3}});
+assert.gleErrorRegex(db, /\$id/, "expected bad update because of $id")
+assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+
+t.update({}, {$set: {"$ref": "foo"}});
+assert.gleErrorRegex(db, /\$ref/, "expected bad update because of $ref")
+assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+
+t.update({}, {$set: {"$db": "aDB"}});
+assert.gleErrorRegex(db, /\$db/, "expected bad update because of $db")
+assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+
+t.update({}, {$set: {"b.$id": 2}});
+assert.gleError(db, function() { return "b.$id update -- doc:" + tojson(t.findOne())});
+
+t.update({}, {$set: {"b.$ref": 2}});
+assert.gleError(db, function() { return "b.$ref update -- doc:" + tojson(t.findOne())});
diff --git a/jstests/update_invalid1.js b/jstests/update_invalid1.js
new file mode 100644
index 00000000000..7c94507f560
--- /dev/null
+++ b/jstests/update_invalid1.js
@@ -0,0 +1,6 @@
+
+t = db.update_invalid1
+t.drop()
+
+t.update( { _id : 5 } , { $set : { $inc : { x : 5 } } } , true );
+assert.eq( 0 , t.count() , "A1" );
diff --git a/jstests/update_min_max_examples.js b/jstests/update_min_max_examples.js
new file mode 100644
index 00000000000..ef84cff3635
--- /dev/null
+++ b/jstests/update_min_max_examples.js
@@ -0,0 +1,31 @@
+// Basic examples for $min/$max
+var coll = db.update_min_max;
+coll.drop();
+
+// $min for number
+coll.insert({_id:1, a:2});
+coll.update({_id:1}, {$min: {a: 1}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne({_id:1}).a, 1)
+
+// $max for number
+coll.insert({_id:2, a:2});
+coll.update({_id:2}, {$max: {a: 1}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne({_id:2}).a, 2)
+
+// $min for Date
+coll.insert({_id:3, a: new Date()});
+var origDoc = coll.findOne({_id:3})
+sleep(2)
+coll.update({_id:3}, {$min: {a: new Date()}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne({_id:3}).a, origDoc.a)
+
+// $max for Date
+coll.insert({_id:4, a: new Date()});
+sleep(2)
+var newDate = new Date();
+coll.update({_id:4}, {$max: {a: newDate}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne({_id:4}).a, newDate)
diff --git a/jstests/update_mul_examples.js b/jstests/update_mul_examples.js
new file mode 100644
index 00000000000..a57fa0a3380
--- /dev/null
+++ b/jstests/update_mul_examples.js
@@ -0,0 +1,24 @@
+// Basic examples for $mul (multiply)
+var coll = db.update_mul;
+coll.drop();
+
+// $mul positive
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$mul: {a: 10}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, 20)
+
+// $mul negative
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$mul: {a: -10}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, -20)
+
+// $mul zero
+coll.remove({})
+coll.save({_id:1, a:2});
+coll.update({}, {$mul: {a: 0}})
+assert.gleSuccess(coll.getDB())
+assert.eq(coll.findOne().a, 0)
diff --git a/jstests/update_multi3.js b/jstests/update_multi3.js
new file mode 100644
index 00000000000..903d8265b63
--- /dev/null
+++ b/jstests/update_multi3.js
@@ -0,0 +1,25 @@
+
+t = db.update_multi3;
+
+function test( useIndex ){
+ t.drop();
+
+ if ( useIndex )
+ t.ensureIndex({k:1})
+
+ for (i=0; i<10; i++) {
+ t.save({ _id : i , k: 'x', a: []});
+ }
+
+ t.update({k: 'x'}, {$push: {a: 'y'}}, false, true);
+
+ t.find( { k : "x" } ).forEach(
+ function(z){
+ assert.eq( [ "y" ] , z.a , "useIndex: " + useIndex )
+ }
+ );
+
+}
+
+test( false )
+test( true )
diff --git a/jstests/update_multi4.js b/jstests/update_multi4.js
new file mode 100644
index 00000000000..e81a19a5feb
--- /dev/null
+++ b/jstests/update_multi4.js
@@ -0,0 +1,18 @@
+
+t = db.update_mulit4;
+t.drop();
+
+for(i=0;i<1000;i++){
+ t.insert( { _id:i ,
+ k:i%12,
+ v:"v"+i%12 } );
+}
+
+t.ensureIndex({k:1})
+
+assert.eq( 84 , t.count({k:2,v:"v2"} ) , "A0" );
+
+t.update({k:2},{$set:{v:"two v2"}},false,true)
+
+assert.eq( 0 , t.count({k:2,v:"v2"} ) , "A1" );
+assert.eq( 84 , t.count({k:2,v:"two v2"} ) , "A2" );
diff --git a/jstests/update_multi5.js b/jstests/update_multi5.js
new file mode 100644
index 00000000000..46ef8f36da5
--- /dev/null
+++ b/jstests/update_multi5.js
@@ -0,0 +1,17 @@
+
+t = db.update_multi5;
+
+t.drop()
+
+t.insert({path: 'r1', subscribers: [1,2]});
+t.insert({path: 'r2', subscribers: [3,4]});
+
+t.update({}, {$addToSet: {subscribers: 5}}, false, true);
+
+t.find().forEach(
+ function(z){
+ assert.eq( 3 , z.subscribers.length , z );
+ }
+);
+
+
diff --git a/jstests/update_multi6.js b/jstests/update_multi6.js
new file mode 100644
index 00000000000..dcc1ff04034
--- /dev/null
+++ b/jstests/update_multi6.js
@@ -0,0 +1,10 @@
+
+t = db.update_multi6
+t.drop();
+
+t.update( { _id : 1 } , { _id : 1 , x : 1 , y : 2 } , true , false );
+assert( t.findOne( { _id : 1 } ) , "A" )
+
+t.update( { _id : 2 } , { _id : 2 , x : 1 , y : 2 } , true , true );
+assert( db.getLastError() , "B: " + tojson(db.getLastErrorCmd()) );
+
diff --git a/jstests/update_replace.js b/jstests/update_replace.js
new file mode 100644
index 00000000000..0f9ef8fbe39
--- /dev/null
+++ b/jstests/update_replace.js
@@ -0,0 +1,50 @@
+// This test checks validation of the replaced doc (on the server) for dots, $prefix and _id
+
+// Create a new connection object so it won't affect the global connection when we modify
+// its settings.
+var conn = new Mongo(db.getMongo().host);
+t = conn.getDB(db.getName()).jstests_update_replace;
+t.drop();
+
+var myDB = t.getDB();
+
+// Bypass validation in shell so we can test the server.
+conn._skipValidation = true;
+
+// Should not allow "." in field names
+t.save({_id:1, "a.a":1})
+assert.gleError(myDB, "a.a");
+
+// Should not allow "." in field names, embedded
+t.save({_id:1, a :{"a.a":1}})
+assert.gleError(myDB, "a: a.a");
+
+// Should not allow "$"-prefixed field names, caught before "." check
+t.save({_id:1, $a :{"a.a":1}})
+assert.gleError(myDB, "$a: a.a");
+
+// Should not allow "$"-prefixed field names
+t.save({_id:1, $a: 1})
+assert.gleError(myDB, "$a");
+
+// _id validation checks
+
+// Should not allow regex _id
+t.save({_id: /a/})
+assert.gleError(myDB, "_id regex");
+
+// Should not allow regex _id, even if not first
+t.save({a:2, _id: /a/})
+assert.gleError(myDB, "a _id regex");
+
+// Should not allow array _id
+t.save({_id: [9]})
+assert.gleError(myDB, "_id array");
+
+// This is fine since _id isn't a top level field
+t.save({a :{ _id: [9]}})
+assert.gleSuccess(myDB, "embedded _id array");
+
+// This is fine since _id isn't a top level field
+t.save({b:1, a :{ _id: [9]}})
+assert.gleSuccess(myDB, "b embedded _id array");
diff --git a/jstests/update_setOnInsert.js b/jstests/update_setOnInsert.js
new file mode 100644
index 00000000000..be215ab408d
--- /dev/null
+++ b/jstests/update_setOnInsert.js
@@ -0,0 +1,47 @@
+// This tests that $setOnInsert works and allows setting the _id
+t = db.update_setOnInsert;
+
+db.setProfilingLevel( 2 );
+
+function getLastOp() {
+ var cursor = db.system.profile.find( { ns : t.getFullName() , op : "update" } );
+ cursor = cursor.sort( { $natural : -1 } ).limit(1);
+ return cursor[0];
+}
+
+function dotest( useIndex ) {
+ t.drop();
+ if ( useIndex ) {
+ t.ensureIndex( { a : 1 } );
+ }
+
+ t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
+ assert.docEq( { _id : 5, a: 3, x : 2 }, t.findOne() );
+
+ t.update( { _id: 5 }, { $set : { a : 4 } }, true );
+
+ t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
+ assert.docEq( { _id : 5, a: 4, x : 4 }, t.findOne() );
+
+ op = getLastOp();
+ assert( op.fastmod );
+}
+
+dotest( false );
+dotest( true );
+
+
+// Cases for SERVER-9958 -- Allow _id $setOnInsert during insert (if upsert:true, and no doc found)
+t.drop();
+
+t.update( {_id: 1} , { $setOnInsert: { "_id.a": new Date() } } , true );
+assert.gleError(db, function(gle) {
+ return "$setOnInsert _id.a - " + tojson(gle) + tojson(t.findOne()) } );
+
+t.update( {"_id.a": 4} , { $setOnInsert: { "_id.b": 1 } } , true );
+assert.gleError(db, function(gle) {
+ return "$setOnInsert _id.b - " + tojson(gle) + tojson(t.findOne()) } );
+
+t.update( {"_id.a": 4} , { $setOnInsert: { "_id": {a:4, b:1} } } , true );
+assert.gleError(db, function(gle) {
+ return "$setOnInsert _id 3 - " + tojson(gle) + tojson(t.findOne()) } );
diff --git a/jstests/updatea.js b/jstests/updatea.js
new file mode 100644
index 00000000000..40b900d0c9d
--- /dev/null
+++ b/jstests/updatea.js
@@ -0,0 +1,67 @@
+
+t = db.updatea;
+t.drop();
+
+orig = { _id : 1 , a : [ { x : 1 , y : 2 } , { x : 10 , y : 11 } ] }
+
+t.save( orig )
+assert.gleSuccess(db, "orig");
+
+// SERVER-181
+t.update( {} , { $set : { "a.0.x" : 3 } } )
+assert.gleSuccess(db, "a.0.x");
+orig.a[0].x = 3;
+assert.eq( orig , t.findOne() , "A1" );
+
+t.update( {} , { $set : { "a.1.z" : 17 } } )
+assert.gleSuccess(db, "a.1.z");
+orig.a[1].z = 17;
+assert.eq( orig , t.findOne() , "A2" );
+
+// SERVER-273
+t.update( {} , { $unset : { "a.1.y" : 1 } } )
+assert.gleSuccess(db, "a.1.y");
+delete orig.a[1].y
+assert.eq( orig , t.findOne() , "A3" );
+
+// SERVER-333
+t.drop();
+orig = { _id : 1 , comments : [ { name : "blah" , rate_up : 0 , rate_ups : [] } ] }
+t.save( orig );
+assert.gleSuccess(db, "save");
+
+
+t.update( {} , { $inc: { "comments.0.rate_up" : 1 } , $push: { "comments.0.rate_ups" : 99 } } )
+assert.gleSuccess(db, "comments.0.rate_up");
+orig.comments[0].rate_up++;
+orig.comments[0].rate_ups.push( 99 )
+assert.eq( orig , t.findOne() , "B1" )
+
+t.drop();
+orig = { _id : 1 , a : [] }
+for ( i=0; i<12; i++ )
+ orig.a.push( i );
+
+
+t.save( orig );
+assert.gleSuccess(db, "C1");
+assert.eq( orig , t.findOne() , "C1" );
+
+t.update( {} , { $inc: { "a.0" : 1 } } );
+assert.gleSuccess(db, "C2");
+orig.a[0]++;
+assert.eq( orig , t.findOne() , "C2" );
+
+t.update( {} , { $inc: { "a.10" : 1 } } );
+assert.gleSuccess(db, "a.10");
+orig.a[10]++;
+
+
+// SERVER-3218
+t.drop()
+t.insert({"a":{"c00":1}, 'c':2})
+t.update({"c":2}, {'$inc':{'a.c000':1}})
+assert.gleSuccess(db, "D1");
+
+assert.eq( { "c00" : 1 , "c000" : 1 } , t.findOne().a , "D1" )
+
diff --git a/jstests/updateb.js b/jstests/updateb.js
new file mode 100644
index 00000000000..d85e19a36bc
--- /dev/null
+++ b/jstests/updateb.js
@@ -0,0 +1,11 @@
+
+t = db.updateb;
+t.drop();
+
+t.update( { "x.y" : 2 } , { $inc : { a : 7 } } , true );
+
+correct = { a : 7 , x : { y : 2 } };
+got = t.findOne();
+delete got._id;
+assert.docEq( correct , got , "A" )
+
diff --git a/jstests/updatec.js b/jstests/updatec.js
new file mode 100644
index 00000000000..0c77b8b3cda
--- /dev/null
+++ b/jstests/updatec.js
@@ -0,0 +1,14 @@
+
+t = db.updatec;
+t.drop();
+
+t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
+t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
+
+assert.docEq(
+ {
+ "_id" : 123,
+ "f" : [ 234, 234 ] ,
+ "v" : { "i" : 123, "a" : 456 }
+ } , t.findOne() );
+
diff --git a/jstests/updated.js b/jstests/updated.js
new file mode 100644
index 00000000000..c202e8d435f
--- /dev/null
+++ b/jstests/updated.js
@@ -0,0 +1,20 @@
+
+t = db.updated;
+t.drop()
+
+o = { _id : Math.random() ,
+ items:[null,null,null,null]
+ };
+
+t.insert( o );
+assert.docEq( o , t.findOne() , "A1" );
+
+o.items[0] = {amount:9000,itemId:1};
+t.update({},{$set:{"items.0":o.items[0]}});
+assert.docEq( o , t.findOne() , "A2" );
+
+o.items[0].amount += 1000;
+o.items[1] = {amount:1,itemId:2};
+t.update({},{$inc:{"items.0.amount":1000},$set:{"items.1":o.items[1]}});
+assert.docEq( o , t.findOne() , "A3" );
+
diff --git a/jstests/updatee.js b/jstests/updatee.js
new file mode 100644
index 00000000000..85ba37c5c05
--- /dev/null
+++ b/jstests/updatee.js
@@ -0,0 +1,71 @@
+// big numeric updates (used to overflow)
+
+t = db.updatee;
+t.drop();
+
+var o = { "_id" : 1,
+ "actual" : {
+ "key1" : "val1",
+ "key2" : "val2",
+ "001" : "val3",
+ "002" : "val4",
+ "0020000000000000000000" : "val5"
+ },
+ "profile-id" : "test" };
+
+
+t.insert( o );
+assert.eq( o , t.findOne() , "A1" );
+
+t.update({"profile-id" : "test"}, {$set: {"actual.0030000000000000000000": "val6"}});
+
+var q = t.findOne();
+
+// server-1347
+assert.eq(q.actual["0020000000000000000000"], "val5", "A2");
+assert.eq(q.actual["0030000000000000000000"], "val6", "A3");
+
+t.update({"profile-id" : "test"}, {$set: {"actual.02": "v4"}});
+
+q = t.findOne();
+assert.eq(q.actual["02"], "v4", "A4");
+assert.eq(q.actual["002"], "val4", "A5");
+
+t.update({"_id" : 1}, {$set : {"actual.2139043290148390248219423941.b" : 4}});
+q = t.findOne();
+assert.eq(q.actual["2139043290148390248219423941"].b, 4, "A6");
+
+// non-nested
+t.update({"_id" : 1}, {$set : {"7213647182934612837492342341" : 1}});
+t.update({"_id" : 1}, {$set : {"7213647182934612837492342342" : 2}});
+
+q = t.findOne();
+assert.eq(q["7213647182934612837492342341"], 1, "A7 1");
+assert.eq(q["7213647182934612837492342342"], 2, "A7 2");
+
+// 0s
+t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
+q = t.findOne();
+assert.eq(q.actual["000"], "val000", "A8 zeros");
+
+t.update({"_id" : 1}, {$set : {"actual.00" : "val00"}});
+q = t.findOne();
+assert.eq(q.actual["00"], "val00", "A8 00");
+assert.eq(q.actual["000"], "val000", "A9");
+
+t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
+q = t.findOne();
+assert.eq(q.actual["000"], "val000", "A9");
+assert.eq(q.actual["00"], "val00", "A10");
+
+t.update({"_id" : 1}, {$set : {"actual.01" : "val01"}});
+q = t.findOne();
+assert.eq(q.actual["000"], "val000", "A11");
+assert.eq(q.actual["01"], "val01", "A12");
+
+// shouldn't work, but shouldn't do anything too heinous, either
+t.update({"_id" : 1}, {$set : {"0.." : "val01"}});
+t.update({"_id" : 1}, {$set : {"0..0" : "val01"}});
+t.update({"_id" : 1}, {$set : {".0" : "val01"}});
+t.update({"_id" : 1}, {$set : {"..0" : "val01"}});
+t.update({"_id" : 1}, {$set : {"0.0..0" : "val01"}});
diff --git a/jstests/updatef.js b/jstests/updatef.js
new file mode 100644
index 00000000000..69425932f19
--- /dev/null
+++ b/jstests/updatef.js
@@ -0,0 +1,24 @@
+// Test unsafe management of nsdt (NamespaceDetailsTransient) during update command yield. SERVER-3208
+
+prefixNS = db.jstests_updatef;
+prefixNS.save( {} );
+
+t = db.jstests_updatef_actual;
+t.drop();
+
+t.save( {a:0,b:[]} );
+for( i = 0; i < 1000; ++i ) {
+ t.save( {a:100} );
+}
+t.save( {a:0,b:[]} );
+
+db.getLastError();
+// Repeatedly rename jstests_updatef to jstests_updatef_ and back. This will
+// invalidate the jstests_updatef_actual NamespaceDetailsTransient object.
+s = startParallelShell( "for( i=0; i < 100; ++i ) { db.jstests_updatef.renameCollection( 'jstests_updatef_' ); db.jstests_updatef_.renameCollection( 'jstests_updatef' ); }" );
+
+for( i=0; i < 20; ++i ) {
+ t.update( {a:0}, {$push:{b:i}}, false, true );
+}
+
+s();
diff --git a/jstests/updateg.js b/jstests/updateg.js
new file mode 100644
index 00000000000..f8d452f71b2
--- /dev/null
+++ b/jstests/updateg.js
@@ -0,0 +1,17 @@
+// SERVER-3370 check modifiers with field name characters that compare less than the '.' character.
+
+t = db.jstests_updateg;
+
+t.drop();
+t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }}, true);
+assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
+
+t.drop();
+t.save({ 'all' : {}, 'all-copy' : {}});
+t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }});
+assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
+
+t.drop();
+t.save({ 'all11' : {}, 'all2' : {}});
+t.update({}, { '$inc' : { 'all11.t' : 1, 'all2.t' : 1 }});
+assert.eq( 1, t.count( {all11:{t:1},'all2':{t:1}} ) );
diff --git a/jstests/updateh.js b/jstests/updateh.js
new file mode 100644
index 00000000000..2a39f6a0975
--- /dev/null
+++ b/jstests/updateh.js
@@ -0,0 +1,39 @@
+// Disallow $ in field names - SERVER-3730
+
+t = db.jstest_updateh
+t.drop()
+
+t.insert( {x:1} )
+
+t.update( {x:1}, {$set: {y:1}} ) // ok
+e = db.getLastErrorObj()
+assert.eq( e.err, null )
+
+t.update( {x:1}, {$set: {$z:1}} ) // not ok
+e = db.getLastErrorObj()
+assert( e.err != null )
+
+// TODO: This shouldn't be supported, and it isn't with the new update framework, but we
+// currently don't have a good way to check which mode we are in. When we do have that, add
+// this test guarded under that condition. Or, when we remove the old update path just enable
+// this test.
+//
+// t.update( {x:1}, {$set: {'a.$b':1}} ) // not ok
+// e = db.getLastErrorObj()
+// assert( e.err != null )
+
+t.update( {x:1}, {$unset: {$z:1}} ) // unset ok to remove bad fields
+e = db.getLastErrorObj()
+assert.eq( e.err, null )
+
+t.update( {x:1}, {$inc: {$z:1}} ) // not ok
+e = db.getLastErrorObj()
+assert( e.err != null )
+
+t.update( {x:1}, {$pushAll: {$z:[1,2,3]}} ) // not ok
+e = db.getLastErrorObj()
+assert( e.err != null )
+
+t.update( {x:1}, {$pushAll: {z:[1,2,3]}} ) // ok
+e = db.getLastErrorObj()
+assert.eq( e.err, null )
diff --git a/jstests/updatei.js b/jstests/updatei.js
new file mode 100644
index 00000000000..e45b3fde5bb
--- /dev/null
+++ b/jstests/updatei.js
@@ -0,0 +1,86 @@
+// Test new (optional) update syntax
+// SERVER-4176
+t = db.updatei;
+
+// Using a multi update
+
+t.drop();
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "x" }, { $push: { a: "y" }}, { multi: true });
+t.find({ k : "x" }).forEach(function(z) {
+ assert.eq([ "y" ], z.a, "multi update using object arg");
+});
+
+t.drop();
+
+// Using a single update
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "x" }, { $push: { a: "y" }}, { multi: false });
+assert.eq(1, t.find({ "a": "y" }).count(), "update using object arg");
+
+t.drop();
+
+// Using upsert, found
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true });
+assert.eq(1, t.find({ "k": "x", "a": "y" }).count(), "upsert (found) using object arg");
+
+t.drop();
+
+// Using upsert + multi, found
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true, multi: true });
+t.find({ k : "x" }).forEach(function(z) {
+ assert.eq([ "y" ], z.a, "multi + upsert (found) using object arg");
+});
+
+t.drop();
+
+// Using upsert, not found
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: true });
+assert.eq(1, t.find({ "k": "y", "a": "y" }).count(), "upsert (not found) using object arg");
+
+t.drop();
+
+// Without upsert, found
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: false });
+assert.eq(1, t.find({ "a": "y" }).count(), "no upsert (found) using object arg");
+
+t.drop();
+
+// Without upsert, not found
+
+for (i=0; i<10; i++) {
+ t.save({ _id : i, k: "x", a: [] });
+}
+
+t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: false });
+assert.eq(0, t.find({ "a": "y" }).count(), "no upsert (not found) using object arg");
+
+t.drop();
diff --git a/jstests/updatej.js b/jstests/updatej.js
new file mode 100644
index 00000000000..6a70a4c2d51
--- /dev/null
+++ b/jstests/updatej.js
@@ -0,0 +1,12 @@
+// Test that update validation failure terminates the update without modifying subsequent
+// documents. SERVER-4779
+
+t = db.jstests_updatej;
+t.drop();
+
+t.save( {a:[]} );
+t.save( {a:1} );
+t.save( {a:[]} );
+
+t.update( {}, {$push:{a:2}}, false, true );
+assert.eq( 1, t.count( {a:2} ) );
diff --git a/jstests/updatek.js b/jstests/updatek.js
new file mode 100644
index 00000000000..b96f3138a81
--- /dev/null
+++ b/jstests/updatek.js
@@ -0,0 +1,14 @@
+// Test modifier operations on numerically equivalent string field names. SERVER-4776
+
+t = db.jstests_updatek;
+
+t.drop();
+t.save( { _id:0, '1':{}, '01':{} } );
+t.update( {}, { $set:{ '1.b':1, '1.c':2 } } );
+assert.docEq( { "01" : { }, "1" : { "b" : 1, "c" : 2 }, "_id" : 0 }, t.findOne() );
+
+t.drop();
+t.save( { _id:0, '1':{}, '01':{} } );
+t.update( {}, { $set:{ '1.b':1, '01.c':2 } } );
+assert.docEq( { "01" : { "c" : 2 }, "1" : { "b" : 1 }, "_id" : 0 }, t.findOne() );
+
diff --git a/jstests/updatel.js b/jstests/updatel.js
new file mode 100644
index 00000000000..be4b95cf99f
--- /dev/null
+++ b/jstests/updatel.js
@@ -0,0 +1,48 @@
+// The positional operator allows an update modifier field path to contain a sentinel ('$') path
+// part that is replaced with the numeric position of an array element matched by the update's query
+// spec. <http://dochub.mongodb.org/core/positionaloperator>
+
+// If no array element position from a query is available to substitute for the positional operator
+// setinel ('$'), the update fails with an error. SERVER-6669 SERVER-4713
+
+t = db.jstests_updatel;
+t.drop();
+
+
+
+// The collection is empty, forcing an upsert. In this case the query has no array position match
+// to substiture for the positional operator. SERVER-4713
+t.update( {}, { $set:{ 'a.$.b':1 } }, true );
+assert( db.getLastError(), "An error is reported." );
+assert.eq( 0, t.count(), "No upsert occurred." );
+
+
+
+// Save a document to the collection so it is no longer empty.
+t.save( { _id:0 } );
+
+// Now, with an existing document, trigger an update rather than an upsert. The query has no array
+// position match to substiture for the positional operator. SERVER-6669
+t.update( {}, { $set:{ 'a.$.b':1 } } );
+assert( db.getLastError(), "An error is reported." );
+assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
+
+
+
+// Now, try with an update by _id (without a query array match).
+t.update( { _id:0 }, { $set:{ 'a.$.b':1 } } );
+assert( db.getLastError(), "An error is reported." );
+assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
+
+
+
+// Seed the collection with a document suitable for the following check.
+t.remove({});
+t.save( { _id:0, a:[ { b:{ c:1 } } ] } );
+
+// Now, attempt to apply an update with two nested positional operators. There is a positional
+// query match for the first positional operator but not the second. Note that dollar sign
+// substitution for multiple positional opertors is not implemented (SERVER-831).
+t.update( { 'a.b.c':1 }, { $set:{ 'a.$.b.$.c':2 } } );
+assert( db.getLastError(), "An error is reported" );
+assert.eq( [ { _id:0, a:[ { b:{ c:1 } } ] } ], t.find().toArray(), "No update occurred." );
diff --git a/jstests/updatem.js b/jstests/updatem.js
new file mode 100644
index 00000000000..3d46d2a15f3
--- /dev/null
+++ b/jstests/updatem.js
@@ -0,0 +1,20 @@
+// Tests that _id will exist in all updated docs.
+
+t = db.jstests_updatem;
+t.drop();
+
+// new _id from insert (upsert:true)
+t.update({a:1}, {$inc:{b:1}}, true)
+var doc = t.findOne({a:1});
+assert(doc["_id"], "missing _id")
+
+// new _id from insert (upsert:true)
+t.update({a:1}, {$inc:{b:1}}, true)
+var doc = t.findOne({a:1});
+assert(doc["_id"], "missing _id")
+
+// no _id on existing doc
+t.getDB().runCommand({godinsert:t.getName(), obj:{a:2}})
+t.update({a:2}, {$inc:{b:1}}, true)
+var doc = t.findOne({a:2});
+assert(doc["_id"], "missing _id after update")
diff --git a/jstests/upsert1.js b/jstests/upsert1.js
new file mode 100644
index 00000000000..21f24ae8281
--- /dev/null
+++ b/jstests/upsert1.js
@@ -0,0 +1,59 @@
+// tests to make sure that the new _id is returned after the insert
+t = db.upsert1;
+t.drop();
+
+// make sure the new _id is returned when $mods are used
+t.update( { x : 1 } , { $inc : { y : 1 } } , true );
+l = db.getLastErrorCmd();
+assert( l.upserted , "A1 - " + tojson(l) );
+assert.eq( l.upserted.str , t.findOne()._id.str , "A2" );
+
+// make sure the new _id is returned on a replacement (no $mod in update)
+t.update( { x : 2 } , { x : 2 , y : 3 } , true );
+l = db.getLastErrorCmd();
+assert( l.upserted , "B1 - " + tojson(l) );
+assert.eq( l.upserted.str , t.findOne( { x : 2 } )._id.str , "B2" );
+assert.eq( 2 , t.find().count() , "B3" );
+
+// use the _id from the query for the insert
+t.update({_id:3}, {$set: {a:'123'}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "C1 - " + tojson(l) );
+assert.eq( l.upserted , 3 , "C2 - " + tojson(l) );
+
+// test with an embedded doc for the _id field
+t.update({_id:{a:1}}, {$set: {a:123}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "D1 - " + tojson(l) );
+assert.eq( l.upserted , {a:1} , "D2 - " + tojson(l) );
+
+// test with a range query
+t.update({_id: {$gt:100}}, {$set: {a:123}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "E1 - " + tojson(l) );
+assert.neq( l.upserted , 100 , "E2 - " + tojson(l) );
+
+// test with an _id query
+t.update({_id: 1233}, {$set: {a:123}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "F1 - " + tojson(l) );
+assert.eq( l.upserted , 1233 , "F2 - " + tojson(l) );
+
+// test with an embedded _id query
+t.update({_id: {a:1, b:2}}, {$set: {a:123}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "G1 - " + tojson(l) );
+assert.eq( l.upserted , {a:1, b:2} , "G2 - " + tojson(l) );
+
+// test with no _id inserted
+db.no_id.drop();
+db.createCollection("no_id", {autoIndexId:false})
+db.no_id.update({foo:1}, {$set:{a:1}}, true)
+l = db.getLastErrorCmd();
+assert( l.upserted , "H1 - " + tojson(l) );
+assert( !l.err, "H1.5 No error expected - " + tojson(l) )
+assert.eq( 0, db.no_id.getIndexes().length, "H2" );
+assert.eq( 1, db.no_id.count(), "H3" );
+var newDoc = db.no_id.findOne();
+delete newDoc["_id"];
+assert.eq( { foo : 1, a : 1 }, newDoc, "H4" );
diff --git a/jstests/upsert2.js b/jstests/upsert2.js
new file mode 100644
index 00000000000..7184ed807d1
--- /dev/null
+++ b/jstests/upsert2.js
@@ -0,0 +1,20 @@
+// A query field with a $not operator should be excluded when constructing the object to which mods
+// will be applied when performing an upsert. SERVER-8178
+
+t = db.jstests_upsert2;
+
+// The a:$not query operator does not cause an 'a' field to be added to the upsert document.
+t.drop();
+t.update( { a:{ $not:{ $lt:1 } } }, { $set:{ b:1 } }, true );
+assert( !t.findOne().a );
+
+// The a:$not query operator does not cause an 'a' field to be added to the upsert document.
+t.drop();
+t.update( { a:{ $not:{ $elemMatch:{ a:1 } } } }, { $set:{ b:1 } }, true );
+assert( !t.findOne().a );
+
+// The a:$not query operator does not cause an 'a' field to be added to the upsert document, and as
+// a result $push can be applied to the (missing) 'a' field.
+t.drop();
+t.update( { a:{ $not:{ $elemMatch:{ a:1 } } } }, { $push:{ a:{ b:1, c:0 } } }, true );
+assert.eq( [ { b:1, c:0 } ], t.findOne().a );
diff --git a/jstests/upsert3.js b/jstests/upsert3.js
new file mode 100644
index 00000000000..34e37bde33d
--- /dev/null
+++ b/jstests/upsert3.js
@@ -0,0 +1,60 @@
+// tests to make sure no dup fields are created when using query to do upsert
+t = db.upsert3;
+t.drop();
+
+//make sure we validate query
+t.update( {a: {"a.a": 1}} , {$inc: {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "a.a.a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+t.update( {a: {$a: 1}} , {$inc: {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "a.$a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+// make sure the new _id is not duplicated
+t.update( {"a.b": 1, a: {a: 1, b: 1}} , {$inc: {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "a.b-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+t.update( {"_id.a": 1, _id: {a: 1, b: 1}} , {$inc : {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "_id-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+t.update( {_id: {a: 1, b: 1}, "_id.a": 1} , { $inc: {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "_id-2 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+// Should be redundant, but including from SERVER-11363
+t.update( {_id: {a: 1, b: 1}, "_id.a": 1} , {$setOnInsert: {y: 1}} , true );
+assert.gleError(db, function(gle) {
+ return "_id-3 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+//Should be redundant, but including from SERVER-11514
+t.update( {"a": {}, "a.c": 2} , {$set: {x: 1}}, true );
+assert.gleError(db, function(gle) {
+ return "a.c-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+// Should be redundant, but including from SERVER-4830
+t.update( {'a': {b: 1}, 'a.c': 1}, {$inc: {z: 1}}, true );
+assert.gleError(db, function(gle) {
+ return "a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+// Should be redundant, but including from SERVER-4830
+t.update( {a: 1, "a.b": 1, a: [1, {b: 1}]}, {$inc: {z: 1}}, true );
+assert.gleError(db, function(gle) {
+ return "a-2 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
+
+// Replacement tests
+// Query is ignored for replacements, except _id field.
+t.update( {r: {a: 1, b: 1}, "r.a": 1} , {y: 1} , true );
+assert.gleSuccess(db, "r-1");
+assert(t.findOne().y, 1, "inserted doc missing field")
+var docMinusId = t.findOne();
+delete docMinusId._id
+assert.docEq({y: 1}, docMinusId, "r-1")
+t.drop()
+
+t.update( {_id: {a:1, b:1}, "_id.a": 1} , {y: 1} , true );
+assert.gleSuccess(db, "_id-4");
+assert.docEq({_id: {a: 1, b: 1}, y: 1}, t.findOne(), "_id-4")
+t.drop() \ No newline at end of file
diff --git a/jstests/upsert4.js b/jstests/upsert4.js
new file mode 100644
index 00000000000..cbf7f2646f3
--- /dev/null
+++ b/jstests/upsert4.js
@@ -0,0 +1,36 @@
+// tests to ensure fields in $and conditions are created when using the query to do upsert
+coll = db.upsert4;
+coll.drop();
+
+coll.update({_id: 1, $and: [{c: 1}, {d: 1}], a: 12} , {$inc: {y: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, a: 12, y: 1})
+
+coll.remove({})
+coll.update({$and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
+
+coll.remove({})
+coll.update({$and: [{c: 1}, {d: 1}, {$or: [{x:1}]}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, x:1})
+
+coll.remove({})
+coll.update({$and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
+
+coll.remove({})
+coll.update({r: {$gt: 3}, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
+
+coll.remove({})
+coll.update({r: /s/, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleSuccess(db, "");
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
+
+coll.remove({})
+coll.update({c:2, $and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
+assert.gleError(db, "");
diff --git a/jstests/use_power_of_2.js b/jstests/use_power_of_2.js
new file mode 100644
index 00000000000..3200c937452
--- /dev/null
+++ b/jstests/use_power_of_2.js
@@ -0,0 +1,86 @@
+/* This test ensures that the usePowerOf2 user flag
+ * effectively reuses space. The test repeatedly inserts and
+ * then deletes a batch of variable-length strings, then checks
+ * that doing so does not cause the storageSize to grow. */
+
+// A bunch of strings of length 0 to 100
+var var_length_strings =
+ [ "" ,
+ "aaaaa" ,
+ "aaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ]
+
+//insert all the strings
+var batch_insert = function(coll){
+ for ( i=0; i < var_length_strings.length; i++ ){
+ coll.insert( { a : var_length_strings[i] } );
+ }
+}
+
+//delete the same strings
+var batch_delete = function(coll){
+ for ( i=0; i < var_length_strings.length; i++ ){
+ coll.remove( { a : var_length_strings[i] } );
+ }
+}
+
+//number of times to repeat batch inserts/deletes
+var numrepeats = 1000;
+
+var testStorageSize = function(ns){
+ //insert docs and measure storage size
+ batch_insert(ns);
+ var oldSize = ns.stats().storageSize;
+
+ //remove and add same docs a bunch of times
+ for ( n=0 ; n < numrepeats ; n++ ){
+ batch_delete(ns);
+ batch_insert(ns);
+ }
+
+ //check size didn't change
+ var newSize = ns.stats().storageSize;
+ assert.eq( oldSize , newSize , "storage size changed");
+}
+
+/****************** TEST 1 *****************************/
+
+//create fresh collection, set flag to true, test storage size
+var coll = "usepower1"
+var t = db.getCollection(coll);
+t.drop();
+db.createCollection(coll);
+var res = db.runCommand( { "collMod" : coll , "usePowerOf2Sizes" : true } );
+assert.eq( res.ok , 1 , "collMod failed" );
+
+res = db.runCommand( { "collMod" : coll , "usePowerOf2Sizess" : true } )
+assert.eq( res.ok , 0 , "collMod should have failed: " + tojson( res ) )
+
+testStorageSize(t);
+
+/**************** Test 2 *****************************/
+
+//repeat previous test, but with flag set at creation time
+var coll = "usepower2"
+var t = db.getCollection(coll);
+t.drop();
+db.runCommand({"create" : coll, "flags" : 1 });
+
+testStorageSize(t);
diff --git a/jstests/useindexonobjgtlt.js b/jstests/useindexonobjgtlt.js
new file mode 100755
index 00000000000..06e94a812f6
--- /dev/null
+++ b/jstests/useindexonobjgtlt.js
@@ -0,0 +1,15 @@
+t = db.factories
+t.drop()
+t.insert( { name: "xyz", metro: { city: "New York", state: "NY" } } )
+t.ensureIndex( { metro : 1 } )
+
+assert( db.factories.find().count() )
+
+assert( db.factories.find( { metro: { city: "New York", state: "NY" } } ).count() )
+
+assert( db.factories.find( { metro: { city: "New York", state: "NY" } } ).explain().cursor == "BtreeCursor metro_1" )
+
+assert( db.factories.find( { metro: { $gte : { city: "New York" } } } ).explain().cursor == "BtreeCursor metro_1" )
+
+assert( db.factories.find( { metro: { $gte : { city: "New York" } } } ).count() == 1 )
+
diff --git a/jstests/user_management_helpers.js b/jstests/user_management_helpers.js
new file mode 100644
index 00000000000..50707f584ab
--- /dev/null
+++ b/jstests/user_management_helpers.js
@@ -0,0 +1,94 @@
+// This test is a basic sanity check of the shell helpers for manipulating user objects
+// It is not a comprehensive test of the functionality of the user manipulation commands
+function assertHasRole(rolesArray, roleName, roleDB) {
+ for (i in rolesArray) {
+ var curRole = rolesArray[i];
+ if (curRole.role == roleName && curRole.db == roleDB) {
+ return;
+ }
+ }
+ assert(false, "role " + roleName + "@" + roleDB + " not found in array: " + tojson(rolesArray));
+}
+
+
+(function(db) {
+ var db = db.getSiblingDB("user_management_helpers");
+ db.dropDatabase();
+ db.dropAllUsers();
+
+ db.createUser({user: "spencer", pwd: "password", roles: ['readWrite']});
+ db.createUser({user: "andy", pwd: "password", roles: ['readWrite']});
+
+ // Test getUser
+ var userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Test getUsers
+ var users = db.getUsers();
+ assert.eq(2, users.length);
+ assert(users[0].user == 'spencer' || users[1].user == 'spencer');
+ assert(users[0].user == 'andy' || users[1].user == 'andy');
+ assert.eq(1, users[0].roles.length);
+ assert.eq(1, users[1].roles.length);
+ assertHasRole(users[0].roles, "readWrite", db.getName());
+ assertHasRole(users[1].roles, "readWrite", db.getName());
+
+ // Granting roles to nonexistent user fails
+ assert.throws(function() { db.grantRolesToUser("fakeUser", ['dbAdmin']); });
+ // Granting non-existant role fails
+ assert.throws(function() { db.grantRolesToUser("spencer", ['dbAdmin', 'fakeRole']); });
+
+ userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Granting a role you already have is no problem
+ db.grantRolesToUser("spencer", ['readWrite', 'dbAdmin']);
+ userObj = db.getUser('spencer');
+ assert.eq(2, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+ assertHasRole(userObj.roles, "dbAdmin", db.getName());
+
+ // Revoking roles the user doesn't have is fine
+ db.revokeRolesFromUser("spencer", ['dbAdmin', 'read']);
+ userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Update user
+ db.updateUser("spencer", {customData: {hello: 'world'}, roles:['read']});
+ userObj = db.getUser('spencer');
+ assert.eq('world', userObj.customData.hello);
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "read", db.getName());
+
+ // Test dropUser
+ db.dropUser('andy');
+ assert.throws(function() {printjson(db.getUser('andy'));});
+
+ // Test dropAllUsers
+ db.dropAllUsers()
+ assert.eq(0, db.getUsers().length);
+
+ // Test password digestion
+ assert.throws(function() {
+ db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: true});});
+ assert.throws(function() {
+ db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: false});});
+ assert.throws(function() {
+ db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor: 'foo'});});
+ db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor:"server"});
+ db.createUser({user:'user2', pwd:'x', roles:[], passwordDigestor:"client"});
+ assert(db.auth('user1', 'x'));
+ assert(db.auth('user2', 'x'));
+
+ assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: true});});
+ assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: false});});
+ assert.throws(function() { db.updateUser('user1', {pwd:'y', passwordDigestor: 'foo'});});
+ db.updateUser('user1', {pwd:'y', passwordDigestor: 'server'});
+ db.updateUser('user2', {pwd:'y', passwordDigestor: 'client'});
+ assert(db.auth('user1', 'y'));
+ assert(db.auth('user2', 'y'));
+
+}(db)); \ No newline at end of file
diff --git a/jstests/validate_cmd_ns.js b/jstests/validate_cmd_ns.js
new file mode 100644
index 00000000000..b13a0d98159
--- /dev/null
+++ b/jstests/validate_cmd_ns.js
@@ -0,0 +1,25 @@
+/**
+ * Tests that query against the $cmd namespace will error out when the request has
+ * a number to return value other than 1 or -1. Note that users cannot have
+ * collections named $cmd since $ is an illegal character.
+ */
+
+// Note: _exec gives you the raw response from the server.
+var res = db.$cmd.find({ whatsmyuri: 1 })._exec().next();
+assert(res.$err != null);
+assert(res.$err.indexOf('bad numberToReturn') > -1);
+
+res = db.$cmd.find({ whatsmyuri: 1 }).limit(0)._exec().next();
+assert(res.$err != null);
+assert(res.$err.indexOf('bad numberToReturn') > -1);
+
+res = db.$cmd.find({ whatsmyuri: 1 }).limit(-2)._exec().next();
+assert(res.$err != null);
+assert(res.$err.indexOf('bad numberToReturn') > -1);
+
+var res = db.$cmd.find({ whatsmyuri: 1 }).limit(1).next();
+assert(res.ok);
+
+res = db.$cmd.find({ whatsmyuri: 1 }).limit(-1).next();
+assert(res.ok);
+
diff --git a/jstests/validate_user_documents.js b/jstests/validate_user_documents.js
new file mode 100644
index 00000000000..825e1e7de11
--- /dev/null
+++ b/jstests/validate_user_documents.js
@@ -0,0 +1,65 @@
+// Ensure that inserts and updates of the system.users collection validate the schema of inserted
+// documents.
+
+mydb = db.getSisterDB( "validate_user_documents" );
+
+function assertGLEOK(status) {
+ assert(status.ok && status.err === null,
+ "Expected OK status object; found " + tojson(status));
+}
+
+function assertGLENotOK(status) {
+ assert(status.ok && status.err !== null,
+ "Expected not-OK status object; found " + tojson(status));
+}
+
+mydb.dropDatabase();
+mydb.dropAllUsers();
+
+//
+// Tests of the insert path
+//
+
+// V0 user document document; insert should fail.
+assert.commandFailed(mydb.runCommand({ createUser:1,
+ user: "spencer",
+ pwd: "password",
+ readOnly: true }));
+
+// V1 user document; insert should fail.
+assert.commandFailed(mydb.runCommand({ createUser:1,
+ user: "spencer",
+ userSource: "test2",
+ roles: ["dbAdmin"] }));
+
+// Valid V2 user document; insert should succeed.
+assert.commandWorked(mydb.runCommand({ createUser: "spencer",
+ pwd: "password",
+ roles: ["dbAdmin"] }));
+
+// Valid V2 user document; insert should succeed.
+assert.commandWorked(mydb.runCommand({ createUser: "andy",
+ pwd: "password",
+ roles: [{role: "dbAdmin",
+ db: "validate_user_documents",
+ hasRole: true,
+ canDelegate: false}] }));
+
+// Non-existent role; insert should fail
+assert.commandFailed(mydb.runCommand({ createUser: "bob",
+ pwd: "password",
+ roles: ["fakeRole123"] }));
+
+//
+// Tests of the update path
+//
+
+// Update a document in a legal way, expect success.
+assert.commandWorked(mydb.runCommand({updateUser: 'spencer', roles: ['read']}));
+
+// Update a document in a way that is illegal, expect failure.
+assert.commandFailed(mydb.runCommand({updateUser: 'spencer', readOnly: true}));
+assert.commandFailed(mydb.runCommand({updateUser: 'spencer', pwd: ""}));
+assert.commandFailed(mydb.runCommand({updateUser: 'spencer', roles: ['fakeRole123']}));
+
+mydb.dropDatabase();
diff --git a/jstests/verify_update_mods.js b/jstests/verify_update_mods.js
new file mode 100644
index 00000000000..b31130ec6eb
--- /dev/null
+++ b/jstests/verify_update_mods.js
@@ -0,0 +1,82 @@
+// Verify update mods exist
+t = db.update_mods;
+t.drop();
+
+t.save({_id:1});
+t.update({}, {$set:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$unset:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$inc:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$mul:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$push:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$pushAll:{a:[1]}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$addToSet:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$pull:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$pop:{a:true}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$rename:{a:"b"}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$bit:{a:{and:NumberLong(1)}}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+// SERVER-3223 test $bit can do an upsert
+t.update({_id:1}, {$bit:{a:{and:NumberLong(3)}}}, true);
+assert.eq(t.findOne({_id:1}).a, NumberLong(0), "$bit upsert with and");
+t.update({_id:2}, {$bit:{b:{or:NumberLong(3)}}}, true);
+assert.eq(t.findOne({_id:2}).b, NumberLong(3), "$bit upsert with or (long)");
+t.update({_id:3}, {$bit:{"c.d":{or:NumberInt(3)}}}, true);
+assert.eq(t.findOne({_id:3}).c.d, NumberInt(3), "$bit upsert with or (int)");
+t.remove({});
+
+t.save({_id:1});
+t.update({}, {$currentDate:{a:true}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$max:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
+
+t.save({_id:1});
+t.update({}, {$min:{a:1}})
+assert.automsg( "!db.getLastError()" );
+t.remove({})
diff --git a/jstests/where1.js b/jstests/where1.js
new file mode 100644
index 00000000000..7ff20a53620
--- /dev/null
+++ b/jstests/where1.js
@@ -0,0 +1,28 @@
+
+t = db.getCollection( "where1" );
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 1 , t.find( function(){ return this.a == 2; } ).length() , "A" );
+
+assert.eq( 1 , t.find( { $where : "return this.a == 2" } ).toArray().length , "B" );
+assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "C" );
+
+assert.eq( 1 , t.find( "this.a == 2" ).toArray().length , "D" );
+
+// SERVER-12117
+// positional $ projection should fail on a $where query
+assert.throws( function() { t.find( { $where : "return this.a;" }, { 'a.$' : 1 } ).itcount(); } );
+
+// SERVER-12439: $where must be top-level
+assert.throws( function() { t.find( { a: 1, b: { $where : "this.a;" } } ).itcount(); } );
+assert.throws( function() { t.find( { a: { $where : "this.a;" } } ).itcount(); } );
+assert.throws( function() {
+ t.find( { a: { $elemMatch : { $where : "this.a;" } } } ).itcount();
+} );
+assert.throws( function() {
+ t.find( { a: 3, "b.c": { $where : "this.a;" } } ).itcount();
+} );
diff --git a/jstests/where2.js b/jstests/where2.js
new file mode 100644
index 00000000000..9262b3076b3
--- /dev/null
+++ b/jstests/where2.js
@@ -0,0 +1,10 @@
+
+t = db.getCollection( "where2" );
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "A" );
+assert.eq( 1 , t.find( { $where : "\nthis.a == 2" } ).toArray().length , "B" );
diff --git a/jstests/where3.js b/jstests/where3.js
new file mode 100644
index 00000000000..c062ed11513
--- /dev/null
+++ b/jstests/where3.js
@@ -0,0 +1,10 @@
+
+t = db.where3;
+t.drop()
+
+t.save( { returned_date : 5 } );
+t.save( { returned_date : 6 } );
+
+assert.eq( 1 , t.find( function(){ return this.returned_date == 5; } ).count() , "A" );
+assert.eq( 1 , t.find( { $where : "return this.returned_date == 5;" } ).count() , "B" );
+assert.eq( 1 , t.find( { $where : "this.returned_date == 5;" } ).count() , "C" );
diff --git a/jstests/where4.js b/jstests/where4.js
new file mode 100644
index 00000000000..61ec3771bed
--- /dev/null
+++ b/jstests/where4.js
@@ -0,0 +1,27 @@
+
+db.where4.drop();
+
+db.system.js.insert( { _id : "w4" , value : "5" } )
+
+db.where4.insert( { x : 1 , y : 1 } )
+db.where4.insert( { x : 2 , y : 1 } )
+
+db.where4.update( { $where : function() { return this.x == 1; } } ,
+ { $inc : { y : 1 } } , false , true );
+
+
+assert.eq( 2 , db.where4.findOne( { x : 1 } ).y )
+assert.eq( 1 , db.where4.findOne( { x : 2 } ).y )
+
+// Test that where queries work with stored javascript
+db.system.js.save( { _id : "where4_addOne" , value : function(x) { return x + 1; } } )
+
+db.where4.update( { $where : "where4_addOne(this.x) == 2" } ,
+ { $inc : { y : 1 } } , false , true );
+
+assert.eq( 3 , db.where4.findOne( { x : 1 } ).y )
+assert.eq( 1 , db.where4.findOne( { x : 2 } ).y )
+
+db.system.js.remove( { _id : "where4_equalsOne" } )
+
+db.system.js.remove( { _id : "w4" } )