summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--jstests/_fail.js4
-rw-r--r--jstests/_lodeRunner.js4
-rw-r--r--jstests/_runner.js28
-rw-r--r--jstests/_runner_leak.js44
-rw-r--r--jstests/_runner_leak_nojni.js42
-rw-r--r--jstests/_runner_sharding.js35
-rw-r--r--jstests/_tst.js41
-rw-r--r--jstests/all.js47
-rw-r--r--jstests/all2.js86
-rw-r--r--jstests/all3.js28
-rw-r--r--jstests/all4.js30
-rw-r--r--jstests/all5.js28
-rw-r--r--jstests/and.js85
-rw-r--r--jstests/and2.js27
-rw-r--r--jstests/and3.js67
-rw-r--r--jstests/andor.js99
-rw-r--r--jstests/apitest_db.js77
-rw-r--r--jstests/apitest_dbcollection.js115
-rw-r--r--jstests/apply_ops1.js66
-rw-r--r--jstests/apply_ops2.js71
-rw-r--r--jstests/array1.js14
-rw-r--r--jstests/array3.js8
-rw-r--r--jstests/array4.js30
-rw-r--r--jstests/array_match1.js31
-rw-r--r--jstests/array_match2.js20
-rw-r--r--jstests/array_match3.js13
-rw-r--r--jstests/array_match4.js30
-rw-r--r--jstests/arrayfind1.js40
-rw-r--r--jstests/arrayfind2.js29
-rw-r--r--jstests/arrayfind3.js16
-rw-r--r--jstests/arrayfind4.js22
-rw-r--r--jstests/arrayfind5.js23
-rw-r--r--jstests/arrayfind6.js26
-rw-r--r--jstests/arrayfind7.js52
-rw-r--r--jstests/arrayfind8.js175
-rw-r--r--jstests/arrayfind9.js34
-rw-r--r--jstests/arrayfinda.js21
-rw-r--r--jstests/auth1.js54
-rw-r--r--jstests/auth2.js9
-rw-r--r--jstests/auth_copydb.js19
-rw-r--r--jstests/autoid.js11
-rw-r--r--jstests/bad_index_plugin.js11
-rw-r--r--jstests/basic1.js21
-rw-r--r--jstests/basic2.js16
-rw-r--r--jstests/basic3.js45
-rw-r--r--jstests/basic4.js12
-rw-r--r--jstests/basic5.js6
-rw-r--r--jstests/basic6.js8
-rw-r--r--jstests/basic7.js11
-rw-r--r--jstests/basic8.js11
-rw-r--r--jstests/basic9.js19
-rw-r--r--jstests/basica.js33
-rw-r--r--jstests/basicb.js7
-rw-r--r--jstests/basicc.js21
-rw-r--r--jstests/batch_size.js75
-rw-r--r--jstests/bench_test1.js37
-rw-r--r--jstests/bench_test2.js49
-rw-r--r--jstests/bench_test3.js28
-rw-r--r--jstests/big_object1.js54
-rw-r--r--jstests/binData.js14
-rw-r--r--jstests/block_check_supported.js118
-rw-r--r--jstests/bulk_insert.js22
-rw-r--r--jstests/capped.js11
-rw-r--r--jstests/capped1.js11
-rw-r--r--jstests/capped2.js62
-rw-r--r--jstests/capped3.js45
-rw-r--r--jstests/capped5.js40
-rw-r--r--jstests/capped6.js109
-rw-r--r--jstests/capped7.js89
-rw-r--r--jstests/capped8.js108
-rw-r--r--jstests/capped9.js28
-rw-r--r--jstests/capped_empty.js24
-rw-r--r--jstests/capped_max.js29
-rw-r--r--jstests/capped_server2639.js27
-rw-r--r--jstests/capped_server7543.js11
-rw-r--r--jstests/cappeda.js33
-rw-r--r--jstests/check_shard_index.js141
-rw-r--r--jstests/collmod.js82
-rw-r--r--jstests/compact.js76
-rw-r--r--jstests/compact2.js52
-rw-r--r--jstests/compactPreservePadding.js26
-rw-r--r--jstests/connection_status.js27
-rw-r--r--jstests/connection_string_validation.js106
-rw-r--r--jstests/constructors.js314
-rw-r--r--jstests/copydb.js20
-rw-r--r--jstests/count.js25
-rw-r--r--jstests/count10.js61
-rw-r--r--jstests/count2.js28
-rw-r--r--jstests/count3.js26
-rw-r--r--jstests/count4.js17
-rw-r--r--jstests/count5.js30
-rw-r--r--jstests/count6.js61
-rw-r--r--jstests/count7.js25
-rw-r--r--jstests/count9.js28
-rw-r--r--jstests/count_hint.js20
-rw-r--r--jstests/counta.js14
-rw-r--r--jstests/countb.js11
-rw-r--r--jstests/countc.js124
-rw-r--r--jstests/coveredIndex1.js64
-rw-r--r--jstests/coveredIndex2.js18
-rw-r--r--jstests/coveredIndex3.js54
-rw-r--r--jstests/coveredIndex4.js40
-rw-r--r--jstests/coveredIndex5.js70
-rw-r--r--jstests/covered_index_compound_1.js45
-rw-r--r--jstests/covered_index_geo_1.js18
-rw-r--r--jstests/covered_index_geo_2.js22
-rw-r--r--jstests/covered_index_negative_1.js61
-rw-r--r--jstests/covered_index_simple_1.js55
-rw-r--r--jstests/covered_index_simple_2.js43
-rw-r--r--jstests/covered_index_simple_3.js58
-rw-r--r--jstests/covered_index_simple_id.js42
-rw-r--r--jstests/covered_index_sort_1.js34
-rw-r--r--jstests/covered_index_sort_2.js17
-rw-r--r--jstests/covered_index_sort_3.js16
-rw-r--r--jstests/create_indexes.js48
-rw-r--r--jstests/currentop.js80
-rw-r--r--jstests/cursor1.js20
-rw-r--r--jstests/cursor2.js24
-rw-r--r--jstests/cursor3.js35
-rw-r--r--jstests/cursor4.js47
-rw-r--r--jstests/cursor5.js36
-rw-r--r--jstests/cursor6.js100
-rw-r--r--jstests/cursor7.js42
-rw-r--r--jstests/cursora.js47
-rw-r--r--jstests/cursorb.js17
-rw-r--r--jstests/datasize.js35
-rw-r--r--jstests/datasize2.js27
-rw-r--r--jstests/datasize3.js34
-rw-r--r--jstests/date1.js17
-rw-r--r--jstests/date2.js13
-rw-r--r--jstests/date3.js31
-rw-r--r--jstests/db.js11
-rw-r--r--jstests/dbadmin.js105
-rw-r--r--jstests/dbcase.js29
-rw-r--r--jstests/dbcase2.js9
-rw-r--r--jstests/dbhash.js58
-rw-r--r--jstests/dbhash2.js22
-rw-r--r--jstests/dbref1.js10
-rw-r--r--jstests/dbref2.js20
-rw-r--r--jstests/dbref3.js45
-rw-r--r--jstests/delx.js32
-rw-r--r--jstests/depth_limit.js56
-rw-r--r--jstests/distinct1.js40
-rw-r--r--jstests/distinct2.js13
-rw-r--r--jstests/distinct3.js33
-rw-r--r--jstests/distinct_array1.js91
-rw-r--r--jstests/distinct_index1.js72
-rw-r--r--jstests/distinct_index2.js41
-rw-r--r--jstests/distinct_speed1.js26
-rw-r--r--jstests/drop.js25
-rw-r--r--jstests/drop2.js53
-rw-r--r--jstests/drop3.js29
-rw-r--r--jstests/drop_index.js20
-rw-r--r--jstests/dropdb.js26
-rw-r--r--jstests/dropdb_race.js44
-rwxr-xr-xjstests/dur/a_quick.js32
-rw-r--r--jstests/elemMatchProjection.js265
-rw-r--r--jstests/error2.js21
-rw-r--r--jstests/error5.js8
-rw-r--r--jstests/eval0.js8
-rw-r--r--jstests/eval1.js17
-rw-r--r--jstests/eval2.js28
-rw-r--r--jstests/eval3.js21
-rw-r--r--jstests/eval4.js23
-rw-r--r--jstests/eval5.js23
-rw-r--r--jstests/eval6.js15
-rw-r--r--jstests/eval7.js3
-rw-r--r--jstests/eval8.js19
-rw-r--r--jstests/eval9.js22
-rw-r--r--jstests/eval_nolock.js16
-rw-r--r--jstests/evala.js9
-rw-r--r--jstests/evalb.js40
-rw-r--r--jstests/evalc.js25
-rw-r--r--jstests/evald.js98
-rw-r--r--jstests/evale.js5
-rw-r--r--jstests/evalf.js27
-rw-r--r--jstests/exists.js49
-rw-r--r--jstests/exists2.js16
-rw-r--r--jstests/exists3.js21
-rw-r--r--jstests/exists4.js20
-rw-r--r--jstests/exists5.js33
-rw-r--r--jstests/exists6.js79
-rw-r--r--jstests/exists7.js21
-rw-r--r--jstests/exists8.js76
-rw-r--r--jstests/exists9.js41
-rw-r--r--jstests/existsa.js114
-rw-r--r--jstests/existsb.js76
-rw-r--r--jstests/explain1.js48
-rw-r--r--jstests/explain2.js27
-rw-r--r--jstests/explain3.js24
-rw-r--r--jstests/explain4.js68
-rw-r--r--jstests/explain5.js38
-rw-r--r--jstests/explain6.js25
-rw-r--r--jstests/explain7.js193
-rw-r--r--jstests/explain8.js24
-rw-r--r--jstests/explain9.js24
-rw-r--r--jstests/explain_batch_size.js19
-rw-r--r--jstests/explaina.js28
-rw-r--r--jstests/explainb.js46
-rw-r--r--jstests/extent.js11
-rw-r--r--jstests/extent2.js34
-rw-r--r--jstests/filemd5.js11
-rw-r--r--jstests/find1.js54
-rw-r--r--jstests/find2.js16
-rw-r--r--jstests/find3.js10
-rw-r--r--jstests/find4.js26
-rw-r--r--jstests/find5.js51
-rw-r--r--jstests/find6.js41
-rw-r--r--jstests/find7.js8
-rw-r--r--jstests/find8.js27
-rw-r--r--jstests/find9.js28
-rw-r--r--jstests/find_and_modify.js38
-rw-r--r--jstests/find_and_modify2.js16
-rw-r--r--jstests/find_and_modify3.js21
-rw-r--r--jstests/find_and_modify4.js55
-rw-r--r--jstests/find_and_modify_server6226.js7
-rw-r--r--jstests/find_and_modify_server6254.js10
-rw-r--r--jstests/find_and_modify_server6582.js18
-rw-r--r--jstests/find_and_modify_server6588.js22
-rw-r--r--jstests/find_and_modify_server6659.js7
-rw-r--r--jstests/find_and_modify_server6909.js21
-rw-r--r--jstests/find_and_modify_server6993.js9
-rw-r--r--jstests/find_and_modify_server7660.js18
-rw-r--r--jstests/find_and_modify_where.js10
-rw-r--r--jstests/find_dedup.js35
-rw-r--r--jstests/find_size.js26
-rw-r--r--jstests/finda.js106
-rw-r--r--jstests/fm1.js12
-rw-r--r--jstests/fm2.js9
-rw-r--r--jstests/fm3.js37
-rw-r--r--jstests/fm4.js16
-rw-r--r--jstests/fsync.js22
-rw-r--r--jstests/fts1.js29
-rw-r--r--jstests/fts2.js24
-rw-r--r--jstests/fts3.js22
-rw-r--r--jstests/fts4.js22
-rw-r--r--jstests/fts5.js22
-rw-r--r--jstests/fts6.js16
-rw-r--r--jstests/fts_blog.js26
-rw-r--r--jstests/fts_blogwild.js40
-rw-r--r--jstests/fts_enabled.js5
-rw-r--r--jstests/fts_explain.js18
-rw-r--r--jstests/fts_index.js138
-rw-r--r--jstests/fts_mix.js159
-rw-r--r--jstests/fts_partition1.js23
-rw-r--r--jstests/fts_partition_no_multikey.js17
-rw-r--r--jstests/fts_phrase.js25
-rw-r--r--jstests/fts_proj.js20
-rw-r--r--jstests/fts_projection.js99
-rw-r--r--jstests/fts_querylang.js93
-rw-r--r--jstests/fts_score_sort.js28
-rw-r--r--jstests/fts_spanish.js31
-rw-r--r--jstests/geo1.js41
-rw-r--r--jstests/geo10.js21
-rw-r--r--jstests/geo2.js40
-rw-r--r--jstests/geo3.js77
-rw-r--r--jstests/geo4.js10
-rw-r--r--jstests/geo5.js18
-rw-r--r--jstests/geo6.js24
-rw-r--r--jstests/geo7.js20
-rw-r--r--jstests/geo8.js13
-rw-r--r--jstests/geo9.js28
-rw-r--r--jstests/geo_2d_explain.js29
-rw-r--r--jstests/geo_2d_with_geojson_point.js20
-rw-r--r--jstests/geo_allowedcomparisons.js107
-rw-r--r--jstests/geo_array0.js27
-rw-r--r--jstests/geo_array1.js38
-rw-r--r--jstests/geo_array2.js163
-rw-r--r--jstests/geo_borders.js168
-rw-r--r--jstests/geo_box1.js43
-rw-r--r--jstests/geo_box1_noindex.js32
-rw-r--r--jstests/geo_box2.js52
-rw-r--r--jstests/geo_box3.js36
-rw-r--r--jstests/geo_center_sphere1.js96
-rw-r--r--jstests/geo_center_sphere2.js164
-rw-r--r--jstests/geo_circle1.js43
-rw-r--r--jstests/geo_circle1_noindex.js29
-rw-r--r--jstests/geo_circle2.js26
-rw-r--r--jstests/geo_circle2a.js37
-rw-r--r--jstests/geo_circle3.js28
-rw-r--r--jstests/geo_circle4.js31
-rw-r--r--jstests/geo_circle5.js28
-rw-r--r--jstests/geo_distinct.js16
-rw-r--r--jstests/geo_exactfetch.js17
-rw-r--r--jstests/geo_fiddly_box.js46
-rw-r--r--jstests/geo_fiddly_box2.js32
-rw-r--r--jstests/geo_group.js35
-rw-r--r--jstests/geo_haystack1.js59
-rw-r--r--jstests/geo_haystack2.js60
-rw-r--r--jstests/geo_haystack3.js28
-rw-r--r--jstests/geo_invalid_polygon.js33
-rw-r--r--jstests/geo_mapreduce.js56
-rw-r--r--jstests/geo_mapreduce2.js36
-rw-r--r--jstests/geo_max.js49
-rw-r--r--jstests/geo_mindistance.js214
-rw-r--r--jstests/geo_mindistance_boundaries.js124
-rw-r--r--jstests/geo_multikey0.js26
-rw-r--r--jstests/geo_multikey1.js20
-rw-r--r--jstests/geo_multinest0.js63
-rw-r--r--jstests/geo_multinest1.js37
-rw-r--r--jstests/geo_near_random1.js12
-rw-r--r--jstests/geo_near_random2.js21
-rw-r--r--jstests/geo_nearwithin.js27
-rw-r--r--jstests/geo_oob_sphere.js42
-rw-r--r--jstests/geo_or.js62
-rw-r--r--jstests/geo_poly_edge.js22
-rw-r--r--jstests/geo_poly_line.js17
-rw-r--r--jstests/geo_polygon1.js74
-rw-r--r--jstests/geo_polygon1_noindex.js47
-rw-r--r--jstests/geo_polygon2.js266
-rw-r--r--jstests/geo_polygon3.js54
-rw-r--r--jstests/geo_queryoptimizer.js27
-rw-r--r--jstests/geo_regex0.js18
-rw-r--r--jstests/geo_s2cursorlimitskip.js69
-rw-r--r--jstests/geo_s2dedupnear.js11
-rw-r--r--jstests/geo_s2descindex.js64
-rw-r--r--jstests/geo_s2disjoint_holes.js94
-rw-r--r--jstests/geo_s2dupe_points.js72
-rwxr-xr-xjstests/geo_s2edgecases.js40
-rw-r--r--jstests/geo_s2exact.js21
-rw-r--r--jstests/geo_s2holesameasshell.js46
-rwxr-xr-xjstests/geo_s2index.js107
-rwxr-xr-xjstests/geo_s2indexoldformat.js28
-rw-r--r--jstests/geo_s2indexversion1.js150
-rw-r--r--jstests/geo_s2intersection.js141
-rw-r--r--jstests/geo_s2largewithin.js45
-rw-r--r--jstests/geo_s2meridian.js109
-rw-r--r--jstests/geo_s2multi.js50
-rw-r--r--jstests/geo_s2near.js84
-rw-r--r--jstests/geo_s2nearComplex.js269
-rw-r--r--jstests/geo_s2near_equator_opposite.js31
-rw-r--r--jstests/geo_s2nearcorrect.js12
-rw-r--r--jstests/geo_s2nearwithin.js42
-rw-r--r--jstests/geo_s2nongeoarray.js28
-rwxr-xr-xjstests/geo_s2nonstring.js22
-rw-r--r--jstests/geo_s2nopoints.js7
-rw-r--r--jstests/geo_s2oddshapes.js138
-rw-r--r--jstests/geo_s2ordering.js47
-rw-r--r--jstests/geo_s2overlappingpolys.js213
-rwxr-xr-xjstests/geo_s2polywithholes.js48
-rw-r--r--jstests/geo_s2selfintersectingpoly.js12
-rw-r--r--jstests/geo_s2sparse.js113
-rw-r--r--jstests/geo_s2twofields.js64
-rw-r--r--jstests/geo_s2validindex.js26
-rw-r--r--jstests/geo_s2within.js36
-rw-r--r--jstests/geo_small_large.js151
-rw-r--r--jstests/geo_sort1.js22
-rw-r--r--jstests/geo_uniqueDocs.js40
-rw-r--r--jstests/geo_uniqueDocs2.js80
-rw-r--r--jstests/geo_update.js37
-rw-r--r--jstests/geo_update1.js38
-rw-r--r--jstests/geo_update2.js40
-rw-r--r--jstests/geo_update_btree.js25
-rw-r--r--jstests/geo_update_btree2.js71
-rw-r--r--jstests/geo_update_dedup.js60
-rw-r--r--jstests/geo_withinquery.js15
-rw-r--r--jstests/geoa.js12
-rw-r--r--jstests/geob.js35
-rw-r--r--jstests/geoc.js24
-rw-r--r--jstests/geod.js14
-rw-r--r--jstests/geoe.js32
-rw-r--r--jstests/geof.js19
-rw-r--r--jstests/geonear_cmd_input_validation.js119
-rw-r--r--jstests/geonear_validate.js8
-rw-r--r--jstests/getlog1.js24
-rw-r--r--jstests/getlog2.js47
-rw-r--r--jstests/group1.js64
-rw-r--r--jstests/group2.js38
-rw-r--r--jstests/group3.js43
-rw-r--r--jstests/group4.js45
-rw-r--r--jstests/group5.js38
-rw-r--r--jstests/group6.js32
-rw-r--r--jstests/group7.js45
-rw-r--r--jstests/group_empty.js8
-rw-r--r--jstests/grow_hash_table.js45
-rw-r--r--jstests/hashindex1.js94
-rw-r--r--jstests/hashtest1.js78
-rw-r--r--jstests/hint1.js16
-rw-r--r--jstests/hostinfo.js33
-rw-r--r--jstests/id1.js16
-rw-r--r--jstests/idhack.js59
-rw-r--r--jstests/in.js24
-rw-r--r--jstests/in2.js33
-rw-r--r--jstests/in3.js11
-rw-r--r--jstests/in4.js42
-rw-r--r--jstests/in5.js56
-rw-r--r--jstests/in6.js13
-rw-r--r--jstests/in8.js23
-rw-r--r--jstests/in9.js35
-rw-r--r--jstests/ina.js15
-rw-r--r--jstests/inb.js19
-rw-r--r--jstests/inc-SERVER-7446.js43
-rw-r--r--jstests/inc1.js32
-rw-r--r--jstests/inc2.js22
-rw-r--r--jstests/inc3.js16
-rw-r--r--jstests/index1.js24
-rw-r--r--jstests/index10.js32
-rw-r--r--jstests/index13.js147
-rw-r--r--jstests/index2.js40
-rw-r--r--jstests/index3.js16
-rw-r--r--jstests/index4.js33
-rw-r--r--jstests/index5.js24
-rw-r--r--jstests/index6.js8
-rw-r--r--jstests/index7.js67
-rw-r--r--jstests/index8.js62
-rw-r--r--jstests/index9.js25
-rw-r--r--jstests/indexOtherNamespace.js21
-rw-r--r--jstests/indexStatsCommand.js88
-rw-r--r--jstests/index_arr1.js23
-rw-r--r--jstests/index_arr2.js51
-rw-r--r--jstests/index_big1.js36
-rwxr-xr-xjstests/index_bigkeys.js59
-rw-r--r--jstests/index_bigkeys_update.js20
-rw-r--r--jstests/index_bounds_number_edge_cases.js50
-rw-r--r--jstests/index_check1.js31
-rw-r--r--jstests/index_check2.js41
-rw-r--r--jstests/index_check3.js63
-rw-r--r--jstests/index_check5.js17
-rw-r--r--jstests/index_check6.js82
-rw-r--r--jstests/index_check7.js15
-rw-r--r--jstests/index_check8.js21
-rw-r--r--jstests/index_diag.js50
-rw-r--r--jstests/index_elemmatch1.js41
-rw-r--r--jstests/index_filter_commands.js181
-rw-r--r--jstests/index_many.js51
-rw-r--r--jstests/index_many2.js31
-rw-r--r--jstests/index_sparse1.js46
-rw-r--r--jstests/index_sparse2.js23
-rw-r--r--jstests/indexa.js22
-rw-r--r--jstests/indexapi.js40
-rw-r--r--jstests/indexb.js29
-rw-r--r--jstests/indexc.js20
-rw-r--r--jstests/indexd.js10
-rw-r--r--jstests/indexe.js22
-rw-r--r--jstests/indexes_on_indexes.js19
-rw-r--r--jstests/indexf.js13
-rw-r--r--jstests/indexg.js13
-rw-r--r--jstests/indexh.js41
-rw-r--r--jstests/indexi.js43
-rw-r--r--jstests/indexj.js44
-rw-r--r--jstests/indexl.js27
-rw-r--r--jstests/indexm.js38
-rw-r--r--jstests/indexn.js49
-rw-r--r--jstests/indexo.js17
-rw-r--r--jstests/indexp.js54
-rw-r--r--jstests/indexq.js20
-rw-r--r--jstests/indexr.js44
-rw-r--r--jstests/indexs.js21
-rw-r--r--jstests/indext.js21
-rw-r--r--jstests/indexu.js137
-rw-r--r--jstests/indexv.js18
-rw-r--r--jstests/indexw.js15
-rw-r--r--jstests/insert1.js44
-rw-r--r--jstests/insert2.js8
-rw-r--r--jstests/insert_id_undefined.js6
-rw-r--r--jstests/insert_illegal_doc.js22
-rw-r--r--jstests/insert_long_index_key.js10
-rw-r--r--jstests/ismaster.js28
-rw-r--r--jstests/js1.js12
-rw-r--r--jstests/js2.js23
-rw-r--r--jstests/js3.js76
-rw-r--r--jstests/js4.js49
-rw-r--r--jstests/js5.js10
-rw-r--r--jstests/js7.js5
-rw-r--r--jstests/js8.js14
-rw-r--r--jstests/js9.js24
-rw-r--r--jstests/json1.js28
-rw-r--r--jstests/killop.js62
-rw-r--r--jstests/loadserverscripts.js57
-rw-r--r--jstests/loglong.js32
-rw-r--r--jstests/logprocessdetails.js39
-rw-r--r--jstests/long_index_rename.js18
-rw-r--r--jstests/map1.js24
-rw-r--r--jstests/max_message_size.js88
-rw-r--r--jstests/max_time_ms.js303
-rw-r--r--jstests/maxscan.js18
-rw-r--r--jstests/minmax.js54
-rw-r--r--jstests/mod1.js25
-rw-r--r--jstests/mr1.js184
-rw-r--r--jstests/mr2.js83
-rw-r--r--jstests/mr3.js73
-rw-r--r--jstests/mr4.js45
-rw-r--r--jstests/mr5.js58
-rw-r--r--jstests/mr_bigobject.js46
-rw-r--r--jstests/mr_comments.js28
-rw-r--r--jstests/mr_drop.js42
-rw-r--r--jstests/mr_errorhandling.js49
-rw-r--r--jstests/mr_index.js43
-rw-r--r--jstests/mr_index2.js22
-rw-r--r--jstests/mr_index3.js50
-rw-r--r--jstests/mr_killop.js156
-rw-r--r--jstests/mr_merge.js60
-rw-r--r--jstests/mr_merge2.js37
-rw-r--r--jstests/mr_mutable_properties.js62
-rw-r--r--jstests/mr_optim.js48
-rw-r--r--jstests/mr_outreduce.js49
-rw-r--r--jstests/mr_outreduce2.js27
-rw-r--r--jstests/mr_replaceIntoDB.js45
-rw-r--r--jstests/mr_sort.js44
-rw-r--r--jstests/mr_stored.js66
-rw-r--r--jstests/mr_undef.js22
-rw-r--r--jstests/multi.js24
-rw-r--r--jstests/multi2.js23
-rw-r--r--jstests/ne1.js11
-rw-r--r--jstests/ne2.js16
-rw-r--r--jstests/ne3.js12
-rw-r--r--jstests/nestedarr1.js30
-rw-r--r--jstests/nestedobj1.js30
-rw-r--r--jstests/nin.js58
-rw-r--r--jstests/nin2.js67
-rw-r--r--jstests/not1.js20
-rw-r--r--jstests/not2.js84
-rw-r--r--jstests/notablescan.js31
-rw-r--r--jstests/ns_length.js85
-rw-r--r--jstests/null.js26
-rw-r--r--jstests/null2.js45
-rw-r--r--jstests/null_field_name.js8
-rw-r--r--jstests/numberint.js92
-rw-r--r--jstests/numberlong.js55
-rw-r--r--jstests/numberlong2.js28
-rw-r--r--jstests/numberlong3.js25
-rw-r--r--jstests/numberlong4.js21
-rw-r--r--jstests/objid1.js16
-rw-r--r--jstests/objid2.js7
-rw-r--r--jstests/objid3.js9
-rw-r--r--jstests/objid4.js16
-rw-r--r--jstests/objid5.js19
-rw-r--r--jstests/objid6.js16
-rw-r--r--jstests/objid7.js13
-rw-r--r--jstests/or1.js57
-rw-r--r--jstests/or2.js69
-rw-r--r--jstests/or3.js62
-rw-r--r--jstests/or4.js99
-rw-r--r--jstests/or5.js70
-rw-r--r--jstests/or6.js23
-rw-r--r--jstests/or7.js41
-rw-r--r--jstests/or8.js28
-rw-r--r--jstests/or9.js64
-rw-r--r--jstests/ora.js17
-rw-r--r--jstests/orb.js17
-rw-r--r--jstests/orc.js29
-rw-r--r--jstests/ord.js35
-rw-r--r--jstests/ore.js13
-rw-r--r--jstests/orf.js27
-rw-r--r--jstests/org.js19
-rw-r--r--jstests/orh.js17
-rw-r--r--jstests/orj.js121
-rw-r--r--jstests/ork.js11
-rw-r--r--jstests/orl.js13
-rw-r--r--jstests/oro.js27
-rw-r--r--jstests/orp.js43
-rw-r--r--jstests/padding.js66
-rw-r--r--jstests/plan_cache_commands.js433
-rw-r--r--jstests/profile1.js170
-rw-r--r--jstests/profile2.js25
-rw-r--r--jstests/profile3.js54
-rw-r--r--jstests/profile4.js119
-rw-r--r--jstests/proj_key1.js28
-rw-r--r--jstests/pull.js33
-rw-r--r--jstests/pull2.js31
-rw-r--r--jstests/pull_or.js21
-rw-r--r--jstests/pull_remove1.js14
-rw-r--r--jstests/pullall.js31
-rw-r--r--jstests/pullall2.js20
-rw-r--r--jstests/push.js54
-rw-r--r--jstests/push2.js21
-rw-r--r--jstests/push_sort.js96
-rw-r--r--jstests/pushall.js20
-rw-r--r--jstests/query1.js26
-rw-r--r--jstests/queryoptimizer3.js33
-rw-r--r--jstests/queryoptimizer6.js16
-rw-r--r--jstests/queryoptimizera.js92
-rw-r--r--jstests/ref.js19
-rw-r--r--jstests/ref2.js14
-rw-r--r--jstests/ref3.js19
-rw-r--r--jstests/ref4.js20
-rw-r--r--jstests/regex.js24
-rw-r--r--jstests/regex2.js70
-rw-r--r--jstests/regex3.js36
-rw-r--r--jstests/regex4.js18
-rw-r--r--jstests/regex5.js53
-rw-r--r--jstests/regex6.js29
-rw-r--r--jstests/regex7.js26
-rw-r--r--jstests/regex8.js19
-rw-r--r--jstests/regex9.js11
-rw-r--r--jstests/regex_embed1.js25
-rw-r--r--jstests/regex_limit.js22
-rw-r--r--jstests/regex_options.js7
-rw-r--r--jstests/regex_util.js27
-rw-r--r--jstests/regexa.js19
-rw-r--r--jstests/regexb.js14
-rw-r--r--jstests/regexc.js28
-rw-r--r--jstests/remove.js27
-rw-r--r--jstests/remove2.js46
-rw-r--r--jstests/remove3.js18
-rw-r--r--jstests/remove4.js10
-rw-r--r--jstests/remove5.js24
-rw-r--r--jstests/remove6.js38
-rw-r--r--jstests/remove7.js35
-rw-r--r--jstests/remove8.js21
-rw-r--r--jstests/remove9.js16
-rw-r--r--jstests/remove_justone.js16
-rw-r--r--jstests/remove_undefined.js28
-rw-r--r--jstests/removea.js23
-rw-r--r--jstests/removeb.js39
-rw-r--r--jstests/removec.js40
-rw-r--r--jstests/rename.js56
-rw-r--r--jstests/rename2.js19
-rw-r--r--jstests/rename3.js25
-rw-r--r--jstests/rename4.js145
-rw-r--r--jstests/rename5.js46
-rw-r--r--jstests/rename6.js24
-rw-r--r--jstests/rename7.js56
-rw-r--r--jstests/rename8.js25
-rw-r--r--jstests/rename_stayTemp.js24
-rw-r--r--jstests/repair.js28
-rw-r--r--jstests/repair_server12955.js15
-rw-r--r--jstests/reversecursor.js34
-rw-r--r--jstests/role_management_helpers.js137
-rw-r--r--jstests/run_program1.js19
-rw-r--r--jstests/server1470.js20
-rw-r--r--jstests/server5346.js15
-rw-r--r--jstests/server7756.js12
-rw-r--r--jstests/server9385.js16
-rw-r--r--jstests/server9547.js21
-rw-r--r--jstests/set1.js9
-rw-r--r--jstests/set2.js18
-rw-r--r--jstests/set3.js11
-rw-r--r--jstests/set4.js15
-rw-r--r--jstests/set5.js17
-rw-r--r--jstests/set6.js20
-rw-r--r--jstests/set7.js67
-rw-r--r--jstests/set_param1.js9
-rw-r--r--jstests/shell1.js15
-rw-r--r--jstests/shell_writeconcern.js72
-rw-r--r--jstests/shellkillop.js61
-rw-r--r--jstests/shellstartparallel.js17
-rw-r--r--jstests/shelltypes.js53
-rw-r--r--jstests/showdiskloc.js25
-rw-r--r--jstests/skip1.js15
-rw-r--r--jstests/slice1.js68
-rw-r--r--jstests/sort1.js48
-rw-r--r--jstests/sort10.js48
-rw-r--r--jstests/sort2.js32
-rw-r--r--jstests/sort3.js16
-rw-r--r--jstests/sort4.js43
-rw-r--r--jstests/sort5.js21
-rw-r--r--jstests/sort6.js38
-rw-r--r--jstests/sort7.js25
-rw-r--r--jstests/sort8.js30
-rw-r--r--jstests/sort9.js26
-rw-r--r--jstests/sort_numeric.js35
-rw-r--r--jstests/sorta.js26
-rw-r--r--jstests/sortb.js27
-rw-r--r--jstests/sortc.js37
-rw-r--r--jstests/sortd.js70
-rw-r--r--jstests/sorte.js30
-rw-r--r--jstests/sortf.js20
-rw-r--r--jstests/sortg.js64
-rw-r--r--jstests/sorth.js140
-rw-r--r--jstests/sorti.js25
-rw-r--r--jstests/sortj.js17
-rw-r--r--jstests/sortk.js140
-rw-r--r--jstests/splitvector.js309
-rw-r--r--jstests/stages_and_hash.js42
-rw-r--r--jstests/stages_and_sorted.js49
-rw-r--r--jstests/stages_collection_scan.js43
-rw-r--r--jstests/stages_fetch.js33
-rw-r--r--jstests/stages_ixscan.js76
-rw-r--r--jstests/stages_limit_skip.js34
-rw-r--r--jstests/stages_mergesort.js32
-rw-r--r--jstests/stages_or.js33
-rw-r--r--jstests/stages_sort.js36
-rw-r--r--jstests/stages_text.js17
-rw-r--r--jstests/stats.js23
-rw-r--r--jstests/storageDetailsCommand.js99
-rw-r--r--jstests/storefunc.js44
-rw-r--r--jstests/string_with_nul_bytes.js9
-rw-r--r--jstests/sub1.js14
-rw-r--r--jstests/temp_cleanup.js16
-rw-r--r--jstests/testminmax.js14
-rw-r--r--jstests/touch1.js15
-rw-r--r--jstests/ts1.js38
-rw-r--r--jstests/type1.js24
-rw-r--r--jstests/type2.js19
-rw-r--r--jstests/type3.js68
-rw-r--r--jstests/unique2.js112
-rw-r--r--jstests/uniqueness.js58
-rw-r--r--jstests/unset.js19
-rw-r--r--jstests/unset2.js23
-rw-r--r--jstests/update.js40
-rw-r--r--jstests/update2.js18
-rw-r--r--jstests/update3.js28
-rw-r--r--jstests/update4.js33
-rw-r--r--jstests/update5.js41
-rw-r--r--jstests/update6.js46
-rw-r--r--jstests/update7.js138
-rw-r--r--jstests/update8.js11
-rw-r--r--jstests/update9.js19
-rw-r--r--jstests/update_addToSet.js58
-rw-r--r--jstests/update_addToSet2.js11
-rw-r--r--jstests/update_addToSet3.js18
-rw-r--r--jstests/update_arraymatch1.js16
-rw-r--r--jstests/update_arraymatch2.js16
-rw-r--r--jstests/update_arraymatch3.js17
-rw-r--r--jstests/update_arraymatch4.js18
-rw-r--r--jstests/update_arraymatch5.js15
-rw-r--r--jstests/update_arraymatch6.js14
-rw-r--r--jstests/update_arraymatch7.js19
-rw-r--r--jstests/update_arraymatch8.js158
-rw-r--r--jstests/update_bit_examples.js24
-rw-r--r--jstests/update_blank1.js10
-rw-r--r--jstests/update_currentdate_examples.js24
-rw-r--r--jstests/update_dbref.js36
-rw-r--r--jstests/update_invalid1.js6
-rw-r--r--jstests/update_min_max_examples.js31
-rw-r--r--jstests/update_mul_examples.js24
-rw-r--r--jstests/update_multi3.js25
-rw-r--r--jstests/update_multi4.js18
-rw-r--r--jstests/update_multi5.js17
-rw-r--r--jstests/update_multi6.js10
-rw-r--r--jstests/update_replace.js50
-rw-r--r--jstests/update_server-12848.js19
-rw-r--r--jstests/update_setOnInsert.js47
-rw-r--r--jstests/updatea.js67
-rw-r--r--jstests/updateb.js11
-rw-r--r--jstests/updatec.js14
-rw-r--r--jstests/updated.js20
-rw-r--r--jstests/updatee.js71
-rw-r--r--jstests/updatef.js24
-rw-r--r--jstests/updateg.js17
-rw-r--r--jstests/updateh.js39
-rw-r--r--jstests/updatei.js86
-rw-r--r--jstests/updatej.js12
-rw-r--r--jstests/updatek.js14
-rw-r--r--jstests/updatel.js48
-rw-r--r--jstests/updatem.js20
-rw-r--r--jstests/upsert1.js59
-rw-r--r--jstests/upsert2.js20
-rw-r--r--jstests/upsert3.js60
-rw-r--r--jstests/upsert4.js36
-rw-r--r--jstests/use_power_of_2.js86
-rwxr-xr-xjstests/useindexonobjgtlt.js15
-rw-r--r--jstests/user_management_helpers.js94
-rw-r--r--jstests/validate_cmd_ns.js25
-rw-r--r--jstests/validate_user_documents.js65
-rw-r--r--jstests/verify_update_mods.js82
-rw-r--r--jstests/where1.js28
-rw-r--r--jstests/where2.js10
-rw-r--r--jstests/where3.js10
-rw-r--r--jstests/where4.js27
751 files changed, 31 insertions, 31700 deletions
diff --git a/jstests/_fail.js b/jstests/_fail.js
deleted file mode 100644
index 9d41d2562d2..00000000000
--- a/jstests/_fail.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// For testing the test runner.
-assert.eq(1, 2, "fail1")
-
-print("you should not see this") \ No newline at end of file
diff --git a/jstests/_lodeRunner.js b/jstests/_lodeRunner.js
deleted file mode 100644
index 1aceee64987..00000000000
--- a/jstests/_lodeRunner.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// Start mongod and run jstests/_runner.js
-
-db = startMongod( "--port", "27018", "--dbpath", MongoRunner.dataDir + "/jstests" ).getDB( "test" );
-load( "jstests/_runner.js" );
diff --git a/jstests/_runner.js b/jstests/_runner.js
deleted file mode 100644
index 48619c18901..00000000000
--- a/jstests/_runner.js
+++ /dev/null
@@ -1,28 +0,0 @@
-//
-// simple runner to run toplevel tests in jstests
-//
-var files = listFiles("jstests");
-
-var runnerStart = new Date()
-
-files.forEach(
- function(x) {
-
- if ( /[\/\\]_/.test(x.name) ||
- ! /\.js$/.test(x.name ) ){
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
-
-
- print(" *******************************************");
- print(" Test : " + x.name + " ...");
- print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
-
- }
-);
-
-
-var runnerEnd = new Date()
-
-print( "total runner time: " + ( ( runnerEnd.getTime() - runnerStart.getTime() ) / 1000 ) + "secs" )
diff --git a/jstests/_runner_leak.js b/jstests/_runner_leak.js
deleted file mode 100644
index 18d7fb256f3..00000000000
--- a/jstests/_runner_leak.js
+++ /dev/null
@@ -1,44 +0,0 @@
-//
-// simple runner to run toplevel tests in jstests
-//
-var files = listFiles("jstests");
-
-var dummyDb = db.getSisterDB( "dummyDBdummydummy" );
-
-dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
-prev = dummyDb.serverStatus();
-
-print( "START : " + tojson( prev ) );
-
-files.forEach(
- function(x) {
-
- if ( /_runner/.test(x.name) ||
- /_lodeRunner/.test(x.name) ||
- ! /\.js$/.test(x.name ) ){
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
-
-
- print(" *******************************************");
- print(" Test : " + x.name + " ...");
- print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
-
- assert( dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" ).ok == 1 , "closeAllDatabases failed" );
- var now = dummyDb.serverStatus();
- var leaked = now.mem.virtual - prev.mem.virtual;
- if ( leaked > 0 ){
- print( " LEAK : " + prev.mem.virtual + " -->> " + now.mem.virtual );
- printjson( now );
- if ( leaked > 20 )
- throw -1;
- }
- prev = now;
- }
-);
-
-
-
-dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
-print( "END : " + tojson( dummyDb.serverStatus() ) );
diff --git a/jstests/_runner_leak_nojni.js b/jstests/_runner_leak_nojni.js
deleted file mode 100644
index fe2c6b23902..00000000000
--- a/jstests/_runner_leak_nojni.js
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// simple runner to run toplevel tests in jstests
-//
-var files = listFiles("jstests");
-
-var dummyDb = db.getSisterDB( "dummyDBdummydummy" );
-
-dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
-prev = dummyDb.runCommand( "meminfo" );
-
-print( "START : " + tojson( prev ) );
-
-files.forEach(
- function(x) {
-
- if ( /_runner/.test(x.name) ||
- /_lodeRunner/.test(x.name) ||
- /jni/.test(x.name) ||
- /eval/.test(x.name) ||
- /where/.test(x.name) ||
- ! /\.js$/.test(x.name ) ){
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
-
-
- print(" *******************************************");
- print(" Test : " + x.name + " ...");
- print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
-
- assert( dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" ).ok == 1 , "closeAllDatabases failed" );
- var now = dummyDb.runCommand( "meminfo" );
- if ( now.virtual > prev.virtual )
- print( " LEAK : " + prev.virtual + " -->> " + now.virtual );
- prev = now;
- }
-);
-
-
-
-dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
-print( "END : " + tojson( dummyDb.runCommand( "meminfo" ) ) );
diff --git a/jstests/_runner_sharding.js b/jstests/_runner_sharding.js
deleted file mode 100644
index 761b9dfc2ff..00000000000
--- a/jstests/_runner_sharding.js
+++ /dev/null
@@ -1,35 +0,0 @@
-//
-// simple runner to run toplevel tests in jstests
-//
-var files = listFiles("jstests/sharding");
-
-var num = 0;
-
-files.forEach(
- function(x) {
-
- if ( /_runner/.test(x.name) ||
- /_lodeRunner/.test(x.name) ||
- ! /\.js$/.test(x.name ) ){
- print(" >>>>>>>>>>>>>>> skipping " + x.name);
- return;
- }
-
- if ( num++ > 0 ){
- sleep( 1000 ); // let things fully come down
- }
-
- print(" *******************************************");
- print(" Test : " + x.name + " ...");
- try {
- print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
- }
- catch ( e ){
- print( " ERROR on " + x.name + "!! " + e );
- throw e;
- }
-
- }
-);
-
-
diff --git a/jstests/_tst.js b/jstests/_tst.js
deleted file mode 100644
index f208164e4b8..00000000000
--- a/jstests/_tst.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/* a general testing framework (helpers) for us in the jstests/
-
- to use, from your test file:
- testname="mytestname";
- load("jstests/_tst.js");
-*/
-
-if( typeof tst == "undefined" ) {
- tst = {}
-
- tst.log = function (optional_msg) {
- print("\n\nstep " + ++this._step + " " + (optional_msg || ""));
- }
-
- tst.success = function () {
- print(testname + " SUCCESS");
- }
-
- /* diff files a and b, returning the difference (empty str if no difference) */
- tst.diff = function(a, b) {
- function reSlash(s) {
- var x = s;
- if (_isWindows()) {
- while (1) {
- var y = x.replace('/', '\\');
- if (y == x)
- break;
- x = y;
- }
- }
- return x;
- }
- a = reSlash(a);
- b = reSlash(b);
- print("diff " + a + " " + b);
- return run("diff", a, b);
- }
-}
-
-print(testname + " BEGIN");
-tst._step = 0;
diff --git a/jstests/all.js b/jstests/all.js
deleted file mode 100644
index 50087882ad1..00000000000
--- a/jstests/all.js
+++ /dev/null
@@ -1,47 +0,0 @@
-t = db.jstests_all;
-t.drop();
-
-doTest = function() {
-
- t.save( { a:[ 1,2,3 ] } );
- t.save( { a:[ 1,2,4 ] } );
- t.save( { a:[ 1,8,5 ] } );
- t.save( { a:[ 1,8,6 ] } );
- t.save( { a:[ 1,9,7 ] } );
- t.save( { a : [] } );
- t.save( {} );
-
- assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() );
-
- t.save( { a: [ 2, 2 ] } );
- assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() );
-
- t.save( { a: [ [ 2 ] ] } );
- assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() );
-
- t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() );
-
- t.save( { a: { b: [ 20, 30 ] } } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() );
-
-
- assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" );
- assert.eq( 0 , t.find( { a : { $all : [19] } } ).count() , "E2" );
- assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" );
-
-
-}
-
-doTest();
-t.drop();
-t.ensureIndex( {a:1} );
-doTest();
diff --git a/jstests/all2.js b/jstests/all2.js
deleted file mode 100644
index 64372ca5e97..00000000000
--- a/jstests/all2.js
+++ /dev/null
@@ -1,86 +0,0 @@
-
-t = db.all2;
-t.drop();
-
-t.save( { a : [ { x : 1 } , { x : 2 } ] } )
-t.save( { a : [ { x : 2 } , { x : 3 } ] } )
-t.save( { a : [ { x : 3 } , { x : 4 } ] } )
-
-state = "no index";
-
-function check( n , q , e ){
- assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state );
- assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount" + state );
-}
-
-check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
-
-check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
-
-check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
-
-t.ensureIndex( { "a.x" : 1 } );
-state = "index";
-
-check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
-
-check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
-
-check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
-
-// --- more
-
-t.drop();
-
-t.save( { a : [ 1 , 2 ] } )
-t.save( { a : [ 2 , 3 ] } )
-t.save( { a : [ 3 , 4 ] } )
-
-state = "more no index";
-
-check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
-
-check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
-
-check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
-
-t.ensureIndex( { "a" : 1 } );
-state = "more index";
-
-check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
-
-check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
-
-check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
-
-
-// more 2
-
-state = "more 2"
-
-t.drop();
-t.save( { name : [ "harry","jack","tom" ] } )
-check( 0 , { name : { $all : ["harry","john"] } } , "A" );
-t.ensureIndex( { name : 1 } );
-check( 0 , { name : { $all : ["harry","john"] } } , "B" );
-
diff --git a/jstests/all3.js b/jstests/all3.js
deleted file mode 100644
index b7a05321bbf..00000000000
--- a/jstests/all3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Check that $all matching null is consistent with $in - SERVER-3820
-
-t = db.jstests_all3;
-t.drop();
-
-t.save({});
-
-assert.eq( 1, t.count( {foo:{$in:[null]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[null]}} ) );
-assert.eq( 0, t.count( {foo:{$not:{$all:[null]}}} ) );
-assert.eq( 0, t.count( {foo:{$not:{$in:[null]}}} ) );
-
-t.remove({});
-t.save({foo:1});
-assert.eq( 0, t.count( {foo:{$in:[null]}} ) );
-assert.eq( 0, t.count( {foo:{$all:[null]}} ) );
-assert.eq( 1, t.count( {foo:{$not:{$in:[null]}}} ) );
-assert.eq( 1, t.count( {foo:{$not:{$all:[null]}}} ) );
-
-t.remove({});
-t.save( {foo:[0,1]} );
-assert.eq( 1, t.count( {foo:{$in:[[0,1]]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[[0,1]]}} ) );
-
-t.remove({});
-t.save( {foo:[]} );
-assert.eq( 1, t.count( {foo:{$in:[[]]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[[]]}} ) );
diff --git a/jstests/all4.js b/jstests/all4.js
deleted file mode 100644
index 18acbf4f46a..00000000000
--- a/jstests/all4.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Test $all/$elemMatch with missing field - SERVER-4492
-
-t = db.jstests_all4;
-t.drop();
-
-function checkQuery( query, val ) {
- assert.eq( val, t.count(query) );
- assert( !db.getLastError() );
- assert.eq( val, t.find(query).itcount() );
- assert( !db.getLastError() );
-}
-
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 0 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
-
-t.save({});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 0 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
-
-t.save({a:1});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 1 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
-
-t.save({a:[{b:1}]});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 1 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 1 );
diff --git a/jstests/all5.js b/jstests/all5.js
deleted file mode 100644
index a5d9e312292..00000000000
--- a/jstests/all5.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test $all/$elemMatch/null matching - SERVER-4517
-
-t = db.jstests_all5;
-t.drop();
-
-function checkMatch( doc ) {
- t.drop();
- t.save( doc );
- assert.eq( 1, t.count( {a:{$elemMatch:{b:null}}} ) );
- assert.eq( 1, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
-}
-
-function checkNoMatch( doc ) {
- t.drop();
- t.save( doc );
- assert.eq( 0, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
-}
-
-checkNoMatch( {} );
-checkNoMatch( {a:1} );
-
-checkNoMatch( {a:[]} );
-checkNoMatch( {a:[1]} );
-
-checkMatch( {a:[{}]} );
-checkMatch( {a:[{c:1}]} );
-checkMatch( {a:[{b:null}]} );
-checkNoMatch( {a:[{b:1}]}, 0 );
diff --git a/jstests/and.js b/jstests/and.js
deleted file mode 100644
index 4d8c2cd7d49..00000000000
--- a/jstests/and.js
+++ /dev/null
@@ -1,85 +0,0 @@
-// Some tests for $and SERVER-1089
-
-t = db.jstests_and;
-t.drop();
-
-t.save( {a:[1,2]} );
-t.save( {a:'foo'} );
-
-function check() {
- // $and must be an array
- assert.throws( function() { t.find( {$and:4} ).toArray() } );
- // $and array must not be empty
- assert.throws( function() { t.find( {$and:[]} ).toArray() } );
- // $and elements must be objects
- assert.throws( function() { t.find( {$and:[4]} ).toArray() } );
-
- // Check equality matching
- assert.eq( 1, t.count( {$and:[{a:1}]} ) );
- assert.eq( 1, t.count( {$and:[{a:1},{a:2}]} ) );
- assert.eq( 0, t.count( {$and:[{a:1},{a:3}]} ) );
- assert.eq( 0, t.count( {$and:[{a:1},{a:2},{a:3}]} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}]} ) );
- assert.eq( 0, t.count( {$and:[{a:'foo'},{a:'g'}]} ) );
-
- // Check $and with other fields
- assert.eq( 1, t.count( {a:2,$and:[{a:1}]} ) );
- assert.eq( 0, t.count( {a:0,$and:[{a:1}]} ) );
- assert.eq( 0, t.count( {a:2,$and:[{a:0}]} ) );
- assert.eq( 1, t.count( {a:1,$and:[{a:1}]} ) );
-
- // Check recursive $and
- assert.eq( 1, t.count( {a:2,$and:[{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {a:0,$and:[{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {a:2,$and:[{$and:[{a:0}]}]} ) );
- assert.eq( 1, t.count( {a:1,$and:[{$and:[{a:1}]}]} ) );
-
- assert.eq( 1, t.count( {$and:[{a:2},{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {$and:[{a:0},{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {$and:[{a:2},{$and:[{a:0}]}]} ) );
- assert.eq( 1, t.count( {$and:[{a:1},{$and:[{a:1}]}]} ) );
-
- // Some of these cases were more important with an alternative $and syntax
- // that was rejected, but they're still valid checks.
-
- // Check simple regex
- assert.eq( 1, t.count( {$and:[{a:/foo/}]} ) );
- // Check multiple regexes
- assert.eq( 1, t.count( {$and:[{a:/foo/},{a:/^f/},{a:/o/}]} ) );
- assert.eq( 0, t.count( {$and:[{a:/foo/},{a:/^g/}]} ) );
- assert.eq( 1, t.count( {$and:[{a:/^f/},{a:'foo'}]} ) );
- // Check regex flags
- assert.eq( 0, t.count( {$and:[{a:/^F/},{a:'foo'}]} ) );
- assert.eq( 1, t.count( {$and:[{a:/^F/i},{a:'foo'}]} ) );
-
-
-
- // Check operator
- assert.eq( 1, t.count( {$and:[{a:{$gt:0}}]} ) );
-
- // Check where
- assert.eq( 1, t.count( {a:'foo',$where:'this.a=="foo"'} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
-
- // Nested where ok
- assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}]}) );
- assert.eq( 1, t.count({$and:[{a:'foo'},{$where:'this.a=="foo"'}]}) );
- assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}],$where:'this.a=="foo"'}) );
-}
-
-check();
-t.ensureIndex( {a:1} );
-check();
-var e = t.find( {$and:[{a:1}]} ).explain();
-assert.eq( 'BtreeCursor a_1', e.cursor );
-assert.eq( [[1,1]], e.indexBounds.a );
-
-function checkBounds( query ) {
- var e = t.find( query ).explain(true);
- printjson(e);
- assert.eq( 1, e.n );
-}
-
-checkBounds( {a:1,$and:[{a:2}]} );
-checkBounds( {$and:[{a:1},{a:2}]} );
diff --git a/jstests/and2.js b/jstests/and2.js
deleted file mode 100644
index 0bd13eb7a1d..00000000000
--- a/jstests/and2.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Test dollar sign operator with $and SERVER-1089
-
-t = db.jstests_and2;
-
-t.drop();
-t.save( {a:[1,2]} );
-t.update( {a:1}, {$set:{'a.$':5}} );
-assert.eq( [5,2], t.findOne().a );
-
-t.drop();
-t.save( {a:[1,2]} );
-t.update( {$and:[{a:1}]}, {$set:{'a.$':5}} );
-assert.eq( [5,2], t.findOne().a );
-
-// Make sure dollar sign operator with $and is consistent with no $and case
-t.drop();
-t.save( {a:[1,2],b:[3,4]} );
-t.update( {a:1,b:4}, {$set:{'a.$':5}} );
-// Probably not what we want here, just trying to make sure $and is consistent
-assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] );
-
-// Make sure dollar sign operator with $and is consistent with no $and case
-t.drop();
-t.save( {a:[1,2],b:[3,4]} );
-t.update( {a:1,$and:[{b:4}]}, {$set:{'a.$':5}} );
-// Probably not what we want here, just trying to make sure $and is consistent
-assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] );
diff --git a/jstests/and3.js b/jstests/and3.js
deleted file mode 100644
index 036c63c02f0..00000000000
--- a/jstests/and3.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// Check key match with sub matchers - part of SERVER-3192
-
-t = db.jstests_and3;
-t.drop();
-
-t.save( {a:1} );
-t.save( {a:'foo'} );
-
-t.ensureIndex( {a:1} );
-
-function checkScanMatch( query, nscannedObjects, n ) {
- var e = t.find( query ).hint( {a:1} ).explain();
- assert.eq( nscannedObjects, e.nscannedObjects );
- assert.eq( n, e.n );
-}
-
-checkScanMatch( {a:/o/}, 1, 1 );
-checkScanMatch( {a:/a/}, 0, 0 );
-checkScanMatch( {a:{$not:/o/}}, 2, 1 );
-checkScanMatch( {a:{$not:/a/}}, 2, 2 );
-
-checkScanMatch( {$and:[{a:/o/}]}, 1, 1 );
-checkScanMatch( {$and:[{a:/a/}]}, 0, 0 );
-checkScanMatch( {$and:[{a:{$not:/o/}}]}, 2, 1 );
-checkScanMatch( {$and:[{a:{$not:/a/}}]}, 2, 2 );
-checkScanMatch( {$and:[{a:/o/},{a:{$not:/o/}}]}, 1, 0 );
-checkScanMatch( {$and:[{a:/o/},{a:{$not:/a/}}]}, 1, 1 );
-checkScanMatch( {$or:[{a:/o/}]}, 1, 1 );
-checkScanMatch( {$or:[{a:/a/}]}, 0, 0 );
-checkScanMatch( {$nor:[{a:/o/}]}, 2, 1 );
-checkScanMatch( {$nor:[{a:/a/}]}, 2, 2 );
-
-checkScanMatch( {$and:[{$and:[{a:/o/}]}]}, 1, 1 );
-checkScanMatch( {$and:[{$and:[{a:/a/}]}]}, 0, 0 );
-checkScanMatch( {$and:[{$and:[{a:{$not:/o/}}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$and:[{a:{$not:/a/}}]}]}, 2, 2 );
-checkScanMatch( {$and:[{$or:[{a:/o/}]}]}, 1, 1 );
-checkScanMatch( {$and:[{$or:[{a:/a/}]}]}, 0, 0 );
-checkScanMatch( {$or:[{a:{$not:/o/}}]}, 2, 1 );
-checkScanMatch( {$and:[{$or:[{a:{$not:/o/}}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$or:[{a:{$not:/a/}}]}]}, 2, 2 );
-checkScanMatch( {$and:[{$nor:[{a:/o/}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$nor:[{a:/a/}]}]}, 2, 2 );
-
-checkScanMatch( {$where:'this.a==1'}, 2, 1 );
-checkScanMatch( {$and:[{$where:'this.a==1'}]}, 2, 1 );
-
-checkScanMatch( {a:1,$where:'this.a==1'}, 1, 1 );
-checkScanMatch( {a:1,$and:[{$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {$and:[{a:1},{$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {$and:[{a:1,$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {a:1,$and:[{a:1},{a:1,$where:'this.a==1'}]}, 1, 1 );
-
-function checkImpossibleMatch( query ) {
- var e = t.find( query ).explain();
- assert.eq( 0, e.n );
- // The explain output should include the indexBounds field.
- // The presence of the indexBounds field indicates that the
- // query can make use of an index.
- assert('indexBounds' in e, 'index bounds are missing');
-}
-
-// With a single key index, all bounds are utilized.
-assert.eq( [[1,1]], t.find( {$and:[{a:1}]} ).explain().indexBounds.a );
-assert.eq( [[1,1]], t.find( {a:1,$and:[{a:1}]} ).explain().indexBounds.a );
-checkImpossibleMatch( {a:1,$and:[{a:2}]} );
-checkImpossibleMatch( {$and:[{a:1},{a:2}]} );
diff --git a/jstests/andor.js b/jstests/andor.js
deleted file mode 100644
index f433ade8228..00000000000
--- a/jstests/andor.js
+++ /dev/null
@@ -1,99 +0,0 @@
-// SERVER-1089 Test and/or nesting
-
-t = db.jstests_andor;
-t.drop();
-
-// not ok
-function ok( q ) {
- assert.eq( 1, t.find( q ).itcount() );
-}
-
-t.save( {a:1} );
-
-test = function() {
-
- ok( {a:1} );
-
- ok( {$and:[{a:1}]} );
- ok( {$or:[{a:1}]} );
-
- ok( {$and:[{$and:[{a:1}]}]} );
- ok( {$or:[{$or:[{a:1}]}]} );
-
- ok( {$and:[{$or:[{a:1}]}]} );
- ok( {$or:[{$and:[{a:1}]}]} );
-
- ok( {$and:[{$and:[{$or:[{a:1}]}]}]} );
- ok( {$and:[{$or:[{$and:[{a:1}]}]}]} );
- ok( {$or:[{$and:[{$and:[{a:1}]}]}]} );
-
- ok( {$or:[{$and:[{$or:[{a:1}]}]}]} );
-
- // now test $nor
-
- ok( {$and:[{a:1}]} );
- ok( {$nor:[{a:2}]} );
-
- ok( {$and:[{$and:[{a:1}]}]} );
- ok( {$nor:[{$nor:[{a:1}]}]} );
-
- ok( {$and:[{$nor:[{a:2}]}]} );
- ok( {$nor:[{$and:[{a:2}]}]} );
-
- ok( {$and:[{$and:[{$nor:[{a:2}]}]}]} );
- ok( {$and:[{$nor:[{$and:[{a:2}]}]}]} );
- ok( {$nor:[{$and:[{$and:[{a:2}]}]}]} );
-
- ok( {$nor:[{$and:[{$nor:[{a:1}]}]}]} );
-
-}
-
-test();
-t.ensureIndex( {a:1} );
-test();
-
-// Test an inequality base match.
-
-test = function() {
-
- ok( {a:{$ne:2}} );
-
- ok( {$and:[{a:{$ne:2}}]} );
- ok( {$or:[{a:{$ne:2}}]} );
-
- ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
- ok( {$or:[{$or:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$or:[{a:{$ne:2}}]}]} );
- ok( {$or:[{$and:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
- ok( {$and:[{$or:[{$and:[{a:{$ne:2}}]}]}]} );
- ok( {$or:[{$and:[{$and:[{a:{$ne:2}}]}]}]} );
-
- ok( {$or:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
-
- // now test $nor
-
- ok( {$and:[{a:{$ne:2}}]} );
- ok( {$nor:[{a:{$ne:1}}]} );
-
- ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
- ok( {$nor:[{$nor:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$nor:[{a:{$ne:1}}]}]} );
- ok( {$nor:[{$and:[{a:{$ne:1}}]}]} );
-
- ok( {$and:[{$and:[{$nor:[{a:{$ne:1}}]}]}]} );
- ok( {$and:[{$nor:[{$and:[{a:{$ne:1}}]}]}]} );
- ok( {$nor:[{$and:[{$and:[{a:{$ne:1}}]}]}]} );
-
- ok( {$nor:[{$and:[{$nor:[{a:{$ne:2}}]}]}]} );
-
-}
-
-t.drop();
-t.save( {a:1} );
-test();
-t.ensureIndex( {a:1} );
-test();
diff --git a/jstests/apitest_db.js b/jstests/apitest_db.js
deleted file mode 100644
index c734d67bba7..00000000000
--- a/jstests/apitest_db.js
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Tests for the db object enhancement
- */
-
-assert( "test" == db, "wrong database currently not test" );
-
-dd = function( x ){
- //print( x );
-}
-
-dd( "a" );
-
-
-dd( "b" );
-
-/*
- * be sure the public collection API is complete
- */
-assert(db.createCollection , "createCollection" );
-assert(db.getProfilingLevel , "getProfilingLevel" );
-assert(db.setProfilingLevel , "setProfilingLevel" );
-assert(db.dbEval , "dbEval" );
-assert(db.group , "group" );
-
-dd( "c" );
-
-/*
- * test createCollection
- */
-
-db.getCollection( "test" ).drop();
-db.getCollection( "system.namespaces" ).find().forEach( function(x) { assert(x.name != "test.test"); });
-
-dd( "d" );
-
-db.createCollection("test");
-var found = false;
-db.getCollection( "system.namespaces" ).find().forEach( function(x) { if (x.name == "test.test") found = true; });
-assert(found, "found test.test in system.namespaces");
-
-dd( "e" );
-
-/*
- * profile level
- */
-
-db.setProfilingLevel(0);
-assert(db.getProfilingLevel() == 0, "prof level 0");
-
-db.setProfilingLevel(1);
-assert(db.getProfilingLevel() == 1, "p1");
-
-db.setProfilingLevel(2);
-assert(db.getProfilingLevel() == 2, "p2");
-
-db.setProfilingLevel(0);
-assert(db.getProfilingLevel() == 0, "prof level 0");
-
-dd( "f" );
-asserted = false;
-try {
- db.setProfilingLevel(10);
- assert(false);
-}
-catch (e) {
- asserted = true;
- assert(e.dbSetProfilingException);
-}
-assert( asserted, "should have asserted" );
-
-dd( "g" );
-
-
-
-assert.eq( "foo" , db.getSisterDB( "foo" ).getName() )
-assert.eq( "foo" , db.getSiblingDB( "foo" ).getName() )
-
diff --git a/jstests/apitest_dbcollection.js b/jstests/apitest_dbcollection.js
deleted file mode 100644
index 0983b065477..00000000000
--- a/jstests/apitest_dbcollection.js
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Tests for the db collection
- */
-
-
-
-/*
- * test drop
- */
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).find().length() == 0,1);
-
-db.getCollection( "test_db" ).save({a:1});
-assert(db.getCollection( "test_db" ).find().length() == 1,2);
-
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).find().length() == 0,3);
-
-/*
- * test count
- */
-
-assert(db.getCollection( "test_db" ).count() == 0,4);
-db.getCollection( "test_db" ).save({a:1});
-assert(db.getCollection( "test_db" ).count() == 1,5);
-for (i = 0; i < 100; i++) {
- db.getCollection( "test_db" ).save({a:1});
-}
-assert(db.getCollection( "test_db" ).count() == 101,6);
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).count() == 0,7);
-
-/*
- * test clean (not sure... just be sure it doen't blow up, I guess
- */
-
- db.getCollection( "test_db" ).clean();
-
- /*
- * test validate
- */
-
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).count() == 0,8);
-
-for (i = 0; i < 100; i++) {
- db.getCollection( "test_db" ).save({a:1});
-}
-
-var v = db.getCollection( "test_db" ).validate();
-if( v.ns != "test.test_db" ) {
- print("Error: wrong ns name");
- print(tojson(v));
-}
-assert (v.ns == "test.test_db",9);
-assert (v.ok == 1,10);
-
-assert.eq(100,v.nrecords,11)
-
-/*
- * test deleteIndex, deleteIndexes
- */
-
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).count() == 0,12);
-db.getCollection( "test_db" ).dropIndexes();
-assert(db.getCollection( "test_db" ).getIndexes().length == 0,13);
-
-db.getCollection( "test_db" ).save({a:10});
-assert(db.getCollection( "test_db" ).getIndexes().length == 1,14);
-
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).save({a:10});
-
-print( tojson( db.getCollection( "test_db" ).getIndexes() ) );
-assert.eq(db.getCollection( "test_db" ).getIndexes().length , 2,15);
-
-db.getCollection( "test_db" ).dropIndex({a:1});
-assert(db.getCollection( "test_db" ).getIndexes().length == 1,16);
-
-db.getCollection( "test_db" ).save({a:10});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).save({a:10});
-
-assert(db.getCollection( "test_db" ).getIndexes().length == 2,17);
-
-db.getCollection( "test_db" ).dropIndex("a_1");
-assert.eq( db.getCollection( "test_db" ).getIndexes().length , 1,18);
-
-db.getCollection( "test_db" ).save({a:10, b:11});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).ensureIndex({b:1});
-db.getCollection( "test_db" ).save({a:10, b:12});
-
-assert(db.getCollection( "test_db" ).getIndexes().length == 3,19);
-
-db.getCollection( "test_db" ).dropIndex({b:1});
-assert(db.getCollection( "test_db" ).getIndexes().length == 2,20);
-db.getCollection( "test_db" ).dropIndex({a:1});
-assert(db.getCollection( "test_db" ).getIndexes().length == 1,21);
-
-db.getCollection( "test_db" ).save({a:10, b:11});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).ensureIndex({b:1});
-db.getCollection( "test_db" ).save({a:10, b:12});
-
-assert(db.getCollection( "test_db" ).getIndexes().length == 3,22);
-
-db.getCollection( "test_db" ).dropIndexes();
-assert(db.getCollection( "test_db" ).getIndexes().length == 1,23);
-
-db.getCollection( "test_db" ).find();
-
-db.getCollection( "test_db" ).drop();
-assert(db.getCollection( "test_db" ).getIndexes().length == 0,24);
diff --git a/jstests/apply_ops1.js b/jstests/apply_ops1.js
deleted file mode 100644
index 9e6cb39f7c7..00000000000
--- a/jstests/apply_ops1.js
+++ /dev/null
@@ -1,66 +0,0 @@
-
-t = db.apply_ops1;
-t.drop();
-
-assert.eq( 0 , t.find().count() , "A0" );
-a = db.adminCommand( { applyOps : [ { "op" : "i" , "ns" : t.getFullName() , "o" : { _id : 5 , x : 17 } } ] } )
-assert.eq( 1 , t.find().count() , "A1a" );
-assert.eq( true, a.results[0], "A1b" );
-
-o = { _id : 5 , x : 17 }
-assert.eq( o , t.findOne() , "A2" );
-
-res = db.runCommand( { applyOps : [
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
-] } )
-
-o.x++;
-o.x++;
-
-assert.eq( 1 , t.find().count() , "A3" );
-assert.eq( o , t.findOne() , "A4" );
-assert.eq( true, res.results[0], "A1b" );
-assert.eq( true, res.results[1], "A1b" );
-
-
-res = db.runCommand( { applyOps :
- [
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
- ]
- ,
- preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ]
- } );
-
-o.x++;
-o.x++;
-
-assert.eq( 1 , t.find().count() , "B1" );
-assert.eq( o , t.findOne() , "B2" );
-assert.eq( true, res.results[0], "B2a" );
-assert.eq( true, res.results[1], "B2b" );
-
-
-res = db.runCommand( { applyOps :
- [
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
- ]
- ,
- preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ]
- } );
-
-assert.eq( 1 , t.find().count() , "B3" );
-assert.eq( o , t.findOne() , "B4" );
-
-res = db.runCommand( { applyOps :
- [
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
- { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 6 } , "o" : { $inc : { x : 1 } } }
- ]
- } );
-
-assert.eq( true, res.results[0], "B5" );
-assert.eq( true, res.results[1], "B6" );
-
diff --git a/jstests/apply_ops2.js b/jstests/apply_ops2.js
deleted file mode 100644
index 1a5923c3465..00000000000
--- a/jstests/apply_ops2.js
+++ /dev/null
@@ -1,71 +0,0 @@
-//Test applyops upsert flag SERVER-7452
-
-var t = db.apply_ops2;
-t.drop();
-
-assert.eq(0, t.find().count(), "test collection not empty");
-
-t.insert({_id:1, x:"init"});
-
-//alwaysUpsert = true
-print("Testing applyOps with alwaysUpsert = true");
-
-var res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2 : { _id: 1 },
- o: { $set: { x: "upsert=true existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 2 },
- o: { $set : { x: "upsert=true non-existing" }}
- }], alwaysUpsert: true });
-
-assert.eq(true, res.results[0], "upsert = true, existing doc update failed");
-assert.eq(true, res.results[1], "upsert = true, nonexisting doc not upserted");
-assert.eq(2, t.find().count(), "2 docs expected after upsert");
-
-//alwaysUpsert = false
-print("Testing applyOps with alwaysUpsert = false");
-
-res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 1 },
- o: { $set : { x: "upsert=false existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 3 },
- o: { $set: { x: "upsert=false non-existing" }}
- }], alwaysUpsert: false });
-
-assert.eq(true, res.results[0], "upsert = false, existing doc update failed");
-assert.eq(false, res.results[1], "upsert = false, nonexisting doc upserted");
-assert.eq(2, t.find().count(), "2 docs expected after upsert failure");
-
-//alwaysUpsert not specified, should default to true
-print("Testing applyOps with default alwaysUpsert");
-
-res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 1 },
- o: { $set: { x: "upsert=default existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 4 },
- o: { $set: { x: "upsert=defaults non-existing" }}
- }]});
-
-assert.eq(true, res.results[0], "default upsert, existing doc update failed");
-assert.eq(true, res.results[1], "default upsert, nonexisting doc not upserted");
-assert.eq(3, t.find().count(), "2 docs expected after upsert failure");
diff --git a/jstests/array1.js b/jstests/array1.js
deleted file mode 100644
index 4409b7bb4d3..00000000000
--- a/jstests/array1.js
+++ /dev/null
@@ -1,14 +0,0 @@
-t = db.array1
-t.drop()
-
-x = { a : [ 1 , 2 ] };
-
-t.save( { a : [ [1,2] ] } );
-assert.eq( 1 , t.find( x ).count() , "A" );
-
-t.save( x );
-delete x._id;
-assert.eq( 2 , t.find( x ).count() , "B" );
-
-t.ensureIndex( { a : 1 } );
-assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146
diff --git a/jstests/array3.js b/jstests/array3.js
deleted file mode 100644
index 3d053f99417..00000000000
--- a/jstests/array3.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" )
-assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" )
-
-arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ]
-assert.eq( 5 , Array.avg( arr ) , "C" )
-assert.eq( 2 , Array.stdDev( arr ) , "D" )
-
diff --git a/jstests/array4.js b/jstests/array4.js
deleted file mode 100644
index 1053e160f11..00000000000
--- a/jstests/array4.js
+++ /dev/null
@@ -1,30 +0,0 @@
-
-t = db.array4;
-t.drop();
-
-t.insert({"a": ["1", "2", "3"]});
-t.insert({"a" : ["2", "1"]});
-
-var x = {'a.0' : /1/};
-
-assert.eq(t.count(x), 1);
-
-assert.eq(t.findOne(x).a[0], 1);
-assert.eq(t.findOne(x).a[1], 2);
-
-t.drop();
-
-t.insert({"a" : {"0" : "1"}});
-t.insert({"a" : ["2", "1"]});
-
-assert.eq(t.count(x), 1);
-assert.eq(t.findOne(x).a[0], 1);
-
-t.drop();
-
-t.insert({"a" : ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]});
-t.insert({"a" : ["2", "1"]});
-
-x = {"a.12" : /2/};
-assert.eq(t.count(x), 1);
-assert.eq(t.findOne(x).a[0], 0);
diff --git a/jstests/array_match1.js b/jstests/array_match1.js
deleted file mode 100644
index f764fb913b1..00000000000
--- a/jstests/array_match1.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-t = db.array_match1
-t.drop();
-
-t.insert( { _id : 1 , a : [ 5 , 5 ] } )
-t.insert( { _id : 2 , a : [ 6 , 6 ] } )
-t.insert( { _id : 3 , a : [ 5 , 5 ] } )
-
-function test( f , m ){
- var q = {};
-
- q[f] = [5,5];
- assert.eq( 2 , t.find( q ).itcount() , m + "1" )
-
- q[f] = [6,6];
- assert.eq( 1 , t.find( q ).itcount() , m + "2" )
-}
-
-test( "a" , "A" );
-t.ensureIndex( { a : 1 } )
-test( "a" , "B" );
-
-t.drop();
-
-t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } )
-t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } )
-t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } )
-
-test( "a.b" , "C" );
-t.ensureIndex( { a : 1 } )
-test( "a.b" , "D" );
diff --git a/jstests/array_match2.js b/jstests/array_match2.js
deleted file mode 100644
index d254b0a3fdd..00000000000
--- a/jstests/array_match2.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.jstests_array_match2;
-t.drop();
-
-t.save( {a:[{1:4},5]} );
-// When the array index is the last field, both of these match types work.
-assert.eq( 1, t.count( {'a.1':4} ) );
-assert.eq( 1, t.count( {'a.1':5} ) );
-
-t.remove({});
-// When the array index is not the last field, only one of the match types works.
-t.save( {a:[{1:{foo:4}},{foo:5}]} );
-assert.eq( 1, t.count( {'a.1.foo':4} ) );
-assert.eq( 1, t.count( {'a.1.foo':5} ) );
-
-// Same issue with the $exists operator
-t.remove({});
-t.save( {a:[{1:{foo:4}},{}]} );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.foo':{$exists:true}} ) );
diff --git a/jstests/array_match3.js b/jstests/array_match3.js
deleted file mode 100644
index c8653430770..00000000000
--- a/jstests/array_match3.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// SERVER-2902 Test indexing of numerically referenced array elements.
-
-t = db.jstests_array_match3;
-t.drop();
-
-// Test matching numericallly referenced array element.
-t.save( {a:{'0':5}} );
-t.save( {a:[5]} );
-assert.eq( 2, t.count( {'a.0':5} ) );
-
-// Test with index.
-t.ensureIndex( {'a.0':1} );
-assert.eq( 2, t.count( {'a.0':5} ) );
diff --git a/jstests/array_match4.js b/jstests/array_match4.js
deleted file mode 100644
index b4cdec5143a..00000000000
--- a/jstests/array_match4.js
+++ /dev/null
@@ -1,30 +0,0 @@
-var t = db.array_match4;
-
-t.drop();
-t.save({a: [1, 2]});
-
-var query_gte = {a: {$gte: [1, 2]}};
-
-//
-// without index
-//
-
-assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (without index)');
-assert.eq(1, t.find(query_gte).itcount(), '$gte (without index)');
-
-//
-// with index
-//
-
-t.ensureIndex({a: 1});
-assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (with index)');
-
-// display explain output (for index bounds)
-var explain = t.find(query_gte).explain();
-print('explain for ' + tojson(query_gte, '', true) + ' = ' + tojson(explain));
-
-// number of documents returned by indexes query should be consistent
-// with non-indexed case.
-// XXX: The following assertion documents current behavior.
-// XXX: 2.4 and 2.6 both return 0 documents.
-assert.eq(0, t.find(query_gte).itcount(), '$gte (with index)');
diff --git a/jstests/arrayfind1.js b/jstests/arrayfind1.js
deleted file mode 100644
index 539fa6193a1..00000000000
--- a/jstests/arrayfind1.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-t = db.arrayfind1;
-t.drop();
-
-t.save( { a : [ { x : 1 } ] } )
-t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } )
-t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } )
-
-function test( exptected , q , name ){
- assert.eq( exptected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" );
- assert.eq( exptected , t.find( q ).count() , name + " " + tojson( q ) + " count" );
-}
-
-test( 3 , {} , "A1" );
-test( 1 , { "a.y" : 2 } , "A2" );
-test( 1 , { "a" : { x : 1 } } , "A3" );
-test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377
-
-
-t.save( { a : [ { x : 2 } ] } )
-t.save( { a : [ { x : 3 } ] } )
-t.save( { a : [ { x : 4 } ] } )
-
-assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" );
-assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" );
-
-t.ensureIndex( { "a.x" : 1 } );
-assert( t.find( { "a" : { $elemMatch : { x : 1 } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "C1" );
-
-assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" );
-
-t.find( { "a.x" : 1 } ).count();
-t.find( { "a.x" : { $gt : 1 } } ).count();
-
-res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain()
-assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" );
-assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" );
-
-assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" );
-assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" );
diff --git a/jstests/arrayfind2.js b/jstests/arrayfind2.js
deleted file mode 100644
index c6a78042c3d..00000000000
--- a/jstests/arrayfind2.js
+++ /dev/null
@@ -1,29 +0,0 @@
-
-t = db.arrayfind2;
-t.drop();
-
-function go( prefix ){
- assert.eq( 3 , t.count() , prefix + " A1" );
- assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" );
- assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" );
- assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } ,
- { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" );
-
- assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ) } );
- assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ) } );
-
-}
-
-t.save( { a : [ { x : 1 } , { x : 5 } ] } )
-t.save( { a : [ { x : 3 } , { x : 5 } ] } )
-t.save( { a : [ { x : 3 } , { x : 6 } ] } )
-
-go( "no index" );
-t.ensureIndex( { a : 1 } );
-go( "index(a)" );
-
-t.ensureIndex( { "a.x": 1 } );
-
-assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds );
-// only first $elemMatch used to find bounds
-assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } }, { $elemMatch : { y : 5 } } ] } } ).explain().indexBounds );
diff --git a/jstests/arrayfind3.js b/jstests/arrayfind3.js
deleted file mode 100644
index de038c84264..00000000000
--- a/jstests/arrayfind3.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.arrayfind3;
-t.drop()
-
-t.save({a:[1,2]})
-t.save({a:[1, 2, 6]})
-t.save({a:[1, 4, 6]})
-
-
-assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "A1" )
-assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "A2" )
-
-t.ensureIndex( { a : 1 } )
-
-assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "B1" );
-assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "B2" )
diff --git a/jstests/arrayfind4.js b/jstests/arrayfind4.js
deleted file mode 100644
index 17b02c8886b..00000000000
--- a/jstests/arrayfind4.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// Test query empty array SERVER-2258
-
-t = db.jstests_arrayfind4;
-t.drop();
-
-t.save( {a:[]} );
-t.ensureIndex( {a:1} );
-
-assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
-
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
-
-t.remove({});
-t.save( {a:[[]]} );
-
-assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
-
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
diff --git a/jstests/arrayfind5.js b/jstests/arrayfind5.js
deleted file mode 100644
index 9ff6e2b8a5f..00000000000
--- a/jstests/arrayfind5.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Test indexed elemmatch of missing field.
-
-t = db.jstests_arrayfind5;
-t.drop();
-
-function check( nullElemMatch ) {
- assert.eq( 1, t.find( {'a.b':1} ).itcount() );
- assert.eq( 1, t.find( {a:{$elemMatch:{b:1}}} ).itcount() );
- assert.eq( nullElemMatch ? 1 : 0 , t.find( {'a.b':null} ).itcount() );
- assert.eq( nullElemMatch ? 1 : 0, t.find( {a:{$elemMatch:{b:null}}} ).itcount() ); // see SERVER-3377
-}
-
-t.save( {a:[{},{b:1}]} );
-check( true );
-t.ensureIndex( {'a.b':1} );
-check( true );
-
-t.drop();
-
-t.save( {a:[5,{b:1}]} );
-check( false );
-t.ensureIndex( {'a.b':1} );
-check( false );
diff --git a/jstests/arrayfind6.js b/jstests/arrayfind6.js
deleted file mode 100644
index f4531cea96a..00000000000
--- a/jstests/arrayfind6.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Check index bound determination for $not:$elemMatch queries. SERVER-5740
-
-t = db.jstests_arrayfind6;
-t.drop();
-
-t.save( { a:[ { b:1, c:2 } ] } );
-
-function checkElemMatchMatches() {
- assert.eq( 1, t.count( { a:{ $elemMatch:{ b:1, c:2 } } } ) );
- assert.eq( 0, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:2 } } } } ) );
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:3 } } } } ) );
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:1 }, c:3 } } } } ) );
- // Index bounds must be determined for $not:$elemMatch, not $not:$ne. In this case if index
- // bounds are determined for $not:$ne, the a.b index will be constrained to the interval [2,2]
- // and the saved document will not be matched as it should.
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ) );
-}
-
-checkElemMatchMatches();
-t.ensureIndex( { 'a.b':1 } );
-checkElemMatchMatches();
-
-// We currently never use an index for negations of
-// ELEM_MATCH_OBJECT expressions.
-var explain = t.find( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ).explain();
-assert.eq( "BasicCursor", explain.cursor );
diff --git a/jstests/arrayfind7.js b/jstests/arrayfind7.js
deleted file mode 100644
index 7c44de1dc1d..00000000000
--- a/jstests/arrayfind7.js
+++ /dev/null
@@ -1,52 +0,0 @@
-// Nested $elemMatch clauses. SERVER-5741
-
-t = db.jstests_arrayfind7;
-t.drop();
-
-t.save( { a:[ { b:[ { c:1, d:2 } ] } ] } );
-
-function checkElemMatchMatches() {
- assert.eq( 1, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:1, d:2 } } } } } ) );
-}
-
-// The document is matched using nested $elemMatch expressions, with and without an index.
-checkElemMatchMatches();
-t.ensureIndex( { 'a.b.c':1 } );
-checkElemMatchMatches();
-
-function checkElemMatch( index, document, query ) {
- // The document is matched without an index, and with single and multi key indexes.
- t.drop();
- t.save( document );
- assert.eq( 1, t.count( query ) );
- t.ensureIndex( index );
- assert.eq( 1, t.count( query ) );
- t.save( { a:{ b:{ c:[ 10, 11 ] } } } ); // Make the index multikey.
- assert.eq( 1, t.count( query ) );
-}
-
-// Two constraints within a nested $elemMatch expression.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ] } ] },
- { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } });
-
-// Two constraints within a nested $elemMatch expression, one of which contains the other.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:2 } ] } ] },
- { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $in:[2] } } } } } });
-
-// Two nested $elemMatch expressions.
-checkElemMatch( { 'a.d.e':1, 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ], d:[ { e:1 } ] } ] },
- { a:{ $elemMatch:{ d:{ $elemMatch:{ e:{ $lte:1 } } },
- b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
-
-// A non $elemMatch expression and a nested $elemMatch expression.
-checkElemMatch( { 'a.x':1, 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ], x:1 } ] },
- { 'a.x':1, a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
-
-// $elemMatch is applied directly to a top level field.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:[ 1 ] } ] } ] },
- { a:{ $elemMatch:{ 'b.c':{ $elemMatch:{ $gte:1, $lte:1 } } } } });
diff --git a/jstests/arrayfind8.js b/jstests/arrayfind8.js
deleted file mode 100644
index 07d44ace26e..00000000000
--- a/jstests/arrayfind8.js
+++ /dev/null
@@ -1,175 +0,0 @@
-// Matching behavior for $elemMatch applied to a top level element.
-// SERVER-1264
-// SERVER-4180
-
-t = db.jstests_arrayfind8;
-t.drop();
-
-function debug( x ) {
- if ( debuggingEnabled = false ) {
- printjson( x );
- }
-}
-
-/** Set index state for the test. */
-function setIndexKey( key ) {
- indexKey = key;
- indexSpec = {};
- indexSpec[ key ] = 1;
-}
-
-setIndexKey( 'a' );
-
-function indexBounds( query ) {
- debug( query );
- debug( t.find( query ).hint( indexSpec ).explain() );
- return t.find( query ).hint( indexSpec ).explain().indexBounds[ indexKey ];
-}
-
-/** Check that the query results match the documents in the 'expected' array. */
-function assertResults( expected, query, context ) {
- debug( query );
- assert.eq( expected.length, t.count( query ), 'unexpected count in ' + context );
- results = t.find( query ).toArray();
- for( i in results ) {
- found = false;
- for( j in expected ) {
- if ( friendlyEqual( expected[ j ], results[ i ].a ) ) {
- found = true;
- }
- }
- assert( found, 'unexpected result ' + results[ i ] + ' in ' + context );
- }
-}
-
-/**
- * Check matching for different query types.
- * @param bothMatch - document matched by both standardQuery and elemMatchQuery
- * @param elemMatch - document matched by elemMatchQuery but not standardQuery
- * @param notElemMatch - document matched by standardQuery but not elemMatchQuery
- */
-function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context ) {
-
- function mayPush( arr, elt ) {
- if ( elt ) {
- arr.push( elt );
- }
- }
-
- expectedStandardQueryResults = [];
- mayPush( expectedStandardQueryResults, bothMatch );
- mayPush( expectedStandardQueryResults, nonElemMatch );
- assertResults( expectedStandardQueryResults, standardQuery, context + ' standard query' );
-
- expectedElemMatchQueryResults = [];
- mayPush( expectedElemMatchQueryResults, bothMatch );
- mayPush( expectedElemMatchQueryResults, elemMatch );
- assertResults( expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query' );
-}
-
-/**
- * Check matching and for different query types.
- * @param subQuery - part of a query, to be provided as is for a standard query and within a
- * $elemMatch clause for a $elemMatch query
- * @param bothMatch - document matched by both standardQuery and elemMatchQuery
- * @param elemMatch - document matched by elemMatchQuery but not standardQuery
- * @param notElemMatch - document matched by standardQuery but not elemMatchQuery
- * @param additionalConstraints - additional query parameters not generated from @param subQuery
- */
-function checkQuery( subQuery, bothMatch, elemMatch, nonElemMatch,
- additionalConstraints ) {
- t.drop();
- additionalConstraints = additionalConstraints || {};
-
- // Construct standard and elemMatch queries from subQuery.
- firstSubQueryKey = Object.keySet( subQuery )[ 0 ];
- if ( firstSubQueryKey[ 0 ] == '$' ) {
- standardQuery = { $and:[ { a:subQuery }, additionalConstraints ] };
- }
- else {
- // If the subQuery contains a field rather than operators, append to the 'a' field.
- modifiedSubQuery = {};
- modifiedSubQuery[ 'a.' + firstSubQueryKey ] = subQuery[ firstSubQueryKey ];
- standardQuery = { $and:[ modifiedSubQuery, additionalConstraints ] };
- }
- elemMatchQuery = { $and:[ { a:{ $elemMatch:subQuery } }, additionalConstraints ] };
- debug( elemMatchQuery );
-
- function maySave( aValue ) {
- if ( aValue ) {
- debug( { a:aValue } );
- t.save( { a:aValue } );
- }
- }
-
- // Save all documents and check matching without indexes.
- maySave( bothMatch );
- maySave( elemMatch );
- maySave( nonElemMatch );
-
- checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed' );
-
- // Check matching and index bounds for a single key index.
-
- t.drop();
- maySave( bothMatch );
- maySave( elemMatch );
- // The nonElemMatch document is not tested here, as it will often make the index multikey.
- t.ensureIndex( indexSpec );
- checkMatch( bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index' );
-
- // Check matching and index bounds for a multikey index.
-
- // Now the nonElemMatch document is tested.
- maySave( nonElemMatch );
- // Force the index to be multikey.
- t.save( { a:[ -1, -2 ] } );
- t.save( { a:{ b:[ -1, -2 ] } } );
- checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery,
- 'multikey index' );
-}
-
-maxNumber = Infinity;
-
-// Basic test.
-checkQuery( { $gt:4 }, [ 5 ] );
-
-// Multiple constraints within a $elemMatch clause.
-checkQuery( { $gt:4, $lt:6 }, [ 5 ], null, [ 3, 7 ] );
-checkQuery( { $gt:4, $not:{ $gte:6 } }, [ 5 ] );
-checkQuery( { $gt:4, $not:{ $ne:6 } }, [ 6 ] );
-checkQuery( { $gte:5, $lte:5 }, [ 5 ], null, [ 4, 6 ] );
-checkQuery( { $in:[ 4, 6 ], $gt:5 }, [ 6 ], null, [ 4, 7 ] );
-checkQuery( { $regex:'^a' }, [ 'a' ] );
-
-// Some constraints within a $elemMatch clause and other constraints outside of it.
-checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $lt:6 } } );
-checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $lte:5 } } );
-checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $gt:5 } } );
-
-// Constraints in different $elemMatch clauses.
-checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
-checkQuery( { $gt:4 }, [ 3, 7 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
-checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lte:5 } } } );
-checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $elemMatch:{ $gt:5 } } } );
-
-// TODO SERVER-1264
-if ( 0 ) {
-checkQuery( { $elemMatch:{ $in:[ 5 ] } }, null, [[ 5 ]], [ 5 ], null );
-}
-
-setIndexKey( 'a.b' );
-checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:1 } ]],
- [ { b:1 } ], null );
-checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:[ 0, 2 ] } ]],
- [ { b:[ 0, 2 ] } ], null );
-
-// Constraints for a top level (SERVER-1264 style) $elemMatch nested within a non top level
-// $elemMatch.
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:1 } } }, [ { b:[ 1 ] } ] );
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 1 ] } ] );
-
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 2 ] } ], null,
- null, { 'a.b':{ $in:[ 2, 5 ] } } );
-checkQuery( { b:{ $elemMatch:{ $in:[ 1, 2 ] }, $in:[ 2, 3 ] } },
- [ { b:[ 2 ] } ], null, [ { b:[ 1 ] }, { b:[ 3 ] } ], null );
diff --git a/jstests/arrayfind9.js b/jstests/arrayfind9.js
deleted file mode 100644
index 4ee14c56580..00000000000
--- a/jstests/arrayfind9.js
+++ /dev/null
@@ -1,34 +0,0 @@
-// Assorted $elemMatch behavior checks.
-
-t = db.jstests_arrayfind9;
-t.drop();
-
-// Top level field $elemMatch:$not matching
-t.save( { a:[ 1 ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $not:{ $ne:1 } } } } ) );
-
-// Top level field object $elemMatch matching.
-t.drop();
-t.save( { a:[ {} ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $gte:{} } } } ) );
-
-// Top level field array $elemMatch matching.
-t.drop();
-t.save( { a:[ [] ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $in:[ [] ] } } } ) );
-
-// Matching by array index.
-t.drop();
-t.save( { a:[ [ 'x' ] ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ '0':'x' } } } ) );
-
-// Matching multiple values of a nested array.
-t.drop();
-t.save( { a:[ { b:[ 0, 2 ] } ] } );
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { 'a.b':1 } );
-plans = [ { $natural:1 }, { a:1 }, { 'a.b':1 } ];
-for( i in plans ) {
- p = plans[ i ];
- assert.eq( 1, t.find( { a:{ $elemMatch:{ b:{ $gte:1, $lte:1 } } } } ).hint( p ).itcount() );
-}
diff --git a/jstests/arrayfinda.js b/jstests/arrayfinda.js
deleted file mode 100644
index 179d3985580..00000000000
--- a/jstests/arrayfinda.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Assorted $elemMatch matching behavior checks.
-
-t = db.jstests_arrayfinda;
-t.drop();
-
-// $elemMatch only matches elements within arrays (a descriptive, not a normative test).
-t.save( { a:[ { b:1 } ] } );
-t.save( { a:{ b:1 } } );
-
-function assertExpectedMatch( cursor ) {
- assert.eq( [ { b:1 } ], cursor.next().a );
- assert( !cursor.hasNext() );
-}
-
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ) );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ) );
-
-// $elemMatch is not used to perform key matching. SERVER-6001
-t.ensureIndex( { a:1 } );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ).hint( { a:1 } ) );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ).hint( { a:1 } ) );
diff --git a/jstests/auth1.js b/jstests/auth1.js
deleted file mode 100644
index 4ab26e8d2e5..00000000000
--- a/jstests/auth1.js
+++ /dev/null
@@ -1,54 +0,0 @@
-var mydb = db.getSiblingDB('auth1_db');
-mydb.dropAllUsers();
-
-pass = "a" + Math.random();
-//print( "password [" + pass + "]" );
-
-mydb.createUser({user: "eliot" ,pwd: pass, roles: jsTest.basicUserRoles});
-
-assert( mydb.auth( "eliot" , pass ) , "auth failed" );
-assert( ! mydb.auth( "eliot" , pass + "a" ) , "auth should have failed" );
-
-pass2 = "b" + Math.random();
-mydb.changeUserPassword("eliot", pass2);
-
-assert( ! mydb.auth( "eliot" , pass ) , "failed to change password failed" );
-assert( mydb.auth( "eliot" , pass2 ) , "new password didn't take" );
-
-assert( mydb.auth( "eliot" , pass2 ) , "what?" );
-mydb.dropUser( "eliot" );
-assert( ! mydb.auth( "eliot" , pass2 ) , "didn't drop user" );
-
-
-var a = mydb.getMongo().getDB( "admin" );
-a.dropAllUsers();
-pass = "c" + Math.random();
-a.createUser({user: "super", pwd: pass, roles: jsTest.adminUserRoles});
-assert( a.auth( "super" , pass ) , "auth failed" );
-assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" );
-
-mydb.dropAllUsers();
-pass = "a" + Math.random();
-
-mydb.createUser({user: "eliot" , pwd: pass, roles: jsTest.basicUserRoles});
-
-assert.commandFailed( mydb.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) );
-
-// check sanity check SERVER-3003
-
-var before = a.system.users.count({db: mydb.getName()});
-
-assert.throws( function(){
- mydb.createUser({ user: "" , pwd: "abc", roles: jsTest.basicUserRoles});
-} , null , "C1" )
-assert.throws( function(){
- mydb.createUser({ user: "abc" , pwd: "", roles: jsTest.basicUserRoles});
-} , null , "C2" )
-
-
-var after = a.system.users.count({db: mydb.getName()});
-assert( before > 0 , "C3" )
-assert.eq( before , after , "C4" )
-
-// Clean up after ourselves so other tests using authentication don't get messed up.
-mydb.dropAllUsers()
diff --git a/jstests/auth2.js b/jstests/auth2.js
deleted file mode 100644
index 9c2b38f682d..00000000000
--- a/jstests/auth2.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// just make sure logout doesn't break anything
-
-// SERVER-724
-db.runCommand({logout : 1});
-x = db.runCommand({logout : 1});
-assert.eq( 1 , x.ok , "A" )
-
-x = db.logout();
-assert.eq( 1 , x.ok , "B" )
diff --git a/jstests/auth_copydb.js b/jstests/auth_copydb.js
deleted file mode 100644
index f04cd0b0d29..00000000000
--- a/jstests/auth_copydb.js
+++ /dev/null
@@ -1,19 +0,0 @@
-a = db.getSisterDB( "copydb2-test-a" );
-b = db.getSisterDB( "copydb2-test-b" );
-
-a.dropDatabase();
-b.dropDatabase();
-a.dropAllUsers();
-b.dropAllUsers();
-
-a.foo.save( { a : 1 } );
-
-a.createUser({user: "chevy" , pwd: "chase", roles: jsTest.basicUserRoles});
-
-assert.eq( 1 , a.foo.count() , "A" );
-assert.eq( 0 , b.foo.count() , "B" );
-
-// SERVER-727
-a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" );
-assert.eq( 1 , a.foo.count() , "C" );
-assert.eq( 1 , b.foo.count() , "D" );
diff --git a/jstests/autoid.js b/jstests/autoid.js
deleted file mode 100644
index 6c8062fd093..00000000000
--- a/jstests/autoid.js
+++ /dev/null
@@ -1,11 +0,0 @@
-f = db.jstests_autoid;
-f.drop();
-
-f.save( {z:1} );
-a = f.findOne( {z:1} );
-f.update( {z:1}, {z:2} );
-b = f.findOne( {z:2} );
-assert.eq( a._id.str, b._id.str );
-c = f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} );
-c = f.findOne( {} );
-assert.eq( a._id.str, c._id.str );
diff --git a/jstests/bad_index_plugin.js b/jstests/bad_index_plugin.js
deleted file mode 100644
index 370eca4c4d7..00000000000
--- a/jstests/bad_index_plugin.js
+++ /dev/null
@@ -1,11 +0,0 @@
-// SERVER-5826 ensure you can't build an index with a non-existent plugin
-t = db.bad_index_plugin;
-
-assert.eq(t.ensureIndex({good: 1}), undefined);
-assert.eq(t.getIndexes().length, 2); // good + _id
-
-err = t.ensureIndex({bad: 'bad'});
-assert.neq(err, undefined);
-assert(err.code >= 0);
-
-assert.eq(t.getIndexes().length, 2); // good + _id (no bad)
diff --git a/jstests/basic1.js b/jstests/basic1.js
deleted file mode 100644
index e5fa577f0b2..00000000000
--- a/jstests/basic1.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.getCollection( "basic1" );
-t.drop();
-
-o = { a : 1 };
-t.save( o );
-
-assert.eq( 1 , t.findOne().a , "first" );
-assert( o._id , "now had id" );
-assert( o._id.str , "id not a real id" );
-
-o.a = 2;
-t.save( o );
-
-assert.eq( 2 , t.findOne().a , "second" );
-
-assert(t.validate().valid);
-
-// not a very good test of currentOp, but tests that it at least
-// is sort of there:
-assert( db.currentOp().inprog != null );
diff --git a/jstests/basic2.js b/jstests/basic2.js
deleted file mode 100644
index aaa3de4366e..00000000000
--- a/jstests/basic2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.getCollection( "basic2" );
-t.drop();
-
-o = { n : 2 };
-t.save( o );
-
-assert.eq( 1 , t.find().count() );
-
-assert.eq( 2 , t.find( o._id ).toArray()[0].n );
-assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n );
-
-t.remove( o._id );
-assert.eq( 0 , t.find().count() );
-
-assert(t.validate().valid);
diff --git a/jstests/basic3.js b/jstests/basic3.js
deleted file mode 100644
index d778974f64a..00000000000
--- a/jstests/basic3.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// Tests that "." cannot be in field names
-t = db.getCollection( "foo_basic3" );
-t.drop()
-
-//more diagnostics on bad save, if exception fails
-doBadSave = function(param) {
- print("doing save with " + tojson(param))
- t.save(param);
- // Should not get here.
- printjson(db.getLastErrorObj());
-}
-
-//more diagnostics on bad save, if exception fails
-doBadUpdate = function(query, update) {
- print("doing update with " + tojson(query) + " " + tojson(update))
- t.update(query, update);
- // Should not get here.
- printjson(db.getLastErrorObj());
-}
-
-assert.throws(doBadSave, [{"a.b":5}], ". in names aren't allowed doesn't work");
-
-assert.throws(doBadSave,
- [{ "x" : { "a.b" : 5 } }],
- ". in embedded names aren't allowed doesn't work");
-
-// following tests make sure update keys are checked
-t.save({"a": 0,"b": 1})
-
-assert.throws(doBadUpdate, [{a:0}, { "b.b" : 1 }],
- "must deny '.' in key of update");
-
-// upsert with embedded doc
-assert.throws(doBadUpdate, [{a:10}, { c: {"b.b" : 1 }}],
- "must deny embedded '.' in key of update");
-
-// if it is a modifier, it should still go through
-t.update({"a": 0}, {$set: { "c.c": 1}})
-t.update({"a": 0}, {$inc: { "c.c": 1}})
-
-// edge cases
-assert.throws(doBadUpdate, [{a:0}, { "":{"b.b" : 1} }],
- "must deny '' embedded '.' in key of update");
-t.update({"a": 0}, {})
-
diff --git a/jstests/basic4.js b/jstests/basic4.js
deleted file mode 100644
index 0cf7a261e63..00000000000
--- a/jstests/basic4.js
+++ /dev/null
@@ -1,12 +0,0 @@
-t = db.getCollection( "basic4" );
-t.drop();
-
-t.save( { a : 1 , b : 1.0 } );
-
-assert( t.findOne() );
-assert( t.findOne( { a : 1 } ) );
-assert( t.findOne( { a : 1.0 } ) );
-assert( t.findOne( { b : 1 } ) );
-assert( t.findOne( { b : 1.0 } ) );
-
-assert( ! t.findOne( { b : 2.0 } ) );
diff --git a/jstests/basic5.js b/jstests/basic5.js
deleted file mode 100644
index bfa40fb8f5e..00000000000
--- a/jstests/basic5.js
+++ /dev/null
@@ -1,6 +0,0 @@
-t = db.getCollection( "basic5" );
-t.drop();
-
-t.save( { a : 1 , b : [ 1 , 2 , 3 ] } );
-assert.eq( 3 , t.findOne().b.length );
-
diff --git a/jstests/basic6.js b/jstests/basic6.js
deleted file mode 100644
index e0cd6f1586e..00000000000
--- a/jstests/basic6.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-t = db.basic6;
-
-t.findOne();
-t.a.findOne();
-
-assert.eq( "test.basic6" , t.toString() );
-assert.eq( "test.basic6.a" , t.a.toString() );
diff --git a/jstests/basic7.js b/jstests/basic7.js
deleted file mode 100644
index 7bb0d470e82..00000000000
--- a/jstests/basic7.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.basic7;
-t.drop();
-
-t.save( { a : 1 } )
-t.ensureIndex( { a : 1 } );
-
-assert.eq( t.find().toArray()[0].a , 1 );
-assert.eq( t.find().arrayAccess(0).a , 1 );
-assert.eq( t.find()[0].a , 1 );
-
diff --git a/jstests/basic8.js b/jstests/basic8.js
deleted file mode 100644
index 513da0d15d1..00000000000
--- a/jstests/basic8.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.basic8;
-t.drop();
-
-t.save( { a : 1 } );
-o = t.findOne();
-o.b = 2;
-t.save( o );
-
-assert.eq( 1 , t.find().count() , "A" );
-assert.eq( 2 , t.findOne().b , "B" );
diff --git a/jstests/basic9.js b/jstests/basic9.js
deleted file mode 100644
index b8308fba7d0..00000000000
--- a/jstests/basic9.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// Tests that $<prefix> field names are not allowed, but you can use a $ anywhere else.
-t = db.getCollection( "foo_basic9" );
-t.drop()
-
-// more diagnostics on bad save, if exception fails
-doBadSave = function(param) {
- print("doing save with " + tojson(param))
- t.save(param);
- // Should not get here.
- printjson(db.getLastErrorObj());
-}
-
-t.save({foo$foo:5});
-t.save({foo$:5});
-
-assert.throws(doBadSave, [{$foo:5}], "key names aren't allowed to start with $ doesn't work");
-assert.throws(doBadSave,
- [{x:{$foo:5}}],
- "embedded key names aren't allowed to start with $ doesn't work");
diff --git a/jstests/basica.js b/jstests/basica.js
deleted file mode 100644
index 0cc364beb42..00000000000
--- a/jstests/basica.js
+++ /dev/null
@@ -1,33 +0,0 @@
-
-t = db.basica;
-
-
-t.drop();
-
-t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
-
-x = t.findOne();
-x.b["0"].x = 4;
-x.b["0"].z = 4;
-x.b[0].m = 9;
-x.b[0]["asd"] = 11;
-x.a = 2;
-x.z = 11;
-
-tojson( x );
-t.save( x );
-assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" );
-
-// -----
-
-t.drop();
-
-t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
-
-x = t.findOne();
-x.b["0"].z = 4;
-
-//printjson( x );
-t.save( x );
-assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" );
-
diff --git a/jstests/basicb.js b/jstests/basicb.js
deleted file mode 100644
index 0070f70068f..00000000000
--- a/jstests/basicb.js
+++ /dev/null
@@ -1,7 +0,0 @@
-
-t = db.basicb;
-t.drop();
-
-assert.throws( "t.insert( { '$a' : 5 } );" );
-t.insert( { '$a' : 5 } , 0, true );
-
diff --git a/jstests/basicc.js b/jstests/basicc.js
deleted file mode 100644
index 0cd71ad32a0..00000000000
--- a/jstests/basicc.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// test writing to two db's at the same time.
-
-t1 = db.jstests_basicc;
-var db = db.getSisterDB("test_basicc");
-t2 = db.jstests_basicc;
-t1.drop();
-t2.drop();
-
-js = "while( 1 ) { db.jstests.basicc1.save( {} ); }";
-pid = startMongoProgramNoConnect( "mongo" , "--eval" , js , db.getMongo().host );
-
-for( var i = 0; i < 1000; ++i ) {
- t2.save( {} );
-}
-assert.automsg( "!db.getLastError()" );
-stopMongoProgramByPid( pid );
-// put things back the way we found it
-t1.drop();
-t2.drop();
-db.dropDatabase();
-db = db.getSisterDB("test"); \ No newline at end of file
diff --git a/jstests/batch_size.js b/jstests/batch_size.js
deleted file mode 100644
index 6cbc45dc803..00000000000
--- a/jstests/batch_size.js
+++ /dev/null
@@ -1,75 +0,0 @@
-// Test subtleties of batchSize and limit.
-
-var t = db.jstests_batch_size;
-t.drop();
-
-for (var i = 0; i < 4; i++) {
- t.save({_id: i, a: i});
-}
-
-function runIndexedTests() {
- // With limit, indexed.
- assert.eq(2, t.find().limit(2).itcount(), 'G');
- assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'H');
-
- // With batchSize, indexed.
- // SERVER-12438: If there is an index that provides the sort,
- // then a plan with an unindexed sort should never be used.
- // Consequently, batchSize will NOT be a hard limit in this case.
- // WARNING: the behavior described above may change in the future.
- assert.eq(4, t.find().batchSize(2).itcount(), 'I');
- assert.eq(4, t.find().sort({a: 1}).batchSize(2).itcount(), 'J');
-}
-
-// Without batch size or limit, unindexed.
-assert.eq(4, t.find().itcount(), 'A');
-assert.eq(4, t.find().sort({a: 1}).itcount(), 'B');
-
-// With limit, unindexed.
-assert.eq(2, t.find().limit(2).itcount(), 'C');
-assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'D');
-
-assert.eq(4, t.find().batchSize(2).itcount(), 'E');
-assert.eq(4, t.find().sort({a: 1}).batchSize(2).itcount(), 'F');
-
-// Run the tests with the index twice in order to double check plan caching.
-t.ensureIndex({a: 1});
-for (var i = 0; i < 2; i++) {
- runIndexedTests();
-}
-
-// The next tests make sure that we obey limit and batchSize properly when
-// the sort could be either indexed or unindexed.
-t.drop();
-t.ensureIndex({a: 1});
-t.ensureIndex({b: 1});
-
-for (var i = 0; i < 100; i++) {
- t.save({_id: i, a: i, b: 1});
-}
-
-// Without a hint. Do it twice to make sure caching is ok.
-for (var i = 0; i < 2; i++) {
- assert.eq(15, t.find({a: {$gte: 85}}).sort({b: 1}).batchSize(2).itcount(), 'K');
- assert.eq(6, t.find({a: {$gte: 85}}).sort({b: 1}).limit(6).itcount(), 'L');
-}
-
-// Hinting 'a'.
-assert.eq(15, t.find({a: {$gte: 85}}).sort({b: 1}).hint({a: 1}).batchSize(2).itcount(), 'M');
-assert.eq(6, t.find({a: {$gte: 85}}).sort({b: 1}).hint({a: 1}).limit(6).itcount(), 'N');
-
-// Hinting 'b'.
-assert.eq(15, t.find({a: {$gte: 85}}).sort({b: 1}).hint({b: 1}).batchSize(2).itcount(), 'O');
-assert.eq(6, t.find({a: {$gte: 85}}).sort({b: 1}).hint({b: 1}).limit(6).itcount(), 'P');
-
-// With explain.
-assert.eq(15, t.find({a: {$gte: 85}}).sort({b: 1}).batchSize(2).explain().n, 'Q');
-assert.eq(6, t.find({a: {$gte: 85}}).sort({b: 1}).limit(6).explain().n, 'R');
-
-// Double check that we're not scanning more stuff than we have to.
-// In order to get the sort using index 'a', we should need to scan
-// about 50 keys and 50 documents.
-var explain = t.find({a: {$gte: 50}}).sort({b: 1}).hint({a: 1}).limit(6).explain();
-assert.lte(explain.nscanned, 60, 'S');
-assert.lte(explain.nscannedObjects, 60, 'T');
-assert.eq(explain.n, 6, 'U');
diff --git a/jstests/bench_test1.js b/jstests/bench_test1.js
deleted file mode 100644
index bb1423ee8b8..00000000000
--- a/jstests/bench_test1.js
+++ /dev/null
@@ -1,37 +0,0 @@
-
-t = db.bench_test1;
-t.drop();
-
-t.insert( { _id : 1 , x : 1 } )
-t.insert( { _id : 2 , x : 1 } )
-
-ops = [
- { op : "findOne" , ns : t.getFullName() , query : { _id : 1 } } ,
- { op : "update" , ns : t.getFullName() , query : { _id : 1 } , update : { $inc : { x : 1 } } }
-]
-
-seconds = .7
-
-benchArgs = { ops : ops , parallel : 2 , seconds : seconds , host : db.getMongo().host };
-
-if (jsTest.options().auth) {
- benchArgs['db'] = 'admin';
- benchArgs['username'] = jsTest.options().adminUser;
- benchArgs['password'] = jsTest.options().adminPassword;
-}
-res = benchRun( benchArgs );
-
-assert.lte( seconds * res.update , t.findOne( { _id : 1 } ).x * 1.05 , "A1" )
-
-
-assert.eq( 1 , t.getIndexes().length , "B1" )
-benchArgs['ops']=[ { op : "createIndex" , ns : t.getFullName() , key : { x : 1 } } ];
-benchArgs['parallel']=1;
-benchArgs['seconds']=1;
-benchRun( benchArgs );
-assert.eq( 2 , t.getIndexes().length , "B2" )
-benchArgs['ops']=[ { op : "dropIndex" , ns : t.getFullName() , key : { x : 1 } } ];
-benchRun( benchArgs );
-assert.soon( function(){ return t.getIndexes().length == 1; } );
-
-
diff --git a/jstests/bench_test2.js b/jstests/bench_test2.js
deleted file mode 100644
index e2057ac693e..00000000000
--- a/jstests/bench_test2.js
+++ /dev/null
@@ -1,49 +0,0 @@
-
-t = db.bench_test2
-t.drop();
-
-for ( i=0; i<100; i++ )
- t.insert( { _id : i , x : 0 } );
-db.getLastError();
-
-benchArgs = { ops : [ { ns : t.getFullName() ,
- op : "update" ,
- query : { _id : { "#RAND_INT" : [ 0 , 100 ] } } ,
- update : { $inc : { x : 1 } } } ] ,
- parallel : 2 ,
- seconds : 1 ,
- totals : true ,
- host : db.getMongo().host }
-
-if (jsTest.options().auth) {
- benchArgs['db'] = 'admin';
- benchArgs['username'] = jsTest.options().adminUser;
- benchArgs['password'] = jsTest.options().adminPassword;
-}
-
-res = benchRun( benchArgs )
-printjson( res );
-
-sumsq = 0
-sum = 0
-
-min = 1000
-max = 0;
-t.find().forEach(
- function(z){
- sum += z.x;
- sumsq += Math.pow( ( res.update / 100 ) - z.x , 2 );
- min = Math.min( z.x , min );
- max = Math.max( z.x , max );
- }
-)
-
-avg = sum / 100
-std = Math.sqrt( sumsq / 100 )
-
-print( "Avg: " + avg )
-print( "Std: " + std )
-print( "Min: " + min )
-print( "Max: " + max )
-
-
diff --git a/jstests/bench_test3.js b/jstests/bench_test3.js
deleted file mode 100644
index 34e033fe52a..00000000000
--- a/jstests/bench_test3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-t = db.bench_test3
-t.drop();
-
-
-benchArgs = { ops : [ { ns : t.getFullName() ,
- op : "update" ,
- upsert : true ,
- query : { _id : { "#RAND_INT" : [ 0 , 5 , 4 ] } } ,
- update : { $inc : { x : 1 } } } ] ,
- parallel : 2 ,
- seconds : 5 ,
- totals : true ,
- host : db.getMongo().host }
-
-if (jsTest.options().auth) {
- benchArgs['db'] = 'admin';
- benchArgs['username'] = jsTest.options().adminUser;
- benchArgs['password'] = jsTest.options().adminPassword;
-}
-
-res = benchRun( benchArgs )
-printjson( res );
-
-var keys = []
-var totals = {}
-db.bench_test3.find().sort( { _id : 1 } ).forEach( function(z){ keys.push( z._id ); totals[z._id] = z.x } );
-printjson(totals);
-assert.eq( [ 0 , 4 , 8 , 12 , 16 ] , keys )
diff --git a/jstests/big_object1.js b/jstests/big_object1.js
deleted file mode 100644
index 07c4150fb53..00000000000
--- a/jstests/big_object1.js
+++ /dev/null
@@ -1,54 +0,0 @@
-
-t = db.big_object1
-t.drop();
-
-if ( db.adminCommand( "buildinfo" ).bits == 64 ){
-
- var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
- var s = large;
- while ( s.length < 850 * 1024 ){
- s += large;
- }
- x = 0;
- while ( true ){
- n = { _id : x , a : [] }
- for ( i=0; i<14+x; i++ )
- n.a.push( s )
- try {
- t.insert( n )
- o = n
- }
- catch ( e ){
- break;
- }
-
- if ( db.getLastError() != null )
- break;
- x++;
- }
-
- printjson( t.stats(1024*1024) )
-
- assert.lt( 15 * 1024 * 1024 , Object.bsonsize( o ) , "A1" )
- assert.gt( 17 * 1024 * 1024 , Object.bsonsize( o ) , "A2" )
-
- assert.eq( x , t.count() , "A3" )
-
- for ( i=0; i<x; i++ ){
- o = t.findOne( { _id : i } )
- try {
- // test large mongo -> js conversion
- var a = o.a;
- } catch(e) {
- assert(false, "Caught exception trying to insert during iteration " + i + ": " + e);
- }
- assert( o , "B" + i );
- }
-
- t.drop()
-}
-else {
- print( "skipping big_object1 b/c not 64-bit" )
-}
-
-print("SUCCESS");
diff --git a/jstests/binData.js b/jstests/binData.js
deleted file mode 100644
index 3f037650e05..00000000000
--- a/jstests/binData.js
+++ /dev/null
@@ -1,14 +0,0 @@
-
-var x = new BinData(3, "OEJTfmD8twzaj/LPKLIVkA==");
-assert.eq(x.hex(), "3842537e60fcb70cda8ff2cf28b21590", "bad hex");
-assert.eq(x.base64(), "OEJTfmD8twzaj/LPKLIVkA==", "bad base64");
-assert.eq(x.type, 3, "bad type");
-assert.eq(x.length(), 16, "bad length");
-
-x = new BinData(0, "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=");
-assert.eq(x.hex(), "4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e", "bad hex");
-assert.eq(x.base64(), "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=", "bad base64");
-assert.eq(x.type, 0, "bad type");
-assert.eq(x.length(), 269, "bad length");
-
-
diff --git a/jstests/block_check_supported.js b/jstests/block_check_supported.js
deleted file mode 100644
index 21d04ca93c7..00000000000
--- a/jstests/block_check_supported.js
+++ /dev/null
@@ -1,118 +0,0 @@
-// Test that serverStatus() features dependent on the ProcessInfo::blockCheckSupported() routine
-// work correctly. These features are db.serverStatus({workingSet:1}).workingSet and
-// db.serverStatus().indexCounters.
-// Related to SERVER-9242, SERVER-6450.
-
-// Check that an object contains a specific set of fields and only those fields
-// NOTE: destroys 'item'
-//
-var testExpectedFields = function(itemString, item, fieldList) {
- print('Testing ' + itemString + ' for expected fields');
- for (var i = 0; i < fieldList.length; ++i) {
- var field = fieldList[i];
- if (typeof item[field] == 'undefined') {
- doassert('Test FAILED: missing "' + field + '" field');
- }
- delete item[field];
- }
- if (!friendlyEqual({}, item)) {
- doassert('Test FAILED: found unexpected field(s): ' + tojsononeline(item));
- }
-}
-
-// Run test as function to keep cruft out of global namespace
-//
-var doTest = function () {
-
- print('Testing workingSet and indexCounters portions of serverStatus');
- var hostInfo = db.hostInfo();
- var isXP = (hostInfo.os.name == 'Windows XP') ? true : false;
- var isEmpty = (hostInfo.os.name == '') ? true : false;
-
- // Check that the serverStatus command returns something for these sub-documents
- //
- var serverStatus = db.serverStatus({ workingSet: 1 });
- if (!serverStatus) {
- doassert('Test FAILED: db.serverStatus({workingSet:1}) did not return a value');
- }
- if (!serverStatus.workingSet) {
- doassert('Test FAILED: db.serverStatus({workingSet:1}).workingSet was not returned');
- }
- if (!serverStatus.indexCounters) {
- doassert('Test FAILED: db.serverStatus().indexCounters was not returned');
- }
- var workingSet_1 = serverStatus.workingSet;
- var indexCounters_1 = serverStatus.indexCounters;
-
- if (isXP) {
- // Windows XP is the only supported platform that should be missing this data; make sure
- // that we don't get bogus data back
- //
- var expectedResult = { info: 'not supported' };
- print('Testing db.serverStatus({workingSet:1}).workingSet on Windows XP -- expecting ' +
- tojsononeline(expectedResult));
- assert.eq(expectedResult, workingSet_1,
- 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' +
- ' did not return the expected value');
- expectedResult = { note: 'not supported on this platform' };
- print('Testing db.serverStatus().indexCounters on Windows XP -- expecting ' +
- tojsononeline(expectedResult));
- assert.eq(expectedResult, indexCounters_1,
- 'Test FAILED: db.serverStatus().indexCounters' +
- ' did not return the expected value');
- }
- else if (isEmpty) {
- // Until SERVER-9325 is fixed, Solaris/SmartOS will also be missing this data; make sure
- // that we don't get bogus data back
- //
- expectedResult = { info: 'not supported' };
- print('Testing db.serverStatus({workingSet:1}).workingSet on "" (Solaris?) -- expecting ' +
- tojsononeline(expectedResult));
- assert.eq(expectedResult, workingSet_1,
- 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' +
- ' did not return the expected value');
- expectedResult = { note: 'not supported on this platform' };
- print('Testing db.serverStatus().indexCounters on "" (Solaris?) -- expecting ' +
- tojsononeline(expectedResult));
- assert.eq(expectedResult, indexCounters_1,
- 'Test FAILED: db.serverStatus().indexCounters' +
- ' did not return the expected value');
- }
- else {
- // Check that we get both workingSet and indexCounters and that all expected
- // fields are present with no unexpected fields
- //
- testExpectedFields('db.serverStatus({workingSet:1}).workingSet',
- workingSet_1,
- ['note', 'pagesInMemory', 'computationTimeMicros', 'overSeconds']);
- testExpectedFields('db.serverStatus().indexCounters',
- indexCounters_1,
- ['accesses', 'hits', 'misses', 'resets', 'missRatio']);
-
- if (0) { // comment out until SERVER-9284 is fixed
- // See if we can make the index counters values change
- //
- print('Testing that indexCounters accesses and hits increase by 1 on indexed find()');
- var blockDB = db.getSiblingDB('block_check_supported');
- blockDB.dropDatabase();
- blockDB.coll.insert({ a: 1 });
- blockDB.coll.ensureIndex({ a: 1 });
- indexCounters_1 = db.serverStatus().indexCounters;
- var doc = blockDB.coll.findOne({ a: 1 });
- var indexCounters_2 = db.serverStatus().indexCounters;
- assert.gt(indexCounters_2.accesses, indexCounters_1.accesses,
- 'Test FAILED: db.serverStatus().indexCounters.accesses' +
- ' should have had a value greater than ' + indexCounters_1.accesses +
- ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) +
- ', after find(): ' + tojsononeline(indexCounters_2));
- assert.gt(indexCounters_2.hits, indexCounters_1.hits,
- 'Test FAILED: db.serverStatus().indexCounters.hits' +
- ' should have had a value greater than ' + indexCounters_1.hits +
- ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) +
- ', after find(): ' + tojsononeline(indexCounters_2));
- } // comment out until SERVER-9284 is fixed
- }
- print('Test PASSED!');
-};
-
-doTest();
diff --git a/jstests/bulk_insert.js b/jstests/bulk_insert.js
deleted file mode 100644
index e26b323c6d9..00000000000
--- a/jstests/bulk_insert.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// Tests bulk insert of docs from the shell
-
-var coll = db.bulkInsertTest
-coll.drop()
-
-Random.srand( new Date().getTime() )
-
-var bulkSize = Math.floor( Random.rand() * 200 ) + 1
-var numInserts = Math.floor( Random.rand() * 300 ) + 1
-
-print( "Inserting " + numInserts + " bulks of " + bulkSize + " documents." )
-
-for( var i = 0; i < numInserts; i++ ){
- var bulk = []
- for( var j = 0; j < bulkSize; j++ ){
- bulk.push({ hi : "there", i : i, j : j })
- }
-
- coll.insert( bulk )
-}
-
-assert.eq( coll.count(), bulkSize * numInserts )
diff --git a/jstests/capped.js b/jstests/capped.js
deleted file mode 100644
index 421132b6f75..00000000000
--- a/jstests/capped.js
+++ /dev/null
@@ -1,11 +0,0 @@
-db.jstests_capped.drop();
-db.createCollection("jstests_capped", {capped:true, size:30000});
-
-assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_capped"} ).count(), "expected a count of one index for new capped collection" );
-t = db.jstests_capped;
-
-t.save({x:1});
-t.save({x:2});
-
-assert( t.find().sort({$natural:1})[0].x == 1 , "expected obj.x==1");
-assert( t.find().sort({$natural:-1})[0].x == 2, "expected obj.x == 2");
diff --git a/jstests/capped1.js b/jstests/capped1.js
deleted file mode 100644
index 0bbeaa40894..00000000000
--- a/jstests/capped1.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.capped1;
-t.drop();
-
-db.createCollection("capped1" , {capped:true, size:1024 });
-v = t.validate();
-assert( v.valid , "A : " + tojson( v ) ); // SERVER-485
-
-t.save( { x : 1 } )
-assert( t.validate().valid , "B" )
-
diff --git a/jstests/capped2.js b/jstests/capped2.js
deleted file mode 100644
index 65bb82f4c07..00000000000
--- a/jstests/capped2.js
+++ /dev/null
@@ -1,62 +0,0 @@
-db.capped2.drop();
-db._dbCommand( { create: "capped2", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
-tzz = db.capped2;
-
-function debug( x ) {
-// print( x );
-}
-
-var val = new Array( 2000 );
-var c = "";
-for( i = 0; i < 2000; ++i, c += "---" ) { // bigger and bigger objects through the array...
- val[ i ] = { a: c };
-}
-
-function checkIncreasing( i ) {
- res = tzz.find().sort( { $natural: -1 } );
- assert( res.hasNext(), "A" );
- var j = i;
- while( res.hasNext() ) {
- try {
- assert.eq( val[ j-- ].a, res.next().a, "B" );
- } catch( e ) {
- debug( "capped2 err " + j );
- throw e;
- }
- }
- res = tzz.find().sort( { $natural: 1 } );
- assert( res.hasNext(), "C" );
- while( res.hasNext() )
- assert.eq( val[ ++j ].a, res.next().a, "D" );
- assert.eq( j, i, "E" );
-}
-
-function checkDecreasing( i ) {
- res = tzz.find().sort( { $natural: -1 } );
- assert( res.hasNext(), "F" );
- var j = i;
- while( res.hasNext() ) {
- assert.eq( val[ j++ ].a, res.next().a, "G" );
- }
- res = tzz.find().sort( { $natural: 1 } );
- assert( res.hasNext(), "H" );
- while( res.hasNext() )
- assert.eq( val[ --j ].a, res.next().a, "I" );
- assert.eq( j, i, "J" );
-}
-
-for( i = 0 ;; ++i ) {
- debug( "capped 2: " + i );
- tzz.insert( val[ i ] );
- if ( tzz.count() == 0 ) {
- assert( i > 100, "K" );
- break;
- }
- checkIncreasing( i );
-}
-
-for( i = 600 ; i >= 0 ; --i ) {
- debug( "capped 2: " + i );
- tzz.insert( val[ i ] );
- checkDecreasing( i );
-}
diff --git a/jstests/capped3.js b/jstests/capped3.js
deleted file mode 100644
index 2e5e6790cb7..00000000000
--- a/jstests/capped3.js
+++ /dev/null
@@ -1,45 +0,0 @@
-t = db.jstests_capped3;
-t2 = db.jstests_capped3_clone;
-t.drop();
-t2.drop();
-for( i = 0; i < 1000; ++i ) {
- t.save( {i:i} );
-}
-assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ), "A" );
-c = t2.find();
-for( i = 0; i < 1000; ++i ) {
- assert.eq( i, c.next().i, "B" );
-}
-assert( !c.hasNext(), "C" );
-
-t.drop();
-t2.drop();
-
-for( i = 0; i < 1000; ++i ) {
- t.save( {i:i} );
-}
-assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ), "D" );
-c = t2.find().sort( {$natural:-1} );
-i = 999;
-while( c.hasNext() ) {
- assert.eq( i--, c.next().i, "E" );
-}
-//print( "i: " + i );
-var str = tojson( t2.stats() );
-//print( "stats: " + tojson( t2.stats() ) );
-assert( i < 990, "F" );
-
-t.drop();
-t2.drop();
-
-for( i = 0; i < 1000; ++i ) {
- t.save( {i:i} );
-}
-assert.commandWorked( t.convertToCapped( 1000 ), "G" );
-c = t.find().sort( {$natural:-1} );
-i = 999;
-while( c.hasNext() ) {
- assert.eq( i--, c.next().i, "H" );
-}
-assert( i < 990, "I" );
-assert( i > 900, "J" );
diff --git a/jstests/capped5.js b/jstests/capped5.js
deleted file mode 100644
index 37b776ee1ca..00000000000
--- a/jstests/capped5.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-tn = "capped5"
-
-t = db[tn]
-t.drop();
-
-
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 , z : 52 } );
-assert.eq( 1 , t.getIndexKeys().length , "A0" ) //now we assume _id index even on capped coll
-assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" );
-
-t.ensureIndex( { _id : 1 } )
-t.ensureIndex( { x : 1 } )
-
-assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" );
-assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" );
-
-t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 5 , x : 12 } );
-assert.eq( 1, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index
-assert.eq( 1, t.find().toArray().length ); //_id index unique, so second insert fails
-
-t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 6 , x : 12 } );
-t.ensureIndex( { x:1 }, {unique:true} );
-assert.eq( 2, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index
-assert.eq( 2, t.find().hint( {x:1} ).toArray().length );
-
-// SERVER-525 (closed) unique indexes in capped collection
-t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.ensureIndex( { _id:1 } ); // note we assume will be automatically unique because it is _id
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 5 , x : 12 } );
-assert.eq( 1, t.find().toArray().length );
diff --git a/jstests/capped6.js b/jstests/capped6.js
deleted file mode 100644
index 5db12b2fcf9..00000000000
--- a/jstests/capped6.js
+++ /dev/null
@@ -1,109 +0,0 @@
-// Test NamespaceDetails::cappedTruncateAfter via 'captrunc' command
-
-Random.setRandomSeed();
-
-db.capped6.drop();
-db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
-tzz = db.capped6;
-
-function debug( x ) {
-// print( x );
-}
-
-/**
- * Check that documents in the collection are in order according to the value
- * of a, which corresponds to the insert order. This is a check that the oldest
- * document(s) is/are deleted when space is needed for the newest document. The
- * check is performed in both forward and reverse directions.
- */
-function checkOrder( i ) {
- res = tzz.find().sort( { $natural: -1 } );
- assert( res.hasNext(), "A" );
- var j = i;
- while( res.hasNext() ) {
- try {
- assert.eq( val[ j-- ].a, res.next().a, "B" );
- } catch( e ) {
- debug( "capped6 err " + j );
- throw e;
- }
- }
- res = tzz.find().sort( { $natural: 1 } );
- assert( res.hasNext(), "C" );
- while( res.hasNext() )
- assert.eq( val[ ++j ].a, res.next().a, "D" );
- assert.eq( j, i, "E" );
-}
-
-var val = new Array( 500 );
-var c = "";
-for( i = 0; i < 500; ++i, c += "-" ) {
- // The a values are strings of increasing length.
- val[ i ] = { a: c };
-}
-
-var oldMax = Random.randInt( 500 );
-var max = 0;
-
-/**
- * Insert new documents until there are 'oldMax' documents in the collection,
- * then remove a random number of documents (often all but one) via one or more
- * 'captrunc' requests.
- */
-function doTest() {
- for( var i = max; i < oldMax; ++i ) {
- tzz.insert( val[ i ] );
- }
- max = oldMax;
- count = tzz.count();
-
- var min = 1;
- if ( Random.rand() > 0.3 ) {
- min = Random.randInt( count ) + 1;
- }
-
- // Iteratively remove a random number of documents until we have no more
- // than 'min' documents.
- while( count > min ) {
- // 'n' is the number of documents to remove - we must account for the
- // possibility that 'inc' will be true, and avoid removing all documents
- // from the collection in that case, as removing all documents is not
- // allowed by 'captrunc'
- var n = Random.randInt( count - min - 1 ); // 0 <= x <= count - min - 1
- var inc = Random.rand() > 0.5;
- debug( count + " " + n + " " + inc );
- assert.commandWorked( db.runCommand( { captrunc:"capped6", n:n, inc:inc } ) );
- if ( inc ) {
- n += 1;
- }
- count -= n;
- max -= n;
- // Validate the remaining documents.
- checkOrder( max - 1 );
- }
-}
-
-// Repeatedly add up to 'oldMax' documents and then truncate the newest
-// documents. Newer documents take up more space than older documents.
-for( var i = 0; i < 10; ++i ) {
- doTest();
-}
-
-// reverse order of values
-var val = new Array( 500 );
-
-var c = "";
-for( i = 499; i >= 0; --i, c += "-" ) {
- val[ i ] = { a: c };
-}
-db.capped6.drop();
-db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
-tzz = db.capped6;
-
-// Same test as above, but now the newer documents take less space than the
-// older documents instead of more.
-for( var i = 0; i < 10; ++i ) {
- doTest();
-}
-
-tzz.drop();
diff --git a/jstests/capped7.js b/jstests/capped7.js
deleted file mode 100644
index 693828da85f..00000000000
--- a/jstests/capped7.js
+++ /dev/null
@@ -1,89 +0,0 @@
-// Test NamespaceDetails::emptyCappedCollection via 'emptycapped' command
-
-Random.setRandomSeed();
-
-db.capped7.drop();
-db._dbCommand( { create: "capped7", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
-tzz = db.capped7;
-
-var ten = new Array( 11 ).toString().replace( /,/g, "-" );
-
-count = 0;
-
-/**
- * Insert new documents until the capped collection loops and the document
- * count doesn't increase on insert.
- */
-function insertUntilFull() {
-count = tzz.count();
- var j = 0;
-while( 1 ) {
- tzz.save( {i:ten,j:j++} );
- var newCount = tzz.count();
- if ( count == newCount ) {
- break;
- }
- count = newCount;
-}
-}
-
-insertUntilFull();
-
-// oldCount == count before empty
-oldCount = count;
-
-assert.eq.automsg( "11", "tzz.stats().numExtents" );
-
-// oldSize == size before empty
-var oldSize = tzz.stats().storageSize;
-
-assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) );
-
-// check that collection storage parameters are the same after empty
-assert.eq.automsg( "11", "tzz.stats().numExtents" );
-assert.eq.automsg( "oldSize", "tzz.stats().storageSize" );
-
-// check that the collection is empty after empty
-assert.eq.automsg( "0", "tzz.find().itcount()" );
-assert.eq.automsg( "0", "tzz.count()" );
-
-// check that we can reuse the empty collection, inserting as many documents
-// as we were able to the first time through.
-insertUntilFull();
-assert.eq.automsg( "oldCount", "count" );
-assert.eq.automsg( "oldCount", "tzz.find().itcount()" );
-assert.eq.automsg( "oldCount", "tzz.count()" );
-
-assert.eq.automsg( "11", "tzz.stats().numExtents" );
-var oldSize = tzz.stats().storageSize;
-
-assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) );
-
-// check that the collection storage parameters are unchanged after another empty
-assert.eq.automsg( "11", "tzz.stats().numExtents" );
-assert.eq.automsg( "oldSize", "tzz.stats().storageSize" );
-
-// insert an arbitrary number of documents
-var total = Random.randInt( 2000 );
-for( var j = 1; j <= total; ++j ) {
- tzz.save( {i:ten,j:j} );
- // occasionally check that only the oldest documents are removed to make room
- // for the newest documents
- if ( Random.rand() > 0.95 ) {
- assert.automsg( "j >= tzz.count()" );
- assert.eq.automsg( "tzz.count()", "tzz.find().itcount()" );
- var c = tzz.find().sort( {$natural:-1} );
- var k = j;
- assert.automsg( "c.hasNext()" );
- while( c.hasNext() ) {
- assert.eq.automsg( "c.next().j", "k--" );
- }
- // check the same thing with a reverse iterator as well
- var c = tzz.find().sort( {$natural:1} );
- assert.automsg( "c.hasNext()" );
- while( c.hasNext() ) {
- assert.eq.automsg( "c.next().j", "++k" );
- }
- assert.eq.automsg( "j", "k" );
- }
-} \ No newline at end of file
diff --git a/jstests/capped8.js b/jstests/capped8.js
deleted file mode 100644
index 0f30e37aebf..00000000000
--- a/jstests/capped8.js
+++ /dev/null
@@ -1,108 +0,0 @@
-// Test NamespaceDetails::cappedTruncateAfter with empty extents
-
-Random.setRandomSeed();
-
-t = db.jstests_capped8;
-
-function debug( x ) {
-// printjson( x );
-}
-
-/** Generate an object with a string field of specified length */
-function obj( size, x ) {
- return {X:x, a:new Array( size + 1 ).toString()};;
-}
-
-function withinOne( a, b ) {
- assert( Math.abs( a - b ) <= 1, "not within one: " + a + ", " + b )
-}
-
-var X = 0;
-
-/**
- * Insert enough documents of the given size spec that the collection will
- * contain only documents having this size spec.
- */
-function insertManyRollingOver( objsize ) {
- // Add some variability, as the precise number can trigger different cases.
- X++;
- n = 250 + Random.randInt(10);
-
- assert(t.count() == 0 || t.findOne().X != X);
-
- for( i = 0; i < n; ++i ) {
- t.save( obj( objsize, X ) );
- debug( t.count() );
- }
-
- if (t.findOne().X != X) {
- printjson(t.findOne());
- print("\n\nERROR didn't roll over in insertManyRollingOver " + objsize);
- print("approx amountwritten: " + (objsize * n));
- printjson(t.stats());
- assert(false);
- }
-}
-
-/**
- * Insert some documents in such a way that there may be an empty extent, then
- * truncate the capped collection.
- */
-function insertAndTruncate( first ) {
- myInitialCount = t.count();
- // Insert enough documents to make the capped allocation loop over.
- insertManyRollingOver( 150 );
- myFiftyCount = t.count();
- // Insert documents that are too big to fit in the smaller extents.
- insertManyRollingOver( 5000 );
- myTwokCount = t.count();
- if ( first ) {
- initialCount = myInitialCount;
- fiftyCount = myFiftyCount;
- twokCount = myTwokCount;
- // Sanity checks for collection count
- assert( fiftyCount > initialCount );
- assert( fiftyCount > twokCount );
- } else {
- // Check that we are able to insert roughly the same number of documents
- // after truncating. The exact values are slightly variable as a result
- // of the capped allocation algorithm.
- withinOne( initialCount, myInitialCount );
- withinOne( fiftyCount, myFiftyCount );
- withinOne( twokCount, myTwokCount );
- }
- count = t.count();
- // Check that we can truncate the collection successfully.
- assert.commandWorked( db.runCommand( { captrunc:"jstests_capped8", n:count - 1, inc:false } ) );
-}
-
-/** Test truncating and subsequent inserts */
-function testTruncate() {
- insertAndTruncate( true );
- insertAndTruncate( false );
- insertAndTruncate( false );
-}
-
-var pass = 1;
-
-print("pass " + pass++);
-t.drop();
-db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 10000, 4000 ] } );
-testTruncate();
-
-print("pass " + pass++);
-t.drop();
-db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 1000, 4000 ] } );
-testTruncate();
-
-print("pass " + pass++);
-t.drop();
-db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 4000 ] } );
-testTruncate();
-
-print("pass " + pass++);
-t.drop();
-db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000 ] } );
-testTruncate();
-
-t.drop();
diff --git a/jstests/capped9.js b/jstests/capped9.js
deleted file mode 100644
index 9ea506ce795..00000000000
--- a/jstests/capped9.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.capped9;
-t.drop();
-
-db.createCollection("capped9" , {capped:true, size:1024*50 });
-
-t.insert( { _id : 1 , x : 2 , y : 3 } )
-
-assert.eq( 1 , t.find( { x : 2 } ).itcount() , "A1" )
-assert.eq( 1 , t.find( { y : 3 } ).itcount() , "A2" )
-//assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064
-
-t.update( { _id : 1 } , { $set : { y : 4 } } )
-//assert( db.getLastError() , "B1" ); // SERVER-3064
-//assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064
-
-t.ensureIndex( { _id : 1 } )
-
-assert.eq( 1 , t.find( { _id : 1 } ).itcount() , "D1" )
-
-t.update( { _id : 1 } , { $set : { y : 4 } } )
-assert( null == db.getLastError() , "D1: " + tojson( db.getLastError() ) )
-assert.eq( 4 , t.findOne().y , "D2" )
-
-
-
-
-
diff --git a/jstests/capped_empty.js b/jstests/capped_empty.js
deleted file mode 100644
index 5b0fb6b8f8e..00000000000
--- a/jstests/capped_empty.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.capped_empty;
-t.drop();
-
-db.createCollection( t.getName() , { capped : true , size : 100 } )
-
-t.insert( { x : 1 } );
-t.insert( { x : 2 } );
-t.insert( { x : 3 } );
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 3 , t.count() );
-assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned );
-
-t.runCommand( "emptycapped" );
-
-assert.eq( 0 , t.count() );
-
-t.insert( { x : 1 } );
-t.insert( { x : 2 } );
-t.insert( { x : 3 } );
-
-assert.eq( 3 , t.count() );
-assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned );
diff --git a/jstests/capped_max.js b/jstests/capped_max.js
deleted file mode 100644
index 1d7cbc3ef23..00000000000
--- a/jstests/capped_max.js
+++ /dev/null
@@ -1,29 +0,0 @@
-
-t = db.capped_max;
-sz = 1024 * 16;
-
-t.drop();
-db.createCollection( t.getName() , {capped: true, size: sz } );
-assert.lt( Math.pow( 2, 62 ), t.stats().max.floatApprox )
-
-t.drop();
-db.createCollection( t.getName() , {capped: true, size: sz, max: 123456 } );
-assert.eq( 123456, t.stats().max );
-
-// create a collection with the max possible doc cap (2^31-2 docs)
-t.drop();
-mm = Math.pow(2, 31) - 2;
-db.createCollection( t.getName() , {capped: true, size: sz, max: mm } );
-assert.eq( mm, t.stats().max );
-
-// create a collection with the 'no max' value (2^31-1 docs)
-t.drop();
-mm = Math.pow(2, 31) - 1;
-db.createCollection( t.getName() , {capped: true, size: sz, max: mm } );
-assert.eq(NumberLong("9223372036854775807"), t.stats().max );
-
-t.drop();
-res = db.createCollection( t.getName() , {capped: true, size: sz, max: Math.pow(2, 31) } );
-assert.eq( 0, res.ok, tojson(res) );
-assert.eq( 0, t.stats().ok )
-
diff --git a/jstests/capped_server2639.js b/jstests/capped_server2639.js
deleted file mode 100644
index 465fd4ae874..00000000000
--- a/jstests/capped_server2639.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-name = "server2639"
-
-t = db.getCollection( name );
-t.drop();
-
-
-db.createCollection( name , { capped : true , size : 1 } );
-
-size = t.stats().storageSize;
-
-bigString = "";
-while ( bigString.length < size )
- bigString += ".";
-
-t.insert( { x : 1 } );
-
-t.insert( { x : 2 , bigString : bigString } );
-gle = db.getLastErrorObj();
-assert.eq( 16328 , gle.code , tojson( gle ) )
-
-assert.eq( 1 , t.count() ); // make sure small doc didn't get deleted
-assert.eq( 1 , t.findOne().x );
-
-// make sure can still insert
-t.insert( { x : 2 } );
-assert.eq( 2 , t.count() );
diff --git a/jstests/capped_server7543.js b/jstests/capped_server7543.js
deleted file mode 100644
index 514cd7964b2..00000000000
--- a/jstests/capped_server7543.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-mydb = db.getSisterDB( "capped_server7543" );
-mydb.dropDatabase();
-
-mydb.createCollection( "foo" , { capped : true , size : 12288 } );
-
-assert.eq( 12288, mydb.foo.stats().storageSize );
-assert.eq( 1, mydb.foo.validate(true).extentCount );
-
-mydb.dropDatabase();
-
diff --git a/jstests/cappeda.js b/jstests/cappeda.js
deleted file mode 100644
index 4a4b14a64e5..00000000000
--- a/jstests/cappeda.js
+++ /dev/null
@@ -1,33 +0,0 @@
-
-t = db.scan_capped_id;
-t.drop()
-
-x = t.runCommand( "create" , { capped : true , size : 10000 } )
-assert( x.ok )
-
-for ( i=0; i<100; i++ )
- t.insert( { _id : i , x : 1 } )
-
-function q() {
- return t.findOne( { _id : 5 } )
-}
-
-function u() {
- t.update( { _id : 5 } , { $set : { x : 2 } } );
- var gle = db.getLastError();
- if ( gle )
- throw gle;
-}
-
-
-// SERVER-3064
-//assert.throws( q , [] , "A1" );
-//assert.throws( u , [] , "B1" );
-
-t.ensureIndex( { _id : 1 } )
-
-assert.eq( 1 , q().x )
-q()
-u()
-
-assert.eq( 2 , q().x )
diff --git a/jstests/check_shard_index.js b/jstests/check_shard_index.js
deleted file mode 100644
index f85071124fb..00000000000
--- a/jstests/check_shard_index.js
+++ /dev/null
@@ -1,141 +0,0 @@
-// -------------------------
-// CHECKSHARDINGINDEX TEST UTILS
-// -------------------------
-
-f = db.jstests_shardingindex;
-f.drop();
-
-
-// -------------------------
-// Case 1: all entries filled or empty should make a valid index
-//
-
-f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "1. initial count should be zero" );
-
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok, "1a" );
-
-f.save( { x: 1 , y : 1 } );
-assert.eq( 1 , f.count() , "1. count after initial insert should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "1b" );
-
-
-// -------------------------
-// Case 2: entry with null values would make an index unsuitable
-//
-
-f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "2. initial count should be zero" );
-
-f.save( { x: 1 , y : 1 } );
-f.save( { x: null , y : 1 } );
-
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "2a " + tojson(res) );
-
-f.save( { y: 2 } );
-assert.eq( 3 , f.count() , "2. count after initial insert should be 3" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "2b " + tojson(res) );
-
-// Check _id index
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {_id:1} });
-assert.eq( true , res.ok , "2c " + tojson(res) );
-assert( res.idskip , "2d " + tojson(res) )
-
-// -------------------------
-// Case 3: entry with array values would make an index unsuitable
-//
-
-f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "3. initial count should be zero" );
-
-f.save( { x: 1 , y : 1 } );
-f.save( { x: [1, 2] , y : 2 } );
-
-assert.eq( 2 , f.count() , "3. count after initial insert should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "3a " + tojson(res) );
-
-f.remove( { y : 2 } );
-f.reIndex();
-
-assert.eq( 1 , f.count() , "3. count after removing array value should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "3b " + tojson(res) );
-
-f.save( { x : 2, y : [1, 2] } )
-
-assert.eq( 2 , f.count() , "3. count after adding array value should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "3c " + tojson(res) );
-
-// -------------------------
-// Case 4: Handles prefix shard key indexes.
-//
-
-f.drop();
-f.ensureIndex( { x: 1 , y: 1, z: 1 } );
-assert.eq( 0 , f.count() , "4. initial count should be zero" );
-
-f.save( { x: 1 , y : 1, z : 1 } );
-
-assert.eq( 1 , f.count() , "4. count after initial insert should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( true , res.ok , "4a " + tojson(res) );
-
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "4b " + tojson(res) );
-
-f.save( { x: [1, 2] , y : 2, z : 2 } );
-
-assert.eq( 2 , f.count() , "4. count after adding array value should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4c " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4d " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4e " + tojson(res) );
-
-
-f.remove( { y : 2 } );
-f.reIndex();
-
-assert.eq( 1 , f.count() , "4. count after removing array value should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( true , res.ok , "4f " + tojson(res) );
-
-f.save( { x : 3, y : [1, 2], z : 3 } )
-
-assert.eq( 2 , f.count() , "4. count after adding array value on second key should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4g " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4h " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4i " + tojson(res) );
-
-f.remove( { x : 3 } );
-f.reIndex(); // Necessary so that the index is no longer marked as multikey
-
-assert.eq( 1 , f.count() , "4. count after removing array value should be 1 again" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( true , res.ok , "4e " + tojson(res) );
-
-f.save( { x : 4, y : 4, z : [1, 2] } )
-
-assert.eq( 2 , f.count() , "4. count after adding array value on third key should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4c " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4d " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4e " + tojson(res) );
-
-
-print("PASSED");
diff --git a/jstests/collmod.js b/jstests/collmod.js
deleted file mode 100644
index 2dc5555f3ec..00000000000
--- a/jstests/collmod.js
+++ /dev/null
@@ -1,82 +0,0 @@
-// Basic js tests for the collMod command.
-// Test setting the usePowerOf2Sizes flag, and modifying TTL indexes.
-
-function debug( x ) {
- //printjson( x );
-}
-
-var coll = "collModTest";
-var t = db.getCollection( coll );
-t.drop();
-
-db.createCollection( coll );
-
-
-// Verify the new collection has userFlags set to 1
-printjson(t.stats());
-assert.eq( t.stats().userFlags , 1 , "fresh collection doesn't have userFlags = 1 ");
-
-// Modify the collection with the usePowerOf2Sizes flag. Verify userFlags now = 0.
-var res = db.runCommand( { "collMod" : coll, "usePowerOf2Sizes" : false } );
-debug( res );
-assert.eq( res.ok , 1 , "collMod failed" );
-assert.eq( t.stats().userFlags , 0 , "modified collection should have userFlags = 0 ");
-var nso = db.system.namespaces.findOne( { name : t.getFullName() } );
-debug( nso );
-assert.eq( 0, nso.options.flags, "options didn't sync to system.namespaces: " + tojson( nso ) );
-
-// Try to modify it with some unrecognized value
-var res = db.runCommand( { "collMod" : coll, "unrecognized" : true } );
-debug( res );
-assert.eq( res.ok , 0 , "collMod shouldn't return ok with unrecognized value" );
-
-// add a TTL index
-t.ensureIndex( {a : 1}, { "expireAfterSeconds": 50 } )
-assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 50 } ),
- "TTL index not added" );
-
-// try to modify it with a bad key pattern
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : "bad" , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 0 , res.ok , "mod shouldn't work with bad keypattern");
-
-// try to modify it without expireAfterSeconds field
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1} } } );
-debug( res );
-assert.eq( 0 , res.ok , "TTL mod shouldn't work without expireAfterSeconds");
-
-// try to modify it with a non-numeric expireAfterSeconds field
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : "100" } } );
-debug( res );
-assert.eq( 0 , res.ok , "TTL mod shouldn't work with non-numeric expireAfterSeconds");
-
-// this time modifying should finally work
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ),
- "TTL index not modified" );
-
-// try to modify a faulty TTL index with a non-numeric expireAfterSeconds field
-t.dropIndex( {a : 1 } );
-t.ensureIndex( {a : 1} , { "expireAfterSeconds": "50" } )
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 0, res.ok, "shouldn't be able to modify faulty index spec" );
-
-// try with new index, this time set both expireAfterSeconds and the usePowerOf2Sizes flag
-t.dropIndex( {a : 1 } );
-t.ensureIndex( {a : 1} , { "expireAfterSeconds": 50 } )
-var res = db.runCommand( { "collMod" : coll ,
- "usePowerOf2Sizes" : true,
- "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 1, res.ok, "should be able to modify both userFlags and expireAfterSeconds" );
-assert.eq( t.stats().userFlags , 1 , "userflags should be 1 now");
-assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ),
- "TTL index should be 100 now" );
-
diff --git a/jstests/compact.js b/jstests/compact.js
deleted file mode 100644
index 2121debc17e..00000000000
--- a/jstests/compact.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// compact.js
-
-var mydb = db.getSiblingDB('compact');
-t = mydb.compacttest;
-t.drop();
-t.insert({ x: 3 });
-t.insert({ x: 3 });
-t.insert({ x: 5 });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.insert({ x: 4, z: 2, k: 'aaa' });
-t.ensureIndex({ x: 1 });
-
-print("1");
-
-var res = mydb.runCommand({ compact: 'compacttest', dev: true, force: true });
-printjson(res);
-assert(res.ok);
-assert(t.count() == 9);
-var v = t.validate(true);
-assert(v.ok);
-assert(v.extentCount == 1);
-assert(v.deletedCount == 1);
-assert(t.getIndexes().length == 2);
-var ssize = t.stats().storageSize;
-
-print("2");
-res = mydb.runCommand({ compact: 'compacttest', dev: true,paddingBytes:1000, force:true });
-assert(res.ok);
-assert(t.count() == 9);
-var v = t.validate(true);
-assert(v.ok);
-assert(t.stats().storageSize > ssize, "expected more storage given padding is higher. however it rounds off so if something changed this could be");
-//printjson(t.stats());
-
-print("z");
-
-t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
-t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
-t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
-t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
-t.insert({ x: 4, z: null, k: { f: "", b: ""} });
-t.insert({ x: 4, z: null, k: { c: ""} });
-t.insert({ x: 4, z: null, k: { h: ""} });
-t.insert({ x: 4, z: null });
-t.insert({ x: 4, z: 3});
-t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
-t.insert({ x: 4, z: null, k: { c: ""} });
-t.insert({ x: 4, z: null, k: { c: ""} });
-t.insert({ x: 4, z: 3, k: { c: ""} });
-
-t.ensureIndex({ z: 1, k: 1 });
-//t.ensureIndex({ z: 1, k: 1 }, { unique: true });
-//t.ensureIndex({ z: 1, k: 1 }, { dropDups: true, unique:true });
-
-res = mydb.runCommand({ compact: 'compacttest', dev: true, paddingFactor: 1.2, force:true });
-printjson(res);
-assert(res.ok);
-assert(t.count() > 13);
-var v = t.validate(true);
-assert(v.ok);
-
-print("3");
-
-// works on an empty collection?
-t.remove({});
-assert(mydb.runCommand({ compact: 'compacttest', dev: true, force:true }).ok);
-assert(t.count() == 0);
-v = t.validate(true);
-assert(v.ok);
-assert(v.extentCount == 1);
-assert(t.getIndexes().length == 3);
-
diff --git a/jstests/compact2.js b/jstests/compact2.js
deleted file mode 100644
index 0a7c343a3f9..00000000000
--- a/jstests/compact2.js
+++ /dev/null
@@ -1,52 +0,0 @@
-// Compaction of a v0 index converts it to a v1 index using a v1 index comparator during external
-// sort. SERVER-6499
-
-t = db.jstests_compact2;
-t.drop();
-
-/**
- * Assert that the index is of the expected version and its keys are ordered consistently with this
- * version, and that the unique and background fields are set correctly.
- */
-function assertIndex( expectedVersion, unique, background ) {
- indexSpec = db.system.indexes.findOne( { ns:t.toString(), key:{ date:1 } } );
- // The index version is as expected.
- assert.eq( expectedVersion, indexSpec.v );
- // The index uniqueness is as expected (treat missing and false unique specs as equivalent).
- assert.eq( !unique, !indexSpec.unique );
- // Background is as expected.
- assert.eq( !background, !indexSpec.background );
- // Check that 'date' key ordering is consistent with the index version.
- dates = t.find().hint( { date:1 } ).toArray().map( function( x ) { return x.date; } );
- if ( expectedVersion == 0 ) {
- // Under v0 index comparison, new Date( -1 ) > new Date( 1 ).
- assert.eq( [ new Date( 1 ), new Date( -1 ) ], dates );
- }
- else {
- // Under v1 index comparsion, new Date( -1 ) < new Date( 1 ).
- assert.eq( [ new Date( -1 ), new Date( 1 ) ], dates );
- }
-}
-
-/** Compact a collection and check the resulting indexes. */
-function checkCompact( originalVersion, unique, background ) {
- t.drop();
- t.save( { date:new Date( 1 ) } );
- t.save( { date:new Date( -1 ) } );
- t.ensureIndex( { date:1 }, { unique:unique, v:originalVersion, background:background } );
- assertIndex( originalVersion, unique, background );
-
- // Under SERVER-6499, compact fails when a v0 index is converted to a v1 index and key
- // comparisons are inconsistent, as with the date values in this test.
- assert.commandWorked( t.runCommand( "compact" ) );
- assert( !db.getLastError() );
-
- // Compact built an index with the default index version (v1). Uniqueness is maintained, but
- // background always becomes false.
- assertIndex( 1, unique, false );
-}
-
-checkCompact( 0, true, true );
-checkCompact( 0, false, false );
-checkCompact( 1, true, false );
-checkCompact( 1, false, true );
diff --git a/jstests/compactPreservePadding.js b/jstests/compactPreservePadding.js
deleted file mode 100644
index 4748afb9a82..00000000000
--- a/jstests/compactPreservePadding.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// test preservePadding
-
-var mydb = db.getSiblingDB('compactPreservePadding');
-var collName = "compactPreservePadding";
-var t = mydb.getCollection(collName);
-t.drop();
-
-// use larger keyname to avoid hitting an edge case with extents
-for (i = 0; i < 10000; i++) {
- t.insert({useLargerKeyName:i});
-}
-
-// remove half the entries
-t.remove({useLargerKeyName:{$mod:[2,0]}})
-printjson(t.stats());
-originalSize = t.stats().size;
-originalStorage = t.stats().storageSize;
-
-// compact!
-mydb.runCommand({compact: collName, preservePadding: true});
-printjson(t.stats());
-
-// object sizes ('size') should be the same (unless we hit an edge case involving extents, which
-// this test doesn't) and storage size should shrink
-assert(originalSize == t.stats().size);
-assert(originalStorage > t.stats().storageSize);
diff --git a/jstests/connection_status.js b/jstests/connection_status.js
deleted file mode 100644
index 08d05cbf28d..00000000000
--- a/jstests/connection_status.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Tests the connectionStatus command
-
-var dbName = 'connection_status';
-var myDB = db.getSiblingDB(dbName);
-myDB.dropAllUsers();
-
-function test(userName) {
- myDB.createUser({user: userName, pwd: "weak password", roles: jsTest.basicUserRoles});
- myDB.auth(userName, "weak password");
-
- var output = myDB.runCommand("connectionStatus");
- assert.commandWorked(output);
- var users = output.authInfo.authenticatedUsers;
-
- var matches = 0;
- for (var i=0; i < users.length; i++) {
- if (users[i].db != dbName)
- continue;
-
- assert.eq(users[i].user, userName);
- matches++;
- }
- assert.eq(matches, 1);
-}
-
-test("someone");
-test("someone else"); // replaces someone
diff --git a/jstests/connection_string_validation.js b/jstests/connection_string_validation.js
deleted file mode 100644
index 4ecd1f926ee..00000000000
--- a/jstests/connection_string_validation.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// Test validation of connection strings passed to the JavaScript "connect()" function.
-// Related to SERVER-8030.
-
-port = "27017"
-
-if ( db.getMongo().host.indexOf( ":" ) >= 0 ) {
- var idx = db.getMongo().host.indexOf( ":" );
- port = db.getMongo().host.substring( idx + 1 );
-}
-
-var goodStrings = [
- "localhost:" + port + "/test",
- "127.0.0.1:" + port + "/test"
- ];
-
-var badStrings = [
- { s: undefined, r: /^Missing connection string$/ },
- { s: 7, r: /^Incorrect type/ },
- { s: null, r: /^Incorrect type/ },
- { s: "", r: /^Empty connection string$/ },
- { s: " ", r: /^Empty connection string$/ },
- { s: ":", r: /^Missing host name/ },
- { s: "/", r: /^Missing host name/ },
- { s: ":/", r: /^Missing host name/ },
- { s: ":/test", r: /^Missing host name/ },
- { s: ":" + port + "/", r: /^Missing host name/ },
- { s: ":" + port + "/test", r: /^Missing host name/ },
- { s: "/test", r: /^Missing host name/ },
- { s: "localhost:/test", r: /^Missing port number/ },
- { s: "127.0.0.1:/test", r: /^Missing port number/ },
- { s: "127.0.0.1:cat/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:1cat/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:123456/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:65536/test", r: /^Invalid port number/ },
- { s: "::1:65536/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:" + port + "/", r: /^Missing database name/ },
- { s: "::1:" + port + "/", r: /^Missing database name/ }
- ];
-
-function testGood(i, connectionString) {
- print("\nTesting good connection string " + i + " (\"" + connectionString + "\") ...");
- var gotException = false;
- var exception;
- try {
- var connectDB = connect(connectionString);
- connectDB = null;
- }
- catch (e) {
- gotException = true;
- exception = e;
- }
- if (!gotException) {
- print("Good connection string " + i +
- " (\"" + connectionString + "\") correctly validated");
- return;
- }
- var message = "FAILED to correctly validate goodString " + i +
- " (\"" + connectionString + "\"): exception was \"" + tojson(exception) + "\"";
- doassert(message);
-}
-
-function testBad(i, connectionString, errorRegex) {
- print("\nTesting bad connection string " + i + " (\"" + connectionString + "\") ...");
- var gotException = false;
- var gotCorrectErrorText = false;
- var exception;
- try {
- var connectDB = connect(connectionString);
- connectDB = null;
- }
- catch (e) {
- gotException = true;
- exception = e;
- if (errorRegex.test(e.message)) {
- gotCorrectErrorText = true;
- }
- }
- if (gotCorrectErrorText) {
- print("Bad connection string " + i + " (\"" + connectionString +
- "\") correctly rejected:\n" + tojson(exception));
- return;
- }
- var message = "FAILED to generate correct exception for badString " + i +
- " (\"" + connectionString + "\"): ";
- if (gotException) {
- message += "exception was \"" + tojson(exception) +
- "\", it should have matched \"" + errorRegex.toString() + "\"";
- }
- else {
- message += "no exception was thrown";
- }
- doassert(message);
-}
-
-var i;
-jsTest.log("TESTING " + goodStrings.length + " good connection strings");
-for (i = 0; i < goodStrings.length; ++i) {
- testGood(i, goodStrings[i]);
-}
-
-jsTest.log("TESTING " + badStrings.length + " bad connection strings");
-for (i = 0; i < badStrings.length; ++i) {
- testBad(i, badStrings[i].s, badStrings[i].r);
-}
-
-jsTest.log("SUCCESSFUL test completion");
diff --git a/jstests/constructors.js b/jstests/constructors.js
deleted file mode 100644
index 5d4dd177425..00000000000
--- a/jstests/constructors.js
+++ /dev/null
@@ -1,314 +0,0 @@
-// Tests to see what validity checks are done for 10gen specific object construction
-
-// Takes a list of constructors and returns a new list with an extra entry for each constructor with
-// "new" prepended
-function addConstructorsWithNew (constructorList) {
- function prependNew (constructor) {
- return "new " + constructor;
- }
-
- var valid = constructorList.valid;
- var invalid = constructorList.invalid;
- // We use slice(0) here to make a copy of our lists
- var validWithNew = valid.concat(valid.slice(0).map(prependNew));
- var invalidWithNew = invalid.concat(invalid.slice(0).map(prependNew));
- return { "valid" : validWithNew, "invalid" : invalidWithNew };
-}
-
-function clientEvalConstructorTest (constructorList) {
- constructorList = addConstructorsWithNew(constructorList);
- constructorList.valid.forEach(function (constructor) {
- try {
- eval(constructor);
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in eval context: " + e);
- }
- });
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { eval(constructor) },
- [], "invalid constructor did not throw error in eval context: " + constructor);
- });
-}
-
-function dbEvalConstructorTest (constructorList) {
- constructorList = addConstructorsWithNew(constructorList);
- constructorList.valid.forEach(function (constructor) {
- try {
- db.eval(constructor);
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in db.eval context: " + e);
- }
- });
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { db.eval(constructor) },
- [], "invalid constructor did not throw error in db.eval context: " + constructor);
- });
-}
-
-function mapReduceConstructorTest (constructorList) {
- constructorList = addConstructorsWithNew(constructorList);
- t = db.mr_constructors;
- t.drop();
-
- t.save( { "partner" : 1, "visits" : 9 } )
- t.save( { "partner" : 2, "visits" : 9 } )
- t.save( { "partner" : 1, "visits" : 11 } )
- t.save( { "partner" : 1, "visits" : 30 } )
- t.save( { "partner" : 2, "visits" : 41 } )
- t.save( { "partner" : 2, "visits" : 41 } )
-
- constructorList.valid.forEach(function (constructor) {
- try {
- m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
-
- r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
-
- res = t.mapReduce( m , r , { out : "mr_constructors_out" , scope : { xx : 1 } } );
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in mapReduce context: " + e);
- }
- });
- constructorList.invalid.forEach(function (constructor) {
- m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
-
- r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
-
- assert.throws(function () { res = t.mapReduce( m , r ,
- { out : "mr_constructors_out" , scope : { xx : 1 } } ) },
- [], "invalid constructor did not throw error in mapReduce context: " + constructor);
- });
-
- db.mr_constructors_out.drop();
- t.drop();
-}
-
-function whereConstructorTest (constructorList) {
- constructorList = addConstructorsWithNew(constructorList);
- t = db.where_constructors;
- t.drop();
- t.insert({ x : 1 });
- assert(!db.getLastError());
-
- constructorList.valid.forEach(function (constructor) {
- try {
- t.findOne({ $where : constructor });
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in $where query: " + e);
- }
- });
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { t.findOne({ $where : constructor }) },
- [], "invalid constructor did not throw error in $where query: " + constructor);
- });
-}
-
-var dbrefConstructors = {
- "valid" : [
- "DBRef(\"namespace\", 0)",
- "DBRef(\"namespace\", \"test\")",
- "DBRef(\"namespace\", ObjectId())",
- "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))",
- ],
- "invalid" : [
- "DBRef()",
- "DBRef(true, ObjectId())",
- "DBRef(\"namespace\")",
- "DBRef(\"namespace\", ObjectId(), true)",
- ]
-}
-
-var dbpointerConstructors = {
- "valid" : [
- "DBPointer(\"namespace\", ObjectId())",
- "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))",
- ],
- "invalid" : [
- "DBPointer()",
- "DBPointer(true, ObjectId())",
- "DBPointer(\"namespace\", 0)",
- "DBPointer(\"namespace\", \"test\")",
- "DBPointer(\"namespace\")",
- "DBPointer(\"namespace\", ObjectId(), true)",
- ]
-}
-
-
-var objectidConstructors = {
- "valid" : [
- 'ObjectId()',
- 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")',
- ],
- "invalid" : [
- 'ObjectId(5)',
- 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")',
- ]
-}
-
-var timestampConstructors = {
- "valid" : [
- 'Timestamp()',
- 'Timestamp(0,0)',
- 'Timestamp(1.0,1.0)',
- ],
- "invalid" : [
- 'Timestamp(0)',
- 'Timestamp(0,0,0)',
- 'Timestamp("test","test")',
- 'Timestamp("test",0)',
- 'Timestamp(0,"test")',
- 'Timestamp(true,true)',
- 'Timestamp(true,0)',
- 'Timestamp(0,true)',
- ]
-}
-
-var bindataConstructors = {
- "valid" : [
- 'BinData(0,"test")',
- ],
- "invalid" : [
- 'BinData(0,"test", "test")',
- 'BinData()',
- 'BinData(-1, "")',
- 'BinData(256, "")',
- 'BinData("string","aaaa")',
- // SERVER-10152
- //'BinData(0, true)',
- //'BinData(0, null)',
- //'BinData(0, undefined)',
- //'BinData(0, {})',
- //'BinData(0, [])',
- //'BinData(0, function () {})',
- ]
-}
-
-var uuidConstructors = {
- "valid" : [
- 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- ],
- "invalid" : [
- 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
- 'UUID()',
- 'UUID("aa")',
- 'UUID("invalidhex")',
- // SERVER-9686
- //'UUID("invalidhexbutstilltherequiredlen")',
- 'UUID(true)',
- 'UUID(null)',
- 'UUID(undefined)',
- 'UUID({})',
- 'UUID([])',
- 'UUID(function () {})',
- ]
-}
-
-var md5Constructors = {
- "valid" : [
- 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- ],
- "invalid" : [
- 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
- 'MD5()',
- 'MD5("aa")',
- 'MD5("invalidhex")',
- // SERVER-9686
- //'MD5("invalidhexbutstilltherequiredlen")',
- 'MD5(true)',
- 'MD5(null)',
- 'MD5(undefined)',
- 'MD5({})',
- 'MD5([])',
- 'MD5(function () {})',
- ]
-}
-
-var hexdataConstructors = {
- "valid" : [
- 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- // Numbers as the payload are converted to strings, so HexData(0, 100) == HexData(0, "100")
- 'HexData(0, 100)',
- 'HexData(0, "")',
- 'HexData(0, "aaa")',
- 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- 'HexData(0, "000000000000000000000005")', // SERVER-9605
- ],
- "invalid" : [
- 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
- 'HexData()',
- 'HexData(0)',
- 'HexData(-1, "")',
- 'HexData(256, "")',
- 'HexData("string","aaaa")',
- // SERVER-10152
- //'HexData(0, true)',
- //'HexData(0, null)',
- //'HexData(0, undefined)',
- //'HexData(0, {})',
- //'HexData(0, [])',
- //'HexData(0, function () {})',
- // SERVER-9686
- //'HexData(0, "invalidhex")',
- ]
-}
-
-var dateConstructors = {
- "valid" : [
- 'Date()',
- 'Date(0)',
- 'Date(0,0)',
- 'Date(0,0,0)',
- 'Date("foo")',
- ],
- "invalid" : [
- ]
-}
-
-clientEvalConstructorTest(dbrefConstructors);
-clientEvalConstructorTest(dbpointerConstructors);
-clientEvalConstructorTest(objectidConstructors);
-clientEvalConstructorTest(timestampConstructors);
-clientEvalConstructorTest(bindataConstructors);
-clientEvalConstructorTest(uuidConstructors);
-clientEvalConstructorTest(md5Constructors);
-clientEvalConstructorTest(hexdataConstructors);
-clientEvalConstructorTest(dateConstructors);
-
-dbEvalConstructorTest(dbrefConstructors);
-dbEvalConstructorTest(dbpointerConstructors);
-dbEvalConstructorTest(objectidConstructors);
-dbEvalConstructorTest(timestampConstructors);
-dbEvalConstructorTest(bindataConstructors);
-dbEvalConstructorTest(uuidConstructors);
-dbEvalConstructorTest(md5Constructors);
-dbEvalConstructorTest(hexdataConstructors);
-dbEvalConstructorTest(dateConstructors);
-
-// SERVER-8963
-if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") {
- mapReduceConstructorTest(dbrefConstructors);
- mapReduceConstructorTest(dbpointerConstructors);
- mapReduceConstructorTest(objectidConstructors);
- mapReduceConstructorTest(timestampConstructors);
- mapReduceConstructorTest(bindataConstructors);
- mapReduceConstructorTest(uuidConstructors);
- mapReduceConstructorTest(md5Constructors);
- mapReduceConstructorTest(hexdataConstructors);
-}
-mapReduceConstructorTest(dateConstructors);
-
-// SERVER-8963
-if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") {
- whereConstructorTest(dbrefConstructors);
- whereConstructorTest(dbpointerConstructors);
- whereConstructorTest(objectidConstructors);
- whereConstructorTest(timestampConstructors);
- whereConstructorTest(bindataConstructors);
- whereConstructorTest(uuidConstructors);
- whereConstructorTest(md5Constructors);
- whereConstructorTest(hexdataConstructors);
-}
-whereConstructorTest(dateConstructors);
diff --git a/jstests/copydb.js b/jstests/copydb.js
deleted file mode 100644
index 7c7c02542a4..00000000000
--- a/jstests/copydb.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
-
-a = db.getSisterDB( "copydb-test-a" );
-b = db.getSisterDB( "copydb-test-b" );
-
-a.dropDatabase();
-b.dropDatabase();
-
-a.foo.save( { a : 1 } );
-
-assert.eq( 1 , a.foo.count() , "A" );
-assert.eq( 0 , b.foo.count() , "B" );
-
-a.copyDatabase( a._name , b._name );
-
-assert.eq( 1 , a.foo.count() , "C" );
-assert.eq( 1 , b.foo.count() , "D" );
-
diff --git a/jstests/count.js b/jstests/count.js
deleted file mode 100644
index 5502d7176c1..00000000000
--- a/jstests/count.js
+++ /dev/null
@@ -1,25 +0,0 @@
-t = db.jstests_count;
-
-t.drop();
-t.save( { i: 1 } );
-t.save( { i: 2 } );
-assert.eq( 1, t.find( { i: 1 } ).count(), "A" );
-assert.eq( 1, t.count( { i: 1 } ) , "B" );
-assert.eq( 2, t.find().count() , "C" );
-assert.eq( 2, t.find( undefined ).count() , "D" );
-assert.eq( 2, t.find( null ).count() , "E" );
-assert.eq( 2, t.count() , "F" );
-
-t.drop();
-t.save( {a:true,b:false} );
-t.ensureIndex( {b:1,a:1} );
-assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" );
-assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" );
-
-t.drop();
-t.save( {a:true,b:false} );
-t.ensureIndex( {b:1,a:1,c:1} );
-
-assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" );
-assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" );
-
diff --git a/jstests/count10.js b/jstests/count10.js
deleted file mode 100644
index ed966ffe3fe..00000000000
--- a/jstests/count10.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// Test that interrupting a count returns an error code.
-
-t = db.count10;
-t.drop();
-
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
-}
-// Make sure data is written.
-db.getLastError();
-
-// Start a parallel shell which repeatedly checks for a count
-// query using db.currentOp(). As soon as the op is found,
-// kill it via db.killOp().
-s = startParallelShell(
- 'assert.soon(function() {' +
- ' current = db.currentOp({"ns": db.count10.getFullName(), ' +
- ' "query.count": db.count10.getName()}); ' +
-
- // Check that we found the count op. If not, return false so
- // that assert.soon will retry.
- ' assert("inprog" in current); ' +
- ' if (current.inprog.length === 0) { ' +
- ' jsTest.log("count10.js: did not find count op, retrying"); ' +
- ' printjson(current); ' +
- ' return false; ' +
- ' } ' +
- ' countOp = current.inprog[0]; ' +
- ' if (!countOp) { ' +
- ' jsTest.log("count10.js: did not find count op, retrying"); ' +
- ' printjson(current); ' +
- ' return false; ' +
- ' } ' +
-
- // Found the count op. Try to kill it.
- ' jsTest.log("count10.js: found count op:"); ' +
- ' printjson(current); ' +
- ' printjson(db.killOp(countOp.opid)); ' +
- ' return true; ' +
- '}, "count10.js: could not find count op after retrying, gave up");'
-);
-
-function getKilledCount() {
- try {
- db.count10.find("sleep(1000)").count();
- jsTest.log("count10.js: count op completed without being killed");
- } catch (e) {
- return e;
- }
-}
-
-var res = getKilledCount();
-jsTest.log("count10.js: killed count output start");
-printjson(res);
-jsTest.log("count10.js: killed count output end");
-assert(res);
-assert(res.match(/count failed/) !== null);
-assert(res.match(/\"code\"/) !== null);
-
-s();
-
diff --git a/jstests/count2.js b/jstests/count2.js
deleted file mode 100644
index 4d060aaac20..00000000000
--- a/jstests/count2.js
+++ /dev/null
@@ -1,28 +0,0 @@
-t = db.count2;
-t.drop();
-
-for ( var i=0; i<1000; i++ ){
- t.save( { num : i , m : i % 20 } );
-}
-
-assert.eq( 1000 , t.count() , "A" )
-assert.eq( 1000 , t.find().count() , "B" )
-assert.eq( 1000 , t.find().toArray().length , "C" )
-
-assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" )
-assert.eq( 50 , t.find( { m : 5 } ).count() , "E" )
-
-assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" )
-assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" )
-assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" )
-
-assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" )
-assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" )
-assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" )
-
-assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" )
-
-// Negative skip values should return error
-var negSkipResult = db.runCommand({ count: 't', skip : -2 });
-assert( ! negSkipResult.ok , "negative skip value shouldn't work, n = " + negSkipResult.n );
-assert( negSkipResult.errmsg.length > 0 , "no error msg for negative skip" );
diff --git a/jstests/count3.js b/jstests/count3.js
deleted file mode 100644
index a8c3ef5faad..00000000000
--- a/jstests/count3.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.count3;
-
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 1 , b : 2 } );
-
-assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" );
-assert.eq( 2 , t.find( { a : 1 } ).count() , "B" );
-
-assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" );
-assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" );
-
-t.drop();
-
-t.save( { a : 1 } );
-
-assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" );
-assert.eq( 1 , t.find( { a : 1 } ).count() , "F" );
-
-assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" );
-assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" );
-
-
-
diff --git a/jstests/count4.js b/jstests/count4.js
deleted file mode 100644
index 7be74362603..00000000000
--- a/jstests/count4.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.count4;
-t.drop();
-
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
-}
-
-q = { x : { $gt : 25 , $lte : 75 } }
-
-assert.eq( 50 , t.find( q ).count() , "A" );
-assert.eq( 50 , t.find( q ).itcount() , "B" );
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 50 , t.find( q ).count() , "C" );
-assert.eq( 50 , t.find( q ).itcount() , "D" );
diff --git a/jstests/count5.js b/jstests/count5.js
deleted file mode 100644
index b6bbc543352..00000000000
--- a/jstests/count5.js
+++ /dev/null
@@ -1,30 +0,0 @@
-
-t = db.count5;
-t.drop();
-
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
-}
-
-q = { x : { $gt : 25 , $lte : 75 } };
-
-assert.eq( 50 , t.find( q ).count() , "A" );
-assert.eq( 50 , t.find( q ).itcount() , "B" );
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 50 , t.find( q ).count() , "C" );
-assert.eq( 50 , t.find( q ).itcount() , "D" );
-
-assert.eq( 50 , t.find( q ).limit(1).count() , "E" );
-assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" );
-
-assert.eq( 5 , t.find( q ).limit(5).size() , "G" );
-assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" );
-assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" );
-
-assert.eq( 20 , t.find().limit(20).size() , "J" );
-
-assert.eq( 0 , t.find().skip(120).size() , "K" );
-assert.eq( 1 , db.$cmd.findOne( { count: "count5" } )["ok"] , "L" );
-assert.eq( 1 , db.$cmd.findOne( { count: "count5", skip: 120 } )["ok"] , "M" );
diff --git a/jstests/count6.js b/jstests/count6.js
deleted file mode 100644
index 44c5fa33bc7..00000000000
--- a/jstests/count6.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// Some correctness checks for fast and normal count modes, including with skip and limit.
-
-t = db.jstests_count6;
-
-function checkCountForObject( obj ) {
- t.drop();
- t.ensureIndex( {b:1,a:1} );
-
- function checkCounts( query, expected ) {
- assert.eq( expected, t.count( query ) , "A1" );
- assert.eq( expected, t.find( query ).skip( 0 ).limit( 0 ).count( true ) , "A2" );
- // Check proper counts with various skip and limit specs.
- for( var skip = 1; skip <= 2; ++skip ) {
- for( var limit = 1; limit <= 2; ++limit ) {
- assert.eq( Math.max( expected - skip, 0 ), t.find( query ).skip( skip ).count( true ) , "B1" );
- assert.eq( Math.min( expected, limit ), t.find( query ).limit( limit ).count( true ) , "B2" );
- assert.eq( Math.min( Math.max( expected - skip, 0 ), limit ), t.find( query ).skip( skip ).limit( limit ).count( true ) , "B4" );
-
- // Check limit(x) = limit(-x)
- assert.eq( t.find( query ).limit( limit ).count( true ),
- t.find( query ).limit( -limit ).count( true ) , "C1" );
- assert.eq( t.find( query ).skip( skip ).limit( limit ).count( true ),
- t.find( query ).skip( skip ).limit( -limit ).count( true ) , "C2" );
- }
- }
-
- // Check limit(0) has no effect
- assert.eq( expected, t.find( query ).limit( 0 ).count( true ) , "D1" );
- assert.eq( Math.max( expected - skip, 0 ),
- t.find( query ).skip( skip ).limit( 0 ).count( true ) , "D2" );
- assert.eq( expected, t.getDB().runCommand({ count: t.getName(),
- query: query, limit: 0 }).n , "D3" );
- assert.eq( Math.max( expected - skip, 0 ),
- t.getDB().runCommand({ count: t.getName(),
- query: query, limit: 0, skip: skip }).n , "D4" );
- }
-
- for( var i = 0; i < 5; ++i ) {
- checkCounts( {a:obj.a,b:obj.b}, i );
- checkCounts( {b:obj.b,a:obj.a}, i );
- t.insert( obj );
- }
-
- t.insert( {a:true,b:true} );
- t.insert( {a:true,b:1} );
- t.insert( {a:false,b:1} );
- t.insert( {a:false,b:true} );
- t.insert( {a:false,b:false} );
-
- checkCounts( {a:obj.a,b:obj.b}, i );
- checkCounts( {b:obj.b,a:obj.a}, i );
-
- // Check with no query
- checkCounts( {}, 10 );
-}
-
-// Check fast count mode.
-checkCountForObject( {a:true,b:false} );
-
-// Check normal count mode.
-checkCountForObject( {a:1,b:0} );
diff --git a/jstests/count7.js b/jstests/count7.js
deleted file mode 100644
index c2c1260d49b..00000000000
--- a/jstests/count7.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Check normal count matching and deduping.
-
-t = db.jstests_count7;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:'algebra'} );
-t.save( {a:'apple'} );
-t.save( {a:'azores'} );
-t.save( {a:'bumper'} );
-t.save( {a:'supper'} );
-t.save( {a:'termite'} );
-t.save( {a:'zeppelin'} );
-t.save( {a:'ziggurat'} );
-t.save( {a:'zope'} );
-
-assert.eq( 5, t.count( {a:/p/} ) );
-
-t.remove({});
-
-t.save( {a:[1,2,3]} );
-t.save( {a:[1,2,3]} );
-t.save( {a:[1]} );
-
-assert.eq( 2, t.count( {a:{$gt:1}} ) );
diff --git a/jstests/count9.js b/jstests/count9.js
deleted file mode 100644
index 888ffe3b544..00000000000
--- a/jstests/count9.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test fast mode count with multikey entries.
-
-t = db.jstests_count9;
-t.drop();
-
-t.ensureIndex( {a:1} );
-
-t.save( {a:['a','b','a']} );
-assert.eq( 1, t.count( {a:'a'} ) );
-
-t.save( {a:['a','b','a']} );
-assert.eq( 2, t.count( {a:'a'} ) );
-
-t.drop();
-t.ensureIndex( {a:1,b:1} );
-
-t.save( {a:['a','b','a'],b:'r'} );
-assert.eq( 1, t.count( {a:'a',b:'r'} ) );
-assert.eq( 1, t.count( {a:'a'} ) );
-
-t.save( {a:['a','b','a'],b:'r'} );
-assert.eq( 2, t.count( {a:'a',b:'r'} ) );
-assert.eq( 2, t.count( {a:'a'} ) );
-
-t.drop();
-t.ensureIndex( {'a.b':1,'a.c':1} );
-t.save( {a:[{b:'b',c:'c'},{b:'b',c:'c'}]} );
-assert.eq( 1, t.count( {'a.b':'b','a.c':'c'} ) );
diff --git a/jstests/count_hint.js b/jstests/count_hint.js
deleted file mode 100644
index 93322d282db..00000000000
--- a/jstests/count_hint.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// test passing hint to the count cmd
-// hints are ignored if there is no query predicate
-t = db.jstests_count_hint;
-t.drop();
-
-t.save( { i: 1 } );
-t.save( { i: 2 } );
-assert.eq( 2, t.find().count() );
-
-t.ensureIndex( { i:1 } );
-
-assert.eq( 1, t.find( { i: 1 } ).hint( "_id_" ).count(), "A" );
-assert.eq( 2, t.find().hint( "_id_" ).count(), "B" );
-assert.throws( function() { t.find( { i: 1 } ).hint( "BAD HINT" ).count(); } );
-
-// create a sparse index which should have no entries
-t.ensureIndex( { x:1 }, { sparse:true } );
-
-assert.eq( 0, t.find( { i: 1 } ).hint( "x_1" ).count(), "C" );
-assert.eq( 2, t.find().hint( "x_1" ).count(), "D" );
diff --git a/jstests/counta.js b/jstests/counta.js
deleted file mode 100644
index f0834d455dd..00000000000
--- a/jstests/counta.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// Check that count returns 0 in some exception cases.
-
-t = db.jstests_counta;
-t.drop();
-
-for( i = 0; i < 10; ++i ) {
- t.save( {a:i} );
-}
-
-// f() is undefined, causing an assertion
-assert.throws(
- function(){
- t.count( { $where:function() { if ( this.a < 5 ) { return true; } else { f(); } } } );
- } );
diff --git a/jstests/countb.js b/jstests/countb.js
deleted file mode 100644
index 8f7131a5a6c..00000000000
--- a/jstests/countb.js
+++ /dev/null
@@ -1,11 +0,0 @@
-// Test fast count mode with single key index unsatisfiable constraints on a multi key index.
-
-t = db.jstests_countb;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:['a','b']} );
-assert.eq( 0, t.find( {a:{$in:['a'],$gt:'b'}} ).count() );
-assert.eq( 0, t.find( {$and:[{a:'a'},{a:{$gt:'b'}}]} ).count() );
-assert.eq( 1, t.find( {$and:[{a:'a'},{$where:"this.a[1]=='b'"}]} ).count() );
-assert.eq( 0, t.find( {$and:[{a:'a'},{$where:"this.a[1]!='b'"}]} ).count() );
diff --git a/jstests/countc.js b/jstests/countc.js
deleted file mode 100644
index 260dbb1f264..00000000000
--- a/jstests/countc.js
+++ /dev/null
@@ -1,124 +0,0 @@
-// In fast count mode the Matcher is bypassed when matching can be performed by a BtreeCursor and
-// its delegate FieldRangeVector or an IntervalBtreeCursor. The tests below check that fast count
-// mode is implemented appropriately in specific cases.
-//
-// SERVER-1752
-
-t = db.jstests_countc;
-t.drop();
-
-
-// Match a subset of inserted values within a $in operator.
-t.drop();
-t.ensureIndex( { a:1 } );
-// Save 'a' values 0, 0.5, 1.5, 2.5 ... 97.5, 98.5, 99.
-t.save( { a:0 } );
-t.save( { a:99 } );
-for( i = 0; i < 99; ++i ) {
- t.save( { a:( i + 0.5 ) } );
-}
-// Query 'a' values $in 0, 1, 2, ..., 99.
-vals = [];
-for( i = 0; i < 100; ++i ) {
- vals.push( i );
-}
-// Only values 0 and 99 of the $in set are present in the collection, so the expected count is 2.
-assert.eq( 2, t.count( { a:{ $in:vals } } ) );
-
-
-// Match 'a' values within upper and lower limits.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:[ 1, 2 ] } ); // Will match because 'a' is in range.
-t.save( { a:9 } ); // Will not match because 'a' is not in range.
-// Only one document matches.
-assert.eq( 1, t.count( { a:{ $gt:0, $lt:5 } } ) );
-
-
-// Match two nested fields within an array.
-t.drop();
-t.ensureIndex( { 'a.b':1, 'a.c':1 } );
-t.save( { a:[ { b:2, c:3 }, {} ] } );
-// The document does not match because its c value is 3.
-assert.eq( 0, t.count( { 'a.b':2, 'a.c':2 } ) );
-
-
-// $gt:string only matches strings.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:'a' } ); // Will match.
-t.save( { a:{} } ); // Will not match because {} is not a string.
-// Only one document matches.
-assert.eq( 1, t.count( { a:{ $gte:'' } } ) );
-
-
-// $lte:date only matches dates.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:new Date( 1 ) } ); // Will match.
-t.save( { a:true } ); // Will not match because 'true' is not a date.
-// Only one document matches.
-assert.eq( 1, t.count( { a:{ $lte:new Date( 1 ) } } ) );
-
-
-// Querying for 'undefined' triggers an error.
-t.drop();
-t.ensureIndex( { a:1 } );
-assert.throws( function() { t.count( { a:undefined } ); } );
-
-
-// Count using a descending order index.
-t.drop();
-t.ensureIndex( { a:-1 } );
-t.save( { a:1 } );
-t.save( { a:2 } );
-t.save( { a:3 } );
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:{ $lte:2 } } ) );
-assert.eq( 2, t.count( { a:{ $lt:3 } } ) );
-
-
-// Count using a compound index.
-t.drop();
-t.ensureIndex( { a:1, b:1 } );
-t.save( { a:1, b:2 } );
-t.save( { a:2, b:1 } );
-t.save( { a:2, b:3 } );
-t.save( { a:3, b:4 } );
-assert.eq( 1, t.count( { a:1 }));
-assert.eq( 2, t.count( { a:2 }));
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
-assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
-assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
-
-
-// Count using a compound descending order index.
-t.drop();
-t.ensureIndex( { a:1, b:-1 } );
-t.save( { a:1, b:2 } );
-t.save( { a:2, b:1 } );
-t.save( { a:2, b:3 } );
-t.save( { a:3, b:4 } );
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
-assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
-assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
-
-
-// Count with a multikey value.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:[ 1, 2 ] } );
-assert.eq( 1, t.count( { a:{ $gt:0, $lte:2 } } ) );
-
-
-// Count with a match constraint on an unindexed field.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:1, b:1 } );
-t.save( { a:1, b:2 } );
-assert.eq( 1, t.count( { a:1, $where:'this.b == 1' } ) );
diff --git a/jstests/coveredIndex1.js b/jstests/coveredIndex1.js
deleted file mode 100644
index ce11f89ceed..00000000000
--- a/jstests/coveredIndex1.js
+++ /dev/null
@@ -1,64 +0,0 @@
-
-t = db["jstests_coveredIndex1"];
-t.drop();
-
-t.save({fn: "john", ln: "doe"})
-t.save({fn: "jack", ln: "doe"})
-t.save({fn: "john", ln: "smith"})
-t.save({fn: "jack", ln: "black"})
-t.save({fn: "bob", ln: "murray"})
-t.save({fn: "aaa", ln: "bbb", obj: {a: 1, b: "blah"}})
-assert.eq( t.findOne({ln: "doe"}).fn, "john", "Cannot find right record" );
-assert.eq( t.count(), 6, "Not right length" );
-
-// use simple index
-t.ensureIndex({ln: 1});
-assert.eq( t.find({ln: "doe"}).explain().indexOnly, false, "Find using covered index but all fields are returned");
-assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find using covered index but _id is returned");
-assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-
-// this time, without a query spec
-// SERVER-2109
-//assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-assert.eq( t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().indexOnly, true, "Find is not using covered index");
-
-// use compound index
-t.dropIndex({ln: 1})
-t.ensureIndex({ln: 1, fn: 1});
-// return 1 field
-assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-// return both fields, multiple docs returned
-assert.eq( t.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-// match 1 record using both fields
-assert.eq( t.find({ln: "doe", fn: "john"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-// change ordering
-assert.eq( t.find({fn: "john", ln: "doe"}, {fn: 1, ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-// ask from 2nd index key
-assert.eq( t.find({fn: "john"}, {fn: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key");
-
-// repeat above but with _id field
-t.dropIndex({ln: 1, fn: 1})
-t.ensureIndex({_id: 1, ln: 1});
-// return 1 field
-assert.eq( t.find({_id: 123, ln: "doe"}, {_id: 1}).explain().indexOnly, true, "Find is not using covered index");
-// match 1 record using both fields
-assert.eq( t.find({_id: 123, ln: "doe"}, {ln: 1}).explain().indexOnly, true, "Find is not using covered index");
-// change ordering
-assert.eq( t.find({ln: "doe", _id: 123}, {ln: 1, _id: 1}).explain().indexOnly, true, "Find is not using covered index");
-// ask from 2nd index key
-assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key");
-
-// repeat above but with embedded obj
-t.dropIndex({_id: 1, ln: 1})
-t.ensureIndex({obj: 1});
-assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object");
-assert.eq( t.find({obj: {a: 1, b: "blah"}}).explain().indexOnly, false, "Index doesnt have all fields to cover");
-assert.eq( t.find({obj: {a: 1, b: "blah"}}, {obj: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-
-// repeat above but with index on sub obj field
-t.dropIndex({obj: 1});
-t.ensureIndex({"obj.a": 1, "obj.b": 1})
-assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object");
-
-assert(t.validate().valid);
-
diff --git a/jstests/coveredIndex2.js b/jstests/coveredIndex2.js
deleted file mode 100644
index 56a23f43211..00000000000
--- a/jstests/coveredIndex2.js
+++ /dev/null
@@ -1,18 +0,0 @@
-t = db["jstests_coveredIndex2"];
-t.drop();
-
-t.save({a: 1})
-t.save({a: 2})
-assert.eq( t.findOne({a: 1}).a, 1, "Cannot find right record" );
-assert.eq( t.count(), 2, "Not right length" );
-
-// use simple index
-t.ensureIndex({a: 1});
-assert.eq( t.find({a:1}).explain().indexOnly, false, "Find using covered index but all fields are returned");
-assert.eq( t.find({a:1}, {a: 1}).explain().indexOnly, false, "Find using covered index but _id is returned");
-assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-
-// add multikey
-t.save({a:[3,4]})
-assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index even after multikey insert");
-
diff --git a/jstests/coveredIndex3.js b/jstests/coveredIndex3.js
deleted file mode 100644
index 66180342605..00000000000
--- a/jstests/coveredIndex3.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// Check proper covered index handling when query and processGetMore yield.
-// SERVER-4975
-
-if ( 0 ) { // SERVER-4975
-
-t = db.jstests_coveredIndex3;
-t2 = db.jstests_coveredIndex3_other;
-t.drop();
-t2.drop();
-
-function doTest( batchSize ) {
-
- // Insert an array, which will make the { a:1 } index multikey and should disable covered index
- // matching.
- p1 = startParallelShell(
- 'for( i = 0; i < 60; ++i ) { \
- db.jstests_coveredIndex3.save( { a:[ 2000, 2001 ] } ); \
- sleep( 300 ); \
- }'
- );
-
- // Frequent writes cause the find operation to yield.
- p2 = startParallelShell(
- 'for( i = 0; i < 1800; ++i ) { \
- db.jstests_coveredIndex3_other.save( {} ); \
- sleep( 10 ); \
- }'
- );
-
- for( i = 0; i < 30; ++i ) {
- t.drop();
- t.ensureIndex( { a:1 } );
-
- for( j = 0; j < 1000; ++j ) {
- t.save( { a:j } );
- }
-
- c = t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).batchSize( batchSize );
- while( c.hasNext() ) {
- o = c.next();
- // If o contains a high numeric 'a' value, it must come from an array saved in p1.
- assert( !( o.a > 1500 ), 'improper object returned ' + tojson( o ) );
- }
- }
-
- p1();
- p2();
-
-}
-
-doTest( 2000 ); // Test query.
-doTest( 500 ); // Try to test getMore - not clear if this will actually trigger the getMore issue.
-
-}
diff --git a/jstests/coveredIndex4.js b/jstests/coveredIndex4.js
deleted file mode 100644
index 136eba603cf..00000000000
--- a/jstests/coveredIndex4.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Test covered index projection with $or clause, specifically in getMore
-// SERVER-4980
-
-t = db.jstests_coveredIndex4;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-
-orClause = [];
-for( i = 0; i < 200; ++i ) {
- if ( i % 2 == 0 ) {
- t.save( { a:i } );
- orClause.push( { a:i } );
- }
- else {
- t.save( { b:i } );
- orClause.push( { b:i } );
- }
-}
-
-c = t.find( { $or:orClause }, { _id:0, a:1 } );
-
-// No odd values of a were saved, so we should not see any in the results.
-while( c.hasNext() ) {
- o = c.next();
- if ( o.a ) {
- assert.eq( 0, o.a % 2, 'unexpected result: ' + tojson( o ) );
- }
-}
-
-c = t.find( { $or:orClause }, { _id:0, b:1 } );
-
-// No even values of b were saved, so we should not see any in the results.
-while( c.hasNext() ) {
- o = c.next();
- if ( o.b ) {
- assert.eq( 1, o.b % 2, 'unexpected result: ' + tojson( o ) );
- }
-}
diff --git a/jstests/coveredIndex5.js b/jstests/coveredIndex5.js
deleted file mode 100644
index ee383cd93e2..00000000000
--- a/jstests/coveredIndex5.js
+++ /dev/null
@@ -1,70 +0,0 @@
-// Test use of covered indexes when there are multiple candidate indexes.
-
-t = db.jstests_coveredIndex5;
-t.drop();
-
-t.ensureIndex( { a:1, b:1 } );
-t.ensureIndex( { a:1, c:1 } );
-
-function checkFields( query, projection ) {
- t.ensureIndex( { z:1 } ); // clear query patterns
- t.dropIndex( { z:1 } );
-
- results = t.find( query, projection ).toArray();
-
- expectedFields = [];
- for ( k in projection ) {
- if ( k != '_id' ) {
- expectedFields.push( k );
- }
- }
-
- vals = [];
- for ( i in results ) {
- r = results[ i ];
- printjson(r);
- assert.eq( 0, r.a );
- assert.eq( expectedFields, Object.keySet( r ) );
- for ( k in projection ) {
- if ( k != '_id' && k != 'a' ) {
- vals.push( r[ k ] );
- }
- }
- }
-
- if ( vals.length != 0 ) {
- vals.sort();
- assert.eq( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], vals );
- }
-}
-
-function checkCursorCovered( cursor, covered, count, query, projection ) {
- checkFields( query, projection );
- explain = t.find( query, projection ).explain( true );
- if (covered) {
- assert.eq( cursor, explain.cursor );
- }
- assert.eq( covered, explain.indexOnly );
- assert.eq( count, explain.n );
-}
-
-for( i = 0; i < 10; ++i ) {
- t.save( { a:0, b:i, c:9-i } );
-}
-
-checkCursorCovered( 'BtreeCursor a_1_b_1', true, 10, { a:0 }, { _id:0, a:1 } );
-
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1 } );
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, b:1 } );
-
-// Covered index on a,c not preferentially selected.
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, c:1 } );
-
-t.save( { a:0, c:[ 1, 2 ] } );
-t.save( { a:1 } );
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 11, { a:0, d:null }, { _id:0, a:1 } );
-
-t.save( { a:0, b:[ 1, 2 ] } );
-t.save( { a:1 } );
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 12, { a:0, d:null }, { _id:0, a:1 } );
-
diff --git a/jstests/covered_index_compound_1.js b/jstests/covered_index_compound_1.js
deleted file mode 100644
index 7e529785d12..00000000000
--- a/jstests/covered_index_compound_1.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// Compound index covered query tests
-
-var coll = db.getCollection("covered_compound_1")
-coll.drop()
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)})
-}
-coll.ensureIndex({a:1,b:-1,c:1})
-
-// Test equality - all indexed fields queried and projected
-var plan = coll.find({a:10, b:"strvar_10", c:0}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query")
-
-// Test query on subset of fields queried and project all
-var plan = coll.find({a:26, b:"strvar_0"}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.2 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.2 - nscannedObjects should be 0 for covered query")
-
-// Test query on all fields queried and project subset
-var plan = coll.find({a:38, b:"strvar_12", c: 8}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.3 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.3 - nscannedObjects should be 0 for covered query")
-
-// Test no query
-var plan = coll.find({}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.4 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.4 - nscannedObjects should be 0 for covered query")
-
-// Test range query
-var plan = coll.find({a:{$gt:25,$lt:43}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.5 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.5 - nscannedObjects should be 0 for covered query")
-
-// Test in query
-var plan = coll.find({a:38, b:"strvar_12", c:{$in:[5,8]}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.6 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.6 - nscannedObjects should be 0 for covered query")
-
-// Test no result
-var plan = coll.find({a:38, b:"strvar_12", c:55},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.7 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.7 - nscannedObjects should be 0 for covered query")
-
-print('all tests passed')
diff --git a/jstests/covered_index_geo_1.js b/jstests/covered_index_geo_1.js
deleted file mode 100644
index 1d647dfa94c..00000000000
--- a/jstests/covered_index_geo_1.js
+++ /dev/null
@@ -1,18 +0,0 @@
-var coll = db.getCollection("covered_geo_1")
-coll.drop()
-
-coll.insert({_id : 1, loc : [ 5 , 5 ], type : "type1"})
-coll.insert({_id : 2, loc : [ 6 , 6 ], type : "type2"})
-coll.insert({_id : 3, loc : [ 7 , 7 ], type : "type3"})
-
-coll.ensureIndex({loc : "2d", type : 1});
-
-var plan = coll.find({loc : [ 6 , 6 ]}, {loc:1, type:1, _id:0}).hint({loc:"2d", type:1}).explain();
-assert.eq(false, plan.indexOnly, "geo.1.1 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "geo.1.1 - nscannedObjects should not be 0 for a non covered query")
-
-var plan = coll.find({loc : [ 6 , 6 ]}, {type:1, _id:0}).hint({loc:"2d", type:1}).explain();
-assert.eq(false, plan.indexOnly, "geo.1.2 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "geo.1.2 - nscannedObjects should not be 0 for a non covered query")
-
-print("all tests passed") \ No newline at end of file
diff --git a/jstests/covered_index_geo_2.js b/jstests/covered_index_geo_2.js
deleted file mode 100644
index 52f610b7e64..00000000000
--- a/jstests/covered_index_geo_2.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var coll = db.getCollection("covered_geo_2")
-coll.drop()
-
-coll.insert({_id : 1, loc1 : [ 5 , 5 ], type1 : "type1",
- loc2 : [ 5 , 5 ], type2 : 1})
-coll.insert({_id : 2, loc1 : [ 6 , 6 ], type1 : "type2",
- loc2 : [ 5 , 5 ], type2 : 2})
-coll.insert({_id : 3, loc1 : [ 7 , 7 ], type1 : "type3",
- loc2 : [ 5 , 5 ], type2 : 3})
-
-coll.ensureIndex({loc1 : "2dsphere", type1 : 1});
-coll.ensureIndex({type2: 1, loc2 : "2dsphere"});
-
-var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {loc1:1, type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain();
-assert.eq(false, plan.indexOnly, "geo.2.1 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "geo.2.1 - nscannedObjects should not be 0 for a non covered query")
-
-var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain();
-assert.eq(false, plan.indexOnly, "geo.2.2 - indexOnly should be false for a non covered query")
-assert.neq(0, plan.nscannedObjects, "geo.2.2 - nscannedObjects should not be 0 for a non covered query")
-
-print("all tests passed")
diff --git a/jstests/covered_index_negative_1.js b/jstests/covered_index_negative_1.js
deleted file mode 100644
index ab03e7566f6..00000000000
--- a/jstests/covered_index_negative_1.js
+++ /dev/null
@@ -1,61 +0,0 @@
-// Miscellaneous covered query tests. Mostly negative tests
-// These are tests where we do not expect the query to be a
-// covered index query. Hence we expect indexOnly=false and
-// nscannedObjects > 0
-
-var coll = db.getCollection("covered_negative_1")
-coll.drop()
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10), d: i*10, e: [i, i%10],
- f:i})
-}
-coll.ensureIndex({a:1,b:-1,c:1})
-coll.ensureIndex({e:1})
-coll.ensureIndex({d:1})
-coll.ensureIndex({f:"hashed"})
-
-// Test no projection
-var plan = coll.find({a:10, b:"strvar_10", c:0}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(false, plan.indexOnly, "negative.1.1 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.1 - nscannedObjects should not be 0 for a non covered query")
-
-// Test projection and not excluding _id
-var plan = coll.find({a:10, b:"strvar_10", c:0},{a:1, b:1, c:1}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(false, plan.indexOnly, "negative.1.2 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.2 - nscannedObjects should not be 0 for a non covered query")
-
-// Test projection of non-indexed field
-var plan = coll.find({d:100},{d:1, c:1, _id:0}).hint({d:1}).explain()
-assert.eq(false, plan.indexOnly, "negative.1.3 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.3 - nscannedObjects should not be 0 for a non covered query")
-
-// Test query and projection on a multi-key index
-var plan = coll.find({e:99},{e:1, _id:0}).hint({e:1}).explain()
-assert.eq(false, plan.indexOnly, "negative.1.4 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.4 - nscannedObjects should not be 0 for a non covered query")
-
-// Commenting out negative.1.5 and 1.6 pending fix in SERVER-8650
-// // Test projection and $natural sort
-// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, b:-1, c:1}).explain()
-// // indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8561
-// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered query")
-// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non covered query")
-
-// // Test sort on non-indexed field
-// var plan = coll.find({d:{$lt:1000}},{d:1, _id:0}).sort({c:1}).hint({d:1}).explain()
-// //indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562
-// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered query")
-// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non covered query")
-
-// Test query on non-indexed field
-var plan = coll.find({d:{$lt:1000}},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
-//indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562
-// assert.eq(true, plan.indexOnly, "negative.1.7 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.7 - nscannedObjects should not be 0 for a non covered query")
-
-// Test query on hashed indexed field
-var plan = coll.find({f:10},{f:1, _id:0}).hint({f:"hashed"}).explain()
-assert.eq(false, plan.indexOnly, "negative.1.8 - indexOnly should be false on a non covered query")
-assert.neq(0, plan.nscannedObjects, "negative.1.8 - nscannedObjects should not be 0 for a non covered query")
-
-print('all tests passed')
diff --git a/jstests/covered_index_simple_1.js b/jstests/covered_index_simple_1.js
deleted file mode 100644
index 44e3c00a9f8..00000000000
--- a/jstests/covered_index_simple_1.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// Simple covered index query test
-
-var coll = db.getCollection("covered_simple_1")
-coll.drop()
-for (i=0;i<10;i++) {
- coll.insert({foo:i})
-}
-for (i=0;i<10;i++) {
- coll.insert({foo:i})
-}
-for (i=0;i<5;i++) {
- coll.insert({bar:i})
-}
-coll.insert({foo:"string"})
-coll.insert({foo:{bar:1}})
-coll.insert({foo:null})
-coll.ensureIndex({foo:1})
-
-// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.1 - nscannedObjects should be 0 for covered query")
-
-// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.2 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.2 - nscannedObjects should be 0 for covered query")
-
-// Test equality with doc value
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.3 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.3 - nscannedObjects should be 0 for covered query")
-
-// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.4 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.4 - nscannedObjects should be 0 for covered query")
-
-// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.5 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.5 - nscannedObjects should be 0 for covered query")
-
-// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.6 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.6 - nscannedObjects should be 0 for covered query")
-
-// Test no return
-var plan = coll.find({foo:"2"}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.1.7 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.1.7 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass')
-
diff --git a/jstests/covered_index_simple_2.js b/jstests/covered_index_simple_2.js
deleted file mode 100644
index 313cca439d8..00000000000
--- a/jstests/covered_index_simple_2.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Simple covered index query test with unique index
-
-var coll = db.getCollection("covered_simple_2")
-coll.drop()
-for (i=0;i<10;i++) {
- coll.insert({foo:i})
-}
-coll.insert({foo:"string"})
-coll.insert({foo:{bar:1}})
-coll.insert({foo:null})
-coll.ensureIndex({foo:1},{unique:true})
-
-// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.1 - nscannedObjects should be 0 for covered query")
-
-// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.2 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.2 - nscannedObjects should be 0 for covered query")
-
-// Test equality with int value on a dotted field
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.3 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.3 - nscannedObjects should be 0 for covered query")
-
-// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.4 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.4 - nscannedObjects should be 0 for covered query")
-
-// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.5 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.5 - nscannedObjects should be 0 for covered query")
-
-// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.2.6 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.2.6 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass')
diff --git a/jstests/covered_index_simple_3.js b/jstests/covered_index_simple_3.js
deleted file mode 100644
index ffd80f73b5b..00000000000
--- a/jstests/covered_index_simple_3.js
+++ /dev/null
@@ -1,58 +0,0 @@
-// Simple covered index query test with a unique sparse index
-
-var coll = db.getCollection("covered_simple_3");
-coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
-}
-for (i=0;i<5;i++) {
- coll.insert({bar:i});
-}
-coll.insert({foo:"string"});
-coll.insert({foo:{bar:1}});
-coll.insert({foo:null});
-coll.ensureIndex({foo:1}, {sparse:true, unique:true});
-
-// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.1 - nscannedObjects should be 0 for covered query");
-
-// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.2 - nscannedObjects should be 0 for covered query");
-
-// Test equality with int value on a dotted field
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.3 - nscannedObjects should be 0 for covered query");
-
-// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.4 - nscannedObjects should be 0 for covered query");
-
-// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.5 - nscannedObjects should be 0 for covered query");
-
-// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.6 - nscannedObjects should be 0 for covered query");
-
-// Test $exists true
-var plan = coll.find({foo:{$exists:true}}, {foo:1, _id:0}).hint({foo:1}).explain();
-assert.eq(true, plan.indexOnly, "simple.3.7 - indexOnly should be true on covered query");
-assert.eq(0, plan.nscannedObjects, "simple.3.7 - nscannedObjects should be 0 for covered query");
-
-// Check that $nin can be covered.
-coll.dropIndexes();
-coll.ensureIndex({bar: 1});
-var plan = coll.find({bar:{$nin:[5,8]}}, {bar:1, _id:0}).hint({bar:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.3.8 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.3.8 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass')
diff --git a/jstests/covered_index_simple_id.js b/jstests/covered_index_simple_id.js
deleted file mode 100644
index c7f6811a33c..00000000000
--- a/jstests/covered_index_simple_id.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Simple covered index query test
-
-var coll = db.getCollection("covered_simple_id")
-coll.drop()
-for (i=0;i<10;i++) {
- coll.insert({_id:i})
-}
-coll.insert({_id:"string"})
-coll.insert({_id:{bar:1}})
-coll.insert({_id:null})
-
-// Test equality with int value
-var plan = coll.find({_id:1}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.1 - nscannedObjects should be 0 for covered query")
-
-// Test equality with string value
-var plan = coll.find({_id:"string"}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.2 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.2 - nscannedObjects should be 0 for covered query")
-
-// Test equality with int value on a dotted field
-var plan = coll.find({_id:{bar:1}}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.3 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.3 - nscannedObjects should be 0 for covered query")
-
-// Test no query
-var plan = coll.find({}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.4 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.4 - nscannedObjects should be 0 for covered query")
-
-// Test range query
-var plan = coll.find({_id:{$gt:2,$lt:6}}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.5 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.5 - nscannedObjects should be 0 for covered query")
-
-// Test in query
-var plan = coll.find({_id:{$in:[5,8]}}, {_id:1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "simple.id.6 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "simple.id.6 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass')
diff --git a/jstests/covered_index_sort_1.js b/jstests/covered_index_sort_1.js
deleted file mode 100644
index adfcb5c6cb6..00000000000
--- a/jstests/covered_index_sort_1.js
+++ /dev/null
@@ -1,34 +0,0 @@
-// Simple covered index query test with sort
-
-var coll = db.getCollection("covered_sort_1")
-coll.drop()
-for (i=0;i<10;i++) {
- coll.insert({foo:i})
-}
-for (i=0;i<10;i++) {
- coll.insert({foo:i})
-}
-for (i=0;i<5;i++) {
- coll.insert({bar:i})
-}
-coll.insert({foo:"1"})
-coll.insert({foo:{bar:1}})
-coll.insert({foo:null})
-coll.ensureIndex({foo:1})
-
-// Test no query and sort ascending
-var plan = coll.find({}, {foo:1, _id:0}).sort({foo:1}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "sort.1.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "sort.1.1 - nscannedObjects should be 0 for covered query")
-
-// Test no query and sort descending
-var plan = coll.find({}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "sort.1.2 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "sort.1.2 - nscannedObjects should be 0 for covered query")
-
-// Test range query with sort
-var plan = coll.find({foo:{$gt:2}}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain()
-assert.eq(true, plan.indexOnly, "sort.1.5 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "sort.1.5 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass') \ No newline at end of file
diff --git a/jstests/covered_index_sort_2.js b/jstests/covered_index_sort_2.js
deleted file mode 100644
index e5dd48b47af..00000000000
--- a/jstests/covered_index_sort_2.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// Simple covered index query test with sort on _id
-
-var coll = db.getCollection("covered_sort_2")
-coll.drop()
-for (i=0;i<10;i++) {
- coll.insert({_id:i})
-}
-coll.insert({_id:"1"})
-coll.insert({_id:{bar:1}})
-coll.insert({_id:null})
-
-// Test no query
-var plan = coll.find({}, {_id:1}).sort({_id:-1}).hint({_id:1}).explain()
-assert.eq(true, plan.indexOnly, "sort.2.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "sort.2.1 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass') \ No newline at end of file
diff --git a/jstests/covered_index_sort_3.js b/jstests/covered_index_sort_3.js
deleted file mode 100644
index 8f5986c4d76..00000000000
--- a/jstests/covered_index_sort_3.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// Compound index covered query tests with sort
-
-var coll = db.getCollection("covered_sort_3")
-coll.drop()
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)})
-}
-coll.insert
-coll.ensureIndex({a:1,b:-1,c:1})
-
-// Test no query, sort on all fields in index order
-var plan = coll.find({}, {b:1, c:1, _id:0}).sort({a:1,b:-1,c:1}).hint({a:1, b:-1, c:1}).explain()
-assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query")
-assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query")
-
-print ('all tests pass')
diff --git a/jstests/create_indexes.js b/jstests/create_indexes.js
deleted file mode 100644
index 63163ac3990..00000000000
--- a/jstests/create_indexes.js
+++ /dev/null
@@ -1,48 +0,0 @@
-
-t = db.create_indexes;
-t.drop();
-
-res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } );
-assert( res.createdCollectionAutomatically );
-assert.eq( 1, res.numIndexesBefore );
-assert.eq( 2, res.numIndexesAfter );
-
-res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } );
-assert.eq( 2, res.numIndexesBefore );
-assert.isnull( res.numIndexesAfter );
-
-res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" },
- { key : { "y" : 1 }, name : "y_1" } ] } );
-assert( !res.createdCollectionAutomatically );
-assert.eq( 2, res.numIndexesBefore );
-assert.eq( 3, res.numIndexesAfter );
-
-res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" },
- { key : { "b" : 1 }, name : "b_1" } ] } );
-assert( !res.createdCollectionAutomatically );
-assert.eq( 3, res.numIndexesBefore );
-assert.eq( 5, res.numIndexesAfter );
-
-res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" },
- { key : { "b" : 1 }, name : "b_1" } ] } );
-assert.eq( 5, res.numIndexesBefore );
-assert.isnull( res.numIndexesAfter );
-
-res = t.runCommand( "createIndexes", { indexes : [ {} ] } );
-assert( !res.ok );
-
-res = t.runCommand( "createIndexes", { indexes : [ {} , { key : { m : 1 }, name : "asd" } ] } );
-assert( !res.ok );
-
-assert.eq( 5, t.getIndexes().length );
-
-res = t.runCommand( "createIndexes",
- { indexes : [ { key : { "c" : 1 }, sparse : true, name : "c_1" } ] } )
-assert.eq( 6, t.getIndexes().length );
-assert.eq( 1, t.getIndexes().filter( function(z){ return z.sparse; } ).length );
-
-res = t.runCommand( "createIndexes",
- { indexes : [ { key : { "x" : "foo" }, name : "x_1" } ] } );
-assert( !res.ok )
-
-assert.eq( 6, t.getIndexes().length );
diff --git a/jstests/currentop.js b/jstests/currentop.js
deleted file mode 100644
index be822bf88f2..00000000000
--- a/jstests/currentop.js
+++ /dev/null
@@ -1,80 +0,0 @@
-print("BEGIN currentop.js");
-
-// test basic currentop functionality + querying of nested documents
-t = db.jstests_currentop
-t.drop();
-
-for(i=0;i<100;i++) {
- t.save({ "num": i });
-}
-// Make sure data is written before we start reading it in parallel shells.
-db.getLastError();
-
-print("count:" + t.count());
-
-function ops(q) {
- printjson( db.currentOp().inprog );
- return db.currentOp(q).inprog;
-}
-
-print("start shell");
-
-// sleep for a second for each (of 100) documents; can be killed in between documents & test should complete before 100 seconds
-s1 = startParallelShell("db.jstests_currentop.count( { '$where': function() { sleep(1000); } } )");
-
-print("sleep");
-sleep(1000);
-
-print("inprog:");
-printjson(db.currentOp().inprog)
-print()
-sleep(1);
-print("inprog:");
-printjson(db.currentOp().inprog)
-print()
-
-// need to wait for read to start
-print("wait have some ops");
-assert.soon( function(){
- return ops( { "locks.^test": "r", "ns": "test.jstests_currentop" } ).length +
- ops({ "locks.^test": "R", "ns": "test.jstests_currentop" }).length >= 1;
-}, "have_some_ops");
-print("ok");
-
-s2 = startParallelShell( "db.jstests_currentop.update( { '$where': function() { sleep(150); } }, { 'num': 1 }, false, true ); db.getLastError()" );
-
-o = [];
-
-function f() {
- o = ops({ "ns": "test.jstests_currentop" });
-
- printjson(o);
-
- var writes = ops({ "locks.^test": "w", "ns": "test.jstests_currentop" }).length;
-
- var readops = ops({ "locks.^test": "r", "ns": "test.jstests_currentop" });
- print("readops:");
- printjson(readops);
- var reads = readops.length;
-
- print("total: " + o.length + " w: " + writes + " r:" + reads);
-
- return o.length > writes && o.length > reads;
-}
-
-print("go");
-
-assert.soon( f, "f" );
-
-// avoid waiting for the operations to complete (if soon succeeded)
-for(var i in o) {
- db.killOp(o[i].opid);
-}
-
-start = new Date();
-
-s1();
-s2();
-
-// don't want to pass if timeout killed the js function
-assert( ( new Date() ) - start < 30000 );
diff --git a/jstests/cursor1.js b/jstests/cursor1.js
deleted file mode 100644
index 8448752bb0c..00000000000
--- a/jstests/cursor1.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.cursor1
-t.drop();
-
-big = "";
-while ( big.length < 50000 )
- big += "asdasdasdasdsdsdadsasdasdasD";
-
-num = Math.ceil( 10000000 / big.length );
-
-for ( var i=0; i<num; i++ ){
- t.save( { num : i , str : big } );
-}
-
-assert.eq( num , t.find().count() );
-assert.eq( num , t.find().itcount() );
-
-assert.eq( num / 2 , t.find().limit(num/2).itcount() );
-
-t.drop(); // save some space
diff --git a/jstests/cursor2.js b/jstests/cursor2.js
deleted file mode 100644
index 2389a6a5d74..00000000000
--- a/jstests/cursor2.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-/**
- * test to see if the count returned from the cursor is the number of objects that would be returned
- *
- * BUG 884
- */
-function testCursorCountVsArrLen(dbConn) {
-
- var coll = dbConn.ed_db_cursor2_ccvsal;
-
- coll.drop();
-
- coll.save({ a: 1, b : 1});
- coll.save({ a: 2, b : 1});
- coll.save({ a: 3});
-
- var fromCount = coll.find({}, {b:1}).count();
- var fromArrLen = coll.find({}, {b:1}).toArray().length;
-
- assert(fromCount == fromArrLen, "count from cursor [" + fromCount + "] != count from arrlen [" + fromArrLen + "]");
-}
-
-
-testCursorCountVsArrLen(db);
diff --git a/jstests/cursor3.js b/jstests/cursor3.js
deleted file mode 100644
index d23264c94ba..00000000000
--- a/jstests/cursor3.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// Test inequality bounds combined with ordering for a single-field index.
-// BUG 1079 (fixed)
-
-testNum = 1;
-
-function checkResults( expected, cursor , testNum ) {
- assert.eq( expected.length, cursor.count() , "testNum: " + testNum + " A : " + tojson( cursor.toArray() ) + " " + tojson( cursor.explain() ) );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ], cursor[ i ][ "a" ] , "testNum: " + testNum + " B" );
- }
-}
-
-t = db.cursor3;
-t.drop()
-
-t.save( { a: 0 } );
-t.save( { a: 1 } );
-t.save( { a: 2 } );
-
-t.ensureIndex( { a: 1 } );
-
-
-
-checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
-
-checkResults( [ 1, 2 ], t.find( { a: { $gt: 0 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 2, 1 ], t.find( { a: { $gt: 0 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 1, 2 ], t.find( { a: { $gte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 2, 1 ], t.find( { a: { $gte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
-
-checkResults( [ 0, 1 ], t.find( { a: { $lt: 2 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 1, 0 ], t.find( { a: { $lt: 2 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 0, 1 ], t.find( { a: { $lte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
-checkResults( [ 1, 0 ], t.find( { a: { $lte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
diff --git a/jstests/cursor4.js b/jstests/cursor4.js
deleted file mode 100644
index b08a72f62e5..00000000000
--- a/jstests/cursor4.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// Test inequality bounds with multi-field sorting
-
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a, cursor[ i ].a );
- assert.eq( expected[ i ].b, cursor[ i ].b );
- }
-}
-
-function testConstrainedFindMultiFieldSorting( db ) {
- r = db.ed_db_cursor4_cfmfs;
- r.drop();
-
- entries = [ { a: 0, b: 0 },
- { a: 0, b: 1 },
- { a: 1, b: 1 },
- { a: 1, b: 1 },
- { a: 2, b: 0 } ];
- for( i = 0; i < entries.length; ++i )
- r.save( entries[ i ] );
- r.ensureIndex( { a: 1, b: 1 } );
- reverseEntries = entries.slice();
- reverseEntries.reverse();
-
- checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 2, 5 ), r.find( { a: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 0, 3 ), r.find( { a: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 0, 4 ), r.find( { a: { $lt: 2 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 5 ), r.find( { a: { $lt: 2 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 4, 5 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 2, 4 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( reverseEntries.slice( 0, 1 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 3 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 0, 1 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( reverseEntries.slice( 4, 5 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-}
-
-testConstrainedFindMultiFieldSorting( db );
diff --git a/jstests/cursor5.js b/jstests/cursor5.js
deleted file mode 100644
index 6434d2b3887..00000000000
--- a/jstests/cursor5.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Test bounds with subobject indexes.
-
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a.b, cursor[ i ].a.b );
- assert.eq( expected[ i ].a.c, cursor[ i ].a.c );
- assert.eq( expected[ i ].a.d, cursor[ i ].a.d );
- assert.eq( expected[ i ].e, cursor[ i ].e );
- }
-}
-
-function testBoundsWithSubobjectIndexes( db ) {
- r = db.ed_db_cursor5_bwsi;
- r.drop();
-
- z = [ { a: { b: 1, c: 2, d: 3 }, e: 4 },
- { a: { b: 1, c: 2, d: 3 }, e: 5 },
- { a: { b: 1, c: 2, d: 4 }, e: 4 },
- { a: { b: 1, c: 2, d: 4 }, e: 5 },
- { a: { b: 2, c: 2, d: 3 }, e: 4 },
- { a: { b: 2, c: 2, d: 3 }, e: 5 } ];
- for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
- idx = { "a.d": 1, a: 1, e: -1 };
- rIdx = { "a.d": -1, a: -1, e: 1 };
- r.ensureIndex( idx );
-
- checkResults( [ z[ 0 ], z[ 4 ], z[ 2 ] ], r.find( { e: 4 } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 1 ], z[ 3 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( idx ).hint( idx ) );
-
- checkResults( [ z[ 2 ], z[ 4 ], z[ 0 ] ], r.find( { e: 4 } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 3 ], z[ 1 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( rIdx ).hint( idx ) );
-}
-
-testBoundsWithSubobjectIndexes( db );
diff --git a/jstests/cursor6.js b/jstests/cursor6.js
deleted file mode 100644
index 33944eafd3a..00000000000
--- a/jstests/cursor6.js
+++ /dev/null
@@ -1,100 +0,0 @@
-// Test different directions for compound indexes
-
-function eq( one, two ) {
- assert.eq( one.a, two.a );
- assert.eq( one.b, two.b );
-}
-
-function checkExplain( e, idx, reverse, nScanned ) {
- if ( !reverse ) {
- if ( idx ) {
- assert.eq( "BtreeCursor a_1_b_-1", e.cursor );
- } else {
- assert.eq( "BasicCursor", e.cursor );
- }
- } else {
- if ( idx ) {
- assert.eq( "BtreeCursor a_1_b_-1 reverse", e.cursor );
- } else {
- assert( false );
- }
- }
- assert.eq( nScanned, e.nscanned );
-}
-
-function check( indexed ) {
- var hint;
- if ( indexed ) {
- hint = { a: 1, b: -1 };
- } else {
- hint = { $natural: 1 };
- }
-
- e = r.find().sort( { a: 1, b: 1 } ).hint( hint ).explain();
- checkExplain( e, indexed, false, 4 );
- f = r.find().sort( { a: 1, b: 1 } ).hint( hint );
- eq( z[ 0 ], f[ 0 ] );
- eq( z[ 1 ], f[ 1 ] );
- eq( z[ 2 ], f[ 2 ] );
- eq( z[ 3 ], f[ 3 ] );
-
- e = r.find().sort( { a: 1, b: -1 } ).hint( hint ).explain();
- checkExplain( e, indexed, false, 4 );
- f = r.find().sort( { a: 1, b: -1 } ).hint( hint );
- eq( z[ 1 ], f[ 0 ] );
- eq( z[ 0 ], f[ 1 ] );
- eq( z[ 3 ], f[ 2 ] );
- eq( z[ 2 ], f[ 3 ] );
-
- e = r.find().sort( { a: -1, b: 1 } ).hint( hint ).explain();
- checkExplain( e, indexed, true && indexed, 4 );
- f = r.find().sort( { a: -1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
- eq( z[ 0 ], f[ 2 ] );
- eq( z[ 1 ], f[ 3 ] );
-
- e = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint ).explain();
- checkExplain( e, indexed, false, indexed ? 2 : 4 );
- f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint );
- eq( z[ 3 ], f[ 0 ] );
- eq( z[ 2 ], f[ 1 ] );
-
- e = r.find( { a : { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint ).explain();
- checkExplain( e, indexed, true && indexed, indexed ? 2 : 4 );
- f = r.find( { a: { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
-
- e = r.find( { a : { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint ).explain();
- checkExplain( e, indexed, false, indexed ? 2 : 4 );
- f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
-
- e = r.find().sort( { a: -1, b: -1 } ).hint( hint ).explain();
- checkExplain( e, indexed, false, 4 );
- f = r.find().sort( { a: -1, b: -1 } ).hint( hint );
- eq( z[ 3 ], f[ 0 ] );
- eq( z[ 2 ], f[ 1 ] );
- eq( z[ 1 ], f[ 2 ] );
- eq( z[ 0 ], f[ 3 ] );
-}
-
-db.setProfilingLevel( 1 );
-r = db.ed_db_cursor6;
-r.drop();
-
-z = [ { a: 1, b: 1 },
- { a: 1, b: 2 },
- { a: 2, b: 1 },
- { a: 2, b: 2 } ];
-for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
-
-r.ensureIndex( { a: 1, b: -1 } );
-
-check( false );
-check( true );
-
-assert.eq( "BasicCursor", r.find().sort( { a: 1, b: -1, z: 1 } ).hint( { $natural: -1 } ).explain().cursor );
diff --git a/jstests/cursor7.js b/jstests/cursor7.js
deleted file mode 100644
index 97cfbb738b3..00000000000
--- a/jstests/cursor7.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Test bounds with multiple inequalities and sorting.
-
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a, cursor[ i ].a );
- assert.eq( expected[ i ].b, cursor[ i ].b );
- }
-}
-
-function testMultipleInequalities( db ) {
- r = db.ed_db_cursor_mi;
- r.drop();
-
- z = [ { a: 1, b: 2 },
- { a: 3, b: 4 },
- { a: 5, b: 6 },
- { a: 7, b: 8 } ];
- for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
- idx = { a: 1, b: 1 };
- rIdx = { a: -1, b: -1 };
- r.ensureIndex( idx );
-
- checkResults( [ z[ 2 ], z[ 3 ] ], r.find( { a: { $gt: 3 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( idx ).hint( idx ) );
-
- checkResults( [ z[ 3 ], z[ 2 ] ], r.find( { a: { $gt: 3 } } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( rIdx ).hint( idx ) );
-
- checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( rIdx ).hint( idx ) );
-
- checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
-}
-
-testMultipleInequalities( db );
diff --git a/jstests/cursora.js b/jstests/cursora.js
deleted file mode 100644
index a46688a7147..00000000000
--- a/jstests/cursora.js
+++ /dev/null
@@ -1,47 +0,0 @@
-t = db.cursora
-
-function run( n , atomic ){
- if( !isNumber(n) ) {
- print("n:");
- printjson(n);
- assert(isNumber(n), "cursora.js isNumber");
- }
- t.drop()
-
- for ( i=0; i<n; i++ )
- t.insert( { _id : i } )
- db.getLastError()
-
- print("cursora.js startParallelShell n:"+n+" atomic:"+atomic)
- join = startParallelShell( "sleep(50); db.cursora.remove( {" + ( atomic ? "$atomic:true" : "" ) + "} ); db.getLastError();" );
-
- var start = null;
- var ex = null;
- var num = null;
- var end = null;
- try {
- start = new Date()
- ex = t.find(function () { num = 2; for (var x = 0; x < 1000; x++) num += 2; return num > 0; }).sort({ _id: -1 }).explain()
- num = ex.n
- end = new Date()
- }
- catch (e) {
- print("cursora.js FAIL " + e);
- join();
- throw e;
- }
-
- join()
-
- //print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) )
- assert.eq( 0 , t.count() , "after remove: " + tojson( ex ) )
- // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more reliable so cen re-enable assert
- if ( n == num )
- print( "cursora.js warning: shouldn't have counted all n: " + n + " num: " + num );
-}
-
-run( 1500 )
-run( 5000 )
-run( 1500 , true )
-run( 5000 , true )
-print("cursora.js SUCCESS")
diff --git a/jstests/cursorb.js b/jstests/cursorb.js
deleted file mode 100644
index 65e356e89cb..00000000000
--- a/jstests/cursorb.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// The 'cursor not found in map -1' warning is not logged when get more exhausts a client cursor.
-// SERVER-6931
-
-t = db.jstests_cursorb;
-t.drop();
-
-// Exhaust a client cursor in get more.
-for( i = 0; i < 200; ++i ) {
- t.save( { a:i } );
-}
-t.find().itcount();
-
-// Check that the 'cursor not found in map -1' message is not printed. This message indicates an
-// attempt to look up a cursor with an invalid id and should never appear in the log.
-log = db.adminCommand( { getLog:'global' } ).log
-log.forEach( function( line ) { assert( !line.match( /cursor not found in map -1 / ),
- 'Cursor map lookup with id -1.' ); } );
diff --git a/jstests/datasize.js b/jstests/datasize.js
deleted file mode 100644
index 13e9f11bf0c..00000000000
--- a/jstests/datasize.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// test end-to-end data allocation without powerOf2Sizes enabled
-f = db.jstests_datasize;
-f.drop();
-
-// this test requires usePowerOf2Sizes to be off
-db.createCollection( f.getName(), { usePowerOf2Sizes: false } );
-assert.eq(0, f.stats().userFlags);
-
-assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-f.save( {qq:'c'} );
-assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-f.save( {qq:'fg'} );
-assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-
-f.drop();
-db.createCollection( f.getName(), { usePowerOf2Sizes: false} );
-
-f.ensureIndex( {qq:1} );
-assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-f.save( {qq:'c'} );
-assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-f.save( {qq:'fg'} );
-assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
-
-assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}} ).ok );
-
-assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'z' }} ).size );
-assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }} ).size );
-assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{qq:1}} ).size );
-assert.eq( 36, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'d'}, max:{qq:'z' }, keyPattern:{qq:1}} ).size );
-
-assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'c' }} ).size );
-assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'d' }} ).size );
-
-assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{a:1}} ).ok );
diff --git a/jstests/datasize2.js b/jstests/datasize2.js
deleted file mode 100644
index 103cb2096ee..00000000000
--- a/jstests/datasize2.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-t = db.datasize2
-t.drop();
-
-N = 1000
-for ( i=0; i<N; i++ ){
- t.insert( { _id : i , s : "asdasdasdasdasdasdasd" } );
-}
-
-c = { dataSize : "test.datasize2" ,
- "keyPattern" : {
- "_id" : 1
- },
- "min" : {
- "_id" : 0
- },
- "max" : {
- "_id" : N
- }
- };
-
-
-assert.eq( N , db.runCommand( c ).numObjects , "A" )
-
-c.maxObjects = 100;
-assert.eq( 101 , db.runCommand( c ).numObjects , "B" )
-
diff --git a/jstests/datasize3.js b/jstests/datasize3.js
deleted file mode 100644
index df79e6d9b04..00000000000
--- a/jstests/datasize3.js
+++ /dev/null
@@ -1,34 +0,0 @@
-
-t = db.datasize3;
-t.drop()
-
-function run( options ){
- var c = { dataSize : "test.datasize3" };
- if ( options )
- Object.extend( c , options );
- return db.runCommand( c );
-}
-
-t.insert( { x : 1 } )
-
-a = run()
-b = run( { estimate : true } )
-
-assert.eq( a.size , b.size );
-
-
-t.ensureIndex( { x : 1 } )
-
-for ( i=2; i<100; i++ )
- t.insert( { x : i } )
-
-a = run( { min : { x : 20 } , max : { x : 50 } } ).size
-b = run( { min : { x : 20 } , max : { x : 50 } , estimate : true } ).size
-
-ratio = Math.min( a , b ) / Math.max( a , b );
-
-assert.lt( 0.97 , ratio , "sizes not equal a: " + a + " b: " + b );
-
-
-
-
diff --git a/jstests/date1.js b/jstests/date1.js
deleted file mode 100644
index e6fc147f9f4..00000000000
--- a/jstests/date1.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.date1;
-
-
-function go( d , msg ){
- t.drop();
- t.save({ a: 1, d: d });
-// printjson(d);
-// printjson(t.findOne().d);
- assert.eq( d , t.findOne().d , msg )
-}
-
-go( new Date() , "A" )
-go( new Date( 1 ) , "B")
-go( new Date( 0 ) , "C (old spidermonkey lib fails this test)")
-go(new Date(-10), "neg")
-
diff --git a/jstests/date2.js b/jstests/date2.js
deleted file mode 100644
index ec13865a862..00000000000
--- a/jstests/date2.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// Check that it's possible to compare a Date to a Timestamp - SERVER-3304
-// Check Date / Timestamp comparison equivalence - SERVER-3222
-
-t = db.jstests_date2;
-t.drop();
-
-t.ensureIndex( {a:1} );
-
-t.save( {a:new Timestamp()} );
-
-if ( 0 ) { // SERVER-3304
-assert.eq( 1, t.find( {a:{$gt:new Date(0)}} ).itcount() );
-}
diff --git a/jstests/date3.js b/jstests/date3.js
deleted file mode 100644
index e7ddf717c73..00000000000
--- a/jstests/date3.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Check dates before Unix epoch - SERVER-405
-
-t = db.date3;
-t.drop()
-
-d1 = new Date(-1000)
-dz = new Date(0)
-d2 = new Date(1000)
-
-t.save( {x: 3, d: dz} )
-t.save( {x: 2, d: d2} )
-t.save( {x: 1, d: d1} )
-
-function test () {
- var list = t.find( {d: {$lt: dz}} )
- assert.eq ( 1, list.size() )
- assert.eq ( 1, list[0].x )
- assert.eq ( d1, list[0].d )
- var list = t.find( {d: {$gt: dz}} )
- assert.eq ( 1, list.size() )
- assert.eq ( 2, list[0].x )
- var list = t.find().sort( {d:1} )
- assert.eq ( 3, list.size() )
- assert.eq ( 1, list[0].x )
- assert.eq ( 3, list[1].x )
- assert.eq ( 2, list[2].x )
-}
-
-test()
-t.ensureIndex( {d: 1} )
-test()
diff --git a/jstests/db.js b/jstests/db.js
deleted file mode 100644
index 66a0bd73ede..00000000000
--- a/jstests/db.js
+++ /dev/null
@@ -1,11 +0,0 @@
-function testInvalidDBNameThrowsExceptionWithConstructor() {
- assert.throws( function() { return new DB( null, "/\\" ); } );
-}
-
-function testInvalidDBNameThrowsExceptionWithSibling() {
- assert.throws( function() { return db.getSiblingDB( "/\\" ); } );
-}
-
-testInvalidDBNameThrowsExceptionWithConstructor();
-testInvalidDBNameThrowsExceptionWithSibling();
-
diff --git a/jstests/dbadmin.js b/jstests/dbadmin.js
deleted file mode 100644
index bab348d5700..00000000000
--- a/jstests/dbadmin.js
+++ /dev/null
@@ -1,105 +0,0 @@
-load('jstests/aggregation/extras/utils.js');
-
-// Check that smallArray is entirely contained by largeArray
-// returns false if a member of smallArray is not in largeArray
-function arrayIsSubset(smallArray, largeArray) {
-
- for(var i = 0; i < smallArray.length; i++) {
- if(!Array.contains(largeArray, smallArray[i])) {
- print("Could not find " + smallArray[i] + " in largeArray");
- return false;
- }
- }
-
- return true;
-}
-
-t = db.dbadmin;
-t.save( { x : 1 } );
-
-before = db._adminCommand( "serverStatus" )
-if ( before.mem.supported ){
- cmdres = db._adminCommand( "closeAllDatabases" );
- after = db._adminCommand( "serverStatus" );
- assert( before.mem.mapped > after.mem.mapped , "closeAllDatabases does something before:" + tojson( before.mem ) + " after:" + tojson( after.mem ) + " cmd res:" + tojson( cmdres ) );
- print( before.mem.mapped + " -->> " + after.mem.mapped );
-}
-else {
- print( "can't test serverStatus on this machine" );
-}
-
-t.save( { x : 1 } );
-
-res = db._adminCommand( "listDatabases" );
-assert( res.databases && res.databases.length > 0 , "listDatabases 1 " + tojson(res) );
-
-now = new Date();
-x = db._adminCommand( "ismaster" );
-assert( x.ismaster , "ismaster failed: " + tojson( x ) )
-assert( x.localTime, "ismaster didn't include time: " + tojson(x))
-localTimeSkew = x.localTime - now
-if ( localTimeSkew >= 50 ) {
- print( "Warning: localTimeSkew " + localTimeSkew + " > 50ms." )
-}
-assert.lt( localTimeSkew, 500, "isMaster.localTime" )
-
-before = db.runCommand( "serverStatus" )
-print(before.uptimeEstimate);
-sleep( 5000 )
-after = db.runCommand( "serverStatus" )
-print(after.uptimeEstimate);
-assert.lt( 2 , after.uptimeEstimate , "up1" )
-assert.gt( after.uptimeEstimate , before.uptimeEstimate , "up2" )
-
-// Test startup_log
-var stats = db.getSisterDB( "local" ).startup_log.stats();
-assert(stats.capped);
-
-var latestStartUpLog = db.getSisterDB( "local" ).startup_log.find().sort( { $natural: -1 } ).limit(1).next();
-var serverStatus = db._adminCommand( "serverStatus" );
-var cmdLine = db._adminCommand( "getCmdLineOpts" ).parsed;
-
-// Test that the startup log has the expected keys
-var verbose = false;
-var expectedKeys = ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"];
-var keys = Object.keySet(latestStartUpLog);
-assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed');
-
-// Tests _id implicitly - should be comprised of host-timestamp
-// Setup expected startTime and startTimeLocal from the supplied timestamp
-var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp
-var _idUptime = _id.pop();
-var _idHost = _id.join('-');
-var uptimeSinceEpochRounded = Math.floor(_idUptime/1000) * 1000;
-var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime
-
-assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id");
-assert.eq(serverStatus.host.split(':')[0], latestStartUpLog.hostname, "Hostname doesn't match one in server status");
-assert.closeWithinMS(startTime, latestStartUpLog.startTime,
- "StartTime doesn't match one from _id", 2000); // Expect less than 2 sec delta
-assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts");
-assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus");
-
-// Test buildinfo
-var buildinfo = db.runCommand( "buildinfo" );
-delete buildinfo.ok; // Delete extra meta info not in startup_log
-var isMaster = db._adminCommand( "ismaster" );
-
-// Test buildinfo has the expected keys
-var expectedKeys = ["version", "gitVersion", "OpenSSLVersion", "sysInfo", "loaderFlags", "compilerFlags", "allocator", "versionArray", "javascriptEngine", "bits", "debug", "maxBsonObjectSize"];
-var keys = Object.keySet(latestStartUpLog.buildinfo);
-// Disabled to check
-assert(arrayIsSubset(expectedKeys, keys), "buildinfo keys failed! \n expected:\t" + expectedKeys + "\n actual:\t" + keys);
-assert.eq(buildinfo, latestStartUpLog.buildinfo, "buildinfo doesn't match that from buildinfo command");
-
-// Test version and version Array
-var version = latestStartUpLog.buildinfo.version.split('-')[0];
-var versionArray = latestStartUpLog.buildinfo.versionArray;
-var versionArrayCleaned = [];
-// Only create a string with 2 dots (2.5.5, not 2.5.5.0)
-for (var i = 0; i < (versionArray.length - 1); i++) if (versionArray[i] >= 0) { versionArrayCleaned.push(versionArray[i]); }
-
-assert.eq(serverStatus.version, latestStartUpLog.buildinfo.version, "Mongo version doesn't match that from ServerStatus");
-assert.eq(version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray");
-assert(["V8", "SpiderMonkey", "Unknown"].indexOf(latestStartUpLog.buildinfo.javascriptEngine) > -1);
-assert.eq(isMaster.maxBsonObjectSize, latestStartUpLog.buildinfo.maxBsonObjectSize, "maxBsonObjectSize doesn't match one from ismaster");
diff --git a/jstests/dbcase.js b/jstests/dbcase.js
deleted file mode 100644
index 25c0bcab37a..00000000000
--- a/jstests/dbcase.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Check db name duplication constraint SERVER-2111
-
-a = db.getSisterDB( "dbcasetest_dbnamea" )
-b = db.getSisterDB( "dbcasetest_dbnameA" )
-
-a.dropDatabase();
-b.dropDatabase();
-
-a.foo.save( { x : 1 } )
-z = db.getLastErrorObj();
-assert.eq( 0 , z.code || 0 , "A : " + tojson(z) )
-
-b.foo.save( { x : 1 } )
-z = db.getLastErrorObj();
-assert.eq( 13297 , z.code || 0 , "B : " + tojson(z) )
-
-assert.neq( -1, db.getMongo().getDBNames().indexOf( a.getName() ) );
-assert.eq( -1, db.getMongo().getDBNames().indexOf( b.getName() ) );
-printjson( db.getMongo().getDBs().databases );
-
-a.dropDatabase();
-b.dropDatabase();
-
-ai = db.getMongo().getDBNames().indexOf( a.getName() );
-bi = db.getMongo().getDBNames().indexOf( b.getName() );
-// One of these dbs may exist if there is a slave active, but they must
-// not both exist.
-assert( ai == -1 || bi == -1 );
-printjson( db.getMongo().getDBs().databases );
diff --git a/jstests/dbcase2.js b/jstests/dbcase2.js
deleted file mode 100644
index f9973d98837..00000000000
--- a/jstests/dbcase2.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// SERVER-2111 Check that an in memory db name will block creation of a db with a similar but differently cased name.
-
-a = db.getSisterDB( "dbcase2test_dbnamea" )
-b = db.getSisterDB( "dbcase2test_dbnameA" )
-
-a.c.count();
-assert.throws( function() { b.c.count() } );
-
-assert.eq( -1, db.getMongo().getDBNames().indexOf( "dbcase2test_dbnameA" ) );
diff --git a/jstests/dbhash.js b/jstests/dbhash.js
deleted file mode 100644
index 7fea4b4d50c..00000000000
--- a/jstests/dbhash.js
+++ /dev/null
@@ -1,58 +0,0 @@
-
-a = db.dbhasha;
-b = db.dbhashb;
-
-a.drop();
-b.drop();
-
-// debug SERVER-761
-db.getCollectionNames().forEach( function( x ) {
- v = db[ x ].validate();
- if ( !v.valid ) {
- print( x );
- printjson( v );
- }
- } );
-
-function dbhash( mydb ) {
- var ret = mydb.runCommand( "dbhash" );
- assert.commandWorked( ret, "dbhash failure" );
- return ret;
-}
-
-function gh( coll , mydb ){
- if ( ! mydb ) mydb = db;
- var x = dbhash( mydb ).collections[coll.getName()];
- if ( ! x )
- return "";
- return x;
-}
-
-function dbh( mydb ){
- return dbhash( mydb ).md5;
-}
-
-assert.eq( gh( a ) , gh( b ) , "A1" );
-
-a.insert( { _id : 5 } );
-assert.neq( gh( a ) , gh( b ) , "A2" );
-
-b.insert( { _id : 5 } );
-assert.eq( gh( a ) , gh( b ) , "A3" );
-
-dba = db.getSisterDB( "dbhasha" );
-dbb = db.getSisterDB( "dbhashb" );
-
-dba.dropDatabase();
-dbb.dropDatabase();
-
-assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" );
-assert.eq( dbh( dba ) , dbh( dbb ) , "C1" );
-
-dba.foo.insert( { _id : 5 } );
-assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" );
-assert.neq( dbh( dba ) , dbh( dbb ) , "C2" );
-
-dbb.foo.insert( { _id : 5 } );
-assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" );
-assert.eq( dbh( dba ) , dbh( dbb ) , "C3" );
diff --git a/jstests/dbhash2.js b/jstests/dbhash2.js
deleted file mode 100644
index ac491291c2b..00000000000
--- a/jstests/dbhash2.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-mydb = db.getSisterDB( "config" );
-
-t = mydb.foo;
-t.drop();
-
-t.insert( { x : 1 } );
-res1 = mydb.runCommand( "dbhash" );
-assert( res1.fromCache.indexOf( "config.foo" ) == -1 );
-
-res2 = mydb.runCommand( "dbhash" );
-assert( res2.fromCache.indexOf( "config.foo" ) >= 0 );
-assert.eq( res1.collections.foo, res2.collections.foo );
-
-t.insert( { x : 2 } );
-res3 = mydb.runCommand( "dbhash" );
-assert( res3.fromCache.indexOf( "config.foo" ) < 0 );
-assert.neq( res1.collections.foo, res3.collections.foo );
-
-
-
-
diff --git a/jstests/dbref1.js b/jstests/dbref1.js
deleted file mode 100644
index 4a827662c1a..00000000000
--- a/jstests/dbref1.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-a = db.dbref1a;
-b = db.dbref1b;
-
-a.drop();
-b.drop();
-
-a.save( { name : "eliot" } );
-b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } );
-assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
diff --git a/jstests/dbref2.js b/jstests/dbref2.js
deleted file mode 100644
index d1b4870322d..00000000000
--- a/jstests/dbref2.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-a = db.dbref2a;
-b = db.dbref2b;
-c = db.dbref2c;
-
-a.drop();
-b.drop();
-c.drop();
-
-a.save( { name : "eliot" } );
-b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } );
-c.save( { num : 1 , links : [ new DBRef( "dbref2a" , a.findOne()._id ) ] } );
-
-assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
-assert.neq( "el" , b.findOne().link.fetch().name , "B" );
-
-// $elemMatch value
-var doc = c.findOne( { links: { $elemMatch: { $ref : "dbref2a", $id : a.findOne()._id } } } );
-assert.eq( "eliot" , doc.links[0].fetch().name , "C" );
-assert.neq( "el" , doc.links[0].fetch().name , "D" );
diff --git a/jstests/dbref3.js b/jstests/dbref3.js
deleted file mode 100644
index 2f3ab8fa79c..00000000000
--- a/jstests/dbref3.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// Make sure we only make a DBRef object for objects where the first field is a string named $ref
-// and the second field is $id with any type. Only the first two fields matter for deciding if it
-// is a DBRef. See http://docs.mongodb.org/manual/reference/database-references/#dbrefs.
-
-var t = db.dbref3;
-
-t.drop();
-
-// true cases
-t.insert({sub: {$ref: "foo", $id: "bar"}, dbref: true});
-t.insert({sub: {$ref: "foo", $id: "bar", $db: "baz"}, dbref: true});
-t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted
-t.insert({sub: {$ref: "foo", $id: ObjectId()}, dbref: true});
-t.insert({sub: {$ref: "foo", $id: 1}, dbref: true});
-
-t.insert({sub: {$ref: 123/*not a string*/, $id: "bar"}, dbref: false});
-t.insert({sub: {$id: "bar", $ref: "foo"}, dbref: false});
-t.insert({sub: {$ref: "foo"}, dbref: false});
-t.insert({sub: {$id: "foo"}, dbref: false});
-t.insert({sub: {other: 1, $ref: "foo", $id: "bar"}, dbref: false});
-
-t.find().forEach(function(obj) {
- assert.eq(obj.sub.constructor == DBRef, obj.dbref, tojson(obj));
-});
-
-// We should be able to run distinct against DBRef fields.
-var distinctRefs = t.distinct('sub.$ref');
-print('distinct $ref = ' + distinctRefs);
-
-var distinctIDs = t.distinct('sub.$id');
-print('distinct $id = ' + distinctIDs);
-
-var distinctDBs = t.distinct('sub.$db');
-print('distinct $db = ' + distinctDBs);
-
-// Confirm number of unique values in each DBRef field.
-assert.eq(2, distinctRefs.length);
-assert.eq(4, distinctIDs.length);
-assert.eq(1, distinctDBs.length);
-
-// $id is an array. perform positional projection on $id.
-t.insert({sub: {$ref: "foo", $id: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]}});
-var k = t.findOne({'sub.$id': {$elemMatch: {x: 2}}}, {_id: 0, 'sub.$id.$': 1});
-print('k = ' + tojson(k));
-assert.eq({sub: {$id: [{x: 2, y:2}]}}, k); \ No newline at end of file
diff --git a/jstests/delx.js b/jstests/delx.js
deleted file mode 100644
index aa858e92cbd..00000000000
--- a/jstests/delx.js
+++ /dev/null
@@ -1,32 +0,0 @@
-
-a = db.getSisterDB("delxa" )
-b = db.getSisterDB("delxb" )
-
-function setup( mydb ){
- mydb.dropDatabase();
- for ( i=0; i<100; i++ ){
- mydb.foo.insert( { _id : i } );
- }
- mydb.getLastError();
-}
-
-setup( a );
-setup( b );
-
-assert.eq( 100 , a.foo.find().itcount() , "A1" )
-assert.eq( 100 , b.foo.find().itcount() , "A2" )
-
-x = a.foo.find().sort( { _id : 1 } ).batchSize( 60 )
-y = b.foo.find().sort( { _id : 1 } ).batchSize( 60 )
-
-x.next();
-y.next();
-
-a.foo.remove( { _id : { $gt : 50 } } );
-db.getLastError();
-
-assert.eq( 51 , a.foo.find().itcount() , "B1" )
-assert.eq( 100 , b.foo.find().itcount() , "B2" )
-
-assert.eq( 59 , x.itcount() , "C1" )
-assert.eq( 99 , y.itcount() , "C2" ); // this was asserting because ClientCursor byLoc doesn't take db into consideration
diff --git a/jstests/depth_limit.js b/jstests/depth_limit.js
deleted file mode 100644
index 7523a1fc9fe..00000000000
--- a/jstests/depth_limit.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// SERVER-11781 Don't crash when converting deeply nested or cyclical JS objects to BSON.
-
-function test() {
- function assertTooBig(obj) {
- // This used to crash rather than throwing an exception.
- assert.throws(function(){Object.bsonsize(obj)});
- }
-
- function assertNotTooBig(obj) {
- assert.doesNotThrow(function(){Object.bsonsize(obj)});
- }
-
- function objWithDepth(depth) {
- var out = 1;
- while (depth--) {
- out = {o: out};
- }
- return out;
- }
-
- function arrayWithDepth(depth) {
- var out = 1;
- while (depth--) {
- out = [out];
- }
- return out;
- }
-
- assertNotTooBig({});
- assertNotTooBig({array: []});
-
- var objCycle = {};
- objCycle.cycle = objCycle;
- assertTooBig(objCycle);
-
- var arrayCycle = [];
- arrayCycle.push(arrayCycle);
- assertTooBig({array: arrayCycle});
-
- var objDepthLimit = 150;
- assertNotTooBig(objWithDepth(objDepthLimit - 1));
- assertTooBig(objWithDepth(objDepthLimit));
-
-
- var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object
- assertNotTooBig({array: arrayWithDepth(arrayDepthLimit - 1)});
- assertTooBig({array: arrayWithDepth(arrayDepthLimit)});
-}
-
-// test in shell
-test();
-
-// test on server
-db.depth_limit.drop();
-db.depth_limit.insert({});
-db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server
diff --git a/jstests/distinct1.js b/jstests/distinct1.js
deleted file mode 100644
index 03e425af761..00000000000
--- a/jstests/distinct1.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-t = db.distinct1;
-t.drop();
-
-assert.eq( 0 , t.distinct( "a" ).length , "test empty" );
-
-t.save( { a : 1 } )
-t.save( { a : 2 } )
-t.save( { a : 2 } )
-t.save( { a : 2 } )
-t.save( { a : 3 } )
-
-
-res = t.distinct( "a" );
-assert.eq( "1,2,3" , res.toString() , "A1" );
-
-assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" );
-
-t.drop();
-
-t.save( { a : { b : "a" } , c : 12 } );
-t.save( { a : { b : "b" } , c : 12 } );
-t.save( { a : { b : "c" } , c : 12 } );
-t.save( { a : { b : "c" } , c : 12 } );
-
-res = t.distinct( "a.b" );
-assert.eq( "a,b,c" , res.toString() , "B1" );
-printjson(t._distinct( "a.b" ).stats);
-assert.eq( "BasicCursor" , t._distinct( "a.b" ).stats.cursor , "B2" )
-
-t.drop();
-
-t.save({_id: 1, a: 1});
-t.save({_id: 2, a: 2});
-
-// Test distinct with _id.
-res = t.distinct( "_id" );
-assert.eq( "1,2", res.toString(), "C1" );
-res = t.distinct( "a", {_id: 1} );
-assert.eq( "1", res.toString(), "C2" );
diff --git a/jstests/distinct2.js b/jstests/distinct2.js
deleted file mode 100644
index 41ee78c5117..00000000000
--- a/jstests/distinct2.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-t = db.distinct2;
-t.drop();
-
-t.save({a:null});
-assert.eq( 0 , t.distinct('a.b').length , "A" );
-
-t.drop();
-t.save( { a : 1 } );
-assert.eq( [1] , t.distinct( "a" ) , "B" );
-t.save( {} )
-assert.eq( [1] , t.distinct( "a" ) , "C" );
-
diff --git a/jstests/distinct3.js b/jstests/distinct3.js
deleted file mode 100644
index 336663d3b0d..00000000000
--- a/jstests/distinct3.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Yield and delete test case for query optimizer cursor. SERVER-4401
-
-t = db.jstests_distinct3;
-t.drop();
-
-t.ensureIndex({a:1});
-t.ensureIndex({b:1});
-
-for( i = 0; i < 50; ++i ) {
- for( j = 0; j < 20; ++j ) {
- t.save({a:i,c:i,d:j});
- }
-}
-for( i = 0; i < 1000; ++i ) {
- t.save({b:i,c:i+50});
-}
-db.getLastError();
-
-// Attempt to remove the last match for the {a:1} index scan while distinct is yielding.
-p = startParallelShell( 'for( i = 0; i < 2500; ++i ) { ' +
- ' db.jstests_distinct3.remove( { a:49 } ); ' +
- ' for( j = 0; j < 20; ++j ) { ' +
- ' db.jstests_distinct3.save( { a:49, c:49, d:j } ); ' +
- ' } ' +
- '} ' +
- 'db.getLastError(); ' );
-
-for( i = 0; i < 100; ++i ) {
- count = t.distinct( 'c', {$or:[{a:{$gte:0},d:0},{b:{$gte:0}}]} ).length;
- assert.gt( count, 1000 );
-}
-
-p();
diff --git a/jstests/distinct_array1.js b/jstests/distinct_array1.js
deleted file mode 100644
index 2f289ad2e79..00000000000
--- a/jstests/distinct_array1.js
+++ /dev/null
@@ -1,91 +0,0 @@
-t = db.distinct_array1;
-t.drop();
-
-t.save( { a : [1,2,3] } )
-t.save( { a : [2,3,4] } )
-t.save( { a : [3,4,5] } )
-t.save( { a : 9 } )
-
-
-// Without index.
-res = t.distinct( "a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "A1" );
-
-// Array element 0 without index.
-res = t.distinct( "a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "A2" );
-
-// Array element 1 without index.
-res = t.distinct( "a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "A3" );
-
-// With index.
-t.ensureIndex( { a : 1 } );
-res = t.distinct( "a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "A4" );
-
-// Array element 0 with index.
-res = t.distinct( "a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "A5" );
-
-// Array element 1 with index.
-res = t.distinct( "a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "A6" );
-
-//t.drop();
-
-t.save( { a : [{b:"a"}, {b:"d"}] , c : 12 } );
-t.save( { a : [{b:"b"}, {b:"d"}] , c : 12 } );
-t.save( { a : [{b:"c"}, {b:"e"}] , c : 12 } );
-t.save( { a : [{b:"c"}, {b:"f"}] , c : 12 } );
-t.save( { a : [] , c : 12 } );
-t.save( { a : { b : "z"} , c : 12 } );
-
-// Without index.
-res = t.distinct( "a.b" ).sort();
-assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B1" );
-
-// Array element 0 without index
-res = t.distinct( "a.0.b" ).sort();
-assert.eq( "a,b,c" , res.toString() , "B2" );
-
-// Array element 1 without index
-res = t.distinct( "a.1.b" ).sort();
-assert.eq( "d,e,f" , res.toString() , "B3" );
-
-// With index.
-t.ensureIndex( { "a.b" : 1 } );
-res = t.distinct( "a.b" );
-res.sort()
-assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B4" );
-
-// _id as an document containing an array
-t.save( { _id : { a : [1,2,3] } } )
-t.save( { _id : { a : [2,3,4] } } )
-t.save( { _id : { a : [3,4,5] } } )
-t.save( { _id : { a : 9 } } )
-
-// Without index.
-res = t.distinct( "_id.a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "C1" );
-
-// Array element 0 without index.
-res = t.distinct( "_id.a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "C2" );
-
-// Array element 1 without index.
-res = t.distinct( "_id.a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "C3" );
-
-// With index.
-t.ensureIndex( { "_id.a" : 1 } );
-res = t.distinct( "_id.a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "C4" );
-
-// Array element 0 with index.
-res = t.distinct( "_id.a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "C5" );
-
-// Array element 1 with index.
-res = t.distinct( "_id.a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "C6" );
diff --git a/jstests/distinct_index1.js b/jstests/distinct_index1.js
deleted file mode 100644
index 73682788bda..00000000000
--- a/jstests/distinct_index1.js
+++ /dev/null
@@ -1,72 +0,0 @@
-
-t = db.distinct_index1
-t.drop();
-
-function r( x ){
- return Math.floor( Math.sqrt( x * 123123 ) ) % 10;
-}
-
-function d( k , q ){
- return t.runCommand( "distinct" , { key : k , query : q || {} } )
-}
-
-for ( i=0; i<1000; i++ ){
- o = { a : r(i*5) , b : r(i) };
- t.insert( o );
-}
-
-x = d( "a" );
-assert.eq( 1000 , x.stats.n , "AA1" )
-assert.eq( 1000 , x.stats.nscanned , "AA2" )
-assert.eq( 1000 , x.stats.nscannedObjects , "AA3" )
-
-x = d( "a" , { a : { $gt : 5 } } );
-assert.eq( 398 , x.stats.n , "AB1" )
-assert.eq( 1000 , x.stats.nscanned , "AB2" )
-assert.eq( 1000 , x.stats.nscannedObjects , "AB3" )
-
-x = d( "b" , { a : { $gt : 5 } } );
-assert.eq( 398 , x.stats.n , "AC1" )
-assert.eq( 1000 , x.stats.nscanned , "AC2" )
-assert.eq( 1000 , x.stats.nscannedObjects , "AC3" )
-
-
-
-t.ensureIndex( { a : 1 } )
-
-x = d( "a" );
-// There are only 10 values. We use the fast distinct hack and only examine each value once.
-assert.eq( 10 , x.stats.n , "BA1" )
-assert.eq( 10 , x.stats.nscanned , "BA2" )
-
-x = d( "a" , { a : { $gt : 5 } } );
-// Only 4 values of a are >= 5 and we use the fast distinct hack.
-assert.eq(4, x.stats.n , "BB1" )
-assert.eq(4, x.stats.nscanned , "BB2" )
-assert.eq(0, x.stats.nscannedObjects , "BB3" )
-
-x = d( "b" , { a : { $gt : 5 } } );
-// We can't use the fast distinct hack here because we're distinct-ing over 'b'.
-assert.eq( 398 , x.stats.n , "BC1" )
-assert.eq( 398 , x.stats.nscanned , "BC2" )
-assert.eq( 398 , x.stats.nscannedObjects , "BC3" )
-
-// Check proper nscannedObjects count when using a query optimizer cursor.
-t.dropIndexes();
-t.ensureIndex( { a : 1, b : 1 } );
-x = d( "b" , { a : { $gt : 5 }, b : { $gt : 5 } } );
-printjson(x);
-// 171 is the # of results we happen to scan when we don't use a distinct
-// hack. When we use the distinct hack we scan 16, currently.
-assert.lte(x.stats.n, 171);
-assert.eq(171, x.stats.nscannedObjects , "BD3" )
-
-
-
-// Cursor name should not be empty when using $or with hashed index.
-//
-t.dropIndexes();
-t.ensureIndex( { a : "hashed" } );
-x = d( "a", { $or : [ { a : 3 }, { a : 5 } ] } );
-assert.eq( 188, x.stats.n, "DA1" );
-assert.neq( "", x.stats.cursor, "DA2" );
diff --git a/jstests/distinct_index2.js b/jstests/distinct_index2.js
deleted file mode 100644
index 67d28b8b95e..00000000000
--- a/jstests/distinct_index2.js
+++ /dev/null
@@ -1,41 +0,0 @@
-t = db.distinct_index2;
-t.drop();
-
-t.ensureIndex( { a : 1 , b : 1 } )
-t.ensureIndex( { c : 1 } )
-
-// Uniformly distributed dataset.
-// If we use a randomly generated dataset, we might not
-// generate all the distinct values in the range [0, 10).
-for ( var a=0; a<10; a++ ) {
- for ( var b=0; b<10; b++ ) {
- for ( var c=0; c<10; c++ ) {
- t.insert( { a : a , b : b , c : c } );
- }
- }
-}
-
-correct = []
-for ( i=0; i<10; i++ )
- correct.push( i )
-
-function check( field ){
- res = t.distinct( field )
- res = res.sort()
- assert.eq( correct , res , "check: " + field );
-
- if ( field != "a" ){
- res = t.distinct( field , { a : 1 } )
- res = res.sort()
- assert.eq( correct , res , "check 2: " + field );
- }
-}
-
-check( "a" )
-check( "b" )
-check( "c" )
-
-// hashed index should produce same results.
-t.dropIndexes();
-t.ensureIndex( { a : "hashed" } );
-check( "a" );
diff --git a/jstests/distinct_speed1.js b/jstests/distinct_speed1.js
deleted file mode 100644
index 4cae5b0ae06..00000000000
--- a/jstests/distinct_speed1.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.distinct_speed1;
-
-t.drop();
-for ( var i=0; i<10000; i++ ){
- t.save( { x : i % 10 } );
-}
-
-assert.eq( 10 , t.distinct("x").length , "A1" );
-
-function fast(){
- t.find().explain().millis;
-}
-
-function slow(){
- t.distinct("x");
-}
-
-for ( i=0; i<3; i++ ){
- print( "it: " + Date.timeFunc( fast ) );
- print( "di: " + Date.timeFunc( slow ) );
-}
-
-
-t.ensureIndex( { x : 1 } );
-t.distinct( "x" , { x : 5 } )
diff --git a/jstests/drop.js b/jstests/drop.js
deleted file mode 100644
index 154c35d1db3..00000000000
--- a/jstests/drop.js
+++ /dev/null
@@ -1,25 +0,0 @@
-var coll = db.jstests_drop;
-
-coll.drop();
-
-res = coll.runCommand("drop");
-assert( !res.ok, tojson( res ) );
-
-
-assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "A");
-coll.save({});
-assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "B");
-coll.ensureIndex({a : 1});
-assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "C");
-assert.commandWorked(db.runCommand({drop : coll.getName()}));
-assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "D");
-
-coll.ensureIndex({a : 1});
-assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "E");
-assert.commandWorked(db.runCommand({deleteIndexes : coll.getName(), index : "*"}),
- "delete indexes A");
-assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "G");
-
-// make sure we can still use it
-coll.save({});
-assert.eq(1, coll.find().hint("_id_").toArray().length, "H");
diff --git a/jstests/drop2.js b/jstests/drop2.js
deleted file mode 100644
index 9eb3aef93cd..00000000000
--- a/jstests/drop2.js
+++ /dev/null
@@ -1,53 +0,0 @@
-var coll = db.jstests_drop2;
-coll.drop();
-
-function debug( x ) {
- printjson( x );
-}
-
-coll.save( {} );
-db.getLastError();
-
-function getOpId( drop ) {
- var inProg = db.currentOp().inprog;
- debug( inProg );
- for ( var id in inProg ) {
- var op = inProg[ id ];
- if ( drop ) {
- if ( op.query && op.query.drop && op.query.drop == coll.getName() ) {
- return op.opid;
- }
- } else {
- if ( op.query && op.query.query && op.query.query.$where && op.ns == (coll + "") ) {
- return op.opid;
- }
- }
- }
- return null;
-}
-
-var shell1 = startParallelShell( "print(\"Count thread started\");"
- + "db.getMongo().getCollection(\""
- + (coll + "") + "\")"
- + ".count( { $where: function() {"
- + "while( 1 ) { sleep( 1 ); } } } );"
- + "print(\"Count thread terminating\");" );
-countOpId = null;
-assert.soon( function() { countOpId = getOpId( false ); return countOpId; } );
-
-var shell2 = startParallelShell( "print(\"Drop thread started\");"
- + "print(\"drop result: \" + "
- + "db.getMongo().getCollection(\""
- + (coll + "") + "\")"
- + ".drop() );"
- + "print(\"Drop thread terminating\")" );
-dropOpId = null;
-assert.soon( function() { dropOpId = getOpId( true ); return dropOpId; } );
-
-db.killOp( dropOpId );
-db.killOp( countOpId );
-
-shell1();
-shell2();
-
-coll.drop(); // in SERVER-1818, this fails
diff --git a/jstests/drop3.js b/jstests/drop3.js
deleted file mode 100644
index b2ca94a1550..00000000000
--- a/jstests/drop3.js
+++ /dev/null
@@ -1,29 +0,0 @@
-t = db.jstests_drop3;
-sub = t.sub;
-
-t.drop();
-sub.drop();
-
-
-for (var i = 0; i < 10; i++){
- t.insert({});
- sub.insert({});
-}
-
-var cursor = t.find().batchSize(2);
-var subcursor = sub.find().batchSize(2);
-
-cursor.next();
-subcursor.next();
-assert.eq( cursor.objsLeftInBatch(), 1 );
-assert.eq( subcursor.objsLeftInBatch(), 1 );
-
-t.drop(); // should invalidate cursor, but not subcursor
-db.getLastError();
-
-assert.throws( function(){ cursor.itcount() } ); // throws "cursor doesn't exist on server" error on getMore
-assert.eq( subcursor.itcount(), 9 ); //one already seen
-
-
-
-
diff --git a/jstests/drop_index.js b/jstests/drop_index.js
deleted file mode 100644
index 8e2278d00c5..00000000000
--- a/jstests/drop_index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.dropIndex;
-t.drop();
-
-t.insert( { _id : 1 , a : 2 , b : 3 } );
-assert.eq( 1 , t.getIndexes().length , "A1" );
-
-t.ensureIndex( { a : 1 } );
-t.ensureIndex( { b : 1 } );
-assert.eq( 3 , t.getIndexes().length , "A2" );
-
-x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } );
-assert.eq( 2 , t.getIndexes().length , "B1 " + tojson(x) );
-
-x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } )
-assert.eq( 1 , t.getIndexes().length , "B2" );
-
-// ensure you can recreate indexes, even if you don't use dropIndex method
-t.ensureIndex({a:1});
-assert.eq(2 , t.getIndexes().length);
diff --git a/jstests/dropdb.js b/jstests/dropdb.js
deleted file mode 100644
index 58e3dd9fdaa..00000000000
--- a/jstests/dropdb.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Test that a db does not exist after it is dropped.
-// Disabled in the small oplog suite because the slave may create a master db
-// with the same name as the dropped db when requesting a clone.
-
-m = db.getMongo();
-baseName = "jstests_dropdb";
-ddb = db.getSisterDB( baseName );
-
-print("initial dbs: " + tojson(m.getDBNames()));
-
-function check(shouldExist) {
- var dbs = m.getDBNames();
- assert.eq(Array.contains(dbs, baseName), shouldExist,
- "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist."
- + " dbs: " + tojson(dbs) + "\n" + tojson( m.getDBs() ) );
-}
-
-ddb.c.save( {} );
-ddb.getLastError();
-check(true);
-
-ddb.dropDatabase();
-check(false);
-
-ddb.dropDatabase();
-check(false);
diff --git a/jstests/dropdb_race.js b/jstests/dropdb_race.js
deleted file mode 100644
index bff7980011a..00000000000
--- a/jstests/dropdb_race.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// test dropping a db with simultaneous commits
-
-m = db.getMongo();
-baseName = "jstests_dur_droprace";
-d = db.getSisterDB(baseName);
-t = d.foo;
-
-assert(d.adminCommand({ setParameter: 1, syncdelay: 5 }).ok);
-
-var s = 0;
-
-var start = new Date();
-
-for (var pass = 0; pass < 100; pass++) {
- if (pass % 2 == 0) {
- // sometimes wait for create db first, to vary the timing of things
- t.insert({});
- if( pass % 4 == 0 )
- d.runCommand({getLastError:1,j:1});
- else
- d.getLastError();
- }
- t.insert({ x: 1 });
- t.insert({ x: 3 });
- t.ensureIndex({ x: 1 });
- sleep(s);
- if (pass % 37 == 0)
- d.adminCommand("closeAllDatabases");
- else if (pass % 13 == 0)
- t.drop();
- else if (pass % 17 == 0)
- t.dropIndexes();
- else
- d.dropDatabase();
- if (pass % 7 == 0)
- d.runCommand({getLastError:1,j:1});
- d.getLastError();
- s = (s + 1) % 25;
- //print(pass);
- if ((new Date()) - start > 60000) {
- print("stopping early");
- break;
- }
-}
diff --git a/jstests/dur/a_quick.js b/jstests/dur/a_quick.js
index cb8a2f2c57a..ab36f91327e 100755
--- a/jstests/dur/a_quick.js
+++ b/jstests/dur/a_quick.js
@@ -5,7 +5,37 @@
*/
testname = "a_quick";
-load("jstests/_tst.js");
+tst = {}
+
+tst.log = function (optional_msg) {
+ print("\n\nstep " + ++this._step + " " + (optional_msg || ""));
+}
+
+tst.success = function () {
+ print(testname + " SUCCESS");
+}
+
+/* diff files a and b, returning the difference (empty str if no difference) */
+tst.diff = function(a, b) {
+ function reSlash(s) {
+ var x = s;
+ if (_isWindows()) {
+ while (1) {
+ var y = x.replace('/', '\\');
+ if (y == x)
+ break;
+ x = y;
+ }
+ }
+ return x;
+ }
+ a = reSlash(a);
+ b = reSlash(b);
+ print("diff " + a + " " + b);
+ return run("diff", a, b);
+}
+print(testname + " BEGIN");
+tst._step = 0;
function checkNoJournalFiles(path, pass) {
var files = listFiles(path);
diff --git a/jstests/elemMatchProjection.js b/jstests/elemMatchProjection.js
deleted file mode 100644
index 73088fab699..00000000000
--- a/jstests/elemMatchProjection.js
+++ /dev/null
@@ -1,265 +0,0 @@
-// Tests for $elemMatch projections and $ positional operator projection.
-t = db.SERVER828Test;
-t.drop();
-
-date1 = new Date();
-
-// Insert various styles of arrays
-for ( i = 0; i < 100; i++ ) {
- t.insert({ group: 1, x: [ 1, 2, 3, 4, 5 ] });
- t.insert({ group: 2, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ] });
- t.insert({ group: 3, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
- y: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
- t.insert({ group: 3, x: [ { a: 1, b: 3 }, { a: -6, c: 3 } ] });
- t.insert({ group: 4, x: [ { a: 1, b: 4 }, { a: -6, c: 3 } ] });
- t.insert({ group: 5, x: [ new Date(), 5, 10, 'string', new ObjectId(), 123.456 ] });
- t.insert({ group: 6, x: [ { a: 'string', b: date1 },
- { a: new ObjectId(), b: 1.2345 },
- { a: 'string2', b: date1 } ] });
- t.insert({ group: 7, x: [ { y: [ 1, 2, 3, 4 ] } ] });
- t.insert({ group: 8, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
- t.insert({ group: 9, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] },
- { z: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
- t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
- y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
- t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
- y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
- t.insert({ group: 11, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
- covered: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
- t.insert({ group: 12, x: { y : [ { a: 1, b: 1 }, { a: 1, b: 2} ] } } );
- t.insert({ group: 13, x: [ { a: 1, b: 1 }, {a: 1, b: 2 } ] } );
- t.insert({ group: 13, x: [ { a: 1, b: 2 }, {a: 1, b: 1 } ] } );
-}
-t.ensureIndex({group:1, 'y.d':1}); // for regular index test (not sure if this is really adding anything useful)
-t.ensureIndex({group:1, covered:1}); // for covered index test
-
-//
-// SERVER-828: Positional operator ($) projection tests
-//
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).toArray()[0].x.length,
- "single object match (array length match)" );
-
-assert.eq( 2,
- t.find( { group:3, 'x.a':1 }, { 'x.$':1 } ).toArray()[0].x[0].b,
- "single object match first" );
-
-assert.eq( undefined,
- t.find( { group:3, 'x.a':2 }, { _id:0, 'x.$':1 } ).toArray()[0]._id,
- "single object match with filtered _id" );
-
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { _id:1 } ).toArray()[0].x.length,
- "sorted single object match with filtered _id (array length match)" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':1 } ).toArray()[0].x.length,
- "single object match with elemMatch" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':1} } ).toArray()[0].x.length,
- "single object match with elemMatch and positive slice" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':-1} } ).toArray()[0].x.length,
- "single object match with elemMatch and negative slice" );
-
-assert.eq( 1,
- t.find( { 'group':12, 'x.y.a':1 }, { 'x.y.$': 1 } ).toArray()[0].x.y.length,
- "single object match with two level dot notation" );
-
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { x:1 } ).toArray()[0].x.length,
- "sorted object match (array length match)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:3, 'y.dd':5 }, { 'y.$':1 } ).toArray()[0].y[0],
- "single object match (value match)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).toArray();
- }, [], "throw on invalid projection (field mismatch)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).sort( { x:1 } ).toArray()
- }, [], "throw on invalid sorted projection (field mismatch)" );
-
-assert.throws( function() {x
- t.find( { group:3, 'x.a':2 }, { 'x.$':1, group:0 } ).sort( { x:1 } ).toArray();
- }, [], "throw on invalid projection combination (include and exclude)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':1, 'y.aa':1 }, { 'x.$':1, 'y.$':1 } ).toArray();
- }, [], "throw on multiple projections" );
-
-assert.throws( function() {
- t.find( { group:3}, { 'g.$':1 } ).toArray()
- }, [], "throw on invalid projection (non-array field)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).toArray()[0].covered[0],
- "single object match (covered index)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).sort( { covered:1 } ).toArray()[0].covered[0],
- "single object match (sorted covered index)" );
-
-assert.eq( 1,
- t.find( { group:10, 'y.d': 4 }, { 'y.$':1 } ).toArray()[0].y.length,
- "single object match (regular index" );
-
-if (false) {
-
- assert.eq( 2, // SERVER-1013: allow multiple positional operators
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].y[0].bb,
- "multi match, multi proj 1" );
-
- assert.eq( 5, // SSERVER-1013: allow multiple positional operators
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].x[0].d,
- "multi match, multi proj 2" );
-
- assert.eq( 2, // SERVER-1243: allow multiple results from same matcher
- t.find( { group:2, x: { $elemMatchAll: { a:1 } } }, { 'x.$':1 } ).toArray()[0].x.length,
- "multi element match, single proj" );
-
- assert.eq( 2, // SERVER-1013: multiple array matches with one prositional operator
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1 } ).toArray()[0].y[0].bb,
- "multi match, single proj 1" );
-
- assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator
- t.find( { group:3, 'y.cc':3, 'x.b':2 }, { 'x.$':1 } ).toArray()[0].x[0].b,
- "multi match, single proj 2" );
-
-}
-
-//
-// SERVER-2238: $elemMatch projections
-//
-assert.eq( -6,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x[0].a,
- "single object match" );
-
-assert.eq( 1,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x.length,
- "filters non-matching array elements" );
-
-assert.eq( 1,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6, c:3 } } } ).toArray()[0].x.length,
- "filters non-matching array elements with multiple elemMatch criteria" );
-
-assert.eq( 1,
- t.find( { group: 13 }, { 'x' : {'$elemMatch' : { a: {$gt: 0, $lt: 2} } } } ).toArray()[0].x.length,
- "filters non-matching array elements with multiple criteria for a single element in the array" );
-
-assert.eq( 3,
- t.find( { group:4 }, { x: { $elemMatch: { a:{ $lt:1 } } } } ).toArray()[0].x[0].c,
- "object operator match" );
-
-assert.eq( [ 4 ],
- t.find( { group:1 }, { x: { $elemMatch: { $in:[100, 4, -123] } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [ {a : 1, b : 2} ],
- t.find( { group:2 }, { x: { $elemMatch: { a: { $in:[1] } } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [1],
- t.find( { group:1 }, { x: { $elemMatch: { $nin:[4, 5, 6] } } } ).toArray()[0].x,
- "$nin number match" );
-
-// but this may become a user assertion, since a single element of an array can't match more than one value
-assert.eq( [ 1],
- t.find( { group:1 }, { x: { $elemMatch: { $all:[1] } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [ { a: 'string', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a:'string' } } } ).toArray()[0].x,
- "mixed object match on string eq" );
-
-assert.eq( [ { a: 'string2', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a:/ring2/ } } } ).toArray()[0].x,
- "mixed object match on regexp" );
-
-assert.eq( [ { a: 'string', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a: { $type: 2 } } } } ).toArray()[0].x,
- "mixed object match on type" );
-
-assert.eq( [ { a : 2, c : 3} ],
- t.find( { group:2 }, { x: { $elemMatch: { a: { $ne: 1 } } } } ).toArray()[0].x,
- "mixed object match on ne" );
-
-assert.eq( [ {a : 1, d : 5} ],
- t.find( { group:3 }, { x: { $elemMatch: { d: { $exists: true } } } } ).toArray()[0].x,
- "mixed object match on exists" );
-
-assert.eq( [ {a : 2, c : 3} ],
- t.find( { group:3 }, { x: { $elemMatch: { a: { $mod : [2, 0 ] } } } } ).toArray()[0].x,
- "mixed object match on mod" );
-
-assert.eq( {"x" : [ { "a" : 1, "b" : 2 } ], "y" : [ { "c" : 3, "d" : 4 } ] },
- t.find( { group:10 }, { _id : 0,
- x: { $elemMatch: { a: 1 } },
- y: { $elemMatch: { c: 3 } } } ).toArray()[0],
- "multiple $elemMatch on unique fields 1" );
-
-if (false) {
-
- assert.eq( 2 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:4 }, { x: { $elemMatchAll: { a:{ $lte:2 } } } } ).toArray()[0].x.length,
- "multi object match" );
-
- assert.eq( 3 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:1 }, { x: { $elemMatchAll: { $in:[1, 2, 3] } } } ).toArray()[0].x.length,
- "$in number match" );
-
- assert.eq( 1 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:5 }, { x: { $elemMatchAll: { $ne: 5 } } } ).toArray()[0].x.length,
- "single mixed type match 1" );
-
- assert.eq( 1 , // SERVER-831: handle nested arrays
- t.find( { group:9 }, { 'x.y': { $elemMatch: { a: 1 } } } ).toArray()[0].x.length,
- "single dotted match" );
-
-}
-
-//
-// Batch/getMore tests
-//
-// test positional operator across multiple batches
-a = t.find( { group:3, 'x.b':2 }, { 'x.$':1 } ).batchSize(1)
-while ( a.hasNext() ) {
- assert.eq( 2, a.next().x[0].b, "positional getMore test");
-}
-
-// test $elemMatch operator across multiple batches
-a = t.find( { group:3 }, { x:{$elemMatch:{a:1}} } ).batchSize(1)
-while ( a.hasNext() ) {
- assert.eq( 1, a.next().x[0].a, "positional getMore test");
-}
-
-// verify the positional update operator matches the same element as the the positional find. this
-// is to ensure consistent behavior with updates until SERVER-1013 is resolved, at which point the
-// following tests should be updated.
-
-t.update({ group: 10, 'x.a': 3, 'y.c':1 }, { $set:{'x.$':100} }, false, true );
-// updated the wrong element, so the following assertions should be true
-assert.eq( 100,
- t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0],
- "wrong single element match after update" );
-
-assert.eq( 100,
- t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0],
- "wrong single element match after update" );
-
-t.remove({ group: 10 });
-t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
- y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
-
-t.update({ group: 10, 'y.c':1, 'x.a': 3 }, { $set:{'x.$':100} }, false, true );
-// updated the correct element
-assert.eq( 100,
- t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0],
- "right single element match after update" );
-assert.eq( 100,
- t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0],
- "right single element match after update" );
diff --git a/jstests/error2.js b/jstests/error2.js
deleted file mode 100644
index 8c27d6250e1..00000000000
--- a/jstests/error2.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Test that client gets stack trace on failed invoke
-
-f = db.jstests_error2;
-
-f.drop();
-
-f.save( {a:1} );
-
-assert.throws(
- function(){
- c = f.find({$where : function(){ return a() }});
- c.next();
- }
-);
-
-assert.throws(
- function(){
- db.eval( function() { return a(); } );
- }
-);
-
diff --git a/jstests/error5.js b/jstests/error5.js
deleted file mode 100644
index 5884d20d8c1..00000000000
--- a/jstests/error5.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-t = db.error5
-t.drop();
-
-assert.throws( function(){ t.save( 4 ); printjson( t.findOne() ) } , null , "A" );
-t.save( { a : 1 } )
-assert.eq( 1 , t.count() , "B" );
-
diff --git a/jstests/eval0.js b/jstests/eval0.js
deleted file mode 100644
index 4375cace839..00000000000
--- a/jstests/eval0.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-assert.eq( 17 , db.eval( function(){ return 11 + 6; } ) , "A" );
-assert.eq( 17 , db.eval( function( x ){ return 10 + x; } , 7 ) , "B" );
-
-// check that functions in system.js work
-db.system.js.insert({_id: "add", value: function(x,y){ return x + y;}});
-assert.eq( 20 , db.eval( "this.add(15, 5);" ) , "C" );
-
diff --git a/jstests/eval1.js b/jstests/eval1.js
deleted file mode 100644
index 4a5ca75f09b..00000000000
--- a/jstests/eval1.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.eval1;
-t.drop();
-
-t.save( { _id : 1 , name : "eliot" } );
-t.save( { _id : 2 , name : "sara" } );
-
-f = function(id){
- return db["eval1"].findOne( { _id : id } ).name;
-}
-
-
-assert.eq( "eliot" , f( 1 ) , "A" );
-assert.eq( "sara" , f( 2 ) , "B" );
-assert.eq( "eliot" , db.eval( f , 1 ) , "C" );
-assert.eq( "sara" , db.eval( f , 2 ) , "D" );
-
diff --git a/jstests/eval2.js b/jstests/eval2.js
deleted file mode 100644
index 6e39bb4a7bd..00000000000
--- a/jstests/eval2.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.eval2;
-t.drop();
-t.save({a:1});
-t.save({a:1});
-
-var f = db.group(
- {
- ns: t.getName(),
- key: { a:true},
- cond: { a:1 },
- reduce: function(obj,prev) { prev.csum++; } ,
- initial: { csum: 0}
- }
-);
-
-assert(f[0].a == 1 && f[0].csum == 2 , "on db" );
-
-var f = t.group(
- {
- key: { a:true},
- cond: { a:1 },
- reduce: function(obj,prev) { prev.csum++; } ,
- initial: { csum: 0}
- }
-);
-
-assert(f[0].a == 1 && f[0].csum == 2 , "on coll" );
diff --git a/jstests/eval3.js b/jstests/eval3.js
deleted file mode 100644
index 404d4d863b7..00000000000
--- a/jstests/eval3.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.eval3;
-t.drop();
-
-t.save( { _id : 1 , name : "eliot" } );
-assert.eq( 1 , t.count() , "A" );
-
-function z( a , b ){
- db.eval3.save( { _id : a , name : b } );
- return b;
-}
-
-z( 2 , "sara" );
-assert.eq( 2 , t.count() , "B" );
-
-assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
-
-assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" );
-assert.eq( 3 , t.count() , "D" );
-
-assert.eq( "eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
diff --git a/jstests/eval4.js b/jstests/eval4.js
deleted file mode 100644
index 31d6ef0c2a8..00000000000
--- a/jstests/eval4.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-t = db.eval4;
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
-
-assert.eq( 3 , t.count() , "A" );
-
-function f( x ){
- db.eval4.remove( { a : x } );
-}
-
-f( 2 );
-assert.eq( 2 , t.count() , "B" );
-
-db.eval( f , 2 );
-assert.eq( 2 , t.count() , "C" );
-
-db.eval( f , 3 );
-assert.eq( 1 , t.count() , "D" );
-
diff --git a/jstests/eval5.js b/jstests/eval5.js
deleted file mode 100644
index a9223a555a6..00000000000
--- a/jstests/eval5.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-t = db.eval5;
-t.drop();
-
-t.save( { a : 1 , b : 2 , c : 3 } );
-
-assert.eq( 3 ,
- db.eval(
- function(z){
- return db.eval5.find().toArray()[0].c;
- }
- ) ,
- "something weird A"
- );
-
-assert.isnull(
- db.eval(
- function(z){
- return db.eval5.find( {} , { a : 1 } ).toArray()[0].c;
- }
- ),
- "field spec didn't work"
- );
diff --git a/jstests/eval6.js b/jstests/eval6.js
deleted file mode 100644
index 5fe096974c6..00000000000
--- a/jstests/eval6.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.eval6;
-t.drop();
-
-t.save( { a : 1 } );
-
-db.eval(
- function(){
- o = db.eval6.findOne();
- o.b = 2;
- db.eval6.save( o );
- }
-);
-
-assert.eq( 2 , t.findOne().b );
diff --git a/jstests/eval7.js b/jstests/eval7.js
deleted file mode 100644
index 45e06af276c..00000000000
--- a/jstests/eval7.js
+++ /dev/null
@@ -1,3 +0,0 @@
-
-assert.eq( 6 , db.eval( "5 + 1" ) , "A" )
-assert.throws( function(z){ db.eval( "5 + function x; + 1" )} );
diff --git a/jstests/eval8.js b/jstests/eval8.js
deleted file mode 100644
index 072a890e80a..00000000000
--- a/jstests/eval8.js
+++ /dev/null
@@ -1,19 +0,0 @@
-
-t = db.eval8;
-t.drop();
-
-x = { a : 1 , b : 2 };
-t.save( x );
-x = t.findOne();
-
-assert( x.a && x.b , "A" );
-delete x.b;
-
-assert( x.a && ! x.b , "B" )
-x.b = 3;
-assert( x.a && x.b , "C" );
-assert.eq( 3 , x.b , "D" );
-
-t.save( x );
-y = t.findOne();
-assert.eq( tojson( x ) , tojson( y ) , "E" );
diff --git a/jstests/eval9.js b/jstests/eval9.js
deleted file mode 100644
index 9c6642901e4..00000000000
--- a/jstests/eval9.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ]
-
-for ( var i=0; i<a.length; i++ ){
- var ret = db.eval( "function( a , i ){ return a[i]; }" , a , i );
- assert.eq( typeof( a[i] ) , typeof( ret ) , "type test" );
- assert.eq( a[i] , ret , "val test: " + typeof( a[i] ) );
-}
-
-db.eval9.drop();
-db.eval9.save( { a : 17 } );
-
-assert.eq( 1 , db.eval( "return db.eval9.find().toArray()" ).length , "A" );
-assert.eq( 17 , db.eval( "return db.eval9.find().toArray()" )[0].a , "B" );
-
-// just to make sure these things don't crash (but may throw an exception)
-try {
- db.eval( "return db.eval9.find()" );
- db.eval( "return db.eval9" );
- db.eval( "return db" );
- db.eval( "return print" );
-} catch (ex) { } \ No newline at end of file
diff --git a/jstests/eval_nolock.js b/jstests/eval_nolock.js
deleted file mode 100644
index 2ab96a302a5..00000000000
--- a/jstests/eval_nolock.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.eval_nolock
-t.drop();
-
-for ( i=0; i<10; i++ )
- t.insert( { _id : i } );
-
-res = db.runCommand( { eval :
- function(){
- db.eval_nolock.insert( { _id : 123 } );
- return db.eval_nolock.count();
- }
- , nolock : true } );
-
-assert.eq( 11 , res.retval , "A" )
-
diff --git a/jstests/evala.js b/jstests/evala.js
deleted file mode 100644
index ed72582fbb6..00000000000
--- a/jstests/evala.js
+++ /dev/null
@@ -1,9 +0,0 @@
-
-t = db.evala;
-t.drop()
-
-t.save( { x : 5 } )
-
-assert.eq( 5 , db.eval( "function(){ return db.evala.findOne().x; }" ) , "A" );
-assert.eq( 5 , db.eval( "/* abc */function(){ return db.evala.findOne().x; }" ) , "B" );
-
diff --git a/jstests/evalb.js b/jstests/evalb.js
deleted file mode 100644
index 0caae39498b..00000000000
--- a/jstests/evalb.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Check the return value of a db.eval function running a database query, and ensure the function's
-// contents are logged in the profile log.
-
-// Use a reserved database name to avoid a conflict in the parallel test suite.
-var stddb = db;
-var db = db.getSisterDB( 'evalb' );
-
-function profileCursor() {
- return db.system.profile.find( { user:username + "@" + db.getName() } );
-}
-
-function lastOp() {
- return profileCursor().sort( { $natural:-1 } ).next();
-}
-
-try {
-
- username = 'jstests_evalb_user';
- db.createUser({user: username, pwd: 'password', roles: jsTest.basicUserRoles});
- db.auth( username, 'password' );
-
- t = db.evalb;
- t.drop();
-
- t.save( { x:3 } );
-
- assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'A' );
-
- db.setProfilingLevel( 2 );
-
- assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'B' );
-
- o = lastOp();
- assert( tojson( o ).indexOf( 'findOne().x' ) > 0, 'C : ' + tojson( o ) );
-}
-finally {
-
- db.setProfilingLevel(0);
- db = stddb;
-}
diff --git a/jstests/evalc.js b/jstests/evalc.js
deleted file mode 100644
index 0320ecd5133..00000000000
--- a/jstests/evalc.js
+++ /dev/null
@@ -1,25 +0,0 @@
-t = db.jstests_evalc;
-t.drop();
-
-t2 = db.evalc_done
-t2.drop()
-
-for( i = 0; i < 10; ++i ) {
- t.save( {i:i} );
-}
-
-// SERVER-1610
-
-assert.eq( 0 , t2.count() , "X1" )
-
-s = startParallelShell( "print( 'starting forked:' + Date() ); for ( i=0; i<50000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); " )
-
-print( "starting eval: " + Date() )
-while ( true ) {
- db.eval( "db.jstests_evalc.count( {i:10} );" );
- if ( t2.count() > 0 )
- break;
-}
-print( "end eval: " + Date() )
-
-s();
diff --git a/jstests/evald.js b/jstests/evald.js
deleted file mode 100644
index 77b1f42d52b..00000000000
--- a/jstests/evald.js
+++ /dev/null
@@ -1,98 +0,0 @@
-t = db.jstests_evald;
-t.drop();
-
-function debug( x ) {
-// printjson( x );
-}
-
-for( i = 0; i < 10; ++i ) {
- t.save( {i:i} );
-}
-db.getLastError();
-
-function op( ev, where ) {
- p = db.currentOp().inprog;
- debug( p );
- for ( var i in p ) {
- var o = p[ i ];
- if ( where ) {
- if ( o.active && o.query && o.query.query && o.query.query.$where && o.ns == "test.jstests_evald" ) {
- return o.opid;
- }
- } else {
- if ( o.active && o.query && o.query.$eval && o.query.$eval == ev ) {
- return o.opid;
- }
- }
- }
- return -1;
-}
-
-function doIt( ev, wait, where ) {
-
- if ( where ) {
- s = startParallelShell( ev );
- } else {
- s = startParallelShell( "db.eval( '" + ev + "' )" );
- }
-
- o = null;
- assert.soon( function() { o = op( ev, where ); return o != -1 } );
-
- if ( wait ) {
- sleep( 2000 );
- }
-
- debug( "going to kill" );
-
- db.killOp( o );
-
- debug( "sent kill" );
-
- s();
-
-}
-
-// nested scope with nested invoke()
-doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", true, true);
-doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", false, true);
-
-// simple tight loop tests with callback
-doIt("while(1) { sleep(1); }", false);
-doIt("while(1) { sleep(1); }", true);
-
-// simple tight loop tests without callback
-doIt("while(1) {;}", false);
-doIt("while(1) {;}", true);
-
-// the for loops are currently required, as a spawned op masks the parent op - see SERVER-1931
-doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", true);
-doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", false);
-doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", true);
-doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", false);
-
-// try/catch with tight-loop kill tests. Catch testing is important
-// due to v8::TerminateExecution internals.
-// native callback with nested invoke(), drop JS exceptions
-doIt("while(1) { " +
- " for(var i = 0; i < 10000; ++i) {;} " +
- " try { " +
- " db.jstests_evald.count({i:10}); " +
- " } catch (e) {} " +
- "}", true );
-
-// native callback, drop JS exceptions
-doIt("while(1) { " +
- " try { " +
- " while(1) { " +
- " sleep(1); " +
- " } " +
- " } catch (e) {} " +
- "}", true );
-
-// no native callback and drop JS exceptions
-doIt("while(1) { " +
- " try { " +
- " while(1) {;} " +
- " } catch (e) {} " +
- "}", true );
diff --git a/jstests/evale.js b/jstests/evale.js
deleted file mode 100644
index af5a303f167..00000000000
--- a/jstests/evale.js
+++ /dev/null
@@ -1,5 +0,0 @@
-t = db.jstests_evale;
-t.drop();
-
-db.eval( function() { return db.jstests_evale.count( { $where:function() { return true; } } ) } );
-db.eval( "db.jstests_evale.count( { $where:function() { return true; } } )" ); \ No newline at end of file
diff --git a/jstests/evalf.js b/jstests/evalf.js
deleted file mode 100644
index 01b7907ba93..00000000000
--- a/jstests/evalf.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// test that killing a parent op interrupts the child op
-
-t = db.jstests_evalf;
-t.drop();
-
-//if ( typeof _threadInject == "undefined" ) { // don't run in v8 mode - SERVER-1900
-
-// the code in eval must be under 512 chars because otherwise it's not displayed in curOp()
-try {
-db.eval( function() {
- opid = null;
- while( opid == null ) {
- ops = db.currentOp().inprog;
- for( i in ops ) {
- o = ops[ i ];
- if ( o.active && o.query && o.query.$eval ) { opid = o.opid; }
- }}
- db.jstests_evalf.save( {"opid":opid} );
- db.jstests_evalf.count( { $where:function() { var id = db.jstests_evalf.findOne().opid; db.killOp( id ); while( 1 ) { ; } } } );
- } );
-} catch (ex) {
- // exception is thrown in V8 when job gets killed. Does not seem like bad behavior.
-}
-
-// make sure server and JS still work
-db.eval( function() { db.jstests_evalf.count(); });
-//}
diff --git a/jstests/exists.js b/jstests/exists.js
deleted file mode 100644
index 3f1e904e52f..00000000000
--- a/jstests/exists.js
+++ /dev/null
@@ -1,49 +0,0 @@
-t = db.jstests_exists;
-t.drop();
-
-t.save( {} );
-t.save( {a:1} );
-t.save( {a:{b:1}} );
-t.save( {a:{b:{c:1}}} );
-t.save( {a:{b:{c:{d:null}}}} );
-
-function dotest( n ){
-
- assert.eq( 5, t.count() , n );
- assert.eq( 1, t.count( {a:null} ) , n );
- assert.eq( 2, t.count( {'a.b':null} ) , n );
- assert.eq( 3, t.count( {'a.b.c':null} ) , n );
- assert.eq( 5, t.count( {'a.b.c.d':null} ) , n );
-
- assert.eq( 5, t.count() , n );
- assert.eq( 4, t.count( {a:{$ne:null}} ) , n );
- assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n );
- assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n );
- assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n );
-
- assert.eq( 4, t.count( {a: {$exists:true}} ) , n );
- assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n );
- assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n );
- assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n );
-
- assert.eq( 1, t.count( {a: {$exists:false}} ) , n );
- assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n );
- assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n );
- assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n );
-}
-
-dotest( "before index" )
-t.ensureIndex( { "a" : 1 } )
-t.ensureIndex( { "a.b" : 1 } )
-t.ensureIndex( { "a.b.c" : 1 } )
-t.ensureIndex( { "a.b.c.d" : 1 } )
-dotest( "after index" )
-assert.eq( 1, t.find( {a: {$exists:false}} ).hint( {a:1} ).itcount() );
-
-t.drop();
-
-t.save( {r:[{s:1}]} );
-assert( t.findOne( {'r.s':{$exists:true}} ) );
-assert( !t.findOne( {'r.s':{$exists:false}} ) );
-assert( !t.findOne( {'r.t':{$exists:true}} ) );
-assert( t.findOne( {'r.t':{$exists:false}} ) );
diff --git a/jstests/exists2.js b/jstests/exists2.js
deleted file mode 100644
index e925c168f50..00000000000
--- a/jstests/exists2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.exists2;
-t.drop();
-
-t.save( { a : 1 , b : 1 } )
-t.save( { a : 1 , b : 1 , c : 1 } )
-
-assert.eq( 2 , t.find().itcount() , "A1" );
-assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "A4" );
-
-t.ensureIndex( { a : 1 , b : 1 , c : 1 } )
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "B2" );
-
diff --git a/jstests/exists3.js b/jstests/exists3.js
deleted file mode 100644
index 53a69d6c3bb..00000000000
--- a/jstests/exists3.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Check exists with non empty document, based on SERVER-2470 example.
-
-t = db.jstests_exists3;
-t.drop();
-
-t.insert({a: 1, b: 2});
-
-assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
-assert.eq( 1, t.count({c: {$exists: false}}) );
-assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
-assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
-
-// now we have an index on the sort key
-t.ensureIndex({c: -1})
-
-assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
-assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
-// still ok without the $exists
-assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
-// and ok with a convoluted $not $exists
-assert.eq( 1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount() );
diff --git a/jstests/exists4.js b/jstests/exists4.js
deleted file mode 100644
index fb801ed62e9..00000000000
--- a/jstests/exists4.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Check various exists cases, based on SERVER-1735 example.
-
-t = db.jstests_exists4;
-t.drop();
-
-t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1});
-t.insert({ date: new Date("08/27/2010"), tot_visit: 100});
-t.insert({ date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "and...@spacca.org", tot_visit: 11});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@gmail.com", tot_visit: 5});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@progloedizioni.com", tot_visit: 7});
-
-assert.eq( 6, t.find({date: new Date("08/27/2010")}).count() );
-assert.eq( 5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count() );
-assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count() );
-assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: null}).count() );
-assert.eq( 3, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}}).count() );
-assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: false}}).count() );
-assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null}).count() );
diff --git a/jstests/exists5.js b/jstests/exists5.js
deleted file mode 100644
index a90a94f908f..00000000000
--- a/jstests/exists5.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Test some $not/$exists cases.
-
-t = db.jstests_exists5;
-t.drop();
-
-t.save( {a:1} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
-assert.eq( 1, t.count( {'c.d':{$not:{$exists:true}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'c.d':{$not:{$exists:false}}} ) );
-
-t.drop();
-t.save( {a:{b:1}} );
-assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
-
-t.drop();
-t.save( {a:[1]} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
-
-t.drop();
-t.save( {a:[{b:1}]} );
-assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
diff --git a/jstests/exists6.js b/jstests/exists6.js
deleted file mode 100644
index 2fa4ba85d49..00000000000
--- a/jstests/exists6.js
+++ /dev/null
@@ -1,79 +0,0 @@
-// SERVER-393 Test indexed matching with $exists.
-
-t = db.jstests_exists6;
-t.drop();
-
-t.ensureIndex( {b:1} );
-t.save( {} );
-t.save( {b:1} );
-t.save( {b:null} );
-
-//---------------------------------
-
-function checkIndexUse( query, usesIndex, index, bounds ) {
- var x = t.find( query ).explain()
- if ( usesIndex ) {
- assert.eq( x.cursor.indexOf(index), 0 , tojson(x) );
- if ( ! x.indexBounds ) x.indexBounds = {}
- assert.eq( bounds, x.indexBounds.b , tojson(x) );
- }
- else {
- assert.eq( 'BasicCursor', x.cursor, tojson(x) );
- }
-}
-
-function checkExists( query, usesIndex, bounds ) {
- checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds );
- // Whether we use an index or not, we will always scan all docs.
- assert.eq( 3, t.find( query ).explain().nscanned );
- // 2 docs will match.
- assert.eq( 2, t.find( query ).itcount() );
-}
-
-function checkMissing( query, usesIndex, bounds ) {
- checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds );
- // Nscanned changes based on index usage.
- if ( usesIndex ) assert.eq( 2, t.find( query ).explain().nscanned );
- else assert.eq( 3, t.find( query ).explain().nscanned );
- // 1 doc is missing 'b'.
- assert.eq( 1, t.find( query ).itcount() );
-}
-
-function checkExistsCompound( query, usesIndex, bounds ) {
- checkIndexUse( query, usesIndex, 'BtreeCursor', bounds );
- if ( usesIndex ) assert.eq( 3, t.find( query ).explain().nscanned );
- else assert.eq( 3, t.find( query ).explain().nscanned );
- // 2 docs have a:1 and b:exists.
- assert.eq( 2, t.find( query ).itcount() );
-}
-
-function checkMissingCompound( query, usesIndex, bounds ) {
- checkIndexUse( query, usesIndex, 'BtreeCursor', bounds );
- // two possible indexes to use
- // 1 doc should match
- assert.eq( 1, t.find( query ).itcount() );
-}
-
-//---------------------------------
-
-var allValues = [ [ { $minElement:1 }, { $maxElement:1 } ] ];
-var nullNull = [ [ null, null ] ];
-
-// Basic cases
-checkExists( {b:{$exists:true}}, true, allValues );
-// We change this to not -> not -> exists:true, and get allValue for bounds
-// but we use a BasicCursor?
-checkExists( {b:{$not:{$exists:false}}}, false, allValues );
-checkMissing( {b:{$exists:false}}, true, nullNull );
-checkMissing( {b:{$not:{$exists:true}}}, true, nullNull );
-
-// Now check existence of second compound field.
-t.ensureIndex( {a:1,b:1} );
-t.save( {a:1} );
-t.save( {a:1,b:1} );
-t.save( {a:1,b:null} );
-
-checkExistsCompound( {a:1,b:{$exists:true}}, true, allValues );
-checkExistsCompound( {a:1,b:{$not:{$exists:false}}}, true, allValues );
-checkMissingCompound( {a:1,b:{$exists:false}}, true, nullNull );
-checkMissingCompound( {a:1,b:{$not:{$exists:true}}}, true, nullNull );
diff --git a/jstests/exists7.js b/jstests/exists7.js
deleted file mode 100644
index 91fd589f30d..00000000000
--- a/jstests/exists7.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-// Test that non boolean value types are allowed with $explain spec. SERVER-2322
-
-t = db.jstests_explain7;
-t.drop();
-
-function testIntegerExistsSpec() {
- t.remove({});
- t.save( {} );
- t.save( {a:1} );
- t.save( {a:2} );
- t.save( {a:3, b:3} );
- t.save( {a:4, b:4} );
-
- assert.eq( 2, t.count( {b:{$exists:1}} ) );
- assert.eq( 3, t.count( {b:{$exists:0}} ) );
-}
-
-testIntegerExistsSpec();
-t.ensureIndex( {b:1} );
-testIntegerExistsSpec();
diff --git a/jstests/exists8.js b/jstests/exists8.js
deleted file mode 100644
index ca62ebeb9ab..00000000000
--- a/jstests/exists8.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// Test $exists with array element field names SERVER-2897
-
-t = db.jstests_exists8;
-t.drop();
-
-t.save( {a:[1]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
-
-t.remove({});
-t.save( {a:[1,2]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
-
-t.remove({});
-t.save( {a:[{}]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
-
-t.remove({});
-t.save( {a:[{},{}]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
-
-t.remove({});
-t.save( {a:[{'b':2},{'a':1}]} );
-assert.eq( 1, t.count( {'a.a':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.a':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.a':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[1]]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.0.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0.0.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.0.0':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[[1]]]} );
-assert.eq( 1, t.count( {'a.0.0.0':{$exists:true}} ) );
-
-t.remove({});
-t.save( {a:[[{b:1}]]} );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.0.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.0.b':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 0, t.count( {'a.0.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.b':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 1, t.count( {'a.1.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1.b':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 1, t.count( {'a.1.0.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1.0.b':{$exists:false}} ) );
-
-t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 0, t.count( {'a.1.1.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.1.b':{$exists:false}} ) );
diff --git a/jstests/exists9.js b/jstests/exists9.js
deleted file mode 100644
index 66378d1b424..00000000000
--- a/jstests/exists9.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// SERVER-393 Test exists with various empty array and empty object cases.
-
-t = db.jstests_exists9;
-t.drop();
-
-// Check existence of missing nested field.
-t.save( {a:{}} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-
-// With index.
-t.ensureIndex( {'a.b':1} );
-assert.eq( 1, t.find( {'a.b':{$exists:false}} ).hint( {'a.b':1} ).itcount() );
-assert.eq( 0, t.find( {'a.b':{$exists:true}} ).hint( {'a.b':1} ).itcount() );
-
-t.drop();
-
-// Check that an empty array 'exists'.
-t.save( {} );
-t.save( {a:[]} );
-assert.eq( 1, t.count( {a:{$exists:true}} ) );
-assert.eq( 1, t.count( {a:{$exists:false}} ) );
-
-// With index.
-t.ensureIndex( {a:1} );
-assert.eq( 1, t.find( {a:{$exists:true}} ).hint( {a:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).explain().nscanned );
-
-t.drop();
-
-// Check that an indexed field within an empty array does not exist.
-t.save( {a:{'0':1}} );
-t.save( {a:[]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0':{$exists:false}} ) );
-
-// With index.
-t.ensureIndex( {'a.0':1} );
-assert.eq( 1, t.find( {'a.0':{$exists:true}} ).hint( {'a.0':1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{$exists:false}} ).hint( {'a.0':1} ).itcount() );
diff --git a/jstests/existsa.js b/jstests/existsa.js
deleted file mode 100644
index 9ef7e9f374c..00000000000
--- a/jstests/existsa.js
+++ /dev/null
@@ -1,114 +0,0 @@
-// Sparse indexes are disallowed for $exists:false queries. SERVER-3918
-
-t = db.jstests_existsa;
-t.drop();
-
-t.save( {} );
-t.save( { a:1 } );
-t.save( { a:{ x:1 }, b:1 } );
-
-/** Configure testing of an index { <indexKeyField>:1 }. */
-function setIndex( _indexKeyField ) {
- indexKeyField = _indexKeyField;
- indexKeySpec = {};
- indexKeySpec[ indexKeyField ] = 1;
- t.ensureIndex( indexKeySpec, { sparse:true } );
- indexCursorName = 'BtreeCursor ' + indexKeyField + '_1';
-}
-setIndex( 'a' );
-
-/** Validate the prefix of 'str'. */
-function assertPrefix( prefix, str ) {
- assert.eq( prefix, str.substring( 0, prefix.length ) );
-}
-
-/** @return count when hinting the index to use. */
-function hintedCount( query ) {
- assertPrefix( indexCursorName, t.find( query ).hint( indexKeySpec ).explain().cursor );
- return t.find( query ).hint( indexKeySpec ).itcount();
-}
-
-/** The query field does not exist and the sparse index is not used without a hint. */
-function assertMissing( query, expectedMissing, expectedIndexedMissing ) {
- expectedMissing = expectedMissing || 1;
- expectedIndexedMissing = expectedIndexedMissing || 0;
- assert.eq( expectedMissing, t.count( query ) );
- assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
- // We also shouldn't get a different count depending on whether
- // an index is used or not.
- assert.eq( expectedIndexedMissing, hintedCount( query ) );
-}
-
-/** The query field exists and the sparse index is used without a hint. */
-function assertExists( query, expectedExists ) {
- expectedExists = expectedExists || 2;
- assert.eq( expectedExists, t.count( query ) );
- assert.eq( 0, t.find( query ).explain().cursor.indexOf('BtreeCursor') );
- // An $exists:true predicate generates no index filters. Add another predicate on the index key
- // to trigger use of the index.
- andClause = {}
- andClause[ indexKeyField ] = { $ne:null };
- Object.extend( query, { $and:[ andClause ] } );
- assert.eq( expectedExists, t.count( query ) );
- assertPrefix( indexCursorName, t.find( query ).explain().cursor );
- assert.eq( expectedExists, hintedCount( query ) );
-}
-
-/** The query field exists and the sparse index is not used without a hint. */
-function assertExistsUnindexed( query, expectedExists ) {
- expectedExists = expectedExists || 2;
- assert.eq( expectedExists, t.count( query ) );
- assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
- // Even with another predicate on the index key, the sparse index is disallowed.
- andClause = {}
- andClause[ indexKeyField ] = { $ne:null };
- Object.extend( query, { $and:[ andClause ] } );
- assert.eq( expectedExists, t.count( query ) );
- assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
- assert.eq( expectedExists, hintedCount( query ) );
-}
-
-// $exists:false queries match the proper number of documents and disallow the sparse index.
-assertMissing( { a:{ $exists:false } } );
-assertMissing( { a:{ $not:{ $exists:true } } } );
-assertMissing( { $and:[ { a:{ $exists:false } } ] } );
-assertMissing( { $or:[ { a:{ $exists:false } } ] } );
-assertMissing( { $nor:[ { a:{ $exists:true } } ] } );
-assertMissing( { 'a.x':{ $exists:false } }, 2, 1 );
-
-// Currently a sparse index is disallowed even if the $exists:false query is on a different field.
-assertMissing( { b:{ $exists:false } }, 2, 1 );
-assertMissing( { b:{ $exists:false }, a:{ $ne:6 } }, 2, 1 );
-assertMissing( { b:{ $not:{ $exists:true } } }, 2, 1 );
-
-// Top level $exists:true queries match the proper number of documents
-// and use the sparse index on { a : 1 }.
-assertExists( { a:{ $exists:true } } );
-
-// Nested $exists queries match the proper number of documents and disallow the sparse index.
-assertExistsUnindexed( { $nor:[ { a:{ $exists:false } } ] } );
-assertExistsUnindexed( { $nor:[ { 'a.x':{ $exists:false } } ] }, 1 );
-assertExistsUnindexed( { a:{ $not:{ $exists:false } } } );
-
-// Nested $exists queries disallow the sparse index in some cases where it is not strictly
-// necessary to do so. (Descriptive tests.)
-assertExistsUnindexed( { $nor:[ { b:{ $exists:false } } ] }, 1 ); // Unindexed field.
-assertExists( { $or:[ { a:{ $exists:true } } ] } ); // $exists:true not $exists:false.
-
-// Behavior is similar with $elemMatch.
-t.drop();
-t.save( { a:[ {} ] } );
-t.save( { a:[ { b:1 } ] } );
-t.save( { a:[ { b:1 } ] } );
-setIndex( 'a.b' );
-
-assertMissing( { a:{ $elemMatch:{ b:{ $exists:false } } } } );
-// A $elemMatch predicate is treated as nested, and the index should be used for $exists:true.
-assertExists( { a:{ $elemMatch:{ b:{ $exists:true } } } } );
-
-// A non sparse index will not be disallowed.
-t.drop();
-t.save( {} );
-t.ensureIndex( { a:1 } );
-assert.eq( 1, t.find( { a:{ $exists:false } } ).itcount() );
-assert.eq( 'BtreeCursor a_1', t.find( { a:{ $exists:false } } ).explain().cursor );
diff --git a/jstests/existsb.js b/jstests/existsb.js
deleted file mode 100644
index a212be145c0..00000000000
--- a/jstests/existsb.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// Tests for $exists against documents that store a null value
-//
-// A document with a missing value for an indexed field
-// is indexed *as if* it had the value 'null' explicitly.
-// Therefore:
-// { b : 1 }
-// { a : null, b : 1 }
-// look identical based on a standard index on { a : 1 }.
-//
-// -- HOWEVER!! --
-// A sparse index on { a : 1 } would include { a : null, b : 1 },
-// but would not include { b : 1 }. In this case, the two documents
-// are treated equally.
-//
-// Also, super special edge case around sparse, compound indexes
-// from Mathias:
-// If we have a sparse index on { a : 1, b : 1 }
-// And we insert docs {}, { a : 1 },
-// { b : 1 }, and { a : 1, b : 1 }
-// everything but {} will have an index entry.
-// Let's make sure we handle this properly!
-
-t = db.jstests_existsb;
-t.drop();
-
-t.save( {} );
-t.save( { a: 1 } );
-t.save( { b: 1 } );
-t.save( { a: 1, b: null } );
-t.save( { a: 1, b: 1 } );
-
-/** run a series of checks, just on the number of docs found */
-function checkExistsNull() {
- // Basic cases
- assert.eq( 3, t.count({ a:{ $exists: true }}) );
- assert.eq( 2, t.count({ a:{ $exists: false }}) );
- assert.eq( 3, t.count({ b:{ $exists: true }}) );
- assert.eq( 2, t.count({ b:{ $exists: false }}) );
- // With negations
- assert.eq( 3, t.count({ a:{ $not:{ $exists: false }}}) );
- assert.eq( 2, t.count({ a:{ $not:{ $exists: true }}}) );
- assert.eq( 3, t.count({ b:{ $not:{ $exists: false }}}) );
- assert.eq( 2, t.count({ b:{ $not:{ $exists: true }}}) );
- // Both fields
- assert.eq( 2, t.count({ a:1, b: { $exists: true }}) );
- assert.eq( 1, t.count({ a:1, b: { $exists: false }}) );
- assert.eq( 1, t.count({ a:{ $exists: true }, b:1}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:1}) );
- // Both fields, both $exists
- assert.eq( 2, t.count({ a:{ $exists: true }, b:{ $exists: true }}) );
- assert.eq( 1, t.count({ a:{ $exists: true }, b:{ $exists: false }}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: true }}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: false }}) );
-}
-
-// with no index, make sure we get correct results
-checkExistsNull();
-
-// try with a standard index
-t.ensureIndex({ a : 1 });
-checkExistsNull();
-
-// try with a sparse index
-t.dropIndexes();
-t.ensureIndex({ a : 1 }, { sparse:true });
-checkExistsNull();
-
-// try with a compound index
-t.dropIndexes();
-t.ensureIndex({ a : 1, b : 1 });
-checkExistsNull();
-
-// try with sparse compound index
-t.dropIndexes();
-t.ensureIndex({ a : 1, b : 1 }, { sparse:true });
-checkExistsNull();
diff --git a/jstests/explain1.js b/jstests/explain1.js
deleted file mode 100644
index 4c92b102e38..00000000000
--- a/jstests/explain1.js
+++ /dev/null
@@ -1,48 +0,0 @@
-
-t = db.explain1;
-t.drop();
-
-for ( var i=0; i<100; i++ ){
- t.save( { x : i } );
-}
-
-q = { x : { $gt : 50 } };
-
-assert.eq( 49 , t.find( q ).count() , "A" );
-assert.eq( 49 , t.find( q ).itcount() , "B" );
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" );
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 49 , t.find( q ).count() , "D" );
-assert.eq( 49 , t.find( q ).itcount() , "E" );
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" );
-
-assert.eq( 49 , t.find(q).explain().n , "G" );
-assert.eq( 20 , t.find(q).limit(20).explain().n , "H" );
-assert.eq( 20 , t.find(q).limit(-20).explain().n , "I" );
-assert.eq( 49 , t.find(q).batchSize(20).explain().n , "J" );
-
-// verbose explain output with stats
-// display index bounds
-
-var explainGt = t.find({x: {$gt: 5}}).explain(true);
-var boundsVerboseGt = explainGt.stats.children[0].boundsVerbose;
-
-print('explain stats for $gt = ' + tojson(explainGt.stats));
-
-var explainGte = t.find({x: {$gte: 5}}).explain(true);
-var boundsVerboseGte = explainGte.stats.children[0].boundsVerbose;
-
-print('explain stats for $gte = ' + tojson(explainGte.stats));
-
-print('index bounds for $gt = ' + tojson(explainGt.indexBounds));
-print('index bounds for $gte = ' + tojson(explainGte.indexBounds));
-
-print('verbose bounds for $gt = ' + tojson(boundsVerboseGt));
-print('verbose bounds for $gte = ' + tojson(boundsVerboseGte));
-
-// Since the verbose bounds are opaque, all we try to confirm is that the
-// verbose bounds for $gt is different from those generated for $gte.
-assert.neq(boundsVerboseGt, boundsVerboseGte,
- 'verbose bounds for $gt and $gte should not be the same');
diff --git a/jstests/explain2.js b/jstests/explain2.js
deleted file mode 100644
index b70ffdc0b1e..00000000000
--- a/jstests/explain2.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-t = db.explain2
-t.drop();
-
-t.ensureIndex( { a : 1 , b : 1 } );
-
-for ( i=1; i<10; i++ ){
- t.insert( { _id : i , a : i , b : i , c : i } );
-}
-
-function go( q , c , b , o ){
- var e = t.find( q ).hint( {a:1,b:1} ).explain();
- assert.eq( c , e.n , "count " + tojson( q ) )
- assert.eq( b , e.nscanned , "nscanned " + tojson( q ) )
- assert.eq( o , e.nscannedObjects , "nscannedObjects " + tojson( q ) )
-}
-
-q = { a : { $gt : 3 } }
-go( q , 6 , 6 , 6 );
-
-q.b = 5
-go( q , 1 , 6 , 1 );
-
-delete q.b
-q.c = 5
-go( q , 1 , 6 , 6 );
-
diff --git a/jstests/explain3.js b/jstests/explain3.js
deleted file mode 100644
index 69dcac531b9..00000000000
--- a/jstests/explain3.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/** SERVER-2451 Kill cursor while explain is yielding */
-
-t = db.jstests_explain3;
-t.drop();
-
-t.ensureIndex( {i:1} );
-for( var i = 0; i < 10000; ++i ) {
- t.save( {i:i,j:0} );
-}
-db.getLastError();
-
-s = startParallelShell( "sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );" );
-
-try {
- t.find( {i:{$gt:-1},j:1} ).hint( {i:1} ).explain()
-} catch (e) {
- print( "got exception" );
- printjson( e );
-}
-
-s();
-
-// Sanity check to make sure mongod didn't seg fault.
-assert.eq( 10000, t.count() ); \ No newline at end of file
diff --git a/jstests/explain4.js b/jstests/explain4.js
deleted file mode 100644
index d6d3d818a72..00000000000
--- a/jstests/explain4.js
+++ /dev/null
@@ -1,68 +0,0 @@
-// Basic validation of explain output fields.
-
-t = db.jstests_explain4;
-t.drop();
-
-function checkField( explain, name, value ) {
- assert( explain.hasOwnProperty( name ) );
- if ( value != null ) {
- assert.eq( value, explain[ name ], name );
- // Check that the value is of the expected type. SERVER-5288
- assert.eq( typeof( value ), typeof( explain[ name ] ), 'type ' + name );
- }
-}
-
-function checkNonCursorPlanFields( explain, matches, n ) {
- checkField( explain, "n", n );
- checkField( explain, "nscannedObjects", matches );
- checkField( explain, "nscanned", matches );
-}
-
-function checkPlanFields( explain, matches, n ) {
- checkField( explain, "cursor", "BasicCursor" );
- // index related fields do not appear in non-indexed plan
- assert(!("indexBounds" in explain));
- checkNonCursorPlanFields( explain, matches, n );
-}
-
-function checkFields( matches, sort, limit ) {
- cursor = t.find();
- if ( sort ) {
- print("sort is {a:1}");
- cursor.sort({a:1});
- }
- if ( limit ) {
- print("limit = " + limit);
- cursor.limit( limit );
- }
- explain = cursor.explain( true );
- printjson( explain );
- checkPlanFields( explain, matches, matches > 0 ? 1 : 0 );
- checkField( explain, "scanAndOrder", sort );
- checkField( explain, "millis" );
- checkField( explain, "nYields" );
- checkField( explain, "nChunkSkips", 0 );
- checkField( explain, "isMultiKey", false );
- checkField( explain, "indexOnly", false );
- checkField( explain, "server" );
- checkField( explain, "allPlans" );
- explain.allPlans.forEach( function( x ) { checkPlanFields( x, matches, matches ); } );
-}
-
-checkFields( 0, false );
-
-// If there's nothing in the collection, there's no point in verifying that a sort
-// is done.
-// checkFields( 0, true );
-
-t.save( {} );
-checkFields( 1, false );
-checkFields( 1, true );
-
-t.save( {} );
-checkFields( 1, false, 1 );
-
-// Check basic fields with multiple clauses.
-t.save( { _id:0 } );
-explain = t.find( { $or:[ { _id:0 }, { _id:1 } ] } ).explain( true );
-checkNonCursorPlanFields( explain, 1, 1 );
diff --git a/jstests/explain5.js b/jstests/explain5.js
deleted file mode 100644
index a90f0726317..00000000000
--- a/jstests/explain5.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Check that the explain result count does proper deduping.
-
-t = db.jstests_explain5;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-t.save( {a:[1,2,3],b:[4,5,6]} );
-for( i = 0; i < 10; ++i ) {
- t.save( {} );
-}
-
-// Check with a single in order plan.
-
-explain = t.find( {a:{$gt:0}} ).explain( true );
-assert.eq( 1, explain.n );
-assert.eq( 1, explain.allPlans[ 0 ].n );
-
-// Check with a single out of order plan.
-
-explain = t.find( {a:{$gt:0}} ).sort( {z:1} ).hint( {a:1} ).explain( true );
-assert.eq( 1, explain.n );
-assert.eq( 1, explain.allPlans[ 0 ].n );
-
-// Check with multiple plans.
-
-explain = t.find( {a:{$gt:0},b:{$gt:0}} ).explain( true );
-assert.eq( 1, explain.n );
-assert.eq( 1, explain.allPlans[ 0 ].n );
-assert.eq( 1, explain.allPlans[ 1 ].n );
-
-explain = t.find( {$or:[{a:{$gt:0},b:{$gt:0}},{a:{$gt:-1},b:{$gt:-1}}]} ).explain( true );
-assert.eq( 1, explain.n );
-// Check 'n' for every alternative query plan.
-for (var i = 0; i < explain.allPlans.length; ++i) {
- assert.eq( 1, explain.allPlans[i].n );
-}
diff --git a/jstests/explain6.js b/jstests/explain6.js
deleted file mode 100644
index 47d8d2fd731..00000000000
--- a/jstests/explain6.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Test explain result count when a skip parameter is used.
-
-t = db.jstests_explain6;
-t.drop();
-
-t.save( {} );
-explain = t.find().skip( 1 ).explain( true );
-assert.eq( 0, explain.n );
-// With only one plan, the skip information is known for the plan. This is an arbitrary
-// implementation detail, but it changes the way n is calculated.
-assert.eq( 0, explain.allPlans[ 0 ].n );
-
-t.ensureIndex( {a:1} );
-explain = t.find( {a:null,b:null} ).skip( 1 ).explain( true );
-assert.eq( 0, explain.n );
-
-printjson( explain );
-assert.eq( 0, explain.allPlans[ 0 ].n );
-
-t.dropIndexes();
-explain = t.find().skip( 1 ).sort({a:1}).explain( true );
-// Skip is applied for an in memory sort.
-assert.eq( 0, explain.n );
-printjson(explain);
-assert.eq( 0, explain.allPlans[ 0 ].n );
diff --git a/jstests/explain7.js b/jstests/explain7.js
deleted file mode 100644
index f2850e56bea..00000000000
--- a/jstests/explain7.js
+++ /dev/null
@@ -1,193 +0,0 @@
-// Test cases for explain()'s nscannedObjects. SERVER-4161
-
-t = db.jstests_explain7;
-t.drop();
-
-t.save( { a:1 } );
-t.ensureIndex( { a:1 } );
-
-function assertExplain( expected, explain, checkAllPlans ) {
- for( field in expected ) {
- assert.eq( expected[ field ], explain[ field ], field );
- }
- if ( checkAllPlans && explain.allPlans && explain.allPlans.length == 1 ) {
- for( field in expected ) {
- assert.eq( expected[ field ], explain.allPlans[ 0 ][ field ], field );
- }
- }
- return explain;
-}
-
-function assertHintedExplain( expected, cursor ) {
- return assertExplain( expected, cursor.hint( { a:1 } ).explain( true ), true );
-}
-
-function assertUnhintedExplain( expected, cursor, checkAllPlans ) {
- return assertExplain( expected, cursor.explain( true ), checkAllPlans );
-}
-
-// Standard query.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
- t.find( { a:1 } ) );
-
-// Covered index query.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 /* no object loaded */ },
- t.find( { a:1 }, { _id:0, a:1 } ) );
-
-// Covered index query, but matching requires loading document.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
- t.find( { a:1, b:null }, { _id:0, a:1 } ) );
-
-// $returnKey query.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 },
- t.find( { a:1 } )._addSpecial( "$returnKey", true ) );
-
-// $returnKey query but matching requires loading document.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
- t.find( { a:1, b:null } )._addSpecial( "$returnKey", true ) );
-
-// Skip a result.
-assertHintedExplain( { n:0, nscanned:1, nscannedObjects:1 },
- t.find( { a:1 } ).skip( 1 ) );
-
-// Cursor sorted covered index query.
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0, scanAndOrder:false },
- t.find( { a:1 }, { _id:0, a:1 } ).sort( { a:1 } ) );
-
-t.dropIndex( { a:1 } );
-t.ensureIndex( { a:1, b:1 } );
-
-// In memory sort covered index query.
-assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1, scanAndOrder:true },
- t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } )
- .hint( { a:1, b:1 } ) );
-
-// In memory sort $returnKey query.
-assertUnhintedExplain( { n:1, nscanned:1, scanAndOrder:true },
- t.find( { a:{ $gt:0 } } )._addSpecial( "$returnKey", true ).sort( { b:1 } )
- .hint( { a:1, b:1 } ) );
-
-// In memory sort with skip.
-assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 /* The record is still loaded. */ },
- t.find( { a:{ $gt:0 } } ).sort( { b:1 } ).skip( 1 ).hint( { a:1, b:1 } ),
- false );
-
-// With a multikey index.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:[ 1, 2 ] } );
-
-assertHintedExplain( { n:1, scanAndOrder:false },
- t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ) );
-assertHintedExplain( { n:1, scanAndOrder:true },
- t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) );
-
-// Dedup matches from multiple query plans.
-t.drop();
-t.ensureIndex( { a:1, b:1 } );
-t.ensureIndex( { b:1, a:1 } );
-t.save( { a:1, b:1 } );
-
-// Document matched by three query plans.
-assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
- t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ) );
-
-// Document matched by three query plans, with sorting.
-assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
- t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).sort( { c:1 } ) );
-
-// Document matched by three query plans, with a skip.
-assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 },
- t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).skip( 1 ) );
-
-// Hybrid ordered and unordered plans.
-
-t.drop();
-t.ensureIndex( { a:1, b:1 } );
-t.ensureIndex( { b:1 } );
-for( i = 0; i < 30; ++i ) {
- t.save( { a:i, b:i } );
-}
-
-// Ordered plan chosen.
-assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:30,
- scanAndOrder:false },
- t.find( { b:{ $gte:0 } } ).sort( { a:1 } ) );
-
-// SERVER-12769: When an index is used to provide a sort, our covering
-// analysis isn't good. This could execute as a covered query, but currently
-// does not.
-/*
-// Ordered plan chosen with a covered index.
-//assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:0,
- //scanAndOrder:false },
- //t.find( { b:{ $gte:0 } }, { _id:0, b:1 } ).sort( { a:1 } ) );
-*/
-
-// Ordered plan chosen, with a skip. Skip is not included in counting nscannedObjects for a single
-// plan.
-assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:29, nscanned:30, nscannedObjects:30,
- scanAndOrder:false },
- t.find( { b:{ $gte:0 } } ).sort( { a:1 } ).skip( 1 ) );
-
-// Unordered plan chosen.
-assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1,
- //nscannedObjects:1, nscannedObjectsAllPlans:2,
- scanAndOrder:true },
- t.find( { b:1 } ).sort( { a:1 } ) );
-
-// Unordered plan chosen and projected.
-assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1,
- scanAndOrder:true },
- t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) );
-
-// Unordered plan chosen, with a skip.
-// Note that all plans are equally unproductive here, so we can't test which one is picked reliably.
-assertUnhintedExplain( { n:0 },
- t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ).skip( 1 ) );
-
-// Unordered plan chosen, $returnKey specified.
-assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, scanAndOrder:true },
- t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } )
- ._addSpecial( "$returnKey", true ) );
-
-// Unordered plan chosen, $returnKey specified, matching requires loading document.
-assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1,
- scanAndOrder:true },
- t.find( { b:1, c:null }, { _id:0, b:1 } ).sort( { a:1 } )
- ._addSpecial( "$returnKey", true ) );
-
-t.ensureIndex( { a:1, b:1, c:1 } );
-
-// Documents matched by four query plans.
-assertUnhintedExplain( { n:30, nscanned:30, nscannedObjects:30,
- //nscannedObjectsAllPlans:90 // Not 120 because deduping occurs before
- // loading results.
- },
- t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).sort( { b:1 } ) );
-
-for( i = 30; i < 150; ++i ) {
- t.save( { a:i, b:i } );
-}
-
-// Non-covered $or query.
-explain = assertUnhintedExplain( { n:150, nscannedObjects:300 },
- t.find( { $or:[ { a:{ $gte:-1, $lte:200 },
- b:{ $gte:0, $lte:201 } },
- { a:{ $gte:0, $lte:201 },
- b:{ $gte:-1, $lte:200 } } ] },
- { _id:1, a:1, b:1 } ).hint( { a:1, b:1 } ) );
-printjson(explain);
-assert.eq( 150, explain.clauses[ 0 ].nscannedObjects );
-assert.eq( 150, explain.clauses[ 1 ].nscannedObjects );
-
-// Covered $or query.
-explain = assertUnhintedExplain( { n:150, nscannedObjects:0 },
- t.find( { $or:[ { a:{ $gte:-1, $lte:200 },
- b:{ $gte:0, $lte:201 } },
- { a:{ $gte:0, $lte:201 },
- b:{ $gte:-1, $lte:200 } } ] },
- { _id:0, a:1, b:1 } ).hint( { a:1, b:1 } ) );
-printjson(explain);
-assert.eq( 0, explain.clauses[ 0 ].nscannedObjects );
-assert.eq( 0, explain.clauses[ 1 ].nscannedObjects );
diff --git a/jstests/explain8.js b/jstests/explain8.js
deleted file mode 100644
index fde6adbd8f4..00000000000
--- a/jstests/explain8.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Test calculation of the 'millis' field in explain output.
-
-t = db.jstests_explain8;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-for( i = 1000; i < 4000; i += 1000 ) {
- t.save( { a:i } );
-}
-
-// Run a query with one $or clause per a-value, each of which sleeps for 'a' milliseconds.
-function slow() {
- sleep( this.a );
- return true;
-}
-clauses = [];
-for( i = 1000; i < 4000; i += 1000 ) {
- clauses.push( { a:i, $where:slow } );
-}
-explain = t.find( { $or:clauses } ).explain( true );
-//printjson( explain );
-
-// Verify the duration of the whole query, and of each clause.
-assert.gt( explain.millis, 1000 - 500 + 2000 - 500 + 3000 - 500 );
diff --git a/jstests/explain9.js b/jstests/explain9.js
deleted file mode 100644
index 80cab856aa7..00000000000
--- a/jstests/explain9.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Test that limit is applied by explain when there are both in order and out of order candidate
-// plans. SERVER-4150
-
-t = db.jstests_explain9;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-
-for( i = 0; i < 10; ++i ) {
- t.save( { a:i, b:0 } );
-}
-
-explain = t.find( { a:{ $gte:0 }, b:0 } ).sort( { a:1 } ).limit( 5 ).explain( true );
-// Five results are expected, matching the limit spec.
-assert.eq( 5, explain.n );
-explain.allPlans.forEach( function( x ) {
- // Five results are expected for the in order plan.
- if ( x.cursor == "BtreeCursor a_1" ) {
- assert.eq( 5, x.n );
- }
- else {
- assert.gte( 5, x.n );
- }
- } );
diff --git a/jstests/explain_batch_size.js b/jstests/explain_batch_size.js
deleted file mode 100644
index 65bc1df40d7..00000000000
--- a/jstests/explain_batch_size.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// minimal test to check handling of batch size when explain info is requested
-// expected behavior is to return explain.n = total number of documents matching query
-// batch size is also tested in another smoke test jstest/explain1.js but that test
-// also covers the use of an indexed collection and includes a couple of test cases
-// using limit()
-
-t = db.explain_batch_size;
-t.drop();
-
-n = 3
-for (i=0; i<n; i++) {
- t.save( { x : i } );
-}
-
-q = {};
-
-assert.eq( n , t.find( q ).count() , "A" );
-assert.eq( n , t.find( q ).itcount() , "B" );
-assert.eq( n , t.find( q ).batchSize(1).explain().n , "C" );
diff --git a/jstests/explaina.js b/jstests/explaina.js
deleted file mode 100644
index 65be1f7bc27..00000000000
--- a/jstests/explaina.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Check explain results when an in order plan is selected among mixed in order and out of order
-// plans.
-
-t = db.jstests_explaina;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-
-for( i = 0; i < 1000; ++i ) {
- t.save( { a:i, b:i%3 } );
-}
-
-// Query with an initial set of documents.
-explain1 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } ).hint( { a:1 } ).explain();
-printjson(explain1);
-assert.eq( 333, explain1.n, 'wrong n for explain1' );
-assert.eq( 1000, explain1.nscanned, 'wrong nscanned for explain1' );
-
-for( i = 1000; i < 2000; ++i ) {
- t.save( { a:i, b:i%3 } );
-}
-
-// Query with some additional documents.
-explain2 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } ).hint ( { a:1 } ).explain();
-printjson(explain2);
-assert.eq( 666, explain2.n, 'wrong n for explain2' );
-assert.eq( 2000, explain2.nscanned, 'wrong nscanned for explain2' );
diff --git a/jstests/explainb.js b/jstests/explainb.js
deleted file mode 100644
index ab49a38ca72..00000000000
--- a/jstests/explainb.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// nscanned and nscannedObjects report results for the winning plan; nscannedAllPlans and
-// nscannedObjectsAllPlans report results for all plans. SERVER-6268
-//
-// This file tests the output of .explain.
-
-t = db.jstests_explainb;
-t.drop();
-
-t.ensureIndex( { a:1, b:1 } );
-t.ensureIndex( { b:1, a:1 } );
-
-t.save( { a:0, b:1 } );
-t.save( { a:1, b:0 } );
-
-explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).explain( true );
-
-// We don't check explain.cursor because all plans perform the same.
-assert.eq( 2, explain.n );
-// nscanned and nscannedObjects are reported.
-assert.eq( 2, explain.nscanned );
-assert.eq( 2, explain.nscannedObjects );
-
-// A limit of 2.
-explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).limit( -2 ).explain( true );
-assert.eq( 2, explain.n );
-
-// A $or query.
-explain = t.find( { $or:[ { a:{ $gte:0 }, b:{ $gte:1 } },
- { a:{ $gte:1 }, b:{ $gte:0 } } ] } ).explain( true );
-// One result from the first $or clause
-assert.eq( 1, explain.clauses[ 0 ].n );
-// But 2 total.
-assert.eq( 2, explain.n );
-
-// These are computed by summing the values for each clause.
-printjson(explain);
-assert.eq( 2, explain.n );
-
-// A non $or case where nscanned != number of results
-t.remove({});
-
-t.save( { a:'0', b:'1' } );
-t.save( { a:'1', b:'0' } );
-explain = t.find( { a:/0/, b:/1/ } ).explain( true );
-assert.eq( 1, explain.n );
-assert.eq( 2, explain.nscanned );
diff --git a/jstests/extent.js b/jstests/extent.js
deleted file mode 100644
index 47ae868606a..00000000000
--- a/jstests/extent.js
+++ /dev/null
@@ -1,11 +0,0 @@
-t = db.reclaimExtentsTest;
-t.drop();
-
-for ( var i=0; i<50; i++ ) { // enough iterations to break 32 bit.
- db.createCollection('reclaimExtentsTest', { size : 100000000 });
- t.insert({x:1});
- assert( t.count() == 1 );
- t.drop();
-}
-t.drop();
-
diff --git a/jstests/extent2.js b/jstests/extent2.js
deleted file mode 100644
index 75bf0d0b1b8..00000000000
--- a/jstests/extent2.js
+++ /dev/null
@@ -1,34 +0,0 @@
-
-
-mydb = db.getSisterDB( "test_extent2" );
-mydb.dropDatabase();
-
-t = mydb.foo;
-
-function insert(){
- t.insert( { _id : 1 , x : 1 } )
- t.insert( { _id : 2 , x : 1 } )
- t.insert( { _id : 3 , x : 1 } )
- t.ensureIndex( { x : 1 } );
-}
-
-insert();
-t.drop();
-
-start = mydb.stats();
-
-for ( i=0; i<100; i++ ) {
- insert();
- t.drop();
-}
-
-end = mydb.stats();
-
-printjson( start );
-printjson( end )
-assert.eq( start.extentFreeList.num, end.extentFreeList.num );
-
-// 3: 1 data, 1 _id idx, 1 x idx
-// used to be 4, but we no longer waste an extent for the freelist
-assert.eq( 3, start.extentFreeList.num );
-assert.eq( 3, end.extentFreeList.num );
diff --git a/jstests/filemd5.js b/jstests/filemd5.js
deleted file mode 100644
index 41d03a1bb30..00000000000
--- a/jstests/filemd5.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-db.fs.chunks.drop();
-db.fs.chunks.insert({files_id:1,n:0,data:new BinData(0,"test")})
-
-x = db.runCommand({"filemd5":1,"root":"fs"});
-assert( ! x.ok , tojson(x) )
-
-db.fs.chunks.ensureIndex({files_id:1,n:1})
-x = db.runCommand({"filemd5":1,"root":"fs"});
-assert( x.ok , tojson(x) )
-
diff --git a/jstests/find1.js b/jstests/find1.js
deleted file mode 100644
index ed79c3dd2c7..00000000000
--- a/jstests/find1.js
+++ /dev/null
@@ -1,54 +0,0 @@
-t = db.find1;
-t.drop();
-
-lookAtDocumentMetrics = false;
-
-// QUERY MIGRATION
-// New system is still not connected to server status
-if ( db.serverStatus().metrics ) {
- // var ss = db.serverStatus();
- // lookAtDocumentMetrics = ss.metrics.document != null && ss.metrics.queryExecutor.scanned != null;
-}
-
-print( "lookAtDocumentMetrics: " + lookAtDocumentMetrics );
-
-if ( lookAtDocumentMetrics ) {
- // ignore mongos
- nscannedStart = db.serverStatus().metrics.queryExecutor.scanned
-}
-
-
-t.save( { a : 1 , b : "hi" } );
-t.save( { a : 2 , b : "hi" } );
-
-/* very basic test of $snapshot just that we get some result */
-// we are assumign here that snapshot uses the id index; maybe one day it doesn't if so this would need to change then
-assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 1" );
-var q = t.findOne();
-q.c = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
-t.save(q); // will move a:1 object to after a:2 in the file
-assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 2" );
-
-assert( t.findOne( { a : 1 } ).b != null , "A" );
-assert( t.findOne( { a : 1 } , { a : 1 } ).b == null , "B");
-
-assert( t.find( { a : 1 } )[0].b != null , "C" );
-assert( t.find( { a : 1 } , { a : 1 } )[0].b == null , "D" );
-assert( t.find( { a : 1 } , { a : 1 } ).sort( { a : 1 } )[0].b == null , "D" );
-
-id = t.findOne()._id;
-
-assert( t.findOne( id ) , "E" );
-assert( t.findOne( id ).a , "F" );
-assert( t.findOne( id ).b , "G" );
-
-assert( t.findOne( id , { a : 1 } ).a , "H" );
-assert( ! t.findOne( id , { a : 1 } ).b , "I" );
-
-assert(t.validate().valid,"not valid");
-
-if ( lookAtDocumentMetrics ) {
- // ignore mongos
- nscannedEnd = db.serverStatus().metrics.queryExecutor.scanned
- assert.lte( nscannedStart + 16, nscannedEnd );
-}
diff --git a/jstests/find2.js b/jstests/find2.js
deleted file mode 100644
index f72203419bc..00000000000
--- a/jstests/find2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// Test object id sorting.
-
-function testObjectIdFind( db ) {
- r = db.ed_db_find2_oif;
- r.drop();
-
- for( i = 0; i < 3; ++i )
- r.save( {} );
-
- f = r.find().sort( { _id: 1 } );
- assert.eq( 3, f.count() );
- assert( f[ 0 ]._id < f[ 1 ]._id );
- assert( f[ 1 ]._id < f[ 2 ]._id );
-}
-
-testObjectIdFind( db );
diff --git a/jstests/find3.js b/jstests/find3.js
deleted file mode 100644
index a5e4b7a4d66..00000000000
--- a/jstests/find3.js
+++ /dev/null
@@ -1,10 +0,0 @@
-t = db.find3;
-t.drop();
-
-for ( i=1; i<=50; i++)
- t.save( { a : i } );
-
-assert.eq( 50 , t.find().toArray().length );
-assert.eq( 20 , t.find().limit(20).toArray().length );
-
-assert(t.validate().valid);
diff --git a/jstests/find4.js b/jstests/find4.js
deleted file mode 100644
index 17639d3a684..00000000000
--- a/jstests/find4.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.find4;
-t.drop();
-
-t.save( { a : 1123 , b : 54332 } );
-
-o = t.find( {} , {} )[0];
-assert.eq( 1123 , o.a , "A" );
-assert.eq( 54332 , o.b , "B" );
-assert( o._id.str , "C" );
-
-o = t.find( {} , { a : 1 } )[0];
-assert.eq( 1123 , o.a , "D" );
-assert( o._id.str , "E" );
-assert( ! o.b , "F" );
-
-o = t.find( {} , { b : 1 } )[0];
-assert.eq( 54332 , o.b , "G" );
-assert( o._id.str , "H" );
-assert( ! o.a , "I" );
-
-t.drop();
-t.save( { a : 1 , b : 1 } );
-t.save( { a : 2 , b : 2 } );
-assert.eq( "1-1,2-2" , t.find().map( function(z){ return z.a + "-" + z.b } ).toString() );
-assert.eq( "1-undefined,2-undefined" , t.find( {} , { a : 1 }).map( function(z){ return z.a + "-" + z.b } ).toString() );
diff --git a/jstests/find5.js b/jstests/find5.js
deleted file mode 100644
index b4a2c0f8865..00000000000
--- a/jstests/find5.js
+++ /dev/null
@@ -1,51 +0,0 @@
-
-t = db.find5;
-t.drop();
-
-t.save({a: 1});
-t.save({b: 5});
-
-assert.eq( 2 , t.find({}, {b:1}).count(), "A");
-
-function getIds( f ){
- return t.find( {} , f ).map( function(z){ return z._id; } );
-}
-
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( {} ) ) , "B1 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { a : 1 } ) ) , "B2 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { b : 1 } ) ) , "B3 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { c : 1 } ) ) , "B4 " );
-
-x = t.find( {} , { a : 1 } )[0];
-assert.eq( 1 , x.a , "C1" );
-assert.isnull( x.b , "C2" );
-
-x = t.find( {} , { a : 1 } )[1];
-assert.isnull( x.a , "C3" );
-assert.isnull( x.b , "C4" );
-
-x = t.find( {} , { b : 1 } )[0];
-assert.isnull( x.a , "C5" );
-assert.isnull( x.b , "C6" );
-
-x = t.find( {} , { b : 1 } )[1];
-assert.isnull( x.a , "C7" );
-assert.eq( 5 , x.b , "C8" );
-
-t.drop();
-
-
-t.save( { a : 1 , b : { c : 2 , d : 3 , e : 4 } } );
-assert.eq( 2 , t.find( {} , { "b.c" : 1 } ).toArray()[0].b.c , "D" );
-
-o = t.find( {} , { "b.c" : 1 , "b.d" : 1 } ).toArray()[0];
-assert( o.b.c , "E 1" );
-assert( o.b.d , "E 2" );
-assert( !o.b.e , "E 3" );
-
-assert( ! t.find( {} , { "b.c" : 1 } ).toArray()[0].b.d , "F" );
-
-t.drop();
-t.save( { a : { b : { c : 1 } } } )
-assert.eq( 1 , t.find( {} , { "a.b.c" : 1 } )[0].a.b.c , "G" );
-
diff --git a/jstests/find6.js b/jstests/find6.js
deleted file mode 100644
index c4efd3b88d3..00000000000
--- a/jstests/find6.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-t = db.find6;
-t.drop();
-
-t.save( { a : 1 } )
-t.save( { a : 1 , b : 1 } )
-
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { b : null } ).count() , "B" );
-assert.eq( 1 , t.find( "function() { return this.b == null; }" ).itcount() , "C" );
-assert.eq( 1 , t.find( "function() { return this.b == null; }" ).count() , "D" );
-
-/* test some stuff with dot array notation */
-q = db.find6a;
-q.drop();
-q.insert( { "a" : [ { "0" : 1 } ] } );
-q.insert( { "a" : [ { "0" : 2 } ] } );
-q.insert( { "a" : [ 1 ] } );
-q.insert( { "a" : [ 9, 1 ] } );
-
-function f() {
-
- assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da1");
- assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da2");
-
- assert.eq( 1, q.find( { 'a.0' : { $gt : 8 } } ).count(), "da3");
- assert.eq( 0, q.find( { 'a.0' : { $lt : 0 } } ).count(), "da4");
-
-}
-
-for( var pass = 0; pass <= 1 ; pass++ ) {
- f();
- q.ensureIndex({a:1});
-}
-
-t = db.multidim;
-t.drop();
-t.insert({"a" : [ [ ], 1, [ 3, 4 ] ] });
-assert.eq(1, t.find({"a.2":[3,4]}).count(), "md1");
-assert.eq(1, t.find({"a.2.1":4}).count(), "md2");
-assert.eq(0, t.find({"a.2.1":3}).count(), "md3");
diff --git a/jstests/find7.js b/jstests/find7.js
deleted file mode 100644
index ca4c7d449bf..00000000000
--- a/jstests/find7.js
+++ /dev/null
@@ -1,8 +0,0 @@
-t = db.find7;
-t.drop();
-
-x = { "_id" : { "d" : 3649, "w" : "signed" }, "u" : { "3649" : 5 } };
-t.insert(x );
-assert.eq( x , t.findOne() , "A1" );
-assert.eq( x , t.findOne( { _id : x._id } ) , "A2" );
-
diff --git a/jstests/find8.js b/jstests/find8.js
deleted file mode 100644
index 60f66a500e3..00000000000
--- a/jstests/find8.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// SERVER-1932 Test unindexed matching of a range that is only valid in a multikey context.
-
-t = db.jstests_find8;
-t.drop();
-
-t.save( {a:[1,10]} );
-assert.eq( 1, t.count( { a: { $gt:2,$lt:5} } ) );
-
-// Check that we can do a query with 'invalid' range.
-assert.eq( 1, t.count( { a: { $gt:5,$lt:2} } ) );
-
-t.save( {a:[-1,12]} );
-
-// Check that we can do a query with 'invalid' range and sort.
-assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {$natural:-1} ).itcount() );
-
-// SERVER-2864
-if( 0 ) {
-t.find( { a: { $gt:5,$lt:2} } ).itcount();
-// Check that we can record a plan for an 'invalid' range.
-assert( t.find( { a: { $gt:5,$lt:2} } ).explain( true ).oldPlan );
-}
-
-t.ensureIndex( {b:1} );
-// Check that if we do a table scan of an 'invalid' range in an or clause we don't check subsequent clauses.
-assert.eq( "BasicCursor", t.find( { $or:[{ a: { $gt:5,$lt:2} }, {b:1}] } ).explain().cursor );
diff --git a/jstests/find9.js b/jstests/find9.js
deleted file mode 100644
index 85adf93cc98..00000000000
--- a/jstests/find9.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test that the MaxBytesToReturnToClientAtOnce limit is enforced.
-
-t = db.jstests_find9;
-t.drop();
-
-big = new Array( 500000 ).toString();
-for( i = 0; i < 20; ++i ) {
- t.save( { a:i, b:big } );
-}
-
-// Check size limit with a simple query.
-assert.eq( 20, t.find( {}, { a:1 } ).objsLeftInBatch() ); // Projection resizes the result set.
-assert.gt( 20, t.find().objsLeftInBatch() );
-
-// Check size limit on a query with an explicit batch size.
-assert.eq( 20, t.find( {}, { a:1 } ).batchSize( 30 ).objsLeftInBatch() );
-assert.gt( 20, t.find().batchSize( 30 ).objsLeftInBatch() );
-
-for( i = 0; i < 20; ++i ) {
- t.save( { a:i, b:big } );
-}
-
-// Check size limit with get more.
-c = t.find().batchSize( 30 );
-while( c.hasNext() ) {
- assert.gt( 20, c.objsLeftInBatch() );
- c.next();
-}
diff --git a/jstests/find_and_modify.js b/jstests/find_and_modify.js
deleted file mode 100644
index a80859ab60c..00000000000
--- a/jstests/find_and_modify.js
+++ /dev/null
@@ -1,38 +0,0 @@
-t = db.find_and_modify;
-t.drop();
-
-// fill db
-for(var i=1; i<=10; i++) {
- t.insert({priority:i, inprogress:false, value:0});
-}
-
-// returns old
-out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}});
-assert.eq(out.value, 0);
-assert.eq(out.inprogress, false);
-t.update({_id: out._id}, {$set: {inprogress: false}});
-
-// returns new
-out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}, 'new': true});
-assert.eq(out.value, 2);
-assert.eq(out.inprogress, true);
-t.update({_id: out._id}, {$set: {inprogress: false}});
-
-// update highest priority
-out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
-assert.eq(out.priority, 10);
-// update next highest priority
-out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
-assert.eq(out.priority, 9);
-
-// remove lowest priority
-out = t.findAndModify({sort:{priority:1}, remove:true});
-assert.eq(out.priority, 1);
-
-// remove next lowest priority
-out = t.findAndModify({sort:{priority:1}, remove:1});
-assert.eq(out.priority, 2);
-
-// return null (was {} before 1.5.4) if no matches (drivers may handle this differently)
-out = t.findAndModify({query:{no_such_field:1}, remove:1});
-assert.eq(out, null);
diff --git a/jstests/find_and_modify2.js b/jstests/find_and_modify2.js
deleted file mode 100644
index 2c8ab5b3bb6..00000000000
--- a/jstests/find_and_modify2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-t = db.find_and_modify2;
-t.drop();
-
-t.insert({_id:1, i:0, j:0});
-
-out = t.findAndModify({update: {$inc: {i:1}}, 'new': true, fields: {i:1}});
-assert.eq(out, {_id:1, i:1});
-
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {i:0}});
-assert.eq(out, {_id:1, j:0});
-
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}});
-assert.eq(out, {j:0});
-
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}, 'new': true});
-assert.eq(out, {j:0});
diff --git a/jstests/find_and_modify3.js b/jstests/find_and_modify3.js
deleted file mode 100644
index 4214dfbcd0a..00000000000
--- a/jstests/find_and_modify3.js
+++ /dev/null
@@ -1,21 +0,0 @@
-t = db.find_and_modify3;
-t.drop();
-
-t.insert({_id:0, other:0, comments:[{i:0, j:0}, {i:1, j:1}]});
-t.insert({_id:1, other:1, comments:[{i:0, j:0}, {i:1, j:1}]}); // this is the only one that gets modded
-t.insert({_id:2, other:2, comments:[{i:0, j:0}, {i:1, j:1}]});
-
-orig0 = t.findOne({_id:0})
-orig2 = t.findOne({_id:2})
-
-out = t.findAndModify({query: {_id:1, 'comments.i':0}, update: {$set: {'comments.$.j':2}}, 'new': true, sort:{other:1}});
-assert.eq(out.comments[0], {i:0, j:2});
-assert.eq(out.comments[1], {i:1, j:1});
-assert.eq(t.findOne({_id:0}), orig0);
-assert.eq(t.findOne({_id:2}), orig2);
-
-out = t.findAndModify({query: {other:1, 'comments.i':1}, update: {$set: {'comments.$.j':3}}, 'new': true, sort:{other:1}});
-assert.eq(out.comments[0], {i:0, j:2});
-assert.eq(out.comments[1], {i:1, j:3});
-assert.eq(t.findOne({_id:0}), orig0);
-assert.eq(t.findOne({_id:2}), orig2);
diff --git a/jstests/find_and_modify4.js b/jstests/find_and_modify4.js
deleted file mode 100644
index 04abc2f1ce7..00000000000
--- a/jstests/find_and_modify4.js
+++ /dev/null
@@ -1,55 +0,0 @@
-t = db.find_and_modify4;
-t.drop();
-
-// this is the best way to build auto-increment
-function getNextVal(counterName){
- var ret = t.findAndModify({
- query: {_id: counterName},
- update: {$inc: {val: 1}},
- upsert: true,
- 'new': true,
- });
- return ret;
-}
-
-assert.eq(getNextVal("a"), {_id:"a", val:1});
-assert.eq(getNextVal("a"), {_id:"a", val:2});
-assert.eq(getNextVal("a"), {_id:"a", val:3});
-assert.eq(getNextVal("z"), {_id:"z", val:1});
-assert.eq(getNextVal("z"), {_id:"z", val:2});
-assert.eq(getNextVal("a"), {_id:"a", val:4});
-
-t.drop();
-
-function helper(upsert){
- return t.findAndModify({
- query: {_id: "asdf"},
- update: {$inc: {val: 1}},
- upsert: upsert,
- 'new': false // the default
- });
-}
-
-// upsert:false so nothing there before and after
-assert.eq(helper(false), null);
-assert.eq(t.count(), 0);
-
-// upsert:true so nothing there before; something there after
-assert.eq(helper(true), null);
-assert.eq(t.count(), 1);
-assert.eq(helper(true), {_id: 'asdf', val: 1});
-assert.eq(helper(false), {_id: 'asdf', val: 2}); // upsert only matters when obj doesn't exist
-assert.eq(helper(true), {_id: 'asdf', val: 3});
-
-
-// _id created if not specified
-var out = t.findAndModify({
- query: {a:1},
- update: {$set: {b: 2}},
- upsert: true,
- 'new': true
- });
-assert.neq(out._id, undefined);
-assert.eq(out.a, 1);
-assert.eq(out.b, 2);
-
diff --git a/jstests/find_and_modify_server6226.js b/jstests/find_and_modify_server6226.js
deleted file mode 100644
index a44cb59cb05..00000000000
--- a/jstests/find_and_modify_server6226.js
+++ /dev/null
@@ -1,7 +0,0 @@
-
-t = db.find_and_modify_server6226;
-t.drop();
-
-ret = t.findAndModify( { query : { _id : 1 } , update : { "$inc" : { i : 1 } } , upsert : true } )
-assert.isnull( ret )
-
diff --git a/jstests/find_and_modify_server6254.js b/jstests/find_and_modify_server6254.js
deleted file mode 100644
index 7c7c340b7ed..00000000000
--- a/jstests/find_and_modify_server6254.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.find_and_modify_server6254;
-t.drop();
-
-t.insert( { x : 1 } )
-ret = t.findAndModify( { query : { x : 1 } , update : { $set : { x : 2 } } , new : true } )
-assert.eq( 2 , ret.x , tojson( ret ) )
-
-assert.eq( 1 , t.count() )
-
diff --git a/jstests/find_and_modify_server6582.js b/jstests/find_and_modify_server6582.js
deleted file mode 100644
index 88ce5a7c4f4..00000000000
--- a/jstests/find_and_modify_server6582.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.find_and_modify_server6582;
-
-t.drop();
-x = t.runCommand( "findAndModify" , {query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true})
-le = x.lastErrorObject
-assert.eq( le.updatedExisting, false )
-assert.eq( le.n, 1 )
-assert.eq( le.upserted, x.value._id )
-
-t.drop();
-t.insert( { f : 1 } )
-x = t.runCommand( "findAndModify" , {query:{f:1}, remove : true } )
-le = x.lastErrorObject
-assert.eq( le.n, 1 )
-
-
-
diff --git a/jstests/find_and_modify_server6588.js b/jstests/find_and_modify_server6588.js
deleted file mode 100644
index a21855f5c38..00000000000
--- a/jstests/find_and_modify_server6588.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.find_and_modify_sever6588;
-
-initial = { _id : 1 , a : [ { b : 1 } ] , z : 1 }
-up = { "$set" : { "a.$.b" : 2 } }
-q = { _id : 1 , "a.b" : 1 }
-correct = { _id : 1 , a : [ { b : 2 } ] , z : 1 }
-
-t.drop();
-t.insert( initial )
-t.update( q , up )
-assert.eq( correct , t.findOne() )
-
-t.drop()
-t.insert( initial )
-x = t.findAndModify( { query : q , update : up } )
-assert.eq( correct , t.findOne() )
-
-t.drop()
-t.insert( initial )
-x = t.findAndModify( { query : { z : 1 , "a.b" : 1 } , update : up } )
-assert.eq( correct , t.findOne() )
diff --git a/jstests/find_and_modify_server6659.js b/jstests/find_and_modify_server6659.js
deleted file mode 100644
index 7a0419cc72b..00000000000
--- a/jstests/find_and_modify_server6659.js
+++ /dev/null
@@ -1,7 +0,0 @@
-
-t = db.find_and_modify_server6659;
-t.drop();
-
-x = t.findAndModify({query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true})
-assert.eq( 2, x.f );
-assert.eq( 2, t.findOne().f );
diff --git a/jstests/find_and_modify_server6909.js b/jstests/find_and_modify_server6909.js
deleted file mode 100644
index 2f688459698..00000000000
--- a/jstests/find_and_modify_server6909.js
+++ /dev/null
@@ -1,21 +0,0 @@
-c = db.find_and_modify_server6906;
-
-
-c.drop();
-
-c.insert( { _id : 5 , a:{ b:1 } } );
-ret = c.findAndModify( { query:{ 'a.b':1 },
- update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
- new:true } );
-assert.eq( 5, ret._id );
-assert.eq( 2, ret.a.b );
-
-
-c.drop();
-
-c.insert( { _id : null , a:{ b:1 } } );
-ret = c.findAndModify( { query:{ 'a.b':1 },
- update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
- new:true } );
-assert.eq( 2, ret.a.b );
-
diff --git a/jstests/find_and_modify_server6993.js b/jstests/find_and_modify_server6993.js
deleted file mode 100644
index b8a31915372..00000000000
--- a/jstests/find_and_modify_server6993.js
+++ /dev/null
@@ -1,9 +0,0 @@
-
-c = db.find_and_modify_server6993;
-c.drop();
-
-c.insert( { a:[ 1, 2 ] } );
-
-c.findAndModify( { query:{ a:1 }, update:{ $set:{ 'a.$':5 } } } );
-
-assert.eq( 5, c.findOne().a[ 0 ] );
diff --git a/jstests/find_and_modify_server7660.js b/jstests/find_and_modify_server7660.js
deleted file mode 100644
index cae50d17dcc..00000000000
--- a/jstests/find_and_modify_server7660.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.find_and_modify_server7660;
-t.drop();
-
-a = t.findAndModify({
- query : { foo : 'bar' },
- update : { $set : { bob : 'john' } },
- sort: { foo : 1},
- upsert: true,
- new : true
-});
-
-b = t.findOne();
-assert.eq( a, b );
-assert.eq( "bar", a.foo );
-assert.eq( "john", a.bob )
-
-
diff --git a/jstests/find_and_modify_where.js b/jstests/find_and_modify_where.js
deleted file mode 100644
index 2092dc5566f..00000000000
--- a/jstests/find_and_modify_where.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.find_and_modify_where;
-t.drop();
-
-t.insert( { _id : 1 , x : 1 } );
-
-res = t.findAndModify( { query : { $where : "return this.x == 1" } , update : { $set : { y : 1 } } } )
-
-assert.eq( 1 , t.findOne().y )
-
diff --git a/jstests/find_dedup.js b/jstests/find_dedup.js
deleted file mode 100644
index 401384ceb7a..00000000000
--- a/jstests/find_dedup.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// Test that duplicate query results are not returned.
-
-var t = db.jstests_find_dedup;
-
-function checkDedup(query, idArray) {
- resultsArr = t.find(query).toArray();
- assert.eq(resultsArr.length, idArray.length, "same number of results");
-
- for (var i = 0; i < idArray.length; i++) {
- assert(("_id" in resultsArr[i]), "result doc missing _id");
- assert.eq(idArray[i], resultsArr[i]._id, "_id mismatch for doc " + i);
- }
-}
-
-// Deduping $or
-t.drop();
-t.ensureIndex({a: 1, b: 1});
-t.save({_id: 1, a: 1, b: 1});
-t.save({_id: 2, a: 1, b: 1});
-t.save({_id: 3, a: 2, b: 2});
-t.save({_id: 4, a: 3, b: 3});
-t.save({_id: 5, a: 3, b: 3});
-checkDedup({$or: [{a:{$gte:0,$lte:2},b:{$gte:0,$lte:2}},
- {a:{$gte:1,$lte:3},b:{$gte:1,$lte:3}},
- {a:{$gte:1,$lte:4},b:{$gte:1,$lte:4}}]},
- [1, 2, 3, 4, 5]);
-
-// Deduping multikey
-t.drop();
-t.save({_id: 1, a: [1, 2, 3], b: [4, 5, 6]});
-t.save({_id: 2, a: [1, 2, 3], b: [4, 5, 6]});
-assert.eq( 2, t.count() );
-checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
-t.ensureIndex( { a : 1 } );
-checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
diff --git a/jstests/find_size.js b/jstests/find_size.js
deleted file mode 100644
index d5a93d59cd2..00000000000
--- a/jstests/find_size.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Basic test for $size.
-
-var t = db.jstests_find_size;
-t.drop();
-
-t.save({arr: []});
-t.save({arr: []});
-t.save({arr: [1]});
-t.save({arr: [1, 2, 3, 4]});
-
-// ints and longs
-assert.eq(2, t.count({arr: {$size: 0}}));
-assert.eq(2, t.count({arr: {$size: NumberLong(0)}}));
-assert.eq(0, t.count({arr: {$size: -1}}));
-assert.eq(0, t.count({arr: {$size: NumberLong(-10000)}}));
-assert.eq(1, t.count({arr: {$size: NumberInt(4)}}));
-
-// Descriptive test: string is equivalent to {$size: 0}
-assert.eq(2, t.count({arr: {$size: "str"}}));
-
-// doubles return nothing
-assert.eq(0, t.count({arr: {$size: 3.2}}));
-assert.eq(0, t.count({arr: {$size: 0.1}}));
-
-// SERVER-11952
-assert.eq(0, t.count({arr: {$size: NumberLong(-9223372036854775808)}}));
diff --git a/jstests/finda.js b/jstests/finda.js
deleted file mode 100644
index cf717d5b929..00000000000
--- a/jstests/finda.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// Tests where the QueryOptimizerCursor enters takeover mode during a query rather than a get more.
-
-t = db.jstests_finda;
-t.drop();
-
-numDocs = 200;
-
-function clearQueryPlanCache() {
- t.ensureIndex( { c:1 } );
- t.dropIndex( { c:1 } );
-}
-
-function assertAllFound( matches ) {
-// printjson( matches );
- found = new Array( numDocs );
- for( i = 0; i < numDocs; ++i ) {
- found[ i ] = false;
- }
- for( i in matches ) {
- m = matches[ i ];
- found[ m._id ] = true;
- }
- for( i = 0; i < numDocs; ++i ) {
- assert( found[ i ], i );
- }
-}
-
-function makeCursor( query, projection, sort, batchSize, returnKey ) {
- print("\n*** query:");
- printjson(query);
- print("proj:");
- printjson(projection);
- cursor = t.find( query, projection );
- if ( sort ) {
- cursor.sort( sort );
- print("sort:");
- printjson(sort);
- }
- if ( batchSize ) {
- cursor.batchSize( batchSize );
- print("bs: " + batchSize);
- }
- if ( returnKey ) {
- cursor._addSpecial( "$returnKey", true );
- }
- return cursor;
-}
-
-function checkCursorWithBatchSizeProjection( query, projection, sort, batchSize,
- expectedLeftInBatch ) {
- clearQueryPlanCache();
- cursor = makeCursor( query, projection, sort, batchSize );
- // XXX: this
- assert.eq( expectedLeftInBatch, cursor.objsLeftInBatch() );
- assertAllFound( cursor.toArray() );
-}
-
-function checkCursorWithBatchSize( query, sort, batchSize, expectedLeftInBatch ) {
- checkCursorWithBatchSizeProjection( query, {}, sort, batchSize, expectedLeftInBatch );
- checkCursorWithBatchSizeProjection( query, { a:1, _id:1 }, sort, batchSize,
- expectedLeftInBatch );
- // In the cases tested, when expectedLeftInBatch is high enough takeover will occur during
- // the query operation rather than getMore and the last few matches should properly return keys
- // from the a,_id index.
- clearQueryPlanCache();
- if ( expectedLeftInBatch > 110 ) {
- cursor = makeCursor( query, {}, sort, batchSize, true );
- lastNonAIndexResult = -1;
- for( i = 0; i < expectedLeftInBatch; ++i ) {
- next = cursor.next();
- // Identify the query plan used by checking the fields of a returnKey query.
- if ( !friendlyEqual( [ 'a', '_id' ], Object.keySet( next ) ) ) {
- lastNonAIndexResult = i;
- }
- }
- // The last results should come from the a,_id index.
- assert.lt( lastNonAIndexResult, expectedLeftInBatch - 5 );
- }
-}
-
-function queryWithPlanTypes( withDups ) {
- t.drop();
- for( i = 1; i < numDocs; ++i ) {
- t.save( { _id:i, a:i, b:0 } );
- }
- if ( withDups ) {
- t.save( { _id:0, a:[ 0, numDocs ], b:0 } ); // Add a dup on a:1 index.
- }
- else {
- t.save( { _id:0, a:0, b:0 } );
- }
- t.ensureIndex( { a:1, _id:1 } ); // Include _id for a covered index projection.
-
- // All plans in order.
- checkCursorWithBatchSize( { a:{ $gte:0 } }, null, 150, 150 );
-
- // All plans out of order.
- checkCursorWithBatchSize( { a:{ $gte:0 } }, { c:1 }, null, 101 );
-
- // Some plans in order, some out of order.
- checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, 150, 150 );
- checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, null, 101 );
-}
-
-queryWithPlanTypes( false );
-queryWithPlanTypes( true );
diff --git a/jstests/fm1.js b/jstests/fm1.js
deleted file mode 100644
index bc60a3d8911..00000000000
--- a/jstests/fm1.js
+++ /dev/null
@@ -1,12 +0,0 @@
-
-t = db.fm1;
-t.drop();
-
-t.insert({foo:{bar:1}})
-t.find({},{foo:1}).toArray();
-t.find({},{'foo.bar':1}).toArray();
-t.find({},{'baz':1}).toArray();
-t.find({},{'baz.qux':1}).toArray();
-t.find({},{'foo.qux':1}).toArray();
-
-
diff --git a/jstests/fm2.js b/jstests/fm2.js
deleted file mode 100644
index 00ccdf4afee..00000000000
--- a/jstests/fm2.js
+++ /dev/null
@@ -1,9 +0,0 @@
-
-t = db.fm2
-t.drop();
-
-t.insert( { "one" : { "two" : {"three":"four"} } } );
-
-x = t.find({},{"one.two":1})[0]
-assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" );
-
diff --git a/jstests/fm3.js b/jstests/fm3.js
deleted file mode 100644
index 8ccde6d5ab3..00000000000
--- a/jstests/fm3.js
+++ /dev/null
@@ -1,37 +0,0 @@
-t = db.fm3
-t.drop();
-
-t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} );
-
-
-res = t.findOne({}, {a:1});
-assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a");
-assert.eq(res.b, undefined, "one b");
-
-res = t.findOne({}, {a:0});
-assert.eq(res.a, undefined, "two a");
-assert.eq(res.b, 1, "two b");
-
-res = t.findOne({}, {'a.d':1});
-assert.eq(res.a, [{}, {d:2}], "three a");
-assert.eq(res.b, undefined, "three b");
-
-res = t.findOne({}, {'a.d':0});
-assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a");
-assert.eq(res.b, 1, "four b");
-
-res = t.findOne({}, {'a.c':1});
-assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a");
-assert.eq(res.b, undefined, "five b");
-
-res = t.findOne({}, {'a.c':0});
-assert.eq(res.a, [{}, {d:2}, 'z'], "six a");
-assert.eq(res.b, 1, "six b");
-
-res = t.findOne({}, {'a.c.e':1});
-assert.eq(res.a, [{c:{e:1}}, {}], "seven a");
-assert.eq(res.b, undefined, "seven b");
-
-res = t.findOne({}, {'a.c.e':0});
-assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a");
-assert.eq(res.b, 1, "eight b");
diff --git a/jstests/fm4.js b/jstests/fm4.js
deleted file mode 100644
index 1ce947ad5e7..00000000000
--- a/jstests/fm4.js
+++ /dev/null
@@ -1,16 +0,0 @@
-t = db.fm4
-t.drop();
-
-t.insert({_id:1, a:1, b:1});
-
-assert.eq( t.findOne({}, {_id:1}), {_id:1}, 1)
-assert.eq( t.findOne({}, {_id:0}), {a:1, b:1}, 2)
-
-assert.eq( t.findOne({}, {_id:1, a:1}), {_id:1, a:1}, 3)
-assert.eq( t.findOne({}, {_id:0, a:1}), {a:1}, 4)
-
-assert.eq( t.findOne({}, {_id:0, a:0}), {b:1}, 6)
-assert.eq( t.findOne({}, { a:0}), {_id:1, b:1}, 5)
-
-// not sure if we want to support this since it is the same as above
-//assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5)
diff --git a/jstests/fsync.js b/jstests/fsync.js
deleted file mode 100644
index 0cfece75c10..00000000000
--- a/jstests/fsync.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// test the lock/unlock snapshotting feature a bit
-
-x=db.runCommand({fsync:1,lock:1}); // not on admin db
-assert(!x.ok,"D");
-
-x=db.fsyncLock(); // uses admin automatically
-
-assert(x.ok,"C");
-
-y = db.currentOp();
-assert(y.fsyncLock,"B");
-
-z = db.fsyncUnlock();
-assert( db.currentOp().fsyncLock == null, "A2" );
-
-// make sure the db is unlocked
-db.jstests_fsync.insert({x:1});
-db.getLastError();
-
-assert( db.currentOp().fsyncLock == null, "A" );
-
-assert( !db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail." )
diff --git a/jstests/fts1.js b/jstests/fts1.js
deleted file mode 100644
index 6bd138d6c25..00000000000
--- a/jstests/fts1.js
+++ /dev/null
@@ -1,29 +0,0 @@
-load( "jstests/libs/fts.js" );
-
-t = db.text1;
-t.drop();
-
-// this test requires usePowerOf2Sizes to be off
-db.createCollection( t.getName(), {"usePowerOf2Sizes" : false } );
-assert.eq(0, t.stats().userFlags);
-
-assert.eq( [] , queryIDS( t , "az" ) , "A0" );
-
-t.save( { _id : 1 , x : "az b c" } );
-t.save( { _id : 2 , x : "az b" } );
-t.save( { _id : 3 , x : "b c" } );
-t.save( { _id : 4 , x : "b c d" } );
-
-assert.eq(t.stats().userFlags, 0,
- "A new collection should not have power-of-2 storage allocation strategy");
-t.ensureIndex( { x : "text" } );
-assert.eq(t.stats().userFlags, 1,
- "Creating a text index on a collection should change the allocation strategy " +
- "to power-of-2.");
-
-assert.eq( [1,2,3,4] , queryIDS( t , "c az" ) , "A1" );
-assert.eq( [4] , queryIDS( t , "d" ) , "A2" );
-
-idx = db.system.indexes.findOne( { ns: t.getFullName(), "weights.x" : 1 } )
-assert( idx.v >= 1, tojson( idx ) )
-assert( idx.textIndexVersion >= 1, tojson( idx ) )
diff --git a/jstests/fts2.js b/jstests/fts2.js
deleted file mode 100644
index e0e7469fa5e..00000000000
--- a/jstests/fts2.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-t = db.text2;
-t.drop();
-
-t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
-t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
-
-t.ensureIndex( { x : "text" } , { weights : { x : 10 , y : 1 } } );
-
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
-
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
-
-printjson(lastCommadResult);
-assert.eq( 2 , lastCommadResult.stats.nscannedObjects , "B3" );
-assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
-
diff --git a/jstests/fts3.js b/jstests/fts3.js
deleted file mode 100644
index f5f72c4df0a..00000000000
--- a/jstests/fts3.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-t = db.text3;
-t.drop();
-
-t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
-t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
-
-t.ensureIndex( { x : "text" , z : 1 } , { weights : { x : 10 , y : 1 } } );
-
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
-
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
-
-assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
-assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/fts4.js b/jstests/fts4.js
deleted file mode 100644
index 8598457b033..00000000000
--- a/jstests/fts4.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-t = db.text4;
-t.drop();
-
-t.save( { _id : 1 , x : [ "az" , "b" , "x" ] , y : [ "c" , "d" , "m" ] , z : 1 } );
-t.save( { _id : 2 , x : [ "c" , "d" , "y" ] , y : [ "az" , "b" , "n" ] , z : 2 } );
-
-t.ensureIndex( { y : "text" , z : 1 } , { weights : { x : 10 } } );
-
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
-
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
-
-assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
-assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/fts5.js b/jstests/fts5.js
deleted file mode 100644
index a3097b47a4a..00000000000
--- a/jstests/fts5.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-t = db.text5;
-t.drop();
-
-t.save( { _id: 1 , x: [ { a: "az" } , { a: "b" } , { a: "x" } ] , y: [ "c" , "d" , "m" ] , z: 1 } );
-t.save( { _id: 2 , x: [ { a: "c" } , { a: "d" } , { a: "y" } ] , y: [ "az" , "b" , "n" ] , z: 2 } );
-
-t.ensureIndex( { y: "text" , z: 1 } , { weights: { "x.a": 10 } } );
-
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
-
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z: 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z: 1 } ) , "B2" );
-
-assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
-assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/fts6.js b/jstests/fts6.js
deleted file mode 100644
index 15c537bb235..00000000000
--- a/jstests/fts6.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// SERVER-13039. Confirm that we return the right results when $text is
-// inside an $or.
-
-var t = db.jstests_fts6;
-t.drop();
-
-t.ensureIndex({a: 1});
-t.ensureIndex({b: "text"});
-
-t.save({_id: 1, a: 0});
-t.save({_id: 2, a: 0, b: "foo"});
-
-var cursor = t.find({a: 0, $or: [{_id: 2}, {$text: {$search: "foo"}}]});
-var results = cursor.toArray();
-assert.eq(1, results.length, "unexpected number of results");
-assert.eq(2, results[0]["_id"], "unexpected document returned");
diff --git a/jstests/fts_blog.js b/jstests/fts_blog.js
deleted file mode 100644
index 38cbb826eff..00000000000
--- a/jstests/fts_blog.js
+++ /dev/null
@@ -1,26 +0,0 @@
-t = db.text_blog;
-t.drop();
-
-t.save( { _id : 1 , title : "my blog post" , text : "this is a new blog i am writing. yay" } );
-t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am writing. yay" } );
-t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
-
-// default weight is 1
-// specify weights if you want a field to be more meaningful
-t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
-
-res = t.runCommand( "text" , { search : "blog" } )
-assert.eq( 3, res.results.length );
-assert.eq( 1, res.results[0].obj._id );
-
-res = t.runCommand( "text" , { search : "write" } )
-assert.eq( 3, res.results.length );
-assert.eq( res.results[0].score, res.results[1].score );
-assert.eq( res.results[0].score, res.results[2].score );
-
-
-
-
-
-
-
diff --git a/jstests/fts_blogwild.js b/jstests/fts_blogwild.js
deleted file mode 100644
index ecad0ce0b19..00000000000
--- a/jstests/fts_blogwild.js
+++ /dev/null
@@ -1,40 +0,0 @@
-t = db.text_blogwild;
-t.drop();
-
-t.save( { _id: 1 , title: "my blog post" , text: "this is a new blog i am writing. yay eliot" } );
-t.save( { _id: 2 , title: "my 2nd post" , text: "this is a new blog i am writing. yay" } );
-t.save( { _id: 3 , title: "knives are Fun for writing eliot" , text: "this is a new blog i am writing. yay" } );
-
-// default weight is 1
-// specify weights if you want a field to be more meaningful
-t.ensureIndex( { dummy: "text" } , { weights: "$**" } );
-
-res = t.runCommand( "text" , { search: "blog" } );
-assert.eq( 3 , res.stats.n , "A1" );
-
-res = t.runCommand( "text" , { search: "write" } );
-assert.eq( 3 , res.stats.n , "B1" );
-
-// mixing
-t.dropIndex( "dummy_text" );
-assert.eq( 1 , t.getIndexKeys().length , "C1" );
-t.ensureIndex( { dummy: "text" } , { weights: { "$**": 1 , title: 2 } } );
-
-
-res = t.runCommand( "text" , { search: "write" } );
-assert.eq( 3 , res.stats.n , "C2" );
-assert.eq( 3 , res.results[0].obj._id , "C3" );
-
-res = t.runCommand( "text" , { search: "blog" } );
-assert.eq( 3 , res.stats.n , "D1" );
-assert.eq( 1 , res.results[0].obj._id , "D2" );
-
-res = t.runCommand( "text" , { search: "eliot" } );
-assert.eq( 2 , res.stats.n , "E1" );
-assert.eq( 3 , res.results[0].obj._id , "E2" );
-
-
-
-
-
-
diff --git a/jstests/fts_enabled.js b/jstests/fts_enabled.js
deleted file mode 100644
index 8617caff59f..00000000000
--- a/jstests/fts_enabled.js
+++ /dev/null
@@ -1,5 +0,0 @@
-// Test that the textSearchEnabled server parameter works correctly (now deprecated).
-
-// Value true is accepted, value false is rejected.
-assert.commandWorked(db.adminCommand({setParameter: 1, textSearchEnabled: true}));
-assert.commandFailed(db.adminCommand({setParameter: 1, textSearchEnabled: false}));
diff --git a/jstests/fts_explain.js b/jstests/fts_explain.js
deleted file mode 100644
index 0d9c1fd7a9d..00000000000
--- a/jstests/fts_explain.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// Test $text explain. SERVER-12037.
-
-var coll = db.fts_explain;
-
-coll.drop();
-coll.ensureIndex({content: "text"}, {default_language: "none"});
-assert.gleSuccess(db);
-
-coll.insert({content: "some data"});
-assert.gleSuccess(db);
-
-var explain = coll.find({$text:{$search: "\"a\" -b -\"c\""}}).explain(true);
-assert.eq(explain.cursor, "TextCursor");
-assert.eq(explain.stats.type, "TEXT");
-assert.eq(explain.stats.parsedTextQuery.terms, ["a"]);
-assert.eq(explain.stats.parsedTextQuery.negatedTerms, ["b"]);
-assert.eq(explain.stats.parsedTextQuery.phrases, ["a"]);
-assert.eq(explain.stats.parsedTextQuery.negatedPhrases, ["c"]);
diff --git a/jstests/fts_index.js b/jstests/fts_index.js
deleted file mode 100644
index 480bfb05fa2..00000000000
--- a/jstests/fts_index.js
+++ /dev/null
@@ -1,138 +0,0 @@
-// Test that:
-// 1. Text indexes properly validate the index spec used to create them.
-// 2. Text indexes properly enforce a schema on the language_override field.
-// 3. Collections may have at most one text index.
-// 4. Text indexes properly handle large documents.
-
-var coll = db.fts_index;
-var indexName = "textIndex";
-coll.drop();
-coll.getDB().createCollection(coll.getName());
-
-//
-// 1. Text indexes properly validate the index spec used to create them.
-//
-
-// Spec passes text-specific index validation.
-coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanish"});
-assert(!db.getLastError());
-assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
-coll.dropIndexes();
-
-// Spec fails text-specific index validation ("spanglish" unrecognized).
-coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanglish"});
-assert(db.getLastError());
-assert.eq(0, coll.system.indexes.count({ns: coll.getFullName(), name: indexName}));
-coll.dropIndexes();
-
-// Spec passes general index validation.
-coll.ensureIndex({"$**": "text"}, {name: indexName});
-assert(!db.getLastError());
-assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
-coll.dropIndexes();
-
-// Spec fails general index validation ("a.$**" invalid field name for key).
-coll.ensureIndex({"a.$**": "text"}, {name: indexName});
-assert(db.getLastError());
-assert.eq(0, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
-coll.dropIndexes();
-
-//
-// 2. Text indexes properly enforce a schema on the language_override field.
-//
-
-// Can create a text index on a collection where no documents have invalid language_override.
-coll.insert({a: ""});
-coll.insert({a: "", language: "spanish"});
-coll.ensureIndex({a: "text"});
-assert(!db.getLastError());
-coll.drop();
-
-// Can't create a text index on a collection containing document with an invalid language_override.
-coll.insert({a: "", language: "spanglish"});
-coll.ensureIndex({a: "text"});
-assert(db.getLastError());
-coll.drop();
-
-// Can insert documents with valid language_override into text-indexed collection.
-coll.ensureIndex({a: "text"});
-assert(!db.getLastError());
-coll.insert({a: ""});
-coll.insert({a: "", language: "spanish"});
-assert(!db.getLastError());
-coll.drop();
-
-// Can't insert documents with invalid language_override into text-indexed collection.
-coll.ensureIndex({a: "text"});
-assert(!db.getLastError());
-coll.insert({a: "", language: "spanglish"});
-assert(db.getLastError());
-coll.drop();
-
-//
-// 3. Collections may have at most one text index.
-//
-
-coll.ensureIndex({a: 1, b: "text", c: 1});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-
-// ensureIndex() becomes a no-op on an equivalent index spec.
-coll.ensureIndex({a: 1, b: "text", c: 1});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {b: 1}});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "english"});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 2});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "language"});
-assert(!db.getLastError());
-assert.eq(2, coll.getIndexes().length);
-
-// ensureIndex() fails if a second text index would be built.
-coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {d: 1}});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "none"});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 1});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "idioma"});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text", c: 1}, {weights: {d: 1}});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text", d: 1});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, d: "text", c: 1});
-assert(db.getLastError());
-coll.ensureIndex({b: "text"});
-assert(db.getLastError());
-coll.ensureIndex({b: "text", c: 1});
-assert(db.getLastError());
-coll.ensureIndex({a: 1, b: "text"});
-assert(db.getLastError());
-
-coll.dropIndexes();
-
-//
-// 4. Text indexes properly handle large keys.
-//
-
-coll.ensureIndex({a: "text"});
-assert(!db.getLastError());
-
-var longstring = "";
-var longstring2 = "";
-for(var i = 0; i < 1024 * 1024; ++i) {
- longstring = longstring + "a";
- longstring2 = longstring2 + "b";
-}
-coll.insert({a: longstring});
-coll.insert({a: longstring2});
-assert.eq(1, coll.find({$text: {$search: longstring}}).itcount(), "long string not found in index");
-
-coll.drop();
diff --git a/jstests/fts_mix.js b/jstests/fts_mix.js
deleted file mode 100644
index 56da123cdc3..00000000000
--- a/jstests/fts_mix.js
+++ /dev/null
@@ -1,159 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-// test collection
-tc = db.text_mix;
-tc.drop();
-
-// creation of collection documents
-// content generated using wikipedia random article
-tc.save( { _id: 1, title: "Olivia Shakespear",text: "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr." } );
-tc.save( { _id: 2, title: "Mahim Bora", text: "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty." } );
-tc.save( { _id: 3, title: "A break away!", text: "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s." } );
-tc.save( { _id: 4, title: "Linn-Kristin Riegelhuth Koren", text: "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse." } );
-tc.save( { _id: 5, title: "Morten Jensen", text: "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller." } );
-tc.save( { _id: 6, title: "Janet Laurence", text: "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize." } );
-tc.save( { _id: 7, title: "Glen-Coats Baronets", text: "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet." } );
-tc.save( { _id: 8, title: "Grapeleaf Skeletonizer", text: "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin." } );
-tc.save( { _id: 9, title: "Physics World", text: "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter." } );
-tc.save( { _id: 10, title: "Mallacoota, Victoria", text: "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia." } );
-
-// begin tests
-
-// -------------------------------------------- INDEXING & WEIGHTING -------------------------------
-
-// start with basic index, one item with default weight
-tc.ensureIndex( { "title": "text" } );
-
-// test the single result case..
-res = tc.runCommand( "text", { search: "Victoria" } );
-assert.eq( 1, res.results.length );
-assert.eq( 10, res.results[0].obj._id );
-
-tc.dropIndexes();
-
-// now let's see about multiple fields, with specific weighting
-tc.ensureIndex( { "title": "text", "text": "text" }, { weights: { "title": 10 } } );
-assert.eq( [9,7,8], queryIDS( tc, "members physics" ) );
-
-tc.dropIndexes();
-
-// test all-1 weighting with "$**"
-tc.ensureIndex( { "$**": "text" } );
-assert.eq( [2,8,7], queryIDS( tc, "family tea estate" ) );
-
-tc.dropIndexes();
-
-// non-1 weight on "$**" + other weight specified for some field
-tc.ensureIndex( { "$**": "text" }, { weights: { "$**": 10, "text": 2 } } );
-assert.eq( [7,5], queryIDS( tc, "Olympic Games gold medal" ) );
-
-tc.dropIndexes();
-
-// -------------------------------------------- SEARCHING ------------------------------------------
-
-// go back to "$**": 1, "title": 10.. and test more specific search functionality!
-tc.ensureIndex( { "$**": "text" }, { weights: { "title": 10 } } );
-
-// -------------------------------------------- STEMMING -------------------------------------------
-
-// tests stemming for basic plural case
-res = tc.runCommand( "text", { search: "member" } );
-res2 = tc.runCommand( "text", { search: "members" } );
-assert.eq( getIDS( res ), getIDS( res2 ) );
-
-// search for something with potential 's bug.
-res = tc.runCommand( "text", { search: "magazine's" } );
-res2 = tc.runCommand( "text", { search: "magazine" } );
-assert.eq( getIDS( res ), getIDS( res2 ) );
-
-// -------------------------------------------- LANGUAGE -------------------------------------------
-
-res = tc.runCommand( "text", { search: "member", language: "spanglish" } );
-assert.commandFailed( res );
-res = tc.runCommand( "text", { search: "member", language: "english" } );
-assert.commandWorked( res );
-
-// -------------------------------------------- LIMIT RESULTS --------------------------------------
-
-// ensure limit limits results
-assert.eq( [2], queryIDS( tc, "rural river dam", null , { limit : 1 } ) );
-
-// ensure top results are the same regardless of limit
-// make sure that this uses a case where it wouldn't be otherwise..
-res = tc.runCommand( "text", { search: "united kingdom british princes", limit: 1 } );
-res2 = tc.runCommand( "text", { search: "united kingdom british princes" } );
-assert.eq( 1, res.results.length );
-assert.eq( 4, res2.results.length );
-assert.eq( res.results[0].obj._id, res2.results[0].obj._id );
-
-// -------------------------------------------- PROJECTION -----------------------------------------
-
-// test projection.. show just title and id
-res = tc.runCommand( "text", { search: "Morten Jensen", project: { title: 1 } } );
-assert.eq( 1, res.results.length );
-assert.eq( 5, res.results[0].obj._id );
-assert.eq( null, res.results[0].obj.text );
-assert.neq( null, res.results[0].obj.title );
-assert.neq( null, res.results[0].obj._id );
-
-// test negative projection, ie. show everything but text
-res = tc.runCommand( "text", { search: "handball", project: { text: 0 } } );
-assert.eq( 1, res.results.length );
-assert.eq( 4, res.results[0].obj._id );
-assert.eq( null, res.results[0].obj.text );
-assert.neq( null, res.results[0].obj.title );
-assert.neq( null, res.results[0].obj._id );
-
-// test projection only title, no id
-res = tc.runCommand( "text", { search: "Mahim Bora", project: { _id: 0, title: 1 } } );
-assert.eq( 1, res.results.length );
-assert.eq( "Mahim Bora", res.results[0].obj.title );
-assert.eq( null, res.results[0].obj.text );
-assert.neq( null, res.results[0].obj.title );
-assert.eq( null, res.results[0].obj._id );
-
-// -------------------------------------------- NEGATION -------------------------------------------
-
-// test negation
-assert.eq( [8], queryIDS( tc, "United -Kingdom" ) );
-assert.eq( -1, tc.findOne( { _id : 8 } ).text.search(/Kingdom/i) );
-
-// test negation edge cases... hyphens, double dash, etc.
-assert.eq( [4], queryIDS( tc, "Linn-Kristin" ) );
-
-// -------------------------------------------- PHRASE MATCHING ------------------------------------
-
-// test exact phrase matching on
-assert.eq( [7], queryIDS( tc, "\"Summer Olympics\"" ) );
-assert.neq( -1, tc.findOne( { _id: 7 } ).text.indexOf("Summer Olympics") );
-
-// phrasematch with other stuff.. negation, other terms, etc.
-assert.eq( [10], queryIDS( tc, "\"wild flowers\" Sydney" ) );
-
-assert.eq( [3], queryIDS( tc, "\"industry\" -Melbourne -Physics" ) );
-
-// -------------------------------------------- EDGE CASES -----------------------------------------
-
-// test empty string
-res = tc.runCommand( "text", { search: "" } );
-assert.eq( 0, res.ok )
-
-// test string with a space in it
-res = tc.runCommand( "text", { search: " " } );
-assert.eq( 0, res.results.length );
-
-// -------------------------------------------- FILTERING ------------------------------------------
-
-assert.eq( [2], queryIDS( tc, "Mahim" ) );
-assert.eq( [2], queryIDS( tc, "Mahim", { _id: 2 } ) );
-assert.eq( [], queryIDS( tc, "Mahim", { _id: 1 } ) );
-assert.eq( [], queryIDS( tc, "Mahim", { _id: { $gte: 4 } } ) );
-assert.eq( [2], queryIDS( tc, "Mahim", { _id: { $lte: 4 } } ) );
-
-// using regex conditional filtering
-assert.eq( [9], queryIDS( tc, "members", { title: { $regex: /Phy.*/i } } ) );
-
-// -------------------------------------------------------------------------------------------------
-
-assert( tc.validate().valid );
diff --git a/jstests/fts_partition1.js b/jstests/fts_partition1.js
deleted file mode 100644
index f1b4c437c3c..00000000000
--- a/jstests/fts_partition1.js
+++ /dev/null
@@ -1,23 +0,0 @@
-load( "jstests/libs/fts.js" )
-
-t = db.text_parition1;
-t.drop();
-
-t.insert( { _id : 1 , x : 1 , y : "foo" } );
-t.insert( { _id : 2 , x : 1 , y : "bar" } );
-t.insert( { _id : 3 , x : 2 , y : "foo" } );
-t.insert( { _id : 4 , x : 2 , y : "bar" } );
-
-t.ensureIndex( { x : 1, y : "text" } );
-
-res = t.runCommand( "text", { search : "foo" } );
-assert.eq( 0, res.ok, tojson(res) );
-
-assert.eq( [ 1 ], queryIDS( t, "foo" , { x : 1 } ) );
-
-res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } } );
-assert( res.results[0].score > 0, tojson( res ) )
-
-// repeat search with "language" specified, SERVER-8999
-res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } , language : "english" } );
-assert( res.results[0].score > 0, tojson( res ) )
diff --git a/jstests/fts_partition_no_multikey.js b/jstests/fts_partition_no_multikey.js
deleted file mode 100644
index a6320cc4a9e..00000000000
--- a/jstests/fts_partition_no_multikey.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.fts_partition_no_multikey;
-t.drop();
-
-t.ensureIndex( { x : 1, y : "text" } )
-
-t.insert( { x : 5 , y : "this is fun" } );
-assert.isnull( db.getLastError() );
-
-t.insert( { x : [] , y : "this is fun" } );
-assert( db.getLastError() );
-
-t.insert( { x : [1] , y : "this is fun" } );
-assert( db.getLastError() );
-
-t.insert( { x : [1,2] , y : "this is fun" } );
-assert( db.getLastError() );
diff --git a/jstests/fts_phrase.js b/jstests/fts_phrase.js
deleted file mode 100644
index 0b58bef817e..00000000000
--- a/jstests/fts_phrase.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-t = db.text_phrase;
-t.drop()
-
-t.save( { _id : 1 , title : "my blog post" , text : "i am writing a blog. yay" } );
-t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am typing. yay" } );
-t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
-
-t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
-
-res = t.runCommand( "text" , { search : "blog write" } );
-assert.eq( 3, res.results.length );
-assert.eq( 1, res.results[0].obj._id );
-assert( res.results[0].score > (res.results[1].score*2), tojson(res) );
-
-res = t.runCommand( "text" , { search : "write blog" } );
-assert.eq( 3, res.results.length );
-assert.eq( 1, res.results[0].obj._id );
-assert( res.results[0].score > (res.results[1].score*2), tojson(res) );
-
-
-
-
-
-
diff --git a/jstests/fts_proj.js b/jstests/fts_proj.js
deleted file mode 100644
index 1ecc6688d1b..00000000000
--- a/jstests/fts_proj.js
+++ /dev/null
@@ -1,20 +0,0 @@
-t = db.text_proj;
-t.drop();
-
-t.save( { _id : 1 , x : "a", y: "b", z : "c"});
-t.save( { _id : 2 , x : "d", y: "e", z : "f"});
-t.save( { _id : 3 , x : "a", y: "g", z : "h"});
-
-t.ensureIndex( { x : "text"} , { default_language : "none" } );
-
-res = t.runCommand("text", {search : "a"});
-assert.eq( 2, res.results.length );
-assert( res.results[0].obj.y, tojson(res) );
-
-res = t.runCommand("text", {search : "a", project: {x: 1}});
-assert.eq( 2, res.results.length );
-assert( !res.results[0].obj.y, tojson(res) );
-
-
-
-
diff --git a/jstests/fts_projection.js b/jstests/fts_projection.js
deleted file mode 100644
index 9bdb9dbca8a..00000000000
--- a/jstests/fts_projection.js
+++ /dev/null
@@ -1,99 +0,0 @@
-// Test $text with $textScore projection.
-
-var t = db.getSiblingDB("test").getCollection("fts_projection");
-t.drop();
-
-db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
-
-t.insert({_id: 0, a: "textual content"});
-t.insert({_id: 1, a: "additional content", b: -1});
-t.insert({_id: 2, a: "irrelevant content"});
-t.ensureIndex({a:"text"});
-
-// Project the text score.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {_idCopy:0, score:{$meta: "textScore"}}).toArray();
-// printjson(results);
-// Scores should exist.
-assert.eq(results.length, 2);
-assert(results[0].score);
-assert(results[1].score);
-
-// indexed by _id.
-var scores = [0, 0, 0];
-scores[results[0]._id] = results[0].score;
-scores[results[1]._id] = results[1].score;
-
-//
-// Edge/error cases:
-//
-
-// Project text score into 2 fields.
-results = t.find({$text: {$search: "textual content -irrelevant"}}, {otherScore: {$meta: "textScore"}, score:{$meta: "textScore"}}).toArray();
-assert.eq(2, results.length);
-for (var i = 0; i < results.length; ++i) {
- assert.close(scores[results[i]._id], results[i].score);
- assert.close(scores[results[i]._id], results[i].otherScore);
-}
-
-// printjson(results);
-
-// Project text score into "x.$" shouldn't crash
-assert.throws(function() { t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}}).toArray(); });
-
-// TODO: We can't project 'x.y':1 and 'x':1 (yet).
-
-// Clobber an existing field and behave nicely.
-results = t.find({$text: {$search: "textual content -irrelevant"}},
- {b: {$meta: "textScore"}}).toArray();
-assert.eq(2, results.length);
-for (var i = 0; i < results.length; ++i) {
- assert.close(scores[results[i]._id], results[i].b,
- i + ': existing field in ' + tojson(results[i], '', true) +
- ' is not clobbered with score');
-}
-
-assert.neq(-1, results[0].b);
-
-// Don't crash if we have no text score.
-var results = t.find({a: /text/}, {score: {$meta: "textScore"}}).toArray();
-// printjson(results);
-
-// No textScore proj. with nested fields
-assert.throws(function() { t.find({$text: {$search: "blah"}}, {'x.y':{$meta: "textScore"}}).toArray(); });
-
-// SERVER-12173
-// When $text operator is in $or, should evaluate first
-results = t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {_id: 1}]},
- {_idCopy:0, score:{$meta: "textScore"}}).toArray();
-printjson(results);
-assert.eq(2, results.length);
-for (var i = 0; i < results.length; ++i) {
- assert.close(scores[results[i]._id], results[i].score,
- i + ': TEXT under OR invalid score: ' + tojson(results[i], '', true));
-}
-
-// SERVER-12592
-// When $text operator is in $or, all non-$text children must be indexed. Otherwise, we should produce
-// a readable error.
-var errorMessage = '';
-assert.throws( function() {
- try {
- t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {b: 1}]}).itcount();
- }
- catch (e) {
- errorMessage = e;
- throw e;
- }
-}, null, 'Expected error from failed TEXT under OR planning');
-assert.neq(-1, errorMessage.indexOf('TEXT'),
- 'message from failed text planning does not mention TEXT: ' + errorMessage);
-assert.neq(-1, errorMessage.indexOf('OR'),
- 'message from failed text planning does not mention OR: ' + errorMessage);
-
-// Scores should exist.
-assert.eq(results.length, 2);
-assert(results[0].score,
- "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
-assert(results[1].score,
- "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
-
diff --git a/jstests/fts_querylang.js b/jstests/fts_querylang.js
deleted file mode 100644
index 2a139f5b766..00000000000
--- a/jstests/fts_querylang.js
+++ /dev/null
@@ -1,93 +0,0 @@
-// Test $text query operator.
-
-var t = db.getSiblingDB("test").getCollection("fts_querylang");
-var cursor;
-var results;
-
-db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
-
-t.drop();
-
-t.insert({_id: 0, unindexedField: 0, a: "textual content"});
-t.insert({_id: 1, unindexedField: 1, a: "additional content"});
-t.insert({_id: 2, unindexedField: 2, a: "irrelevant content"});
-t.ensureIndex({a: "text"});
-
-// Test text query with no results.
-assert.eq(false, t.find({$text: {$search: "words"}}).hasNext());
-
-// Test basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).toArray();
-assert.eq(results.length, 2);
-assert.neq(results[0]._id, 2);
-assert.neq(results[1]._id, 2);
-
-// Test sort with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray();
-assert.eq(results.length, 2);
-assert.eq(results[0]._id, 0);
-assert.eq(results[1]._id, 1);
-
-// Test skip with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).skip(1).toArray();
-assert.eq(results.length, 1);
-assert.eq(results[0]._id, 1);
-
-// Test limit with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).limit(1).toArray();
-assert.eq(results.length, 1);
-assert.eq(results[0]._id, 0);
-
-// TODO Test basic text query with sort, once sort is enabled in the new query framework.
-
-// TODO Test basic text query with projection, once projection is enabled in the new query
-// framework.
-
-// Test $and of basic text query with indexed expression.
-results = t.find({$text: {$search: "content -irrelevant"},
- _id: 1}).toArray();
-assert.eq(results.length, 1);
-assert.eq(results[0]._id, 1);
-
-// Test $and of basic text query with unindexed expression.
-results = t.find({$text: {$search: "content -irrelevant"},
- unindexedField: 1}).toArray();
-assert.eq(results.length, 1);
-assert.eq(results[0]._id, 1);
-
-// TODO Test invalid inputs for $text, $search, $language.
-
-// Test $language.
-cursor = t.find({$text: {$search: "contents", $language: "none"}});
-assert.eq(false, cursor.hasNext());
-
-cursor = t.find({$text: {$search: "contents", $language: "EN"}});
-assert.eq(true, cursor.hasNext());
-
-cursor = t.find({$text: {$search: "contents", $language: "spanglish"}});
-assert.throws(function() { cursor.next() });
-
-// TODO Test $and of basic text query with geo expression.
-
-// Test update with $text.
-t.update({$text: {$search: "textual content -irrelevant"}}, {$set: {b: 1}}, {multi: true});
-assert.eq(2, t.find({b: 1}).itcount(),
- 'incorrect number of documents updated');
-
-// TODO Test remove with $text, once it is enabled with the new query framework.
-
-// TODO Test count with $text, once it is enabled with the new query framework.
-
-// TODO Test findAndModify with $text, once it is enabled with the new query framework.
-
-// TODO Test aggregate with $text, once it is enabled with the new query framework.
-
-// TODO Test that old query framework rejects $text queries.
-
-// TODO Test that $text fails without a text index.
-
-// TODO Test that $text accepts a hint of the text index.
-
-// TODO Test that $text fails if a different index is hinted.
-
-// TODO Test $text with {$natural:1} sort, {$natural:1} hint.
diff --git a/jstests/fts_score_sort.js b/jstests/fts_score_sort.js
deleted file mode 100644
index 59fb852a774..00000000000
--- a/jstests/fts_score_sort.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test sorting with text score metadata.
-
-var t = db.getSiblingDB("test").getCollection("fts_score_sort");
-t.drop();
-
-db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
-
-t.insert({_id: 0, a: "textual content"});
-t.insert({_id: 1, a: "additional content"});
-t.insert({_id: 2, a: "irrelevant content"});
-t.ensureIndex({a:"text"});
-
-// Sort by the text score.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({score: {$meta: "textScore"}}).toArray();
-// printjson(results);
-assert.eq(results.length, 2);
-assert.eq(results[0]._id, 0);
-assert.eq(results[1]._id, 1);
-assert(results[0].score > results[1].score);
-
-// Sort by {_id descending, score} and verify the order is right.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({_id: -1, score: {$meta: "textScore"}}).toArray();
-printjson(results);
-assert.eq(results.length, 2);
-assert.eq(results[0]._id, 1);
-assert.eq(results[1]._id, 0);
-// Note the reversal from above.
-assert(results[0].score < results[1].score);
diff --git a/jstests/fts_spanish.js b/jstests/fts_spanish.js
deleted file mode 100644
index cdf73343b5f..00000000000
--- a/jstests/fts_spanish.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-load( "jstests/libs/fts.js" );
-
-t = db.text_spanish;
-t.drop();
-
-t.save( { _id: 1, title: "mi blog", text: "Este es un blog de prueba" } );
-t.save( { _id: 2, title: "mi segundo post", text: "Este es un blog de prueba" } );
-t.save( { _id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed" } );
-t.save( { _id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english" } );
-
-// default weight is 1
-// specify weights if you want a field to be more meaningull
-t.ensureIndex( { "title": "text", text: "text" }, { weights: { title: 10 },
- default_language: "es" } );
-
-res = t.runCommand( "text", { search: "blog" } );
-assert.eq( 4, res.results.length );
-
-assert.eq( [4], queryIDS( t, "stem" ) );
-assert.eq( [3], queryIDS( t, "stemmed" ) );
-assert.eq( [4], queryIDS( t, "stemmed", null, { language : "en" } ) );
-
-assert.eq( [1,2], queryIDS( t, "prueba" ) );
-
-t.save( { _id: 5, language: "spanglish", title: "", text: "" } );
-assert( db.getLastError() );
-
-t.dropIndexes();
-t.ensureIndex( { "title": "text", text: "text" }, { default_language: "spanglish" } );
-assert( db.getLastError() );
diff --git a/jstests/geo1.js b/jstests/geo1.js
deleted file mode 100644
index 338d96eb23c..00000000000
--- a/jstests/geo1.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-t = db.geo1
-t.drop();
-
-idx = { loc : "2d" , zip : 1 }
-
-t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } )
-t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } )
-t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } )
-assert.isnull( db.getLastError() )
-
-// test "2d" has to be first
-assert.eq( 1 , t.getIndexKeys().length , "S1" );
-t.ensureIndex( { zip : 1 , loc : "2d" } );
-assert.eq( 1 , t.getIndexKeys().length , "S2" );
-
-t.ensureIndex( idx );
-assert.eq( 2 , t.getIndexKeys().length , "S3" );
-
-assert.eq( 3 , t.count() , "B1" );
-t.insert( { loc : [ 200 , 200 ] } )
-assert( db.getLastError() , "B2" )
-assert.eq( 3 , t.count() , "B3" );
-
-// test normal access
-
-wb = t.findOne( { zip : "06525" } )
-assert( wb , "C1" );
-
-assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" )
-assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" )
-// assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" )
-
-// test config options
-
-t.drop();
-
-t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } );
-t.insert( { loc : [ 200 , 200 ] } )
-assert.isnull( db.getLastError() , "D1" )
-
diff --git a/jstests/geo10.js b/jstests/geo10.js
deleted file mode 100644
index 39da09fb9ef..00000000000
--- a/jstests/geo10.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Test for SERVER-2746
-
-coll = db.geo10
-coll.drop();
-
-db.geo10.ensureIndex( { c : '2d', t : 1 }, { min : 0, max : Math.pow( 2, 40 ) } )
-assert( db.getLastError() == null, "B" )
-assert( db.system.indexes.count({ ns : "test.geo10" }) == 2, "A3" )
-
-printjson( db.system.indexes.find().toArray() )
-
-db.geo10.insert( { c : [ 1, 1 ], t : 1 } )
-assert.eq( db.getLastError(), null, "C" )
-
-db.geo10.insert( { c : [ 3600, 3600 ], t : 1 } )
-assert( db.getLastError() == null, "D" )
-
-db.geo10.insert( { c : [ 0.001, 0.001 ], t : 1 } )
-assert( db.getLastError() == null, "E" )
-
-printjson( db.geo10.find({ c : { $within : { $box : [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]] } }, t : 1 }).toArray() )
diff --git a/jstests/geo2.js b/jstests/geo2.js
deleted file mode 100644
index f9632ebd16d..00000000000
--- a/jstests/geo2.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-t = db.geo2
-t.drop();
-
-n = 1
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- t.insert( { _id : n++ , loc : [ x , y ] } )
- }
-}
-
-t.ensureIndex( { loc : "2d" } )
-
-fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
-
-function a( cur ){
- var total = 0;
- var outof = 0;
- while ( cur.hasNext() ){
- var o = cur.next();
- total += Geo.distance( [ 50 , 50 ] , o.loc );
- outof++;
- }
- return total/outof;
-}
-
-assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" )
-assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" );
-assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" );
-
-printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() )
-
-
-assert.lt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(50) ) , "C1" )
-assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 , 3 ] } } ).limit(50) ) , "C2" )
-assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] , $maxDistance : 3 } } ).limit(50) ) , "C3" )
-
-// SERVER-8974 - test if $geoNear operator works with 2d index as well
-var geoNear_cursor = t.find( { loc : { $geoNear : [50, 50] } } );
-assert.eq( geoNear_cursor.count(), 100 )
diff --git a/jstests/geo3.js b/jstests/geo3.js
deleted file mode 100644
index 47637783f5b..00000000000
--- a/jstests/geo3.js
+++ /dev/null
@@ -1,77 +0,0 @@
-
-t = db.geo3
-t.drop();
-
-n = 1
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- t.insert( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } )
- }
-}
-
-
-t.ensureIndex( { loc : "2d" } )
-
-fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
-
-// test filter
-
-filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
-assert.eq( 10 , filtered1.results.length , "B1" );
-filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } )
-//printjson( filtered1.stats );
-
-function avgA( q , len ){
- if ( ! len )
- len = 10;
- var realq = { loc : { $near : [ 50 , 50 ] } };
- if ( q )
- Object.extend( realq , q );
- var as =
- t.find( realq ).limit(len).map(
- function(z){
- return z.a;
- }
- );
- assert.eq( len , as.length , "length in avgA" );
- return Array.avg( as );
-}
-
-function testFiltering( msg ){
- assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " );
- assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " );
- assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " );
-}
-
-testFiltering( "just loc" );
-
-t.dropIndex( { loc : "2d" } )
-assert.eq( 1 , t.getIndexKeys().length , "setup 3a" )
-t.ensureIndex( { loc : "2d" , a : 1 } )
-assert.eq( 2 , t.getIndexKeys().length , "setup 3b" )
-
-filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
-assert.eq( 10 , filtered2.results.length , "B3" );
-filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } )
-
-assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" )
-assert.eq( filtered1.stats.nscanned , filtered2.stats.nscanned , "C3" )
-assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" )
-
-testFiltering( "loc and a" );
-
-t.dropIndex( { loc : "2d" , a : 1 } )
-assert.eq( 1 , t.getIndexKeys().length , "setup 4a" )
-t.ensureIndex( { loc : "2d" , b : 1 } )
-assert.eq( 2 , t.getIndexKeys().length , "setup 4b" )
-
-testFiltering( "loc and b" );
-
-
-q = { loc : { $near : [ 50 , 50 ] } }
-assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" )
-assert.eq( 100 , t.find( q ).limit(100).count() , "D2" )
-
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" )
-assert.eq( 20 , t.find( q ).limit(20).size() , "D4" )
-
diff --git a/jstests/geo4.js b/jstests/geo4.js
deleted file mode 100644
index 78404ab720c..00000000000
--- a/jstests/geo4.js
+++ /dev/null
@@ -1,10 +0,0 @@
-var t = db.geo4;
-t.drop();
-
-t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } );
-
-t.ensureIndex( { loc : "2d" }, { bits : 33 } );
-assert.eq( db.getLastError() , "bits in geo index must be between 1 and 32" , "a" );
-
-t.ensureIndex( { loc : "2d" }, { bits : 32 } );
-assert( !db.getLastError(), "b" );
diff --git a/jstests/geo5.js b/jstests/geo5.js
deleted file mode 100644
index 67b00f85b44..00000000000
--- a/jstests/geo5.js
+++ /dev/null
@@ -1,18 +0,0 @@
-t = db.geo5;
-t.drop();
-
-t.insert( { p : [ 0,0 ] } )
-t.ensureIndex( { p : "2d" } )
-
-res = t.runCommand( "geoNear" , { near : [1,1] } );
-assert.eq( 1 , res.results.length , "A1" );
-
-t.insert( { p : [ 1,1 ] } )
-t.insert( { p : [ -1,-1 ] } )
-res = t.runCommand( "geoNear" , { near : [50,50] } );
-assert.eq( 3 , res.results.length , "A2" );
-
-t.insert( { p : [ -1,-1 ] } )
-res = t.runCommand( "geoNear" , { near : [50,50] } );
-assert.eq( 4 , res.results.length , "A3" );
-
diff --git a/jstests/geo6.js b/jstests/geo6.js
deleted file mode 100644
index 185795c57ba..00000000000
--- a/jstests/geo6.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.geo6;
-t.drop();
-
-t.ensureIndex( { loc : "2d" } );
-
-assert.eq( 0 , t.find().itcount() , "pre0" );
-assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" )
-
-t.insert( { _id : 1 , loc : [ 1 , 1 ] } )
-t.insert( { _id : 2 , loc : [ 1 , 2 ] } )
-t.insert( { _id : 3 } )
-
-assert.eq( 3 , t.find().itcount() , "A1" )
-assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" )
-assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" )
-
-t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).forEach(printjson);
-assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" )
-assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B1" )
-
-
-t.insert( { _id : 4 , loc : [] } )
-assert.eq( 4 , t.find().itcount() , "C1" )
diff --git a/jstests/geo7.js b/jstests/geo7.js
deleted file mode 100644
index c220da54249..00000000000
--- a/jstests/geo7.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.geo7;
-t.drop();
-
-t.insert({_id:1,y:[1,1]})
-t.insert({_id:2,y:[1,1],z:3})
-t.insert({_id:3,y:[1,1],z:4})
-t.insert({_id:4,y:[1,1],z:5})
-
-t.ensureIndex({y:"2d",z:1})
-
-assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" );
-
-t.dropIndex({y:"2d",z:1})
-
-t.ensureIndex({y:"2d"})
-assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" );
-
-t.insert( { _id : 5 , y : 5 } );
-assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" );
diff --git a/jstests/geo8.js b/jstests/geo8.js
deleted file mode 100644
index 301f3bcc0d1..00000000000
--- a/jstests/geo8.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-t = db.geo8
-t.drop()
-
-t.insert( { loc : [ 5 , 5 ] } )
-t.insert( { loc : [ 5 , 6 ] } )
-t.insert( { loc : [ 5 , 7 ] } )
-t.insert( { loc : [ 4 , 5 ] } )
-t.insert( { loc : [ 100 , 100 ] } )
-
-t.ensureIndex( { loc : "2d" } )
-
-t.runCommand( "geoWalk" );
diff --git a/jstests/geo9.js b/jstests/geo9.js
deleted file mode 100644
index 8b6510f03b5..00000000000
--- a/jstests/geo9.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.geo9
-t.drop();
-
-t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } )
-t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } )
-t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } )
-
-t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } )
-t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } )
-t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } )
-
-t.ensureIndex( { a : "2d" } )
-t.ensureIndex( { b : "2d" } )
-
-function check( field ){
- var q = {}
- q[field] = { $near : [ 11 , 11 ] }
- arr = t.find( q ).limit(3).map(
- function(z){
- return Geo.distance( [ 11 , 11 ] , z[field] );
- }
- );
- assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field );
-}
-
-check( "a" )
-check( "b" )
diff --git a/jstests/geo_2d_explain.js b/jstests/geo_2d_explain.js
deleted file mode 100644
index 8195642aabc..00000000000
--- a/jstests/geo_2d_explain.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var t = db.geo_2d_explain;
-
-t.drop();
-
-var n = 1000;
-
-// insert n documents with integer _id, a can be 1-5, loc is close to [40, 40]
-t.drop()
-t.ensureIndex({loc: "2d", _id: 1})
-
-var x = 40;
-var y = 40;
-for (var i = 0; i < n; i++) {
- // random number in range [1, 5]
- var a = Math.floor(Math.random() * 5) + 1;
- var dist = 4.0;
- var dx = (Math.random() - 0.5) * dist;
- var dy = (Math.random() - 0.5) * dist;
- var loc = [x + dx, y + dy];
- t.save({_id: i, a: a, loc: loc});
-}
-
-var explain = t.find({loc: {$near: [40, 40]}, _id: {$lt: 50}}).explain();
-
-print('explain = ' + tojson(explain));
-
-assert.eq({}, explain.indexBounds);
-assert.eq(explain.n, explain.nscannedObjects);
-assert.lte(explain.n, explain.nscanned);
diff --git a/jstests/geo_2d_with_geojson_point.js b/jstests/geo_2d_with_geojson_point.js
deleted file mode 100644
index b5afc8b77b8..00000000000
--- a/jstests/geo_2d_with_geojson_point.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Use of GeoJSON points should be prohibited with a 2d index, SERVER-10636.
- */
-
-var t = db.geo_2d_with_geojson_point;
-t.drop();
-t.ensureIndex({loc: '2d'});
-
-var geoJSONPoint = {
- type: 'Point',
- coordinates: [0, 0]
-};
-
-print(assert.throws(
- function() {
- t.findOne({
- loc: {$near: {$geometry: geoJSONPoint}}});
- },
- [],
- 'querying 2d index with GeoJSON point.'));
diff --git a/jstests/geo_allowedcomparisons.js b/jstests/geo_allowedcomparisons.js
deleted file mode 100644
index 171178d0c7b..00000000000
--- a/jstests/geo_allowedcomparisons.js
+++ /dev/null
@@ -1,107 +0,0 @@
-// A test for what geometries can interact with what other geometries.
-t = db.geo_allowedcomparisons;
-
-// Any GeoJSON object can intersect with any geojson object.
-geojsonPoint = { "type" : "Point", "coordinates": [ 0, 0 ] };
-oldPoint = [0,0];
-
-// GeoJSON polygons can contain any geojson object and OLD points.
-geojsonPoly = { "type" : "Polygon",
- "coordinates" : [ [ [-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]]};
-
-// This can be contained by GJ polygons, intersected by anything GJ and old points.
-geojsonLine = { "type" : "LineString", "coordinates": [ [ 0, 0], [1, 1]]}
-
-// $centerSphere can contain old or new points.
-oldCenterSphere = [[0, 0], Math.PI / 180];
-// $box can contain old points.
-oldBox = [[-5,-5], [5,5]];
-// $polygon can contain old points.
-oldPolygon = [[-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]
-// $center can contain old points.
-oldCenter = [[0, 0], 1];
-
-t.drop();
-t.ensureIndex({geo: "2d"});
-// 2d doesn't know what to do w/this
-t.insert({geo: geojsonPoint});
-assert(db.getLastError());
-// Old points are OK.
-t.insert({geo: oldPoint})
-assert(!db.getLastError());
-// Lines not OK in 2d
-t.insert({geo: geojsonLine})
-assert(db.getLastError())
-// Shapes are not OK to insert in 2d
-t.insert({geo: geojsonPoly})
-assert(db.getLastError());
-t.insert({geo: oldCenterSphere})
-assert(db.getLastError());
-t.insert({geo: oldCenter})
-assert(db.getLastError());
-// If we try to insert a polygon, it thinks it's an array of points. Let's not
-// do that. Ditto for the box.
-
-// Verify that even if we can't index them, we can use them in a matcher.
-t.insert({gj: geojsonLine})
-t.insert({gj: geojsonPoly})
-geojsonPoint2 = { "type" : "Point", "coordinates": [ 0, 0.001 ] };
-t.insert({gjp: geojsonPoint2})
-
-// We convert between old and new style points.
-assert.eq(1, t.find({gjp: {$geoWithin: {$box: oldBox}}}).itcount());
-assert.eq(1, t.find({gjp: {$geoWithin: {$polygon: oldPolygon}}}).itcount());
-assert.eq(1, t.find({gjp: {$geoWithin: {$center: oldCenter}}}).itcount());
-assert.eq(1, t.find({gjp: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount())
-
-function runTests() {
- // Each find the box, the polygon, and the old point.
- assert.eq(1, t.find({geo: {$geoWithin: {$box: oldBox}}}).itcount())
- assert.eq(1, t.find({geo: {$geoWithin: {$polygon: oldPolygon}}}).itcount())
- // Each find the old point.
- assert.eq(1, t.find({geo: {$geoWithin: {$center: oldCenter}}}).itcount())
- assert.eq(1, t.find({geo: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount())
- // Using geojson with 2d-style geoWithin syntax should choke.
- assert.throws(function() { return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}})
- .itcount();})
- // Using old polygon w/new syntax should choke too.
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}})
- .itcount();})
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldBox}}})
- .itcount();})
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenter}}})
- .itcount();})
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}})
- .itcount();})
- // Even if we only have a 2d index, the 2d suitability function should
- // allow the matcher to deal with this. If we have a 2dsphere index we use it.
- assert.eq(1, t.find({geo: {$geoWithin: {$geometry: geojsonPoly}}}).itcount())
- assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoly}}}).itcount())
- assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: oldPoint}}}).itcount())
- assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoint}}}).itcount())
-}
-
-// We have a 2d index right now. Let's see what it does.
-runTests();
-
-// No index now.
-t.dropIndex({geo: "2d"})
-runTests();
-
-// 2dsphere index now.
-t.ensureIndex({geo: "2dsphere"})
-assert(!db.getLastError())
-// 2dsphere does not support arrays of points.
-t.insert({geo: [geojsonPoint2, geojsonPoint]})
-assert(db.getLastError())
-runTests();
-
-// Old stuff is not GeoJSON (or old-style point). All should fail.
-t.insert({geo: oldBox})
-assert(db.getLastError())
-t.insert({geo: oldPolygon})
-assert(db.getLastError())
-t.insert({geo: oldCenter})
-assert(db.getLastError())
-t.insert({geo: oldCenterSphere})
-assert(db.getLastError())
diff --git a/jstests/geo_array0.js b/jstests/geo_array0.js
deleted file mode 100644
index 5fe46781d1d..00000000000
--- a/jstests/geo_array0.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Make sure the very basics of geo arrays are sane by creating a few multi location docs
-t = db.geoarray
-
-function test(index) {
- t.drop();
- t.insert( { zip : "10001", loc : { home : [ 10, 10 ], work : [ 50, 50 ] } } )
- t.insert( { zip : "10002", loc : { home : [ 20, 20 ], work : [ 50, 50 ] } } )
- t.insert( { zip : "10003", loc : { home : [ 30, 30 ], work : [ 50, 50 ] } } )
- assert.isnull( db.getLastError() )
-
- if (index) {
- t.ensureIndex( { loc : "2d", zip : 1 } );
- assert.isnull( db.getLastError() )
- assert.eq( 2, t.getIndexKeys().length )
- }
-
- t.insert( { zip : "10004", loc : { home : [ 40, 40 ], work : [ 50, 50 ] } } )
- assert.isnull( db.getLastError() )
-
- // test normal access
- printjson( t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
- assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
- assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
-}
-
-//test(false); // this was removed as part of SERVER-6400
-test(true)
diff --git a/jstests/geo_array1.js b/jstests/geo_array1.js
deleted file mode 100644
index ca61050c888..00000000000
--- a/jstests/geo_array1.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Make sure many locations in one doc works, in the form of an array
-
-t = db.geoarray1
-function test(index) {
- t.drop();
-
- var locObj = []
- // Add locations everywhere
- for ( var i = 0; i < 10; i++ ) {
- for ( var j = 0; j < 10; j++ ) {
- if ( j % 2 == 0 )
- locObj.push( [ i, j ] )
- else
- locObj.push( { x : i, y : j } )
- }
- }
-
- // Add docs with all these locations
- for( var i = 0; i < 300; i++ ){
- t.insert( { loc : locObj } )
- }
-
- if (index) {
- t.ensureIndex( { loc : "2d" } )
- }
-
- // Pull them back
- for ( var i = 0; i < 10; i++ ) {
- for ( var j = 0; j < 10; j++ ) {
- assert.eq(300, t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5 ],
- [i + 0.5,j + 0.5]]}}})
- .count())
- }
- }
-}
-
-test(true)
-test(false)
diff --git a/jstests/geo_array2.js b/jstests/geo_array2.js
deleted file mode 100644
index 0e8d57dd855..00000000000
--- a/jstests/geo_array2.js
+++ /dev/null
@@ -1,163 +0,0 @@
-// Check the semantics of near calls with multiple locations
-
-t = db.geoarray2
-t.drop();
-
-var numObjs = 10;
-var numLocs = 100;
-
-// Test the semantics of near / nearSphere / etc. queries with multiple keys per object
-
-for( var i = -1; i < 2; i++ ){
- for(var j = -1; j < 2; j++ ){
-
- locObj = []
-
- if( i != 0 || j != 0 )
- locObj.push( { x : i * 50 + Random.rand(),
- y : j * 50 + Random.rand() } )
- locObj.push( { x : Random.rand(),
- y : Random.rand() } )
- locObj.push( { x : Random.rand(),
- y : Random.rand() } )
-
- t.insert({ name : "" + i + "" + j , loc : locObj , type : "A" })
- t.insert({ name : "" + i + "" + j , loc : locObj , type : "B" })
- }
-}
-
-t.ensureIndex({ loc : "2d" , type : 1 })
-
-assert.isnull( db.getLastError() )
-
-print( "Starting testing phase... ")
-
-for( var t = 0; t < 2; t++ ){
-
-var type = t == 0 ? "A" : "B"
-
-for( var i = -1; i < 2; i++ ){
- for(var j = -1; j < 2; j++ ){
-
- var center = [ i * 50 , j * 50 ]
- var count = i == 0 && j == 0 ? 9 : 1
- var objCount = 1
-
- // Do near check
-
- var nearResults = db.runCommand( { geoNear : "geoarray2" ,
- near : center ,
- num : count,
- query : { type : type } } ).results
- //printjson( nearResults )
-
- var objsFound = {}
- var lastResult = 0;
- for( var k = 0; k < nearResults.length; k++ ){
-
- // All distances should be small, for the # of results
- assert.gt( 1.5 , nearResults[k].dis )
- // Distances should be increasing
- assert.lte( lastResult, nearResults[k].dis )
- // Objs should be of the right type
- assert.eq( type, nearResults[k].obj.type )
-
- lastResult = nearResults[k].dis
-
- var objKey = "" + nearResults[k].obj._id
-
- if( objKey in objsFound ) objsFound[ objKey ]++
- else objsFound[ objKey ] = 1
-
- }
-
- // Make sure we found the right objects each time
- // Note: Multiple objects could be found for diff distances.
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] )
- }
-
-
- // Do nearSphere check
-
- // Earth Radius
- var eRad = 6371
-
- nearResults = db.geoarray2.find( { loc : { $nearSphere : center , $maxDistance : 500 /* km */ / eRad }, type : type } ).toArray()
-
- assert.eq( nearResults.length , count )
-
- objsFound = {}
- lastResult = 0;
- for( var k = 0; k < nearResults.length; k++ ){
- var objKey = "" + nearResults[k]._id
- if( objKey in objsFound ) objsFound[ objKey ]++
- else objsFound[ objKey ] = 1
-
- }
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] )
- }
-
-
-
- // Within results do not return duplicate documents
-
- var count = i == 0 && j == 0 ? 9 : 1
- var objCount = i == 0 && j == 0 ? 1 : 1
-
- // Do within check
- objsFound = {}
-
- var box = [ [center[0] - 1, center[1] - 1] , [center[0] + 1, center[1] + 1] ]
-
- //printjson( box )
-
- var withinResults = db.geoarray2.find({ loc : { $within : { $box : box } } , type : type }).toArray()
-
- assert.eq( withinResults.length , count )
-
- for( var k = 0; k < withinResults.length; k++ ){
- var objKey = "" + withinResults[k]._id
- if( objKey in objsFound ) objsFound[ objKey ]++
- else objsFound[ objKey ] = 1
- }
-
- //printjson( objsFound )
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] )
- }
-
-
- // Do within check (circle)
- objsFound = {}
-
- withinResults = db.geoarray2.find({ loc : { $within : { $center : [ center, 1.5 ] } } , type : type }).toArray()
-
- assert.eq( withinResults.length , count )
-
- for( var k = 0; k < withinResults.length; k++ ){
- var objKey = "" + withinResults[k]._id
- if( objKey in objsFound ) objsFound[ objKey ]++
- else objsFound[ objKey ] = 1
- }
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] )
- }
-
-
-
- }
-}
-
-}
-
-
-
-
diff --git a/jstests/geo_borders.js b/jstests/geo_borders.js
deleted file mode 100644
index 953850ad7f3..00000000000
--- a/jstests/geo_borders.js
+++ /dev/null
@@ -1,168 +0,0 @@
-t = db.borders
-t.drop()
-
-epsilon = 0.0001;
-
-// For these tests, *required* that step ends exactly on max
-min = -1
-max = 1
-step = 1
-numItems = 0;
-
-for ( var x = min; x <= max; x += step ) {
- for ( var y = min; y <= max; y += step ) {
- t.insert( { loc : { x : x, y : y } } )
- numItems++;
- }
-}
-
-overallMin = -1
-overallMax = 1
-
-// Create a point index slightly smaller than the points we have
-t.ensureIndex( { loc : "2d" }, { max : overallMax - epsilon / 2, min : overallMin + epsilon / 2 } )
-assert( db.getLastError() )
-
-// Create a point index only slightly bigger than the points we have
-t.ensureIndex( { loc : "2d" }, { max : overallMax + epsilon, min : overallMin - epsilon } )
-assert.isnull( db.getLastError() )
-
-// ************
-// Box Tests
-// ************
-
-// If the bounds are bigger than the box itself, just clip at the borders
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMin - 2 * epsilon, overallMin - 2 * epsilon ],
- [ overallMax + 2 * epsilon, overallMax + 2 * epsilon ] ] } } } ).count() );
-
-// Check this works also for bounds where only a single dimension is off-bounds
-assert.eq( numItems - 5, t.find(
- { loc : { $within : { $box : [
- [ overallMin - 2 * epsilon, overallMin - 0.5 * epsilon ],
- [ overallMax - epsilon, overallMax - epsilon ] ] } } } ).count() );
-
-// Make sure we can get at least close to the bounds of the index
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ],
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
-
-// Make sure we can get at least close to the bounds of the index
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() );
-
-// Check that swapping min/max has good behavior
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() );
-
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMin - epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
-
-// **************
-// Circle tests
-// **************
-
-center = ( overallMax + overallMin ) / 2
-center = [ center, center ]
-radius = overallMax
-
-offCenter = [ center[0] + radius, center[1] + radius ]
-onBounds = [ offCenter[0] + epsilon, offCenter[1] + epsilon ]
-offBounds = [ onBounds[0] + epsilon, onBounds[1] + epsilon ]
-onBoundsNeg = [ -onBounds[0], -onBounds[1] ]
-
-// Make sure we can get all points when radius is exactly at full bounds
-assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + epsilon ] } } } ).count() );
-
-// Make sure we can get points when radius is over full bounds
-assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + 2 * epsilon ] } } } ).count() );
-
-// Make sure we can get points when radius is over full bounds, off-centered
-assert.lt( 0, t.find( { loc : { $within : { $center : [ offCenter, radius + 2 * epsilon ] } } } ).count() );
-
-// Make sure we get correct corner point when center is in bounds
-// (x bounds wrap, so could get other corner)
-cornerPt = t.findOne( { loc : { $within : { $center : [ offCenter, step / 2 ] } } } );
-assert.eq( cornerPt.loc.y, overallMax )
-
-// Make sure we get correct corner point when center is on bounds
-// NOTE: Only valid points on MIN bounds
-cornerPt = t
- .findOne( { loc : { $within : { $center : [ onBoundsNeg, Math.sqrt( 2 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
-assert.eq( cornerPt.loc.y, overallMin )
-
-// Make sure we can't get corner point when center is over bounds
-try {
- t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
- assert( false )
-} catch (e) {
-}
-
-// Make sure we can't get corner point when center is on max bounds
-try {
- t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
- assert( false )
-} catch (e) {
-}
-
-// ***********
-// Near tests
-// ***********
-
-// Make sure we can get all nearby points to point in range
-assert.eq( overallMax, t.find( { loc : { $near : offCenter } } ).next().loc.y );
-
-// Make sure we can get all nearby points to point on boundary
-assert.eq( overallMin, t.find( { loc : { $near : onBoundsNeg } } ).next().loc.y );
-
-// Make sure we can't get all nearby points to point over boundary
-try {
- t.findOne( { loc : { $near : offBounds } } )
- assert( false )
-} catch (e) {
-}
-// Make sure we can't get all nearby points to point on max boundary
-try {
- t.findOne( { loc : { $near : onBoundsNeg } } )
- assert( false )
-} catch (e) {
-}
-
-// Make sure we can get all nearby points within one step (4 points in top
-// corner)
-assert.eq( 4, t.find( { loc : { $near : offCenter, $maxDistance : step * 1.9 } } ).count() );
-
-// **************
-// Command Tests
-// **************
-// Make sure we can get all nearby points to point in range
-assert.eq( overallMax, db.runCommand( { geoNear : "borders", near : offCenter } ).results[0].obj.loc.y );
-
-// Make sure we can get all nearby points to point on boundary
-assert.eq( overallMin, db.runCommand( { geoNear : "borders", near : onBoundsNeg } ).results[0].obj.loc.y );
-
-// Make sure we can't get all nearby points to point over boundary
-try {
- db.runCommand( { geoNear : "borders", near : offBounds } ).results.length
- assert( false )
-} catch (e) {
-}
-
-// Make sure we can't get all nearby points to point on max boundary
-try {
- db.runCommand( { geoNear : "borders", near : onBounds } ).results.length
- assert( false )
-} catch (e) {
-}
-
-// Make sure we can get all nearby points within one step (4 points in top
-// corner)
-assert.eq( 4, db.runCommand( { geoNear : "borders", near : offCenter, maxDistance : step * 1.5 } ).results.length );
diff --git a/jstests/geo_box1.js b/jstests/geo_box1.js
deleted file mode 100644
index 5ef335158e1..00000000000
--- a/jstests/geo_box1.js
+++ /dev/null
@@ -1,43 +0,0 @@
-
-t = db.geo_box1;
-t.drop();
-
-num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-searches = [
- [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
- [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
-];
-
-
-for ( i=0; i<searches.length; i++ ){
- b = searches[i];
- //printjson( b );
-
- q = { loc : { $within : { $box : b } } }
- numWanetd = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
- assert.eq( numWanetd , t.find(q).itcount() , "itcount: " + tojson( q ) );
- printjson( t.find(q).explain() )
-}
-
-
-
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" )
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" )
-
-
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" )
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" )
-
-assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" )
-
diff --git a/jstests/geo_box1_noindex.js b/jstests/geo_box1_noindex.js
deleted file mode 100644
index 8c1ae87e08e..00000000000
--- a/jstests/geo_box1_noindex.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// SERVER-7343: allow $within without a geo index.
-t = db.geo_box1_noindex;
-t.drop();
-
-num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- }
-}
-
-searches = [
- [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
- [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
-];
-
-for ( i=0; i<searches.length; i++ ){
- b = searches[i];
- q = { loc : { $within : { $box : b } } }
- numWanted = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
- assert.eq( numWanted , t.find(q).itcount() , "itcount: " + tojson( q ) );
- printjson( t.find(q).explain() )
-}
-
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" )
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" )
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" )
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" )
-assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" )
diff --git a/jstests/geo_box2.js b/jstests/geo_box2.js
deleted file mode 100644
index 4905a6cda8d..00000000000
--- a/jstests/geo_box2.js
+++ /dev/null
@@ -1,52 +0,0 @@
-
-t = db.geo_box2;
-
-t.drop()
-
-for (i=1; i<10; i++) {
- for(j=1; j<10; j++) {
- t.insert({loc : [i,j]});
- }
-}
-
-t.ensureIndex({"loc" : "2d"} )
-assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "A1" );
-
-t.dropIndex( { "loc" : "2d" } )
-
-t.ensureIndex({"loc" : "2d"} , {"min" : 0, "max" : 10})
-assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "B1" );
-
-// 'indexBounds.loc' in explain output should be filled in with at least
-// one bounding box.
-// Actual values is dependent on implementation of 2d execution stage.
-var explain = t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).explain(true);
-print( 'explain = ' + tojson(explain) );
-assert.neq( undefined, explain.indexBounds.loc, "C1" );
-assert.gt( explain.indexBounds.loc.length, 0, "C2" );
-
-// Check covering.
-var covering = explain.indexBounds.loc[0];
-for (var i = 1; i < explain.indexBounds.loc.length; ++i) {
- var currentBox = explain.indexBounds.loc[i];
- // min X
- covering[0][0] = Math.min(covering[0][0], currentBox[0][0]);
- // min Y
- covering[0][1] = Math.min(covering[0][1], currentBox[0][1]);
- // max X
- covering[1][0] = Math.max(covering[1][0], currentBox[1][0]);
- // max Y
- covering[1][1] = Math.max(covering[1][1], currentBox[1][1]);
-}
-print('covering computed from index bounds = ' +
- '(' + covering[0][0] + ',' + covering[0][1] + ') -->> ' +
- '(' + covering[1][0] + ',' + covering[1][1] + ')');
-// Compare covering against $box coordinates.
-// min X
-assert.lte(covering[0][0], 4);
-// min Y
-assert.lte(covering[0][1], 4);
-// max X
-assert.gte(covering[1][0], 6);
-// max Y
-assert.gte(covering[1][1], 6);
diff --git a/jstests/geo_box3.js b/jstests/geo_box3.js
deleted file mode 100644
index 8941f637518..00000000000
--- a/jstests/geo_box3.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// How to construct a test to stress the flaw in SERVER-994:
-// construct an index, think up a bounding box inside the index that
-// doesn't include the center of the index, and put a point inside the
-// bounding box.
-
-// This is the bug reported in SERVER-994.
-t=db.geo_box3;
-t.drop();
-t.insert({ point : { x : -15000000, y : 10000000 } });
-t.ensureIndex( { point : "2d" } , { min : -21000000 , max : 21000000 } );
-var c=t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]} } });
-assert.eq(1, c.count(), "A1");
-
-// Same thing, modulo 1000000.
-t=db.geo_box3;
-t.drop();
-t.insert({ point : { x : -15, y : 10 } });
-t.ensureIndex( { point : "2d" } , { min : -21 , max : 21 } );
-var c=t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]} } });
-assert.eq(1, c.count(), "B1");
-
-// Two more examples, one where the index is centered at the origin,
-// one not.
-t=db.geo_box3;
-t.drop();
-t.insert({ point : { x : 1.0 , y : 1.0 } });
-t.ensureIndex( { point : "2d" } , { min : -2 , max : 2 } );
-var c=t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]} } });
-assert.eq(1, c.count(), "C1");
-
-t=db.geo_box3;
-t.drop();
-t.insert({ point : { x : 3.9 , y : 3.9 } });
-t.ensureIndex( { point : "2d" } , { min : 0 , max : 4 } );
-var c=t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]} } });
-assert.eq(1, c.count(), "D1");
diff --git a/jstests/geo_center_sphere1.js b/jstests/geo_center_sphere1.js
deleted file mode 100644
index 9f5eaec8764..00000000000
--- a/jstests/geo_center_sphere1.js
+++ /dev/null
@@ -1,96 +0,0 @@
-t = db.geo_center_sphere1;
-
-function test(index) {
- t.drop();
- skip = 3 // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4)
-
- searches = [
- // x , y rad
- [ [ 5 , 0 ] , 0.05 ] , // ~200 miles
- [ [ 135 , 0 ] , 0.05 ] ,
-
- [ [ 5 , 70 ] , 0.05 ] ,
- [ [ 135 , 70 ] , 0.05 ] ,
- [ [ 5 , 85 ] , 0.05 ] ,
-
- [ [ 20 , 0 ] , 0.25 ] , // ~1000 miles
- [ [ 20 , -45 ] , 0.25 ] ,
- [ [ -20 , 60 ] , 0.25 ] ,
- [ [ -20 , -70 ] , 0.25 ] ,
- ];
- correct = searches.map( function(z){ return []; } );
-
- num = 0;
-
- for ( x=-179; x<=179; x += skip ){
- for ( y=-89; y<=89; y += skip ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- for ( i=0; i<searches.length; i++ ){
- if ( Geo.sphereDistance( [ x , y ] , searches[i][0] ) <= searches[i][1])
- correct[i].push( o );
- }
- }
- gc(); // needed with low skip values
- }
-
- if (index) {
- t.ensureIndex( { loc : index } );
- }
-
- for ( i=0; i<searches.length; i++ ){
- print('------------');
- print( tojson( searches[i] ) + "\t" + correct[i].length )
- q = { loc : { $within : { $centerSphere : searches[i] } } }
-
- //correct[i].forEach( printjson )
- //printjson( q );
- //t.find( q ).forEach( printjson )
-
- //printjson(t.find( q ).explain())
-
- //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
- //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
-
- var numExpected = correct[i].length
- var x = correct[i].map( function(z){ return z._id; } )
- var y = t.find(q).map( function(z){ return z._id; } )
-
- missing = [];
- epsilon = 0.001; // allow tenth of a percent error due to conversions
- for (var j=0; j<x.length; j++){
- if (!Array.contains(y, x[j])){
- missing.push(x[j]);
- var obj = t.findOne({_id: x[j]});
- var dist = Geo.sphereDistance(searches[i][0], obj.loc);
- print("missing: " + tojson(obj) + " " + dist)
- if ((Math.abs(dist - searches[i][1]) / dist) < epsilon)
- numExpected -= 1;
- }
- }
- for (var j=0; j<y.length; j++){
- if (!Array.contains(x, y[j])){
- missing.push(y[j]);
- var obj = t.findOne({_id: y[j]});
- var dist = Geo.sphereDistance(searches[i][0], obj.loc);
- print("extra: " + tojson(obj) + " " + dist)
- if ((Math.abs(dist - searches[i][1]) / dist) < epsilon)
- numExpected += 1;
- }
- }
-
-
- assert.eq( numExpected , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( numExpected , t.find( q ).count() , "count : " + tojson( searches[i] ) );
- if (index == "2d") {
- var explain = t.find( q ).explain();
- print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
- assert.gt( numExpected * 2 , explain.nscanned ,
- "nscanned : " + tojson( searches[i] ) )
- }
- }
-}
-
-test("2d")
-test("2dsphere")
-test(false)
diff --git a/jstests/geo_center_sphere2.js b/jstests/geo_center_sphere2.js
deleted file mode 100644
index 5e17790e668..00000000000
--- a/jstests/geo_center_sphere2.js
+++ /dev/null
@@ -1,164 +0,0 @@
-//
-// Tests the error handling of spherical queries
-// along with multi-location documents.
-// This is necessary since the error handling must manage
-// multiple documents, and so requires simultaneous testing.
-//
-
-function deg2rad(arg) { return arg * Math.PI / 180.0; }
-function rad2deg(arg) { return arg * 180.0 / Math.PI; }
-
-function computexscandist(y, maxDistDegrees) {
- return maxDistDegrees / Math.min(Math.cos(deg2rad(Math.min(89.0, y + maxDistDegrees))),
- Math.cos(deg2rad(Math.max(-89.0, y - maxDistDegrees))));
-}
-
-function pointIsOK(startPoint, radius) {
- yscandist = rad2deg(radius) + 0.01;
- xscandist = computexscandist(startPoint[1], yscandist);
- return (startPoint[0] + xscandist < 180)
- && (startPoint[0] - xscandist > -180)
- && (startPoint[1] + yscandist < 90)
- && (startPoint[1] - yscandist > -90);
-}
-
-var numTests = 30
-
-for ( var test = 0; test < numTests; test++ ) {
-
- //var fixedTest = 6017
- //if( fixedTest ) test = fixedTest
-
- Random.srand( 1337 + test );
-
- var radius = 5000 * Random.rand() // km
- radius = radius / 6371 // radians
- var numDocs = Math.floor( 400 * Random.rand() )
- // TODO: Wrapping uses the error value to figure out what would overlap...
- var bits = Math.floor( 5 + Random.rand() * 28 )
- var maxPointsPerDoc = 50
-
- t = db.sphere
-
- var randomPoint = function() {
- return [ Random.rand() * 360 - 180, Random.rand() * 180 - 90 ];
- }
-
- // Get a start point that doesn't require wrapping
- // TODO: Are we a bit too aggressive with wrapping issues?
- var startPoint
- var ex = null
- do {
- t.drop()
- startPoint = randomPoint()
- t.ensureIndex( { loc : "2d" }, { bits : bits } )
- } while (!pointIsOK(startPoint, radius))
-
- var pointsIn = 0
- var pointsOut = 0
- var docsIn = 0
- var docsOut = 0
- var totalPoints = 0
-
- for ( var i = 0; i < numDocs; i++ ) {
-
- var numPoints = Math.floor( Random.rand() * maxPointsPerDoc + 1 )
- var docIn = false
- var multiPoint = []
-
- totalPoints += numPoints
-
- for ( var p = 0; p < numPoints; p++ ) {
- var point = randomPoint()
- multiPoint.push( point )
-
- if ( Geo.sphereDistance( startPoint, point ) <= radius ) {
- pointsIn++
- docIn = true
- } else {
- pointsOut++
- }
- }
-
- t.insert( { loc : multiPoint } )
-
- if ( docIn )
- docsIn++
- else
- docsOut++
-
- }
-
- printjson( { test: test,
- radius : radius, bits : bits, numDocs : numDocs, pointsIn : pointsIn, docsIn : docsIn, pointsOut : pointsOut,
- docsOut : docsOut } )
-
- assert.isnull( db.getLastError() )
- assert.eq( docsIn + docsOut, numDocs )
- assert.eq( pointsIn + pointsOut, totalPoints )
-
- // $centerSphere
- assert.eq( docsIn, t.find( { loc : { $within : { $centerSphere : [ startPoint, radius ] } } } ).count() )
-
- // $nearSphere
- var results = t.find( { loc : { $nearSphere : startPoint, $maxDistance : radius } } ).limit( 2 * pointsIn )
- .toArray()
-
- assert.eq( docsIn, results.length )
-
- var distance = 0;
- for ( var i = 0; i < results.length; i++ ) {
-
- var minNewDistance = radius + 1
- for( var j = 0; j < results[i].loc.length; j++ ){
- var newDistance = Geo.sphereDistance( startPoint, results[i].loc[j] )
- if( newDistance < minNewDistance && newDistance >= distance ) minNewDistance = newDistance
- }
-
- //print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is "
- // + minNewDistance + " vs " + radius )
-
- assert.lte( minNewDistance, radius )
- assert.gte( minNewDistance, distance )
- distance = minNewDistance
-
- }
-
- // geoNear
- var results = db.runCommand( {
- geoNear : "sphere", near : startPoint, maxDistance : radius, num : 2 * pointsIn, spherical : true } ).results
-
- /*
- printjson( results );
-
- for ( var j = 0; j < results[0].obj.loc.length; j++ ) {
- var newDistance = Geo.sphereDistance( startPoint, results[0].obj.loc[j] )
- if( newDistance <= radius ) print( results[0].obj.loc[j] + " : " + newDistance )
- }
- */
-
- assert.eq( docsIn, results.length )
-
- var distance = 0;
- for ( var i = 0; i < results.length; i++ ) {
- var retDistance = results[i].dis
-
- // print( "Dist from : " + results[i].loc + " to " + startPoint + " is "
- // + retDistance + " vs " + radius )
-
- var distInObj = false
- for ( var j = 0; j < results[i].obj.loc.length && distInObj == false; j++ ) {
- var newDistance = Geo.sphereDistance( startPoint, results[i].obj.loc[j] )
- distInObj = ( newDistance >= retDistance - 0.0001 && newDistance <= retDistance + 0.0001 )
- }
-
- assert( distInObj )
- assert.lte( retDistance, radius )
- assert.gte( retDistance, distance )
- distance = retDistance
- }
-
- //break;
-}
-
-
diff --git a/jstests/geo_circle1.js b/jstests/geo_circle1.js
deleted file mode 100644
index 852b60d186b..00000000000
--- a/jstests/geo_circle1.js
+++ /dev/null
@@ -1,43 +0,0 @@
-
-t = db.geo_circle1;
-t.drop();
-
-searches = [
- [ [ 5 , 5 ] , 3 ] ,
- [ [ 5 , 5 ] , 1 ] ,
- [ [ 5 , 5 ] , 5 ] ,
- [ [ 0 , 5 ] , 5 ] ,
-];
-correct = searches.map( function(z){ return []; } );
-
-num = 0;
-
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- for ( i=0; i<searches.length; i++ )
- if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
- correct[i].push( o );
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-for ( i=0; i<searches.length; i++ ){
- //print( tojson( searches[i] ) + "\t" + correct[i].length )
- q = { loc : { $within : { $center : searches[i] } } }
-
- //correct[i].forEach( printjson )
- //printjson( q );
- //t.find( q ).forEach( printjson )
-
- //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
- //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
-
- assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
- var explain = t.find( q ).explain();
- print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
- assert.gt( correct[i].length * 2 , explain.nscanned , "nscanned : " + tojson( searches[i] ) );
-}
diff --git a/jstests/geo_circle1_noindex.js b/jstests/geo_circle1_noindex.js
deleted file mode 100644
index 1569174a679..00000000000
--- a/jstests/geo_circle1_noindex.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// SERVER-7343: allow $within without a geo index.
-t = db.geo_circle1_noindex;
-t.drop();
-
-searches = [
- [ [ 5 , 5 ] , 3 ] ,
- [ [ 5 , 5 ] , 1 ] ,
- [ [ 5 , 5 ] , 5 ] ,
- [ [ 0 , 5 ] , 5 ] ,
-];
-correct = searches.map( function(z){ return []; } );
-
-num = 0;
-
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- for ( i=0; i<searches.length; i++ )
- if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
- correct[i].push( o );
- }
-}
-
-for ( i=0; i<searches.length; i++ ){
- q = { loc : { $within : { $center : searches[i] } } }
- assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
-}
diff --git a/jstests/geo_circle2.js b/jstests/geo_circle2.js
deleted file mode 100644
index ef76884609c..00000000000
--- a/jstests/geo_circle2.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.geo_circle2;
-t.drop();
-
-t.ensureIndex({loc : "2d", categories:1}, {"name":"placesIdx", "min": -100, "max": 100});
-
-t.insert({ "uid" : 368900 , "loc" : { "x" : -36 , "y" : -8} ,"categories" : [ "sports" , "hotel" , "restaurant"]});
-t.insert({ "uid" : 555344 , "loc" : { "x" : 13 , "y" : 29} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 855878 , "loc" : { "x" : 38 , "y" : 30} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 917347 , "loc" : { "x" : 15 , "y" : 46} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 647874 , "loc" : { "x" : 25 , "y" : 23} ,"categories" : [ "hotel" , "restaurant"]});
-t.insert({ "uid" : 518482 , "loc" : { "x" : 4 , "y" : 25} ,"categories" : [ ]});
-t.insert({ "uid" : 193466 , "loc" : { "x" : -39 , "y" : 22} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 622442 , "loc" : { "x" : -24 , "y" : -46} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 297426 , "loc" : { "x" : 33 , "y" : -49} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 528464 , "loc" : { "x" : -43 , "y" : 48} ,"categories" : [ "restaurant"]});
-t.insert({ "uid" : 90579 , "loc" : { "x" : -4 , "y" : -23} ,"categories" : [ "restaurant"]});
-t.insert({ "uid" : 368895 , "loc" : { "x" : -8 , "y" : 14} ,"categories" : [ "sports" ]});
-t.insert({ "uid" : 355844 , "loc" : { "x" : 34 , "y" : -4} ,"categories" : [ "sports" , "hotel"]});
-
-
-assert.eq( 10 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}} } ).itcount() , "A" );
-assert.eq( 6 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}}, "categories" : "sports" } ).itcount() , "B" );
-
-// When not a $near or $within query, geo index should not be used. Fails if geo index is used.
-assert.eq( 1 , t.find({ "loc" : { "x" : -36, "y" : -8}, "categories" : "sports" }).itcount(), "C" )
diff --git a/jstests/geo_circle2a.js b/jstests/geo_circle2a.js
deleted file mode 100644
index 67a6ba17243..00000000000
--- a/jstests/geo_circle2a.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// From SERVER-2381
-// Tests to make sure that nested multi-key indexing works for geo indexes and is not used for direct position
-// lookups
-
-var coll = db.geo_circle2a;
-coll.drop();
-coll.insert({ p : [1112,3473], t : [{ k : 'a', v : 'b' }, { k : 'c', v : 'd' }] })
-coll.ensureIndex({ p : '2d', 't.k' : 1 }, { min : 0, max : 10000 })
-
-// Succeeds, since on direct lookup should not use the index
-assert(1 == coll.find({p:[1112,3473],'t.k':'a'}).count(), "A")
-// Succeeds and uses the geo index
-assert(1 == coll.find({p:{$within:{$box:[[1111,3472],[1113,3475]]}}, 't.k' : 'a' }).count(), "B")
-
-
-coll.drop()
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' }, { k : 'key2', v : 123 } ] })
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] })
-
-coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 })
-
-// Succeeds, since should now lookup multi-keys correctly
-assert(2 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } } }).count(), "C")
-// Succeeds, and should not use geoindex
-assert(2 == coll.find({ point : [1, 10] }).count(), "D")
-assert(2 == coll.find({ point : [1, 10], "tags.v" : "value" }).count(), "E")
-assert(1 == coll.find({ point : [1, 10], "tags.v" : 123 }).count(), "F")
-
-
-coll.drop()
-coll.insert({ point:[ 1, 10 ], tags : [ { k : { 'hello' : 'world'}, v : 'value' }, { k : 'key2', v : 123 } ] })
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] })
-
-coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 })
-
-// Succeeds, should be able to look up the complex element
-assert(1 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } }, 'tags.k' : { 'hello' : 'world' } }).count(), "G") \ No newline at end of file
diff --git a/jstests/geo_circle3.js b/jstests/geo_circle3.js
deleted file mode 100644
index 2882b47378e..00000000000
--- a/jstests/geo_circle3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// SERVER-848 and SERVER-1191.
-db.places.drop()
-
-n = 0;
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52.0001 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 51.9999 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52.0001 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52.0001 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 51.9999 } })
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 51.9999 } })
-db.places.ensureIndex( { loc : "2d" } )
-radius=0.0001
-center=[5,52]
-//print(db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).count())
-// FIXME: we want an assert, e.g., that there be 5 answers in the find().
-db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).forEach(printjson);
-
-
-// the result:
-// { "_id" : ObjectId("4bb1f2f088df513435bcb4e1"), "loc" : { "x" : 5, "y" : 52 } }
-// { "_id" : ObjectId("4bb1f54383459c40223a8ae7"), "loc" : { "x" : 5, "y" : 51.9999 } }
-// { "_id" : ObjectId("4bb1f54583459c40223a8aeb"), "loc" : { "x" : 5.0001, "y" : 51.9999 } }
-// { "_id" : ObjectId("4bb1f2e588df513435bcb4e0"), "loc" : { "x" : 4.9999, "y" : 52 } }
-// { "_id" : ObjectId("4bb1f30888df513435bcb4e2"), "loc" : { "x" : 5.0001, "y" : 52 } }
-// { "_id" : ObjectId("4bb1f54383459c40223a8ae8"), "loc" : { "x" : 4.9999, "y" : 52.0001 } }
diff --git a/jstests/geo_circle4.js b/jstests/geo_circle4.js
deleted file mode 100644
index dc3c564389e..00000000000
--- a/jstests/geo_circle4.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Reported as server-848.
-function test(index) {
- db.server848.drop();
-
- radius=0.0001;
- center=[5,52];
-
- db.server848.save({ "_id": 1, "loc" : { "x" : 4.9999, "y" : 52 } });
- db.server848.save({ "_id": 2, "loc" : { "x" : 5, "y" : 52 } });
- db.server848.save({ "_id": 3, "loc" : { "x" : 5.0001, "y" : 52 } });
- db.server848.save({ "_id": 4, "loc" : { "x" : 5, "y" : 52.0001 } });
- db.server848.save({ "_id": 5, "loc" : { "x" : 5, "y" : 51.9999 } });
- db.server848.save({ "_id": 6, "loc" : { "x" : 4.9999, "y" : 52.0001 } });
- db.server848.save({ "_id": 7, "loc" : { "x" : 5.0001, "y" : 52.0001 } });
- db.server848.save({ "_id": 8, "loc" : { "x" : 4.9999, "y" : 51.9999 } });
- db.server848.save({ "_id": 9, "loc" : { "x" : 5.0001, "y" : 51.9999 } });
- if (index) {
- db.server848.ensureIndex( { loc : "2d" } );
- }
- r=db.server848.find({"loc" : {"$within" : {"$center" : [center, radius]}}}, {_id:1});
- assert.eq(5, r.count(), "A1");
- // FIXME: surely code like this belongs in utils.js.
- a=r.toArray();
- x=[];
- for (k in a) { x.push(a[k]["_id"]) }
- x.sort()
- assert.eq([1,2,3,4,5], x, "B1");
-}
-
-test(false)
-test(true)
diff --git a/jstests/geo_circle5.js b/jstests/geo_circle5.js
deleted file mode 100644
index fea9c56fd02..00000000000
--- a/jstests/geo_circle5.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// reported as server-1238.
-
-db.server1238.drop();
-db.server1238.remove({})
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 1})
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 2})
-db.server1238.ensureIndex( { loc : "2d" } , { min : -21000000 , max : 21000000 } )
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 3})
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 4})
-
-c1=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000000, 900000], 1.0]}}}).count()
-
-c2=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).count()
-
-
-assert.eq(4, c1, "A1");
-assert.eq(c1, c2, "B1");
-//print(db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).toArray());
-// [
-// {
-// "_id" : ObjectId("4c173306f5d9d34a46cb7b11"),
-// "loc" : [
-// 5000000,
-// 900000
-// ],
-// "id" : 4
-// }
-// ]
diff --git a/jstests/geo_distinct.js b/jstests/geo_distinct.js
deleted file mode 100644
index 60e0d15d22d..00000000000
--- a/jstests/geo_distinct.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// Test distinct with geo queries SERVER-2135
-
-t = db.commits
-t.drop()
-
-t.save( { _id : ObjectId( "4ce63ec2f360622431000013" ), loc : [ 55.59664, 13.00156 ], author : "FredrikL" } )
-
-printjson( db.runCommand( { distinct : 'commits', key : 'loc' } ) )
-assert.isnull( db.getLastError() )
-
-t.ensureIndex( { loc : '2d' } )
-
-printjson( t.getIndexes() )
-
-printjson( db.runCommand( { distinct : 'commits', key : 'loc' } ) )
-assert.isnull( db.getLastError() ) \ No newline at end of file
diff --git a/jstests/geo_exactfetch.js b/jstests/geo_exactfetch.js
deleted file mode 100644
index c0a0e714eae..00000000000
--- a/jstests/geo_exactfetch.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// SERVER-7322
-t = db.geo_exactfetch
-t.drop();
-
-function test(indexname) {
- assert.eq(1, t.find({lon_lat: [-71.34895, 42.46037]}).itcount(), indexname);
- t.ensureIndex({lon_lat: indexname, population: -1})
- assert.eq(2, t.find({lon_lat: {$nearSphere: [-71.34895, 42.46037]}}).itcount(), indexname);
- assert.eq(1, t.find({lon_lat: [-71.34895, 42.46037]}).itcount(), indexname);
- t.dropIndex({lon_lat: indexname, population: -1})
-}
-
-t.insert({ city: "B", lon_lat: [-71.34895, 42.46037], population: 1000})
-t.insert({ city: "A", lon_lat: [1.48736, 42.55327], population: 100})
-
-test("2d")
-test("2dsphere")
diff --git a/jstests/geo_fiddly_box.js b/jstests/geo_fiddly_box.js
deleted file mode 100644
index 95f33a32987..00000000000
--- a/jstests/geo_fiddly_box.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// Reproduces simple test for SERVER-2832
-
-// The setup to reproduce was/is to create a set of points where the
-// "expand" portion of the geo-lookup expands the 2d range in only one
-// direction (so points are required on either side of the expanding range)
-
-t = db.geo_fiddly_box
-
-t.drop();
-t.ensureIndex({ loc : "2d" })
-
-t.insert({ "loc" : [3, 1] })
-t.insert({ "loc" : [3, 0.5] })
-t.insert({ "loc" : [3, 0.25] })
-t.insert({ "loc" : [3, -0.01] })
-t.insert({ "loc" : [3, -0.25] })
-t.insert({ "loc" : [3, -0.5] })
-t.insert({ "loc" : [3, -1] })
-
-// OK!
-print( t.count() )
-assert.eq( 7, t.count({ "loc" : { "$within" : { "$box" : [ [2, -2], [46, 2] ] } } }), "Not all locations found!" );
-
-
-// Test normal lookup of a small square of points as a sanity check.
-
-epsilon = 0.0001;
-min = -1
-max = 1
-step = 1
-numItems = 0;
-
-t.drop()
-t.ensureIndex({ loc : "2d" }, { max : max + epsilon / 2, min : min - epsilon / 2 })
-
-for(var x = min; x <= max; x += step){
- for(var y = min; y <= max; y += step){
- t.insert({ "loc" : { x : x, y : y } })
- numItems++;
- }
-}
-
-assert.eq( numItems, t.count({ loc : { $within : { $box : [[min - epsilon / 3,
- min - epsilon / 3],
- [max + epsilon / 3,
- max + epsilon / 3]] } } }), "Not all locations found!");
diff --git a/jstests/geo_fiddly_box2.js b/jstests/geo_fiddly_box2.js
deleted file mode 100644
index 0588abfa1de..00000000000
--- a/jstests/geo_fiddly_box2.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Reproduces simple test for SERVER-2115
-
-// The setup to reproduce is to create a set of points and a really big bounds so that we are required to do
-// exact lookups on the points to get correct results.
-
-t = db.geo_fiddly_box2
-t.drop()
-
-t.insert( { "letter" : "S", "position" : [ -3, 0 ] } )
-t.insert( { "letter" : "C", "position" : [ -2, 0 ] } )
-t.insert( { "letter" : "R", "position" : [ -1, 0 ] } )
-t.insert( { "letter" : "A", "position" : [ 0, 0 ] } )
-t.insert( { "letter" : "B", "position" : [ 1, 0 ] } )
-t.insert( { "letter" : "B", "position" : [ 2, 0 ] } )
-t.insert( { "letter" : "L", "position" : [ 3, 0 ] } )
-t.insert( { "letter" : "E", "position" : [ 4, 0 ] } )
-
-t.ensureIndex( { position : "2d" } )
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
-assert.eq( 4, result.count() )
-
-t.dropIndex( { position : "2d" } )
-t.ensureIndex( { position : "2d" }, { min : -10000000, max : 10000000 } )
-
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
-assert.eq( 4, result.count() )
-
-t.dropIndex( { position : "2d" } )
-t.ensureIndex( { position : "2d" }, { min : -1000000000, max : 1000000000 } )
-
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
-assert.eq( 4, result.count() )
diff --git a/jstests/geo_group.js b/jstests/geo_group.js
deleted file mode 100644
index 4e038f94b03..00000000000
--- a/jstests/geo_group.js
+++ /dev/null
@@ -1,35 +0,0 @@
-t = db.geo_group;
-t.drop();
-
-n = 1;
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- t.insert( { _id : n++ , loc : [ x , y ] } )
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-// Test basic count with $near
-assert.eq(t.find().count(), 10000);
-assert.eq(t.find( { loc : { $within : {$center : [[56,8], 10]}}}).count(), 81);
-assert.eq(t.find( { loc : { $near : [56, 8, 10] } } ).count(), 81);
-
-// Test basic group that effectively does a count
-assert.eq(
- t.group( {
- reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1} },
- initial : { sums:{count:0} } }
- ),
- [ { "sums" : { "count" : 10000 } } ]
-);
-
-// Test basic group + $near that does a count
-assert.eq(
- t.group( {
- reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1} },
- initial : { sums:{count:0} },
- cond : { loc : { $near : [56, 8, 10] } } }
- ),
- [ { "sums" : { "count" : 81 } } ]
-);
diff --git a/jstests/geo_haystack1.js b/jstests/geo_haystack1.js
deleted file mode 100644
index f4035ecbcf2..00000000000
--- a/jstests/geo_haystack1.js
+++ /dev/null
@@ -1,59 +0,0 @@
-
-t = db.geo_haystack1
-t.drop()
-
-function distance( a , b ){
- var x = a[0] - b[0];
- var y = a[1] - b[1];
- return Math.sqrt( ( x * x ) + ( y * y ) );
-}
-
-function distanceTotal( a , arr , f ){
- var total = 0;
- for ( var i=0; i<arr.length; i++ ){
- total += distance( a , arr[i][f] );
- }
- return total;
-}
-
-queries = [
- { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
-]
-
-answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } )
-
-
-n = 0;
-for ( x=0; x<20; x++ ){
- for ( y=0; y<20; y++ ){
- t.insert( { _id : n , loc : [ x , y ] , z : n % 5 } );
-
- for ( i=0; i<queries.length; i++ ){
- var d = distance( queries[i].near , [ x , y ] )
- if ( d > queries[i].maxDistance )
- continue;
- if ( queries[i].search.z != n % 5 )
- continue;
- answers[i].results.push( { _id : n , loc : [ x , y ]} )
- answers[i].totalDistance += d;
- }
-
- n++;
- }
-}
-
-t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
-
-for ( i=0; i<queries.length; i++ ){
- print( "---------" );
- printjson( queries[i] );
- res = t.runCommand( "geoSearch" , queries[i] )
- print( "\t" + tojson( res.stats ) );
- print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
- assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" )
- assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" )
- //printjson( res );
- //printjson( answers[i].length );
-}
-
-
diff --git a/jstests/geo_haystack2.js b/jstests/geo_haystack2.js
deleted file mode 100644
index 2e0eb5710fb..00000000000
--- a/jstests/geo_haystack2.js
+++ /dev/null
@@ -1,60 +0,0 @@
-
-t = db.geo_haystack2
-t.drop()
-
-function distance( a , b ){
- var x = a[0] - b[0];
- var y = a[1] - b[1];
- return Math.sqrt( ( x * x ) + ( y * y ) );
-}
-
-function distanceTotal( a , arr , f ){
- var total = 0;
- for ( var i=0; i<arr.length; i++ ){
- total += distance( a , arr[i][f] );
- }
- return total;
-}
-
-queries = [
- { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
-]
-
-answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } )
-
-
-n = 0;
-for ( x=0; x<20; x++ ){
- for ( y=0; y<20; y++ ){
- t.insert( { _id : n , loc : [ x , y ] , z : [ n % 10 , ( n + 5 ) % 10 ] } );
-
- for ( i=0; i<queries.length; i++ ){
- var d = distance( queries[i].near , [ x , y ] )
- if ( d > queries[i].maxDistance )
- continue;
- if ( queries[i].search.z != n % 10 &&
- queries[i].search.z != ( n + 5 ) % 10 )
- continue;
- answers[i].results.push( { _id : n , loc : [ x , y ] } )
- answers[i].totalDistance += d;
- }
-
- n++;
- }
-}
-
-t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
-
-for ( i=0; i<queries.length; i++ ){
- print( "---------" );
- printjson( queries[i] );
- res = t.runCommand( "geoSearch" , queries[i] )
- print( "\t" + tojson( res.stats ) );
- print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
- assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" )
- assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" )
- //printjson( res );
- //printjson( answers[i].length );
-}
-
-
diff --git a/jstests/geo_haystack3.js b/jstests/geo_haystack3.js
deleted file mode 100644
index f5a2ab7becb..00000000000
--- a/jstests/geo_haystack3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-t = db.geo_haystack3
-t.drop()
-
-t.insert({ pos : { long : 34, lat : 33 }})
-t.insert({ pos : { long : 34.2, lat : 33.3 }, type : ["bar", "restaurant" ]})
-t.insert({ pos : { long : 34.2, lat : 37.3 }, type : ["bar", "chicken" ]})
-t.insert({ pos : { long : 59.1, lat : 87.2 }, type : ["baz", "office" ]})
-t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
-
-// This only matches the first insert. What do we want? First 3 or just the first?
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : {}, limit : 30 })
-assert.eq(1, res.stats.n, "Right # of matches");
-assert.eq(34, res.results[0].pos.long, "expected longitude");
-assert.eq(33, res.results[0].pos.lat, "expected latitude");
-
-// This matches the middle 2 of the 4 elements above.
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
- limit : 2 })
-assert.eq(2, res.stats.n, "Right # of matches");
-assert.eq("bar", res.results[0].type[0], "expected value for type");
-assert.eq("bar", res.results[1].type[0], "expected value for type");
-assert.neq(res.results[0].type[1], res.results[1].type[1], "should get 2 diff results");
-
-// This is a test for the limit being reached/only 1 returned.
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
- limit : 1 })
-assert.eq(1, res.stats.n, "Right # of matches");
-assert.eq("bar", res.results[0].type[0], "expected value for type");
diff --git a/jstests/geo_invalid_polygon.js b/jstests/geo_invalid_polygon.js
deleted file mode 100644
index af5545b7819..00000000000
--- a/jstests/geo_invalid_polygon.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// With invalid geometry, error message should include _id
-// SERVER-8992
-t = db.geo_invalid_polygon;
-t.drop();
-
-// Self-intersecting polygon, triggers
-// "Exterior shell of polygon is invalid".
-var geometry = {
- type: "Polygon",
- coordinates: [
- [
- [ 0, 0 ],
- [ 0, 1 ],
- [ 1, 1 ],
- [-2,-1 ],
- [ 0, 0 ]
- ]
- ]
-};
-
-t.insert({_id: 42, geometry: geometry});
-t.createIndex({geometry: '2dsphere'});
-var gleResult = db.getLastErrorCmd(1);
-
-// Verify that we triggered the error we're trying to test.
-assert.eq(16755, gleResult.code);
-
-// Document's _id should be in error message.
-assert(
- -1 != gleResult.err.indexOf('42'),
- "Error message didn't contain document _id.\nMessage: \"" + gleResult.err
- + '"\n'
-);
diff --git a/jstests/geo_mapreduce.js b/jstests/geo_mapreduce.js
deleted file mode 100644
index a6ecf763ae1..00000000000
--- a/jstests/geo_mapreduce.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Test script from SERVER-1742
-
-// MongoDB test script for mapreduce with geo query
-
-// setup test collection
-db.apples.drop()
-db.apples.insert( { "geo" : { "lat" : 32.68331909, "long" : 69.41610718 }, "apples" : 5 } );
-db.apples.insert( { "geo" : { "lat" : 35.01860809, "long" : 70.92027283 }, "apples" : 2 } );
-db.apples.insert( { "geo" : { "lat" : 31.11639023, "long" : 64.19970703 }, "apples" : 11 } );
-db.apples.insert( { "geo" : { "lat" : 32.64500046, "long" : 69.36251068 }, "apples" : 4 } );
-db.apples.insert( { "geo" : { "lat" : 33.23638916, "long" : 69.81360626 }, "apples" : 9 } );
-db.apples.ensureIndex( { "geo" : "2d" } );
-
-center = [ 32.68, 69.41 ];
-radius = 10 / 111; // 10km; 1 arcdegree ~= 111km
-geo_query = { geo : { '$within' : { '$center' : [ center, radius ] } } };
-
-// geo query on collection works fine
-res = db.apples.find( geo_query );
-assert.eq( 2, res.count() );
-
-// map function
-m = function() {
- emit( null, { "apples" : this.apples } );
-};
-
-// reduce function
-r = function(key, values) {
- var total = 0;
- for ( var i = 0; i < values.length; i++ ) {
- total += values[i].apples;
- }
- return { "apples" : total };
-};
-
-// mapreduce without geo query works fine
-res = db.apples.mapReduce( m, r, { out : { inline : 1 } } );
-
-printjson( res )
-total = res.results[0];
-assert.eq( 31, total.value.apples );
-
-// mapreduce with regular query works fine too
-res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : { apples : { '$lt' : 9 } } } );
-total = res.results[0];
-assert.eq( 11, total.value.apples );
-
-// mapreduce with geo query gives error on mongodb version 1.6.2
-// uncaught exception: map reduce failed: {
-// "assertion" : "manual matcher config not allowed",
-// "assertionCode" : 13285,
-// "errmsg" : "db assertion failure",
-// "ok" : 0 }
-res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : geo_query } );
-total = res.results[0];
-assert.eq( 9, total.value.apples );
diff --git a/jstests/geo_mapreduce2.js b/jstests/geo_mapreduce2.js
deleted file mode 100644
index 9c393457c7b..00000000000
--- a/jstests/geo_mapreduce2.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Geo mapreduce 2 from SERVER-3478
-
-var coll = db.geoMR2
-coll.drop()
-
-for( var i = 0; i < 300; i++ )
- coll.insert({ i : i, location : [ 10, 20 ] })
-
-coll.ensureIndex({ location : "2d" })
-
-// map function
-m = function() {
- emit( null, { count : this.i } )
-}
-
-// reduce function
-r = function( key, values ) {
-
- var total = 0
- for ( var i = 0; i < values.length; i++ ) {
- total += values[i].count
- }
-
- return { count : total }
-};
-
-try{ coll.mapReduce( m, r,
- { out : coll.getName() + "_mr",
- sort : { _id : 1 },
- query : { 'location' : { $within : { $centerSphere : [[ 10, 20 ], 0.01 ] } } } })
-
-}
-catch( e ){
- // This should occur, since we can't in-mem sort for mreduce
- printjson( e )
-}
diff --git a/jstests/geo_max.js b/jstests/geo_max.js
deleted file mode 100644
index 03932004b75..00000000000
--- a/jstests/geo_max.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// Test where points are on _max (180)
-// Using GeoNearRandom because this test needs a lot of points in the index.
-// If there aren't enough points the test passes even if the code is broken.
-load("jstests/libs/geo_near_random.js");
-
-var test = new GeoNearRandomTest("geo_near_max")
-
-test.insertPts(/*numPts*/1000, /*indexBounds*/{min:-180, max:180}, /*scale*/0.9);
-
-test.t.insert({loc: [ 180, 0]})
-test.t.insert({loc: [-180, 0]})
-test.t.insert({loc: [ 179.999, 0]})
-test.t.insert({loc: [-179.999, 0]})
-
-assertXIsNegative = function(obj) { assert.lt(obj.loc[0], 0); }
-assertXIsPositive = function(obj) { assert.gt(obj.loc[0], 0); }
-
-assert.eq(test.t.count({loc:{$within: {$center:[[ 180, 0], 1]}}}), 2)
-assert.eq(test.t.count({loc:{$within: {$center:[[-180, 0], 1]}}}), 2)
-test.t.find({loc:{$within: {$center:[[ 180, 0], 1]}}}).forEach(assertXIsPositive)
-test.t.find({loc:{$within: {$center:[[-180, 0], 1]}}}).forEach(assertXIsNegative)
-
-var oneDegree = Math.PI / 180; // in radians
-
-// errors out due to SERVER-1760
-if (0) {
-assert.eq(test.t.count({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}), 2)
-assert.eq(test.t.count({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}), 2)
-test.t.find({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}).forEach(assertXIsPositive)
-test.t.find({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}).forEach(assertXIsNegative)
-}
-
-assert.eq(test.t.count({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}), 2)
-assert.eq(test.t.count({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}), 2)
-test.t.find({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}).forEach(assertXIsPositive)
-test.t.find({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}).forEach(assertXIsNegative)
-
-assert.eq(test.t.count({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}), 2)
-assert.eq(test.t.count({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}), 2)
-test.t.find({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsPositive)
-test.t.find({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsNegative)
-
-assert.eq(test.t.find({loc:{$near:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}])
-assert.eq(test.t.find({loc:{$near:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}])
-
-// These will need to change when SERVER-1760 is fixed
-assert.eq(test.t.find({loc:{$nearSphere:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}])
-assert.eq(test.t.find({loc:{$nearSphere:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}])
-
diff --git a/jstests/geo_mindistance.js b/jstests/geo_mindistance.js
deleted file mode 100644
index b429eacb708..00000000000
--- a/jstests/geo_mindistance.js
+++ /dev/null
@@ -1,214 +0,0 @@
-/* Test $minDistance option for $near and $nearSphere queries, and geoNear
- * command. SERVER-9395.
-*/
-var t = db.geo_mindistance;
-t.drop();
-
-//
-// Useful constants and functions.
-//
-
-var km = 1000,
- earthRadiusMeters = 6378.1 * km;
-
-function metersToRadians(m) { return m / earthRadiusMeters; }
-
-/* Count documents within some radius of (0, 0), in kilometers.
- * With this function we can use the existing $maxDistance option to test
- * the newer $minDistance option's behavior.
- */
-function n_docs_within(radius_km) {
- // geoNear's distances are in meters for geoJSON points.
- var cmdResult = db.runCommand({
- geoNear: t.getName(),
- near: {type: 'Point', coordinates: [0, 0]},
- spherical: true,
- maxDistance: radius_km * km,
- num: 1000
- });
-
- return cmdResult.results.length;
-}
-
-//
-// Setup.
-//
-
-/* Make 121 points from long, lat = (0, 0) (in Gulf of Guinea) to (10, 10)
- * (inland Nigeria).
- */
-for (var x = 0; x <= 10; x += 1) {
- for (var y = 0; y <= 10; y += 1) {
- t.insert({loc: [x, y]});
- }
-}
-
-/* $minDistance is supported for 2dsphere index only, not 2d or geoHaystack. */
-t.ensureIndex({loc: "2dsphere"});
-
-var n_docs = t.count(),
- geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
- legacyPoint = [0, 0];
-
-//
-// Test $near with GeoJSON point (required for $near with 2dsphere index).
-// min/maxDistance are in meters.
-//
-
-var n_min1400_count = t.find({loc: {
- $near: {$geometry: geoJSONPoint, $minDistance: 1400 * km
-}}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $near (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-var n_bw500_and_1000_count = t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: 500 * km,
- $maxDistance: 1000 * km
-}}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $near (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
-
-//
-// $nearSphere with 2dsphere index can take a legacy or GeoJSON point.
-// First test $nearSphere with legacy point.
-// min/maxDistance are in radians.
-//
-
-n_min1400_count = t.find({loc: {
- $nearSphere: legacyPoint, $minDistance: metersToRadians(1400 * km)
-}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-n_bw500_and_1000_count = t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: metersToRadians(500 * km),
- $maxDistance: metersToRadians(1000 * km)
-}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
-
-//
-// Test $nearSphere with GeoJSON point.
-// min/maxDistance are in meters.
-//
-
-n_min1400_count = t.find({loc: {
- $nearSphere: geoJSONPoint, $minDistance: 1400 * km
-}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-n_bw500_and_1000_count = t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: 500 * km,
- $maxDistance: 1000 * km
-}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
-
-
-//
-// Test geoNear command with GeoJSON point.
-// Distances are in meters.
-//
-
-var cmdResult = db.runCommand({
- geoNear: t.getName(),
- near: {type: 'Point', coordinates: [0, 0]},
- minDistance: 1400 * km,
- spherical: true // spherical required for 2dsphere index
-});
-assert.eq(
- n_docs - n_docs_within(1400),
- cmdResult.results.length,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points geoNear (0, 0) with $minDistance 1400 km, got "
- + cmdResult.results.length
-);
-
-cmdResult = db.runCommand({
- geoNear: t.getName(),
- near: {type: 'Point', coordinates: [0, 0]},
- minDistance: 500 * km,
- maxDistance: 1000 * km,
- spherical: true
-});
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- cmdResult.results.length,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + cmdResult.results.length
-);
-
-//
-// Test geoNear command with legacy point.
-// Distances are in radians.
-//
-
-cmdResult = db.runCommand({
- geoNear: t.getName(),
- near: legacyPoint,
- minDistance: metersToRadians(1400 * km),
- spherical: true // spherical required for 2dsphere index
-});
-assert.eq(
- n_docs - n_docs_within(1400),
- cmdResult.results.length,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points geoNear (0, 0) with $minDistance 1400 km, got "
- + cmdResult.results.length
-);
-
-cmdResult = db.runCommand({
- geoNear: t.getName(),
- near: legacyPoint,
- minDistance: metersToRadians(500 * km),
- maxDistance: metersToRadians(1000 * km),
- spherical: true
-});
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- cmdResult.results.length,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + cmdResult.results.length
-);
diff --git a/jstests/geo_mindistance_boundaries.js b/jstests/geo_mindistance_boundaries.js
deleted file mode 100644
index 80e933827b6..00000000000
--- a/jstests/geo_mindistance_boundaries.js
+++ /dev/null
@@ -1,124 +0,0 @@
-/* Test boundary conditions for $minDistance option for $near and $nearSphere
- * queries. SERVER-9395.
-*/
-var t = db.geo_mindistance_boundaries;
-t.drop();
-t.insert({loc: [1, 0]}); // 1 degree of longitude from origin.
-
-/* $minDistance is supported for 2dsphere index only, not 2d or geoHaystack. */
-t.ensureIndex({loc: "2dsphere"});
-
-//
-// Useful constants.
-//
-
-var km = 1000,
- earthRadiusMeters = 6378.1 * km,
- geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
- // One degree of longitude at the equator, about 111 km.
- degreeInMeters = 2 * Math.PI * earthRadiusMeters / 360,
- metersEpsilon = Number.MIN_VALUE;
-
-/* Grow epsilon's exponent until epsilon exceeds the margin of error for the
- * representation of degreeInMeters. The server uses 64-bit math, too, so we'll
- * find the smallest epsilon the server can detect.
-*/
-while (degreeInMeters + metersEpsilon == degreeInMeters) { metersEpsilon *= 2; }
-
-//
-// Test boundary conditions for $near and GeoJSON, in meters.
-//
-
-
-// minDistance must be within the args to $near, not on the side.
-assert.throws(function() { t.find({loc:{$near:{$geometry: geoJSONPoint},
- $minDistance:0.1}}).itcount();});
-
-assert.eq(
- 1, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
-
-assert.eq(
- 1, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters - metersEpsilon
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
-
-assert.eq(
- 0, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters + metersEpsilon
- }}}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
-
-//
-// Test boundary conditions for $nearSphere and GeoJSON, in meters.
-//
-
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
-
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: degreeInMeters - metersEpsilon
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
-
-assert.eq(
- 0, t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: degreeInMeters + metersEpsilon
- }}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
-
-//
-// Test boundary conditions for $nearSphere and a legacy point, in radians.
-//
-// $minDistance with legacy point requires $nearSphere; $near not
-// supported.
-//
-
-var legacyPoint = [0, 0],
- degreeInRadians = 2 * Math.PI / 360,
- radiansEpsilon = Number.MIN_VALUE;
-
-while (1 + radiansEpsilon == 1) { radiansEpsilon *= 2; }
-
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
-
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians - radiansEpsilon
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
-
-assert.eq(
- 0, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians + radiansEpsilon
- }}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
diff --git a/jstests/geo_multikey0.js b/jstests/geo_multikey0.js
deleted file mode 100644
index 7d0ea57e329..00000000000
--- a/jstests/geo_multikey0.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Multikey geo values tests - SERVER-3793.
-
-t = db.jstests_geo_multikey0;
-t.drop();
-
-// Check that conflicting constraints are satisfied by parallel array elements.
-t.save( {loc:[{x:20,y:30},{x:30,y:40}]} );
-assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
-
-// Check that conflicting constraints are satisfied by parallel array elements with a 2d index on loc.
-if ( 0 ) { // SERVER-3793
-t.ensureIndex( {loc:'2d'} );
-assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
-}
-
-t.drop();
-
-// Check that conflicting constraints are satisfied by parallel array elements of x.
-t.save( {loc:[20,30],x:[1,2]} );
-assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
-
-// Check that conflicting constraints are satisfied by parallel array elements of x with a 2d index on loc,x.
-if ( 0 ) { // SERVER-3793
-t.ensureIndex( {loc:'2d',x:1} );
-assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
-}
diff --git a/jstests/geo_multikey1.js b/jstests/geo_multikey1.js
deleted file mode 100644
index 5c949a8599a..00000000000
--- a/jstests/geo_multikey1.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Multikey geo index tests with parallel arrays.
-
-t = db.jstests_geo_multikey1;
-t.drop();
-
-locArr = [];
-arr = [];
-for( i = 0; i < 10; ++i ) {
- locArr.push( [i,i+1] );
- arr.push( i );
-}
-t.save( {loc:locArr,a:arr,b:arr,c:arr} );
-
-// Parallel arrays are allowed for geo indexes.
-t.ensureIndex( {loc:'2d',a:1,b:1,c:1} );
-assert( !db.getLastError() );
-
-// Parallel arrays are not allowed for normal indexes.
-t.ensureIndex( {loc:1,a:1,b:1,c:1} );
-assert( db.getLastError() );
diff --git a/jstests/geo_multinest0.js b/jstests/geo_multinest0.js
deleted file mode 100644
index 68e609550d7..00000000000
--- a/jstests/geo_multinest0.js
+++ /dev/null
@@ -1,63 +0,0 @@
-// Make sure nesting of location arrays also works.
-
-t = db.geonest
-t.drop();
-
-t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-assert.isnull( db.getLastError() )
-
-t.ensureIndex( { "data.loc" : "2d", zip : 1 } );
-assert.isnull( db.getLastError() )
-assert.eq( 2, t.getIndexKeys().length )
-
-t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-assert.isnull( db.getLastError() )
-
-// test normal access
-
-printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
-
-
-
-
-
-// Try a complex nesting
-
-t = db.geonest
-t.drop();
-
-t.insert( { zip : "10001", data : [ { loc : [ [ 10, 10 ], { lat : 50, long : 50 } ], type : "home" } ] } )
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-t.insert( { zip : "10003", data : [ { loc : [ { x : 30, y : 30 }, [ 50, 50 ] ], type : "home" } ] } )
-assert.isnull( db.getLastError() )
-
-t.ensureIndex( { "data.loc" : "2d", zip : 1 } );
-assert.isnull( db.getLastError() )
-assert.eq( 2, t.getIndexKeys().length )
-
-t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } )
-
-
-assert.isnull( db.getLastError() )
-
-// test normal access
-printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
-
-
-
diff --git a/jstests/geo_multinest1.js b/jstests/geo_multinest1.js
deleted file mode 100644
index 703283607d4..00000000000
--- a/jstests/geo_multinest1.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Test distance queries with interleaved distances
-
-t = db.multinest
-t.drop();
-
-t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
- { loc : [ 29, 29 ], type : "work" } ] } )
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 39, 39 ], type : "work" } ] } )
-t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
- { loc : [ 49, 49 ], type : "work" } ] } )
-assert.isnull( db.getLastError() )
-
-t.ensureIndex( { "data.loc" : "2d", zip : 1 } );
-assert.isnull( db.getLastError() )
-assert.eq( 2, t.getIndexKeys().length )
-
-t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 59, 59 ], type : "work" } ] } )
-assert.isnull( db.getLastError() )
-
-// test normal access
-
-var result = t.find({ "data.loc" : { $near : [0, 0] } }).toArray();
-
-printjson( result )
-
-assert.eq( 4, result.length )
-
-var order = [ 1, 2, 3, 4 ]
-
-for( var i = 0; i < result.length; i++ ){
- assert.eq( "1000" + order[i], result[i].zip )
-}
-
-
-
diff --git a/jstests/geo_near_random1.js b/jstests/geo_near_random1.js
deleted file mode 100644
index 50539f3ea5d..00000000000
--- a/jstests/geo_near_random1.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// this tests all points using $near
-load("jstests/libs/geo_near_random.js");
-
-var test = new GeoNearRandomTest("geo_near_random1");
-
-test.insertPts(50);
-
-test.testPt([0,0]);
-test.testPt(test.mkPt());
-test.testPt(test.mkPt());
-test.testPt(test.mkPt());
-test.testPt(test.mkPt());
diff --git a/jstests/geo_near_random2.js b/jstests/geo_near_random2.js
deleted file mode 100644
index 1673abb88e7..00000000000
--- a/jstests/geo_near_random2.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// this tests 1% of all points using $near and $nearSphere
-load("jstests/libs/geo_near_random.js");
-
-var test = new GeoNearRandomTest("geo_near_random2");
-
-test.insertPts(5000);
-
-opts = {sphere:0, nToTest:test.nPts*0.01};
-test.testPt([0,0], opts);
-test.testPt(test.mkPt(), opts);
-test.testPt(test.mkPt(), opts);
-test.testPt(test.mkPt(), opts);
-test.testPt(test.mkPt(), opts);
-
-opts.sphere = 1
-test.testPt([0,0], opts);
-test.testPt(test.mkPt(0.8), opts);
-test.testPt(test.mkPt(0.8), opts);
-test.testPt(test.mkPt(0.8), opts);
-test.testPt(test.mkPt(0.8), opts);
-
diff --git a/jstests/geo_nearwithin.js b/jstests/geo_nearwithin.js
deleted file mode 100644
index 6f38f5dd7d9..00000000000
--- a/jstests/geo_nearwithin.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Test geoNear + $within.
-t = db.geo_nearwithin
-t.drop();
-
-points = 10
-for (var x = -points; x < points; x += 1) {
- for (var y = -points; y < points; y += 1) {
- t.insert({geo: [x, y]})
- }
-}
-
-t.ensureIndex({ geo : "2d" })
-
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 1]}}}})
-assert.eq(resNear.results.length, 5)
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 0]}}}})
-assert.eq(resNear.results.length, 1)
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 0.5]}}}})
-assert.eq(resNear.results.length, 1)
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 1.5]}}}})
-assert.eq(resNear.results.length, 9)
-
-// We want everything distance >1 from us but <1.5
-// These points are (-+1, -+1)
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {$and: [{geo: {$within: {$center: [[0, 0], 1.5]}}},
- {geo: {$not: {$within: {$center: [[0,0], 1]}}}}]}})
-assert.eq(resNear.results.length, 4)
diff --git a/jstests/geo_oob_sphere.js b/jstests/geo_oob_sphere.js
deleted file mode 100644
index f2c76457af9..00000000000
--- a/jstests/geo_oob_sphere.js
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// Ensures spherical queries report invalid latitude values in points and center positions
-//
-
-t = db.geooobsphere
-t.drop();
-
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 89 } })
-t.insert({ loc : { x : 30, y : 91 } })
-
-t.ensureIndex({ loc : "2d" })
-assert.isnull( db.getLastError() )
-
-assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 91 ], $maxDistance : 0.25 } }).count() } );
-var err = db.getLastError()
-assert( err != null )
-printjson( err )
-
-assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 89 ], $maxDistance : 0.25 } }).count() } );
-var err = db.getLastError()
-assert( err != null )
-printjson( err )
-
-assert.throws( function() { t.find({ loc : { $within : { $centerSphere : [[ -180, -91 ], 0.25] } } }).count() } );
-var err = db.getLastError()
-assert( err != null )
-printjson( err )
-
-db.runCommand({ geoNear : "geooobsphere", near : [179, -91], maxDistance : 0.25, spherical : true })
-var err = db.getLastError()
-assert( err != null )
-printjson( err )
-
-db.runCommand({ geoNear : "geooobsphere", near : [30, 89], maxDistance : 0.25, spherical : true })
-var err = db.getLastError()
-assert( err != null )
-printjson( err ) \ No newline at end of file
diff --git a/jstests/geo_or.js b/jstests/geo_or.js
deleted file mode 100644
index fd9b7234a21..00000000000
--- a/jstests/geo_or.js
+++ /dev/null
@@ -1,62 +0,0 @@
-// multiple geo clauses with $or
-
-t = db.geoor;
-
-t.drop();
-
-var p = [-71.34895, 42.46037];
-var q = [1.48736, 42.55327];
-
-t.save({loc: p});
-t.save({loc: q});
-
-var indexname = "2dsphere";
-
-t.ensureIndex({loc: indexname})
-
-assert.eq(1, t.find({loc: p}).itcount(), indexname);
-
-// $or supports at most one $near clause
-assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(),
- 'geo query not supported by $or. index type: ' + indexname);
-assert.throws(function() {
- assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}},
- {loc: {$nearSphere: q}}]}).itcount(),
- 'geo query not supported by $or. index type: ' + indexname);
-}, null, '$or with multiple $near clauses');
-
-// the following tests should match the points in the collection
-
-assert.eq(2, t.find({$or: [
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}},
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}}
- ]}).itcount(),
- 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}},
- {loc: {$geoIntersects: {$geometry: {type: 'LineString',
- coordinates: [[0,0], [1,1]]}}}}
- ]}).itcount(),
- 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}},
- {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}}
- ]}).itcount(),
- 'multiple $geoIntersects Point clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'Polygon',
- coordinates: [[[0, 0], p, q, [0, 0]]]}}}},
- {loc: {$geoIntersects: {$geometry:
- {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}}}}
- ]}).itcount(),
- 'multiple $geoIntersects Polygon clauses not supported by $or. index type: ' + indexname);
-
-t.dropIndexes();
-
-var indexname = "2d";
-
-t.ensureIndex({loc: indexname})
-
-assert.eq(2, t.find({$or: [{loc: {$geoWithin: {$centerSphere: [p, 10]}}},
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}}]}).itcount(),
- 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
diff --git a/jstests/geo_poly_edge.js b/jstests/geo_poly_edge.js
deleted file mode 100644
index 31a0849e67d..00000000000
--- a/jstests/geo_poly_edge.js
+++ /dev/null
@@ -1,22 +0,0 @@
-//
-// Tests polygon edge cases
-//
-
-var coll = db.getCollection( 'jstests_geo_poly_edge' )
-coll.drop();
-
-coll.ensureIndex({ loc : "2d" })
-
-coll.insert({ loc : [10, 10] })
-coll.insert({ loc : [10, -10] })
-
-assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, -10 ]] } } }).itcount(), 2 )
-
-assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, 10 ]] } } }).itcount(), 1 )
-
-
-coll.insert({ loc : [179, 0] })
-coll.insert({ loc : [0, 179] })
-
-assert.eq( coll.find({ loc : { $within : { $polygon : [[0, 0], [1000, 0], [1000, 1000], [0, 1000]] } } }).itcount(), 3 )
-
diff --git a/jstests/geo_poly_line.js b/jstests/geo_poly_line.js
deleted file mode 100644
index aca77b6ab0a..00000000000
--- a/jstests/geo_poly_line.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// Test that weird polygons work SERVER-3725
-
-t = db.geo_polygon5;
-t.drop();
-
-t.insert({loc:[0,0]})
-t.insert({loc:[1,0]})
-t.insert({loc:[2,0]})
-t.insert({loc:[3,0]})
-t.insert({loc:[4,0]})
-
-t.ensureIndex( { loc : "2d" } );
-
-printjson( t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).toArray() )
-
-assert.eq( 5, t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).itcount() )
-
diff --git a/jstests/geo_polygon1.js b/jstests/geo_polygon1.js
deleted file mode 100644
index 4b7427a4da2..00000000000
--- a/jstests/geo_polygon1.js
+++ /dev/null
@@ -1,74 +0,0 @@
-//
-// Tests for N-dimensional polygon querying
-//
-
-t = db.geo_polygon1;
-t.drop();
-
-num = 0;
-for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-triangle = [[0,0], [1,1], [0,2]];
-
-// Look at only a small slice of the data within a triangle
-assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
-
-boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
-
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
-
-//Make sure we can add object-based polygons
-assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() )
-
-// Look in a box much bigger than the one we have data in
-boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
-
-t.drop();
-
-pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
-
-t.save({loc: [1,3] }); // Add a point that's in
-t.ensureIndex( { loc : "2d" } );
-assert.isnull( db.getLastError() )
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
-
-t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening
-t.save({ loc : [3, 7] }) // Add a point above the center of the head
-t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
-
-// Make sure we can't add bad polygons
-okay = true
-try{
- t.find( { loc : { $within : { $polygon : [1, 2] } } } ).toArray()
- okay = false
-}
-catch(e){}
-assert(okay)
-try{
- t.find( { loc : { $within : { $polygon : [[1, 2]] } } } ).toArray()
- okay = false
-}
-catch(e){}
-assert(okay)
-try{
- t.find( { loc : { $within : { $polygon : [[1, 2], [2, 3]] } } } ).toArray()
- okay = false
-}
-catch(e){}
-assert(okay)
-
diff --git a/jstests/geo_polygon1_noindex.js b/jstests/geo_polygon1_noindex.js
deleted file mode 100644
index 2a94bbbfd09..00000000000
--- a/jstests/geo_polygon1_noindex.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// SERVER-7343: allow $within without a geo index.
-
-t = db.geo_polygon1_noindex;
-t.drop();
-
-num = 0;
-for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- }
-}
-
-triangle = [[0,0], [1,1], [0,2]];
-
-// Look at only a small slice of the data within a triangle
-assert.eq( 1 , t.find({ loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
-
-boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
-
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
-
-//Make sure we can add object-based polygons
-assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() )
-
-// Look in a box much bigger than the one we have data in
-boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
-
-t.drop();
-
-pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
-
-t.save({loc: [1,3] }); // Add a point that's in
-assert.isnull( db.getLastError() )
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
-
-t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening
-t.save({ loc : [3, 7] }) // Add a point above the center of the head
-t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
diff --git a/jstests/geo_polygon2.js b/jstests/geo_polygon2.js
deleted file mode 100644
index 617801bfc7b..00000000000
--- a/jstests/geo_polygon2.js
+++ /dev/null
@@ -1,266 +0,0 @@
-//
-// More tests for N-dimensional polygon querying
-//
-
-// Create a polygon of some shape (no holes)
-// using turtle graphics. Basically, will look like a very contorted octopus (quad-pus?) shape.
-// There are no holes, but some edges will probably touch.
-
-var numTests = 10
-
-for ( var test = 0; test < numTests; test++ ) {
-
- Random.srand( 1337 + test );
-
- var numTurtles = 4;
- var gridSize = [ 40, 40 ];
- var turtleSteps = 500;
- var bounds = [ Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001 ]
- var rotation = Math.PI * Random.rand();
- var bits = Math.floor( Random.rand() * 32 );
-
- printjson( { test : test, rotation : rotation, bits : bits })
-
- var rotatePoint = function( x, y ) {
-
- if( y == undefined ){
- y = x[1]
- x = x[0]
- }
-
- xp = x * Math.cos( rotation ) - y * Math.sin( rotation )
- yp = y * Math.cos( rotation ) + x * Math.sin( rotation )
-
- var scaleX = (bounds[1] - bounds[0]) / 360
- var scaleY = (bounds[1] - bounds[0]) / 360
-
- x *= scaleX
- y *= scaleY
-
- return [xp, yp]
-
- }
-
-
- var grid = []
- for ( var i = 0; i < gridSize[0]; i++ ) {
- grid.push( new Array( gridSize[1] ) )
- }
-
- grid.toString = function() {
-
- var gridStr = "";
- for ( var j = grid[0].length - 1; j >= -1; j-- ) {
- for ( var i = 0; i < grid.length; i++ ) {
- if ( i == 0 )
- gridStr += ( j == -1 ? " " : ( j % 10) ) + ": "
- if ( j != -1 )
- gridStr += "[" + ( grid[i][j] != undefined ? grid[i][j] : " " ) + "]"
- else
- gridStr += " " + ( i % 10 ) + " "
- }
- gridStr += "\n"
- }
-
- return gridStr;
- }
-
- var turtles = []
- for ( var i = 0; i < numTurtles; i++ ) {
-
- var up = ( i % 2 == 0 ) ? i - 1 : 0;
- var left = ( i % 2 == 1 ) ? ( i - 1 ) - 1 : 0;
-
- turtles[i] = [
- [ Math.floor( gridSize[0] / 2 ), Math.floor( gridSize[1] / 2 ) ],
- [ Math.floor( gridSize[0] / 2 ) + left, Math.floor( gridSize[1] / 2 ) + up ] ];
-
- grid[turtles[i][1][0]][turtles[i][1][1]] = i
-
- }
-
- grid[Math.floor( gridSize[0] / 2 )][Math.floor( gridSize[1] / 2 )] = "S"
-
- // print( grid.toString() )
-
- var pickDirections = function() {
-
- var up = Math.floor( Random.rand() * 3 )
- if ( up == 2 )
- up = -1
-
- if ( up == 0 ) {
- var left = Math.floor( Random.rand() * 3 )
- if ( left == 2 )
- left = -1
- } else
- left = 0
-
- if ( Random.rand() < 0.5 ) {
- var swap = left
- left = up
- up = swap
- }
-
- return [ left, up ]
- }
-
- for ( var s = 0; s < turtleSteps; s++ ) {
-
- for ( var t = 0; t < numTurtles; t++ ) {
-
- var dirs = pickDirections()
- var up = dirs[0]
- var left = dirs[1]
-
- var lastTurtle = turtles[t][turtles[t].length - 1]
- var nextTurtle = [ lastTurtle[0] + left, lastTurtle[1] + up ]
-
- if ( nextTurtle[0] >= gridSize[0] || nextTurtle[1] >= gridSize[1] || nextTurtle[0] < 0 || nextTurtle[1] < 0 )
- continue;
-
- if ( grid[nextTurtle[0]][nextTurtle[1]] == undefined ) {
- turtles[t].push( nextTurtle )
- grid[nextTurtle[0]][nextTurtle[1]] = t;
- }
-
- }
- }
-
- // print( grid.toString() )
-
- turtlePaths = []
- for ( var t = 0; t < numTurtles; t++ ) {
-
- turtlePath = []
-
- var nextSeg = function(currTurtle, prevTurtle) {
-
- var pathX = currTurtle[0]
-
- if ( currTurtle[1] < prevTurtle[1] ) {
- pathX = currTurtle[0] + 1
- pathY = prevTurtle[1]
- } else if ( currTurtle[1] > prevTurtle[1] ) {
- pathX = currTurtle[0]
- pathY = currTurtle[1]
- } else if ( currTurtle[0] < prevTurtle[0] ) {
- pathX = prevTurtle[0]
- pathY = currTurtle[1]
- } else if ( currTurtle[0] > prevTurtle[0] ) {
- pathX = currTurtle[0]
- pathY = currTurtle[1] + 1
- }
-
- // print( " Prev : " + prevTurtle + " Curr : " + currTurtle + " path
- // : "
- // + [pathX, pathY]);
-
- return [ pathX, pathY ]
- }
-
- for ( var s = 1; s < turtles[t].length; s++ ) {
-
- currTurtle = turtles[t][s]
- prevTurtle = turtles[t][s - 1]
-
- turtlePath.push( nextSeg( currTurtle, prevTurtle ) )
-
- }
-
- for ( var s = turtles[t].length - 2; s >= 0; s-- ) {
-
- currTurtle = turtles[t][s]
- prevTurtle = turtles[t][s + 1]
-
- turtlePath.push( nextSeg( currTurtle, prevTurtle ) )
-
- }
-
- // printjson( turtlePath )
-
- // End of the line is not inside our polygon.
- var lastTurtle = turtles[t][turtles[t].length - 1]
- grid[lastTurtle[0]][lastTurtle[1]] = undefined
-
- fixedTurtlePath = []
- for ( var s = 1; s < turtlePath.length; s++ ) {
-
- if ( turtlePath[s - 1][0] == turtlePath[s][0] && turtlePath[s - 1][1] == turtlePath[s][1] )
- continue;
-
- var up = turtlePath[s][1] - turtlePath[s - 1][1]
- var right = turtlePath[s][0] - turtlePath[s - 1][0]
- var addPoint = ( up != 0 && right != 0 )
-
- if ( addPoint && up != right ) {
- fixedTurtlePath.push( [ turtlePath[s][0], turtlePath[s - 1][1] ] )
- } else if ( addPoint ) {
- fixedTurtlePath.push( [ turtlePath[s - 1][0], turtlePath[s][1] ] )
- }
-
- fixedTurtlePath.push( turtlePath[s] )
-
- }
-
- // printjson( fixedTurtlePath )
-
- turtlePaths.push( fixedTurtlePath )
-
- }
-
- // Uncomment to print polygon shape
- // print( grid.toString() )
-
- var polygon = []
- for ( var t = 0; t < turtlePaths.length; t++ ) {
- for ( var s = 0; s < turtlePaths[t].length; s++ ) {
- polygon.push( rotatePoint( turtlePaths[t][s] ) )
- }
- }
-
- // Uncomment to print out polygon
- // printjson( polygon )
-
- t = db.polytest2
- t.drop()
-
- // Test single and multi-location documents
- var pointsIn = 0
- var pointsOut = 0
- var allPointsIn = []
- var allPointsOut = []
-
- for ( var j = grid[0].length - 1; j >= 0; j-- ) {
- for ( var i = 0; i < grid.length; i++ ) {
-
- var point = rotatePoint( [ i + 0.5, j + 0.5 ] )
-
- t.insert( { loc : point } )
- if ( grid[i][j] != undefined ){
- allPointsIn.push( point )
- pointsIn++
- }
- else{
- allPointsOut.push( point )
- pointsOut++
- }
- }
- }
-
- t.ensureIndex( { loc : "2d" }, { bits : 1 + bits, max : bounds[1], min : bounds[0] } )
- assert.isnull( db.getLastError() )
-
- t.insert( { loc : allPointsIn } )
- t.insert( { loc : allPointsOut } )
- allPoints = allPointsIn.concat( allPointsOut )
- t.insert( { loc : allPoints } )
-
- print( "Points : " )
- printjson( { pointsIn : pointsIn, pointsOut : pointsOut } )
- //print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() )
-
- assert.eq( gridSize[0] * gridSize[1] + 3, t.find().count() )
- assert.eq( 2 + pointsIn, t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() );
-
-}
diff --git a/jstests/geo_polygon3.js b/jstests/geo_polygon3.js
deleted file mode 100644
index b144bfbc589..00000000000
--- a/jstests/geo_polygon3.js
+++ /dev/null
@@ -1,54 +0,0 @@
-//
-// Tests for polygon querying with varying levels of accuracy
-//
-
-var numTests = 31;
-
-for( var n = 0; n < numTests; n++ ){
-
- t = db.geo_polygon3;
- t.drop();
-
- num = 0;
- for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- }
- }
-
- t.ensureIndex( { loc : "2d" }, { bits : 2 + n } );
-
- triangle = [[0,0], [1,1], [0,2]];
-
- // Look at only a small slice of the data within a triangle
- assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).itcount() , "Triangle Test" );
-
-
- boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
-
- assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Bounding Box Test" );
-
- // Look in a box much bigger than the one we have data in
- boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
- assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Big Bounding Box Test" );
-
- t.drop();
-
- pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
-
- t.save({loc: [1,3] }); // Add a point that's in
- t.ensureIndex( { loc : "2d" }, { bits : 2 + t } );
-
- assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman single point" );
-
- t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening
- t.save({ loc : [3, 7] }) // Add a point above the center of the head
- t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
-
- assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman double point" );
-}
diff --git a/jstests/geo_queryoptimizer.js b/jstests/geo_queryoptimizer.js
deleted file mode 100644
index 7a438bce8fb..00000000000
--- a/jstests/geo_queryoptimizer.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-t = db.geo_qo1;
-t.drop()
-
-t.ensureIndex({loc:"2d"})
-
-t.insert({'issue':0})
-t.insert({'issue':1})
-t.insert({'issue':2})
-t.insert({'issue':2, 'loc':[30.12,-118]})
-t.insert({'issue':1, 'loc':[30.12,-118]})
-t.insert({'issue':0, 'loc':[30.12,-118]})
-
-assert.eq( 6 , t.find().itcount() , "A1" )
-
-assert.eq( 2 , t.find({'issue':0}).itcount() , "A2" )
-
-assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "A3" )
-
-assert.eq( 2 , t.find({'issue':0}).itcount() , "B1" )
-
-assert.eq( 6 , t.find().itcount() , "B2" )
-
-assert.eq( 2 , t.find({'issue':0}).itcount() , "B3" )
-
-assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "B4" )
-
diff --git a/jstests/geo_regex0.js b/jstests/geo_regex0.js
deleted file mode 100644
index 79042b9074e..00000000000
--- a/jstests/geo_regex0.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// From SERVER-2247
-// Tests to make sure regex works with geo indices
-
-t = db.regex0
-t.drop()
-
-t.ensureIndex( { point : '2d', words : 1 } )
-t.insert( { point : [ 1, 1 ], words : [ 'foo', 'bar' ] } )
-
-regex = { words : /^f/ }
-geo = { point : { $near : [ 1, 1 ] } }
-both = { point : { $near : [ 1, 1 ] }, words : /^f/ }
-
-assert.eq(1, t.find( regex ).count() )
-assert.eq(1, t.find( geo ).count() )
-assert.eq(1, t.find( both ).count() )
-
-
diff --git a/jstests/geo_s2cursorlimitskip.js b/jstests/geo_s2cursorlimitskip.js
deleted file mode 100644
index a4eaf74afbc..00000000000
--- a/jstests/geo_s2cursorlimitskip.js
+++ /dev/null
@@ -1,69 +0,0 @@
-// Test various cursor behaviors
-var t = db.geo_s2getmmm
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-
-Random.setRandomSeed();
-var random = Random.rand;
-
-/*
- * To test that getmore is working within 2dsphere index.
- * We insert a bunch of points, get a cursor, and fetch some
- * of the points. Then we insert a bunch more points, and
- * finally fetch a bunch more.
- * If the final fetches work successfully, then getmore should
- * be working
- */
-function sign() { return random() > 0.5 ? 1 : -1; }
-function insertRandomPoints(num, minDist, maxDist){
- for(var i = 0; i < num; i++){
- var lat = sign() * (minDist + random() * (maxDist - minDist));
- var lng = sign() * (minDist + random() * (maxDist - minDist));
- var point = { geo: { type: "Point", coordinates: [lng, lat] } };
- t.insert(point);
- assert(!db.getLastError());
- }
-}
-
-var initialPointCount = 200
-var smallBit = 10
-var secondPointCount = 100
-
-// Insert points between 0.01 and 1.0 away.
-insertRandomPoints(initialPointCount, 0.01, 1.0);
-
-var cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).batchSize(4);
-assert.eq(cursor.count(), initialPointCount);
-
-for(var j = 0; j < smallBit; j++){
- assert(cursor.hasNext());
- cursor.next();
-}
-// We looked at (initialPointCount - smallBit) points, should be more.
-assert(cursor.hasNext())
-
-// Insert points outside of the shell we've tested thus far
-insertRandomPoints(secondPointCount, 2.01, 3.0);
-assert.eq(cursor.count(), initialPointCount + secondPointCount)
-
-for(var k = 0; k < initialPointCount + secondPointCount - smallBit; k++){
- assert(cursor.hasNext())
- var tmpPoint = cursor.next();
-}
-// Shouldn't be any more points to look at now.
-assert(!cursor.hasNext())
-
-var someLimit = 23;
-// Make sure limit does something.
-cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit)
-// Count doesn't work here -- ignores limit/skip, so we use itcount.
-assert.eq(cursor.itcount(), someLimit)
-// Make sure skip works by skipping some stuff ourselves.
-var someSkip = 3;
-cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit + someSkip)
-for (var i = 0; i < someSkip; ++i) { cursor.next(); }
-var cursor2 = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).skip(someSkip).limit(someLimit)
-while (cursor.hasNext()) {
- assert(cursor2.hasNext());
- assert.eq(cursor.next(), cursor2.next());
-}
diff --git a/jstests/geo_s2dedupnear.js b/jstests/geo_s2dedupnear.js
deleted file mode 100644
index ac31e082891..00000000000
--- a/jstests/geo_s2dedupnear.js
+++ /dev/null
@@ -1,11 +0,0 @@
-// Make sure that we don't return several of the same result due to faulty
-// assumptions about the btree cursor. That is, don't return duplicate results.
-t = db.geo_s2dedupnear
-t.drop()
-
-t.ensureIndex( { geo : "2dsphere" } )
-var x = { "type" : "Polygon",
- "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]}
-t.insert({geo: x})
-res = t.find({geo: {$geoNear: {"type" : "Point", "coordinates" : [31, 41]}}})
-assert.eq(res.itcount(), 1)
diff --git a/jstests/geo_s2descindex.js b/jstests/geo_s2descindex.js
deleted file mode 100644
index 39d153a6e55..00000000000
--- a/jstests/geo_s2descindex.js
+++ /dev/null
@@ -1,64 +0,0 @@
-//
-// Tests 2dsphere with descending fields, ensures correct lookup
-//
-
-var coll = db.getCollection("twodspheredesc");
-
-var descriptors = [["field1", -1], ["field2", -1], ["coordinates", "2dsphere"]]
-var docA = {field1 : "a", field2 : 1, coordinates : [-118.2400013, 34.073893]}
-var docB = {field1 : "b", field2 : 1, coordinates : [-118.2400012, 34.073894]}
-
-// Try both regular and near index cursors
-var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
- 0.44915760491198753]}}};
-var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0, 0]}}};
-
-//
-// The idea here is we try "2dsphere" indexes in combination with descending
-// other fields in various
-// positions and ensure that we return correct results.
-//
-
-for ( var t = 0; t < descriptors.length; t++) {
-
- var descriptor = {};
- for ( var i = 0; i < descriptors.length; i++) {
- descriptor[descriptors[i][0]] = descriptors[i][1];
- }
-
- jsTest.log("Trying 2dsphere index with descriptor " + tojson(descriptor));
-
- coll.drop();
- coll.ensureIndex(descriptor);
-
- coll.insert(docA);
- coll.insert(docB);
-
- assert.eq(1, coll.count(Object.merge(query, {field1 : "a"})));
- assert.eq(1, coll.count(Object.merge(query, {field1 : "b"})));
- assert.eq(2, coll.count(Object.merge(query, {field2 : 1})));
- assert.eq(0, coll.count(Object.merge(query, {field2 : 0})));
-
- var firstEls = descriptors.splice(1);
- descriptors = firstEls.concat(descriptors);
-}
-
-//
-// Data taken from previously-hanging result
-//
-
-jsTest.log("Trying case found in wild...");
-
-coll.drop();
-coll.ensureIndex({coordinates : "2dsphere", field : -1});
-coll.insert({coordinates : [-118.240013, 34.073893]});
-var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
- 0.44915760491198753]}},
- field : 1};
-
-assert.eq(null, coll.findOne(query));
-coll.remove({})
-coll.insert({coordinates : [-118.240013, 34.073893], field : 1});
-assert.neq(null, coll.findOne(query));
-
-jsTest.log("Success!");
diff --git a/jstests/geo_s2disjoint_holes.js b/jstests/geo_s2disjoint_holes.js
deleted file mode 100644
index cd8f3f4d58f..00000000000
--- a/jstests/geo_s2disjoint_holes.js
+++ /dev/null
@@ -1,94 +0,0 @@
-//
-// We should prohibit polygons with holes not bounded by their exterior shells.
-//
-// From spec:
-//
-// "For Polygons with multiple rings, the first must be the exterior ring and
-// any others must be interior rings or holes."
-// http://geojson.org/geojson-spec.html#polygon
-//
-
-var t = db.geo_s2disjoint_holes,
- coordinates = [
- // One square.
- [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]],
- // Another disjoint square.
- [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]
- ],
- poly = {
- type: 'Polygon',
- coordinates: coordinates
- },
- multiPoly = {
- type: 'MultiPolygon',
- // Multi-polygon's coordinates are wrapped in one more array.
- coordinates: [coordinates]
- };
-
-t.drop();
-
-jsTest.log("We're going to print some error messages, don't be alarmed.");
-
-//
-// Can't query with a polygon or multi-polygon that has a non-contained hole.
-//
-print(assert.throws(
- function() {
- t.findOne({geo: {$geoWithin: {$geometry: poly}}});
- },
- [],
- "parsing a polygon with non-overlapping holes."));
-
-print(assert.throws(
- function() {
- t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}});
- },
- [],
- "parsing a multi-polygon with non-overlapping holes."));
-
-//
-// Can't insert a bad polygon or a bad multi-polygon with a 2dsphere index.
-//
-t.createIndex({p: '2dsphere'});
-t.insert({p: poly});
-var error = t.getDB().getLastError();
-printjson(error);
-assert(error);
-
-t.insert({p: multiPoly});
-error = t.getDB().getLastError();
-printjson(error);
-assert(error);
-
-//
-// Can't create a 2dsphere index when the collection contains a bad polygon or
-// bad multi-polygon.
-//
-t.drop();
-t.insert({p: poly});
-t.createIndex({p: '2dsphere'});
-error = t.getDB().getLastError();
-printjson(error);
-assert(error);
-assert.eq(1, t.getIndexes().length);
-
-t.drop();
-t.insert({p: multiPoly});
-t.createIndex({p: '2dsphere'});
-error = t.getDB().getLastError();
-printjson(error);
-assert(error);
-assert.eq(1, t.getIndexes().length);
-
-//
-// But with no index we can insert bad polygons and bad multi-polygons.
-//
-t.drop();
-t.insert({p: poly});
-assert.eq(null, t.getDB().getLastError());
-t.insert({p: multiPoly});
-assert.eq(null, t.getDB().getLastError());
-
-t.drop();
-
-jsTest.log("Success.")
diff --git a/jstests/geo_s2dupe_points.js b/jstests/geo_s2dupe_points.js
deleted file mode 100644
index 74eb09fa63a..00000000000
--- a/jstests/geo_s2dupe_points.js
+++ /dev/null
@@ -1,72 +0,0 @@
-// See: SERVER-9240, SERVER-9401.
-// s2 rejects shapes with duplicate adjacent points as invalid, but they are
-// valid in GeoJSON. We store the duplicates, but internally remove them
-// before indexing or querying.
-t = db.geo_s2dupe_points
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-function testDuplicates(shapeName, shapeWithDupes, shapeWithoutDupes) {
- // insert a doc with dupes
- t.insert(shapeWithDupes)
- assert(!db.getLastError(), db.getLastError());
-
- // duplicates are preserved when the document is fetched by _id
- assert.eq(shapeWithDupes, t.findOne({_id: shapeName}));
- assert.neq(shapeWithoutDupes, t.findOne({_id: shapeName}).geo);
-
- // can query with $geoIntersects inserted doc using both the duplicated and de-duplicated docs
- assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithDupes.geo } } } ).itcount(), 1);
- assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithoutDupes } } } ).itcount(), 1);
-
- // direct document equality in queries is preserved
- assert.eq(t.find({ geo: shapeWithoutDupes} ).itcount(), 0);
- assert.eq(t.find({ geo: shapeWithDupes.geo } ).itcount(), 1);
-}
-
-// LineString
-var lineWithDupes = { _id: "line", geo: { type: "LineString",
- coordinates: [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6] ]
- }
-};
-var lineWithoutDupes = { type: "LineString", coordinates: [ [40,5], [41,6] ] };
-
-// Polygon
-var polygonWithDupes = { _id: "poly", geo: { type: "Polygon",
- coordinates: [
- [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
- [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], [-2.0, -2.0] ]
- ] }
-};
-var polygonWithoutDupes = { type: "Polygon",
- coordinates: [
- [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
- [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0] ]
- ]
-};
-
-// MultiPolygon
-var multiPolygonWithDupes = { _id: "multi", geo: { type: "MultiPolygon", coordinates: [
- [
- [ [102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
- ],
- [
- [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
- [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.8, 0.8], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
- ]
- ]
-} };
-var multiPolygonWithoutDupes = { type: "MultiPolygon", coordinates: [
- [
- [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
- ],
- [
- [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
- [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
- ]
- ]
-};
-
-testDuplicates("line", lineWithDupes, lineWithoutDupes);
-testDuplicates("poly", polygonWithDupes, polygonWithoutDupes);
-testDuplicates("multi", multiPolygonWithDupes, multiPolygonWithoutDupes);
diff --git a/jstests/geo_s2edgecases.js b/jstests/geo_s2edgecases.js
deleted file mode 100755
index bf46baba744..00000000000
--- a/jstests/geo_s2edgecases.js
+++ /dev/null
@@ -1,40 +0,0 @@
-t = db.geo_s2edgecases
-t.drop()
-
-roundworldpoint = { "type" : "Point", "coordinates": [ 180, 0 ] }
-
-// Opposite the equator
-roundworld = { "type" : "Polygon",
- "coordinates" : [ [ [179,1], [-179,1], [-179,-1], [179,-1], [179,1]]]}
-t.insert({geo : roundworld})
-
-roundworld2 = { "type" : "Polygon",
- "coordinates" : [ [ [179,1], [179,-1], [-179,-1], [-179,1], [179,1]]]}
-t.insert({geo : roundworld2})
-
-// North pole
-santapoint = { "type" : "Point", "coordinates": [ 180, 90 ] }
-santa = { "type" : "Polygon",
- "coordinates" : [ [ [179,89], [179,90], [-179,90], [-179,89], [179,89]]]}
-t.insert({geo : santa})
-santa2 = { "type" : "Polygon",
- "coordinates" : [ [ [179,89], [-179,89], [-179,90], [179,90], [179,89]]]}
-t.insert({geo : santa2})
-
-// South pole
-penguinpoint = { "type" : "Point", "coordinates": [ 0, -90 ] }
-penguin1 = { "type" : "Polygon",
- "coordinates" : [ [ [0,-89], [0,-90], [179,-90], [179,-89], [0,-89]]]}
-t.insert({geo : penguin1})
-penguin2 = { "type" : "Polygon",
- "coordinates" : [ [ [0,-89], [179,-89], [179,-90], [0,-90], [0,-89]]]}
-t.insert({geo : penguin2})
-
-t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : roundworldpoint} } });
-assert.eq(res.count(), 2);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : santapoint} } });
-assert.eq(res.count(), 2);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : penguinpoint} } });
-assert.eq(res.count(), 2);
diff --git a/jstests/geo_s2exact.js b/jstests/geo_s2exact.js
deleted file mode 100644
index a7cf9627765..00000000000
--- a/jstests/geo_s2exact.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Queries on exact geometry should return the exact geometry.
-t = db.geo_s2exact
-t.drop()
-
-function test(geometry) {
- t.insert({geo: geometry})
- assert.eq(1, t.find({geo: geometry}).itcount(), geometry)
- t.ensureIndex({geo: "2dsphere"})
- assert.eq(1, t.find({geo: geometry}).itcount(), geometry)
- t.dropIndex({geo: "2dsphere"})
-}
-
-pointA = { "type" : "Point", "coordinates": [ 40, 5 ] }
-test(pointA)
-
-someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]}
-test(someline)
-
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
-test(somepoly)
diff --git a/jstests/geo_s2holesameasshell.js b/jstests/geo_s2holesameasshell.js
deleted file mode 100644
index c3a127305ff..00000000000
--- a/jstests/geo_s2holesameasshell.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// If polygons have holes, the holes cannot be equal to the entire geometry.
-var t = db.geo_s2holessameasshell
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-
-var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
-var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
-var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
-
-// Various "edge" cases. None of them should be returned by the non-polygon
-// polygon below.
-t.insert({geo : centerPoint});
-t.insert({geo : edgePoint});
-t.insert({geo : cornerPoint});
-
-// This generates an empty covering.
-var polygonWithFullHole = { "type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
- ]
-};
-
-// No keys for insert should error.
-t.insert({geo: polygonWithFullHole})
-assert(db.getLastError())
-
-// No covering to search over should give an empty result set.
-assert.throws(function() {
- return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count()})
-
-// Similar polygon to the one above, but is covered by two holes instead of
-// one.
-var polygonWithTwoHolesCoveringWholeArea = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0,0], [0,0.5], [1, 0.5], [1, 0], [0, 0]],
- [[0,0.5], [0,1], [1, 1], [1, 0.5], [0, 0.5]]
- ]
-};
-
-// No keys for insert should error.
-t.insert({geo: polygonWithTwoHolesCoveringWholeArea});
-assert(db.getLastError());
-
-// No covering to search over should give an empty result set.
-assert.throws(function() {
- return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count()})
diff --git a/jstests/geo_s2index.js b/jstests/geo_s2index.js
deleted file mode 100755
index cabcea72d19..00000000000
--- a/jstests/geo_s2index.js
+++ /dev/null
@@ -1,107 +0,0 @@
-t = db.geo_s2index
-t.drop()
-
-// We internally drop adjacent duplicate points in lines.
-someline = { "type" : "LineString", "coordinates": [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6]]}
-t.insert( {geo : someline , nonGeo: "someline"})
-t.ensureIndex({geo: "2dsphere"})
-foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40,5]}}}}).next();
-assert.eq(foo.geo, someline);
-t.dropIndex({geo: "2dsphere"})
-
-pointA = { "type" : "Point", "coordinates": [ 40, 5 ] }
-t.insert( {geo : pointA , nonGeo: "pointA"})
-
-pointD = { "type" : "Point", "coordinates": [ 41.001, 6.001 ] }
-t.insert( {geo : pointD , nonGeo: "pointD"})
-
-pointB = { "type" : "Point", "coordinates": [ 41, 6 ] }
-t.insert( {geo : pointB , nonGeo: "pointB"})
-
-pointC = { "type" : "Point", "coordinates": [ 41, 6 ] }
-t.insert( {geo : pointC} )
-
-// Add a point within the polygon but not on the border. Don't want to be on
-// the path of the polyline.
-pointE = { "type" : "Point", "coordinates": [ 40.6, 5.4 ] }
-t.insert( {geo : pointE} )
-
-// Make sure we can index this without error.
-t.insert({nonGeo: "noGeoField!"})
-
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
-t.insert( {geo : somepoly, nonGeo: "somepoly" })
-
-t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
-// We have a point without any geo data. Don't error.
-assert(!db.getLastError())
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } });
-assert.eq(res.itcount(), 3);
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } });
-assert.eq(res.itcount(), 4);
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } });
-assert.eq(res.itcount(), 1);
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } })
-assert.eq(res.itcount(), 5);
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 6);
-
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 6);
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1)
-assert.eq(res.itcount(), 1);
-
-res = t.find({ "nonGeo": "pointA",
- "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
-
-// Don't crash mongod if we give it bad input.
-t.drop()
-t.ensureIndex({loc: "2dsphere", x:1})
-t.save({loc: [0,0]})
-assert.throws(function() { return t.count({loc: {$foo:[0,0]}}) })
-assert.throws(function() { return t.find({ "nonGeo": "pointA",
- "geo" : { "$geoIntersects" : { "$geometry" : somepoly},
- "$near": {"$geometry" : somepoly }}}).count()})
-
-// If we specify a datum, it has to be valid (WGS84).
-t.drop()
-t.ensureIndex({loc: "2dsphere"})
-t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'EPSG:2000'}}}})
-assert(db.getLastError());
-assert.eq(0, t.find().itcount())
-t.insert({loc: {type:'Point', coordinates: [40, 5]}})
-assert(!db.getLastError());
-t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'EPSG:4326'}}}})
-assert(!db.getLastError());
-t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS84'}}}})
-assert(!db.getLastError());
-
-// We can pass level parameters and we verify that they're valid.
-// 0 <= coarsestIndexedLevel <= finestIndexedLevel <= 30.
-t.drop();
-t.save({loc: [0,0]})
-t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 17, coarsestIndexedLevel: 5})
-assert(!db.getLastError());
-
-t.drop();
-t.save({loc: [0,0]})
-t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 31, coarsestIndexedLevel: 5})
-assert(db.getLastError());
-
-t.drop();
-t.save({loc: [0,0]})
-t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: 0})
-assert(!db.getLastError());
-
-t.drop();
-t.save({loc: [0,0]})
-t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: -1})
-assert(db.getLastError());
diff --git a/jstests/geo_s2indexoldformat.js b/jstests/geo_s2indexoldformat.js
deleted file mode 100755
index 6af593a817c..00000000000
--- a/jstests/geo_s2indexoldformat.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Make sure that the 2dsphere index can deal with non-GeoJSON points.
-// 2dsphere does not accept legacy shapes, only legacy points.
-t = db.geo_s2indexoldformat
-t.drop()
-
-t.insert( {geo : [40, 5], nonGeo: ["pointA"]})
-t.insert( {geo : [41.001, 6.001], nonGeo: ["pointD"]})
-t.insert( {geo : [41, 6], nonGeo: ["pointB"]})
-t.insert( {geo : [41, 6]} )
-t.insert( {geo : {x:40.6, y:5.4}} )
-
-t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
-
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry": {x:40, y:5}}}})
-assert.eq(res.count(), 1);
-
-res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [41,6]}}})
-assert.eq(res.count(), 2);
-
-// We don't support legacy polygons in 2dsphere.
-t.insert( {geo : [[40,5],[40,6],[41,6],[41,5]], nonGeo: ["somepoly"] })
-assert(db.getLastError());
-
-t.insert( {geo : {a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}})
-assert(db.getLastError());
-
-res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [[40,5],[40,6],[41,6],[41,5]]}}})
-assert(db.getLastError());
diff --git a/jstests/geo_s2indexversion1.js b/jstests/geo_s2indexversion1.js
deleted file mode 100644
index 8524faeddbd..00000000000
--- a/jstests/geo_s2indexversion1.js
+++ /dev/null
@@ -1,150 +0,0 @@
-// Tests 2dsphere index option "2dsphereIndexVersion". Verifies that GeoJSON objects that are new
-// in version 2 are not allowed in version 1.
-
-var coll = db.getCollection("geo_s2indexversion1");
-coll.drop();
-
-//
-// Index build should fail for invalid values of "2dsphereIndexVersion".
-//
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": -1});
-assert.gleError(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 0});
-assert.gleError(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 3});
-assert.gleError(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": Infinity});
-assert.gleError(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": "foo"});
-assert.gleError(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": {a: 1}});
-assert.gleError(db);
-coll.drop();
-
-//
-// Index build should succeed for valid values of "2dsphereIndexVersion".
-//
-
-coll.ensureIndex({geo: "2dsphere"});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(1)});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(1)});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(2)});
-assert.gleSuccess(db);
-coll.drop();
-
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(2)});
-assert.gleSuccess(db);
-coll.drop();
-
-//
-// {2dsphereIndexVersion: 2} should be the default for new indexes.
-//
-
-coll.ensureIndex({geo: "2dsphere"});
-assert.gleSuccess(db);
-var specObj = coll.getDB().system.indexes.findOne({ns: coll.getFullName(), name: "geo_2dsphere"});
-assert.eq(2, specObj["2dsphereIndexVersion"]);
-coll.drop();
-
-//
-// Test compatibility of various GeoJSON objects with both 2dsphere index versions.
-//
-
-var pointDoc = {geo: {type: "Point", coordinates: [40, 5]}};
-var lineStringDoc = {geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}};
-var polygonDoc = {geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}};
-var multiPointDoc = {geo: {type: "MultiPoint",
- coordinates: [[-73.9580, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.9814, 40.7681]]}};
-var multiLineStringDoc = {geo: {type: "MultiLineString",
- coordinates: [[[-73.96943, 40.78519], [-73.96082, 40.78095]],
- [[-73.96415, 40.79229], [-73.95544, 40.78854]],
- [[-73.97162, 40.78205], [-73.96374, 40.77715]],
- [[-73.97880, 40.77247], [-73.97036, 40.76811]]]}};
-var multiPolygonDoc = {geo: {type: "MultiPolygon",
- coordinates: [[[[-73.958, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.9814, 40.7681],
- [-73.958, 40.8003]]],
- [[[-73.958, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.958, 40.8003]]]]}};
-var geometryCollectionDoc = {geo: {type: "GeometryCollection",
- geometries: [{type: "MultiPoint",
- coordinates: [[-73.9580, 40.8003],
- [-73.9498, 40.7968],
- [-73.9737, 40.7648],
- [-73.9814, 40.7681]]},
- {type: "MultiLineString",
- coordinates: [[[-73.96943, 40.78519],
- [-73.96082, 40.78095]],
- [[-73.96415, 40.79229],
- [-73.95544, 40.78854]],
- [[-73.97162, 40.78205],
- [-73.96374, 40.77715]],
- [[-73.97880, 40.77247],
- [-73.97036, 40.76811]]]}]}};
-
-// {2dsphereIndexVersion: 2} indexes allow all supported GeoJSON objects.
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2});
-assert.gleSuccess(db);
-coll.insert(pointDoc);
-assert.gleSuccess(db);
-coll.insert(lineStringDoc);
-assert.gleSuccess(db);
-coll.insert(polygonDoc);
-assert.gleSuccess(db);
-coll.insert(multiPointDoc);
-assert.gleSuccess(db);
-coll.insert(multiLineStringDoc);
-assert.gleSuccess(db);
-coll.insert(multiPolygonDoc);
-assert.gleSuccess(db);
-coll.insert(geometryCollectionDoc);
-assert.gleSuccess(db);
-coll.drop();
-
-// {2dsphereIndexVersion: 1} indexes allow only Point, LineString, and Polygon.
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1});
-assert.gleSuccess(db);
-coll.insert(pointDoc);
-assert.gleSuccess(db);
-coll.insert(lineStringDoc);
-assert.gleSuccess(db);
-coll.insert(polygonDoc);
-assert.gleSuccess(db);
-coll.insert(multiPointDoc);
-assert.gleError(db);
-coll.insert(multiLineStringDoc);
-assert.gleError(db);
-coll.insert(multiPolygonDoc);
-assert.gleError(db);
-coll.insert(geometryCollectionDoc);
-assert.gleError(db);
-coll.drop();
diff --git a/jstests/geo_s2intersection.js b/jstests/geo_s2intersection.js
deleted file mode 100644
index 42abacca98d..00000000000
--- a/jstests/geo_s2intersection.js
+++ /dev/null
@@ -1,141 +0,0 @@
-var t = db.geo_s2intersectinglines
-t.drop()
-t.ensureIndex( { geo : "2dsphere" } );
-
-/* All the tests in this file are generally confirming intersections based upon
- * these three geo objects.
- */
-var canonLine = {
- name: 'canonLine',
- geo: {
- type: "LineString",
- coordinates: [[0.0, 0.0], [1.0, 0.0]]
- }
-};
-
-var canonPoint = {
- name: 'canonPoint',
- geo: {
- type: "Point",
- coordinates: [10.0, 10.0]
- }
-};
-
-var canonPoly = {
- name: 'canonPoly',
- geo: {
- type: "Polygon",
- coordinates: [
- [[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]]
- ]
- }
-};
-
-t.insert(canonLine);
-t.insert(canonPoint);
-t.insert(canonPoly);
-
-
-//Case 1: Basic sanity intersection.
-var testLine = {type: "LineString",
- coordinates: [[0.5, 0.5], [0.5, -0.5]]};
-
-var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonLine');
-
-
-//Case 2: Basic Polygon intersection.
-// we expect that the canonLine should intersect with this polygon.
-var testPoly = {type: "Polygon",
- coordinates: [
- [[0.4, -0.1],[0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]]
- ]}
-
-result = t.find({geo: {$geoIntersects: {$geometry: testPoly}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonLine');
-
-
-//Case 3: Intersects the vertex of a line.
-// When a line intersects the vertex of a line, we expect this to
-// count as a geoIntersection.
-testLine = {type: "LineString",
- coordinates: [[0.0, 0.5], [0.0, -0.5]]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonLine');
-
-// Case 4: Sanity no intersection.
-// This line just misses the canonLine in the negative direction. This
-// should not count as a geoIntersection.
-testLine = {type: "LineString",
- coordinates: [[-0.1, 0.5], [-0.1, -0.5]]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 0);
-
-
-// Case 5: Overlapping line - only partially overlaps.
-// Undefined behaviour: does intersect
-testLine = {type: "LineString",
- coordinates: [[-0.5, 0.0], [0.5, 0.0]]};
-
-var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonLine');
-
-
-// Case 6: Contained line - this line is fully contained by the canonLine
-// Undefined behaviour: doesn't intersect.
-testLine = {type: "LineString",
- coordinates: [[0.1, 0.0], [0.9, 0.0]]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 0);
-
-// Case 7: Identical line in the identical position.
-// Undefined behaviour: does intersect.
-testLine = {type: "LineString",
- coordinates: [[0.0, 0.0], [1.0, 0.0]]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonLine');
-
-// Case 8: Point intersection - we search with a line that intersects
-// with the canonPoint.
-testLine = {type: "LineString",
- coordinates: [[10.0, 11.0], [10.0, 9.0]]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonPoint');
-
-// Case 9: Point point intersection
-// as above but with an identical point to the canonPoint. We expect an
-// intersection here.
-testPoint = {type: "Point",
- coordinates: [10.0, 10.0]}
-
-result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonPoint');
-
-
-//Case 10: Sanity point non-intersection.
-var testPoint = {type: "Point",
- coordinates: [12.0, 12.0]}
-
-result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
-assert.eq(result.count(), 0);
-
-// Case 11: Point polygon intersection
-// verify that a point inside a polygon $geoIntersects.
-testPoint = {type: "Point",
- coordinates: [50.5, 50.5]}
-
-result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
-assert.eq(result.count(), 1);
-assert.eq(result[0]['name'], 'canonPoly');
diff --git a/jstests/geo_s2largewithin.js b/jstests/geo_s2largewithin.js
deleted file mode 100644
index 2327f1fb02d..00000000000
--- a/jstests/geo_s2largewithin.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// If our $within is enormous, create a coarse covering for the search so it
-// doesn't take forever.
-t = db.geo_s2largewithin
-t.drop()
-t.ensureIndex( { geo : "2dsphere" } );
-
-testPoint = {
- name: "origin",
- geo: {
- type: "Point",
- coordinates: [0.0, 0.0]
- }
-};
-
-testHorizLine = {
- name: "horiz",
- geo: {
- type: "LineString",
- coordinates: [[-2.0, 10.0], [2.0, 10.0]]
- }
-};
-
-testVertLine = {
- name: "vert",
- geo: {
- type: "LineString",
- coordinates: [[10.0, -2.0], [10.0, 2.0]]
- }
-};
-
-t.insert(testPoint);
-t.insert(testHorizLine);
-t.insert(testVertLine);
-
-//Test a poly that runs horizontally along the equator.
-
-longPoly = {type: "Polygon",
- coordinates: [
- [[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]]
- ]};
-
-result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
-assert.eq(result.itcount(), 1);
-result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
-assert.eq("origin", result[0].name)
diff --git a/jstests/geo_s2meridian.js b/jstests/geo_s2meridian.js
deleted file mode 100644
index 0d5b4b20e6d..00000000000
--- a/jstests/geo_s2meridian.js
+++ /dev/null
@@ -1,109 +0,0 @@
-t = db.geo_s2meridian;
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-
-/*
- * Test 1: check that intersection works on the meridian. We insert a line
- * that crosses the meridian, and then run a geoIntersect with a line
- * that runs along the meridian.
- */
-
-meridianCrossingLine = {
- geo: {
- type: "LineString",
- coordinates: [
- [-178.0, 10.0],
- [178.0, 10.0]]
- }
-};
-
-t.insert(meridianCrossingLine);
-assert(! db.getLastError());
-
-lineAlongMeridian = {
- type: "LineString",
- coordinates: [
- [180.0, 11.0],
- [180.0, 9.0]
- ]
-}
-
-result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}});
-assert.eq(result.itcount(), 1);
-
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-/*
- * Test 2: check that within work across the meridian. We insert points
- * on the meridian, and immediately on either side, and confirm that a poly
- * covering all of them returns them all.
- */
-pointOnNegativeSideOfMeridian = {
- geo: {
- type: "Point",
- coordinates: [-179.0, 1.0]
- }
-};
-pointOnMeridian = {
- geo: {
- type: "Point",
- coordinates: [180.0, 1.0]
- }
-};
-pointOnPositiveSideOfMeridian = {
- geo: {
- type: "Point",
- coordinates: [179.0, 1.0]
- }
-};
-
-t.insert(pointOnMeridian);
-t.insert(pointOnNegativeSideOfMeridian);
-t.insert(pointOnPositiveSideOfMeridian);
-
-meridianCrossingPoly = {
- type: "Polygon",
- coordinates: [
- [[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]]
- ]
-};
-
-result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}});
-assert.eq(result.itcount(), 3);
-
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-/*
- * Test 3: Check that near works around the meridian. Insert two points, one
- * closer, but across the meridian, and confirm they both come back, and
- * that the order is correct.
- */
-pointOnNegativeSideOfMerid = {
- name: "closer",
- geo: {
- type: "Point",
- coordinates: [-179.0, 0.0]
- }
-};
-
-pointOnPositiveSideOfMerid = {
- name: "farther",
- geo: {
- type: "Point",
- coordinates: [176.0, 0.0]
- }
-};
-
-t.insert(pointOnNegativeSideOfMerid);
-t.insert(pointOnPositiveSideOfMerid);
-
-pointOnPositiveSideOfMeridian = {
- type: "Point",
- coordinates: [179.0, 0.0]
-};
-
-result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
-assert.eq(result.itcount(), 2);
-result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
-assert.eq(result[0].name, "closer");
-assert.eq(result[1].name, "farther");
diff --git a/jstests/geo_s2multi.js b/jstests/geo_s2multi.js
deleted file mode 100644
index b40eef5543e..00000000000
--- a/jstests/geo_s2multi.js
+++ /dev/null
@@ -1,50 +0,0 @@
-t = db.geo_s2index
-t.drop()
-
-t.ensureIndex({geo: "2dsphere"})
-
-// Let's try the examples in the GeoJSON spec.
-multiPointA = { "type": "MultiPoint", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] }
-t.insert({geo: multiPointA});
-assert(!db.getLastError());
-
-multiLineStringA = { "type": "MultiLineString", "coordinates": [ [ [100.0, 0.0], [101.0, 1.0] ],
- [ [102.0, 2.0], [103.0, 3.0] ]]}
-t.insert({geo: multiLineStringA});
-assert(!db.getLastError());
-
-multiPolygonA = { "type": "MultiPolygon", "coordinates": [
- [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
- [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
- [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]]}
-t.insert({geo: multiPolygonA})
-assert(!db.getLastError());
-
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [100,0]}}}}).itcount());
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [101.0,1.0]}}}}).itcount());
-
-// Inside the hole in multiPolygonA
-assert.eq(0, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [100.21,0.21]}}}}).itcount());
-
-// One point inside the hole, one out.
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21]]}}}}).itcount());
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21],[101,1]]}}}}).itcount());
-// Polygon contains itself and the multipoint.
-assert.eq(2, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
-
-partialPolygonA = { "type": "Polygon", "coordinates":
- [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ] };
-t.insert({geo: partialPolygonA});
-assert(!db.getLastError());
-// Polygon contains itself, the partial poly, and the multipoint
-assert.eq(3, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
-
-assert.eq(1, t.find({geo: {$geoWithin: {$geometry: partialPolygonA}}}).itcount());
-
-// Itself, the multi poly, the multipoint...
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: partialPolygonA}}}).itcount());
diff --git a/jstests/geo_s2near.js b/jstests/geo_s2near.js
deleted file mode 100644
index 136e821b4b8..00000000000
--- a/jstests/geo_s2near.js
+++ /dev/null
@@ -1,84 +0,0 @@
-// Test 2dsphere near search, called via find and geoNear.
-t = db.geo_s2near
-t.drop();
-
-// Make sure that geoNear gives us back loc
-goldenPoint = {type: "Point", coordinates: [ 31.0, 41.0]}
-t.insert({geo: goldenPoint})
-t.ensureIndex({ geo : "2dsphere" })
-resNear = db.runCommand({geoNear : t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true})
-assert.eq(resNear.results[0].loc, goldenPoint)
-
-// FYI:
-// One degree of long @ 0 is 111km or so.
-// One degree of lat @ 0 is 110km or so.
-lat = 0
-lng = 0
-points = 10
-for (var x = -points; x < points; x += 1) {
- for (var y = -points; y < points; y += 1) {
- t.insert({geo : { "type" : "Point", "coordinates" : [lng + x/1000.0, lat + y/1000.0]}})
- }
-}
-
-origin = { "type" : "Point", "coordinates": [ lng, lat ] }
-
-t.ensureIndex({ geo : "2dsphere" })
-
-// Near only works when the query is a point.
-someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]}
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
-assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : someline } } }).count()})
-assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : somepoly } } }).count()})
-assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: someline, spherical:true }).results.length})
-assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: somepoly, spherical:true }).results.length})
-
-// Do some basic near searches.
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 2000} } }).limit(10)
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, maxDistance: Math.PI, spherical: true})
-assert.eq(res.itcount(), resNear.results.length, 10)
-
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10)
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, spherical: true})
-assert.eq(res.itcount(), resNear.results.length, 10)
-
-// Find all the points!
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000)
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true})
-assert.eq(resNear.results.length, res.itcount(), (2 * points) * (2 * points))
-
-// longitude goes -180 to 180
-// latitude goes -90 to 90
-// Let's put in some perverse (polar) data and make sure we get it back.
-// Points go long, lat.
-t.insert({geo: { "type" : "Point", "coordinates" : [-180, -90]}})
-t.insert({geo: { "type" : "Point", "coordinates" : [180, -90]}})
-t.insert({geo: { "type" : "Point", "coordinates" : [180, 90]}})
-t.insert({geo: { "type" : "Point", "coordinates" : [-180, 90]}})
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000)
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true})
-assert.eq(res.itcount(), resNear.results.length, (2 * points) * (2 * points) + 4)
-
-function testRadAndDegreesOK(distance) {
- // Distance for old style points is radians.
- resRadians = t.find({geo: {$nearSphere: [0,0], $maxDistance: (distance / (6378.1 * 1000))}})
- // Distance for new style points is meters.
- resMeters = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: distance} } })
- // And we should get the same # of results no matter what.
- assert.eq(resRadians.itcount(), resMeters.itcount())
-
- // Also, geoNear should behave the same way.
- resGNMeters = db.runCommand({geoNear : t.getName(), near: origin, maxDistance: distance, spherical: true})
- resGNRadians = db.runCommand({geoNear : t.getName(), near: [0,0], maxDistance: (distance / (6378.1 * 1000)), spherical: true})
- assert.eq(resGNRadians.results.length, resGNMeters.results.length)
- for (var i = 0; i < resGNRadians.length; ++i) {
- // Radius of earth * radians = distance in meters.
- assert.close(resGNRadians.results[i].dis * 6378.1 * 1000, resGNMeters.results[i].dis)
- }
-}
-
-testRadAndDegreesOK(1);
-testRadAndDegreesOK(10)
-testRadAndDegreesOK(50)
-testRadAndDegreesOK(10000)
diff --git a/jstests/geo_s2nearComplex.js b/jstests/geo_s2nearComplex.js
deleted file mode 100644
index 16a24d6db24..00000000000
--- a/jstests/geo_s2nearComplex.js
+++ /dev/null
@@ -1,269 +0,0 @@
-var t = db.get_s2nearcomplex
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-/* Short names for math operations */
-Random.setRandomSeed();
-var random = Random.rand;
-var PI = Math.PI;
-var asin = Math.asin;
-var sin = Math.sin;
-var cos = Math.cos;
-var atan2 = Math.atan2
-
-
-var originGeo = {type: "Point", coordinates: [20.0, 20.0]};
-// Center point for all tests.
-var origin = {
- name: "origin",
- geo: originGeo
-}
-
-
-/*
- * Convenience function for checking that coordinates match. threshold let's you
- * specify how accurate equals should be.
- */
-function coordinateEqual(first, second, threshold){
- threshold = threshold || 0.001
- first = first['geo']['coordinates']
- second = second['geo']['coordinates']
- if(Math.abs(first[0] - second[0]) <= threshold){
- if(Math.abs(first[1] - second[1]) <= threshold){
- return true;
- }
- }
- return false;
-}
-
-/*
- * Creates `count` random and uniformly distributed points centered around `origin`
- * no points will be closer to origin than minDist, and no points will be further
- * than maxDist. Points will be inserted into the global `t` collection, and will
- * be returned.
- * based on this algorithm: http://williams.best.vwh.net/avform.htm#LL
- */
-function uniformPoints(origin, count, minDist, maxDist){
- var i;
- var lng = origin['geo']['coordinates'][0];
- var lat = origin['geo']['coordinates'][1];
- var distances = [];
- var points = [];
- for(i=0; i < count; i++){
- distances.push((random() * (maxDist - minDist)) + minDist);
- }
- distances.sort();
- while(points.length < count){
- var angle = random() * 2 * PI;
- var distance = distances[points.length];
- var pointLat = asin((sin(lat) * cos(distance)) + (cos(lat) * sin(distance) * cos(angle)));
- var pointDLng = atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat));
- var pointLng = ((lng - pointDLng + PI) % 2*PI) - PI;
-
- // Latitude must be [-90, 90]
- var newLat = lat + pointLat;
- if (newLat > 90) newLat -= 180;
- if (newLat < -90) newLat += 180;
-
- // Longitude must be [-180, 180]
- var newLng = lng + pointLng;
- if (newLng > 180) newLng -= 360;
- if (newLng < -180) newLng += 360;
-
- var newPoint = {
- geo: {
- type: "Point",
- //coordinates: [lng + pointLng, lat + pointLat]
- coordinates: [newLng, newLat]
- }
- };
-
- points.push(newPoint);
- }
- for(i=0; i < points.length; i++){
- t.insert(points[i]);
- assert(!db.getLastError());
- }
- return points;
-}
-
-/*
- * Creates a random uniform field as above, excepting for `numberOfHoles` gaps that
- * have `sizeOfHoles` points missing centered around a random point.
- */
-function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles){
- var points = uniformPoints(origin, count, minDist, maxDist);
- var i;
- for(i=0; i<numberOfHoles; i++){
- var randomPoint = points[Math.floor(random() * points.length)];
- removeNearest(randomPoint, sizeOfHoles);
- }
-}
-
-/*
- * Creates a random uniform field as above, expcepting for `numberOfClusters` clusters,
- * which will consist of N points where `minClusterSize` <= N <= `maxClusterSize.
- * you may specify an optional `distRatio` parameter which will specify the area that the cluster
- * covers as a fraction of the full area that points are created on. Defaults to 10.
- */
-function uniformPointsWithClusters(origin, count, minDist, maxDist, numberOfClusters, minClusterSize, maxClusterSize, distRatio){
- distRatio = distRatio || 10
- var points = uniformPoints(origin, count, minDist, maxDist);
- for(j=0; j<numberOfClusters; j++){
- var randomPoint = points[Math.floor(random() * points.length)];
- var clusterSize = (random() * (maxClusterSize - minClusterSize)) + minClusterSize;
- uniformPoints(randomPoint, clusterSize, minDist / distRatio, maxDist / distRatio);
- }
-}
-/*
- * Function used to create gaps in existing point field. Will remove the `number` nearest
- * geo objects to the specified `point`.
- */
-function removeNearest(point, number){
- var pointsToRemove = t.find({geo: {$geoNear: {$geometry: point['geo']}}}).limit(number);
- var idsToRemove = [];
- while(pointsToRemove.hasNext()){
- point = pointsToRemove.next();
- idsToRemove.push(point['_id']);
- }
-
- t.remove({_id: {$in: idsToRemove}});
-}
-/*
- * Validates the ordering of the nearest results is the same no matter how many
- * geo objects are requested. This could fail if two points have the same dist
- * from origin, because they may not be well-ordered. If we see strange failures,
- * we should consider that.
- */
-function validateOrdering(query){
- var near10 = t.find(query).limit(10);
- var near20 = t.find(query).limit(20);
- var near30 = t.find(query).limit(30);
- var near40 = t.find(query).limit(40);
-
- for(i=0;i<10;i++){
- assert(coordinateEqual(near10[i], near20[i]));
- assert(coordinateEqual(near10[i], near30[i]));
- assert(coordinateEqual(near10[i], near40[i]));
- }
-
- for(i=0;i<20;i++){
- assert(coordinateEqual(near20[i], near30[i]));
- assert(coordinateEqual(near20[i], near40[i]));
- }
-
- for(i=0;i<30;i++){
- assert(coordinateEqual(near30[i], near40[i]));
- }
-}
-
-var query = {geo: {$geoNear: {$geometry: originGeo}}};
-
-// Test a uniform distribution of 10000 points.
-uniformPoints(origin, 10000, 0.5, 1.5);
-
-validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
-
-print("Millis for uniform:")
-print(t.find(query).explain().millis)
-print("Total points:");
-print(t.find(query).itcount());
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-// Test a uniform distribution with 5 gaps each with 10 points missing.
-uniformPointsWithGaps(origin, 10000, 1, 10.0, 5, 10);
-
-validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
-
-print("Millis for uniform with gaps:")
-print(t.find(query).explain().millis)
-print("Total points:");
-print(t.find(query).itcount());
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-// Test a uniform distribution with 5 clusters each with between 10 and 100 points.
-uniformPointsWithClusters(origin, 10000, 1, 10.0, 5, 10, 100);
-
-validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
-
-print("Millis for uniform with clusters:");
-print(t.find(query).explain().millis);
-print("Total points:");
-print(t.find(query).itcount());
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-// Test a uniform near search with origin around the pole.
-
-// Center point near pole.
-originGeo = {type: "Point", coordinates: [0.0, 89.0]};
-origin = {
- name: "origin",
- geo: originGeo
-}
-uniformPoints(origin, 50, 0.5, 1.5);
-
-validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
-
-print("Millis for uniform near pole:")
-print(t.find({geo: {$geoNear: {$geometry: originGeo}}}).explain().millis)
-assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-// Center point near the meridian
-originGeo = {type: "Point", coordinates: [179.0, 0.0]};
-origin = {
- name: "origin",
- geo: originGeo
-}
-uniformPoints(origin, 50, 0.5, 1.5);
-
-validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
-
-print("Millis for uniform on meridian:")
-print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis)
-assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-// Center point near the negative meridian
-originGeo = {type: "Point", coordinates: [-179.0, 0.0]};
-origin = {
- name: "origin",
- geo: originGeo
-}
-uniformPoints(origin, 50, 0.5, 1.5);
-
-validateOrdering({geo: {$near: {$geometry: originGeo}}})
-
-print("Millis for uniform on negative meridian:");
-print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
-assert.eq(t.find({geo: {$near: {$geometry: originGeo}}}).itcount(), 50);
-
-// Near search with points that are really far away.
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-originGeo = {type: "Point", coordinates: [0.0, 0.0]};
-origin = {
- name: "origin",
- geo: originGeo
-}
-
-uniformPoints(origin, 10, 89, 90);
-
-cur = t.find({geo: {$near: {$geometry: originGeo}}})
-
-assert.eq(cur.itcount(), 10);
-cur = t.find({geo: {$near: {$geometry: originGeo}}})
-
-print("Near search on very distant points:");
-print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
-pt = cur.next();
-assert(pt)
diff --git a/jstests/geo_s2near_equator_opposite.js b/jstests/geo_s2near_equator_opposite.js
deleted file mode 100644
index 8ee5d486d5e..00000000000
--- a/jstests/geo_s2near_equator_opposite.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Tests geo near with 2 points diametrically opposite to each other
-// on the equator
-// First reported in SERVER-11830 as a regression in 2.5
-
-var t = db.geos2nearequatoropposite;
-
-t.drop();
-
-t.insert({loc: {type: 'Point', coordinates: [0, 0]}});
-t.insert({loc: {type: 'Point', coordinates: [-1, 0]}});
-
-t.ensureIndex({loc: '2dsphere'});
-
-// upper bound for half of earth's circumference in meters
-var dist = 40075000 / 2 + 1;
-
-var nearSphereCount = t.find({loc: {$nearSphere:
- {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
-var nearCount = t.find({loc: {$near:
- {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
-var geoNearResult = db.runCommand({geoNear: t.getName(), near:
- {type: 'Point', coordinates: [180, 0]}, spherical: true});
-
-print('nearSphere count = ' + nearSphereCount);
-print('near count = ' + nearCount);
-print('geoNearResults = ' + tojson(geoNearResult));
-
-assert.eq(2, nearSphereCount, 'unexpected document count for nearSphere');
-assert.eq(2, nearCount, 'unexpected document count for near');
-assert.eq(2, geoNearResult.results.length, 'unexpected document count in geoNear results');
-assert.gt(dist, geoNearResult.stats.maxDistance, 'unexpected maximum distance in geoNear results');
diff --git a/jstests/geo_s2nearcorrect.js b/jstests/geo_s2nearcorrect.js
deleted file mode 100644
index cdb5404a08d..00000000000
--- a/jstests/geo_s2nearcorrect.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// SERVER-9484
-// A geometry may have several covers, one of which is in a search ring and the other of which is
-// not. If we see the cover that's not in the search ring, we can't mark the object as 'seen' for
-// this ring.
-t = db.geo_s2nearcorrect
-t.drop()
-
-longline = { "type" : "LineString", "coordinates": [ [0,0], [179, 89]]};
-t.insert({geo: longline});
-t.ensureIndex({geo: "2dsphere"});
-origin = { "type" : "Point", "coordinates": [ 45, 45] }
-assert.eq(1, t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 20000000} } }).count());
diff --git a/jstests/geo_s2nearwithin.js b/jstests/geo_s2nearwithin.js
deleted file mode 100644
index 99ac12df803..00000000000
--- a/jstests/geo_s2nearwithin.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Test geoNear + $within.
-t = db.geo_s2nearwithin
-t.drop();
-
-points = 10
-for (var x = -points; x < points; x += 1) {
- for (var y = -points; y < points; y += 1) {
- t.insert({geo: [x, y]})
- }
-}
-
-origin = { "type" : "Point", "coordinates": [ 0, 0] }
-
-t.ensureIndex({ geo : "2dsphere" })
-// Near requires an index, and 2dsphere is an index. Spherical isn't
-// specified so this doesn't work.
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0],
- query: {geo: {$within: {$center: [[0, 0], 1]}}}})
-assert(db.getLastError());
-
-// Spherical is specified so this does work. Old style points are weird
-// because you can use them with both $center and $centerSphere. Points are
-// the only things we will do this conversion for.
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$center: [[0, 0], 1]}}}})
-assert.eq(resNear.results.length, 5)
-
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[0, 0], Math.PI/180.0]}}}})
-assert.eq(resNear.results.length, 5)
-
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[0, 0], 0]}}}})
-assert.eq(resNear.results.length, 1)
-
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[1, 0], 0.5 * Math.PI/180.0]}}}})
-assert.eq(resNear.results.length, 1)
-
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$center: [[1, 0], 1.5]}}}})
-assert.eq(resNear.results.length, 9)
diff --git a/jstests/geo_s2nongeoarray.js b/jstests/geo_s2nongeoarray.js
deleted file mode 100644
index a1d51929563..00000000000
--- a/jstests/geo_s2nongeoarray.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Explode arrays when indexing non-geo fields in 2dsphere, and make sure that
-// we find them with queries.
-t = db.geo_s2nongeoarray
-
-oldPoint = [40,5]
-
-var data = {geo: oldPoint, nonGeo: [123,456], otherNonGeo: [{b:[1,2]},{b:[3,4]}]};
-
-t.drop();
-t.insert(data);
-assert(!db.getLastError());
-t.ensureIndex({otherNonGeo: 1});
-assert(!db.getLastError());
-assert.eq(1, t.find({otherNonGeo: {b:[1,2]}}).itcount());
-assert.eq(0, t.find({otherNonGeo: 1}).itcount());
-assert.eq(1, t.find({'otherNonGeo.b': 1}).itcount());
-
-t.drop();
-t.insert(data);
-t.ensureIndex({geo: "2d", nonGeo: 1, otherNonGeo: 1})
-assert.eq(t.find({nonGeo: 123, geo: {$nearSphere: oldPoint}}).itcount(), 1);
-assert.eq(t.find({'otherNonGeo.b': 1, geo: {$nearSphere: oldPoint}}).itcount(), 1);
-
-t.drop()
-t.insert(data);
-t.ensureIndex({geo: "2dsphere", nonGeo: 1, otherNonGeo: 1})
-assert.eq(t.find({nonGeo: 123, geo: {$nearSphere: oldPoint}}).itcount(), 1);
-assert.eq(t.find({'otherNonGeo.b': 1, geo: {$nearSphere: oldPoint}}).itcount(), 1);
diff --git a/jstests/geo_s2nonstring.js b/jstests/geo_s2nonstring.js
deleted file mode 100755
index 11fc8f4f4c4..00000000000
--- a/jstests/geo_s2nonstring.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// Added to make sure that S2 indexing's string AND non-string keys work.
-t = db.geo_s2nonstring
-t.drop()
-
-t.ensureIndex( { geo:'2dsphere', x:1 } );
-
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:5 } );
-
-t.drop()
-t.ensureIndex( { geo:'2dsphere', x:1 } );
-
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] } } );
-
-// Expect 1 match, where x is 'a'
-assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
- $maxDistance: 20 } }, x:'a' } ) );
-
-// Expect 1 match, where x matches null (missing matches null).
-assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
- $maxDistance: 20 } }, x:null } ) );
diff --git a/jstests/geo_s2nopoints.js b/jstests/geo_s2nopoints.js
deleted file mode 100644
index c897f39f815..00000000000
--- a/jstests/geo_s2nopoints.js
+++ /dev/null
@@ -1,7 +0,0 @@
-// See SERVER-7794.
-t = db.geo_s2nopoints
-t.drop()
-
-t.ensureIndex({loc: "2dsphere", x:1})
-assert.eq(0, t.count({loc: {$near: {$geometry: {type: 'Point', coordinates:[0,0]},
- $maxDistance: 10}}}))
diff --git a/jstests/geo_s2oddshapes.js b/jstests/geo_s2oddshapes.js
deleted file mode 100644
index 24a318d5b98..00000000000
--- a/jstests/geo_s2oddshapes.js
+++ /dev/null
@@ -1,138 +0,0 @@
-// Verify that odd polygons (huge or "narrow") behave as we expect.
-// Note that since 2dsphere is spherical, polygons that seem narrow are actually
-// rather wide if their latitude (or longitude) range is large.
-var t = db.geo_s2oddshapes
-t.drop()
-t.ensureIndex( { geo : "2dsphere" } );
-
-var testPoint = {
- name: "origin",
- geo: {
- type: "Point",
- coordinates: [0.0, 0.0]
- }
-};
-
-var testHorizLine = {
- name: "horiz",
- geo: {
- type: "LineString",
- coordinates: [[-2.0, 10.0], [2.0, 10.0]]
- }
-};
-
-var testVertLine = {
- name: "vert",
- geo: {
- type: "LineString",
- coordinates: [[10.0, -2.0], [10.0, 2.0]]
- }
-};
-
-t.insert(testPoint);
-t.insert(testHorizLine);
-t.insert(testVertLine);
-
-//Test a poly that runs vertically all the way along the meridian.
-
-var tallPoly = {type: "Polygon",
- coordinates: [
- [[1.0, 89.0], [-1.0, 89.0], [-1.0, -89.0], [1.0, -89.0], [1.0, 89.0]]
- ]};
-//We expect that the testPoint (at the origin) will be within this poly.
-var result = t.find({geo: {$within: {$geometry: tallPoly}}});
-assert.eq(result.itcount(), 1);
-var result = t.find({geo: {$within: {$geometry: tallPoly}}});
-assert.eq(result[0].name, 'origin');
-
-//We expect that the testPoint, and the testHorizLine should geoIntersect
-//with this poly.
-result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
-assert.eq(result.itcount(), 2);
-result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
-
-//Test a poly that runs horizontally along the equator.
-
-var longPoly = {type: "Polygon",
- coordinates: [
- [[89.0, 1.0], [-89.0, 1.0], [-89.0, -1.0], [89.0, -1.0], [89.0, 1.0]]
- ]};
-
-// Thanks to spherical geometry, this poly contains most of the hemisphere.
-result = t.find({geo: {$within: {$geometry: longPoly}}});
-assert.eq(result.itcount(), 3);
-result = t.find({geo: {$geoIntersects: {$geometry: longPoly}}});
-assert.eq(result.itcount(), 3);
-
-//Test a poly that is the size of half the earth.
-
-t.drop()
-t.ensureIndex( { geo : "2dsphere" } );
-
-var insidePoint = {
- name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [100.0, 0.0]
- }
-};
-
-var outsidePoint = {
- name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [-100.0, 0.0]
- }
-};
-
-t.insert(insidePoint);
-t.insert(outsidePoint);
-
-var largePoly = {type: "Polygon",
- coordinates: [
- [[0.0, -90.0], [0.0, 90.0], [180.0, 0], [0.0, -90.0]]
- ]};
-
-result = t.find({geo: {$within: {$geometry: largePoly}}});
-assert.eq(result.itcount(), 1);
-result = t.find({geo: {$within: {$geometry: largePoly}}});
-var point = result[0]
-assert.eq(point.name, 'inside');
-
-//Test a poly that is very small. A couple meters around.
-
-t.drop()
-t.ensureIndex( { geo : "2dsphere" } );
-
-insidePoint = {
- name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [0.01, 0.0]
- }};
-
-outsidePoint = {
- name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [0.2, 0.0]
- }};
-
-t.insert(insidePoint);
-t.insert(outsidePoint);
-
-smallPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -0.01], [0.015, -0.01], [0.015, 0.01], [0.0, 0.01], [0.0, -0.01]]
- ]};
-
-result = t.find({geo: {$within: {$geometry: smallPoly}}});
-assert.eq(result.itcount(), 1);
-result = t.find({geo: {$within: {$geometry: smallPoly}}});
-point = result[0]
-assert.eq(point.name, 'inside');
-
diff --git a/jstests/geo_s2ordering.js b/jstests/geo_s2ordering.js
deleted file mode 100644
index 531f22a6254..00000000000
--- a/jstests/geo_s2ordering.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// This tests that 2dsphere indices can be ordered arbitrarily, and that the ordering
-// actually matters for lookup speed. That is, if we're looking for a non-geo key of which
-// there are not many, the index order (nongeo, geo) should be faster than (geo, nongeo)
-// for 2dsphere.
-t = db.geo_s2ordering
-t.drop();
-
-needle = "hari"
-
-// We insert lots of points in a region and look for a non-geo key which is rare.
-function makepoints(needle) {
- lat = 0
- lng = 0
- points = 200.0
- for (var x = -points; x < points; x += 1) {
- for (var y = -points; y < points; y += 1) {
- tag = x.toString() + "," + y.toString();
- t.insert({nongeo: tag, geo : { "type" : "Point", "coordinates" : [lng + x/points, lat + y/points]}})
- }
- }
- t.insert({nongeo: needle, geo : { "type" : "Point", "coordinates" : [0,0]}})
-}
-
-function runTest(index) {
- t.ensureIndex(index)
- // If both tests take longer than this, then we will error. This is intentional
- // since the tests shouldn't take that long.
- mintime = 100000.0;
- resultcount = 0;
- iterations = 10;
- for (var x = 0; x < iterations; ++x) {
- res = t.find({nongeo: needle, geo: {$within: {$centerSphere: [[0,0], Math.PI/180.0]}}})
- if (res.explain().millis < mintime) {
- mintime = res.explain().millis
- resultcount = res.itcount()
- }
- }
- t.dropIndex(index)
- return {time: mintime, results: resultcount}
-}
-
-makepoints(needle)
-// Indexing non-geo first should be quicker.
-fast = runTest({nongeo: 1, geo: "2dsphere"})
-slow = runTest({geo: "2dsphere", nongeo: 1})
-assert.eq(fast.results, slow.results)
-assert(fast.time < slow.time)
diff --git a/jstests/geo_s2overlappingpolys.js b/jstests/geo_s2overlappingpolys.js
deleted file mode 100644
index 0d96222206c..00000000000
--- a/jstests/geo_s2overlappingpolys.js
+++ /dev/null
@@ -1,213 +0,0 @@
-var t = db.geo_s2overlappingpolys
-t.drop()
-
-t.ensureIndex( { geo : "2dsphere" } );
-
-var minError = 0.8e-13;
-
-var canonPoly = {type: "Polygon",
- coordinates: [
- [[-1.0, -1.0], [1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, -1.0]]
- ]};
-t.insert({geo: canonPoly});
-
-// Test 1: If a poly completely encloses the canonPoly, we expect the canonPoly
-// to be returned for both $within and $geoIntersect
-
-var outerPoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
- ]};
-var result = t.find({geo: {$within: {$geometry: outerPoly}}});
-assert.eq(result.itcount(), 1);
-result = t.find({geo: {$geoIntersects: {$geometry: outerPoly}}});
-assert.eq(result.itcount(), 1);
-
-
-// Test 2: If a poly that covers half of the canonPoly, we expect that it should
-// geoIntersect, but should not be within.
-
-var partialPoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -2.0], [2.0, -2.0], [2.0, 0.0], [-2.0, 0.0], [-2.0, -2.0]]
- ]};
-
-//Should not be within
-result = t.find({geo: {$within: {$geometry: partialPoly}}});
-assert.eq(result.itcount(), 0);
-
-//This should however count as a geoIntersect
-result = t.find({geo: {$geoIntersects: {$geometry: partialPoly}}});
-assert.eq(result.itcount(), 1);
-
-
-// Test 3: Polygons that intersect at a point or an edge have undefined
-// behaviour in s2 The s2 library we're using appears to have
-// the following behaviour.
-
-// Case (a): Polygons that intersect at one point (not a vertex).
-// behaviour: geoIntersects.
-
-var sharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: sharedPointPoly}}});
-assert.eq(result.itcount(), 1);
-
-// Case (b): Polygons that intersect at one point (a vertex).
-// behaviour: not geoIntersect
-
-var sharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: sharedVertexPoly}}});
-assert.eq(result.itcount(), 0);
-
-// Case (c): Polygons that intesersect at one point that is very close to a
-// vertex should have the same behaviour as Case (b).
-
-var almostSharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0 - minError, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: almostSharedVertexPoly}}});
-assert.eq(result.itcount(), 0);
-
-
-// Case (d): Polygons that intesersect at one point that is not quite as close
-// to a vertex should behave as though it were not a vertex, and should
-// geoIntersect
-
-var notCloseEnoughSharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0 - (10 * minError), -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedVertexPoly}}});
-assert.eq(result.itcount(), 1);
-
-// Case (e): Polygons that come very close to having a point intersection
-// on a non-vertex coordinate should intersect.
-
-var almostSharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, (-1.0 - minError)], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: almostSharedPointPoly}}});
-assert.eq(result.itcount(), 1);
-
-
-// Case (f): If we increase the error a little, it should no longer act
-// as though it's intersecting.
-// NOTE: I think this error bound seems odd. Going to 0.000152297 will break this test.
-// I've confirmed there is an error bound, but it's a lot larger than we experienced above.
-var errorBound = 0.000152298
-var notCloseEnoughSharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, -1.0 - errorBound], [1.0, -2.0], [0.0, -2.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedPointPoly}}});
-assert.eq(result.itcount(), 0);
-
-/* Test 3: Importantly, polygons with shared edges have undefined intersection
- * under s2. Therefore these test serve more to make sure nothing changes than
- * to confirm an expected behaviour.
- */
-
-// Case 1: A polygon who shares an edge with another polygon, where the searching
-// polygon's edge is fully covered by the canon polygon's edge.
-// Result: No intersection.
-var fullyCoveredEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -0.5], [-1.0, -0.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -0.5]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: fullyCoveredEdgePoly}}});
-assert.eq(result.itcount(), 0);
-
-// Case 2: A polygon who shares an edge with another polygon, where the searching
-// polygon's edge fully covers the canon polygon's edge.
-// Result: Intersection.
-var coveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: coveringEdgePoly}}});
-assert.eq(result.itcount(), 1);
-
-// Case 2a: same as Case 2, except pulled slightly away from the polygon.
-// Result: Intersection.
-// NOTE: Scales of errors?
-var closebyCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0 - (minError / 1000), -1.5], [-1.0 - (minError / 1000), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: closebyCoveringEdgePoly}}});
-assert.eq(result.itcount(), 1);
-
-// Case 2b: same as Case 4, except pulled slightly away from the polygon, so that it's not intersecting.
-// Result: No Intersection.
-// NOTE: Scales of errors?
-var notCloseEnoughCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0 - (minError / 100), -1.5], [-1.0 - (minError / 100), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughCoveringEdgePoly}}});
-assert.eq(result.itcount(), 0);
-
-// Case 3: A polygon who shares an edge with another polygon, where the searching
-// polygon's edge partially covers by the canon polygon's edge.
-// Result: No intersection.
-var partiallyCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -1.5]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: partiallyCoveringEdgePoly}}});
-assert.eq(result.itcount(), 0);
-
-
-//Polygons that intersect at three non-co-linear points should geoIntersect
-var sharedPointsPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -3.0], [0.0, -1.0], [2.0, -2.0], [1.0, 0.0], [2.0, 2.0], [0.0, 1.0], [0.0, 3.0], [3.0, 3.0], [3.0, -3.0], [0.0, -3.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: sharedPointsPoly}}});
-assert.eq(result.itcount(), 1);
-
-//If a polygon contains a hole, and another polygon is within that hole, it should not be within or intersect.
-
-var bigHolePoly = {type: "Polygon",
- coordinates: [
- [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
- [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
- ]};
-result = t.find({geo: {$within: {$geometry: bigHolePoly}}});
-assert.eq(result.itcount(), 0);
-result = t.find({geo: {$geoIntersects: {$geometry: bigHolePoly}}});
-assert.eq(result.itcount(), 0);
-
-// If a polygon has a hole, and another polygon is contained partially by that hole, it should be an intersection
-// but not a within.
-
-var internalOverlapPoly = {type: "Polygon",
- coordinates: [
- [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
- [[-2.0, 0.0], [2.0, 0.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, 0.0]]
- ]};
-
-result = t.find({geo: {$geoIntersects: {$geometry: internalOverlapPoly}}});
-assert.eq(result.itcount(), 1);
-result = t.find({geo: {$within: {$geometry: internalOverlapPoly}}});
-assert.eq(result.itcount(), 0);
diff --git a/jstests/geo_s2polywithholes.js b/jstests/geo_s2polywithholes.js
deleted file mode 100755
index beb9932739f..00000000000
--- a/jstests/geo_s2polywithholes.js
+++ /dev/null
@@ -1,48 +0,0 @@
-var t = db.geo_s2weirdpolys;
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-
-var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
-var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
-var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
-
-t.insert({geo : centerPoint});
-t.insert({geo : edgePoint});
-t.insert({geo : cornerPoint});
-
-var polygonWithNoHole = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
- ]
-};
-
-// Test 1: Sanity check. Expect all three points.
-var sanityResult = t.find({geo: {$within: {$geometry: polygonWithNoHole}}});
-assert.eq(sanityResult.itcount(), 3);
-
-// Test 2: Polygon with a hole that isn't contained byt the poly shell.
-var polygonWithProtrudingHole = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0.4,0.9], [0.4,1.1], [0.5, 1.1], [0.5, 0.9], [0.4, 0.9]]
- ]
-};
-
-// Bad shell, should error.
-t.insert({geo: polygonWithProtrudingHole});
-assert(db.getLastError());
-
-// Can't search with bogus poly.
-assert.throws(function() {
- return t.find({geo: {$within: {$geometry: polygonWithProtrudingHole}}}).itcount()
-})
-
-// Test 3: This test will confirm that a polygon with overlapping holes throws
-// an error.
-var polyWithOverlappingHoles = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0.2,0.6], [0.2,0.9], [0.6, 0.9], [0.6, 0.6], [0.2, 0.6]],
- [[0.5,0.4], [0.5,0.7], [0.8, 0.7], [0.8, 0.4], [0.5, 0.4]]
- ]
-};
-
-t.insert({geo: polyWithOverlappingHoles});
-assert(db.getLastError());
diff --git a/jstests/geo_s2selfintersectingpoly.js b/jstests/geo_s2selfintersectingpoly.js
deleted file mode 100644
index 4b7e0d4eff3..00000000000
--- a/jstests/geo_s2selfintersectingpoly.js
+++ /dev/null
@@ -1,12 +0,0 @@
-var t = db.geo_s2selfintersectingpoly;
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-
-var intersectingPolygon = {"type": "Polygon", "coordinates": [
- [[0.0, 0.0], [0.0, 4.0], [-3.0, 2.0], [1.0, 2.0], [0.0, 0.0]]
-]};
-/*
- * Self intersecting polygons should cause a parse exception.
- */
-t.insert({geo : intersectingPolygon});
-assert(db.getLastError());
diff --git a/jstests/geo_s2sparse.js b/jstests/geo_s2sparse.js
deleted file mode 100644
index 3fbc01188a3..00000000000
--- a/jstests/geo_s2sparse.js
+++ /dev/null
@@ -1,113 +0,0 @@
-// Test behavior of 2dsphere and sparse. See SERVER-9639.
-// All V2 2dsphere indices are sparse in the geo fields.
-
-var coll = db.geo_s2sparse;
-
-var point = { type: "Point", coordinates: [5, 5] }
-
-var indexSpec = { geo: "2dsphere", nonGeo: 1 };
-
-var indexName = 'test.geo_s2sparse.$geo_2dsphere_nonGeo_1';
-
-//
-// V2 indices are "geo sparse" always.
-//
-
-// Clean up.
-coll.drop();
-coll.ensureIndex(indexSpec);
-
-// Insert N documents with the geo field.
-var N = 1000;
-for (var i = 0; i < N; i++) {
- coll.insert({ geo: point, nonGeo: "point_"+i });
-}
-
-// Expect N keys.
-assert.eq(N, coll.validate().keysPerIndex[indexName]);
-
-// Insert N documents without the geo field.
-for (var i = 0; i < N; i++) {
- coll.insert({ wrongGeo: point, nonGeo: i});
-}
-
-// Still expect N keys as we didn't insert any geo stuff.
-assert.eq(N, coll.validate().keysPerIndex[indexName]);
-
-// Insert N documents with just the geo field.
-for (var i = 0; i < N; i++) {
- coll.insert({ geo: point});
-}
-
-// Expect 2N keys.
-assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
-
-// Add some "not geo" stuff.
-for (var i = 0; i < N; i++) {
- coll.insert({ geo: null});
- coll.insert({ geo: []});
- coll.insert({ geo: undefined});
- coll.insert({ geo: {}});
-}
-
-// Still expect 2N keys.
-assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
-
-//
-// V1 indices are never sparse
-//
-
-coll.drop();
-coll.ensureIndex(indexSpec, {"2dsphereIndexVersion": 1});
-
-// Insert N documents with the geo field.
-for (var i = 0; i < N; i++) {
- coll.insert({ geo: point, nonGeo: "point_"+i });
-}
-
-// Expect N keys.
-assert.eq(N, coll.validate().keysPerIndex[indexName]);
-
-// Insert N documents without the geo field.
-for (var i = 0; i < N; i++) {
- coll.insert({ wrongGeo: point, nonGeo: i});
-}
-
-// Expect N keys as it's a V1 index.
-assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
-
-//
-// V2 indices with several 2dsphere-indexed fields are only sparse if all are missing.
-//
-
-// Clean up.
-coll.drop();
-coll.ensureIndex({geo: "2dsphere", otherGeo: "2dsphere"});
-
-indexName = 'test.geo_s2sparse.$geo_2dsphere_otherGeo_2dsphere';
-
-// Insert N documents with the first geo field.
-var N = 1000;
-for (var i = 0; i < N; i++) {
- coll.insert({ geo: point});
-}
-
-// Expect N keys.
-assert.eq(N, coll.validate().keysPerIndex[indexName]);
-
-// Insert N documents with the second geo field.
-var N = 1000;
-for (var i = 0; i < N; i++) {
- coll.insert({ otherGeo: point});
-}
-
-// They get inserted too.
-assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
-
-// Insert N documents with neither geo field.
-for (var i = 0; i < N; i++) {
- coll.insert({ nonGeo: i});
-}
-
-// Still expect 2N keys as the neither geo docs were omitted from the index.
-assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
diff --git a/jstests/geo_s2twofields.js b/jstests/geo_s2twofields.js
deleted file mode 100644
index 6beac190fb0..00000000000
--- a/jstests/geo_s2twofields.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// Verify that we can index multiple geo fields with 2dsphere, and that
-// performance is what we expect it to be with indexing both fields.
-var t = db.geo_s2twofields
-t.drop()
-
-Random.setRandomSeed();
-var random = Random.rand;
-var PI = Math.PI;
-
-function randomCoord(center, minDistDeg, maxDistDeg) {
- var dx = random() * (maxDistDeg - minDistDeg) + minDistDeg;
- var dy = random() * (maxDistDeg - minDistDeg) + minDistDeg;
- return [center[0] + dx, center[1] + dy];
-}
-
-var nyc = {type: "Point", coordinates: [-74.0064, 40.7142]};
-var miami = {type: "Point", coordinates: [-80.1303, 25.7903]};
-var maxPoints = 10000;
-var degrees = 5;
-
-for (var i = 0; i < maxPoints; ++i) {
- var fromCoord = randomCoord(nyc.coordinates, 0, degrees);
- var toCoord = randomCoord(miami.coordinates, 0, degrees);
- t.insert({from: {type: "Point", coordinates: fromCoord}, to: { type: "Point", coordinates: toCoord}})
- assert(!db.getLastError());
-}
-
-function semiRigorousTime(func) {
- var lowestTime = func();
- var iter = 2;
- for (var i = 0; i < iter; ++i) {
- var run = func();
- if (run < lowestTime) { lowestTime = run; }
- }
- return lowestTime;
-}
-
-function timeWithoutAndWithAnIndex(index, query) {
- t.dropIndex(index);
- var withoutTime = semiRigorousTime(function() { return t.find(query).explain().millis; });
- t.ensureIndex(index);
- var withTime = semiRigorousTime(function() { return t.find(query).explain().millis; });
- t.dropIndex(index);
- return [withoutTime, withTime];
-}
-
-var maxQueryRad = 0.5 * PI / 180.0;
-// When we're not looking at ALL the data, anything indexed should beat not-indexed.
-var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere", from: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
-print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
-// assert(smallQuery[0] > smallQuery[1]);
-
-// Let's just index one field.
-var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
-print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
-// assert(smallQuery[0] > smallQuery[1]);
-
-// And the other one.
-var smallQuery = timeWithoutAndWithAnIndex({from: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
-print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
-// assert(smallQuery[0] > smallQuery[1]);
diff --git a/jstests/geo_s2validindex.js b/jstests/geo_s2validindex.js
deleted file mode 100644
index fee00d8d208..00000000000
--- a/jstests/geo_s2validindex.js
+++ /dev/null
@@ -1,26 +0,0 @@
-//
-// Tests valid cases for creation of 2dsphere index
-//
-
-var coll = db.getCollection("twodspherevalid");
-
-// Valid index
-coll.drop();
-assert.eq(undefined, coll.ensureIndex({geo : "2dsphere", other : 1}));
-
-// Valid index
-coll.drop();
-assert.eq(undefined, coll.ensureIndex({geo : "2dsphere", other : 1, geo2 : "2dsphere"}));
-
-// Invalid index, using hash with 2dsphere
-coll.drop();
-assert.neq(undefined, coll.ensureIndex({geo : "2dsphere", other : "hash"}).err);
-
-// Invalid index, using 2d with 2dsphere
-coll.drop();
-assert.neq(undefined, coll.ensureIndex({geo : "2dsphere", other : "2d"}).err);
-
-jsTest.log("Success!");
-
-// Ensure the empty collection is gone, so that small_oplog passes.
-coll.drop();
diff --git a/jstests/geo_s2within.js b/jstests/geo_s2within.js
deleted file mode 100644
index 87fd32a7676..00000000000
--- a/jstests/geo_s2within.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Test some cases that might be iffy with $within, mostly related to polygon w/holes.
-t = db.geo_s2within
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
-
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.2]]}})
-// This is only partially contained within the polygon.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}})
-
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
-
-t.drop()
-t.ensureIndex({geo: "2dsphere"})
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,8], [43,8], [43,5], [40,5]],
- [ [41,6], [42,6], [42,7], [41,7], [41,6]]]}
-
-t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }})
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
-// In the hole. Shouldn't find it.
-t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }})
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
-// Also in the hole.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}})
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
-// Half-hole, half-not. Shouldn't be $within.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}})
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
-assert.eq(res.itcount(), 1);
diff --git a/jstests/geo_small_large.js b/jstests/geo_small_large.js
deleted file mode 100644
index aff4743fc71..00000000000
--- a/jstests/geo_small_large.js
+++ /dev/null
@@ -1,151 +0,0 @@
-// SERVER-2386, general geo-indexing using very large and very small bounds
-
-load( "jstests/libs/geo_near_random.js" );
-
-// Do some random tests (for near queries) with very large and small ranges
-
-var test = new GeoNearRandomTest( "geo_small_large" );
-
-bounds = { min : -Math.pow( 2, 34 ), max : Math.pow( 2, 34 ) };
-
-test.insertPts( 50, bounds );
-
-printjson( db["geo_small_large"].find().limit( 10 ).toArray() )
-
-test.testPt( [ 0, 0 ] );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-
-test = new GeoNearRandomTest( "geo_small_large" );
-
-bounds = { min : -Math.pow( 2, -34 ), max : Math.pow( 2, -34 ) };
-
-test.insertPts( 50, bounds );
-
-printjson( db["geo_small_large"].find().limit( 10 ).toArray() )
-
-test.testPt( [ 0, 0 ] );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-
-
-// Check that our box and circle queries also work
-var scales = [ Math.pow( 2, 40 ), Math.pow( 2, -40 ), Math.pow(2, 2), Math.pow(3, -15), Math.pow(3, 15) ]
-
-for ( var i = 0; i < scales.length; i++ ) {
-
- scale = scales[i];
-
- var eps = Math.pow( 2, -7 ) * scale;
- var radius = 5 * scale;
- var max = 10 * scale;
- var min = -max;
- var range = max - min;
- var bits = 2 + Math.random() * 30
-
- var t = db["geo_small_large"]
- t.drop();
- t.ensureIndex( { p : "2d" }, { min : min, max : max, bits : bits })
-
- var outPoints = 0;
- var inPoints = 0;
-
- printjson({ eps : eps, radius : radius, max : max, min : min, range : range, bits : bits })
-
- // Put a point slightly inside and outside our range
- for ( var j = 0; j < 2; j++ ) {
- var currRad = ( j % 2 == 0 ? radius + eps : radius - eps );
- t.insert( { p : { x : currRad, y : 0 } } );
- print( db.getLastError() )
- }
-
- printjson( t.find().toArray() );
-
- assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1, "Incorrect center points found!" )
- assert.eq( t.count( { p : { $within : { $box : [ [ -radius, -radius ], [ radius, radius ] ] } } } ), 1,
- "Incorrect box points found!" )
-
- shouldFind = []
- randoms = []
-
- for ( var j = 0; j < 2; j++ ) {
-
- var randX = Math.random(); // randoms[j].randX
- var randY = Math.random(); // randoms[j].randY
-
- randoms.push({ randX : randX, randY : randY })
-
- var x = randX * ( range - eps ) + eps + min;
- var y = randY * ( range - eps ) + eps + min;
-
- t.insert( { p : [ x, y ] } );
-
- if ( x * x + y * y > radius * radius ){
- // print( "out point ");
- // printjson({ x : x, y : y })
- outPoints++
- }
- else{
- // print( "in point ");
- // printjson({ x : x, y : y })
- inPoints++
- shouldFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) })
- }
- }
-
- /*
- function printDiff( didFind, shouldFind ){
-
- for( var i = 0; i < shouldFind.length; i++ ){
- var beenFound = false;
- for( var j = 0; j < didFind.length && !beenFound ; j++ ){
- beenFound = shouldFind[i].x == didFind[j].x &&
- shouldFind[i].y == didFind[j].y
- }
-
- if( !beenFound ){
- print( "Could not find: " )
- shouldFind[i].inRadius = ( radius - shouldFind[i].radius >= 0 )
- printjson( shouldFind[i] )
- }
- }
- }
-
- print( "Finding random pts... ")
- var found = t.find( { p : { $within : { $center : [[0, 0], radius ] } } } ).toArray()
- var didFind = []
- for( var f = 0; f < found.length; f++ ){
- //printjson( found[f] )
- var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0]
- var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1]
- didFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) })
- }
-
- print( "Did not find but should: ")
- printDiff( didFind, shouldFind )
- print( "Found but should not have: ")
- printDiff( shouldFind, didFind )
- */
-
- assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1 + inPoints,
- "Incorrect random center points found!\n" + tojson( randoms ) )
-
- print("Found " + inPoints + " points in and " + outPoints + " points out.");
-
- var found = t.find( { p : { $near : [0, 0], $maxDistance : radius } } ).toArray()
- var dist = 0;
- for( var f = 0; f < found.length; f++ ){
- var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0]
- var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1]
- print( "Dist: x : " + x + " y : " + y + " dist : " + Math.sqrt( x * x + y * y) + " radius : " + radius )
- }
-
- assert.eq( t.count( { p : { $near : [0, 0], $maxDistance : radius } } ), 1 + inPoints,
- "Incorrect random center points found near!\n" + tojson( randoms ) )
-
-}
-
diff --git a/jstests/geo_sort1.js b/jstests/geo_sort1.js
deleted file mode 100644
index 67de80e65c7..00000000000
--- a/jstests/geo_sort1.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.geo_sort1
-t.drop();
-
-for ( x=0; x<10; x++ ){
- for ( y=0; y<10; y++ ){
- t.insert( { loc : [ x , y ] , foo : x * x * y } );
- }
-}
-
-t.ensureIndex( { loc : "2d" , foo : 1 } )
-
-q = t.find( { loc : { $near : [ 5 , 5 ] } , foo : { $gt : 20 } } )
-m = function(z){ return z.foo; }
-
-a = q.clone().map( m );
-b = q.clone().sort( { foo : 1 } ).map( m );
-
-assert.neq( a , b , "A" );
-a.sort();
-b.sort();
-assert.eq( a , b , "B" );
diff --git a/jstests/geo_uniqueDocs.js b/jstests/geo_uniqueDocs.js
deleted file mode 100644
index 61f1a40522d..00000000000
--- a/jstests/geo_uniqueDocs.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Test uniqueDocs option for $within and geoNear queries SERVER-3139
-// SERVER-12120 uniqueDocs is deprecated. Server always returns unique documents.
-
-collName = 'geo_uniqueDocs_test'
-t = db.geo_uniqueDocs_test
-t.drop()
-
-t.save( { locs : [ [0,2], [3,4]] } )
-t.save( { locs : [ [6,8], [10,10] ] } )
-
-t.ensureIndex( { locs : '2d' } )
-
-// geoNear tests
-// uniqueDocs option is ignored.
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0]}).results.length)
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:false}).results.length)
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:true}).results.length)
-results = db.runCommand({geoNear:collName, near:[0,0], num:2}).results
-assert.eq(2, results.length)
-assert.close(2, results[0].dis)
-assert.close(10, results[1].dis)
-results = db.runCommand({geoNear:collName, near:[0,0], num:2, uniqueDocs:true}).results
-assert.eq(2, results.length)
-assert.close(2, results[0].dis)
-assert.close(10, results[1].dis)
-
-// $within tests
-
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]]}}}).itcount())
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : true}}}).itcount())
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : false}}}).itcount())
-
-assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : true}}}).itcount())
-assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : false}}}).itcount())
-
-assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : true}}}).itcount())
-assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : false}}}).itcount())
-
-assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : true}}}).itcount())
-assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : false}}}).itcount())
diff --git a/jstests/geo_uniqueDocs2.js b/jstests/geo_uniqueDocs2.js
deleted file mode 100644
index f9b95113f78..00000000000
--- a/jstests/geo_uniqueDocs2.js
+++ /dev/null
@@ -1,80 +0,0 @@
-// Additional checks for geo uniqueDocs and includeLocs SERVER-3139.
-// SERVER-12120 uniqueDocs is deprecated.
-// Server always returns results with implied uniqueDocs=true
-
-collName = 'jstests_geo_uniqueDocs2';
-t = db[collName];
-t.drop();
-
-t.save( {loc:[[20,30],[40,50]]} );
-t.ensureIndex( {loc:'2d'} );
-
-// Check exact matches of different locations.
-assert.eq( 1, t.count( { loc : [20,30] } ) );
-assert.eq( 1, t.count( { loc : [40,50] } ) );
-
-// Check behavior for $near, where $uniqueDocs mode is unavailable.
-assert.eq( [t.findOne()], t.find( { loc: { $near: [50,50] } } ).toArray() );
-
-// Check correct number of matches for $within / $uniqueDocs.
-// uniqueDocs ignored - does not affect results.
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40] } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : true } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ) );
-
-// For $within / $uniqueDocs, limit applies to docs.
-assert.eq( 1, t.find( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ).limit(1).itcount() );
-
-// Now check a circle only containing one of the locs.
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10] } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : true } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : false } } } ) );
-
-// Check number and character of results with geoNear / uniqueDocs / includeLocs.
-notUniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : false } );
-uniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : false } );
-notUniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } );
-uniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : true } );
-
-// Check that only unique docs are returned.
-assert.eq( 1, notUniqueNotInclude.results.length );
-assert.eq( 1, uniqueNotInclude.results.length );
-assert.eq( 1, notUniqueInclude.results.length );
-assert.eq( 1, uniqueInclude.results.length );
-
-// Check that locs are included.
-assert( !notUniqueNotInclude.results[0].loc );
-assert( !uniqueNotInclude.results[0].loc );
-assert( notUniqueInclude.results[0].loc );
-assert( uniqueInclude.results[0].loc );
-
-// For geoNear / uniqueDocs, 'num' limit seems to apply to locs.
-assert.eq( 1, db.runCommand( { geoNear : collName , near : [50,50], num : 1, uniqueDocs : false, includeLocs : false } ).results.length );
-
-// Check locs returned in includeLocs mode.
-t.remove({});
-objLocs = [{x:20,y:30,z:['loc1','loca']},{x:40,y:50,z:['loc2','locb']}];
-t.save( {loc:objLocs} );
-results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
-assert.contains( results[0].loc, objLocs );
-
-// Check locs returned in includeLocs mode, where locs are arrays.
-t.remove({});
-arrLocs = [[20,30],[40,50]];
-t.save( {loc:arrLocs} );
-results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
-// The original loc arrays are returned as objects.
-expectedLocs = arrLocs
-
-assert.contains( results[0].loc, expectedLocs );
-
-// Test a large number of locations in the array.
-t.drop();
-arr = [];
-for( i = 0; i < 10000; ++i ) {
- arr.push( [10,10] );
-}
-arr.push( [100,100] );
-t.save( {loc:arr} );
-t.ensureIndex( {loc:'2d'} );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[99, 99], 5] } } } ) );
diff --git a/jstests/geo_update.js b/jstests/geo_update.js
deleted file mode 100644
index dd4b28c8374..00000000000
--- a/jstests/geo_update.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Tests geo queries w/ update & upsert
-// from SERVER-3428
-
-var coll = db.testGeoUpdate
-coll.drop()
-
-coll.ensureIndex({ loc : "2d" })
-
-// Test normal update
-print( "Updating..." )
-
-coll.insert({ loc : [1.0, 2.0] })
-
-coll.update({ loc : { $near : [1.0, 2.0] } },
- { x : true, loc : [1.0, 2.0] })
-
-// Test upsert
-print( "Upserting..." )
-
-coll.update({ loc : { $within : { $center : [[10, 20], 1] } } },
- { x : true },
- true)
-
-coll.update({ loc : { $near : [10.0, 20.0], $maxDistance : 1 } },
- { x : true },
- true)
-
-
-coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
- { $set : { loc : [100, 100] }, $push : { people : "chris" } },
- true)
-
-coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
- { $set : { loc : [100, 100] }, $push : { people : "john" } },
- true)
-
-assert.eq( 4, coll.find().itcount() )
diff --git a/jstests/geo_update1.js b/jstests/geo_update1.js
deleted file mode 100644
index 68a8de668b3..00000000000
--- a/jstests/geo_update1.js
+++ /dev/null
@@ -1,38 +0,0 @@
-
-t = db.geo_update1
-t.drop()
-
-for(var x = 0; x < 10; x++ ) {
- for(var y = 0; y < 10; y++ ) {
- t.insert({"loc": [x, y] , x : x , y : y , z : 1 });
- }
-}
-
-t.ensureIndex( { loc : "2d" } )
-
-function p(){
- print( "--------------" );
- for ( var y=0; y<10; y++ ){
- var c = t.find( { y : y } ).sort( { x : 1 } )
- var s = "";
- while ( c.hasNext() )
- s += c.next().z + " ";
- print( s )
- }
- print( "--------------" );
-}
-
-p()
-
-t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B1" )
-p()
-
-t.update({}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B2" )
-p()
-
-
-t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B3" )
-p()
diff --git a/jstests/geo_update2.js b/jstests/geo_update2.js
deleted file mode 100644
index 2308b2c7899..00000000000
--- a/jstests/geo_update2.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-t = db.geo_update2
-t.drop()
-
-for(var x = 0; x < 10; x++ ) {
- for(var y = 0; y < 10; y++ ) {
- t.insert({"loc": [x, y] , x : x , y : y });
- }
-}
-
-t.ensureIndex( { loc : "2d" } )
-
-function p(){
- print( "--------------" );
- for ( var y=0; y<10; y++ ){
- var c = t.find( { y : y } ).sort( { x : 1 } )
- var s = "";
- while ( c.hasNext() )
- s += c.next().z + " ";
- print( s )
- }
- print( "--------------" );
-}
-
-p()
-
-
-t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B1" )
-p()
-
-t.update({}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B2" )
-p()
-
-
-t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true);
-assert.isnull( db.getLastError() , "B3" )
-p()
-
diff --git a/jstests/geo_update_btree.js b/jstests/geo_update_btree.js
deleted file mode 100644
index 38d9692faeb..00000000000
--- a/jstests/geo_update_btree.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Tests whether the geospatial search is stable under btree updates
-
-var coll = db.getCollection( "jstests_geo_update_btree" )
-coll.drop()
-
-coll.ensureIndex( { loc : '2d' } )
-
-for ( i = 0; i < 10000; i++ ) {
- coll.insert( { loc : [ Random.rand() * 180, Random.rand() * 180 ], v : '' } );
-}
-
-var big = new Array( 3000 ).toString()
-
-for ( i = 0; i < 1000; i++ ) {
- coll.update(
- { loc : { $within : { $center : [ [ Random.rand() * 180, Random.rand() * 180 ], Random.rand() * 50 ] } } },
- { $set : { v : big } }, false, true )
-
- if (testingReplication)
- db.getLastError(2);
- else
- db.getLastError();
-
- if( i % 10 == 0 ) print( i );
-}
diff --git a/jstests/geo_update_btree2.js b/jstests/geo_update_btree2.js
deleted file mode 100644
index d99970c73e0..00000000000
--- a/jstests/geo_update_btree2.js
+++ /dev/null
@@ -1,71 +0,0 @@
-// Tests whether the geospatial search is stable under btree updates
-//
-// Tests the implementation of the 2d search, not the behavior we promise. MongoDB currently
-// promises no isolation, so there is no guarantee that we get the results we expect in this file.
-
-// The old query system, if it saw a 2d query, would never consider a collscan.
-//
-// The new query system can answer the queries in this file with a collscan and ranks
-// the collscan against the indexed result.
-//
-// In order to expose the specific NON GUARANTEED isolation behavior this file tests
-// we disable table scans to ensure that the new query system only looks at the 2d
-// scan.
-assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
-
-var status = function( msg ){
- print( "\n\n###\n" + msg + "\n###\n\n" )
-}
-
-var coll = db.getCollection( "jstests_geo_update_btree2" )
-coll.drop()
-
-coll.ensureIndex( { loc : '2d' } )
-
-status( "Inserting points..." )
-
-var numPoints = 10
-for ( i = 0; i < numPoints; i++ ) {
- coll.insert( { _id : i, loc : [ Random.rand() * 180, Random.rand() * 180 ], i : i % 2 } );
-}
-
-status( "Starting long query..." )
-
-var query = coll.find({ loc : { $within : { $box : [[-180, -180], [180, 180]] } } }).batchSize( 2 )
-var firstValues = [ query.next()._id, query.next()._id ]
-printjson( firstValues )
-
-status( "Removing points not returned by query..." )
-
-var allQuery = coll.find()
-var removeIds = []
-while( allQuery.hasNext() ){
- var id = allQuery.next()._id
- if( firstValues.indexOf( id ) < 0 ){
- removeIds.push( id )
- }
-}
-
-var updateIds = []
-for( var i = 0, max = removeIds.length / 2; i < max; i++ ) updateIds.push( removeIds.pop() )
-
-printjson( removeIds )
-coll.remove({ _id : { $in : removeIds } })
-
-status( "Updating points returned by query..." )
-printjson(updateIds);
-
-var big = new Array( 3000 ).toString()
-for( var i = 0; i < updateIds.length; i++ )
- coll.update({ _id : updateIds[i] }, { $set : { data : big } })
-
-status( "Counting final points..." )
-
-// It's not defined whether or not we return documents that are modified during a query. We
-// shouldn't crash, but it's not defined how many results we get back. This test is modifying every
-// doc not returned by the query, and since we currently handle the invalidation by removing them,
-// we won't return them. But we shouldn't crash.
-// assert.eq( ( numPoints - 2 ) / 2, query.itcount() )
-query.itcount();
-
-assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false} ) );
diff --git a/jstests/geo_update_dedup.js b/jstests/geo_update_dedup.js
deleted file mode 100644
index 8ec08b82ea0..00000000000
--- a/jstests/geo_update_dedup.js
+++ /dev/null
@@ -1,60 +0,0 @@
-// Test that updates with geo queries which match
-// the same document multiple times only apply
-// the update once
-
-var t = db.jstests_geo_update_dedup;;
-
-// 2d index with $near
-t.drop();
-t.ensureIndex({locs: "2d"});
-t.save({locs: [[49.999,49.999], [50.0,50.0], [50.001,50.001]]});
-
-var q = {locs: {$near: [50.0, 50.0]}};
-assert.eq(1, t.find(q).itcount(), 'duplicates returned from query');
-
-t.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true);
-assert.eq(1, db.getLastErrorObj().n);
-assert.eq(1, t.findOne().touchCount);
-
-t.drop();
-t.ensureIndex({locs: "2d"});
-t.save({locs: [{x:49.999,y:49.999}, {x:50.0,y:50.0}, {x:50.001,y:50.001}]});
-t.update({locs: {$near: {x:50.0, y:50.0}}}, {$inc: {touchCount: 1}});
-assert.eq(1, db.getLastErrorObj().n);
-assert.eq(1, t.findOne().touchCount);
-
-// 2d index with $within
-t.drop();
-t.ensureIndex({loc: "2d"});
-t.save({loc: [[0, 0], [1, 1]]});
-
-t.update({loc: {$within: {$center: [[0, 0], 2]}}}, {$inc: {touchCount: 1}}, false, true);
-assert.eq(1, db.getLastErrorObj().n);
-assert.eq(1, t.findOne().touchCount);
-
-// 2dsphere index with $geoNear
-t.drop();
-t.ensureIndex({geo: "2dsphere"});
-var x = { "type" : "Polygon",
- "coordinates" : [[[49.999,49.999], [50.0,50.0], [50.001,50.001], [49.999,49.999]]]}
-t.save({geo: x})
-
-t.update({geo: {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
- {$inc: {touchCount: 1}}, false, true);
-assert.eq(1, db.getLastErrorObj().n);
-assert.eq(1, t.findOne().touchCount);
-
-t.drop();
-var locdata = [
- {geo: {type: "Point", coordinates: [49.999,49.999]}},
- {geo: {type: "Point", coordinates: [50.000,50.000]}},
- {geo: {type: "Point", coordinates: [50.001,50.001]}}
-];
-t.save({locdata: locdata, count: 0})
-t.ensureIndex({"locdata.geo": "2dsphere"});
-
-t.update({"locdata.geo": {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
- {$inc: {touchCount: 1}}, false, true);
-assert.eq(1, db.getLastErrorObj().n);
-assert.eq(1, t.findOne().touchCount);
-
diff --git a/jstests/geo_withinquery.js b/jstests/geo_withinquery.js
deleted file mode 100644
index 11701d34c62..00000000000
--- a/jstests/geo_withinquery.js
+++ /dev/null
@@ -1,15 +0,0 @@
-// SERVER-7343: allow $within without a geo index.
-t = db.geo_withinquery;
-t.drop();
-
-num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] }
- t.save( o )
- }
-}
-
-assert.eq(21 * 21 - 1, t.find({ $and: [ {loc: {$ne:[0,0]}},
- {loc: {$within: {$box: [[0,0], [100,100]]}}},
- ]}).itcount(), "UHOH!")
diff --git a/jstests/geoa.js b/jstests/geoa.js
deleted file mode 100644
index 3081f6c5c2e..00000000000
--- a/jstests/geoa.js
+++ /dev/null
@@ -1,12 +0,0 @@
-
-t = db.geoa
-t.drop();
-
-t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } )
-t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } )
-t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } )
-
-t.ensureIndex( { "a.loc" : "2d" } );
-
-cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } );
-assert.eq( 2 , cur.next()._id , "A1" );
diff --git a/jstests/geob.js b/jstests/geob.js
deleted file mode 100644
index 0dcc2658ba2..00000000000
--- a/jstests/geob.js
+++ /dev/null
@@ -1,35 +0,0 @@
-var t = db.geob;
-t.drop();
-
-var a = {p: [0, 0]};
-var b = {p: [1, 0]};
-var c = {p: [3, 4]};
-var d = {p: [0, 6]};
-
-t.save(a);
-t.save(b);
-t.save(c);
-t.save(d);
-t.ensureIndex({p: "2d"});
-
-var res = t.runCommand("geoNear", {near: [0,0]});
-assert.close(3, res.stats.avgDistance, "A");
-
-assert.close(0, res.results[0].dis, "B1");
-assert.eq(a._id, res.results[0].obj._id, "B2");
-
-assert.close(1, res.results[1].dis, "C1");
-assert.eq(b._id, res.results[1].obj._id, "C2");
-
-assert.close(5, res.results[2].dis, "D1");
-assert.eq(c._id, res.results[2].obj._id, "D2");
-
-assert.close(6, res.results[3].dis, "E1");
-assert.eq(d._id, res.results[3].obj._id, "E2");
-
-res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2});
-assert.close(6, res.stats.avgDistance, "F");
-assert.close(0, res.results[0].dis, "G");
-assert.close(2, res.results[1].dis, "H");
-assert.close(10, res.results[2].dis, "I");
-assert.close(12, res.results[3].dis, "J");
diff --git a/jstests/geoc.js b/jstests/geoc.js
deleted file mode 100644
index 8b0178095e8..00000000000
--- a/jstests/geoc.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.geoc;
-t.drop()
-
-N = 1000;
-
-for (var i=0; i<N; i++) t.insert({loc:[100+Math.random(), 100+Math.random()], z:0})
-for (var i=0; i<N; i++) t.insert({loc:[0+Math.random(), 0+Math.random()], z:1})
-for (var i=0; i<N; i++) t.insert({loc:[-100+Math.random(), -100+Math.random()], z:2})
-
-t.ensureIndex({loc:'2d'})
-
-function test( z , l ){
- assert.lt( 0 , t.find({loc:{$near:[100,100]}, z:z}).limit(l).itcount() , "z: " + z + " l: " + l );
-}
-
-test( 1 , 1 );
-test( 1 , 2 );
-test( 2 , 2 );
-test( 2 , 10 );
-test( 2 , 1000 );
-test( 2 , 100000 );
-test( 2 , 10000000 );
-
diff --git a/jstests/geod.js b/jstests/geod.js
deleted file mode 100644
index 6e458454a71..00000000000
--- a/jstests/geod.js
+++ /dev/null
@@ -1,14 +0,0 @@
-var t=db.geod;
-t.drop()
-t.save( { loc: [0,0] } )
-t.save( { loc: [0.5,0] } )
-t.ensureIndex({loc:"2d"})
-// do a few geoNears with different maxDistances. The first iteration
-// should match no points in the dataset.
-dists = [.49, .51, 1.0]
-for (idx in dists){
- b=db.runCommand({geoNear:"geod", near:[1,0], num:2, maxDistance:dists[idx]});
- assert.eq(b.errmsg, undefined, "A"+idx);
- l=b.results.length
- assert.eq(l, idx, "B"+idx)
-}
diff --git a/jstests/geoe.js b/jstests/geoe.js
deleted file mode 100644
index 22feb83ab1e..00000000000
--- a/jstests/geoe.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Was reported as SERVER-1283.
-// The problem seems to be that sometimes the index btrees are such that
-// the first search for a matching point in the geo code could run to
-// the end of the btree and not reverse direction (leaving the rest of
-// the search always looking at some random non-matching point).
-
-t=db.geo_box;
-t.drop();
-
-t.insert({"_id": 1, "geo" : [ 33, -11.1 ] });
-t.insert({"_id": 2, "geo" : [ -122, 33.3 ] });
-t.insert({"_id": 3, "geo" : [ -122, 33.4 ] });
-t.insert({"_id": 4, "geo" : [ -122.28, 37.67 ] });
-t.insert({"_id": 5, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 6, "geo" : [ -122.29, 37.67 ] });
-t.insert({"_id": 7, "geo" : [ -122.29, 37.67 ] });
-t.insert({"_id": 8, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 9, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 10, "geo" : [ -122.3, 37.67 ] });
-t.insert({"_id": 11, "geo" : [ -122.31, 37.67 ] });
-t.insert({"_id": 12, "geo" : [ -122.3, 37.66 ] });
-t.insert({"_id": 13, "geo" : [ -122.2435, 37.637072 ] });
-t.insert({"_id": 14, "geo" : [ -122.289505, 37.695774 ] });
-
-
-t.ensureIndex({ geo : "2d" });
-
-c=t.find({geo: {"$within": {"$box": [[-125.078461,36.494473], [-120.320648,38.905199]]} } });
-assert.eq(11, c.count(), "A1");
-
-c=t.find({geo: {"$within": {"$box": [[-124.078461,36.494473], [-120.320648,38.905199]]} } });
-assert.eq(11, c.count(), "B1");
diff --git a/jstests/geof.js b/jstests/geof.js
deleted file mode 100644
index 786ead6a94a..00000000000
--- a/jstests/geof.js
+++ /dev/null
@@ -1,19 +0,0 @@
-t = db.geof
-t.drop();
-
-// corners (dist ~0.98)
-t.insert({loc: [ 0.7, 0.7]})
-t.insert({loc: [ 0.7, -0.7]})
-t.insert({loc: [-0.7, 0.7]})
-t.insert({loc: [-0.7, -0.7]})
-
-// on x axis (dist == 0.9)
-t.insert({loc: [-0.9, 0]})
-t.insert({loc: [-0.9, 0]})
-
-t.ensureIndex( { loc : "2d" } )
-
-t.find({loc: {$near: [0,0]}}).limit(2).forEach( function(o){
- //printjson(o);
- assert.lt(Geo.distance([0,0], o.loc), 0.95);
-});
diff --git a/jstests/geonear_cmd_input_validation.js b/jstests/geonear_cmd_input_validation.js
deleted file mode 100644
index 2a44391183b..00000000000
--- a/jstests/geonear_cmd_input_validation.js
+++ /dev/null
@@ -1,119 +0,0 @@
-//
-// Test input validation for geoNear command.
-//
-var t = db.geonear_cmd_input_validation;
-t.drop();
-t.ensureIndex({loc: "2dsphere"});
-
-// The test matrix. Some combinations are not supported:
-// 2d index and minDistance.
-// 2d index and GeoJSON
-// 2dsphere index and spherical=false
-var indexTypes = ['2d', '2dsphere'],
- pointTypes = [
- {type: 'Point', coordinates: [0, 0]},
- [0, 0]],
- sphericalOptions = [true, false],
- optionNames = ['minDistance', 'maxDistance'],
- badNumbers = [-1, undefined, 'foo'];
-
-indexTypes.forEach(function(indexType) {
- t.drop();
- t.createIndex({'loc': indexType});
-
- pointTypes.forEach(function(pointType) {
- sphericalOptions.forEach(function(spherical) {
- optionNames.forEach(function(optionName) {
- var isLegacy = Array.isArray(pointType),
- pointDescription = (isLegacy ? "legacy coordinates" : "GeoJSON point");
-
- function makeCommand(distance) {
- var command = {
- geoNear: t.getName(),
- near: pointType,
- spherical: spherical
- };
- command[optionName] = distance;
- return command;
- }
-
- // Unsupported combinations should return errors.
- if (
- (indexType == '2d' && optionName == 'minDistance') ||
- (indexType == '2d' && !isLegacy) ||
- (indexType == '2dsphere' && !spherical)
- ) {
- assert.commandFailed(
- db.runCommand(makeCommand(1)),
- "geoNear with spherical=" + spherical + " and " + indexType
- + " index and " + pointDescription
- + " should've failed."
- );
-
- // Stop processing this combination in the test matrix.
- return;
- }
-
- // This is a supported combination. No error.
- assert.commandWorked(db.runCommand({
- geoNear: t.getName(),
- near: pointType,
- spherical: spherical
- }));
-
- // No error with min/maxDistance 1.
- db.runCommand(makeCommand(1));
-
- var outOfRangeDistances = [];
- if (indexType == '2d') {
- // maxDistance unlimited; no error.
- db.runCommand(makeCommand(1e10));
- }
-
- // Try several bad values for min/maxDistance.
- badNumbers.concat(outOfRangeDistances).forEach(function(badDistance) {
-
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with "
- + optionName + " " + badDistance);
-
- assert.commandFailed(
- db.runCommand(makeCommand(badDistance)),
- msg);
- });
-
- // Bad values for limit / num.
- ['num', 'limit'].forEach(function(limitOptionName) {
- [-1, 'foo'].forEach(function(badLimit) {
-
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with '"
- + limitOptionName + "' " + badLimit);
-
- var command = makeCommand(1);
- command[limitOptionName] = badLimit;
- assert.commandFailed(db.runCommand(command), msg);
- });
- });
-
- // Bad values for distanceMultiplier.
- badNumbers.forEach(function(badNumber) {
-
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with distanceMultiplier "
- + badNumber);
-
- var command = makeCommand(1);
- command['distanceMultiplier'] = badNumber;
- assert.commandFailed(db.runCommand(command), msg);
- });
- });
- });
- });
-});
diff --git a/jstests/geonear_validate.js b/jstests/geonear_validate.js
deleted file mode 100644
index 49d4c1ade15..00000000000
--- a/jstests/geonear_validate.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// Test to make sure that geoNear validates numWanted
-t = db.geonear_validate
-t.drop();
-t.ensureIndex({ geo : "2dsphere" })
-origin = { "type" : "Point", "coordinates": [ 0, 0] }
-t.insert({geo: origin})
-res = db.runCommand({geoNear: t.getName(), near: [0,0], spherical: true, num: -1});
-assert.eq(0, res.ok);
diff --git a/jstests/getlog1.js b/jstests/getlog1.js
deleted file mode 100644
index 75fbeabddf2..00000000000
--- a/jstests/getlog1.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// to run:
-// ./mongo jstests/<this-file>
-
-contains = function(arr,obj) {
- var i = arr.length;
- while (i--) {
- if (arr[i] === obj) {
- return true;
- }
- }
- return false;
-}
-
-var resp = db.adminCommand({getLog:"*"})
-assert( resp.ok == 1, "error executing getLog command" );
-assert( resp.names, "no names field" );
-assert( resp.names.length > 0, "names array is empty" );
-assert( contains(resp.names,"global") , "missing global category" );
-assert( !contains(resp.names,"butty") , "missing butty category" );
-
-resp = db.adminCommand({getLog:"global"})
-assert( resp.ok == 1, "error executing getLog command" );
-assert( resp.log, "no log field" );
-assert( resp.log.length > 0 , "no log lines" );
diff --git a/jstests/getlog2.js b/jstests/getlog2.js
deleted file mode 100644
index 2712f96fc3e..00000000000
--- a/jstests/getlog2.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// tests getlog as well as slow querying logging
-
-glcol = db.getLogTest2;
-glcol.drop()
-
-contains = function(arr, func) {
- var i = arr.length;
- while (i--) {
- if (func(arr[i])) {
- return true;
- }
- }
- return false;
-}
-
-// test doesn't work when talking to mongos
-if(db.isMaster().msg != "isdbgrid") {
- // run a slow query
- glcol.save({ "SENTINEL": 1 });
- glcol.findOne({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } });
-
- // run a slow update
- glcol.update({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }, { "x": "x" });
-
- var resp = db.adminCommand({getLog:"global"});
- assert( resp.ok == 1, "error executing getLog command" );
- assert( resp.log, "no log field" );
- assert( resp.log.length > 0 , "no log lines" );
-
- // ensure that slow query is logged in detail
- assert( contains(resp.log, function(v) {
- print(v);
- return v.indexOf(" query ") != -1 && v.indexOf("query:") != -1 &&
- v.indexOf("nscanned:") != -1 &&
- v.indexOf("nscannedObjects:") != -1 &&
- v.indexOf("SENTINEL") != -1;
- }) );
-
- // same, but for update
- assert( contains(resp.log, function(v) {
- print(v);
- return v.indexOf(" update ") != -1 && v.indexOf("query:") != -1 &&
- v.indexOf("nscanned:") != -1 &&
- v.indexOf("nscannedObjects:") != -1 &&
- v.indexOf("SENTINEL") != -1;
- }) );
-}
diff --git a/jstests/group1.js b/jstests/group1.js
deleted file mode 100644
index c4147c0d89a..00000000000
--- a/jstests/group1.js
+++ /dev/null
@@ -1,64 +0,0 @@
-t = db.group1;
-t.drop();
-
-t.save( { n : 1 , a : 1 } );
-t.save( { n : 2 , a : 1 } );
-t.save( { n : 3 , a : 2 } );
-t.save( { n : 4 , a : 2 } );
-t.save( { n : 5 , a : 2 } );
-
-var p = { key : { a : true } ,
- reduce : function(obj,prev) { prev.count++; },
- initial: { count: 0 }
- };
-
-res = t.group( p );
-
-assert( res.length == 2 , "A" );
-assert( res[0].a == 1 , "B" );
-assert( res[0].count == 2 , "C" );
-assert( res[1].a == 2 , "D" );
-assert( res[1].count == 3 , "E" );
-
-assert.eq( res , t.groupcmd( p ) , "ZZ" );
-
-ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } );
-assert.eq( 1 , ret.length , "ZZ 2" );
-assert.eq( 5 , ret[0].count , "ZZ 3" );
-
-ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n } , initial : { sum : 0 } } );
-assert.eq( 1 , ret.length , "ZZ 4" );
-assert.eq( 15 , ret[0].sum , "ZZ 5" );
-
-t.drop();
-
-t.save( { "a" : 2 } );
-t.save( { "b" : 5 } );
-t.save( { "a" : 1 } );
-t.save( { "a" : 2 } );
-
-c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } };
-
-assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" );
-
-
-t.drop();
-
-t.save( { name : { first : "a" , last : "A" } } );
-t.save( { name : { first : "b" , last : "B" } } );
-t.save( { name : { first : "a" , last : "A" } } );
-
-
-p = { key : { 'name.first' : true } ,
- reduce : function(obj,prev) { prev.count++; },
- initial: { count: 0 }
- };
-
-res = t.group( p );
-assert.eq( 2 , res.length , "Z1" );
-assert.eq( "a" , res[0]['name.first'] , "Z2" )
-assert.eq( "b" , res[1]['name.first'] , "Z3" )
-assert.eq( 2 , res[0].count , "Z4" )
-assert.eq( 1 , res[1].count , "Z5" )
-
-
diff --git a/jstests/group2.js b/jstests/group2.js
deleted file mode 100644
index a8e6653470a..00000000000
--- a/jstests/group2.js
+++ /dev/null
@@ -1,38 +0,0 @@
-t = db.group2;
-t.drop();
-
-t.save({a: 2});
-t.save({b: 5});
-t.save({a: 1});
-
-cmd = { key: {a: 1},
- initial: {count: 0},
- reduce: function(obj, prev) {
- prev.count++;
- }
- };
-
-result = t.group(cmd);
-
-assert.eq(3, result.length, "A");
-assert.eq(null, result[1].a, "C");
-assert("a" in result[1], "D");
-assert.eq(1, result[2].a, "E");
-
-assert.eq(1, result[0].count, "F");
-assert.eq(1, result[1].count, "G");
-assert.eq(1, result[2].count, "H");
-
-
-delete cmd.key
-cmd["$keyf"] = function(x){ return { a : x.a }; };
-result2 = t.group( cmd );
-
-assert.eq( result , result2, "check result2" );
-
-
-delete cmd.$keyf
-cmd["keyf"] = function(x){ return { a : x.a }; };
-result3 = t.group( cmd );
-
-assert.eq( result , result3, "check result3" );
diff --git a/jstests/group3.js b/jstests/group3.js
deleted file mode 100644
index d113b9d570f..00000000000
--- a/jstests/group3.js
+++ /dev/null
@@ -1,43 +0,0 @@
-t = db.group3;
-t.drop();
-
-t.save({a: 1});
-t.save({a: 2});
-t.save({a: 3});
-t.save({a: 4});
-
-
-cmd = { initial: {count: 0, sum: 0},
- reduce: function(obj, prev) {
- prev.count++;
- prev.sum += obj.a;
- },
- finalize: function(obj) {
- if (obj.count){
- obj.avg = obj.sum / obj.count;
- }else{
- obj.avg = 0;
- }
- },
- };
-
-result1 = t.group(cmd);
-
-assert.eq(1, result1.length, "test1");
-assert.eq(10, result1[0].sum, "test1");
-assert.eq(4, result1[0].count, "test1");
-assert.eq(2.5, result1[0].avg, "test1");
-
-
-cmd['finalize'] = function(obj) {
- if (obj.count){
- return obj.sum / obj.count;
- }else{
- return 0;
- }
-};
-
-result2 = t.group(cmd);
-
-assert.eq(1, result2.length, "test2");
-assert.eq(2.5, result2[0], "test2");
diff --git a/jstests/group4.js b/jstests/group4.js
deleted file mode 100644
index e75c0d1ae2c..00000000000
--- a/jstests/group4.js
+++ /dev/null
@@ -1,45 +0,0 @@
-
-t = db.group4
-t.drop();
-
-function test( c , n ){
- var x = {};
- c.forEach(
- function(z){
- assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) );
- }
- );
-}
-
-t.insert({name:'bob',foo:1})
-t.insert({name:'bob',foo:2})
-t.insert({name:'alice',foo:1})
-t.insert({name:'alice',foo:3})
-t.insert({name:'fred',foo:3})
-t.insert({name:'fred',foo:4})
-
-x = t.group(
- {
- key: {foo:1},
- initial: {count:0,values:[]},
- reduce: function (obj, prev){
- prev.count++
- prev.values.push(obj.name)
- }
- }
-);
-test( x , "A" );
-
-x = t.group(
- {
- key: {foo:1},
- initial: {count:0},
- reduce: function (obj, prev){
- if (!prev.values) {prev.values = [];}
- prev.count++;
- prev.values.push(obj.name);
- }
- }
-);
-test( x , "B" );
-
diff --git a/jstests/group5.js b/jstests/group5.js
deleted file mode 100644
index 3534fe5f030..00000000000
--- a/jstests/group5.js
+++ /dev/null
@@ -1,38 +0,0 @@
-
-t = db.group5;
-t.drop();
-
-// each group has groupnum+1 5 users
-for ( var group=0; group<10; group++ ){
- for ( var i=0; i<5+group; i++ ){
- t.save( { group : "group" + group , user : i } )
- }
-}
-
-function c( group ){
- return t.group(
- {
- key : { group : 1 } ,
- q : { group : "group" + group } ,
- initial : { users : {} },
- reduce : function(obj,prev){
- prev.users[obj.user] = true; // add this user to the hash
- },
- finalize : function(x){
- var count = 0;
- for (var key in x.users){
- count++;
- }
-
- //replace user obj with count
- //count add new field and keep users
- x.users = count;
- return x;
- }
- })[0]; // returns array
-}
-
-assert.eq( "group0" , c(0).group , "g0" );
-assert.eq( 5 , c(0).users , "g0 a" );
-assert.eq( "group5" , c(5).group , "g5" );
-assert.eq( 10 , c(5).users , "g5 a" );
diff --git a/jstests/group6.js b/jstests/group6.js
deleted file mode 100644
index b77a37a5d11..00000000000
--- a/jstests/group6.js
+++ /dev/null
@@ -1,32 +0,0 @@
-t = db.jstests_group6;
-t.drop();
-
-for( i = 1; i <= 10; ++i ) {
- t.save( {i:new NumberLong( i ),y:1} );
-}
-
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
-
-t.drop();
-for( i = 1; i <= 10; ++i ) {
- if ( i % 2 == 0 ) {
- t.save( {i:new NumberLong( i ),y:1} );
- } else {
- t.save( {i:i,y:1} );
- }
-}
-
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
-
-t.drop();
-for( i = 1; i <= 10; ++i ) {
- if ( i % 2 == 1 ) {
- t.save( {i:new NumberLong( i ),y:1} );
- } else {
- t.save( {i:i,y:1} );
- }
-}
-
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
-
-assert.eq.automsg( "NumberLong(10)", "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count" ); \ No newline at end of file
diff --git a/jstests/group7.js b/jstests/group7.js
deleted file mode 100644
index f18a84055f4..00000000000
--- a/jstests/group7.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// Test yielding group command SERVER-1395
-
-t = db.jstests_group7;
-t.drop();
-
-function checkForYield( docs, updates ) {
- t.drop();
- a = 0;
- for( var i = 0; i < docs; ++i ) {
- t.save( {a:a} );
- }
- db.getLastError();
-
- // Iteratively update all a values atomically.
- p = startParallelShell( 'for( a = 0; a < ' + updates + '; ++a ) { db.jstests_group7.update( {$atomic:true}, {$set:{a:a}}, false, true ); db.getLastError(); }' );
-
- for( var i = 0; i < updates; ++i ) {
- print("running group " + i + " of " + updates);
- ret = t.group({key:{a:1},reduce:function(){},initial:{}});
- // Check if group sees more than one a value, indicating that it yielded.
- if ( ret.length > 1 ) {
- p();
- return true;
- }
- printjson( ret );
- }
-
- p();
- return false;
-}
-
-var yielded = false;
-var docs = 1500;
-var updates = 50;
-for( var j = 1; j <= 6; ++j ) {
- print("Iteration " + j + " docs = " + docs + " updates = " + updates);
- if ( checkForYield( docs, updates ) ) {
- yielded = true;
- break;
- }
- // Increase docs and updates to encourage yielding.
- docs *= 2;
- updates *= 2;
-}
-assert( yielded );
diff --git a/jstests/group_empty.js b/jstests/group_empty.js
deleted file mode 100644
index 62a734ed0f8..00000000000
--- a/jstests/group_empty.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-t = db.group_empty;
-t.drop();
-
-res1 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
-t.ensureIndex( { x : 1 } );
-res2 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
-assert.eq( res1, res2 );
diff --git a/jstests/grow_hash_table.js b/jstests/grow_hash_table.js
deleted file mode 100644
index 3e148b7240f..00000000000
--- a/jstests/grow_hash_table.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// This test creates a large projection, which causes a set of field names to
-// be stored in a StringMap (based on UnorderedFastKeyTable). The hash table
-// starts with 20 slots, but must be grown repeatedly to hold the complete set
-// of fields. This test verifies that we can grow the hash table repeatedly
-// with no failures.
-//
-// Related to SERVER-9824.
-
-var testDB = db.getSiblingDB('grow_hash_table');
-
-var doTest = function(count) {
- print('Testing with count of ' + count);
- testDB.dropDatabase();
- var id = { data: 1 };
- var doc = { _id: id };
- var projection = { };
-
- // Create a document and a projection with fields r1, r2, r3 ...
- for (var i = 1; i <= count; ++i) {
- var r = 'r' + i;
- doc[r] = i;
- projection[r] = 1;
- }
-
- // Store the document
- testDB.collection.insert(doc);
- var errorObj = testDB.getLastErrorObj();
- assert(errorObj.err == null,
- 'Failed to insert document, getLastErrorObj = ' + tojsononeline(errorObj));
-
- // Try to read the document using a large projection
- try {
- var findCount = testDB.collection.find({ _id: id }, projection).itcount();
- assert(findCount == 1,
- 'Failed to find single stored document, find().itcount() == ' + findCount);
- }
- catch (e) {
- testDB.dropDatabase();
- doassert('Test FAILED! Caught exception ' + tojsononeline(e));
- }
- testDB.dropDatabase();
- jsTest.log('Test PASSED');
-}
-
-doTest(10000);
diff --git a/jstests/hashindex1.js b/jstests/hashindex1.js
deleted file mode 100644
index 34bd6dc0725..00000000000
--- a/jstests/hashindex1.js
+++ /dev/null
@@ -1,94 +0,0 @@
-var t = db.hashindex1;
-t.drop()
-
-//test non-single field hashed indexes don't get created (maybe change later)
-var badspec = {a : "hashed" , b : 1};
-t.ensureIndex( badspec );
-assert.eq( t.getIndexes().length , 1 , "only _id index should be created");
-
-//test unique index not created (maybe change later)
-var goodspec = {a : "hashed"};
-t.ensureIndex( goodspec , {"unique" : true});
-assert.eq( t.getIndexes().length , 1 , "unique index got created.");
-
-//now test that non-unique index does get created
-t.ensureIndex(goodspec);
-assert.eq( t.getIndexes().length , 2 , "hashed index didn't get created");
-
-//test basic inserts
-for(i=0; i < 10; i++ ){
- t.insert( {a:i } );
-}
-assert.eq( t.find().count() , 10 , "basic insert didn't work");
-assert.eq( t.find().hint(goodspec).toArray().length , 10 , "basic insert didn't work");
-assert.eq( t.find({a : 3}).hint({_id : 1}).toArray()[0]._id ,
- t.find({a : 3}).hint(goodspec).toArray()[0]._id ,
- "hashindex lookup didn't work" );
-
-
-//make sure things with the same hash are not both returned
-t.insert( {a: 3.1} );
-assert.eq( t.find().count() , 11 , "additional insert didn't work");
-assert.eq( t.find({a : 3.1}).hint(goodspec).toArray().length , 1);
-assert.eq( t.find({a : 3}).hint(goodspec).toArray().length , 1);
-//test right obj is found
-assert.eq( t.find({a : 3.1}).hint(goodspec).toArray()[0].a , 3.1);
-
-//test that hashed cursor is used when it should be
-var cursorname = "BtreeCursor a_hashed";
-assert.eq( t.find({a : 1}).explain().cursor ,
- cursorname ,
- "not using hashed cursor");
-
-// SERVER-12222
-//printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() )
-//assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor ,
-// cursorname ,
-// "not using hashed cursor");
-assert.neq( t.find({c : 1}).explain().cursor ,
- cursorname ,
- "using irrelevant hashed cursor");
-
-printjson( t.find({a : {$in : [1,2]}}).explain() )
-// Hash index used with a $in set membership predicate.
-assert.eq( t.find({a : {$in : [1,2]}}).explain()["cursor"],
- "BtreeCursor a_hashed",
- "not using hashed cursor");
-
-// Hash index used with a singleton $and predicate conjunction.
-assert.eq( t.find({$and : [{a : 1}]}).explain()["cursor"],
- "BtreeCursor a_hashed",
- "not using hashed cursor");
-
-// Hash index used with a non singleton $and predicate conjunction.
-assert.eq( t.find({$and : [{a : {$in : [1,2]}},{a : {$gt : 1}}]}).explain()["cursor"],
- "BtreeCursor a_hashed",
- "not using hashed cursor");
-
-//test creation of index based on hash of _id index
-var goodspec2 = {'_id' : "hashed"};
-t.ensureIndex( goodspec2 );
-assert.eq( t.getIndexes().length , 3 , "_id index didn't get created");
-
-var newid = t.findOne()["_id"];
-assert.eq( t.find( {_id : newid} ).hint( {_id : 1} ).toArray()[0]._id ,
- t.find( {_id : newid} ).hint( goodspec2 ).toArray()[0]._id,
- "using hashed index and different index returns different docs");
-
-
-//test creation of sparse hashed index
-var sparseindex = {b : "hashed"};
-t.ensureIndex( sparseindex , {"sparse" : true});
-assert.eq( t.getIndexes().length , 4 , "sparse index didn't get created");
-
-//test sparse index has smaller total items on after inserts
-for(i=0; i < 10; i++ ){
- t.insert( {b : i} );
-}
-var totalb = t.find().hint(sparseindex).toArray().length;
-assert.eq( totalb , 10 , "sparse index has wrong total");
-
-var total = t.find().hint({"_id" : 1}).toArray().length;
-var totala = t.find().hint(goodspec).toArray().length;
-assert.eq(total , totala , "non-sparse index has wrong total");
-assert.lt(totalb , totala , "sparse index should have smaller total");
diff --git a/jstests/hashtest1.js b/jstests/hashtest1.js
deleted file mode 100644
index 981a0c36877..00000000000
--- a/jstests/hashtest1.js
+++ /dev/null
@@ -1,78 +0,0 @@
-//hashtest1.js
-//Simple tests to check hashing of various types
-//make sure that different numeric types hash to same thing, and other sanity checks
-
-var hash = function( v , seed ){
- if (seed)
- return db.runCommand({"_hashBSONElement" : v , "seed" : seed})["out"];
- else
- return db.runCommand({"_hashBSONElement" : v})["out"];
-};
-
-var oidHash = hash( ObjectId() );
-var oidHash2 = hash( ObjectId() );
-var oidHash3 = hash( ObjectId() );
-assert(! friendlyEqual( oidHash, oidHash2) , "ObjectIDs should hash to different things");
-assert(! friendlyEqual( oidHash, oidHash3) , "ObjectIDs should hash to different things");
-assert(! friendlyEqual( oidHash2, oidHash3) , "ObjectIDs should hash to different things");
-
-var intHash = hash( NumberInt(3) );
-var doubHash = hash( 3 );
-var doubHash2 = hash( 3.0 );
-var longHash = hash( NumberLong(3) );
-var fracHash = hash( NumberInt(3.5) );
-assert.eq( intHash , doubHash );
-assert.eq( intHash , doubHash2 );
-assert.eq( intHash , longHash );
-assert.eq( intHash , fracHash );
-
-var trueHash = hash( true );
-var falseHash = hash( false );
-assert(! friendlyEqual( trueHash, falseHash) , "true and false should hash to different things");
-
-var nullHash = hash( null );
-assert(! friendlyEqual( falseHash , nullHash ) , "false and null should hash to different things");
-
-var dateHash = hash( new Date() );
-sleep(1);
-var isodateHash = hash( ISODate() );
-assert(! friendlyEqual( dateHash, isodateHash) , "different dates should hash to different things");
-
-var stringHash = hash( "3" );
-assert(! friendlyEqual( intHash , stringHash ), "3 and \"3\" should hash to different things");
-
-var regExpHash = hash( RegExp("3") );
-assert(! friendlyEqual( stringHash , regExpHash) , "\"3\" and RegExp(3) should hash to different things");
-
-var intHash4 = hash( 4 );
-assert(! friendlyEqual( intHash , intHash4 ), "3 and 4 should hash to different things");
-
-var intHashSeeded = hash( 4 , 3 );
-assert(! friendlyEqual(intHash4 , intHashSeeded ), "different seeds should make different hashes");
-
-var minkeyHash = hash( MinKey );
-var maxkeyHash = hash( MaxKey );
-assert(! friendlyEqual(minkeyHash , maxkeyHash ), "minkey and maxkey should hash to different things");
-
-var arrayHash = hash( [0,1.0,NumberLong(2)] );
-var arrayHash2 = hash( [0,NumberInt(1),2] );
-assert.eq( arrayHash , arrayHash2 , "didn't squash numeric types in array");
-
-var objectHash = hash( {"0":0, "1" : NumberInt(1), "2" : 2} );
-assert(! friendlyEqual(objectHash , arrayHash2) , "arrays and sub-objects should hash to different things");
-
-var c = hash( {a : {}, b : 1} );
-var d = hash( {a : {b : 1}} );
-assert(! friendlyEqual( c , d ) , "hashing doesn't group sub-docs and fields correctly");
-
-var e = hash( {a : 3 , b : [NumberLong(3), {c : NumberInt(3)}]} );
-var f = hash( {a : NumberLong(3) , b : [NumberInt(3), {c : 3.0}]} );
-assert.eq( e , f , "recursive number squashing doesn't work");
-
-var nanHash = hash( 0/0 );
-var zeroHash = hash( 0 );
-assert.eq( nanHash , zeroHash , "NaN and Zero should hash to the same thing");
-
-
-//should also test that CodeWScope hashes correctly
-//but waiting for SERVER-3391 (CodeWScope support in shell) \ No newline at end of file
diff --git a/jstests/hint1.js b/jstests/hint1.js
deleted file mode 100644
index b5a580f2b93..00000000000
--- a/jstests/hint1.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-p = db.jstests_hint1;
-p.drop();
-
-p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } );
-p.ensureIndex( { ts: 1 } );
-
-e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: "alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain();
-assert.eq(e.indexBounds.ts[0][0].getTime(), new Date(1234119308272).getTime(), "A");
-
-//printjson(e);
-
-assert.eq( /*just below min date is bool true*/true, e.indexBounds.ts[0][1], "B");
-
-assert.eq(1, p.find({ live: true, ts: { $lt: new Date(1234119308272) }, cls: "entry", verticals: "alleyinsider" }).sort({ ts: -1 }).hint({ ts: 1 }).count());
-
diff --git a/jstests/hostinfo.js b/jstests/hostinfo.js
deleted file mode 100644
index 16c3810b2c4..00000000000
--- a/jstests/hostinfo.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// SERVER-4615: Ensure hostInfo() command returns expected results on each platform
-
-assert.commandWorked( db.hostInfo() );
-var hostinfo = db.hostInfo();
-
-// test for os-specific fields
-if (hostinfo.os.type == "Windows") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Windows os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Windows version" );
-
-} else if (hostinfo.os.type == "Linux") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Linux os/distro name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Lindows version" );
-
-} else if (hostinfo.os.type == "Darwin") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Darwin os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Darwin version" );
-
-} else if (hostinfo.os.type == "BSD") {
- assert.neq( hostinfo.os.name, "" || null, "Missing FreeBSD os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing FreeBSD version" );
-}
-
-// comment out this block for systems which have not implemented hostinfo.
-if (hostinfo.os.type != "") {
- assert.neq( hostinfo.system.hostname, "" || null, "Missing Hostname" );
- assert.neq( hostinfo.system.currentTime, "" || null, "Missing Current Time" );
- assert.neq( hostinfo.system.cpuAddrSize, "" || null || 0, "Missing CPU Address Size" );
- assert.neq( hostinfo.system.memSizeMB, "" || null, "Missing Memory Size" );
- assert.neq( hostinfo.system.numCores, "" || null || 0, "Missing Number of Cores" );
- assert.neq( hostinfo.system.cpuArch, "" || null, "Missing CPU Architecture" );
- assert.neq( hostinfo.system.numaEnabled, "" || null, "Missing NUMA flag" );
-}
diff --git a/jstests/id1.js b/jstests/id1.js
deleted file mode 100644
index 9236340e4ec..00000000000
--- a/jstests/id1.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.id1
-t.drop();
-
-t.save( { _id : { a : 1 , b : 2 } , x : "a" } );
-t.save( { _id : { a : 1 , b : 2 } , x : "b" } );
-t.save( { _id : { a : 3 , b : 2 } , x : "c" } );
-t.save( { _id : { a : 4 , b : 2 } , x : "d" } );
-t.save( { _id : { a : 4 , b : 2 } , x : "e" } );
-t.save( { _id : { a : 2 , b : 2 } , x : "f" } );
-
-assert.eq( 4 , t.find().count() , "A" );
-assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x );
-assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x );
-assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x );
-assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x );
diff --git a/jstests/idhack.js b/jstests/idhack.js
deleted file mode 100644
index 311c4ebc935..00000000000
--- a/jstests/idhack.js
+++ /dev/null
@@ -1,59 +0,0 @@
-
-t = db.idhack
-t.drop()
-
-
-t.insert( { _id : { x : 1 } , z : 1 } )
-t.insert( { _id : { x : 2 } , z : 2 } )
-t.insert( { _id : { x : 3 } , z : 3 } )
-t.insert( { _id : 1 , z : 4 } )
-t.insert( { _id : 2 , z : 5 } )
-t.insert( { _id : 3 , z : 6 } )
-
-assert.eq( 2 , t.findOne( { _id : { x : 2 } } ).z , "A1" )
-assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).count() , "A2" )
-assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).itcount() , "A3" )
-
-t.update( { _id : { x : 2 } } , { $set : { z : 7 } } )
-assert.eq( 7 , t.findOne( { _id : { x : 2 } } ).z , "B1" )
-
-t.update( { _id : { $gte : 2 } } , { $set : { z : 8 } } , false , true )
-assert.eq( 4 , t.findOne( { _id : 1 } ).z , "C1" )
-assert.eq( 8 , t.findOne( { _id : 2 } ).z , "C2" )
-assert.eq( 8 , t.findOne( { _id : 3 } ).z , "C3" )
-
-// explain output should show that the ID hack was applied.
-var query = { _id : { x : 2 } };
-var explain = t.find( query ).explain( true );
-print( "explain for " + tojson( query , "" , true ) + " = " + tojson( explain ) );
-assert.eq( 1 , explain.n , "D1" );
-assert.eq( 1 , explain.nscanned , "D2" );
-assert.neq( undefined , explain.cursor , "D3" );
-assert.neq( "" , explain.cursor , "D4" );
-assert.neq( undefined , explain.indexBounds , "D5" );
-assert.neq( {} , explain.indexBounds , "D6" );
-
-// ID hack cannot be used with hint().
-var query = { _id : { x : 2 } };
-var explain = t.find( query ).explain();
-t.ensureIndex( { _id : 1 , a : 1 } );
-var hintExplain = t.find( query ).hint( { _id : 1 , a : 1 } ).explain();
-print( "explain for hinted query = " + tojson( hintExplain ) );
-assert.neq( explain.cursor, hintExplain.cursor, "E1" );
-
-// ID hack cannot be used with skip().
-var skipExplain = t.find( query ).skip(1).explain();
-print( "explain for skip query = " + tojson( skipExplain ) );
-assert.neq( explain.cursor, skipExplain.cursor, "F1" );
-
-// Only acceptable projection for ID hack is {_id: 1}.
-var projectionExplain = t.find( query, { _id : 0, z : 1 } ).explain();
-print( "explain for projection query = " + tojson( projectionExplain ) );
-assert.neq( explain.cursor, projectionExplain.cursor, "G1" );
-
-// Covered query returning _id field only can be handled by ID hack.
-var coveredExplain = t.find( query, { _id : 1 } ).explain();
-print( "explain for covered query = " + tojson( coveredExplain ) );
-assert.eq( explain.cursor, coveredExplain.cursor, "H1" );
-// Check doc from covered ID hack query.
-assert.eq( { _id : { x: 2 } }, t.findOne( query, { _id : 1 } ), "H2" );
diff --git a/jstests/in.js b/jstests/in.js
deleted file mode 100644
index da1313692e1..00000000000
--- a/jstests/in.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.in1;
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-
-// $in must take an array as argument: SERVER-7445
-assert.throws( function() { return t.find( { a : { $in : { x : 1 } } } ).itcount(); } );
-assert.throws( function() { return t.find( { a : { $in : 1 } } ).itcount(); } );
-
-assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" );
-assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" );
-assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" );
-
-t.ensureIndex( { a : 1 } );
-
-assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" );
-assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" );
-assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" );
-
-assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" );
-
-assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "H" );
diff --git a/jstests/in2.js b/jstests/in2.js
deleted file mode 100644
index 66b90daa25a..00000000000
--- a/jstests/in2.js
+++ /dev/null
@@ -1,33 +0,0 @@
-
-t = db.in2;
-
-function go( name , index ){
-
- t.drop();
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 1 , b : 2 } );
- t.save( { a : 1 , b : 3 } );
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 2 , b : 2 } );
- t.save( { a : 3 , b : 3 } );
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 2 , b : 1 } );
- t.save( { a : 3 , b : 1 } );
-
- if ( index )
- t.ensureIndex( index );
-
- assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" );
-
- assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" );
-}
-
-go( "no index" );
-go( "index on a" , { a : 1 } );
-go( "index on b" , { b : 1 } );
-go( "index on a&b" , { a : 1 , b : 1 } );
-
-
diff --git a/jstests/in3.js b/jstests/in3.js
deleted file mode 100644
index b0a8bb7b81f..00000000000
--- a/jstests/in3.js
+++ /dev/null
@@ -1,11 +0,0 @@
-t = db.jstests_in3;
-
-t.drop();
-t.ensureIndex( {i:1} );
-assert.eq( {i:[[3,3]]}, t.find( {i:{$in:[3]}} ).explain().indexBounds , "A1" );
-assert.eq( {i:[[3,3],[6,6]]}, t.find( {i:{$in:[3,6]}} ).explain().indexBounds , "A2" );
-
-for ( var i=0; i<20; i++ )
- t.insert( { i : i } );
-
-assert.eq( 3 , t.find( {i:{$in:[3,6]}} ).explain().nscanned , "B1" )
diff --git a/jstests/in4.js b/jstests/in4.js
deleted file mode 100644
index 3e3dca29528..00000000000
--- a/jstests/in4.js
+++ /dev/null
@@ -1,42 +0,0 @@
-t = db.jstests_in4;
-
-function checkRanges( a, b ) {
- assert.eq( a, b );
-}
-
-t.drop();
-t.ensureIndex( {a:1,b:1} );
-checkRanges( {a:[[2,2]],b:[[3,3]]}, t.find( {a:2,b:3} ).explain().indexBounds );
-checkRanges( {a:[[2,2],[3,3]],b:[[4,4]]}, t.find( {a:{$in:[2,3]},b:4} ).explain().indexBounds );
-checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds );
-checkRanges( {a:[[2,2],[3,3]],b:[[4,4],[5,5]]}, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().indexBounds );
-
-checkRanges( {a:[[2,2],[3,3]],b:[[4,10]]}, t.find( {a:{$in:[2,3]},b:{$gt:4,$lt:10}} ).explain().indexBounds );
-
-t.save( {a:1,b:1} );
-t.save( {a:2,b:4.5} );
-t.save( {a:2,b:4} );
-assert.eq( 2, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).hint( {a:1,b:1} ).explain().nscanned );
-assert.eq( 2, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).a );
-assert.eq( 4, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).b );
-
-t.drop();
-t.ensureIndex( {a:1,b:1,c:1} );
-checkRanges( {a:[[2,2]],b:[[3,3],[4,4]],c:[[5,5]]}, t.find( {a:2,b:{$in:[3,4]},c:5} ).explain().indexBounds );
-
-t.save( {a:2,b:3,c:5} );
-t.save( {a:2,b:3,c:4} );
-assert.eq( 1, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned );
-t.remove({});
-t.save( {a:2,b:4,c:5} );
-t.save( {a:2,b:4,c:4} );
-assert.eq( 2, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned );
-
-t.drop();
-t.ensureIndex( {a:1,b:-1} );
-ib = t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds;
-checkRanges( {a:[[2,2]],b:[[4,4],[3,3]]}, ib );
-assert( ib.b[ 0 ][ 0 ] > ib.b[ 1 ][ 0 ] );
-ib = t.find( {a:2,b:{$in:[3,4]}} ).sort( {a:-1,b:1} ).explain().indexBounds;
-checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, ib );
-assert( ib.b[ 0 ][ 0 ] < ib.b[ 1 ][ 0 ] );
diff --git a/jstests/in5.js b/jstests/in5.js
deleted file mode 100644
index 435c8864004..00000000000
--- a/jstests/in5.js
+++ /dev/null
@@ -1,56 +0,0 @@
-
-t = db.in5
-
-function go( fn ){
- t.drop();
- o = {};
- o[fn] = { a : 1 , b : 2 };
- t.insert( o );
-
- x = {};
- x[fn] = { a : 1 , b : 2 };
- assert.eq( 1 , t.find( x ).itcount() , "A1 - " + fn );
-
-
- y = {};
- y[fn] = { $in : [ { a : 1 , b : 2 } ] }
- assert.eq( 1 , t.find( y ).itcount() , "A2 - " + fn );
-
-
- z = {};
- z[fn+".a"] = 1;
- z[fn+".b"] = { $in : [ 2 ] }
- assert.eq( 1 , t.find( z ).itcount() , "A3 - " + fn ); // SERVER-1366
-
-
- i = {}
- i[fn] = 1
- t.ensureIndex( i )
-
- assert.eq( 1 , t.find( x ).itcount() , "B1 - " + fn );
- assert.eq( 1 , t.find( y ).itcount() , "B2 - " + fn );
- assert.eq( 1 , t.find( z ).itcount() , "B3 - " + fn ); // SERVER-1366
-
- t.dropIndex( i )
-
- assert.eq( 1 , t.getIndexes().length , "T2" );
-
- i = {}
- i[fn + ".a" ] = 1;
- t.ensureIndex( i )
- assert.eq( 2 , t.getIndexes().length , "T3" );
-
- assert.eq( 1 , t.find( x ).itcount() , "C1 - " + fn );
- assert.eq( 1 , t.find( y ).itcount() , "C2 - " + fn );
- assert.eq( 1 , t.find( z ).itcount() , "C3 - " + fn ); // SERVER-1366
-
- t.dropIndex( i )
-
-
-}
-
-go( "x" );
-go( "_id" )
-
-
-
diff --git a/jstests/in6.js b/jstests/in6.js
deleted file mode 100644
index f114d93442a..00000000000
--- a/jstests/in6.js
+++ /dev/null
@@ -1,13 +0,0 @@
-t = db.jstests_in6;
-t.drop();
-
-t.save( {} );
-
-function doTest() {
- assert.eq.automsg( "1", "t.count( {i:null} )" );
- assert.eq.automsg( "1", "t.count( {i:{$in:[null]}} )" );
-}
-
-doTest();
-t.ensureIndex( {i:1} );
-doTest();
diff --git a/jstests/in8.js b/jstests/in8.js
deleted file mode 100644
index 5e7e587629f..00000000000
--- a/jstests/in8.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// SERVER-2829 Test arrays matching themselves within a $in expression.
-
-t = db.jstests_in8;
-t.drop();
-
-t.save( {key: [1]} );
-t.save( {key: ['1']} );
-t.save( {key: [[2]]} );
-
-function doTest() {
- assert.eq( 1, t.count( {key:[1]} ) );
- assert.eq( 1, t.count( {key:{$in:[[1]]}} ) );
- assert.eq( 1, t.count( {key:{$in:[[1]],$ne:[2]}} ) );
- assert.eq( 1, t.count( {key:{$in:[['1']],$type:2}} ) );
- assert.eq( 1, t.count( {key:['1']} ) );
- assert.eq( 1, t.count( {key:{$in:[['1']]}} ) );
- assert.eq( 1, t.count( {key:[2]} ) );
- assert.eq( 1, t.count( {key:{$in:[[2]]}} ) );
-}
-
-doTest();
-t.ensureIndex( {key:1} );
-doTest();
diff --git a/jstests/in9.js b/jstests/in9.js
deleted file mode 100644
index cbe28e2e2df..00000000000
--- a/jstests/in9.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// SERVER-2343 Test $in empty array matching.
-
-t = db.jstests_in9;
-t.drop();
-
-function someData() {
- t.remove({});
- t.save( {key: []} );
-}
-
-function moreData() {
- someData();
- t.save( {key: [1]} );
- t.save( {key: ['1']} );
- t.save( {key: null} );
- t.save( {} );
-}
-
-function check() {
- assert.eq( 1, t.count( {key:[]} ) );
- assert.eq( 1, t.count( {key:{$in:[[]]}} ) );
-}
-
-function doTest() {
- someData();
- check();
- moreData();
- check();
-}
-
-doTest();
-
-// SERVER-1943 not fixed yet
-t.ensureIndex( {key:1} );
-doTest();
diff --git a/jstests/ina.js b/jstests/ina.js
deleted file mode 100644
index cf614ab994d..00000000000
--- a/jstests/ina.js
+++ /dev/null
@@ -1,15 +0,0 @@
-// Uassert when $elemMatch is attempted within $in SERVER-3545
-
-t = db.jstests_ina;
-t.drop();
-t.save( {} );
-
-assert.throws( function() { t.find( {a:{$in:[{$elemMatch:{b:1}}]}} ).itcount(); } );
-assert.throws( function() { t.find( {a:{$not:{$in:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
-
-assert.throws( function() { t.find( {a:{$nin:[{$elemMatch:{b:1}}]}} ).itcount(); } );
-assert.throws( function() { t.find( {a:{$not:{$nin:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
-
-// NOTE Above we don't check cases like {b:2,$elemMatch:{b:3,4}} - generally
-// we assume that the first key is $elemMatch if any key is, and validating
-// every key is expensive in some cases. \ No newline at end of file
diff --git a/jstests/inb.js b/jstests/inb.js
deleted file mode 100644
index 34ec843d36c..00000000000
--- a/jstests/inb.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// Test $in regular expressions with overlapping index bounds. SERVER-4677
-
-t = db.jstests_inb;
-t.drop();
-
-function checkBoundsAndResults( query ) {
- assert.eq( [ 'a', 'b' ], t.find( query ).explain().indexBounds.x[0] );
- assert.eq( 4, t.count( query ) );
- assert.eq( 4, t.find( query ).itcount() );
-}
-
-t.ensureIndex( {x:1} );
-t.save( {x:'aa'} );
-t.save( {x:'ab'} );
-t.save( {x:'ac'} );
-t.save( {x:'ad'} );
-
-checkBoundsAndResults( {x:{$in:[/^a/,/^ab/]}} );
-checkBoundsAndResults( {x:{$in:[/^ab/,/^a/]}} );
diff --git a/jstests/inc-SERVER-7446.js b/jstests/inc-SERVER-7446.js
deleted file mode 100644
index 73cdef3dbd5..00000000000
--- a/jstests/inc-SERVER-7446.js
+++ /dev/null
@@ -1,43 +0,0 @@
-var c = db.incSERVER7446
-
-// A 32 bit overflow spills to 64 bits
-c.drop();
-c.save( { a: NumberInt( "2147483647" ) } );
-c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
-var gle = db.getLastErrorObj();
-assert.eq(1, gle.n, "Object not inserted");
-var res = c.findOne();
-assert.eq(NumberLong, res.a.constructor,
- "NumberInt incremented beyond std::numeric_limits<in32_t>::max() not NumberLong");
-assert.eq(NumberLong("2147483648"), res.a,
- "NumberInt incremented beyond std::numeric_limits<in32_t>::max() has wrong value");
-
-// A 32 bit underflow spills to 64 bits
-c.drop();
-c.save( { a: NumberInt( "-2147483648" ) } );
-c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
-gle = db.getLastErrorObj();
-assert.eq(1, gle.n, "Object not inserted");
-res = c.findOne();
-assert.eq(NumberLong, res.a.constructor,
- "NumberInt decremented beyond std::numeric_limits<in32_t>::min() not NumberLong");
-assert.eq(NumberLong("-2147483649"), res.a,
- "NumberInt decremented beyond std::numeric_limits<in32_t>::min() has wrong value");
-
-// A 64 bit overflow is an error
-c.drop();
-c.save( { a: NumberLong( "9223372036854775807" ) } );
-c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
-gle = db.getLastErrorObj();
-assert.eq(0, gle.n,
- "Did not fail to increment a NumberLong past std::numeric_limits<int64_t>::max()");
-
-// A 64 bit underflow is an error
-c.drop();
-c.save( { a: NumberLong( "-9223372036854775808" ) } );
-c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
-gle = db.getLastErrorObj();
-assert.eq(0, gle.n,
- "Did not fail to decrement a NumberLong past std::numeric_limits<int64_t>::min()");
-
-c.drop()
diff --git a/jstests/inc1.js b/jstests/inc1.js
deleted file mode 100644
index 027f307a476..00000000000
--- a/jstests/inc1.js
+++ /dev/null
@@ -1,32 +0,0 @@
-
-t = db.inc1;
-t.drop();
-
-function test( num , name ){
- assert.eq( 1 , t.count() , name + " count" );
- assert.eq( num , t.findOne().x , name + " value" );
-}
-
-t.save( { _id : 1 , x : 1 } );
-test( 1 , "A" );
-
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 2 , "B" );
-
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 3 , "C" );
-
-t.update( { _id : 2 } , { $inc : { x : 1 } } );
-test( 3 , "D" );
-
-t.update( { _id : 1 } , { $inc : { x : 2 } } );
-test( 5 , "E" );
-
-t.update( { _id : 1 } , { $inc : { x : -1 } } );
-test( 4 , "F" );
-
-t.ensureIndex( { x : 1 } );
-
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 5 , "G" );
-
diff --git a/jstests/inc2.js b/jstests/inc2.js
deleted file mode 100644
index 75a8e65a384..00000000000
--- a/jstests/inc2.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.inc2
-t.drop();
-
-t.save( { _id : 1 , x : 1 } );
-t.save( { _id : 2 , x : 2 } );
-t.save( { _id : 3 , x : 3 } );
-
-function order(){
- return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } );
-}
-
-assert.eq( "1,2,3" , order() , "A" );
-
-t.update( { _id : 1 } , { $inc : { x : 4 } } );
-assert.eq( "2,3,1" , order() , "B" );
-
-t.ensureIndex( { x : 1 } );
-assert.eq( "2,3,1" , order() , "C" );
-
-t.update( { _id : 3 } , { $inc : { x : 4 } } );
-assert.eq( "2,1,3" , order() , "D" );
diff --git a/jstests/inc3.js b/jstests/inc3.js
deleted file mode 100644
index baeeb198cf4..00000000000
--- a/jstests/inc3.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.inc3;
-
-t.drop();
-t.save( { _id : 1 , z : 1 , a : 1 } );
-t.update( {} , { $inc : { z : 1 , a : 1 } } );
-t.update( {} , { $inc : { a : 1 , z : 1 } } );
-assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" )
-
-
-t.drop();
-t.save( { _id : 1 , a : 1 , z : 1 } );
-t.update( {} , { $inc : { z : 1 , a : 1 } } );
-t.update( {} , { $inc : { a : 1 , z : 1 } } );
-assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" )
-
diff --git a/jstests/index1.js b/jstests/index1.js
deleted file mode 100644
index 64bbfa8732b..00000000000
--- a/jstests/index1.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.embeddedIndexTest;
-
-t.remove( {} );
-
-o = { name : "foo" , z : { a : 17 , b : 4} };
-t.save( o );
-
-assert( t.findOne().z.a == 17 );
-assert( t.findOne( { z : { a : 17 } } ) == null);
-
-t.ensureIndex( { "z.a" : 1 } );
-
-assert( t.findOne().z.a == 17 );
-assert( t.findOne( { z : { a : 17 } } ) == null);
-
-o = { name : "bar" , z : { a : 18 } };
-t.save( o );
-
-assert.eq.automsg( "2", "t.find().length()" );
-assert.eq.automsg( "2", "t.find().sort( { 'z.a' : 1 } ).length()" );
-assert.eq.automsg( "2", "t.find().sort( { 'z.a' : -1 } ).length()" );
-
-assert(t.validate().valid);
diff --git a/jstests/index10.js b/jstests/index10.js
deleted file mode 100644
index 92f5927097d..00000000000
--- a/jstests/index10.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// unique index, drop dups
-
-t = db.jstests_index10;
-t.drop();
-
-t.save( {i:1} );
-t.save( {i:2} );
-t.save( {i:1} );
-t.save( {i:3} );
-t.save( {i:1} );
-
-t.ensureIndex( {i:1} );
-assert.eq( 5, t.count() );
-t.dropIndexes();
-t.ensureIndex( {i:1}, true );
-err = db.getLastErrorObj();
-assert( err.err , "err.err" );
-assert.eq( 11000, err.code );
-assert( 1 == db.system.indexes.count( {ns:"test.jstests_index10" } ), "only id index" );
-// t.dropIndexes();
-
-ts = t.totalIndexSize();
-t.ensureIndex( {i:1}, [ true, true ] );
-ts2 = t.totalIndexSize();
-
-assert.eq( ts * 2, ts2, "totalIndexSize fail" );
-
-assert.eq( 3, t.count() );
-assert.eq( 1, t.count( {i:1} ) );
-
-t.ensureIndex( {j:1}, [ true, true ] );
-assert.eq( 1, t.count() );
diff --git a/jstests/index13.js b/jstests/index13.js
deleted file mode 100644
index 7e317d90d94..00000000000
--- a/jstests/index13.js
+++ /dev/null
@@ -1,147 +0,0 @@
-// Top level match fields within an $elemMatch clause may constrain multiple subfields from a
-// compound multikey index. SERVER-3104
-//
-// Given a multikey index { 'a.b':1, 'a.c':1 } and query { 'a.b':3, 'a.c':3 } only the index field
-// 'a.b' is constrained to the range [3, 3], while the index field 'a.c' is just constrained
-// to be within minkey and maxkey. This implementation ensures that the document
-// { a:[ { b:3 }, { c:3 } ] }, which generates index keys { 'a.b':3, 'a.c':null } and
-// { 'a.b':null and 'a.c':3 } will be retrieved for the query. (See SERVER-958 for more
-// information.)
-//
-// If the query is instead { a:{ $elemMatch:{ b:3, c:3 } } } then the document
-// { a:[ { b:3 }, { c:3 } ] } does not match. Until SERVER-3104 was implemented, the index
-// constraints would be [3,3] on the 'a.b' field and [minkey,maxkey] on the 'a.c' field, the same as
-// for the non $elemMatch query in the previous paragraph. With the SERVER-3104 implementation,
-// constraints on two fields within a $elemMatch parent can both be applied to an index. Due to the
-// SERVER-3104 implementation, the index constraints become [3,3] on the 'a.b' field _and_ [3,3] on
-// the 'a.c' field.
-
-t = db.jstests_index13;
-t.drop();
-
-function assertConsistentResults( query ) {
- assert.eq( t.find( query ).hint( { $natural:1 } ).sort( { _id:1 } ).toArray(),
- t.find( query ).hint( index ).sort( { _id:1 } ).toArray() );
-}
-
-function assertResults( query ) {
- explain = t.find( query ).hint( index ).explain();
- // printjson( explain ); // debug
- assertConsistentResults( query );
-}
-
-// Cases with single dotted index fied names.
-index = { 'a.b':1, 'a.c':1 };
-t.ensureIndex( index );
-t.save( { a:[ { b:1 }, { c:1 } ] } );
-t.save( { a:[ { b:1, c:1 } ] } );
-assert.eq( 2, t.count() );
-// Without $elemMatch.
-assertResults( { 'a.b':1, 'a.c':1 } );
-// With $elemMatch.
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } } } );
-
-// Without shared $elemMatch.
-assertResults( { 'a.b':1, a:{ $elemMatch:{ c:1 } } } );
-// Two different $elemMatch expressions.
-assertResults( { $and:[ { a:{ $elemMatch:{ b:1 } } },
- { a:{ $elemMatch:{ c:1 } } } ] } );
-
-
-// Cases relating to parse order and inclusion of intersected ranges.
-assertResults( { 'a.b':1, a:{ $elemMatch:{ b:{ $gt:0 }, c:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':1 } );
-assertResults( { 'a.c':1, a:{ $elemMatch:{ b:1, c:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':{ $gt:0 } } );
-
-// Cases with $elemMatch on multiple fields.
-t.remove({});
-index = { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 };
-t.ensureIndex( index );
-t.insert( { a:[ { b:1 }, { c:1 } ], d: { e:1, f:1 } } );
-t.insert( { a:[ { b:1, c:1 } ], d: { e:1, f:1 } } );
-t.insert( { a:{ b:1, c:1 }, d:[ { e:1, f:1 } ] } );
-t.insert( { a:{ b:1, c:1 }, d:[ { e:1 }, { f:1 } ] } );
-
-assert.eq( 4, t.count() );
-
-// Without $elemMatch.
-assertResults( { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 } );
-// With $elemMatch.
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd': { $elemMatch:{ e:1, f:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd.e': 1, 'd.f' : 1 } );
-assertResults( { 'a.b': 1, 'a.c' : 1, 'd': { $elemMatch:{ e:1, f:1 } } } );
-
-
-// Cases with nested $elemMatch.
-t.remove({})
-index = { 'a.b.c':1, 'a.b.d' :1 };
-t.ensureIndex( index );
-t.insert( { a:[ { b: [ { c : 1, d : 1 } ] } ] } ) ;
-t.insert( { a:[ { b: [ { c : 1 } , { d : 1 } ] } ] } ) ;
-assert.eq( 2, t.count() );
-// Without $elemMatch.
-assertResults( { 'a.b.c':1, 'a.b.d':1 } );
-// With $elemMatch.
-assertResults( { "a" : { $elemMatch : { "b" : { $elemMatch : { c : 1, d : 1 } } } } } );
-
-// Cases with double dotted index field names.
-t.drop();
-index = { 'a.b.x':1, 'a.b.y':1 };
-t.ensureIndex( index );
-t.save( { a:{ b:{ x:1, y:1 } } } );
-t.save( { a:[ { b:{ x:1 } }, { b:{ y:1 } } ] } );
-t.save( { a:[ { b:[ { x:1 }, { y:1 } ] } ] } );
-t.save( { a:[ { b:[ { x:1, y:1 } ] } ] } );
-assert.eq( 4, t.count() );
-
-// No $elemMatch.
-assertResults( { 'a.b.x':1, 'a.b.y':1 } );
-// $elemMatch with dotted children.
-assertResults( { a:{ $elemMatch:{ 'b.x':1, 'b.y':1 } } } );
-// $elemMatch with undotted children.
-assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
-
-// Cases where a field is indexed along with its children.
-t.dropIndexes();
-index = { 'a':1, 'a.b.x':1, 'a.b.y':1 };
-t.ensureIndex( index );
-
-// With $ne.
-assertResults( { a:{ $ne:4 }, 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
-
-// No constraint on a prior parent field.
-assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
-
-// Cases with double dotted index field names branching to different fields at each dot.
-t.drop();
-index = { 'a.b.c':1, 'a.e.f':1, 'a.b.d':1, 'a.e.g':1 }
-t.ensureIndex( index );
-t.save( { a:{ b:{ c:1, d:1 }, e:{ f:1, g:1 } } } );
-t.save( { a:[ { b:{ c:1 }, e:{ f:1 } }, { b:{ d:1 }, e:{ g:1 } } ] } );
-t.save( { a:[ { b:{ c:1 } }, { e:{ f:1 } }, { b:{ d:1 } }, { e:{ g:1 } } ] } );
-t.save( { a:[ { b:[ { c:1 }, { d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
-t.save( { a:[ { b:[ { c:[ 1 ] }, { d:[ 1 ] } ] }, { e:[ { f:[ 1 ] }, { g:[ 1 ] } ] } ] } );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1, g:1 } ] } ] } );
-assert.eq( 7, t.count() );
-
-// Constraint on a prior cousin field.
-assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } },
- 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
-
-// Different constraint on a prior cousin field.
-assertResults( { 'a.b':{ $elemMatch:{ d:1 } },
- 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
-
-
-// Cases with double dotted index field names branching to different fields at each dot, and the
-// same field name strings after the second dot.
-t.drop();
-index = { 'a.b.c':1, 'a.e.c':1, 'a.b.d':1, 'a.e.d':1 }
-t.ensureIndex( index );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { c:1, d:1 } ] } ] } );
-assert.eq( 1, t.count() );
-
-// Constraint on a prior cousin field with the same field names.
-assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, 'a.e':{ $elemMatch:{ c:1, d:1 } } } );
diff --git a/jstests/index2.js b/jstests/index2.js
deleted file mode 100644
index b54abcaa792..00000000000
--- a/jstests/index2.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/* test indexing where the key is an embedded object.
- */
-
-t = db.embeddedIndexTest2;
-
-t.drop();
-assert( t.findOne() == null );
-
-o = { name : "foo" , z : { a : 17 } };
-p = { name : "foo" , z : { a : 17 } };
-q = { name : "barrr" , z : { a : 18 } };
-r = { name : "barrr" , z : { k : "zzz", L:[1,2] } };
-
-t.save( o );
-
-assert( t.findOne().z.a == 17 );
-
-t.save( p );
-t.save( q );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-t.save( r );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-t.ensureIndex( { z : 1 } );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-assert( t.find().sort( { z : 1 } ).length() == 4 );
-assert( t.find().sort( { z : -1 } ).length() == 4 );
-
-assert(t.validate().valid);
diff --git a/jstests/index3.js b/jstests/index3.js
deleted file mode 100644
index 80139460cb4..00000000000
--- a/jstests/index3.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-t = db.index3;
-t.drop();
-
-assert( t.getIndexes().length == 0 );
-
-t.ensureIndex( { name : 1 } );
-
-t.save( { name : "a" } );
-
-t.ensureIndex( { name : 1 } );
-
-assert( t.getIndexes().length == 2 );
-
-assert(t.validate().valid);
diff --git a/jstests/index4.js b/jstests/index4.js
deleted file mode 100644
index 9dd731c83ee..00000000000
--- a/jstests/index4.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// index4.js
-
-
-t = db.index4;
-t.drop();
-
-t.save( { name : "alleyinsider" ,
- instances : [
- { pool : "prod1" } ,
- { pool : "dev1" }
- ]
- } );
-
-t.save( { name : "clusterstock" ,
- instances : [
- { pool : "dev1" }
- ]
- } );
-
-
-// this should fail, not allowed -- we confirm that.
-t.ensureIndex( { instances : { pool : 1 } } );
-assert.eq( 0, db.system.indexes.find( {ns:"test.index4",name:{$ne:"_id_"}} ).count(), "no indexes should be here yet");
-
-t.ensureIndex( { "instances.pool" : 1 } );
-
-sleep( 10 );
-
-a = t.find( { instances : { pool : "prod1" } } );
-assert( a.length() == 1, "len1" );
-assert( a[0].name == "alleyinsider", "alley" );
-
-assert(t.validate().valid, "valid" );
diff --git a/jstests/index5.js b/jstests/index5.js
deleted file mode 100644
index 841ac12ed45..00000000000
--- a/jstests/index5.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// index5.js - test reverse direction index
-
-function validate() {
- assert.eq( 2, t.find().count() );
- f = t.find().sort( { a: 1 } );
- assert.eq( 2, t.count() );
- assert.eq( 1, f[ 0 ].a );
- assert.eq( 2, f[ 1 ].a );
- r = t.find().sort( { a: -1 } );
- assert.eq( 2, r.count() );
- assert.eq( 2, r[ 0 ].a );
- assert.eq( 1, r[ 1 ].a );
-}
-
-t = db.index5;
-t.drop();
-
-t.save( { a: 1 } );
-t.save( { a: 2 } );
-
-validate();
-
-t.ensureIndex( { a: -1 } );
-validate();
diff --git a/jstests/index6.js b/jstests/index6.js
deleted file mode 100644
index 8dbd8f74fcf..00000000000
--- a/jstests/index6.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// index6.js Test indexes on array subelements.
-
-r = db.ed.db.index6;
-r.drop();
-
-r.save( { comments : [ { name : "eliot", foo : 1 } ] } );
-r.ensureIndex( { "comments.name": 1 } );
-assert( r.findOne( { "comments.name": "eliot" } ) );
diff --git a/jstests/index7.js b/jstests/index7.js
deleted file mode 100644
index 9e3a6c66d11..00000000000
--- a/jstests/index7.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// index7.js Test that we use an index when and only when we expect to.
-
-function index( q ) {
- assert( q.explain().cursor.match( /^BtreeCursor/ ) , "index assert" );
-}
-
-function noIndex( q ) {
- assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" );
-}
-
-function start( k, q, rev) {
- var exp = q.explain().indexBounds;
- var s = {a:exp.a[rev?1:0][0],b:exp.b[0][0]};
- assert.eq( k.a, s.a );
- assert.eq( k.b, s.b );
-}
-function end( k, q, rev) {
- var exp = q.explain().indexBounds
- var e = {a:exp.a[rev?1:0][1],b:exp.b[0][1]};
- assert.eq( k.a, e.a );
- assert.eq( k.b, e.b );
-}
-function both( k, q ) {
- start( k, q );
- end( k, q );
-}
-
-f = db.ed_db_index7;
-f.drop();
-
-f.save( { a : 5 } )
-f.ensureIndex( { a: 1 } );
-index( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { a: 1 } ) );
-noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) );
-f.drop();
-
-f.ensureIndex( { a: 1, b: 1 } );
-assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] );
-assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] );
-assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] );
-assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] );
-assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c );
-assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c );
-
-start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
-start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
-end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
-end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
-
-start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-end( { a: "{", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
-start( { a: "az", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-end( { a: "a{", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
-both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { a: 1, b: 1 } ) );
-
-both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).hint( { a: 1, b: 1 } ) );
-both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
-f.drop();
-f.ensureIndex( { b: 1, a: 1 } );
-both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { b: 1, a: 1 } ) );
diff --git a/jstests/index8.js b/jstests/index8.js
deleted file mode 100644
index 719ad2dd2cb..00000000000
--- a/jstests/index8.js
+++ /dev/null
@@ -1,62 +0,0 @@
-// Test key uniqueness
-
-t = db.jstests_index8;
-t.drop();
-
-t.ensureIndex( { a: 1 } );
-t.ensureIndex( { b: 1 }, true );
-t.ensureIndex( { c: 1 }, [ false, "cIndex" ] );
-
-checkIndexes = function( num ) {
-// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() );
- indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } ).toArray();
- var start = 0;
- if ( indexes[0].name == "_id_" )
- start = 1;
- assert( !indexes[ start ].unique , "A" + num );
- assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) );
- assert( !indexes[ start + 2 ].unique , "C" + num );
- assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num );
-}
-
-checkIndexes( 1 );
-
-t.reIndex();
-checkIndexes( 2 );
-
-t.save( { a: 2, b: 1 } );
-t.save( { a: 2 } );
-assert.eq( 2, t.find().count() );
-
-t.save( { b: 4 } );
-t.save( { b: 4 } );
-assert.eq( 3, t.find().count() );
-assert.eq( 3, t.find().hint( {c:1} ).toArray().length );
-assert.eq( 3, t.find().hint( {b:1} ).toArray().length );
-assert.eq( 3, t.find().hint( {a:1} ).toArray().length );
-
-t.drop();
-t.ensureIndex( { a: 1, b: -1 }, true );
-t.save( { a: 2, b: 3 } );
-t.save( { a: 2, b: 3 } );
-t.save( { a: 2, b: 4 } );
-t.save( { a: 1, b: 3 } );
-assert.eq( 3, t.find().count() );
-
-t.drop();
-t.ensureIndex( { a: 1 }, true );
-t.save( { a: [ 2, 3 ] } );
-t.save( { a: 2 } );
-assert.eq( 1, t.find().count() );
-
-t.drop();
-t.ensureIndex( { a: 1 }, true );
-t.save( { a: 2 } );
-t.save( { a: [ 1, 2, 3 ] } );
-t.save( { a: [ 3, 2, 1 ] } );
-assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length );
-assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length );
-
-assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" );
-assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" );
-
diff --git a/jstests/index9.js b/jstests/index9.js
deleted file mode 100644
index 04b900949ec..00000000000
--- a/jstests/index9.js
+++ /dev/null
@@ -1,25 +0,0 @@
-t = db.jstests_index9;
-
-t.drop();
-db.createCollection( "jstests_index9" );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index with default collection" );
-t.drop();
-db.createCollection( "jstests_index9", {autoIndexId: true} );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index if autoIndexId: true" );
-
-t.drop();
-db.createCollection( "jstests_index9", {autoIndexId:false} );
-assert.eq( 0, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 0 index if autoIndexId: false" );
-t.createIndex( { _id:1 } );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
-t.createIndex( { _id:1 } );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
-
-t.drop();
-t.createIndex( { _id:1 } );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
-
-t.drop();
-t.save( {a:1} );
-t.createIndex( { _id:1 } );
-assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
diff --git a/jstests/indexOtherNamespace.js b/jstests/indexOtherNamespace.js
deleted file mode 100644
index 5bb7355ddb6..00000000000
--- a/jstests/indexOtherNamespace.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// SERVER-8814: Test that only the system.indexes namespace can be used to build indexes.
-
-function assertGLENotOK(status) {
- assert(status.ok && status.err !== null,
- "Expected not-OK status object; found " + tojson(status));
-}
-
-var otherDB = db.getSiblingDB("indexOtherNS");
-otherDB.dropDatabase();
-
-otherDB.foo.insert({a:1})
-assert.eq(1, otherDB.system.indexes.count());
-assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor);
-
-otherDB.randomNS.system.indexes.insert({ns:"indexOtherNS.foo", key:{a:1}, name:"a_1"});
-assertGLENotOK(otherDB.getLastErrorObj());
-// Assert that index didn't actually get built
-assert.eq(1, otherDB.system.indexes.count());
-assert.eq(null, otherDB.system.namespaces.findOne({name : "indexOtherNS.foo.$a_1"}));
-assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor);
-otherDB.dropDatabase();
diff --git a/jstests/indexStatsCommand.js b/jstests/indexStatsCommand.js
deleted file mode 100644
index 9c055e37e26..00000000000
--- a/jstests/indexStatsCommand.js
+++ /dev/null
@@ -1,88 +0,0 @@
-db.jstests_commands.drop();
-db.createCollection("jstests_commands");
-
-t = db.jstests_commands;
-
-for (var i = 0; i < 3000; ++i) {
- t.insert({i: i, d: i % 13});
-}
-
-function textWithIndexVersion(version) {
- var indexName = 'test_d_' + version;
- t.ensureIndex({d: 1}, {v: version, name: indexName});
-
- var result = t.indexStats({index: indexName});
- if (result["bad cmd"]) {
- print("storageDetails command not available: skipping");
- return;
- }
-
- assert.commandWorked(result);
-
- assert(result.index === indexName);
- assert(result.isIdIndex === false);
- assert(isObject(result.keyPattern));
- assert.neq(result.keyPattern, null);
- assert(isString(result.storageNs));
- assert(isNumber(result.bucketBodyBytes));
- assert.eq(result.depth, 1);
- assert(isObject(result.overall));
- assert.neq(result.overall, null);
-
- function checkStats(data) {
- assert(data.count instanceof NumberLong);
- assert(isNumber(data.mean));
- assert(isNumber(data.stddev));
- assert(isNumber(data.min));
- assert(isNumber(data.max));
- }
-
- function checkAreaStats(data) {
- assert(isNumber(data.numBuckets));
-
- assert(isObject(data.keyCount));
- assert.neq(data.keyCount, null);
- checkStats(data.keyCount);
-
- assert(isObject(data.usedKeyCount));
- assert.neq(data.usedKeyCount, null);
- checkStats(data.usedKeyCount);
-
- assert(isObject(data.bsonRatio));
- assert.neq(data.bsonRatio, null);
- checkStats(data.bsonRatio);
-
- assert(isObject(data.keyNodeRatio));
- assert.neq(data.keyNodeRatio, null);
- checkStats(data.keyNodeRatio);
-
- assert(isObject(data.fillRatio));
- assert.neq(data.fillRatio, null);
- checkStats(data.fillRatio);
- }
-
- assert(isObject(result.overall));
- checkAreaStats(result.overall);
-
- assert(result.perLevel instanceof Array);
- for (var i = 0; i < result.perLevel.length; ++i) {
- assert(isObject(result.perLevel[i]));
- checkAreaStats(result.perLevel[i]);
- }
-
- result = t.indexStats();
- assert.commandFailed(result);
- assert(result.errmsg.match(/index name is required/));
-
- result = t.indexStats({index: "nonexistent"})
- assert.commandFailed(result);
- assert(result.errmsg.match(/index does not exist/));
-
- result = t.indexStats({index: "_id_", expandNodes: ['string']})
- assert.commandFailed(result);
- assert(result.errmsg.match(/expandNodes.*numbers/));
-
- t.dropIndex(indexName);
-}
-
-[0, 1].map(textWithIndexVersion);
diff --git a/jstests/index_arr1.js b/jstests/index_arr1.js
deleted file mode 100644
index d35cb80a83f..00000000000
--- a/jstests/index_arr1.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-t = db.index_arr1
-t.drop()
-
-t.insert( { _id : 1 , a : 5 , b : [ { x : 1 } ] } )
-t.insert( { _id : 2 , a : 5 , b : [] } )
-t.insert( { _id : 3 , a : 5 } )
-
-assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A1" )
-
-t.ensureIndex( { a : 1 , "b.x" : 1 } )
-
-//t.find().sort( { a : 1 } )._addSpecial( "$returnKey" , 1 ).forEach( printjson )
-//t.find( { a : 5 } ).forEach( printjson )
-
-assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A2" ); // SERVER-1082
-
-
-assert.eq( 2 , t.getIndexes().length , "B1" )
-t.insert( { _id : 4 , a : 5 , b : [] } )
-t.ensureIndex( { a : 1 , "b.a" : 1 , "b.c" : 1 } )
-assert.eq( 3 , t.getIndexes().length , "B2" )
-
diff --git a/jstests/index_arr2.js b/jstests/index_arr2.js
deleted file mode 100644
index 321bed8ad03..00000000000
--- a/jstests/index_arr2.js
+++ /dev/null
@@ -1,51 +0,0 @@
-NUM = 20;
-M = 5;
-
-t = db.jstests_arr2;
-
-function test( withIndex ){
- t.drop();
-
- // insert a bunch of items to force queries to use the index.
- newObject = {
- _id : 1,
- a : [
- { b : { c : 1 } }
- ]
- }
-
- now = (new Date()).getTime() / 1000;
- for (created = now - NUM; created <= now; created++ ) {
- newObject['created'] = created;
- t.insert(newObject);
- newObject['_id'] ++;
- }
-
- // change the last M items.
- query = {
- 'created' : { '$gte' : now - M }
- }
-
- Z = t.find( query ).count();
-
- if ( withIndex ){
- //t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } )
- //t.ensureIndex( { created : -1 } )
- t.ensureIndex( { 'a.b.c' : 1 } , { name : "x" } )
- }
-
- t.update(query, { '$set' : { "a.0.b.c" : 0 } } , false , true )
- assert.eq( Z , db.getLastErrorObj().n , "num updated withIndex:" + withIndex );
-
- // now see how many were actually updated.
- query['a.b.c'] = 0;
-
- count = t.count(query);
-
- assert.eq( Z , count , "count after withIndex:" + withIndex );
-}
-
-test( false )
-test( true );
-
-
diff --git a/jstests/index_big1.js b/jstests/index_big1.js
deleted file mode 100644
index 3e53692a2f6..00000000000
--- a/jstests/index_big1.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// check where "key to big" happens
-
-t = db.index_big1;
-
-N = 3200;
-t.drop();
-
-var s = "";
-
-t.ensureIndex( { a : 1 , x : 1 } )
-
-for ( i=0; i<N; i++ ) {
- t.insert( { a : i + .5 , x : s } )
- s += "x";
-}
-
-assert.eq( 2 , t.getIndexes().length );
-
-flip = -1;
-
-for ( i=0; i<N; i++ ) {
- var c = t.find( { a : i + .5 } ).count();
- if ( c == 1 ) {
- assert.eq( -1 , flip , "flipping : " + i );
- }
- else {
- if ( flip == -1 ) {
- flip = i;
- }
- }
-}
-
-//print(flip);
-//print(flip/1024);
-
-assert.eq( /*v0 index : 797*/1002, flip , "flip changed" );
diff --git a/jstests/index_bigkeys.js b/jstests/index_bigkeys.js
deleted file mode 100755
index b0ea66d65f8..00000000000
--- a/jstests/index_bigkeys.js
+++ /dev/null
@@ -1,59 +0,0 @@
-
-t = db.bigkeysidxtest;
-
-var keys = []
-
-var str = "aaaabbbbccccddddeeeeffffgggghhhh";
-
-while ( str.length < 20000 ) {
- keys.push( str );
- str = str + str;
-}
-
-function doInsert( order ) {
- if (order == 1) {
- for (var i = 0; i < 10; i++) {
- t.insert({ _id: i, k: keys[i] });
- }
- }
- else {
- for (var i = 9; i >= 0; i--) {
- t.insert({ _id: i, k: keys[i] });
- }
- }
-}
-
-var expect = null;
-
-function check() {
- assert(t.validate().valid);
- assert.eq( 5, t.count() );
-
- var c = t.find({ k: /^a/ }).count();
- assert.eq( 5, c );
-}
-
-function runTest( order ) {
- t.drop();
- t.ensureIndex({ k: 1 });
- doInsert( order );
- check(); // check incremental addition
-
- t.reIndex();
- check(); // check bottom up
-
- t.drop();
- doInsert( order );
- assert.eq( 1, t.getIndexes().length );
- t.ensureIndex({ k: 1 });
- assert.eq( 1, t.getIndexes().length );
-
- t.drop();
- doInsert( order );
- assert.eq( 1, t.getIndexes().length );
- t.ensureIndex({ k: 1 }, { background: true });
- assert.eq( 1, t.getIndexes().length );
-}
-
-runTest( 1 );
-runTest( 2 );
diff --git a/jstests/index_bigkeys_update.js b/jstests/index_bigkeys_update.js
deleted file mode 100644
index dd428b5fd4b..00000000000
--- a/jstests/index_bigkeys_update.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-bigString = "";
-while ( bigString.length < 16000 )
- bigString += ".";
-
-t = db.index_bigkeys_update;
-t.drop();
-
-t.insert( { _id : 0, x : "asd" } );
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 1, t.count() );
-
-t.update( {} , { $set : { x : bigString } } );
-err = db.getLastErrorObj();
-assert( err.err, err );
-
-assert.eq( 1, t.count() );
-assert.eq( "asd", t.findOne().x ); // make sure doc is the old version
-assert.eq( "asd", t.findOne( { _id : 0 } ).x ); // make sure doc is the old version
diff --git a/jstests/index_bounds_number_edge_cases.js b/jstests/index_bounds_number_edge_cases.js
deleted file mode 100644
index 0ab482028ed..00000000000
--- a/jstests/index_bounds_number_edge_cases.js
+++ /dev/null
@@ -1,50 +0,0 @@
-// end-to-end tests on index bounds for numerical values
-// should handle numerical extremes
-// such as Number.MAX_VALUE and Infinity
-
-t = db.indexboundsnumberedgecases;
-
-t.drop();
-
-t.ensureIndex({a: 1});
-
-t.save({a: -Infinity});
-t.save({a: -Number.MAX_VALUE});
-t.save({a: 1});
-t.save({a: Number.MAX_VALUE});
-t.save({a: Infinity});
-
-// index bounds generated by query planner are
-// validated in unit tests
-
-// lte
-
-assert.eq(1, t.find({a: {$lte: -Infinity}}).itcount());
-assert.eq(2, t.find({a: {$lte: -Number.MAX_VALUE}}).itcount());
-assert.eq(3, t.find({a: {$lte: 1}}).itcount());
-assert.eq(4, t.find({a: {$lte: Number.MAX_VALUE}}).itcount());
-assert.eq(5, t.find({a: {$lte: Infinity}}).itcount());
-
-// lt
-
-assert.eq(0, t.find({a: {$lt: -Infinity}}).itcount());
-assert.eq(1, t.find({a: {$lt: -Number.MAX_VALUE}}).itcount());
-assert.eq(2, t.find({a: {$lt: 1}}).itcount());
-assert.eq(3, t.find({a: {$lt: Number.MAX_VALUE}}).itcount());
-assert.eq(4, t.find({a: {$lt: Infinity}}).itcount());
-
-// gt
-
-assert.eq(0, t.find({a: {$gt: Infinity}}).itcount());
-assert.eq(1, t.find({a: {$gt: Number.MAX_VALUE}}).itcount());
-assert.eq(2, t.find({a: {$gt: 1}}).itcount());
-assert.eq(3, t.find({a: {$gt: -Number.MAX_VALUE}}).itcount());
-assert.eq(4, t.find({a: {$gt: -Infinity}}).itcount());
-
-// gte
-
-assert.eq(1, t.find({a: {$gte: Infinity}}).itcount());
-assert.eq(2, t.find({a: {$gte: Number.MAX_VALUE}}).itcount());
-assert.eq(3, t.find({a: {$gte: 1}}).itcount());
-assert.eq(4, t.find({a: {$gte: -Number.MAX_VALUE}}).itcount());
-assert.eq(5, t.find({a: {$gte: -Infinity}}).itcount());
diff --git a/jstests/index_check1.js b/jstests/index_check1.js
deleted file mode 100644
index 7113dff0877..00000000000
--- a/jstests/index_check1.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-db.somecollection.drop();
-
-assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 1);
-
-db.somecollection.save({a:1});
-
-assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 2);
-
-db.somecollection.ensureIndex({a:1});
-
-var z = db.system.namespaces.find({name:/somecollection/}).length();
-assert( z >= 1 , 3 );
-
-if( z == 1 )
- print("warning: z==1, should only happen with alternate storage engines");
-
-db.somecollection.drop();
-
-assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 4);
-
-db.somecollection.save({a:1});
-
-assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 5);
-
-db.somecollection.ensureIndex({a:1});
-
-var x = db.system.namespaces.find({name:/somecollection/}).length();
-assert( x == 2 || x == z, 6);
-
-assert(db.somecollection.validate().valid, 7);
diff --git a/jstests/index_check2.js b/jstests/index_check2.js
deleted file mode 100644
index eed3b8e42b7..00000000000
--- a/jstests/index_check2.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-t = db.index_check2;
-t.drop();
-
-for ( var i=0; i<1000; i++ ){
- var a = [];
- for ( var j=1; j<5; j++ ){
- a.push( "tag" + ( i * j % 50 ));
- }
- t.save( { num : i , tags : a } );
-}
-
-q1 = { tags : "tag6" };
-q2 = { tags : "tag12" };
-q3 = { tags : { $all : [ "tag6" , "tag12" ] } }
-
-assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
-assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
-assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
-
-t.ensureIndex( { tags : 1 } );
-
-assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
-assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
-assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
-
-assert.eq( "BtreeCursor tags_1" , t.find( q1 ).explain().cursor , "e1" );
-assert.eq( "BtreeCursor tags_1" , t.find( q2 ).explain().cursor , "e2" );
-assert.eq( "BtreeCursor tags_1" , t.find( q3 ).explain().cursor , "e3" );
-
-scanned1 = t.find(q1).explain().nscanned;
-scanned2 = t.find(q2).explain().nscanned;
-scanned3 = t.find(q3).explain().nscanned;
-
-//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 );
-
-// $all should just iterate either of the words
-assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" );
-
-exp3 = t.find( q3 ).explain();
-assert.eq( exp3.indexBounds.tags[0][0], exp3.indexBounds.tags[0][1], "$all range not a single key" );
diff --git a/jstests/index_check3.js b/jstests/index_check3.js
deleted file mode 100644
index 55515aff3f5..00000000000
--- a/jstests/index_check3.js
+++ /dev/null
@@ -1,63 +0,0 @@
-
-
-t = db.index_check3;
-t.drop();
-
-
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
-t.save( { a : "z" } );
-
-assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" );
-assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" );
-
-t.ensureIndex( { a : 1 } );
-
-assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" );
-assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" );
-
-t.drop();
-
-for ( var i=0; i<100; i++ ){
- var o = { i : i };
- if ( i % 2 == 0 )
- o.foo = i;
- t.save( o );
-}
-
-t.ensureIndex( { foo : 1 } );
-
-//printjson( t.find( { foo : { $lt : 50 } } ).explain() );
-assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" );
-//printjson( t.find( { foo : { $gt : 50 } } ).explain() );
-assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" );
-
-
-t.drop();
-t.save( {i:'a'} );
-for( var i=0; i < 10; ++i ) {
- t.save( {} );
-}
-
-t.ensureIndex( { i : 1 } );
-
-//printjson( t.find( { i : { $lte : 'a' } } ).explain() );
-assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" );
-//printjson( t.find( { i : { $gte : 'a' } } ).explain() );
-// bug SERVER-99
-assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).count() , "gte a" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" );
-
-t.save( { i : "b" } );
-
-assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" );
diff --git a/jstests/index_check5.js b/jstests/index_check5.js
deleted file mode 100644
index eabb929749f..00000000000
--- a/jstests/index_check5.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.index_check5
-t.drop();
-
-t.save( { "name" : "Player1" ,
- "scores" : [{"level" : 1 , "score" : 100},
- {"level" : 2 , "score" : 50}],
- "total" : 150 } );
-t.save( { "name" : "Player2" ,
- "total" : 90 ,
- "scores" : [ {"level" : 1 , "score" : 90},
- {"level" : 2 , "score" : 0} ]
- } );
-
-assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" );
-t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } );
-assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" );
diff --git a/jstests/index_check6.js b/jstests/index_check6.js
deleted file mode 100644
index be395fb3d2e..00000000000
--- a/jstests/index_check6.js
+++ /dev/null
@@ -1,82 +0,0 @@
-
-t = db.index_check6;
-t.drop();
-
-t.ensureIndex( { age : 1 , rating : 1 } );
-
-for ( var age=10; age<50; age++ ){
- for ( var rating=0; rating<10; rating++ ){
- t.save( { age : age , rating : rating } );
- }
-}
-
-assert.eq( 10 , t.find( { age : 30 } ).explain().nscanned , "A" );
-assert.eq( 20 , t.find( { age : { $gte : 29 , $lte : 30 } } ).explain().nscanned , "B" );
-assert.eq( 18 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,9] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C1" );
-assert.eq( 23 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C2" );
-assert.eq( 28 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [1,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C3" );
-
-assert.eq( 4 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : 5 } ).hint( {age:1,rating:1} ).explain().nscanned , "C" ); // SERVER-371
-assert.eq( 6 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } } ).hint( {age:1,rating:1} ).explain().nscanned , "D" ); // SERVER-371
-
-assert.eq.automsg( "2", "t.find( { age:30, rating:{ $gte:4, $lte:5} } ).explain().nscanned" );
-
-t.drop();
-
-for ( var a=1; a<10; a++ ){
- for ( var b=0; b<10; b++ ){
- for ( var c=0; c<10; c++ ) {
- t.save( { a:a, b:b, c:c } );
- }
- }
-}
-
-function doQuery( count, query, sort, index ) {
- var nscanned = t.find( query ).hint( index ).sort( sort ).explain().nscanned;
- assert(Math.abs(count - nscanned) <= 2);
-}
-
-function doTest( sort, index ) {
- doQuery( 1, { a:5, b:5, c:5 }, sort, index );
- doQuery( 2, { a:5, b:5, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 1, { a:5, b:5, c:{$gte:5.5,$lte:6} }, sort, index );
- doQuery( 1, { a:5, b:5, c:{$gte:5,$lte:5.5} }, sort, index );
- doQuery( 3, { a:5, b:5, c:{$gte:5,$lte:7} }, sort, index );
- doQuery( 4, { a:5, b:{$gte:5,$lte:6}, c:5 }, sort, index );
- if ( sort.b > 0 ) {
- doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
- doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
- } else {
- doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
- doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
- }
- doQuery( 7, { a:5, b:{$gte:5,$lte:7}, c:5 }, sort, index );
- doQuery( 4, { a:{$gte:5,$lte:6}, b:5, c:5 }, sort, index );
- if ( sort.a > 0 ) {
- doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
- doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
- doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
- } else {
- doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
- doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
- doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
- }
- doQuery( 7, { a:{$gte:5,$lte:7}, b:5, c:5 }, sort, index );
- doQuery( 6, { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 6, { a:5, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 10, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:5 }, sort, index );
- doQuery( 14, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
-}
-
-for ( var a = -1; a <= 1; a += 2 ) {
- for( var b = -1; b <= 1; b += 2 ) {
- for( var c = -1; c <= 1; c += 2 ) {
- t.dropIndexes();
- var spec = {a:a,b:b,c:c};
- t.ensureIndex( spec );
- doTest( spec, spec );
- doTest( {a:-a,b:-b,c:-c}, spec );
- }
- }
-}
-
diff --git a/jstests/index_check7.js b/jstests/index_check7.js
deleted file mode 100644
index 1d0aaebba35..00000000000
--- a/jstests/index_check7.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.index_check7
-t.drop()
-
-for ( var i=0; i<100; i++ )
- t.save( { x : i } )
-
-t.ensureIndex( { x : 1 } )
-assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" )
-
-t.ensureIndex( { x : -1 } )
-assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "B" )
-
-assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" );
-
diff --git a/jstests/index_check8.js b/jstests/index_check8.js
deleted file mode 100644
index 1964ecbe7fc..00000000000
--- a/jstests/index_check8.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.index_check8
-t.drop();
-
-t.insert( { a : 1 , b : 1 , c : 1 , d : 1 , e : 1 } )
-t.ensureIndex( { a : 1 , b : 1 , c : 1 } )
-t.ensureIndex({ a: 1, b: 1, d: 1, e: 1 })
-
-// this block could be added to many tests in theory...
-if ((new Date()) % 10 == 0) {
- var coll = t.toString().substring(db.toString().length + 1);
- print("compacting " + coll + " before continuing testing");
- // don't check return code - false for mongos
- print("ok: " + db.runCommand({ compact: coll, dev: true }));
-}
-
-x = t.find( { a : 1 , b : 1 , d : 1 } ).sort( { e : 1 } ).explain()
-assert( ! x.scanAndOrder , "A : " + tojson( x ) )
-
-x = t.find( { a : 1 , b : 1 , c : 1 , d : 1 } ).sort( { e : 1 } ).explain()
-//assert( ! x.scanAndOrder , "B : " + tojson( x ) )
diff --git a/jstests/index_diag.js b/jstests/index_diag.js
deleted file mode 100644
index 21840682e7f..00000000000
--- a/jstests/index_diag.js
+++ /dev/null
@@ -1,50 +0,0 @@
-
-t = db.index_diag
-t.drop();
-
-t.ensureIndex( { x : 1 } );
-
-all = []
-ids = []
-xs = []
-
-function r( a ){
- var n = []
- for ( var x=a.length-1; x>=0; x-- )
- n.push( a[x] );
- return n;
-}
-
-for ( i=1; i<4; i++ ){
- o = { _id : i , x : -i }
- t.insert( o );
- all.push( o );
- ids.push( { _id : i } );
- xs.push( { x : -i } );
-}
-
-assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" );
-assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" );
-
-assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" );
-assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" );
-
-assert.eq( ids , t.find().sort( { _id : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B1" )
-assert.eq( r( ids ) , t.find().sort( { _id : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B2" )
-assert.eq( xs , t.find().sort( { x : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B3" )
-assert.eq( r( xs ) , t.find().sort( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" )
-
-assert.eq( r( xs ) , t.find().hint( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" )
-
-// SERVER-4981
-t.ensureIndex( { _id : 1 , x : 1 } );
-assert.eq( all ,
- t.find().hint( { _id : 1 , x : 1 } )._addSpecial( "$returnKey" , true ).toArray()
- )
-assert.eq( r( all ) ,
- t.find().hint( { _id : 1 , x : 1 } ).sort( { x : 1 } )
- ._addSpecial( "$returnKey" , true ).toArray()
- )
-
-assert.eq( [ {} , {} , {} ],
- t.find().hint( { $natural : 1 } )._addSpecial( "$returnKey" , true ).toArray() )
diff --git a/jstests/index_elemmatch1.js b/jstests/index_elemmatch1.js
deleted file mode 100644
index 9170ce66018..00000000000
--- a/jstests/index_elemmatch1.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-t = db.index_elemmatch1
-t.drop()
-
-x = 0
-y = 0
-for ( a=0; a<100; a++ ){
- for ( b=0; b<100; b++ ){
- t.insert( { a : a , b : b % 10 , arr : [ { x : x++ % 10 , y : y++ % 10 } ] } )
- }
-}
-
-t.ensureIndex( { a : 1 , b : 1 } )
-t.ensureIndex( { "arr.x" : 1 , a : 1 } )
-
-assert.eq( 100 , t.find( { a : 55 } ).itcount() , "A1" );
-assert.eq( 10 , t.find( { a : 55 , b : 7 } ).itcount() , "A2" );
-
-q = { a : 55 , b : { $in : [ 1 , 5 , 8 ] } }
-assert.eq( 30 , t.find( q ).itcount() , "A3" )
-
-q.arr = { $elemMatch : { x : 5 , y : 5 } }
-assert.eq( 10 , t.find( q ).itcount() , "A4" )
-
-function nscannedForCursor( explain, cursor ) {
- plans = explain.allPlans;
- for( i in plans ) {
- if ( plans[ i ].cursor == cursor ) {
- return plans[ i ].nscanned;
- }
- }
- return -1;
-}
-
-assert.eq( t.find(q).itcount(),
- nscannedForCursor( t.find(q).explain(true), 'BtreeCursor arr.x_1_a_1' ), "A5" );
-
-printjson(t.find(q).explain());
-print("Num results:");
-assert.eq(10, t.find(q).itcount());
-printjson(t.find(q).itcount());
diff --git a/jstests/index_filter_commands.js b/jstests/index_filter_commands.js
deleted file mode 100644
index c10fd3da0ca..00000000000
--- a/jstests/index_filter_commands.js
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Index Filter commands
- *
- * Commands:
- * - planCacheListFilters
- * Displays index filters for all query shapes in a collection.
- *
- * - planCacheClearFilters
- * Clears index filter for a single query shape or,
- * if the query shape is omitted, all filters for the collection.
- *
- * - planCacheSetFilter
- * Sets index filter for a query shape. Overrides existing filter.
- *
- * Not a lot of data access in this test suite. Hint commands
- * manage a non-persistent mapping in the server of
- * query shape to list of index specs.
- *
- * Only time we might need to execute a query is to check the plan
- * cache state. We would do this with the planCacheListPlans command
- * on the same query shape with the index filters.
- *
- */
-
-var t = db.jstests_index_filter_commands;
-
-t.drop();
-
-t.save({a: 1});
-
-// Add 2 indexes.
-// 1st index is more efficient.
-// 2nd and 3rd indexes will be used to test index filters.
-var indexA1 = {a: 1};
-var indexA1B1 = {a: 1, b: 1};
-var indexA1C1 = {a: 1, c: 1};
-t.ensureIndex(indexA1);
-t.ensureIndex(indexA1B1);
-t.ensureIndex(indexA1C1);
-
-var queryA1 = {a: 1};
-var projectionA1 = {_id: 0, a: 1};
-var sortA1 = {a: -1};
-
-//
-// Tests for planCacheListFilters, planCacheClearFilters, planCacheSetFilter
-//
-
-// Utility function to list index filters.
-function getFilters(collection) {
- if (collection == undefined) {
- collection = t;
- }
- var res = collection.runCommand('planCacheListFilters');
- print('planCacheListFilters() = ' + tojson(res));
- assert.commandWorked(res, 'planCacheListFilters failed');
- assert(res.hasOwnProperty('filters'), 'filters missing from planCacheListFilters result');
- return res.filters;
-
-}
-
-// If query shape is in plan cache,
-// planCacheListPlans returns non-empty array of plans.
-function planCacheContains(shape) {
- var res = t.runCommand('planCacheListPlans', shape);
- assert.commandWorked(res);
- return res.plans.length > 0;
-}
-
-// Utility function to list plans for a query.
-function getPlans(shape) {
- var res = t.runCommand('planCacheListPlans', shape);
- assert.commandWorked(res, 'planCacheListPlans(' + tojson(shape, '', true) + ' failed');
- assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
- tojson(shape, '', true) + ') result');
- return res.plans;
-}
-
-// Attempting to retrieve index filters on a non-existent collection
-// will return empty results.
-var missingCollection = db.jstests_index_filter_commands_missing;
-missingCollection.drop();
-assert.eq(0, getFilters(missingCollection),
- 'planCacheListFilters should return empty array on non-existent collection');
-
-// Retrieve index filters from an empty test collection.
-var filters = getFilters();
-assert.eq(0, filters.length, 'unexpected number of index filters in planCacheListFilters result');
-
-// Check details of winning plan in plan cache before setting index filter.
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-var shape = {query: queryA1, sort: sortA1, projection: projectionA1};
-var planBeforeSetFilter = getPlans(shape)[0];
-print('Winning plan (before setting index filters) = ' + tojson(planBeforeSetFilter));
-// Check filterSet field in plan details
-assert.eq(false, planBeforeSetFilter.filterSet, 'missing or invalid filterSet field in plan details');
-
-// Adding index filters to a non-existent collection should be an error.
-assert.commandFailed(missingCollection.runCommand('planCacheSetFilter',
- {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
-
-// Add index filters for simple query.
-assert.commandWorked(t.runCommand('planCacheSetFilter',
- {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
-filters = getFilters();
-assert.eq(1, filters.length, 'no change in query settings after successfully setting index filters');
-assert.eq(queryA1, filters[0].query, 'unexpected query in filters');
-assert.eq(sortA1, filters[0].sort, 'unexpected sort in filters');
-assert.eq(projectionA1, filters[0].projection, 'unexpected projection in filters');
-assert.eq(2, filters[0].indexes.length, 'unexpected number of indexes in filters');
-assert.eq(indexA1B1, filters[0].indexes[0], 'unexpected first index');
-assert.eq(indexA1C1, filters[0].indexes[1], 'unexpected first index');
-
-// Plans for query shape should be removed after setting index filter.
-assert(!planCacheContains(shape), 'plan cache for query shape not flushed after updating filter');
-
-// Check details of winning plan in plan cache after setting filter and re-executing query.
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-planAfterSetFilter = getPlans(shape)[0];
-print('Winning plan (after setting index filter) = ' + tojson(planAfterSetFilter));
-// Check filterSet field in plan details
-assert.eq(true, planAfterSetFilter.filterSet, 'missing or invalid filterSet field in plan details');
-
-// Execute query with cursor.hint(). Check that user-provided hint is overridden.
-// Applying the index filters will remove the user requested index from the list
-// of indexes provided to the planner.
-// If the planner still tries to use the user hint, we will get a 'bad hint' error.
-t.find(queryA1, projectionA1).sort(sortA1).hint(indexA1).itcount();
-
-// Clear filters
-// Clearing filters on a missing collection should be a no-op.
-assert.commandWorked(missingCollection.runCommand('planCacheClearFilters'));
-// Clear the filters set earlier.
-assert.commandWorked(t.runCommand('planCacheClearFilters'));
-filters = getFilters();
-assert.eq(0, filters.length, 'filters not cleared after successful planCacheClearFilters command');
-
-// Plans should be removed after clearing filters
-assert(!planCacheContains(shape), 'plan cache for query shape not flushed after clearing filters');
-
-print('Plan details before setting filter = ' + tojson(planBeforeSetFilter.details, '', true));
-print('Plan details after setting filter = ' + tojson(planAfterSetFilter.details, '', true));
-
-//
-// explain.filterSet
-// cursor.explain() should indicate if index filter has been applied.
-// The following 3 runners should always provide a value for 'filterSet':
-// - SingleSolutionRunner
-// - MultiPlanRunner
-// - CachedPlanRuner
-//
-
-// No filter set.
-
-t.getPlanCache().clear();
-// SingleSolutionRunner
-assert.eq(false, t.find({z: 1}).explain().filterSet,
- 'missing or invalid filterSet field in SingleSolutionRunner explain');
-// MultiPlanRunner
-assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
- 'missing or invalid filterSet field in MultiPlanRunner explain');
-// CachedPlanRunner
-assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
- 'missing or invalid filterSet field in CachedPlanRunner explain');
-
-// Add index filter.
-assert.commandWorked(t.runCommand('planCacheSetFilter',
- {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
-// Index filter with non-existent index key pattern to force use of single solution runner.
-assert.commandWorked(t.runCommand('planCacheSetFilter', {query: {z: 1}, indexes: [{z: 1}]}));
-
-t.getPlanCache().clear();
-// SingleSolutionRunner
-assert.eq(true, t.find({z: 1}).explain().filterSet,
- 'missing or invalid filterSet field in SingleSolutionRunner explain');
-// MultiPlanRunner
-assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
- 'missing or invalid filterSet field in MultiPlanRunner explain');
-// CachedPlanRunner
-assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
- 'missing or invalid filterSet field in CachedPlanRunner explain');
diff --git a/jstests/index_many.js b/jstests/index_many.js
deleted file mode 100644
index 46705a20470..00000000000
--- a/jstests/index_many.js
+++ /dev/null
@@ -1,51 +0,0 @@
-/* test using lots of indexes on one collection */
-
-t = db.many;
-
-function f() {
-
- t.drop();
- db.many2.drop();
-
- t.save({ x: 9, y : 99 });
- t.save({ x: 19, y : 99 });
-
- x = 2;
- while (x < 70) {
- patt = {};
- patt[x] = 1;
- if (x == 20)
- patt = { x: 1 };
- if (x == 64)
- patt = { y: 1 };
- t.ensureIndex(patt);
- x++;
- }
-
- // print( tojson(db.getLastErrorObj()) );
- assert(db.getLastError(), "should have got an error 'too many indexes'");
-
- // 40 is the limit currently
- lim = t.getIndexes().length;
- if (lim != 64) {
- print("# of indexes should be 64 but is : " + lim);
- return;
- }
- assert(lim == 64, "not 64 indexes");
-
- assert(t.find({ x: 9 }).length() == 1, "b");
- assert(t.find({ x: 9 }).explain().cursor.match(/Btree/), "not using index?");
-
- assert(t.find({ y: 99 }).length() == 2, "y idx");
- assert(t.find({ y: 99 }).explain().cursor.match(/Btree/), "not using y index?");
-
- /* check that renamecollection remaps all the indexes right */
- assert(t.renameCollection("many2").ok, "rename failed");
- assert(t.find({ x: 9 }).length() == 0, "many2a");
- assert(db.many2.find({ x: 9 }).length() == 1, "many2b");
- assert(t.find({ y: 99 }).length() == 0, "many2c");
- assert(db.many2.find({ y: 99 }).length() == 2, "many2d");
-
-}
-
-f();
diff --git a/jstests/index_many2.js b/jstests/index_many2.js
deleted file mode 100644
index f113b8b87ed..00000000000
--- a/jstests/index_many2.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-t = db.index_many2;
-t.drop()
-
-t.save( { x : 1 } )
-
-assert.eq( 1 , t.getIndexKeys().length , "A1" )
-
-function make( n ){
- var x = {}
- x["x"+n] = 1;
- return x;
-}
-
-for ( i=1; i<1000; i++ ){
- t.ensureIndex( make(i) );
-}
-
-assert.eq( 64 , t.getIndexKeys().length , "A2" )
-
-
-num = t.getIndexKeys().length
-
-t.dropIndex( make(num-1) )
-assert.eq( num - 1 , t.getIndexKeys().length , "B0" )
-
-t.ensureIndex( { z : 1 } )
-assert.eq( num , t.getIndexKeys().length , "B1" )
-
-t.dropIndex( "*" );
-assert.eq( 1 , t.getIndexKeys().length , "C1" )
diff --git a/jstests/index_sparse1.js b/jstests/index_sparse1.js
deleted file mode 100644
index eab3c7fec95..00000000000
--- a/jstests/index_sparse1.js
+++ /dev/null
@@ -1,46 +0,0 @@
-
-t = db.index_sparse1;
-t.drop();
-
-t.insert( { _id : 1 , x : 1 } )
-t.insert( { _id : 2 , x : 2 } )
-t.insert( { _id : 3 , x : 2 } )
-t.insert( { _id : 4 } )
-t.insert( { _id : 5 } )
-
-assert.eq( 5 , t.count() , "A1" )
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "A2" )
-
-t.ensureIndex( { x : 1 } )
-assert.eq( 2 , t.getIndexes().length , "B1" )
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "B2" )
-t.dropIndex( { x : 1 } )
-assert.eq( 1 , t.getIndexes().length , "B3" )
-
-t.ensureIndex( { x : 1 } , { sparse : 1 } )
-assert.eq( 2 , t.getIndexes().length , "C1" )
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "C2" )
-t.dropIndex( { x : 1 } )
-assert.eq( 1 , t.getIndexes().length , "C3" )
-
-// -- sparse & unique
-
-t.remove( { _id : 2 } )
-
-// test that we can't create a unique index without sparse
-t.ensureIndex( { x : 1 } , { unique : 1 } )
-assert( db.getLastError() , "D1" )
-assert.eq( 1 , t.getIndexes().length , "D2" )
-
-
-t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } )
-assert.eq( 2 , t.getIndexes().length , "E1" )
-t.dropIndex( { x : 1 } )
-assert.eq( 1 , t.getIndexes().length , "E3" )
-
-
-t.insert( { _id : 2 , x : 2 } )
-t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } )
-assert.eq( 1 , t.getIndexes().length , "F1" )
-
-
diff --git a/jstests/index_sparse2.js b/jstests/index_sparse2.js
deleted file mode 100644
index 56a59db3711..00000000000
--- a/jstests/index_sparse2.js
+++ /dev/null
@@ -1,23 +0,0 @@
-t = db.index_sparse2;
-t.drop();
-
-t.insert( { _id : 1 , x : 1 , y : 1 } )
-t.insert( { _id : 2 , x : 2 } )
-t.insert( { _id : 3 } )
-
-t.ensureIndex( { x : 1 , y : 1 } )
-assert.eq( 2 , t.getIndexes().length , "A1" )
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "A2 count()" )
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "A2 itcount()" )
-t.dropIndex( { x : 1 , y : 1 } )
-assert.eq( 1 , t.getIndexes().length , "A3" )
-
-t.ensureIndex( { x : 1 , y : 1 } , { sparse : 1 } )
-assert.eq( 2 , t.getIndexes().length , "B1" )
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "B2 count()" )
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "B2 itcount()" )
-t.dropIndex( { x : 1 , y : 1 } )
-assert.eq( 1 , t.getIndexes().length , "B3" )
-
-
-
diff --git a/jstests/indexa.js b/jstests/indexa.js
deleted file mode 100644
index 7602183adb2..00000000000
--- a/jstests/indexa.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// unique index constraint test for updates
-// case where object doesn't grow tested here
-
-t = db.indexa;
-t.drop();
-
-t.ensureIndex( { x:1 }, true );
-
-t.insert( { 'x':'A' } );
-t.insert( { 'x':'B' } );
-t.insert( { 'x':'A' } );
-
-assert.eq( 2 , t.count() , "indexa 1" );
-
-t.update( {x:'B'}, { x:'A' } );
-
-a = t.find().toArray();
-u = Array.unique( a.map( function(z){ return z.x } ) );
-assert.eq( 2 , t.count() , "indexa 2" );
-
-assert( a.length == u.length , "unique index update is broken" );
-
diff --git a/jstests/indexapi.js b/jstests/indexapi.js
deleted file mode 100644
index 7bc5d45acd3..00000000000
--- a/jstests/indexapi.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-t = db.indexapi;
-t.drop();
-
-key = { x : 1 };
-
-c = { ns : t._fullName , key : key , name : t._genIndexName( key ) };
-assert.eq( c , t._indexSpec( { x : 1 } ) , "A" );
-
-c.name = "bob";
-assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" );
-
-c.name = t._genIndexName( key );
-assert.eq( c , t._indexSpec( { x : 1 } ) , "C" );
-
-c.unique = true;
-assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" );
-assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" );
-assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" );
-
-c.dropDups = true;
-assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" );
-assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "F" );
-
-t.ensureIndex( { x : 1 } , { unique : true } );
-idx = t.getIndexes();
-assert.eq( 2 , idx.length , "M1" );
-assert.eq( key , idx[1].key , "M2" );
-assert( idx[1].unique , "M3" );
-
-t.drop();
-t.ensureIndex( { x : 1 } , { unique : 1 } );
-idx = t.getIndexes();
-assert.eq( 2 , idx.length , "M1" );
-assert.eq( key , idx[1].key , "M2" );
-assert( idx[1].unique , "M3" );
-//printjson( idx );
-
-db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } );
-assert( db.getLastError() != null , "Z1" );
diff --git a/jstests/indexb.js b/jstests/indexb.js
deleted file mode 100644
index d7d2e8c9f05..00000000000
--- a/jstests/indexb.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// unique index test for a case where the object grows
-// and must move
-
-// see indexa.js for the test case for an update with dup id check
-// when it doesn't move
-
-
-t = db.indexb;
-t.drop();
-t.ensureIndex({a:1},true);
-
-t.insert({a:1});
-
-x = { a : 2 };
-t.save(x);
-
-{
-
- assert( t.count() == 2, "count wrong B");
-
- x.a = 1;
- x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
- t.save(x); // should fail, not unique.
-
- assert( t.count() == 2,"count wrong" );
- assert( t.find({a:1}).count() == 1,"bfail1" );
- assert( t.find({a:2}).count() == 1,"bfail2" );
-
-}
diff --git a/jstests/indexc.js b/jstests/indexc.js
deleted file mode 100644
index b099e2d2823..00000000000
--- a/jstests/indexc.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.indexc;
-t.drop();
-
-for ( var i=1; i<100; i++ ){
- var d = new Date( ( new Date() ).getTime() + i );
- t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } );
- if ( i == 51 )
- mid = d;
-}
-
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" );
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" );
-
-t.ensureIndex( { ts : 1 , cats : 1 } );
-t.ensureIndex( { cats : 1 } );
-
-// multi-key bug was firing here (related to getsetdup()):
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" );
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" );
diff --git a/jstests/indexd.js b/jstests/indexd.js
deleted file mode 100644
index 33246ad9812..00000000000
--- a/jstests/indexd.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.indexd;
-t.drop();
-
-t.save( { a : 1 } );
-t.ensureIndex( { a : 1 } );
-assert.throws( function(){ db.indexd.$_id_.drop(); } );
-assert( t.drop() );
-
-//db.indexd.$_id_.remove({});
diff --git a/jstests/indexe.js b/jstests/indexe.js
deleted file mode 100644
index 213f7c74cf0..00000000000
--- a/jstests/indexe.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.indexe;
-t.drop();
-
-num = 100000;
-
-for ( i=0; i<num; i++){
- t.insert( { a : "b" } );
-}
-
-assert.eq( num , t.find().count() ,"A1" );
-assert.eq( num , t.find( { a : "b" } ).count() , "B1" );
-assert.eq( num , t.find( { a : "b" } ).itcount() , "C1" );
-
-t.ensureIndex( { a : 1 } );
-
-assert.eq( num , t.find().count() ,"A2" );
-assert.eq( num , t.find().sort( { a : 1 } ).count() , "A2a" );
-assert.eq( num , t.find( { a : "b" } ).count() , "B2" );
-assert.eq( num , t.find( { a : "b" } ).itcount() , "C3" );
-
-t.drop();
diff --git a/jstests/indexes_on_indexes.js b/jstests/indexes_on_indexes.js
deleted file mode 100644
index 807c1e25bfd..00000000000
--- a/jstests/indexes_on_indexes.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// ensure an index cannot be created on system.indexes
-t = db.getSiblingDB("indexes_on_indexes");
-printjson(t.system.indexes.getIndexes());
-assert.eq(t.system.indexes.getIndexes().length, 0);
-print("trying via ensureIndex");
-assert.throws(t.system.indexes.ensureIndex({_id:1}));
-printjson(t.system.indexes.getIndexes());
-assert.eq(t.system.indexes.getIndexes().length, 0);
-print("trying via createIndex");
-assert.throws(t.system.indexes.createIndex({_id:1}));
-printjson(t.system.indexes.getIndexes());
-assert.eq(t.system.indexes.getIndexes().length, 0);
-print("trying via direct insertion");
-assert.throws(t.system.indexes.insert({ v:1,
- key:{_id:1},
- ns: "indexes_on_indexes.system.indexes",
- name:"wontwork"}));
-printjson(t.system.indexes.getIndexes());
-assert.eq(t.system.indexes.getIndexes().length, 0);
diff --git a/jstests/indexf.js b/jstests/indexf.js
deleted file mode 100644
index d65e7b1c898..00000000000
--- a/jstests/indexf.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-t = db.indexf
-t.drop();
-
-t.ensureIndex( { x : 1 } );
-
-t.save( { x : 2 } );
-t.save( { y : 3 } );
-t.save( { x : 4 } );
-
-assert.eq( 2 , t.findOne( { x : 2 } ).x , "A1" );
-assert.eq( 3 , t.findOne( { x : null } ).y , "A2" );
-assert.eq( 4 , t.findOne( { x : 4 } ).x , "A3" );
diff --git a/jstests/indexg.js b/jstests/indexg.js
deleted file mode 100644
index a0709fd6568..00000000000
--- a/jstests/indexg.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-f = db.jstests_indexg;
-f.drop();
-f.save( { list: [1, 2] } );
-f.save( { list: [1, 3] } );
-
-doit = function() {
- assert.eq( 1, f.count( { list: { $in: [1], $ne: 3 } } ) );
- assert.eq( 1, f.count( { list: { $in: [1], $not:{$in: [3] } } } ) );
-}
-doit();
-f.ensureIndex( { list: 1 } );
-doit(); \ No newline at end of file
diff --git a/jstests/indexh.js b/jstests/indexh.js
deleted file mode 100644
index ac2a93ec62b..00000000000
--- a/jstests/indexh.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// This should get skipped when testing replication
-
-t = db.jstests_indexh;
-
-function debug( t ) {
- print( t );
-}
-
-function extraDebug() {
-// printjson( db.stats() );
-// db.printCollectionStats();
-}
-
-// index extent freeing
-t.drop();
-t.save( {} );
-var s1 = db.stats().dataSize;
-debug( "s1: " + s1 );
-extraDebug();
-t.ensureIndex( {a:1} );
-var s2 = db.stats().dataSize;
-debug( "s2: " + s2 );
-assert.automsg( "s1 < s2" );
-t.dropIndex( {a:1} );
-var s3 = db.stats().dataSize;
-debug( "s3: " + s3 );
-extraDebug();
-assert.eq.automsg( "s1", "s3" );
-
-// index node freeing
-t.drop();
-t.ensureIndex( {a:1} );
-for( i = 'a'; i.length < 500; i += 'a' ) {
- t.save( {a:i} );
-}
-var s4 = db.stats().indexSize;
-debug( "s4: " + s4 );
-t.remove( {} );
-var s5 = db.stats().indexSize;
-debug( "s5: " + s5 );
-assert.automsg( "s5 < s4" ); \ No newline at end of file
diff --git a/jstests/indexi.js b/jstests/indexi.js
deleted file mode 100644
index 446d6a21fc5..00000000000
--- a/jstests/indexi.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Test that client cannot access index namespaces SERVER-4276.
-
-if ( 0 ) { // SERVER-4276
-
-t = db.jstests_indexi;
-t.drop();
-
-debug = true;
-
-idx = db.jstests_indexi.$_id_;
-
-function shouldFail( f ) {
- e = assert.throws( function() {
- f();
- if( db.getLastError() ) {
- throw db.getLastError();
- }
- } );
- if ( debug ) {
- printjson( e );
- }
-}
-
-function checkFailingOperations() {
- // Test that accessing the index namespace fails.
- shouldFail( function() { idx.count(); } );
- shouldFail( function() { idx.find().itcount(); } );
- shouldFail( function() { idx.insert({}); } );
- shouldFail( function() { idx.remove({}); } );
- shouldFail( function() { idx.update({},{}); } );
- assert.commandFailed( idx.runCommand( 'compact' ) );
-
- // No validation here (yet).
- //shouldFail( function() { idx.ensureIndex({x:1}) } );
-}
-
-// Check with base collection not present.
-checkFailingOperations();
-t.save({});
-// Check with base collection present.
-checkFailingOperations();
-
-}
diff --git a/jstests/indexj.js b/jstests/indexj.js
deleted file mode 100644
index 6d8ac85c972..00000000000
--- a/jstests/indexj.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// SERVER-726
-
-t = db.jstests_indexj;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:5} );
-assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "A" );
-
-t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:4} );
-assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "B" );
-
-t.save( {a:5} );
-assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "D" );
-
-t.save( {a:4} );
-assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "C" );
-
-t.save( {a:5} );
-assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "D" );
-
-t.drop();
-t.ensureIndex( {a:1,b:1} );
-t.save( { a:1,b:1 } );
-t.save( { a:1,b:2 } );
-t.save( { a:2,b:1 } );
-t.save( { a:2,b:2 } );
-
-assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
-assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
-
-t.save( {a:1,b:1} );
-t.save( {a:1,b:1} );
-assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
-assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
-assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
-
-assert.eq( 1, t.find( { a:{$in:[1,1.9]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
-assert.eq( 1, t.find( { a:{$in:[1.1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
-
-t.save( { a:1,b:1.5} );
-assert.eq( 3, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned, "F" );
diff --git a/jstests/indexl.js b/jstests/indexl.js
deleted file mode 100644
index 666586db7a7..00000000000
--- a/jstests/indexl.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Check nonoverlapping $in/$all with multikeys SERVER-2165
-
-t = db.jstests_indexl;
-
-function test(t) {
- t.save( {a:[1,2]} );
- assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
- assert.eq( 1, t.count( {a:{$all:[2],$in:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[2],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[1],$all:[2]}} ) );
- assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
- t.save({a:[3,4]})
- t.save({a:[2,3]})
- t.save({a:[1,2,3,4]})
- assert.eq( 2, t.count( {a:{$in:[2],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[3],$all:[1,2]}} ) );
- assert.eq( 1, t.count( {a:{$in:[1],$all:[3]}} ) );
- assert.eq( 2, t.count( {a:{$in:[2,3],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[4],$all:[2,3]}} ) );
- assert.eq( 3, t.count( {a:{$in:[1,3],$all:[2]}} ) );
-}
-
-t.drop();
-test(t);
-t.drop();
-t.ensureIndex( {a:1} );
-test(t); \ No newline at end of file
diff --git a/jstests/indexm.js b/jstests/indexm.js
deleted file mode 100644
index 6b31ea628cd..00000000000
--- a/jstests/indexm.js
+++ /dev/null
@@ -1,38 +0,0 @@
-// Check proper range combinations with or clauses overlapping non or portion of query SERVER-2302
-
-t = db.jstests_indexm;
-t.drop();
-
-t.save( { a : [ { x : 1 } , { x : 2 } , { x : 3 } , { x : 4 } ] } )
-
-function test(){
- assert.eq( 1, t.count(
- {
- a : { x : 1 } ,
- "$or" : [ { a : { x : 2 } } , { a : { x : 3 } } ]
- }
- ) );
-}
-
-// The first find will return a result since there isn't an index.
-test();
-
-// Now create an index.
-t.ensureIndex({"a":1});
-test();
-// SERVER-3105
-//assert( !t.find(
-// {
-// a : { x : 1 } ,
-// "$or" : [ { a : { x : 2 } } , { a : { x : 3 } } ]
-// }
-// ).explain().clauses );
-
-// Now create a different index.
-t.dropIndexes();
-t.ensureIndex({"a.x":1});
-test();
-
-// Drop the indexes.
-t.dropIndexes();
-test(); \ No newline at end of file
diff --git a/jstests/indexn.js b/jstests/indexn.js
deleted file mode 100644
index 9abb001eed9..00000000000
--- a/jstests/indexn.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// Test "impossible match" queries, or queries that will always have
-// an empty result set.
-
-t = db.jstests_indexn;
-t.drop();
-
-function checkImpossibleMatch( explain ) {
- printjson(explain);
- assert.eq( 0, explain.n );
-}
-
-t.save( {a:1,b:[1,2]} );
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-// {a:1} is a single key index, so no matches are possible for this query
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0}} ) );
-checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0}} ).explain() );
-
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:2} ) );
-checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0},b:2} ).explain() );
-
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:{$gt:0,$lt:5}} ) );
-checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0},b:{$gt:0,$lt:5}} ).explain() );
-
-// One clause of an $or is an "impossible match"
-printjson( t.find( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ).explain() )
-assert.eq( 1, t.count( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ) );
-checkImpossibleMatch( t.find( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ).explain().clauses[ 0 ] );
-
-// One clause of an $or is an "impossible match"; original order of the $or
-// does not matter.
-printjson( t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ).explain() )
-assert.eq( 1, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ) );
-checkImpossibleMatch( t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ).explain().clauses[ 0 ] );
-
-t.save( {a:2} );
-
-// Descriptive test: query system sees this query as an $or where
-// one clause of the $or is an $and. The $and bounds get intersected
-// forming a clause with empty index bounds. The union of the $or bounds
-// produces the two point intervals [1, 1] and [2, 2].
-assert.eq( 2, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}},{a:2}]} ) );
-explain = t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}},{a:2}]} ).explain();
-printjson( explain )
-assert.eq( 2, explain.clauses.length );
-checkImpossibleMatch( explain.clauses[ 0 ] );
-assert.eq( [[1, 1], [2,2]], explain.clauses[ 1 ].indexBounds.a );
diff --git a/jstests/indexo.js b/jstests/indexo.js
deleted file mode 100644
index 250b14bbb0e..00000000000
--- a/jstests/indexo.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// Tests that an index cannot be created with dropDups=true on
-// a capped collection.
-
-var coll = db.jstests_indexo;
-coll.drop();
-
-// Can create a dropDups index on non-capped collection.
-var response = coll.ensureIndex({x: 1}, {dropDups: true});
-assert(response == null);
-coll.drop();
-
-// Cannot create a dropDups index on non-capped collection.
-db.createCollection("jstests_indexy", {capped: true, size: 1024});
-coll = db.jstests_indexy;
-response = coll.ensureIndex({x: 1}, {dropDups: true});
-assert(response != null);
-coll.drop();
diff --git a/jstests/indexp.js b/jstests/indexp.js
deleted file mode 100644
index d71de4716a7..00000000000
--- a/jstests/indexp.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// Tests that SERVER-11374 is fixed: specifically, that indexes cannot
-// be created on fields that begin with '$' but are not part of DBRefs
-// and that indexes cannot be created on field paths that contain empty
-// fields.
-
-var coll = db.jstests_indexp;
-
-// Empty field checks.
-coll.ensureIndex({ 'a..b': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on 'a..b' was created successfully." );
-
-coll.ensureIndex({ '.a': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on '.a' was created successfully." );
-
-coll.ensureIndex({ 'a.': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on 'a.' was created successfully." );
-
-coll.ensureIndex({ '.': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on '.' was created successfully." );
-
-coll.ensureIndex({ '': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on '' was created successfully." );
-
-coll.ensureIndex({ 'a.b': 1 });
-assert( db.getLastError() == null,
- "Expected no error, but creating index on 'a.b' failed." );
-
-// '$'-prefixed field checks.
-coll.ensureIndex({ '$a': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on '$a' was created successfully." );
-
-coll.ensureIndex({ 'a.$b': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on 'a.$b' was created successfully." );
-
-coll.ensureIndex({ 'a$ap': 1 });
-assert( db.getLastError() == null,
- "Expected no error, but creating index on 'a$ap' failed." );
-
-coll.ensureIndex({ '$db': 1 });
-assert( db.getLastError() != null,
- "Expected error, but index on '$db' was created successfully." );
-
-coll.ensureIndex({ 'a.$id': 1 });
-assert( db.getLastError() == null,
- "Expected no error, but creating index on 'a.$id' failed." );
-
-coll.dropIndexes();
diff --git a/jstests/indexq.js b/jstests/indexq.js
deleted file mode 100644
index 38cd27b8798..00000000000
--- a/jstests/indexq.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Test multikey range preference for a fully included range SERVER-958.
-
-t = db.jstests_indexq;
-t.drop();
-
-t.ensureIndex( {a:1} );
-// Single key index
-assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] );
-assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a );
-
-t.save( {a:[1,3]} );
-// Now with multi key index.
-
-// SERVER-12281: We should know that >4 is worse than >5
-// assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] );
-
-printjson(t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain())
-
-// SERVER-12281: We should know that in[1,2] is better than in[1,2,3].
-// assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a );
diff --git a/jstests/indexr.js b/jstests/indexr.js
deleted file mode 100644
index c3eecd045c8..00000000000
--- a/jstests/indexr.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// Check multikey index cases with parallel nested fields SERVER-958.
-
-t = db.jstests_indexr;
-t.drop();
-
-// Check without indexes.
-t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
-
-// Check with single key indexes.
-t.remove({});
-t.ensureIndex( {'a.b':1,'a.c':1} );
-t.ensureIndex( {a:1,'a.c':1} );
-assert.eq( 0, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 0, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
-assert.eq( 4, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
-
-t.save( { a: { b: 3, c: 3 } } );
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
-assert.eq( 4, t.find( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
-
-// Check with multikey indexes.
-t.remove({});
-t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
-
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
-assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] );
-assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] );
-
-// Check reverse direction.
-assert.eq( 1, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).itcount() );
-assert.eq( 1, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).itcount() );
-
-assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).explain().indexBounds['a.c'] );
-assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).explain().indexBounds['a.c'] );
-
-// Check second field is constrained if first is not.
-assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {'a.b':1,'a.c':1} ).itcount() );
-assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {a:1,'a.c':1} ).itcount() );
diff --git a/jstests/indexs.js b/jstests/indexs.js
deleted file mode 100644
index 609f912affe..00000000000
--- a/jstests/indexs.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Test index key generation issue with parent and nested fields in same index and array containing subobject SERVER-3005.
-
-t = db.jstests_indexs;
-
-t.drop();
-t.ensureIndex( {a:1} );
-t.save( { a: [ { b: 3 } ] } );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
-
-t.drop();
-t.ensureIndex( {a:1,'a.b':1} );
-t.save( { a: { b: 3 } } );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
-ib = t.find( { a:{ b:3 } } ).explain().indexBounds;
-
-t.drop();
-t.ensureIndex( {a:1,'a.b':1} );
-t.save( { a: [ { b: 3 } ] } );
-assert.eq( ib, t.find( { a:{ b:3 } } ).explain().indexBounds );
-assert.eq( 1, t.find( { a:{ b:3 } } ).explain().nscanned );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
diff --git a/jstests/indext.js b/jstests/indext.js
deleted file mode 100644
index e418dc2e959..00000000000
--- a/jstests/indext.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Sparse indexes with arrays SERVER-3216
-
-t = db.jstests_indext;
-t.drop();
-
-t.ensureIndex( {'a.b':1}, {sparse:true} );
-t.save( {a:[]} );
-t.save( {a:1} );
-assert.eq( 0, t.find().hint( {'a.b':1} ).itcount() );
-assert.eq( 0, t.find().hint( {'a.b':1} ).explain().nscanned );
-
-t.ensureIndex( {'a.b':1,'a.c':1}, {sparse:true} );
-t.save( {a:[]} );
-t.save( {a:1} );
-assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
-assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned );
-
-t.save( {a:[{b:1}]} );
-t.save( {a:1} );
-assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
-assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned );
diff --git a/jstests/indexu.js b/jstests/indexu.js
deleted file mode 100644
index de0d9831dab..00000000000
--- a/jstests/indexu.js
+++ /dev/null
@@ -1,137 +0,0 @@
-// Test index key generation with duplicate values addressed by array index and
-// object field. SERVER-2902
-
-t = db.jstests_indexu;
-t.drop();
-
-var dupDoc = {a:[{'0':1}]}; // There are two 'a.0' fields in this doc.
-var dupDoc2 = {a:[{'1':1},'c']};
-var noDupDoc = {a:[{'1':1}]};
-
-// Test that we can't index dupDoc.
-t.save( dupDoc );
-assert( !db.getLastError() );
-t.ensureIndex( {'a.0':1} );
-assert( db.getLastError() );
-
-t.remove({});
-t.ensureIndex( {'a.0':1} );
-assert( !db.getLastError() );
-t.save( dupDoc );
-assert( db.getLastError() );
-
-// Test that we can't index dupDoc2.
-t.drop();
-t.save( dupDoc2 );
-assert( !db.getLastError() );
-t.ensureIndex( {'a.1':1} );
-assert( db.getLastError() );
-
-t.remove({});
-t.ensureIndex( {'a.1':1} );
-assert( !db.getLastError() );
-t.save( dupDoc2 );
-assert( db.getLastError() );
-
-// Test that we can index dupDoc with a different index.
-t.drop();
-t.ensureIndex( {'a.b':1} );
-t.save( dupDoc );
-assert( !db.getLastError() );
-
-// Test number field starting with hyphen.
-t.drop();
-t.ensureIndex( {'a.-1':1} );
-t.save( {a:[{'-1':1}]} );
-assert( !db.getLastError() );
-
-// Test number field starting with zero.
-t.drop();
-t.ensureIndex( {'a.00':1} );
-t.save( {a:[{'00':1}]} );
-assert( !db.getLastError() );
-
-// Test multiple array indexes
-t.drop();
-t.ensureIndex( {'a.0':1,'a.1':1} );
-t.save( {a:[{'1':1}]} );
-assert( !db.getLastError() );
-t.save( {a:[{'1':1},4]} );
-assert( db.getLastError() );
-
-// Test that we can index noDupDoc.
-t.drop();
-t.save( noDupDoc );
-t.ensureIndex( {'a.0':1} );
-assert( !db.getLastError() );
-t.ensureIndex( {'a.1':1} );
-assert( !db.getLastError() );
-
-t.drop();
-t.ensureIndex( {'a.0':1} );
-t.ensureIndex( {'a.1':1} );
-t.save( noDupDoc );
-assert( !db.getLastError() );
-
-// Test that we can query noDupDoc.
-assert.eq( 1, t.find( {'a.1':1} ).hint( {'a.1':1} ).itcount() );
-assert.eq( 1, t.find( {'a.1':1} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {'a.0':1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {$natural:1} ).itcount() );
-
-// Check multiple nested array fields.
-t.drop();
-t.save( {a:[[1]]} );
-t.ensureIndex( {'a.0.0':1} );
-assert( !db.getLastError() );
-assert.eq( 1, t.find( {'a.0.0':1} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {'a.0.0':1} ).hint( {'a.0.0':1} ).itcount() );
-
-// Check where there is a duplicate for a partially addressed field but not for a fully addressed field.
-t.drop();
-t.save( {a:[[1],{'0':1}]} );
-t.ensureIndex( {'a.0.0':1} );
-assert( db.getLastError() );
-
-// Check where there is a duplicate for a fully addressed field.
-t.drop();
-t.save( {a:[[1],{'0':[1]}]} );
-assert( !db.getLastError() );
-t.ensureIndex( {'a.0.0':1} );
-assert( db.getLastError() );
-
-// Two ways of addressing parse to an array.
-t.drop();
-t.save( {a:[{'0':1}]} );
-t.ensureIndex( {'a.0.0':1} );
-assert( db.getLastError() );
-
-// Test several key depths - with same arrays being found.
-t.drop();
-t.save( {a:[{'0':[{'0':1}]}]} );
-t.ensureIndex( {'a.0.0.0.0.0.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a.0.0.0.0.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a.0.0.0.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a.0.0.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a.0.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a.0':1} );
-assert( db.getLastError() );
-t.ensureIndex( {'a':1} );
-assert( !db.getLastError() );
-
-// Two prefixes extract docs, but one terminates extraction before array.
-t.drop();
-t.save( {a:[{'0':{'c':[]}}]} );
-t.ensureIndex( {'a.0.c':1} );
-assert( db.getLastError() );
-
-t.drop();
-t.save( {a:[[{'b':1}]]} );
-assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
-t.ensureIndex( {'a.0.b':1} );
-assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
diff --git a/jstests/indexv.js b/jstests/indexv.js
deleted file mode 100644
index 334ec432d74..00000000000
--- a/jstests/indexv.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// Check null key generation.
-
-t = db.jstests_indexv;
-t.drop();
-
-t.ensureIndex( {'a.b':1} );
-
-t.save( {a:[{},{b:1}]} );
-var e = t.find( {'a.b':null} ).explain();
-assert.eq( 1, e.n );
-assert.eq( 1, e.nscanned );
-
-t.drop();
-t.ensureIndex( {'a.b.c':1} );
-t.save( {a:[{b:[]},{b:{c:1}}]} );
-var e = t.find( {'a.b.c':null} ).explain();
-assert.eq( 0, e.n );
-assert.eq( 1, e.nscanned );
diff --git a/jstests/indexw.js b/jstests/indexw.js
deleted file mode 100644
index bd7c75b8b08..00000000000
--- a/jstests/indexw.js
+++ /dev/null
@@ -1,15 +0,0 @@
-// Check that v0 keys are generated for v0 indexes SERVER-3375
-
-t = db.jstests_indexw;
-t.drop();
-
-t.save( {a:[]} );
-assert.eq( 1, t.count( {a:[]} ) );
-t.ensureIndex( {a:1} );
-assert.eq( 1, t.count( {a:[]} ) );
-t.dropIndexes();
-
-// The count result is incorrect - just checking here that v0 key generation is used.
-t.ensureIndex( {a:1}, {v:0} );
-// QUERY_MIGRATION: WE GET THIS RIGHT...BY CHANCE?
-// assert.eq( 0, t.count( {a:[]} ) );
diff --git a/jstests/insert1.js b/jstests/insert1.js
deleted file mode 100644
index 7e6b73b6566..00000000000
--- a/jstests/insert1.js
+++ /dev/null
@@ -1,44 +0,0 @@
-t = db.insert1;
-t.drop();
-
-o = {a:1};
-t.insert(o);
-id = t._lastID
-assert.eq(o, {a:1}, "input unchanged 1");
-assert.eq(typeof(id), "object", "1");
-assert.eq(id.constructor, ObjectId, "1");
-assert.eq(t.findOne({_id:id}).a, 1, "find by id 1");
-assert.eq(t.findOne({a:1})._id, id , "find by val 1");
-
-o = {a:2, _id:new ObjectId()};
-id1 = o._id
-t.insert(o);
-id2 = t._lastID
-assert.eq(id1, id2, "ids match 2");
-assert.eq(o, {a:2, _id:id1}, "input unchanged 2");
-assert.eq(typeof(id2), "object", "2");
-assert.eq(id2.constructor, ObjectId, "2");
-assert.eq(t.findOne({_id:id1}).a, 2, "find by id 2");
-assert.eq(t.findOne({a:2})._id, id1 , "find by val 2");
-
-o = {a:3, _id:"asdf"};
-id1 = o._id
-t.insert(o);
-id2 = t._lastID
-assert.eq(id1, id2, "ids match 3");
-assert.eq(o, {a:3, _id:id1}, "input unchanged 3");
-assert.eq(typeof(id2), "string", "3");
-assert.eq(t.findOne({_id:id1}).a, 3, "find by id 3");
-assert.eq(t.findOne({a:3})._id, id1 , "find by val 3");
-
-o = {a:4, _id:null};
-id1 = o._id
-t.insert(o);
-id2 = t._lastID
-assert.eq(id1, id2, "ids match 4");
-assert.eq(o, {a:4, _id:id1}, "input unchanged 4");
-assert.eq(t.findOne({_id:id1}).a, 4, "find by id 4");
-assert.eq(t.findOne({a:4})._id, id1 , "find by val 4");
-
-var stats = db.runCommand({ collstats: "insert1" });
-assert(stats.paddingFactor == 1.0);
diff --git a/jstests/insert2.js b/jstests/insert2.js
deleted file mode 100644
index 9480efeac4d..00000000000
--- a/jstests/insert2.js
+++ /dev/null
@@ -1,8 +0,0 @@
-
-t = db.insert2
-t.drop()
-
-assert.isnull( t.findOne() , "A" )
-t.insert( { z : 1 , $inc : { x : 1 } } , 0, true );
-assert.isnull( t.findOne() , "B" )
-
diff --git a/jstests/insert_id_undefined.js b/jstests/insert_id_undefined.js
deleted file mode 100644
index 945640a815b..00000000000
--- a/jstests/insert_id_undefined.js
+++ /dev/null
@@ -1,6 +0,0 @@
-// ensure a document with _id undefined cannot be saved
-t = db.insert_id_undefined;
-t.drop();
-t.insert({_id:undefined});
-db.getLastError();
-assert.eq(t.count(), 0);
diff --git a/jstests/insert_illegal_doc.js b/jstests/insert_illegal_doc.js
deleted file mode 100644
index 2b4d326e9ce..00000000000
--- a/jstests/insert_illegal_doc.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// SERVER-12185: Do not allow insertion or update of docs which will fail the
-// "parallel indexing of arrays" test
-var coll = db.insert_illegal_doc;
-coll.drop();
-coll.ensureIndex({a: 1, b: 1});
-
-// test upsert
-coll.update({}, {_id: 1, a: [1, 2, 3], b: [4, 5, 6]}, true);
-assert.gleErrorCode(db, 10088);
-assert.eq(0, coll.find().itcount(), "should not be a doc");
-
-// test insert
-coll.insert({_id: 1, a: [1, 2, 3], b: [4, 5, 6]});
-assert.gleErrorCode(db, 10088);
-assert.eq(0, coll.find().itcount(), "should not be a doc");
-
-// test update
-coll.insert({_id: 1});
-assert.gleSuccess(db, "insert failed");
-coll.update({_id: 1}, {$set : { a : [1, 2, 3], b: [4, 5, 6]}});
-assert.gleErrorCode(db, 10088);
-assert.eq(undefined, coll.findOne().a, "update should have failed");
diff --git a/jstests/insert_long_index_key.js b/jstests/insert_long_index_key.js
deleted file mode 100644
index 6379c36fb4a..00000000000
--- a/jstests/insert_long_index_key.js
+++ /dev/null
@@ -1,10 +0,0 @@
-t = db.insert_long_index_key;
-t.drop();
-
-var s = new Array(2000).toString();
-t.ensureIndex( { x : 1 } );
-
-t.insert({ x: 1 });
-t.insert({ x: s });
-
-assert.eq( 1, t.count() );
diff --git a/jstests/ismaster.js b/jstests/ismaster.js
deleted file mode 100644
index 0c385b02d7c..00000000000
--- a/jstests/ismaster.js
+++ /dev/null
@@ -1,28 +0,0 @@
-var res = db.isMaster();
-// check that the fields that should be there are there and have proper values
-assert( res.maxBsonObjectSize &&
- isNumber(res.maxBsonObjectSize) &&
- res.maxBsonObjectSize > 0, "maxBsonObjectSize possibly missing:" + tojson(res));
-assert( res.maxMessageSizeBytes &&
- isNumber(res.maxMessageSizeBytes) &&
- res.maxBsonObjectSize > 0, "maxMessageSizeBytes possibly missing:" + tojson(res));
-assert( res.maxWriteBatchSize &&
- isNumber(res.maxWriteBatchSize) &&
- res.maxWriteBatchSize > 0, "maxWriteBatchSize possibly missing:" + tojson(res));
-assert(res.ismaster, "ismaster missing or false:" + tojson(res));
-assert(res.localTime, "localTime possibly missing:" + tojson(res));
-var unwantedFields = ["setName", "setVersion", "secondary", "hosts", "passives", "arbiters",
- "primary", "aribterOnly", "passive", "slaveDelay", "hidden", "tags",
- "buildIndexes", "me"];
-// check that the fields that shouldn't be there are not there
-var badFields = [];
-for (field in res) {
- if (!res.hasOwnProperty(field)){
- continue;
- }
- if (Array.contains(unwantedFields, field)) {
- badFields.push(field);
- }
-}
-assert(badFields.length === 0, "\nthe result:\n" + tojson(res)
- + "\ncontained fields it shouldn't have: " + badFields);
diff --git a/jstests/js1.js b/jstests/js1.js
deleted file mode 100644
index 240d9f82fbb..00000000000
--- a/jstests/js1.js
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-t = db.jstests_js1;
-t.remove( {} );
-
-t.save( { z : 1 } );
-t.save( { z : 2 } );
-assert( 2 == t.find().length() );
-assert( 2 == t.find( { $where : function(){ return 1; } } ).length() );
-assert( 1 == t.find( { $where : function(){ return obj.z == 2; } } ).length() );
-
-assert(t.validate().valid);
diff --git a/jstests/js2.js b/jstests/js2.js
deleted file mode 100644
index 8753599887a..00000000000
--- a/jstests/js2.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-t = db.jstests_js2;
-t.remove( {} );
-
-t2 = db.jstests_js2_2;
-t2.remove( {} );
-
-assert.eq( 0 , t2.find().length() , "A" );
-
-t.save( { z : 1 } );
-t.save( { z : 2 } );
-assert.throws( function(){
- t.find( { $where :
- function(){
- db.jstests_js2_2.save( { y : 1 } );
- return 1;
- }
- } ).forEach( printjson );
-} , null , "can't save from $where" );
-
-assert.eq( 0 , t2.find().length() , "B" )
-
-assert(t.validate().valid , "E");
diff --git a/jstests/js3.js b/jstests/js3.js
deleted file mode 100644
index 4249ad6183d..00000000000
--- a/jstests/js3.js
+++ /dev/null
@@ -1,76 +0,0 @@
-
-t = db.jstests_js3;
-
-debug = function( s ){
- //printjson( s );
-}
-
-for( z = 0; z < 2; z++ ) {
- debug(z);
-
- t.drop();
-
- if( z > 0 ) {
- t.ensureIndex({_id:1});
- t.ensureIndex({i:1});
- }
-
- for( i = 0; i < 1000; i++ )
- t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- assert( 33 == db.dbEval(function() { return 33; } ) );
-
- db.dbEval( function() { db.jstests_js3.save({i:-1, z:"server side"}) } );
-
- assert( t.findOne({i:-1}) );
-
- assert( 2 == t.find( { $where :
- function(){
- return obj.i == 7 || obj.i == 8;
- }
- } ).length() );
-
-
- // NPE test
- var ok = false;
- try {
- var x = t.find( { $where :
- function(){
- asdf.asdf.f.s.s();
- }
- } );
- debug( x.length() );
- debug( tojson( x ) );
- }
- catch(e) {
- ok = true;
- }
- debug( ok );
- assert(ok);
-
- t.ensureIndex({z:1});
- t.ensureIndex({q:1});
-
- debug( "before indexed find" );
-
- arr = t.find( { $where :
- function(){
- return obj.i == 7 || obj.i == 8;
- }
- } ).toArray();
- debug( arr );
- assert.eq( 2, arr.length );
-
- debug( "after indexed find" );
-
- for( i = 1000; i < 2000; i++ )
- t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- assert( t.find().count() == 2001 );
-
- assert( t.validate().valid );
-
- debug( "done iter" );
-}
-
-t.drop(); \ No newline at end of file
diff --git a/jstests/js4.js b/jstests/js4.js
deleted file mode 100644
index 38cadf355de..00000000000
--- a/jstests/js4.js
+++ /dev/null
@@ -1,49 +0,0 @@
-t = db.jstests_js4;
-t.drop();
-
-real = { a : 1 ,
- b : "abc" ,
- c : /abc/i ,
- d : new Date(111911100111) ,
- e : null ,
- f : true
- };
-
-t.save( real );
-
-assert.eq( "/abc/i" , real.c.toString() , "regex 1" );
-
-var cursor = t.find( { $where :
- function(){
- fullObject;
- assert.eq( 7 , Object.keySet( obj ).length , "A" )
- assert.eq( 1 , obj.a , "B" );
- assert.eq( "abc" , obj.b , "C" );
- assert.eq( "/abc/i" , obj.c.toString() , "D" );
- assert.eq( 111911100111 , obj.d.getTime() , "E" );
- assert( obj.f , "F" );
- assert( ! obj.e , "G" );
-
- return true;
- }
- } );
-assert.eq( 1 , cursor.toArray().length );
-assert.eq( "abc" , cursor[0].b );
-
-// ---
-
-t.drop();
-t.save( { a : 2 , b : { c : 7 , d : "d is good" } } );
-var cursor = t.find( { $where :
- function(){
- fullObject;
- assert.eq( 3 , Object.keySet( obj ).length )
- assert.eq( 2 , obj.a );
- assert.eq( 7 , obj.b.c );
- assert.eq( "d is good" , obj.b.d );
- return true;
- }
- } );
-assert.eq( 1 , cursor.toArray().length );
-
-assert(t.validate().valid);
diff --git a/jstests/js5.js b/jstests/js5.js
deleted file mode 100644
index 84770d72da2..00000000000
--- a/jstests/js5.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.jstests_js5
-t.drop();
-
-t.save( { a : 1 } )
-t.save( { a : 2 } )
-
-assert.eq( 2 , t.find( { "$where" : "this.a" } ).count() , "A" );
-assert.eq( 0 , t.find( { "$where" : "this.b" } ).count() , "B" );
-assert.eq( 0 , t.find( { "$where" : "this.b > 45" } ).count() , "C" );
diff --git a/jstests/js7.js b/jstests/js7.js
deleted file mode 100644
index d12e207379e..00000000000
--- a/jstests/js7.js
+++ /dev/null
@@ -1,5 +0,0 @@
-t = db.jstests_js7;
-t.drop();
-
-assert.eq( 17 , db.eval( function( foo ){ return foo; } , 17 ) );
-
diff --git a/jstests/js8.js b/jstests/js8.js
deleted file mode 100644
index da2dcc619cd..00000000000
--- a/jstests/js8.js
+++ /dev/null
@@ -1,14 +0,0 @@
-t = db.jstests_js8;
-t.drop();
-
-t.save( { a : 1 , b : [ 2 , 3 , 4 ] } );
-
-assert.eq( 1 , t.find().length() , "A" );
-assert.eq( 1 , t.find( function(){ return this.a == 1; } ).length() , "B" );
-assert.eq( 1 , t.find( function(){ if ( ! this.b.length ) return true; return this.b.length == 3; } ).length() , "B2" );
-assert.eq( 1 , t.find( function(){ return this.b[0] == 2; } ).length() , "C" );
-assert.eq( 0 , t.find( function(){ return this.b[0] == 3; } ).length() , "D" );
-assert.eq( 1 , t.find( function(){ return this.b[1] == 3; } ).length() , "E" );
-
-
-assert(t.validate().valid);
diff --git a/jstests/js9.js b/jstests/js9.js
deleted file mode 100644
index 8748667f527..00000000000
--- a/jstests/js9.js
+++ /dev/null
@@ -1,24 +0,0 @@
-c = db.jstests_js9;
-c.drop();
-
-c.save( { a : 1 } );
-c.save( { a : 2 } );
-
-
-assert.eq( 2 , c.find().length() );
-assert.eq( 2 , c.find().count() );
-
-
-assert.eq( 2 ,
- db.eval(
- function(){
- num = 0;
- db.jstests_js9.find().forEach(
- function(z){
- num++;
- }
- );
- return num;
- }
- )
- )
diff --git a/jstests/json1.js b/jstests/json1.js
deleted file mode 100644
index 054a9b46047..00000000000
--- a/jstests/json1.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-x = { quotes:"a\"b" , nulls:null };
-eval( "y = " + tojson( x ) );
-assert.eq( tojson( x ) , tojson( y ) , "A" );
-assert.eq( typeof( x.nulls ) , typeof( y.nulls ) , "B" );
-
-// each type is parsed properly
-x = {"x" : null, "y" : true, "z" : 123, "w" : "foo", "a": undefined};
-assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo",\n\t"a" : undefined\n}' , "C" );
-
-x = {"x" : [], "y" : {}};
-assert.eq(tojson(x,"",false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}' , "D" );
-
-// nested
-x = {"x" : [{"x" : [1,2,[]], "z" : "ok", "y" : [[]]}, {"foo" : "bar"}], "y" : null};
-assert.eq(tojson(x), '{\n\t"x" : [\n\t\t{\n\t\t\t"x" : [\n\t\t\t\t1,\n\t\t\t\t2,\n\t\t\t\t[ ]\n\t\t\t],\n\t\t\t"z" : "ok",\n\t\t\t"y" : [\n\t\t\t\t[ ]\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\t"foo" : "bar"\n\t\t}\n\t],\n\t"y" : null\n}' , "E" );
-
-// special types
-x = {"x" : ObjectId("4ad35a73d2e34eb4fc43579a"), 'z' : /xd?/ig};
-assert.eq(tojson(x,"",false), '{\n\t"x" : ObjectId("4ad35a73d2e34eb4fc43579a"),\n\t"z" : /xd?/gi\n}' , "F" );
-
-// Timestamp type
-x = {"x" : Timestamp()};
-assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(0, 0)\n}' , "G")
-
-// Timestamp type, second
-x = {"x" : Timestamp(10,2)};
-assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(10, 2)\n}' , "H")
diff --git a/jstests/killop.js b/jstests/killop.js
deleted file mode 100644
index 9567391598d..00000000000
--- a/jstests/killop.js
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Basic test of killop functionality.
- *
- * Theory of operation: Creates two operations that will take a long time, sends killop for those
- * operations, and then attempts to infer that the operations died because of killop, and not for
- * some other reason.
- *
- * NOTES:
- * The long operations are count({$where: function () { while (1) ; } }). These operations do not
- * terminate until the server determines that they've spent too much time in JS execution, typically
- * after 30 seconds of wall clock time have passed. For these operations to take a long time, the
- * counted collection must not be empty; hence an initial write to the collection is required.
- */
-t = db.jstests_killop
-t.drop();
-
-t.save( {} );
-db.getLastError();
-
-/**
- * This function filters for the operations that we're looking for, based on their state and
- * the contents of their query object.
- */
-function ops() {
- p = db.currentOp().inprog;
- ids = [];
- for ( var i in p ) {
- var o = p[ i ];
- // We *can't* check for ns, b/c it's not guaranteed to be there unless the query is active, which
- // it may not be in our polling cycle - particularly b/c we sleep every second in both the query and
- // the assert
- if ( ( o.active || o.waitingForLock ) && o.query && o.query.query && o.query.query.$where && o.query.count == "jstests_killop" ) {
- ids.push( o.opid );
- }
- }
- return ids;
-}
-
-var s1 = null;
-var s2 = null;
-try {
- s1 = startParallelShell( "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
- s2 = startParallelShell( "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
-
- o = [];
- assert.soon(function() { o = ops(); return o.length == 2; },
- { toString: function () { return tojson(db.currentOp().inprog); } },
- 10000);
- db.killOp( o[ 0 ] );
- db.killOp( o[ 1 ] );
- start = new Date();
-}
-finally {
- if (s1) s1();
- if (s2) s2();
-}
-
-// don't want to pass if timeout killed the js function NOTE: This test will sometimes pass when the
-// JS engine did actually kill the operation, because the JS timeout is 30 seconds of wall clock
-// time from the moment the operation starts, but "start" measures from shortly after the test sends
-// the killop message to the server.
-assert( ( new Date() ) - start < 30000 );
diff --git a/jstests/loadserverscripts.js b/jstests/loadserverscripts.js
deleted file mode 100644
index 792e1c9228a..00000000000
--- a/jstests/loadserverscripts.js
+++ /dev/null
@@ -1,57 +0,0 @@
-
-// Test db.loadServerScripts()
-
-var testdb = db.getSisterDB("loadserverscripts");
-
-jsTest.log("testing db.loadServerScripts()");
-var x;
-
-// assert._debug = true;
-
-// clear out any data from old tests
-testdb.system.js.remove({});
-delete myfunc;
-delete myfunc2;
-
-x = testdb.system.js.findOne();
-assert.isnull(x, "Test for empty collection");
-
-// User functions should not be defined yet
-assert.eq( typeof myfunc, "undefined", "Checking that myfunc() is undefined" );
-assert.eq( typeof myfunc2, "undefined", "Checking that myfunc2() is undefined" );
-
-// Insert a function in the context of this process: make sure it's in the collection
-testdb.system.js.insert( { _id: "myfunc", "value": function(){ return "myfunc"; } } );
-x = testdb.system.js.count();
-assert.eq( x, 1, "Should now be one function in the system.js collection");
-
-// Load that function
-testdb.loadServerScripts();
-assert.eq( typeof myfunc, "function", "Checking that myfunc() loaded correctly" );
-
-// Make sure it works
-x = myfunc();
-assert.eq(x, "myfunc", "Checking that myfunc() returns the correct value");
-
-// Insert value into collection from another process
-var coproc = startParallelShell(
- 'db.getSisterDB("loadserverscripts").system.js.insert' +
- ' ( {_id: "myfunc2", "value": function(){ return "myfunc2"; } } );' +
- 'db.getLastError();'
- );
-// wait for results
-coproc();
-
-// Make sure the collection's been updated
-x = testdb.system.js.count();
-assert.eq( x, 2, "Should now be two functions in the system.js collection");
-
-
-// Load the new functions: test them as above
-testdb.loadServerScripts();
-assert.eq( typeof myfunc2, "function", "Checking that myfunc2() loaded correctly" );
-x = myfunc2();
-assert.eq(x, "myfunc2", "Checking that myfunc2() returns the correct value");
-
-jsTest.log("completed test of db.loadServerScripts()");
-
diff --git a/jstests/loglong.js b/jstests/loglong.js
deleted file mode 100644
index 06cbf296c09..00000000000
--- a/jstests/loglong.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// test for SERVER-5013
-// make sure very long long lines get truncated
-
-t = db.loglong;
-t.drop();
-
-t.insert( { x : 1 } );
-
-n = 0;
-query = { x : [] }
-while ( Object.bsonsize( query ) < 30000 ) {
- query.x.push( n++ );
-}
-
-before = db.adminCommand( { setParameter : 1 , logLevel : 1 } )
-
-t.findOne( query )
-
-x = db.adminCommand( { setParameter : 1 , logLevel : before.was } )
-assert.eq( 1 , x.was , tojson( x ) )
-
-log = db.adminCommand( { getLog : "global" } ).log
-
-found = false
-for ( i=log.length - 1; i>= 0; i-- ) {
- if ( log[i].indexOf( "warning: log line attempted (16k)" ) >= 0 ) {
- found = true;
- break;
- }
-}
-
-assert( found )
diff --git a/jstests/logprocessdetails.js b/jstests/logprocessdetails.js
deleted file mode 100644
index 607b1acb057..00000000000
--- a/jstests/logprocessdetails.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * SERVER-7140 test. Checks that process info is re-logged on log rotation
- */
-
-/**
- * Checks an array for match against regex.
- * Returns true if regex matches a string in the array
- */
-doesLogMatchRegex = function(logArray, regex) {
- for (var i = (logArray.length - 1); i >= 0; i--){
- var regexInLine = regex.exec(logArray[i]);
- if (regexInLine != null){
- return true;
- }
- }
- return false;
-}
-
-doTest = function() {
- var log = db.adminCommand({ getLog: 'global'});
- //this regex will need to change if output changes
- var re = new RegExp(".*conn.*options.*");
-
- assert.neq(null, log);
- var lineCount = log.totalLinesWritten;
- assert.neq(0, lineCount);
-
- var result = db.adminCommand({ logRotate: 1});
- assert.eq(1, result.ok);
-
- var log2 = db.adminCommand({ getLog: 'global'});
- assert.neq(null, log2);
- assert.gte(log2.totalLinesWritten, lineCount);
-
- var informationIsLogged = doesLogMatchRegex(log2.log, re);
- assert.eq(informationIsLogged, true, "Process details not present in RAM log");
-}
-
-doTest();
diff --git a/jstests/long_index_rename.js b/jstests/long_index_rename.js
deleted file mode 100644
index 41e1bfd4a3b..00000000000
--- a/jstests/long_index_rename.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// SERVER-7720 Building an index with a too-long name should always fail
-// Formerly, we would allow an index that already existed to be "created" with too long a name,
-// but this caused secondaries to crash when replicating what should be a bad createIndex command.
-// Here we test that the too-long name is rejected in this situation as well
-
-t = db.long_index_rename;
-t.drop();
-
-for (i = 1; i < 10; i++) {
- t.save({a:i});
-}
-
-t.createIndex({a:1}, {name: "aaa"});
-t.createIndex({a:1}, {name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
-var result = db.getLastErrorObj();
-assert( result.code >= 0 );
-assert( result.err.indexOf( "too long" ) >= 0 );
diff --git a/jstests/map1.js b/jstests/map1.js
deleted file mode 100644
index 1db53cd3848..00000000000
--- a/jstests/map1.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-function basic1( key , lookup , shouldFail){
- var m = new Map();
- m.put( key , 17 );
-
- var out = m.get( lookup || key );
-
- if ( ! shouldFail ){
- assert.eq( 17 , out , "basic1 missing: " + tojson( key ) );
- }
- else {
- assert.isnull( out , "basic1 not missing: " + tojson( key ) );
- }
-
-}
-
-basic1( 6 )
-basic1( new Date() )
-basic1( "eliot" )
-basic1( { a : 1 } );
-basic1( { a : 1 , b : 1 } )
-basic1( { a : 1 } , { b : 1 } , true )
-basic1( { a : 1 , b : 1 } , { b : 1 , a : 1 } , true )
-basic1( { a : 1 } , { a : 2 } , true );
diff --git a/jstests/max_message_size.js b/jstests/max_message_size.js
deleted file mode 100644
index 7aa66c16e75..00000000000
--- a/jstests/max_message_size.js
+++ /dev/null
@@ -1,88 +0,0 @@
-// Test handling of messages up to and over isMaster().maxMessageSizeBytes
-
-function go() { // using a function to ensure that all resources can be freed after test
- if (db.serverStatus().mem.bits == 32) {
- print("skipping max_message_size.js on 32bit system");
- return;
- }
-
- var t = db.max_message_size;
-
- var maxMessageSize = db.isMaster().maxMessageSizeBytes;
- var maxBsonSize = db.isMaster().maxBsonObjectSize;
-
- function makeObj(str) {
- return {_id: ObjectId(), s: str};
- }
-
- var bsonOverhead = Object.bsonsize(makeObj(''));
-
- var bigStr = 'x';
- while (bigStr.length < maxBsonSize) {
- bigStr += bigStr;
- }
-
- function insertWithBytes(bytes) {
- var toGo = bytes;
- toGo -= 16; // Message Header
- toGo -= 4; // Flags
- toGo -= t.getFullName().length + 1; // namespace with NUL
-
- var batch = [];
- while (toGo > 0) {
- var objBytes = Math.min(toGo, maxBsonSize);
- var filler = bigStr.substr(0, objBytes - bsonOverhead);
- var obj = makeObj(filler);
- assert.eq(Object.bsonsize(obj), objBytes);
- batch.push(obj);
- toGo -= objBytes;
- }
- assert.eq(toGo, 0);
-
- t.insert(batch);
-
- return batch.length;
- }
-
- function works(bytes) {
- t.drop();
- var numInserted = insertWithBytes(bytes);
- assert.isnull(db.getLastError());
- assert.eq(t.count(), numInserted);
- }
-
- function fails(bytes) {
- t.drop();
-
- try {
- var numInserted = insertWithBytes(bytes);
- var error = db.getLastErrorObj();
- } catch (e) {
- // A string is thrown rather than an object
- if (! (/^socket error/.test(e) || /socket exception/.test(e)))
- throw e;
-
- sleep(3000); // shell won't reconnect within 2 second window
-
- assert.eq(t.count(), 0);
- return; // successfully killed connection and reconnected
- }
-
- // Note to future maintainers: This test will need to be changed if we
- // modify the server's behavior to skip oversized messages and report
- // them in getLastError. The output from this should be helpful in
- // detecting this case.
- printjson({numInserted: numInserted, error: error});
- assert(false, "Connection not reset");
- }
-
- works(maxMessageSize - 1024*1024);
- works(maxMessageSize - 1);
- works(maxMessageSize);
-
- fails(maxMessageSize + 1);
- works(maxMessageSize); // make sure we still work after failure
- fails(maxMessageSize + 1024*1024);
- works(maxMessageSize);
-}
-go();
diff --git a/jstests/max_time_ms.js b/jstests/max_time_ms.js
deleted file mode 100644
index 1d0cca7949f..00000000000
--- a/jstests/max_time_ms.js
+++ /dev/null
@@ -1,303 +0,0 @@
-// Tests query/command option $maxTimeMS.
-
-var t = db.max_time_ms;
-var exceededTimeLimit = 50; // ErrorCodes::ExceededTimeLimit
-var cursor;
-var res;
-
-//
-// Simple positive test for query: a ~300ms query with a 100ms time limit should be aborted.
-//
-
-t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
-cursor.maxTimeMS(100);
-assert.throws(function() { cursor.itcount(); }, [], "expected query to abort due to time limit");
-
-//
-// Simple negative test for query: a ~300ms query with a 10s time limit should not hit the time
-// limit.
-//
-
-t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
-cursor.maxTimeMS(10*1000);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected query to not hit the time limit");
-
-//
-// Simple positive test for getmore:
-// - Issue a find() that returns 2 batches: a fast batch, then a slow batch.
-// - The find() has a 2-second time limit; the first batch should run "instantly", but the second
-// batch takes ~6 seconds, so the getmore should be aborted.
-//
-
-t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
- }
- return true;
-}});
-cursor.batchSize(3);
-cursor.maxTimeMS(2*1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.throws(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to abort due to time limit");
-
-//
-// Simple negative test for getmore:
-// - Issue a find() that returns 2 batches: a fast batch, then a slow batch.
-// - The find() has a 10-second time limit; the first batch should run "instantly", and the second
-// batch takes only ~2 seconds, so both the query and getmore should not hit the time limit.
-//
-
-t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{},{},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
- }
- return true;
-}});
-cursor.batchSize(3);
-cursor.maxTimeMS(10*1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to not hit the time limit");
-
-//
-// Many-batch positive test for getmore:
-// - Issue a many-batch find() with a 6-second time limit where the results take 10 seconds to
-// generate; one of the later getmore ops should be aborted.
-//
-
-t.drop();
-for (var i=0; i<5; i++) {
- t.insert([{},{},{slow:true}]);
-}
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
- }
- return true;
-}});
-cursor.batchSize(3);
-cursor.maxTimeMS(6*1000);
-assert.throws(function() { cursor.itcount(); }, [], "expected find() to abort due to time limit");
-
-//
-// Many-batch negative test for getmore:
-// - Issue a many-batch find() with a 20-second time limit where the results take 10 seconds to
-// generate; the find() should not hit the time limit.
-//
-
-t.drop();
-for (var i=0; i<5; i++) {
- t.insert([{},{},{slow:true}]);
-}
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
- }
- return true;
-}});
-cursor.batchSize(3);
-cursor.maxTimeMS(20*1000);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected find() to not hit the time limit");
-
-//
-// Simple positive test for commands: a ~300ms command with a 100ms time limit should be aborted.
-//
-
-t.drop();
-res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 100});
-assert(res.ok == 0 && res.code == exceededTimeLimit,
- "expected sleep command to abort due to time limit, ok=" + res.ok + ", code=" + res.code);
-
-//
-// Simple negative test for commands: a ~300ms command with a 10s time limit should not hit the
-// time limit.
-//
-
-t.drop();
-res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 10*1000});
-assert(res.ok == 1,
- "expected sleep command to not hit the time limit, ok=" + res.ok + ", code=" + res.code);
-
-//
-// Tests for input validation.
-//
-
-t.drop();
-t.insert({});
-
-// Verify lower boundary for acceptable input (0 is acceptable, 1 isn't).
-
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(0).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(0)).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(0)).itcount(); });
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: 0}).ok);
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(0)}).ok);
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(0)}).ok);
-
-assert.throws.automsg(function() { t.find().maxTimeMS(-1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(-1)).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(-1)).itcount(); });
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -1}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(-1)}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(-1)}).ok);
-
-// Verify upper boundary for acceptable input (2^31-1 is acceptable, 2^31 isn't).
-
-var maxValue = Math.pow(2,31)-1;
-
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(maxValue).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue)).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue)).itcount(); });
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue}).ok);
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue)}).ok);
-assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue)}).ok);
-
-assert.throws.automsg(function() { t.find().maxTimeMS(maxValue+1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue+1)).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue+1)).itcount(); });
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue+1}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue+1)}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue+1)}).ok);
-
-// Verify invalid values are rejected.
-assert.throws.automsg(function() { t.find().maxTimeMS(0.1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(-0.1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS().itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS("").itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(true).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS({}).itcount(); });
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: 0.1}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -0.1}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: undefined}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: ""}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: true}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: {}}).ok);
-
-// Verify that the maxTimeMS command argument can be sent with $query-wrapped commands.
-cursor = t.getDB().$cmd.find({ping: 1, maxTimeMS: 0}).limit(-1);
-cursor._ensureSpecial();
-assert.eq(1, cursor.next().ok);
-
-// Verify that the server rejects invalid command argument $maxTimeMS.
-cursor = t.getDB().$cmd.find({ping: 1, $maxTimeMS: 0}).limit(-1);
-cursor._ensureSpecial();
-assert.eq(0, cursor.next().ok);
-
-// Verify that the $maxTimeMS query option can't be sent with $query-wrapped commands.
-cursor = t.getDB().$cmd.find({ping: 1}).limit(-1).maxTimeMS(0);
-cursor._ensureSpecial();
-assert.eq(0, cursor.next().ok);
-
-//
-// Tests for fail points maxTimeAlwaysTimeOut and maxTimeNeverTimeOut.
-//
-
-// maxTimeAlwaysTimeOut positive test for command.
-t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-res = t.getDB().runCommand({ping: 1, maxTimeMS: 10*1000});
-assert(res.ok == 0 && res.code == exceededTimeLimit,
- "expected command to trigger maxTimeAlwaysTimeOut fail point, ok=" + res.ok + ", code="
- + res.code);
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
-
-// maxTimeNeverTimeOut positive test for command.
-t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
-res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 100});
-assert(res.ok == 1,
- "expected command to trigger maxTimeNeverTimeOut fail point, ok=" + res.ok + ", code="
- + res.code);
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
-
-// maxTimeAlwaysTimeOut positive test for query.
-t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-assert.throws(function() { t.find().maxTimeMS(10*1000).itcount(); },
- [],
- "expected query to trigger maxTimeAlwaysTimeOut fail point");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
-
-// maxTimeNeverTimeOut positive test for query.
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
-t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
-cursor.maxTimeMS(100);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected query to trigger maxTimeNeverTimeOut fail point");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
-
-// maxTimeAlwaysTimeOut positive test for getmore.
-t.drop();
-t.insert([{},{},{}]);
-cursor = t.find().maxTimeMS(10*1000).batchSize(2);
-assert.doesNotThrow.automsg(function() { cursor.next(); cursor.next(); });
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-assert.throws(function() { cursor.next(); },
- [],
- "expected getmore to trigger maxTimeAlwaysTimeOut fail point");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
-
-// maxTimeNeverTimeOut positive test for getmore.
-t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
- }
- return true;
-}});
-cursor.batchSize(3);
-cursor.maxTimeMS(2*1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to trigger maxTimeNeverTimeOut fail point");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
-
-//
-// Test that maxTimeMS is accepted by commands that have an option whitelist.
-//
-
-// "aggregate" command.
-res = t.runCommand("aggregate", {pipeline: [], maxTimeMS: 60*1000});
-assert(res.ok == 1,
- "expected aggregate with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
-
-// "collMod" command.
-res = t.runCommand("collMod", {usePowerOf2Sizes: true, maxTimeMS: 60*1000});
-assert(res.ok == 1,
- "expected collmod with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
diff --git a/jstests/maxscan.js b/jstests/maxscan.js
deleted file mode 100644
index 3d15b26f638..00000000000
--- a/jstests/maxscan.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.maxscan;
-t.drop();
-
-N = 100;
-for ( i=0; i<N; i++ ){
- t.insert( { _id : i , x : i % 10 } );
-}
-
-assert.eq( N , t.find().itcount() , "A" )
-assert.eq( 50 , t.find()._addSpecial( "$maxScan" , 50 ).itcount() , "B" )
-
-assert.eq( 10 , t.find( { x : 2 } ).itcount() , "C" )
-assert.eq( 5 , t.find( { x : 2 } )._addSpecial( "$maxScan" , 50 ).itcount() , "D" )
-
-t.ensureIndex({x: 1});
-assert.eq( 10, t.find( { x : 2 } ).hint({x:1})._addSpecial( "$maxScan" , N ).itcount() , "E" )
-assert.eq( 0, t.find( { x : 2 } ).hint({x:1})._addSpecial( "$maxScan" , 1 ).itcount() , "E" )
diff --git a/jstests/minmax.js b/jstests/minmax.js
deleted file mode 100644
index d84a6e42855..00000000000
--- a/jstests/minmax.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// test min / max query parameters
-
-addData = function() {
- t.save( { a: 1, b: 1 } );
- t.save( { a: 1, b: 2 } );
- t.save( { a: 2, b: 1 } );
- t.save( { a: 2, b: 2 } );
-}
-
-t = db.jstests_minmax;
-t.drop();
-t.ensureIndex( { a: 1, b: 1 } );
-addData();
-
-printjson( t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray() );
-assert.eq( 1, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray().length );
-assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1.5 } ).toArray().length );
-assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 2 } ).toArray().length );
-
-// just one bound
-assert.eq( 3, t.find().min( { a: 1, b: 2 } ).toArray().length );
-assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).toArray().length );
-assert.eq( 3, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: 1 } ).toArray().length );
-assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).hint( { a: 1, b: 1 } ).toArray().length );
-
-t.drop();
-t.ensureIndex( { a: 1, b: -1 } );
-addData();
-assert.eq( 4, t.find().min( { a: 1, b: 2 } ).toArray().length );
-assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).toArray().length );
-assert.eq( 1, t.find().min( { a: 2, b: 1 } ).toArray().length );
-assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).toArray().length );
-assert.eq( 4, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 1, t.find().min( { a: 2, b: 1 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
-
-// hint doesn't match
-assert.throws( function() { t.find().min( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
-assert.throws( function() { t.find().min( { a: 1, b: 1 } ).max( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
-assert.throws( function() { t.find().min( { b: 1 } ).max( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray() } );
-assert.throws( function() { t.find().min( { a: 1 } ).hint( { $natural: 1 } ).toArray() } );
-assert.throws( function() { t.find().max( { a: 1 } ).hint( { $natural: 1 } ).toArray() } );
-
-// Reverse direction scan of the a:1 index between a:6 (inclusive) and a:3 (exclusive).
-t.drop();
-t.ensureIndex( { a:1 } );
-for( i = 0; i < 10; ++i ) {
- t.save( { _id:i, a:i } );
-}
-if ( 0 ) { // SERVER-3766
-reverseResult = t.find().min( { a:6 } ).max( { a:3 } ).sort( { a:-1 } ).hint( { a:1 } ).toArray();
-assert.eq( [ { _id:6, a:6 }, { _id:5, a:5 }, { _id:4, a:4 } ], reverseResult );
-}
diff --git a/jstests/mod1.js b/jstests/mod1.js
deleted file mode 100644
index 46e3482bc72..00000000000
--- a/jstests/mod1.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-t = db.mod1;
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 11 } );
-t.save( { a : 20 } );
-t.save( { a : "asd" } );
-t.save( { a : "adasdas" } );
-
-assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "A1" );
-assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "A2" );
-assert.eq( 6 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "A3" );
-
-t.ensureIndex( { a : 1 } );
-
-assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "B1" );
-assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "B2" );
-
-assert.eq( 1 , t.find( "this.a % 10 == 0" ).itcount() , "B3" );
-assert.eq( 1 , t.find( { a : { $mod : [ 10 , 0 ] } } ).itcount() , "B4" );
-assert.eq( 4 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "B5" );
-
-assert.eq( 1, t.find( { a: { $gt: 5, $mod : [ 10, 1 ] } } ).itcount() ); \ No newline at end of file
diff --git a/jstests/mr1.js b/jstests/mr1.js
deleted file mode 100644
index 33390a6187a..00000000000
--- a/jstests/mr1.js
+++ /dev/null
@@ -1,184 +0,0 @@
-
-t = db.mr1;
-t.drop();
-
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
-
-emit = printjson;
-
-function d( x ){
- printjson( x );
-}
-
-ks = "_id";
-if ( db.version() == "1.1.1" )
- ks = "key";
-
-
-m = function(){
- this.tags.forEach(
- function(z){
- emit( z , { count : 1 } );
- }
- );
-};
-
-m2 = function(){
- for ( var i=0; i<this.tags.length; i++ ){
- emit( this.tags[i] , 1 );
- }
-};
-
-
-r = function( key , values ){
- var total = 0;
- for ( var i=0; i<values.length; i++ ){
- total += values[i].count;
- }
- return { count : total };
-};
-
-r2 = function( key , values ){
- var total = 0;
- for ( var i=0; i<values.length; i++ ){
- total += values[i];
- }
- return total;
-};
-
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-d( res );
-if ( ks == "_id" ) assert( res.ok , "not ok" );
-assert.eq( 4 , res.counts.input , "A" );
-x = db[res.result];
-
-assert.eq( 3 , x.find().count() , "B" );
-x.find().forEach( d );
-z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-d( z );
-assert.eq( 3 , Object.keySet( z ).length , "C" );
-assert.eq( 2 , z.a , "D" );
-assert.eq( 3 , z.b , "E" );
-assert.eq( 3 , z.c , "F" );
-x.drop();
-
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
-d( res );
-assert.eq( 2 , res.counts.input , "B" );
-x = db[res.result];
-z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-assert.eq( 1 , z.a , "C1" );
-assert.eq( 1 , z.b , "C2" );
-assert.eq( 2 , z.c , "C3" );
-x.drop();
-
-res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
-d( res );
-assert.eq( 2 , res.counts.input , "B" );
-x = db[res.result];
-z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value; } );
-assert.eq( 1 , z.a , "C1z" );
-assert.eq( 1 , z.b , "C2z" );
-assert.eq( 2 , z.c , "C3z" );
-x.drop();
-
-res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r , query : { x : { "$gt" : 2 } } } );
-d( res );
-assert.eq( 2 , res.counts.input , "B2" );
-assert.eq( "mr1_foo" , res.result , "B2-c" );
-x = db[res.result];
-z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-assert.eq( 1 , z.a , "C1a" );
-assert.eq( 1 , z.b , "C2a" );
-assert.eq( 2 , z.c , "C3a" );
-x.drop();
-
-for ( i=5; i<1000; i++ ){
- t.save( { x : i , tags : [ "b" , "d" ] } );
-}
-
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-d( res );
-assert.eq( 999 , res.counts.input , "Z1" );
-x = db[res.result];
-x.find().forEach( d )
-assert.eq( 4 , x.find().count() , "Z2" );
-assert.eq( "a,b,c,d" , x.distinct( ks ) , "Z3" );
-
-function getk( k ){
- var o = {};
- o[ks] = k;
- return x.findOne( o );
-}
-
-assert.eq( 2 , getk( "a" ).value.count , "ZA" );
-assert.eq( 998 , getk( "b" ).value.count , "ZB" );
-assert.eq( 3 , getk( "c" ).value.count , "ZC" );
-assert.eq( 995 , getk( "d" ).value.count , "ZD" );
-x.drop();
-
-if ( true ){
- printjson( db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , verbose : true , out : "mr1_out" } ) );
-}
-
-print( "t1: " + Date.timeFunc(
- function(){
- var out = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
- if ( ks == "_id" ) assert( out.ok , "XXX : " + tojson( out ) );
- db[out.result].drop();
- } , 10 ) + " (~500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } , 10 ) );
-
-
-
-// test doesn't exist
-res = db.runCommand( { mapreduce : "lasjdlasjdlasjdjasldjalsdj12e" , map : m , reduce : r , out : "mr1_out" } );
-assert( ! res.ok , "should be not ok" );
-
-if ( true ){
- correct = {};
-
- for ( i=0; i<20000; i++ ){
- k = "Z" + i % 10000;
- if ( correct[k] )
- correct[k]++;
- else
- correct[k] = 1;
- t.save( { x : i , tags : [ k ] } );
- }
-
- res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r } );
- d( res );
- print( "t2: " + res.timeMillis + " (~3500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } ) );
- x = db[res.result];
- z = {};
- x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
- for ( zz in z ){
- if ( zz.indexOf( "Z" ) == 0 ){
- assert.eq( correct[zz] , z[zz] , "ZZ : " + zz );
- }
- }
- x.drop();
-
- res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m2 , reduce : r2 , out : "mr1_out" } );
- d(res);
- print( "t3: " + res.timeMillis + " (~3500 on 2.8ghz)" );
-
- res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , out : { inline : true } } );
- print( "t4: " + res.timeMillis );
-
-}
-
-
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-assert( res.ok , "should be ok" );
-
-t.drop();
-t1 = db.mr1_out;
-t1.drop(); \ No newline at end of file
diff --git a/jstests/mr2.js b/jstests/mr2.js
deleted file mode 100644
index c15d8abdfae..00000000000
--- a/jstests/mr2.js
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-t = db.mr2;
-t.drop();
-
-t.save( { comments : [ { who : "a" , txt : "asdasdasd" } ,
- { who : "b" , txt : "asdasdasdasdasdasdas" } ] } );
-
-t.save( { comments : [ { who : "b" , txt : "asdasdasdaaa" } ,
- { who : "c" , txt : "asdasdasdaasdasdas" } ] } );
-
-
-
-function m(){
- for ( var i=0; i<this.comments.length; i++ ){
- var c = this.comments[i];
- emit( c.who , { totalSize : c.txt.length , num : 1 } );
- }
-}
-
-function r( who , values ){
- var n = { totalSize : 0 , num : 0 };
- for ( var i=0; i<values.length; i++ ){
- n.totalSize += values[i].totalSize;
- n.num += values[i].num;
- }
- return n;
-}
-
-function reformat( r ){
- var x = {};
- var cursor;
- if ( r.results )
- cursor = r.results;
- else
- cursor = r.find();
- cursor.forEach(
- function(z){
- x[z._id] = z.value;
- }
- );
- return x;
-}
-
-function f( who , res ){
- res.avg = res.totalSize / res.num;
- return res;
-}
-
-res = t.mapReduce( m , r , { finalize : f , out : "mr2_out" } );
-printjson( res )
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "A1" );
-assert.eq( 16 , x.b.avg , "A2" );
-assert.eq( 18 , x.c.avg , "A3" );
-res.drop();
-
-res = t.mapReduce( m , r , { finalize : f , out : { inline : 1 } } );
-printjson( res )
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "B1" );
-assert.eq( 16 , x.b.avg , "B2" );
-assert.eq( 18 , x.c.avg , "B3" );
-res.drop();
-assert( ! ( "result" in res ) , "B4" )
-
-res = t.mapReduce( m , r , { finalize : f , out : "mr2_out", jsMode: true } );
-printjson( res )
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "A1" );
-assert.eq( 16 , x.b.avg , "A2" );
-assert.eq( 18 , x.c.avg , "A3" );
-res.drop();
-
-res = t.mapReduce( m , r , { finalize : f , out : { inline : 5 }, jsMode: true } );
-printjson( res )
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "B1" );
-assert.eq( 16 , x.b.avg , "B2" );
-assert.eq( 18 , x.c.avg , "B3" );
-res.drop();
-assert( ! ( "result" in res ) , "B4" )
-
diff --git a/jstests/mr3.js b/jstests/mr3.js
deleted file mode 100644
index 3b0a918a4f3..00000000000
--- a/jstests/mr3.js
+++ /dev/null
@@ -1,73 +0,0 @@
-
-t = db.mr3;
-t.drop();
-
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
-
-m = function( n , x ){
- x = x || 1;
- this.tags.forEach(
- function(z){
- for ( var i=0; i<x; i++ )
- emit( z , { count : n || 1 } );
- }
- );
-};
-
-r = function( key , values ){
- var total = 0;
- for ( var i=0; i<values.length; i++ ){
- total += values[i].count;
- }
- return { count : total };
-};
-
-res = t.mapReduce( m , r , { out : "mr3_out" } );
-z = res.convertToSingleObject()
-
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 2 , z.a.count , "A2" );
-assert.eq( 3 , z.b.count , "A3" );
-assert.eq( 3 , z.c.count , "A4" );
-
-res.drop();
-
-res = t.mapReduce( m , r , { out : "mr3_out" , mapparams : [ 2 , 2 ] } );
-z = res.convertToSingleObject()
-
-assert.eq( 3 , Object.keySet( z ).length , "B1" );
-assert.eq( 8 , z.a.count , "B2" );
-assert.eq( 12 , z.b.count , "B3" );
-assert.eq( 12 , z.c.count , "B4" );
-
-res.drop();
-
-// -- just some random tests
-
-realm = m;
-
-m = function(){
- emit( this._id , 1 );
-}
-res = t.mapReduce( m , r , { out : "mr3_out" } );
-res.drop();
-
-m = function(){
- emit( this._id , this.xzz.a );
-}
-
-before = db.getCollectionNames().length;
-assert.throws( function(){ t.mapReduce( m , r , { out : "mr3_out" } ); } );
-assert.eq( before , db.getCollectionNames().length , "after throw crap" );
-
-
-m = realm;
-r = function( k , v ){
- return v.x.x.x;
-}
-before = db.getCollectionNames().length;
-assert.throws( function(){ t.mapReduce( m , r , "mr3_out" ) } )
-assert.eq( before , db.getCollectionNames().length , "after throw crap" );
diff --git a/jstests/mr4.js b/jstests/mr4.js
deleted file mode 100644
index 78c8bce8953..00000000000
--- a/jstests/mr4.js
+++ /dev/null
@@ -1,45 +0,0 @@
-
-t = db.mr4;
-t.drop();
-
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
-
-m = function(){
- this.tags.forEach(
- function(z){
- emit( z , { count : xx } );
- }
- );
-};
-
-r = function( key , values ){
- var total = 0;
- for ( var i=0; i<values.length; i++ ){
- total += values[i].count;
- }
- return { count : total };
-};
-
-res = t.mapReduce( m , r , { out : "mr4_out" , scope : { xx : 1 } } );
-z = res.convertToSingleObject()
-
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 2 , z.a.count , "A2" );
-assert.eq( 3 , z.b.count , "A3" );
-assert.eq( 3 , z.c.count , "A4" );
-
-res.drop();
-
-
-res = t.mapReduce( m , r , { scope : { xx : 2 } , out : "mr4_out" } );
-z = res.convertToSingleObject()
-
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 4 , z.a.count , "A2" );
-assert.eq( 6 , z.b.count , "A3" );
-assert.eq( 6 , z.c.count , "A4" );
-
-res.drop();
diff --git a/jstests/mr5.js b/jstests/mr5.js
deleted file mode 100644
index 50a63d1d55b..00000000000
--- a/jstests/mr5.js
+++ /dev/null
@@ -1,58 +0,0 @@
-
-t = db.mr5;
-t.drop();
-
-t.save( { "partner" : 1, "visits" : 9 } )
-t.save( { "partner" : 2, "visits" : 9 } )
-t.save( { "partner" : 1, "visits" : 11 } )
-t.save( { "partner" : 1, "visits" : 30 } )
-t.save( { "partner" : 2, "visits" : 41 } )
-t.save( { "partner" : 2, "visits" : 41 } )
-
-m = function(){
- emit( this.partner , { stats : [ this.visits ] } )
-}
-
-r = function( k , v ){
- var stats = [];
- var total = 0;
- for ( var i=0; i<v.length; i++ ){
- for ( var j in v[i].stats ) {
- stats.push( v[i].stats[j] )
- total += v[i].stats[j];
- }
- }
- return { stats : stats , total : total }
-}
-
-res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
-
-z = res.convertToSingleObject()
-assert.eq( 2 , Object.keySet( z ).length , "A1" )
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" )
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" )
-
-
-res.drop()
-
-m = function(){
- var x = "partner";
- var y = "visits";
- emit( this[x] , { stats : [ this[y] ] } )
-}
-
-
-
-res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
-
-z = res.convertToSingleObject()
-assert.eq( 2 , Object.keySet( z ).length , "B1" )
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" )
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" )
-
-
-res.drop()
-
-
diff --git a/jstests/mr_bigobject.js b/jstests/mr_bigobject.js
deleted file mode 100644
index 97195e2542e..00000000000
--- a/jstests/mr_bigobject.js
+++ /dev/null
@@ -1,46 +0,0 @@
-
-t = db.mr_bigobject
-t.drop()
-
-// v8 requires large start string, otherwise UTF16
-var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
-var s = large;
-while ( s.length < ( 6 * 1024 * 1024 ) ){
- s += large;
-}
-
-for ( i=0; i<5; i++ )
- t.insert( { _id : i , s : s } )
-
-m = function(){
- emit( 1 , this.s + this.s );
-}
-
-r = function( k , v ){
- return 1;
-}
-
-assert.throws( function(){ r = t.mapReduce( m , r , "mr_bigobject_out" ); } , null , "emit should fail" )
-
-
-m = function(){
- emit( 1 , this.s );
-}
-
-assert.eq( { 1 : 1 } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" )
-
-r = function( k , v ){
- total = 0;
- for ( var i=0; i<v.length; i++ ){
- var x = v[i];
- if ( typeof( x ) == "number" )
- total += x
- else
- total += x.length;
- }
- return total;
-}
-
-assert.eq( { 1 : t.count() * s.length } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" )
-
-t.drop()
diff --git a/jstests/mr_comments.js b/jstests/mr_comments.js
deleted file mode 100644
index f6a06994f55..00000000000
--- a/jstests/mr_comments.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.mr_comments
-t.drop()
-
-t.insert( { foo : 1 } )
-t.insert( { foo : 1 } )
-t.insert( { foo : 2 } )
-
-res = db.runCommand(
- { mapreduce : "mr_comments",
- map : "// This will fail\n\n // Emit some stuff\n emit(this.foo, 1)\n",
- reduce : function(key, values){
- return Array.sum(values);
- },
- out: "mr_comments_out"
- });
-assert.eq( 3 , res.counts.emit )
-
-res = db.runCommand(
- { mapreduce : "mr_comments",
- map : "// This will fail\nfunction(){\n // Emit some stuff\n emit(this.foo, 1)\n}\n",
- reduce : function(key, values){
- return Array.sum(values);
- },
- out: "mr_comments_out"
- });
-
-assert.eq( 3 , res.counts.emit )
diff --git a/jstests/mr_drop.js b/jstests/mr_drop.js
deleted file mode 100644
index 3fdd99460dd..00000000000
--- a/jstests/mr_drop.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// Drop a collection while a map/reduce job is running against it. SERVER-6757
-
-// Use a different DB to prevent other tests from breaking this race-y test when run in parallel.
-// Formerly another test could take out a write lock on the DB, preventing map/reduce and drop()
-// from running until the other action completed. When the other action completed, the drop thread
-// was finished sleeping and ready to drop and would drop before map/reduce was able to start.
-t = db.getSiblingDB('MrDrop').jstests_mr_drop;
-t.drop();
-
-Random.setRandomSeed();
-
-// Set sleep times for different stages of the map/reduce job. The collection drop will occur
-// during different stages of map/reduce depending on these sleep values.
-mapSleep = Random.randInt( 4 );
-reduceSleep = Random.randInt( 4 );
-finalizeSleep = Random.randInt( 4 );
-
-// Insert some documents.
-for( i = 0; i < 10000; ++i ) {
- t.save( { key:parseInt( i / 2 ),
- mapSleep:mapSleep,
- reduceSleep:reduceSleep,
- finalizeSleep:finalizeSleep } );
-}
-db.getLastError();
-
-// Schedule a collection drop two seconds in the future.
-s = startParallelShell( "sleep( 2000 ); db.getSiblingDB('MrDrop').jstests_mr_drop.drop();" );
-
-// Run the map/reduce job. Check for command failure internally. The job succeeds even if the
-// source collection is dropped in progress.
-t.mapReduce( function() { sleep( this.mapSleep ); emit( this.key, this ); },
- function( key, vals ) { sleep( vals[ 0 ].reduceSleep ); return vals[ 0 ]; },
- { finalize:function( key, value ) { sleep( value.finalizeSleep ); return value; },
- out:'jstests_mr_drop_out' }
- );
-
-// Wait for the parallel shell to finish.
-s();
-
-// Ensure the server is still alive. Under SERVER-6757 the server can crash.
-assert( !db.getLastError() );
diff --git a/jstests/mr_errorhandling.js b/jstests/mr_errorhandling.js
deleted file mode 100644
index c4e1137b4c6..00000000000
--- a/jstests/mr_errorhandling.js
+++ /dev/null
@@ -1,49 +0,0 @@
-
-t = db.mr_errorhandling;
-t.drop();
-
-t.save( { a : [ 1 , 2 , 3 ] } )
-t.save( { a : [ 2 , 3 , 4 ] } )
-
-m_good = function(){
- for ( var i=0; i<this.a.length; i++ ){
- emit( this.a[i] , 1 );
- }
-}
-
-m_bad = function(){
- for ( var i=0; i<this.a.length; i++ ){
- emit( this.a[i] );
- }
-}
-
-r = function( k , v ){
- var total = 0;
- for ( var i=0; i<v.length; i++ )
- total += v[i];
- return total;
-}
-
-res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
-assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
-res.drop()
-
-res = null;
-
-theerror = null;
-try {
- res = t.mapReduce( m_bad , r , "mr_errorhandling_out" );
-}
-catch ( e ){
- theerror = e.toString();
-}
-assert.isnull( res , "B1" );
-assert( theerror , "B2" );
-assert( theerror.indexOf( "emit" ) >= 0 , "B3" );
-
-// test things are still in an ok state
-res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
-assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
-res.drop()
-
-assert.throws( function(){ t.mapReduce( m_good , r , { out : "xxx" , query : "foo" } ); } )
diff --git a/jstests/mr_index.js b/jstests/mr_index.js
deleted file mode 100644
index 521d44d29f0..00000000000
--- a/jstests/mr_index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-
-t = db.mr_index
-t.drop()
-
-outName = "mr_index_out"
-out = db[outName]
-out.drop()
-
-t.insert( { tags : [ 1 ] } )
-t.insert( { tags : [ 1 , 2 ] } )
-t.insert( { tags : [ 1 , 2 , 3 ] } )
-t.insert( { tags : [ 3 ] } )
-t.insert( { tags : [ 2 , 3 ] } )
-t.insert( { tags : [ 2 , 3 ] } )
-t.insert( { tags : [ 1 , 2 ] } )
-
-m = function(){
- for ( i=0; i<this.tags.length; i++ )
- emit( this.tags[i] , 1 );
-}
-
-r = function( k , vs ){
- return Array.sum( vs );
-}
-
-ex = function(){
- return out.find().sort( { value : 1 } ).explain()
-}
-
-res = t.mapReduce( m , r , { out : outName } )
-
-assert.eq( "BasicCursor" , ex().cursor , "A1" )
-out.ensureIndex( { value : 1 } )
-assert.eq( "BtreeCursor value_1" , ex().cursor , "A2" )
-assert.eq( 3 , ex().n , "A3" )
-
-res = t.mapReduce( m , r , { out : outName } )
-
-assert.eq( "BtreeCursor value_1" , ex().cursor , "B1" )
-assert.eq( 3 , ex().n , "B2" )
-res.drop()
-
-
diff --git a/jstests/mr_index2.js b/jstests/mr_index2.js
deleted file mode 100644
index a8d845ed69d..00000000000
--- a/jstests/mr_index2.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.mr_index2;
-t.drop()
-
-t.save( { arr : [1, 2] } )
-
-map = function() { emit(this._id, 1) }
-reduce = function(k,vals) { return Array.sum( vals ); }
-
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : {} })
-assert.eq( 1 ,res.counts.input , "A" )
-res.drop()
-
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } })
-assert.eq( 1 ,res.counts.input , "B" )
-res.drop()
-
-t.ensureIndex({arr:1})
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } })
-assert.eq( 1 ,res.counts.input , "C" )
-res.drop();
-
diff --git a/jstests/mr_index3.js b/jstests/mr_index3.js
deleted file mode 100644
index 0607cc8aa84..00000000000
--- a/jstests/mr_index3.js
+++ /dev/null
@@ -1,50 +0,0 @@
-
-t = db.mr_index3
-t.drop();
-
-t.insert( { _id : 1, name : 'name1', tags : ['dog', 'cat'] } );
-t.insert( { _id : 2, name : 'name2', tags : ['cat'] } );
-t.insert( { _id : 3, name : 'name3', tags : ['mouse', 'cat', 'dog'] } );
-t.insert( { _id : 4, name : 'name4', tags : [] } );
-
-m = function(){
- for ( var i=0; i<this.tags.length; i++ )
- emit( this.tags[i] , 1 )
-};
-
-r = function( key , values ){
- return Array.sum( values );
-};
-
-a1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results
-a2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results
-a3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results
-
-assert.eq( [
- {
- "_id" : "cat",
- "value" : 3
- },
- {
- "_id" : "dog",
- "value" : 2
- },
- {
- "_id" : "mouse",
- "value" : 1
- }
-] , a1 , "A1" );
-assert.eq( [ { "_id" : "cat", "value" : 1 }, { "_id" : "dog", "value" : 1 } ] , a2 , "A2" )
-assert.eq( a1 , a3 , "A3" )
-
-t.ensureIndex({name:1, tags:1});
-
-b1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results
-b2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results
-b3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results
-
-assert.eq( a1 , b1 , "AB1" )
-assert.eq( a2 , b2 , "AB2" )
-assert.eq( a3 , b3 , "AB3" )
-
-
diff --git a/jstests/mr_killop.js b/jstests/mr_killop.js
deleted file mode 100644
index cee812fa730..00000000000
--- a/jstests/mr_killop.js
+++ /dev/null
@@ -1,156 +0,0 @@
-// Test killop applied to m/r operations and child ops of m/r operations.
-
-t = db.jstests_mr_killop;
-t.drop();
-t2 = db.jstests_mr_killop_out;
-t2.drop();
-
-function debug( x ) {
-// printjson( x );
-}
-
-/** @return op code for map reduce op created by spawned shell, or that op's child */
-function op( childLoop ) {
- p = db.currentOp().inprog;
- debug( p );
- for ( var i in p ) {
- var o = p[ i ];
- // Identify a map/reduce or where distinct operation by its collection, whether or not
- // it is currently active.
- if ( childLoop ) {
- if ( ( o.active || o.waitingForLock ) &&
- o.query &&
- o.query.query &&
- o.query.query.$where &&
- o.query.distinct == "jstests_mr_killop" ) {
- return o.opid;
- }
- }
- else {
- if ( ( o.active || o.waitingForLock ) &&
- o.query &&
- o.query.mapreduce &&
- o.query.mapreduce == "jstests_mr_killop" ) {
- return o.opid;
- }
- }
- }
- return -1;
-}
-
-/**
-* Run one map reduce with the specified parameters in a parallel shell, kill the
-* map reduce op or its child op with killOp, and wait for the map reduce op to
-* terminate.
-* @param childLoop - if true, a distinct $where op is killed rather than the map reduce op.
-* This is necessay for a child distinct $where of a map reduce op because child
-* ops currently mask parent ops in currentOp.
-*/
-function testOne( map, reduce, finalize, scope, childLoop, wait ) {
- debug( "testOne - map = " + tojson( map ) + "; reduce = " + tojson( reduce ) +
- "; finalize = " + tojson( finalize ) + "; scope = " + tojson( scope ) +
- "; childLoop = " + childLoop + "; wait = " + wait );
-
- t.drop();
- t2.drop();
- // Ensure we have 2 documents for the reduce to run
- t.save( {a:1} );
- t.save( {a:1} );
- db.getLastError();
-
- spec = {
- mapreduce:"jstests_mr_killop",
- out:"jstests_mr_killop_out",
- map: map,
- reduce: reduce
- };
- if ( finalize ) {
- spec[ "finalize" ] = finalize;
- }
- if ( scope ) {
- spec[ "scope" ] = scope;
- }
-
- // Windows shell strips all double quotes from command line, so use
- // single quotes.
- stringifiedSpec = tojson( spec ).toString().replace( /\n/g, ' ' ).replace( /\"/g, "\'" );
-
- // The assert below won't be caught by this test script, but it will cause error messages
- // to be printed.
- s = startParallelShell( "assert.commandWorked( db.runCommand( " + stringifiedSpec + " ) );" );
-
- if ( wait ) {
- sleep( 2000 );
- }
-
- o = null;
- assert.soon( function() { o = op( childLoop ); return o != -1 } );
-
- res = db.killOp( o );
- debug( "did kill : " + tojson( res ) );
-
- // When the map reduce op is killed, the spawned shell will exit
- s();
- debug( "parallel shell completed" );
-
- assert.eq( -1, op( childLoop ) );
-}
-
-/** Test using wait and non wait modes */
-function test( map, reduce, finalize, scope, childLoop ) {
- debug( " Non wait mode" );
- testOne( map, reduce, finalize, scope, childLoop, false );
-
- debug( " Wait mode" );
- testOne( map, reduce, finalize, scope, childLoop, true );
-}
-
-/** Test looping in map and reduce functions */
-function runMRTests( loop, childLoop ) {
- debug( " Running MR test - loop map function. no scope " );
- test( loop, // map
- function( k, v ) { return v[ 0 ]; }, // reduce
- null, // finalize
- null, // scope
- childLoop );
-
- debug( " Running MR test - loop reduce function " );
- test( function() { emit( this.a, 1 ); }, // map
- loop, // reduce
- null, // finalize
- null, // scope
- childLoop );
-
- debug( " Running finalization test - loop map function. with scope " );
- test( function() { loop(); }, // map
- function( k, v ) { return v[ 0 ] }, // reduce
- null, // finalize
- { loop: loop }, // scope
- childLoop );
-}
-
-/** Test looping in finalize function */
-function runFinalizeTests( loop, childLoop ) {
- debug( " Running finalization test - no scope " );
- test( function() { emit( this.a, 1 ); }, // map
- function( k, v ) { return v[ 0 ] }, // reduce
- loop, // finalize
- null, // scope
- childLoop );
-
- debug( " Running finalization test - with scope " );
- test( function() { emit( this.a, 1 ); }, // map
- function( k, v ) { return v[ 0 ] }, // reduce
- function( a, b ) { loop() }, // finalize
- { loop: loop }, // scope
- childLoop );
-}
-
-// Run inside server. No access to debug().
-var loop = function() {
- while( 1 ) {
- sleep( 1000 );
- }
-}
-runMRTests( loop, false );
-runFinalizeTests( loop, false );
diff --git a/jstests/mr_merge.js b/jstests/mr_merge.js
deleted file mode 100644
index 9350c45f773..00000000000
--- a/jstests/mr_merge.js
+++ /dev/null
@@ -1,60 +0,0 @@
-
-t = db.mr_merge;
-t.drop();
-
-t.insert( { a : [ 1 , 2 ] } )
-t.insert( { a : [ 2 , 3 ] } )
-t.insert( { a : [ 3 , 4 ] } )
-
-outName = "mr_merge_out";
-out = db[outName];
-out.drop();
-
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
-r = function(k,vs){ return Array.sum( vs ); }
-
-function tos( o ){
- var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
- s += i + "_" + o[i];
- }
- return s;
-}
-
-
-res = t.mapReduce( m , r , { out : outName } )
-
-
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { a : [ 4 , 5 ] } )
-out.insert( { _id : 10 , value : "5" } )
-res = t.mapReduce( m , r , { out : outName } )
-
-expected["4"]++;
-expected["5"] = 1
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
-
-t.insert( { a : [ 5 , 6 ] } )
-out.insert( { _id : 10 , value : "5" } )
-res = t.mapReduce( m , r , { out : { merge : outName } } )
-
-expected["5"]++;
-expected["10"] = 5
-expected["6"] = 1
-
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
-
-// test that the nonAtomic output gives valid result
-t.insert( { a : [ 6 , 7 ] } )
-out.insert( { _id : 20 , value : "10" } )
-res = t.mapReduce( m , r , { out : { merge : outName, nonAtomic: true } } )
-
-expected["6"]++;
-expected["20"] = 10
-expected["7"] = 1
-
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "D" );
-
diff --git a/jstests/mr_merge2.js b/jstests/mr_merge2.js
deleted file mode 100644
index 520bbfdbc8e..00000000000
--- a/jstests/mr_merge2.js
+++ /dev/null
@@ -1,37 +0,0 @@
-
-t = db.mr_merge2;
-t.drop();
-
-t.insert( { a : [ 1 , 2 ] } )
-t.insert( { a : [ 2 , 3 ] } )
-t.insert( { a : [ 3 , 4 ] } )
-
-outName = "mr_merge2_out";
-out = db[outName];
-out.drop();
-
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
-r = function(k,vs){ return Array.sum( vs ); }
-
-function tos( o ){
- var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
- s += i + "_" + o[i] + "|";
- }
- return s;
-}
-
-
-outOptions = { out : { merge : outName } }
-
-res = t.mapReduce( m , r , outOptions )
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { a : [ 4 , 5 ] } )
-res = t.mapReduce( m , r , outOptions )
-expected["4"]++;
-expected["5"] = 1
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
-
diff --git a/jstests/mr_mutable_properties.js b/jstests/mr_mutable_properties.js
deleted file mode 100644
index 7c4442aab9e..00000000000
--- a/jstests/mr_mutable_properties.js
+++ /dev/null
@@ -1,62 +0,0 @@
-// See SERVER-9448
-// Test argument and receiver (aka 'this') objects and their children can be mutated
-// in Map, Reduce and Finalize functions
-
-var collection = db.mrMutableReceiver;
-collection.drop();
-collection.insert({a:1});
-
-var map = function() {
- // set property on receiver
- this.feed = {beef:1};
-
- // modify property on receiever
- this.a = {cake:1};
- emit(this._id, this.feed);
- emit(this._id, this.a);
-}
-
-var reduce = function(key, values) {
- // set property on receiver
- this.feed = {beat:1};
-
- // set property on key arg
- key.fed = {mochi:1};
-
- // push properties onto values array arg
- values.push(this.feed);
- values.push(key.fed);
-
- // modify each value in the (modified) array arg
- values.forEach(function(val) { val.mod = 1; });
- return {food:values};
-}
-
-var finalize = function(key, values) {
- // set property on receiver
- this.feed = {ice:1};
-
- // set property on key arg
- key.fed = {cream:1};
-
- // push properties onto values array arg
- printjson(values);
- values.food.push(this.feed);
- values.food.push(key.fed);
-
- // modify each value in the (modified) array arg
- values.food.forEach(function(val) { val.mod = 1; });
- return values;
-}
-
-var mr = collection.mapReduce(map, reduce, {finalize: finalize, out: {inline: 1}});
-printjson(mr);
-
-// verify mutated properties exist (order dictated by emit sequence and properties added)
-assert.eq(mr.results[0].value.food[0].beef, 1);
-assert.eq(mr.results[0].value.food[1].cake, 1);
-assert.eq(mr.results[0].value.food[2].beat, 1);
-assert.eq(mr.results[0].value.food[3].mochi, 1);
-assert.eq(mr.results[0].value.food[4].ice, 1);
-assert.eq(mr.results[0].value.food[5].cream, 1);
-mr.results[0].value.food.forEach(function(val) { assert.eq(val.mod, 1); });
diff --git a/jstests/mr_optim.js b/jstests/mr_optim.js
deleted file mode 100644
index 164839e2f2c..00000000000
--- a/jstests/mr_optim.js
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-t = db.mr_optim;
-t.drop();
-
-for (var i = 0; i < 1000; ++i) {
- t.save( {a: Math.random(1000), b: Math.random(10000)} );
-}
-
-function m(){
- emit(this._id, 13);
-}
-
-function r( key , values ){
- return "bad";
-}
-
-function reformat( r ){
- var x = {};
- var cursor;
- if ( r.results )
- cursor = r.results;
- else
- cursor = r.find();
- cursor.forEach(
- function(z){
- x[z._id] = z.value;
- }
- );
- return x;
-}
-
-res = t.mapReduce( m , r , { out : "mr_optim_out" } );
-printjson( res )
-x = reformat( res );
-for (var key in x) {
- assert.eq(x[key], 13, "value is not equal to original, maybe reduce has run");
-}
-res.drop();
-
-res = t.mapReduce( m , r , { out : { inline : 1 } } );
-//printjson( res )
-x2 = reformat( res );
-res.drop();
-
-assert.eq(x, x2, "object from inline and collection are not equal")
-
-t.drop(); \ No newline at end of file
diff --git a/jstests/mr_outreduce.js b/jstests/mr_outreduce.js
deleted file mode 100644
index 793ec252feb..00000000000
--- a/jstests/mr_outreduce.js
+++ /dev/null
@@ -1,49 +0,0 @@
-
-t = db.mr_outreduce;
-t.drop();
-
-t.insert( { _id : 1 , a : [ 1 , 2 ] } )
-t.insert( { _id : 2 , a : [ 2 , 3 ] } )
-t.insert( { _id : 3 , a : [ 3 , 4 ] } )
-
-outName = "mr_outreduce_out";
-out = db[outName];
-out.drop();
-
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
-r = function(k,vs){ return Array.sum( vs ); }
-
-function tos( o ){
- var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
- s += i + "_" + o[i] + "|"
- }
- return s;
-}
-
-
-res = t.mapReduce( m , r , { out : outName } )
-
-
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 }
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { _id : 4 , a : [ 4 , 5 ] } )
-out.insert( { _id : 10 , value : "5" } ) // this is a sentinal to make sure it wasn't killed
-res = t.mapReduce( m , r , { out : { reduce : outName } , query : { _id : { $gt : 3 } } } )
-
-expected["4"]++;
-expected["5"] = 1
-expected["10"] = 5
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
-
-t.insert( { _id : 5 , a : [ 5 , 6 ] } )
-out.insert( { _id : 20 , value : "10" } ) // this is a sentinal to make sure it wasn't killed
-res = t.mapReduce( m , r , { out : { reduce : outName, nonAtomic: true } , query : { _id : { $gt : 4 } } } )
-
-expected["5"]++;
-expected["6"] = 1
-expected["20"] = 10
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
-
diff --git a/jstests/mr_outreduce2.js b/jstests/mr_outreduce2.js
deleted file mode 100644
index fc273638577..00000000000
--- a/jstests/mr_outreduce2.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-normal = "mr_outreduce2"
-out = normal + "_out"
-
-t = db[normal]
-t.drop();
-
-db[out].drop()
-
-t.insert( { _id : 1 , x : 1 } )
-t.insert( { _id : 2 , x : 1 } )
-t.insert( { _id : 3 , x : 2 } )
-
-m = function(){ emit( this.x , 1 ); }
-r = function(k,v){ return Array.sum( v ); }
-
-res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 0 } } } )
-
-assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "A1" )
-assert.eq( 1 , db[out].findOne( { _id : 2 } ).value , "A2" )
-
-
-t.insert( { _id : 4 , x : 2 } )
-res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 3 } } , finalize : null } )
-
-assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "B1" )
-assert.eq( 2 , db[out].findOne( { _id : 2 } ).value , "B2" )
diff --git a/jstests/mr_replaceIntoDB.js b/jstests/mr_replaceIntoDB.js
deleted file mode 100644
index 217f40717e5..00000000000
--- a/jstests/mr_replaceIntoDB.js
+++ /dev/null
@@ -1,45 +0,0 @@
-
-t = db.mr_replace;
-t.drop();
-
-t.insert( { a : [ 1 , 2 ] } )
-t.insert( { a : [ 2 , 3 ] } )
-t.insert( { a : [ 3 , 4 ] } )
-
-outCollStr = "mr_replace_col";
-outDbStr = "mr_db";
-
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); }
-r = function(k,vs){ return Array.sum( vs ); }
-
-function tos( o ){
- var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
- s += i + "_" + o[i];
- }
- return s;
-}
-
-print("Testing mr replace into other DB")
-res = t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } )
-printjson( res );
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
-outDb = db.getMongo().getDB(outDbStr);
-outColl = outDb[outCollStr];
-str = tos( outColl.convertToSingleObject("value") )
-print("Received result: " + str);
-assert.eq( tos( expected ) , str , "A Received wrong result " + str );
-
-print("checking result field");
-assert.eq(res.result.collection, outCollStr, "B1 Wrong collection " + res.result.collection)
-assert.eq(res.result.db, outDbStr, "B2 Wrong db " + res.result.db)
-
-print("Replace again and check");
-outColl.save({_id: "5", value : 1});
-t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } )
-str = tos( outColl.convertToSingleObject("value") )
-print("Received result: " + str);
-assert.eq( tos( expected ) , str , "C1 Received wrong result " + str );
-
-
diff --git a/jstests/mr_sort.js b/jstests/mr_sort.js
deleted file mode 100644
index cc8db18e174..00000000000
--- a/jstests/mr_sort.js
+++ /dev/null
@@ -1,44 +0,0 @@
-
-t = db.mr_sort;
-t.drop()
-
-t.ensureIndex( { x : 1 } )
-
-t.insert( { x : 1 } )
-t.insert( { x : 10 } )
-t.insert( { x : 2 } )
-t.insert( { x : 9 } )
-t.insert( { x : 3 } )
-t.insert( { x : 8 } )
-t.insert( { x : 4 } )
-t.insert( { x : 7 } )
-t.insert( { x : 5 } )
-t.insert( { x : 6 } )
-
-m = function(){
- emit( "a" , this.x )
-}
-
-r = function( k , v ){
- return Array.sum( v )
-}
-
-
-res = t.mapReduce( m , r , "mr_sort_out " );
-x = res.convertToSingleObject();
-res.drop();
-assert.eq( { "a" : 55 } , x , "A1" )
-
-res = t.mapReduce( m , r , { out : "mr_sort_out" , query : { x : { $lt : 3 } } } )
-x = res.convertToSingleObject();
-res.drop();
-assert.eq( { "a" : 3 } , x , "A2" )
-
-res = t.mapReduce( m , r , { out : "mr_sort_out" , sort : { x : 1 } , limit : 2 } );
-x = res.convertToSingleObject();
-res.drop();
-assert.eq( { "a" : 3 } , x , "A3" )
-
-
-
-
diff --git a/jstests/mr_stored.js b/jstests/mr_stored.js
deleted file mode 100644
index 7963d9892e1..00000000000
--- a/jstests/mr_stored.js
+++ /dev/null
@@ -1,66 +0,0 @@
-
-t = db.mr_stored;
-t.drop();
-
-t.save( { "partner" : 1, "visits" : 9 } )
-t.save( { "partner" : 2, "visits" : 9 } )
-t.save( { "partner" : 1, "visits" : 11 } )
-t.save( { "partner" : 1, "visits" : 30 } )
-t.save( { "partner" : 2, "visits" : 41 } )
-t.save( { "partner" : 2, "visits" : 41 } )
-
-m = function(obj){
- emit( obj.partner , { stats : [ obj.visits ] } )
-}
-
-r = function( k , v ){
- var stats = [];
- var total = 0;
- for ( var i=0; i<v.length; i++ ){
- for ( var j in v[i].stats ) {
- stats.push( v[i].stats[j] )
- total += v[i].stats[j];
- }
- }
- return { stats : stats , total : total }
-}
-
-// Test that map reduce works with stored javascript
-db.system.js.save( { _id : "mr_stored_map" , value : m } )
-db.system.js.save( { _id : "mr_stored_reduce" , value : r } )
-
-res = t.mapReduce( function () { mr_stored_map(this) } ,
- function ( k , v ) { return mr_stored_reduce( k , v ) } ,
- { out : "mr_stored_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
-
-z = res.convertToSingleObject()
-assert.eq( 2 , Object.keySet( z ).length , "A1" )
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" )
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" )
-
-
-res.drop()
-
-m = function(obj){
- var x = "partner";
- var y = "visits";
- emit( obj[x] , { stats : [ obj[y] ] } )
-}
-
-db.system.js.save( { _id : "mr_stored_map" , value : m } )
-
-res = t.mapReduce( function () { mr_stored_map(this) } ,
- function ( k , v ) { return mr_stored_reduce( k , v ) } ,
- { out : "mr_stored_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
-
-z = res.convertToSingleObject()
-assert.eq( 2 , Object.keySet( z ).length , "B1" )
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" )
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" )
-
-db.system.js.remove( { _id : "mr_stored_map" } )
-db.system.js.remove( { _id : "mr_stored_reduce" } )
-
-res.drop()
diff --git a/jstests/mr_undef.js b/jstests/mr_undef.js
deleted file mode 100644
index e162f99836b..00000000000
--- a/jstests/mr_undef.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-t = db.mr_undef
-t.drop()
-
-outname = "mr_undef_out"
-out = db[outname]
-out.drop()
-
-t.insert({x : 0})
-
-var m = function() { emit(this.mod, this.x); }
-var r = function(k,v) { total = 0; for(i in v) { total+= v[i]; } return total; }
-
-res = t.mapReduce(m, r, {out : outname } )
-
-assert.eq( 0 , out.find( { _id : { $type : 6 } } ).itcount() , "A1" )
-assert.eq( 1 , out.find( { _id : { $type : 10 } } ).itcount() , "A2" )
-
-x = out.findOne()
-assert.eq( x , out.findOne( { _id : x["_id"] } ) , "A3" )
-
-
diff --git a/jstests/multi.js b/jstests/multi.js
deleted file mode 100644
index eb6cad348cd..00000000000
--- a/jstests/multi.js
+++ /dev/null
@@ -1,24 +0,0 @@
-t = db.jstests_multi;
-t.drop();
-
-t.ensureIndex( { a: 1 } );
-t.save( { a: [ 1, 2 ] } );
-assert.eq( 1, t.find( { a: { $gt: 0 } } ).count() , "A" );
-assert.eq( 1, t.find( { a: { $gt: 0 } } ).toArray().length , "B" );
-
-t.drop();
-t.save( { a: [ [ [ 1 ] ] ] } );
-assert.eq( 0, t.find( { a:1 } ).count() , "C" );
-assert.eq( 0, t.find( { a: [ 1 ] } ).count() , "D" );
-assert.eq( 1, t.find( { a: [ [ 1 ] ] } ).count() , "E" );
-assert.eq( 1, t.find( { a: [ [ [ 1 ] ] ] } ).count() , "F" );
-
-t.drop();
-t.save( { a: [ 1, 2 ] } );
-assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , "G" );
-
-t.drop();
-t.save( { a: [ { b: 1 }, { b: 2 } ] } );
-assert.eq( 0, t.find( { 'a.b': { $ne: 1 } } ).count() , "H" );
-
-// TODO - run same tests with an index on a
diff --git a/jstests/multi2.js b/jstests/multi2.js
deleted file mode 100644
index 7c72722fd34..00000000000
--- a/jstests/multi2.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-t = db.multi2;
-t.drop();
-
-t.save( { x : 1 , a : [ 1 ] } );
-t.save( { x : 1 , a : [] } );
-t.save( { x : 1 , a : null } );
-t.save( {} );
-
-assert.eq( 3 , t.find( { x : 1 } ).count() , "A" );
-
-t.ensureIndex( { x : 1 } );
-assert.eq( 3 , t.find( { x : 1 } ).count() , "B" );
-assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s1" );
-assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "B2" );
-
-t.dropIndex( { x : 1 } );
-t.ensureIndex( { x : 1 , a : 1 } );
-assert.eq( 3 , t.find( { x : 1 } ).count() , "C" ); // SERVER-279
-assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s2" );
-assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "C2" );
-
-
diff --git a/jstests/ne1.js b/jstests/ne1.js
deleted file mode 100644
index e1c5656b5c8..00000000000
--- a/jstests/ne1.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.ne1;
-t.drop();
-
-t.save( { x : 1 } );
-t.save( { x : 2 } );
-t.save( { x : 3 } );
-
-assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "A" );
-t.ensureIndex( { x : 1 } );
-assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "B" );
diff --git a/jstests/ne2.js b/jstests/ne2.js
deleted file mode 100644
index a69bfd6a114..00000000000
--- a/jstests/ne2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// check that we don't scan $ne values
-
-t = db.jstests_ne2;
-t.drop();
-t.ensureIndex( {a:1} );
-
-t.save( { a:-0.5 } );
-t.save( { a:0 } );
-t.save( { a:0 } );
-t.save( { a:0.5 } );
-
-e = t.find( { a: { $ne: 0 } } ).explain( true );
-assert.eq( 2, e.n, 'A' );
-
-e = t.find( { a: { $gt: -1, $lt: 1, $ne: 0 } } ).explain();
-assert.eq( 2, e.n, 'B' );
diff --git a/jstests/ne3.js b/jstests/ne3.js
deleted file mode 100644
index 3260fd3c40f..00000000000
--- a/jstests/ne3.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// don't allow most operators with regex
-
-t = db.jstests_ne3;
-t.drop();
-
-assert.throws( function() { t.findOne( { t: { $ne: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $gt: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $gte: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $lt: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $lte: /a/ } } ); } );
-
-assert.eq( 0, t.count( { t: { $in: [ /a/ ] } } ) );
diff --git a/jstests/nestedarr1.js b/jstests/nestedarr1.js
deleted file mode 100644
index b3bc9b73156..00000000000
--- a/jstests/nestedarr1.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// make sure that we don't crash on large nested arrays but correctly do not index them
-// SERVER-5127, SERVER-5036
-
-function makeNestArr(depth){
- if(depth == 1){
- return {a : [depth]};
- }
- else{
- return {a : [makeNestArr(depth - 1)] };
- }
-}
-
-t = db.arrNestTest;
-t.drop();
-
-t.ensureIndex({a:1});
-
-n = 1;
-while ( true ) {
- var before = t.count();
- t.insert( { _id : n, a : makeNestArr(n) } );
- var after = t.count();
- if ( before == after )
- break;
- n++;
-}
-
-assert( n > 30, "not enough n: " + n );
-
-assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
diff --git a/jstests/nestedobj1.js b/jstests/nestedobj1.js
deleted file mode 100644
index 45ef0c530d4..00000000000
--- a/jstests/nestedobj1.js
+++ /dev/null
@@ -1,30 +0,0 @@
-//SERVER-5127, SERVER-5036
-
-function makeNestObj(depth){
- toret = { a : 1};
-
- for(i = 1; i < depth; i++){
- toret = {a : toret};
- }
-
- return toret;
-}
-
-t = db.objNestTest;
-t.drop();
-
-t.ensureIndex({a:1});
-
-n = 1;
-while ( true ) {
- var before = t.count();
- t.insert( { _id : n, a : makeNestObj(n) } );
- var after = t.count();
- if ( before == after )
- break;
- n++;
-}
-
-assert( n > 30, "not enough n: " + n );
-
-assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
diff --git a/jstests/nin.js b/jstests/nin.js
deleted file mode 100644
index 06582781591..00000000000
--- a/jstests/nin.js
+++ /dev/null
@@ -1,58 +0,0 @@
-t = db.jstests_nin;
-t.drop();
-
-function checkEqual( name , key , value ){
- var o = {};
- o[key] = { $in : [ value ] };
- var i = t.find( o ).count();
- o[key] = { $nin : [ value ] };
- var n = t.find( o ).count();
-
- assert.eq( t.find().count() , i + n ,
- "checkEqual " + name + " $in + $nin != total | " + i + " + " + n + " != " + t.find().count() );
-}
-
-doTest = function( n ) {
-
- t.save( { a:[ 1,2,3 ] } );
- t.save( { a:[ 1,2,4 ] } );
- t.save( { a:[ 1,8,5 ] } );
- t.save( { a:[ 1,8,6 ] } );
- t.save( { a:[ 1,9,7 ] } );
-
- assert.eq( 5, t.find( { a: { $nin: [ 10 ] } } ).count() , n + " A" );
- assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , n + " B" );
- assert.eq( 0, t.find( { a: { $nin: [ 1 ] } } ).count() , n + " C" );
- assert.eq( 0, t.find( { a: { $nin: [ 1, 2 ] } } ).count() , n + " D" );
- assert.eq( 3, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " E" );
- assert.eq( 3, t.find( { a: { $nin: [ 8 ] } } ).count() , n + " F" );
- assert.eq( 4, t.find( { a: { $nin: [ 9 ] } } ).count() , n + " G" );
- assert.eq( 4, t.find( { a: { $nin: [ 3 ] } } ).count() , n + " H" );
- assert.eq( 3, t.find( { a: { $nin: [ 2, 3 ] } } ).count() , n + " I" );
- assert.eq( 1, t.find( { a: { $ne: 8, $nin: [ 2, 3 ] } } ).count() , n + " I2" );
-
- checkEqual( n + " A" , "a" , 5 );
-
- t.save( { a: [ 2, 2 ] } );
- assert.eq( 3, t.find( { a: { $nin: [ 2, 2 ] } } ).count() , n + " J" );
-
- t.save( { a: [ [ 2 ] ] } );
- assert.eq( 4, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " K" );
-
- t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
- checkEqual( n + " B" , "a" , 5 );
- checkEqual( n + " C" , "a.b" , 5 );
-
- assert.eq( 7, t.find( { 'a.b': { $nin: [ 10 ] } } ).count() , n + " L" );
- assert.eq( 7, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
- assert.eq( 7, t.find( { a: { $nin: [ 11 ] } } ).count() , n + " N" );
-
- t.save( { a: { b: [ 20, 30 ] } } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() , n + " O" );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() , n + " P" );
-}
-
-doTest( "no index" );
-t.drop();
-t.ensureIndex( {a:1} );
-doTest( "with index" );
diff --git a/jstests/nin2.js b/jstests/nin2.js
deleted file mode 100644
index afdbb0494da..00000000000
--- a/jstests/nin2.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// Check that $nin is the opposite of $in SERVER-3264
-
-t = db.jstests_nin2;
-t.drop();
-
-// Check various operator types.
-function checkOperators( array, inMatches ) {
- inCount = inMatches ? 1 : 0;
- notInCount = 1 - inCount;
- assert.eq( inCount, t.count( {foo:{$in:array}} ) );
- assert.eq( notInCount, t.count( {foo:{$not:{$in:array}}} ) );
- assert.eq( notInCount, t.count( {foo:{$nin:array}} ) );
- assert.eq( inCount, t.count( {foo:{$not:{$nin:array}}} ) );
-}
-
-t.save({});
-
-assert.eq( 1, t.count( {foo:null} ) );
-assert.eq( 0, t.count( {foo:{$ne:null}} ) );
-assert.eq( 0, t.count( {foo:1} ) );
-
-// Check matching null against missing field.
-checkOperators( [null], true );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
-
-t.remove({});
-t.save({foo:null});
-
-assert.eq( 1, t.count( {foo:null} ) );
-assert.eq( 0, t.count( {foo:{$ne:null}} ) );
-assert.eq( 0, t.count( {foo:1} ) );
-
-// Check matching empty set.
-checkOperators( [], false );
-
-// Check matching null against missing null field.
-checkOperators( [null], true );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
-
-t.remove({});
-t.save({foo:1});
-
-assert.eq( 0, t.count( {foo:null} ) );
-assert.eq( 1, t.count( {foo:{$ne:null}} ) );
-assert.eq( 1, t.count( {foo:1} ) );
-
-// Check matching null against 1.
-checkOperators( [null], false );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
-
-t.remove({});
-t.save( {foo:[0,1]} );
-// Check exact match of embedded array.
-checkOperators( [[0,1]], true );
-
-t.remove({});
-t.save( {foo:[]} );
-// Check exact match of embedded empty array.
-checkOperators( [[]], true );
-
-t.remove({});
-t.save( {foo:'foo'} );
-// Check regex match.
-checkOperators( [/o/], true );
diff --git a/jstests/not1.js b/jstests/not1.js
deleted file mode 100644
index f99a8490170..00000000000
--- a/jstests/not1.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.not1;
-t.drop();
-
-
-t.insert({a:1})
-t.insert({a:2})
-t.insert({})
-
-function test( name ){
- assert.eq( 3 , t.find().count() , name + "A" );
- assert.eq( 1 , t.find( { a : 1 } ).count() , name + "B" );
- assert.eq( 2 , t.find( { a : { $ne : 1 } } ).count() , name + "C" ); // SERVER-198
- assert.eq( 1 , t.find({a:{$in:[1]}}).count() , name + "D" );
- assert.eq( 2 , t.find({a:{$nin:[1]}}).count() , name + "E" ); // SERVER-198
-}
-
-test( "no index" );
-t.ensureIndex( { a : 1 } );
-test( "with index" );
diff --git a/jstests/not2.js b/jstests/not2.js
deleted file mode 100644
index 239ea89d226..00000000000
--- a/jstests/not2.js
+++ /dev/null
@@ -1,84 +0,0 @@
-t = db.jstests_not2;
-t.drop();
-
-check = function( query, expected, size ) {
- if ( size == null ) {
- size = 1;
- }
- assert.eq( size, t.find( query ).itcount(), tojson( query ) );
- if ( size > 0 ) {
- assert.eq( expected, t.findOne( query ).i, tojson( query ) );
- }
-}
-
-fail = function( query ) {
- try {
- t.find( query ).itcount();
- assert( false, tojson( query ) );
- } catch ( e ) {
- // expected
- }
-}
-
-doTest = function() {
-
-t.remove( {} );
-
-t.save( {i:"a"} );
-t.save( {i:"b"} );
-
-fail( {i:{$not:"a"}} );
-// SERVER-12735: We currently do not handle double negatives
-// during query canonicalization.
-//fail( {i:{$not:{$not:"a"}}} );
-//fail( {i:{$not:{$not:{$gt:"a"}}}} );
-fail( {i:{$not:{$ref:"foo"}}} );
-fail( {i:{$not:{}}} );
-check( {i:{$gt:"a"}}, "b" );
-check( {i:{$not:{$gt:"a"}}}, "a" );
-check( {i:{$not:{$ne:"a"}}}, "a" );
-check( {i:{$not:{$gte:"b"}}}, "a" );
-check( {i:{$exists:true}}, "a", 2 );
-check( {i:{$not:{$exists:true}}}, "", 0 );
-check( {j:{$not:{$exists:false}}}, "", 0 );
-check( {j:{$not:{$exists:true}}}, "a", 2 );
-check( {i:{$not:{$in:["a"]}}}, "b" );
-check( {i:{$not:{$in:["a", "b"]}}}, "", 0 );
-check( {i:{$not:{$in:["g"]}}}, "a", 2 );
-check( {i:{$not:{$nin:["a"]}}}, "a" );
-check( {i:{$not:/a/}}, "b" );
-check( {i:{$not:/(a|b)/}}, "", 0 );
-check( {i:{$not:/a/,$regex:"a"}}, "", 0 );
-check( {i:{$not:/aa/}}, "a", 2 );
-fail( {i:{$not:{$regex:"a"}}} );
-fail( {i:{$not:{$options:"a"}}} );
-check( {i:{$type:2}}, "a", 2 );
-check( {i:{$not:{$type:1}}}, "a", 2 );
-check( {i:{$not:{$type:2}}}, "", 0 );
-
-t.remove( {} );
-t.save( {i:1} );
-check( {i:{$not:{$mod:[5,1]}}}, null, 0 );
-check( {i:{$mod:[5,2]}}, null, 0 );
-check( {i:{$not:{$mod:[5,2]}}}, 1, 1 );
-
-t.remove( {} );
-t.save( {i:["a","b"]} );
-check( {i:{$not:{$size:2}}}, null, 0 );
-check( {i:{$not:{$size:3}}}, ["a","b"] );
-check( {i:{$not:{$gt:"a"}}}, null, 0 );
-check( {i:{$not:{$gt:"c"}}}, ["a","b"] );
-check( {i:{$not:{$all:["a","b"]}}}, null, 0 );
-check( {i:{$not:{$all:["c"]}}}, ["a","b"] );
-
-t.remove( {} );
-t.save( {i:[{j:"a"}]} );
-t.save( {i:[{j:"b"}]} );
-check( {i:{$not:{$elemMatch:{j:"a"}}}}, [{j:"b"}] );
-check( {i:{$not:{$elemMatch:{j:"f"}}}}, [{j:"a"}], 2 );
-
-}
-
-doTest();
-t.ensureIndex( {i:1} );
-doTest();
diff --git a/jstests/notablescan.js b/jstests/notablescan.js
deleted file mode 100644
index f2ca68d2912..00000000000
--- a/jstests/notablescan.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// check notablescan mode
-
-t = db.test_notablescan;
-t.drop();
-
-try {
- assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
- // commented lines are SERVER-2222
- if ( 0 ) { // SERVER-2222
- assert.throws( function() { t.find( {a:1} ).toArray(); } );
- }
- t.save( {a:1} );
- if ( 0 ) { // SERVER-2222
- assert.throws( function() { t.count( {a:1} ); } );
- assert.throws( function() { t.find( {} ).toArray(); } );
- }
- assert.eq( 1, t.find( {} ).itcount() ); // SERVER-274
- assert.throws( function() { t.find( {a:1} ).toArray(); } );
- assert.throws( function() { t.find( {a:1} ).hint( {$natural:1} ).toArray(); } );
- t.ensureIndex( {a:1} );
- assert.eq( 0, t.find( {a:1,b:1} ).itcount() );
- assert.eq( 1, t.find( {a:1,b:null} ).itcount() );
-
- // SERVER-4327
- assert.eq( 0, t.find( {a:{$in:[]}} ).itcount() );
- assert.eq( 0, t.find( {a:{$in:[]},b:0} ).itcount() );
-} finally {
- // We assume notablescan was false before this test started and restore that
- // expected value.
- assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false } ) );
-}
diff --git a/jstests/ns_length.js b/jstests/ns_length.js
deleted file mode 100644
index 2e3fb02b0af..00000000000
--- a/jstests/ns_length.js
+++ /dev/null
@@ -1,85 +0,0 @@
-// SERVER-7282 Faulty logic when testing maximum collection name length.
-
-// constants from server
-var maxNsLength = 127;
-var maxNsCollectionLength = 120;
-
-var myDb = db.getSiblingDB("ns_length");
-myDb.dropDatabase(); // start empty
-
-function mkStr(length) {
- s = "";
- while (s.length < length) {
- s += "x";
- }
- return s;
-}
-
-function canMakeCollectionWithName(name) {
- assert.eq(myDb.stats().fileSize, 0, "initial conditions");
-
- myDb[name].insert({});
- var success = myDb.getLastError() == null;
- if (!success) {
- assert.eq(myDb.stats().fileSize, 0, "no files should be created on error");
- return false;
- }
-
- myDb.dropDatabase();
- return true;
-}
-
-function canMakeIndexWithName(collection, name) {
- var success = (collection.ensureIndex({x:1}, {name: name}) == undefined);
- if (success) {
- assert.commandWorked(collection.dropIndex(name));
- }
- return success;
-}
-
-function canRenameCollection(from, to) {
- var success = myDb[from].renameCollection(to).ok;
- if (success) {
- // put it back
- assert.commandWorked(myDb[to].renameCollection(from));
- }
- return success;
-}
-
-// test making collections around the name limit
-var prefixOverhead = (myDb.getName() + ".").length;
-var maxCollectionNameLength = maxNsCollectionLength - prefixOverhead;
-for (var i = maxCollectionNameLength - 3; i <= maxCollectionNameLength + 3; i++) {
- assert.eq(canMakeCollectionWithName(mkStr(i)),
- i <= maxCollectionNameLength,
- "ns name length = " + (prefixOverhead + i));
-}
-
-// test making indexes around the name limit
-var collection = myDb.collection;
-collection.insert({});
-var maxIndexNameLength = maxNsLength - (collection.getFullName() + ".$").length;
-for (var i = maxIndexNameLength - 3; i <= maxIndexNameLength + 3; i++) {
- assert.eq(canMakeIndexWithName(collection, mkStr(i)),
- i <= maxIndexNameLength,
- "index ns name length = " + ((collection.getFullName() + ".$").length + i));
-}
-
-// test renaming collections with the destination around the name limit
-myDb.from.insert({});
-for (var i = maxCollectionNameLength - 3; i <= maxCollectionNameLength + 3; i++) {
- assert.eq(canRenameCollection("from", mkStr(i)),
- i <= maxCollectionNameLength,
- "new ns name length = " + (prefixOverhead + i));
-}
-
-// test renaming collections with the destination around the name limit due to long indexe names
-myDb.from.ensureIndex({a:1}, {name: mkStr(100)});
-var indexNsNameOverhead = (myDb.getName() + "..$").length + 100; // index ns name - collection name
-var maxCollectionNameWithIndex = maxNsLength - indexNsNameOverhead;
-for (var i = maxCollectionNameWithIndex - 3; i <= maxCollectionNameWithIndex + 3; i++) {
- assert.eq(canRenameCollection("from", mkStr(i)),
- i <= maxCollectionNameWithIndex,
- "index ns name length = " + (indexNsNameOverhead + i));
-}
-
diff --git a/jstests/null.js b/jstests/null.js
deleted file mode 100644
index f4bdeb44a4d..00000000000
--- a/jstests/null.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.null1;
-t.drop();
-
-t.save( { x : 1 } );
-t.save( { x : null } );
-
-assert.eq( 1 , t.find( { x : null } ).count() , "A" );
-assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "B" );
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 1 , t.find( { x : null } ).count() , "C" );
-assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "D" );
-
-// -----
-
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
-
-t.ensureIndex( { y : 1 } );
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
-
-t.dropIndex( { y : 1 } );
-
-t.ensureIndex( { y : 1 }, { sparse : true } );
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
diff --git a/jstests/null2.js b/jstests/null2.js
deleted file mode 100644
index 17b1a392714..00000000000
--- a/jstests/null2.js
+++ /dev/null
@@ -1,45 +0,0 @@
-
-t = db.null2;
-t.drop();
-
-t.insert( { _id : 1, a : [ { b : 5 } ] } );
-t.insert( { _id : 2, a : [ {} ] } );
-t.insert( { _id : 3, a : [] } );
-t.insert( { _id : 4, a : [ {}, { b : 5 } ] } );
-t.insert( { _id : 5, a : [ 5, { b : 5 } ] } );
-
-function doQuery( query ) {
- printjson( query );
- t.find( query ).forEach(
- function(z) {
- print( "\t" + tojson(z) );
- }
- );
- return t.find( query ).count();
-}
-
-function getIds( query ) {
- var ids = []
- t.find( query ).forEach(
- function(z) {
- ids.push( z._id );
- }
- );
- return ids;
-}
-
-theQueries = [ { "a.b" : null }, { "a.b" : { $in : [ null ] } } ];
-
-for ( var i=0; i < theQueries.length; i++ ) {
- assert.eq( 2, doQuery( theQueries[i] ) );
- assert.eq( [2,4], getIds( theQueries[i] ) );
-}
-
-t.ensureIndex( { "a.b" : 1 } )
-
-for ( var i=0; i < theQueries.length; i++ ) {
- assert.eq( 2, doQuery( theQueries[i] ) );
- assert.eq( [2,4], getIds( theQueries[i] ) );
-}
-
-
diff --git a/jstests/null_field_name.js b/jstests/null_field_name.js
deleted file mode 100644
index 7fa14b0a1bc..00000000000
--- a/jstests/null_field_name.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// SERVER-10313: Test that null char in field name causes an error when converting to bson
-assert.throws( function () { Object.bsonsize({"a\0":1}); },
- null,
- "null char in field name");
-
-assert.throws( function () { Object.bsonsize({"\0asdf":1}); },
- null,
- "null char in field name"); \ No newline at end of file
diff --git a/jstests/numberint.js b/jstests/numberint.js
deleted file mode 100644
index 258450f8e82..00000000000
--- a/jstests/numberint.js
+++ /dev/null
@@ -1,92 +0,0 @@
-assert.eq.automsg( "0", "new NumberInt()" );
-
-n = new NumberInt( 4 );
-assert.eq.automsg( "4", "n" );
-assert.eq.automsg( "4", "n.toNumber()" );
-assert.eq.automsg( "8", "n + 4" );
-assert.eq.automsg( "'NumberInt(4)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(4)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(4) }'", "p" );
-
-assert.eq.automsg( "NumberInt(4 )", "eval( tojson( NumberInt( 4 ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberInt( -4 );
-assert.eq.automsg( "-4", "n" );
-assert.eq.automsg( "-4", "n.toNumber()" );
-assert.eq.automsg( "0", "n + 4" );
-assert.eq.automsg( "'NumberInt(-4)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(-4)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(-4) }'", "p" );
-
-n = new NumberInt( "11111" );
-assert.eq.automsg( "'NumberInt(11111)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(11111)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(11111) }'", "p" );
-
-assert.eq.automsg( "NumberInt('11111' )", "eval( tojson( NumberInt( '11111' ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberInt( "-11111" );
-assert.eq.automsg( "-11111", "n.toNumber()" );
-assert.eq.automsg( "-11107", "n + 4" );
-assert.eq.automsg( "'NumberInt(-11111)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(-11111)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(-11111) }'", "p" );
-
-// parsing: v8 evaluates not numbers to 0 which is not bad
-//assert.throws.automsg( function() { new NumberInt( "" ); } );
-//assert.throws.automsg( function() { new NumberInt( "y" ); } );
-
-// eq
-
-assert.eq( { x : 5 } , { x : new NumberInt( "5" ) } );
-
-assert( 5 == NumberInt( 5 ) , "eq" );
-assert( 5 < NumberInt( 6 ) , "lt" );
-assert( 5 > NumberInt( 4 ) , "lt" );
-assert( NumberInt( 1 ) , "to bool a" );
-
-// objects are always considered thruthy
-//assert( ! NumberInt( 0 ) , "to bool b" );
-
-// create doc with int value in db
-t = db.getCollection( "numberint" );
-t.drop();
-
-o = { a : NumberInt(42) };
-t.save( o );
-
-assert.eq( 42 , t.findOne().a , "save doc 1" );
-assert.eq( 1 , t.find({a: {$type: 16}}).count() , "save doc 2" );
-assert.eq( 0 , t.find({a: {$type: 1}}).count() , "save doc 3" );
-
-// roundtripping
-mod = t.findOne({a: 42});
-mod.a += 10;
-mod.b = "foo";
-delete mod._id;
-t.save(mod);
-assert.eq( 2 , t.find({a: {$type: 16}}).count() , "roundtrip 1" );
-assert.eq( 0 , t.find({a: {$type: 1}}).count() , "roundtrip 2" );
-assert.eq( 1 , t.find({a: 52}).count() , "roundtrip 3" );
-
-// save regular number
-t.save({a: 42});
-assert.eq( 2 , t.find({a: {$type: 16}}).count() , "normal 1" );
-assert.eq( 1 , t.find({a: {$type: 1}}).count() , "normal 2" );
-assert.eq( 2 , t.find({a: 42}).count() , "normal 3" );
-
-
diff --git a/jstests/numberlong.js b/jstests/numberlong.js
deleted file mode 100644
index 1cbbc7a798a..00000000000
--- a/jstests/numberlong.js
+++ /dev/null
@@ -1,55 +0,0 @@
-assert.eq.automsg( "0", "new NumberLong()" );
-
-n = new NumberLong( 4 );
-assert.eq.automsg( "4", "n" );
-assert.eq.automsg( "4", "n.toNumber()" );
-assert.eq.automsg( "8", "n + 4" );
-assert.eq.automsg( "'NumberLong(4)'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(4)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(4) }'", "p" );
-
-assert.eq.automsg( "NumberLong(4 )", "eval( tojson( NumberLong( 4 ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberLong( -4 );
-assert.eq.automsg( "-4", "n" );
-assert.eq.automsg( "-4", "n.toNumber()" );
-assert.eq.automsg( "0", "n + 4" );
-assert.eq.automsg( "'NumberLong(-4)'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(-4)'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(-4) }'", "p" );
-
-// too big to fit in double
-n = new NumberLong( "11111111111111111" );
-assert.eq.automsg( "11111111111111112", "n.toNumber()" );
-assert.eq.automsg( "11111111111111116", "n + 4" );
-assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(\"11111111111111111\") }'", "p" );
-
-assert.eq.automsg( "NumberLong('11111111111111111' )", "eval( tojson( NumberLong( '11111111111111111' ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberLong( "-11111111111111111" );
-assert.eq.automsg( "-11111111111111112", "n.toNumber()" );
-assert.eq.automsg( "-11111111111111108", "n + 4" );
-assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "tojson( n )" );
-a = {}
-a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(\"-11111111111111111\") }'", "p" );
-
-// parsing
-assert.throws.automsg( function() { new NumberLong( "" ); } );
-assert.throws.automsg( function() { new NumberLong( "y" ); } );
-assert.throws.automsg( function() { new NumberLong( "11111111111111111111" ); } );
diff --git a/jstests/numberlong2.js b/jstests/numberlong2.js
deleted file mode 100644
index 5d7529a9e21..00000000000
--- a/jstests/numberlong2.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test precision of NumberLong values with v1 index code SERVER-3717
-
-t = db.jstests_numberlong2;
-t.drop();
-
-t.ensureIndex( {x:1} );
-
-function chk(longNum) {
- t.remove({});
- t.save({ x: longNum });
- assert.eq(longNum, t.find().hint({ x: 1 }).next().x);
- assert.eq(longNum, t.find({}, { _id: 0, x: 1 }).hint({ x: 1 }).next().x);
-}
-
-chk( NumberLong("1123539983311657217") );
-chk(NumberLong("-1123539983311657217"));
- chk(NumberLong("4503599627370495"));
- chk(NumberLong("4503599627370496"));
- chk(NumberLong("4503599627370497"));
-
-t.remove({});
-
-s = "11235399833116571";
-for( i = 99; i >= 0; --i ) {
- t.save( {x:NumberLong( s + i )} );
-}
-
-assert.eq( t.find().sort( {x:1} ).hint( {$natural:1} ).toArray(), t.find().sort( {x:1} ).hint( {x:1} ).toArray() );
diff --git a/jstests/numberlong3.js b/jstests/numberlong3.js
deleted file mode 100644
index 10036c0544e..00000000000
--- a/jstests/numberlong3.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Test sorting with long longs and doubles - SERVER-3719
-
-t = db.jstests_numberlong3;
-t.drop();
-
-s = "11235399833116571";
-for( i = 10; i >= 0; --i ) {
- n = NumberLong( s + i );
- t.save( {x:n} );
- if ( 0 ) { // SERVER-3719
- t.save( {x:n.floatApprox} );
- }
-}
-
-ret = t.find().sort({x:1}).toArray().filter( function( x ) { return typeof( x.x.floatApprox ) != 'undefined' } );
-
-//printjson( ret );
-
-for( i = 1; i < ret.length; ++i ) {
- first = ret[i-1].x.toString();
- second = ret[i].x.toString();
- if ( first.length == second.length ) {
- assert.lte( ret[i-1].x.toString(), ret[i].x.toString() );
- }
-}
diff --git a/jstests/numberlong4.js b/jstests/numberlong4.js
deleted file mode 100644
index 0924931efaf..00000000000
--- a/jstests/numberlong4.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Test handling of comparison between long longs and their double approximations in btrees - SERVER-3719.
-
-t = db.jstests_numberlong4;
-t.drop();
-
-if ( 0 ) { // SERVER-3719
-
-t.ensureIndex({x:1});
-
-Random.setRandomSeed();
-
-s = "11235399833116571";
-for( i = 0; i < 10000; ++i ) {
- n = NumberLong( s + Random.randInt( 10 ) );
- t.insert( { x: ( Random.randInt( 2 ) ? n : n.floatApprox ) } );
-}
-
-// If this does not return, there is a problem with index structure.
-t.find().hint({x:1}).itcount();
-
-}
diff --git a/jstests/objid1.js b/jstests/objid1.js
deleted file mode 100644
index dea31eed0d8..00000000000
--- a/jstests/objid1.js
+++ /dev/null
@@ -1,16 +0,0 @@
-t = db.objid1;
-t.drop();
-
-b = new ObjectId();
-assert( b.str , "A" );
-
-a = new ObjectId( b.str );
-assert.eq( a.str , b.str , "B" );
-
-t.save( { a : a } )
-assert( t.findOne().a.isObjectId , "C" );
-assert.eq( a.str , t.findOne().a.str , "D" );
-
-x = { a : new ObjectId() };
-eval( " y = " + tojson( x ) );
-assert.eq( x.a.str , y.a.str , "E" );
diff --git a/jstests/objid2.js b/jstests/objid2.js
deleted file mode 100644
index a28c18fca15..00000000000
--- a/jstests/objid2.js
+++ /dev/null
@@ -1,7 +0,0 @@
-t = db.objid2;
-t.drop();
-
-t.save( { _id : 517 , a : "hello" } )
-
-assert.eq( t.findOne().a , "hello" );
-assert.eq( t.findOne()._id , 517 );
diff --git a/jstests/objid3.js b/jstests/objid3.js
deleted file mode 100644
index ddf20d9af27..00000000000
--- a/jstests/objid3.js
+++ /dev/null
@@ -1,9 +0,0 @@
-t = db.objid3;
-t.drop();
-
-t.save( { a : "bob" , _id : 517 } );
-for ( var k in t.findOne() ){
- assert.eq( k , "_id" , "keys out of order" );
- break;
-}
-
diff --git a/jstests/objid4.js b/jstests/objid4.js
deleted file mode 100644
index 23986b95c71..00000000000
--- a/jstests/objid4.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
-o = new ObjectId();
-assert( o.str );
-
-a = new ObjectId( o.str );
-assert.eq( o.str , a.str );
-assert.eq( a.str , a.str.toString() )
-
-b = ObjectId( o.str );
-assert.eq( o.str , b.str );
-assert.eq( b.str , b.str.toString() )
-
-assert.throws( function(z){ return new ObjectId( "a" ); } );
-assert.throws( function(z){ return new ObjectId( "12345678901234567890123z" ); } );
diff --git a/jstests/objid5.js b/jstests/objid5.js
deleted file mode 100644
index f85ebc8c71d..00000000000
--- a/jstests/objid5.js
+++ /dev/null
@@ -1,19 +0,0 @@
-
-t = db.objid5;
-t.drop();
-
-t.save( { _id : 5.5 } );
-assert.eq( 18 , Object.bsonsize( t.findOne() ) , "A" );
-
-x = db.runCommand( { features : 1 } )
-y = db.runCommand( { features : 1 , oidReset : 1 } )
-
-if( !x.ok )
- print("x: " + tojson(x));
-
-assert( x.oidMachine , "B1" )
-assert.neq( x.oidMachine , y.oidMachine , "B2" )
-assert.eq( x.oidMachine , y.oidMachineOld , "B3" )
-
-assert.eq( 18 , Object.bsonsize( { _id : 7.7 } ) , "C1" )
-assert.eq( 0 , Object.bsonsize( null ) , "C2" )
diff --git a/jstests/objid6.js b/jstests/objid6.js
deleted file mode 100644
index b90dc9e914e..00000000000
--- a/jstests/objid6.js
+++ /dev/null
@@ -1,16 +0,0 @@
-o = new ObjectId();
-assert(o.getTimestamp);
-
-a = new ObjectId("4c17f616a707427266a2801a");
-b = new ObjectId("4c17f616a707428966a2801c");
-assert.eq(a.getTimestamp(), b.getTimestamp() , "A" );
-
-x = Math.floor( (new Date()).getTime() / 1000 );
-sleep(10/*ms*/)
-a = new ObjectId();
-sleep(10/*ms*/)
-z = Math.floor( (new Date()).getTime() / 1000 );
-y = a.getTimestamp().getTime() / 1000;
-
-assert.lte( x , y , "B" );
-assert.lte( y , z , "C" );
diff --git a/jstests/objid7.js b/jstests/objid7.js
deleted file mode 100644
index 5a5ca728c7d..00000000000
--- a/jstests/objid7.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-a = new ObjectId( "4c1a478603eba73620000000" )
-b = new ObjectId( "4c1a478603eba73620000000" )
-c = new ObjectId();
-
-assert.eq( a.toString() , b.toString() , "A" )
-assert.eq( a.toString() , "ObjectId(\"4c1a478603eba73620000000\")" , "B" );
-
-assert( a.equals( b ) , "C" )
-
-assert.neq( a.toString() , c.toString() , "D" );
-assert( ! a.equals( c ) , "E" );
-
diff --git a/jstests/or1.js b/jstests/or1.js
deleted file mode 100644
index 66bbd2e6eea..00000000000
--- a/jstests/or1.js
+++ /dev/null
@@ -1,57 +0,0 @@
-t = db.jstests_or1;
-t.drop();
-
-checkArrs = function( a, b, m ) {
- assert.eq( a.length, b.length, m );
- aStr = [];
- bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert( -1 != bStr.indexOf( aStr[ i ] ), m );
- }
-}
-
-doTest = function() {
-
-t.save( {_id:0,a:1} );
-t.save( {_id:1,a:2} );
-t.save( {_id:2,b:1} );
-t.save( {_id:3,b:2} );
-t.save( {_id:4,a:1,b:1} );
-t.save( {_id:5,a:1,b:2} );
-t.save( {_id:6,a:2,b:1} );
-t.save( {_id:7,a:2,b:2} );
-
-assert.throws( function() { t.find( { $or:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { $or:[] } ).toArray(); } );
-assert.throws( function() { t.find( { $or:[ "a" ] } ).toArray(); } );
-
-a1 = t.find( { $or: [ { a : 1 } ] } ).toArray();
-checkArrs( [ { _id:0, a:1 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 } ], a1 );
-
-a1b2 = t.find( { $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
-checkArrs( [ { _id:0, a:1 }, { _id:3, b:2 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 }, { _id:7, a:2, b:2 } ], a1b2 );
-
-t.drop();
-t.save( {a:[0,1],b:[0,1]} );
-assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}} ] } ).toArray().length );
-assert.eq( 1, t.find( { $or: [ { b: {$in:[0,1]}} ] } ).toArray().length );
-assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}}, { b: {$in:[0,1]}} ] } ).toArray().length );
-
-}
-
-doTest();
-
-// not part of SERVER-1003, but good check for subseq. implementations
-t.drop();
-t.ensureIndex( {a:1} );
-doTest();
-
-t.drop();
-t.ensureIndex( {b:1} );
-doTest();
-
-t.drop();
-t.ensureIndex( {a:1,b:1} );
-doTest(); \ No newline at end of file
diff --git a/jstests/or2.js b/jstests/or2.js
deleted file mode 100644
index 00e9f68decf..00000000000
--- a/jstests/or2.js
+++ /dev/null
@@ -1,69 +0,0 @@
-t = db.jstests_or2;
-t.drop();
-
-checkArrs = function( a, b, m ) {
- assert.eq( a.length, b.length, m );
- aStr = [];
- bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert( -1 != bStr.indexOf( aStr[ i ] ), m );
- }
-}
-
-doTest = function( index ) {
- if ( index == null ) {
- index = true;
- }
-
- t.save( {_id:0,x:0,a:1} );
- t.save( {_id:1,x:0,a:2} );
- t.save( {_id:2,x:0,b:1} );
- t.save( {_id:3,x:0,b:2} );
- t.save( {_id:4,x:1,a:1,b:1} );
- t.save( {_id:5,x:1,a:1,b:2} );
- t.save( {_id:6,x:1,a:2,b:1} );
- t.save( {_id:7,x:1,a:2,b:2} );
-
- assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
-
- a1 = t.find( { x:0, $or: [ { a : 1 } ] } ).toArray();
- checkArrs( [ { _id:0, x:0, a:1 } ], a1 );
- if ( index ) {
- assert( t.find( { x:0,$or: [ { a : 1 } ] } ).explain().cursor.match( /Btree/ ) );
- }
-
- a1b2 = t.find( { x:1, $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
- checkArrs( [ { _id:4, x:1, a:1, b:1 }, { _id:5, x:1, a:1, b:2 }, { _id:7, x:1, a:2, b:2 } ], a1b2 );
- if ( index ) {
- assert( t.find( { x:0,$or: [ { a : 1 } ] } ).explain().cursor.match( /Btree/ ) );
- }
-
- /*
- t.drop();
- obj = {_id:0,x:10,a:[1,2,3]};
- t.save( obj );
- t.update( {x:10,$or:[ {a:2} ]}, {$set:{'a.$':100}} );
- assert.eq( obj, t.findOne() ); // no change
- */
-}
-
-doTest( false );
-
-t.ensureIndex( { x:1 } );
-doTest();
-
-t.drop();
-t.ensureIndex( { x:1,a:1 } );
-doTest();
-
-t.drop();
-t.ensureIndex( {x:1,b:1} );
-doTest();
-
-t.drop();
-t.ensureIndex( {x:1,a:1,b:1} );
-doTest();
diff --git a/jstests/or3.js b/jstests/or3.js
deleted file mode 100644
index 7759e689f84..00000000000
--- a/jstests/or3.js
+++ /dev/null
@@ -1,62 +0,0 @@
-t = db.jstests_or3;
-t.drop();
-
-checkArrs = function( a, b, m ) {
- assert.eq( a.length, b.length, m );
- aStr = [];
- bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert( -1 != bStr.indexOf( aStr[ i ] ), m );
- }
-}
-
-doTest = function( index ) {
- if ( index == null ) {
- index = true;
- }
-
- t.save( {_id:0,x:0,a:1} );
- t.save( {_id:1,x:0,a:2} );
- t.save( {_id:2,x:0,b:1} );
- t.save( {_id:3,x:0,b:2} );
- t.save( {_id:4,x:1,a:1,b:1} );
- t.save( {_id:5,x:1,a:1,b:2} );
- t.save( {_id:6,x:1,a:2,b:1} );
- t.save( {_id:7,x:1,a:2,b:2} );
-
- assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
-
- an1 = t.find( { $nor: [ { a : 1 } ] } ).toArray();
- checkArrs( t.find( {a:{$ne:1}} ).toArray(), an1 );
-
- an1bn2 = t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).toArray();
- checkArrs( [ { _id:6, x:1, a:2, b:1 } ], an1bn2 );
- checkArrs( t.find( { x:1, a:{$ne:1}, b:{$ne:2} } ).toArray(), an1bn2 );
- if ( index ) {
- assert( t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).explain().cursor.match( /Btree/ ) );
- }
-
- an1b2 = t.find( { $nor: [ { a : 1 } ], $or: [ { b : 2 } ] } ).toArray();
- checkArrs( t.find( {a:{$ne:1},b:2} ).toArray(), an1b2 );
-}
-
-doTest( false );
-
-t.ensureIndex( { x:1 } );
-doTest();
-
-t.drop();
-t.ensureIndex( { x:1,a:1 } );
-doTest();
-
-t.drop();
-t.ensureIndex( {x:1,b:1} );
-doTest();
-
-t.drop();
-t.ensureIndex( {x:1,a:1,b:1} );
-doTest();
diff --git a/jstests/or4.js b/jstests/or4.js
deleted file mode 100644
index 23c10bba8e2..00000000000
--- a/jstests/or4.js
+++ /dev/null
@@ -1,99 +0,0 @@
-t = db.jstests_or4;
-t.drop();
-
-// v8 does not have a builtin Array.sort
-if (!Array.sort) {
- Array.sort = function(arr) {
- return arr.sort();
- };
-}
-
-checkArrs = function( a, b ) {
- m = "[" + a + "] != [" + b + "]";
- a = eval( a );
- b = eval( b );
- assert.eq( a.length, b.length, m );
- aStr = [];
- bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert( -1 != bStr.indexOf( aStr[ i ] ), m );
- }
-}
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {b:3} );
-t.save( {a:2,b:3} );
-
-assert.eq.automsg( "4", "t.count( {$or:[{a:2},{b:3}]} )" );
-assert.eq.automsg( "2", "t.count( {$or:[{a:2},{a:2}]} )" );
-
-assert.eq.automsg( "2", "t.find( {} ).skip( 2 ).count( true )" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip( 2 ).count( true )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).skip( 3 ).count( true )" );
-
-assert.eq.automsg( "2", "t.find( {} ).limit( 2 ).count( true )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).limit( 1 ).count( true )" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).limit( 2 ).count( true )" );
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).limit( 3 ).count( true )" );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).limit( 4 ).count( true )" );
-
-t.remove({ $or: [{ a: 2 }, { b: 3}] });
-assert.eq.automsg( "0", "t.count()" );
-
-t.save( {b:3} );
-t.remove({ $or: [{ a: 2 }, { b: 3}] });
-assert.eq.automsg( "0", "t.count()" );
-
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {a:2,b:3} );
-
-t.update( {$or:[{a:2},{b:3}]}, {$set:{z:1}}, false, true );
-assert.eq.automsg( "3", "t.count( {z:1} )" );
-
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).toArray().length" );
-checkArrs( "t.find().toArray()", "t.find( {$or:[{a:2},{b:3}]} ).toArray()" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip(1).toArray().length" );
-
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
-
-t.save( {a:1} );
-t.save( {b:4} );
-t.save( {a:2} );
-
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).snapshot().toArray().length" );
-
-t.save( {a:1,b:3} );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize(-4).toArray().length" );
-
-assert.eq.automsg( "[1,2]", "Array.sort( t.distinct( 'a', {$or:[{a:2},{b:3}]} ) )" );
-
-assert.eq.automsg( "[{a:2},{a:null},{a:1}]", "t.group( {key:{a:1}, cond:{$or:[{a:2},{b:3}]}, reduce:function( x, y ) { }, initial:{} } )" );
-assert.eq.automsg( "5", "t.mapReduce( function() { emit( 'a', this.a ); }, function( key, vals ) { return vals.length; }, {out:{inline:true},query:{$or:[{a:2},{b:3}]}} ).counts.input" );
-
-explain = t.find( {$or:[{a:2},{b:3}]} ).explain();
-
-t.remove( {} );
-
-t.save( {a:[1,2]} );
-assert.eq.automsg( "1", "t.find( {$or:[{a:1},{a:2}]} ).toArray().length" );
-assert.eq.automsg( "1", "t.count( {$or:[{a:1},{a:2}]} )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{a:1}]} ).toArray().length" );
-assert.eq.automsg( "1", "t.count( {$or:[{a:2},{a:1}]} )" );
-
-t.remove({});
-
-assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{a:1}]} ).sort( {b:1} ).explain().cursor" );
-assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{}]} ).sort( {b:1} ).explain().cursor" );
-assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{b:1}]} ).sort( {b:1} ).explain().cursor" );
-
-assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{a:1}]} ).hint( {b:1} ).explain().cursor" );
-assert.eq.automsg( "'BtreeCursor b_1'", "t.find( {$or:[{}]} ).hint( {b:1} ).explain().cursor" );
-assert.eq.automsg( "1", "t.find( {$or:[{b:1}]} ).hint( {b:1} ).explain().indexBounds.b[ 0 ][ 0 ]" );
diff --git a/jstests/or5.js b/jstests/or5.js
deleted file mode 100644
index 6a7316787d4..00000000000
--- a/jstests/or5.js
+++ /dev/null
@@ -1,70 +0,0 @@
-t = db.jstests_or5;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:2},{b:3},{}]} ).explain().cursor" );
-assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).explain().cursor" );
-
-t.ensureIndex( {c:1} );
-
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {c:4} );
-t.save( {a:2,b:3} );
-t.save( {a:2,c:4} );
-t.save( {b:3,c:4} );
-t.save( {a:2,b:3,c:4} );
-
-assert.eq.automsg( "7", "t.count( {$or:[{a:2},{b:3},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:6},{b:3},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:6},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:3},{c:6}]} )" );
-
-assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).toArray().length" );
-
-for( i = 2; i <= 7; ++i ) {
-assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).batchSize( i ).toArray().length" );
-}
-
-t.ensureIndex( {z:"2d"} );
-
-assert.eq.automsg( "'GeoSearchCursor'", "t.find( {z:{$near:[50,50]},a:2} ).explain().cursor" );
-assert.eq.automsg( "'GeoSearchCursor'", "t.find( {z:{$near:[50,50]},$or:[{a:2}]} ).explain().cursor" );
-assert.eq.automsg( "'GeoSearchCursor'", "t.find( {$or:[{a:2}],z:{$near:[50,50]}} ).explain().cursor" );
-assert.eq.automsg( "'GeoSearchCursor'", "t.find( {$or:[{a:2},{b:3}],z:{$near:[50,50]}} ).explain().cursor" );
-assert.throws.automsg( function() { return t.find( {$or:[{z:{$near:[50,50]}},{a:2}]} ).toArray(); } );
-
-function reset() {
- t.drop();
-
- t.ensureIndex( {a:1} );
- t.ensureIndex( {b:1} );
- t.ensureIndex( {c:1} );
-
- t.save( {a:2} );
- t.save( {a:2} );
- t.save( {b:3} );
- t.save( {b:3} );
- t.save( {c:4} );
- t.save( {c:4} );
-}
-
-reset();
-
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 1 ).itcount()" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 2 ).itcount()" );
-
-t.drop();
-
-t.save( {a:[1,2]} );
-assert.eq.automsg( "1", "t.find( {$or:[{a:[1,2]}]} ).itcount()" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:{$all:[1,2]}}]} ).itcount()" );
-assert.eq.automsg( "0", "t.find( {$or:[{a:{$all:[1,3]}}]} ).itcount()" );
diff --git a/jstests/or6.js b/jstests/or6.js
deleted file mode 100644
index 43b75f467aa..00000000000
--- a/jstests/or6.js
+++ /dev/null
@@ -1,23 +0,0 @@
-t = db.jstests_or6;
-t.drop();
-
-t.ensureIndex( {a:1} );
-
-assert.eq.automsg( "null", "t.find( {$or:[{a:1},{b:2}]} ).hint( {a:1} ).explain().clauses" );
-
-assert.eq.automsg( "'BasicCursor'", "t.find( {$or:[{a:1},{a:3}]} ).hint( {$natural:1} ).explain().cursor" );
-
-t.ensureIndex( {b:1} );
-assert.eq.automsg( "2", "t.find( {$or:[{a:1,b:5},{a:3,b:5}]} ).hint( {a:1} ).explain().clauses.length" );
-
-t.drop();
-
-t.ensureIndex( {a:1,b:1} );
-assert.eq.automsg( "2", "t.find( {$or:[{a:{$in:[1,2]},b:5}, {a:2,b:6}]} )" +
- ".hint({a:1,b:1}).explain().clauses.length" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:{$gt:1,$lte:2},b:5}, {a:2,b:6}]} )" +
- ".hint({a:1,b:1}).explain().clauses.length" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:{$gt:1,$lte:3},b:5}, {a:2,b:6}]} )" +
- ".hint({a:1,b:1}).explain().clauses.length" );
-assert.eq.automsg( "null", "t.find( {$or:[{a:{$in:[1,2]}}, {a:2}]} )" +
- ".hint({a:1,b:1}).explain().clauses" );
diff --git a/jstests/or7.js b/jstests/or7.js
deleted file mode 100644
index 916158047d8..00000000000
--- a/jstests/or7.js
+++ /dev/null
@@ -1,41 +0,0 @@
-t = db.jstests_or7;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:2} );
-
-assert.eq.automsg( "1", "t.count( {$or:[{a:{$in:[1,3]}},{a:2}]} )" );
-
-//SERVER-1201 ...
-
-t.remove({});
-
-t.save( {a:"aa"} );
-t.save( {a:"ab"} );
-t.save( {a:"ad"} );
-
-assert.eq.automsg( "3", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
-
-t.remove({});
-
-t.save( {a:"aa"} );
-t.save( {a:"ad"} );
-
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
-
-t.remove({});
-
-t.save( {a:"aa"} );
-t.save( {a:"ac"} );
-
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
-
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
-
-t.save( {a:"ab"} );
-assert.eq.automsg( "3", "t.count( {$or:[{a:{$in:[/^ab/],$gte:'abc'}},{a:/^a/}]} )" );
-
-t.remove({});
-t.save( {a:"a"} );
-t.save( {a:"b"} );
-assert.eq.automsg( "2", "t.count( {$or:[{a:{$gt:'a',$lt:'b'}},{a:{$gte:'a',$lte:'b'}}]} )" );
diff --git a/jstests/or8.js b/jstests/or8.js
deleted file mode 100644
index 40d5b38cede..00000000000
--- a/jstests/or8.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// missing collection
-
-t = db.jstests_or8;
-t.drop();
-
-t.find({ "$or": [ { "PropA": { "$lt": "b" } }, { "PropA": { "$lt": "b", "$gt": "a" } } ] }).toArray();
-
-// empty $in
-
-t.save( {a:1} );
-t.save( {a:3} );
-t.ensureIndex( {a:1} );
-t.find({ $or: [ { a: {$in:[]} } ] } ).toArray();
-assert.eq.automsg( "2", "t.find({ $or: [ { a: {$in:[]} }, {a:1}, {a:3} ] } ).toArray().length" );
-assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, { a: {$in:[]} }, {a:3} ] } ).toArray().length" );
-assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, {a:3}, { a: {$in:[]} } ] } ).toArray().length" );
-
-// nested negate field
-
-t.drop();
-t.save( {a:{b:1,c:1}} );
-t.ensureIndex( { 'a.b':1 } );
-t.ensureIndex( { 'a.c':1 } );
-assert.eq( 1, t.find( {$or: [ { 'a.b':1 }, { 'a.c':1 } ] } ).itcount() );
-
-t.remove({});
-t.save( {a:[{b:1,c:1},{b:2,c:1}]} );
-assert.eq( 1, t.find( {$or: [ { 'a.b':2 }, { 'a.c':1 } ] } ).itcount() );
diff --git a/jstests/or9.js b/jstests/or9.js
deleted file mode 100644
index 7318a532af4..00000000000
--- a/jstests/or9.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// index skipping and previous index range negation
-
-t = db.jstests_or9;
-t.drop();
-
-t.ensureIndex( {a:1,b:1} );
-
-t.save( {a:2,b:2} );
-
-function check( a, b, q ) {
- count = a;
- clauses = b;
- query = q;
- assert.eq.automsg( "count", "t.count( query )" );
- if ( clauses == 1 ) {
- assert.eq.automsg( "undefined", "t.find( query ).explain().clauses" );
- } else {
- assert.eq.automsg( "clauses", "t.find( query ).hint({a:1, b:1}).explain().clauses.length" );
- }
-}
-
-// SERVER-12594: there are two clauses in this case, because we do
-// not yet collapse OR of ANDs to a single ixscan.
-check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2 } ] } );
-
-check( 1, 2, { $or: [ { a: { $gt:2,$lte:3 } }, { a: 2 } ] } );
-
-check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2 } ] } );
-check( 1, 1, { $or: [ { b: { $gte:2,$lte:3 } }, { b: 2 } ] } );
-check( 1, 1, { $or: [ { b: { $gt:2,$lte:3 } }, { b: 2 } ] } );
-
-// SERVER-12594: there are two clauses in this case, because we do
-// not yet collapse OR of ANDs to a single ixscan.
-check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2, b: 2 } ] } );
-
-check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b:3 }, { a: 2 } ] } );
-
-check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2, a: 2 } ] } );
-
-check( 1, 1, { $or: [ { b: { $gte:1,$lte:3 }, a:3 }, { b: 2 } ] } );
-
-check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
-check( 1, 2, { $or: [ { a: { $gte:2,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
-// SERVER-12594: there are two clauses in this case, because we do
-// not yet collapse OR of ANDs to a single ixscan.
-check( 1, 2, { $or: [ { a: { $gte:1,$lte:3 }, b: 2 }, { a: 2, b: 2 } ] } );
-
-check( 1, 2, { $or: [ { b: { $gte:1,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
-check( 1, 2, { $or: [ { b: { $gte:2,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
-// SERVER-12594: there are two clauses in this case, because we do
-// not yet collapse OR of ANDs to a single ixscan.
-check( 1, 2, { $or: [ { b: { $gte:1,$lte:3 }, a: 2 }, { a: 2, b: 2 } ] } );
-
-t.remove({});
-
-t.save( {a:1,b:5} );
-t.save( {a:5,b:1} );
-
-// SERVER-12594: there are two clauses in the case below, because we do
-// not yet collapse OR of ANDs to a single ixscan.
-check( 2, 2, { $or: [ { a: { $in:[1,5] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
-
-check( 2, 2, { $or: [ { a: { $in:[1] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
-check( 2, 2, { $or: [ { a: { $in:[1] }, b: { $in:[1] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
diff --git a/jstests/ora.js b/jstests/ora.js
deleted file mode 100644
index 67af4c191ec..00000000000
--- a/jstests/ora.js
+++ /dev/null
@@ -1,17 +0,0 @@
-var t = db.jstests_ora;
-
-// $where
-t.drop();
-for (var i = 0; i < 10; i += 1) {
- t.save({x: i, y: 10 - i});
-}
-assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}]}).count()");
-assert.eq.automsg("2", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 2'}]}).count()");
-assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 8'}]}).count()");
-assert.eq.automsg("10", "t.find({$or: [{$where: 'this.x === 2'}, {x: {$ne: 2}}]}).count()");
-
-// geo
-t.drop();
-t.ensureIndex({loc: "2d"});
-
-assert.throws(function () {t.find({$or: [{loc: {$near: [11, 11]}}]}).limit(1).next()['_id'];});
diff --git a/jstests/orb.js b/jstests/orb.js
deleted file mode 100644
index a4abdeecabf..00000000000
--- a/jstests/orb.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// check neg direction index and negation
-
-var t = db.jstests_orb;
-t.drop();
-
-t.save( {a:1} );
-t.ensureIndex( {a:-1} );
-
-assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
-
-t.drop();
-
-t.save( {a:1,b:1} );
-t.ensureIndex( {a:1,b:-1} );
-
-assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
-assert.eq.automsg( "1", "t.count( {$or: [ { a:1, b: { $gt:0,$lt:2 } }, { a:1, b: { $gt:-1,$lt:3 } } ] } )" ); \ No newline at end of file
diff --git a/jstests/orc.js b/jstests/orc.js
deleted file mode 100644
index dec6a7b920d..00000000000
--- a/jstests/orc.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// test that or duplicates are dropped in certain special cases
-t = db.jstests_orc;
-t.drop();
-
-// The goal here will be to ensure the full range of valid values is scanned for each or clause, in order to ensure that
-// duplicates are eliminated properly in the cases below when field range elimination is not employed. The deduplication
-// of interest will occur on field a. The range specifications for fields b and c are such that (in the current
-// implementation) field range elimination will not occur between the or clauses, meaning that the full range of valid values
-// will be scanned for each clause and deduplication will be forced.
-
-// NOTE This test uses some tricks to avoid or range elimination, but in future implementations these tricks may not apply.
-// Perhaps it would be worthwhile to create a mode where range elimination is disabled so it will be possible to write a more
-// robust test.
-
-t.ensureIndex( {a:-1,b:1,c:1} );
-
-// sanity test
-t.save( {a:null,b:4,c:4} );
-assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
-
-// from here on is SERVER-2245
-t.remove({});
-t.save( {b:4,c:4} );
-assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
-
-//t.remove({});
-//t.save( {a:[],b:4,c:4} );
-//printjson( t.find( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ).explain() );
-//assert.eq( 1, t.count( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
diff --git a/jstests/ord.js b/jstests/ord.js
deleted file mode 100644
index 1ab0c1258a9..00000000000
--- a/jstests/ord.js
+++ /dev/null
@@ -1,35 +0,0 @@
-// check that we don't crash if an index used by an earlier or clause is dropped
-
-// Dropping an index kills all cursors on the indexed namespace, not just those
-// cursors using the dropped index. This test is to serve as a reminder that
-// the $or implementation may need minor adjustments (memory ownership) if this
-// behavior is changed.
-
-t = db.jstests_ord;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-for( i = 0; i < 80; ++i ) {
- t.save( {a:1} );
-}
-
-for( i = 0; i < 100; ++i ) {
- t.save( {b:1} );
-}
-
-c = t.find( { $or: [ {a:1}, {b:1} ] } ).batchSize( 100 );
-for( i = 0; i < 90; ++i ) {
- c.next();
-}
-// At this point, our initial query has ended and there is a client cursor waiting
-// to read additional documents from index {b:1}. Deduping is performed against
-// the index key {a:1}.
-
-t.dropIndex( {a:1} );
-db.getLastError();
-
-// Dropping an index kills all cursors on the indexed namespace, not just those
-// cursors using the dropped index.
-assert.throws( c.next() );
diff --git a/jstests/ore.js b/jstests/ore.js
deleted file mode 100644
index f938f635d41..00000000000
--- a/jstests/ore.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// verify that index direction is considered when deduping based on an earlier
-// index
-
-t = db.jstests_ore;
-t.drop();
-
-t.ensureIndex( {a:-1} )
-t.ensureIndex( {b:1} );
-
-t.save( {a:1,b:1} );
-t.save( {a:2,b:1} );
-
-assert.eq( 2, t.count( {$or:[{a:{$in:[1,2]}},{b:1}]} ) );
diff --git a/jstests/orf.js b/jstests/orf.js
deleted file mode 100644
index 720b5b31f0c..00000000000
--- a/jstests/orf.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Test a query with 200 $or clauses
-
-t = db.jstests_orf;
-t.drop();
-
-var a = [];
-var expectBounds = [];
-for( var i = 0; i < 200; ++i ) {
- a.push( {_id:i} );
- expectBounds.push([i, i]);
-}
-a.forEach( function( x ) { t.save( x ); } );
-
-// This $or query is answered as an index scan over
-// a series of _id index point intervals.
-explain = t.find( {$or:a} ).hint( {_id: 1} ).explain( true );
-printjson( explain );
-assert.eq( 'BtreeCursor _id_', explain.cursor, 'cursor' );
-assert.eq( expectBounds, explain.indexBounds['_id'], 'indexBounds' );
-assert.eq( 200, explain.n, 'n' );
-assert.eq( 200, explain.nscanned, 'nscanned' );
-assert.eq( 200, explain.nscannedObjects, 'nscannedObjects' );
-assert.eq( false, explain.isMultiKey, 'isMultiKey' );
-assert.eq( false, explain.scanAndOrder, 'scanAndOrder' );
-assert.eq( false, explain.indexOnly, 'indexOnly' );
-
-assert.eq( 200, t.count( {$or:a} ) );
diff --git a/jstests/org.js b/jstests/org.js
deleted file mode 100644
index 19239f96c10..00000000000
--- a/jstests/org.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// SERVER-2282 $or de duping with sparse indexes
-
-t = db.jstests_org;
-t.drop();
-
-t.ensureIndex( {a:1}, {sparse:true} );
-t.ensureIndex( {b:1} );
-
-t.remove({});
-t.save( {a:1,b:2} );
-assert.eq( 1, t.count( {$or:[{a:1},{b:2}]} ) );
-
-t.remove({});
-t.save( {a:null,b:2} );
-assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
-
-t.remove({});
-t.save( {b:2} );
-assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
diff --git a/jstests/orh.js b/jstests/orh.js
deleted file mode 100644
index 5fb845fd01c..00000000000
--- a/jstests/orh.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// SERVER-2831 Demonstration of sparse index matching semantics in a multi index $or query.
-
-t = db.jstests_orh;
-t.drop();
-
-t.ensureIndex( {a:1}, {sparse:true} );
-t.ensureIndex( {b:1,a:1} );
-
-t.remove({});
-t.save( {b:2} );
-assert.eq( 1, t.count( {a:null} ) );
-assert.eq( 1, t.count( {b:2,a:null} ) );
-
-assert.eq( 1, t.count( {$or:[{b:2,a:null},{a:null}]} ) );
-
-// Is this desired?
-assert.eq( 1, t.count( {$or:[{a:null},{b:2,a:null}]} ) );
diff --git a/jstests/orj.js b/jstests/orj.js
deleted file mode 100644
index fa234f36cb5..00000000000
--- a/jstests/orj.js
+++ /dev/null
@@ -1,121 +0,0 @@
-// Test nested $or clauses SERVER-2585 SERVER-3192
-
-t = db.jstests_orj;
-t.drop();
-
-t.save( {a:1,b:2} );
-
-function check() {
-
-assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$or:[{$or:"a"}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[{$or:[]}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[{$or:[ "a" ]}] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:"a"}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:[]}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:[ "a" ]}] } ).toArray(); } );
-
-assert.eq( 1, t.find( {a:1,b:2} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).itcount() );
-assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).itcount() );
-
-assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-
-}
-
-check();
-
-t.ensureIndex( {a:1} );
-check();
-t.dropIndexes();
-
-t.ensureIndex( {b:1} );
-check();
-t.dropIndexes();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-check();
-t.dropIndexes();
-
-t.ensureIndex( {a:1,b:1} );
-check();
-t.dropIndexes();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-t.ensureIndex( {a:1,b:1} );
-check();
-
-function checkHinted( hint ) {
- assert.eq( 1, t.find( {a:1,b:2} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
-}
-
-checkHinted( {$natural:1} );
-checkHinted( {a:1} );
-checkHinted( {b:1} );
-checkHinted( {a:1,b:1} ); \ No newline at end of file
diff --git a/jstests/ork.js b/jstests/ork.js
deleted file mode 100644
index d6d40161e69..00000000000
--- a/jstests/ork.js
+++ /dev/null
@@ -1,11 +0,0 @@
-// SERVER-2585 Test $or clauses within indexed top level $or clauses.
-
-t = db.jstests_ork;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:[1,2],b:5} );
-t.save( {a:[2,4],b:5} );
-
-assert.eq( 2, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:5}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:6}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
diff --git a/jstests/orl.js b/jstests/orl.js
deleted file mode 100644
index 2726975d5aa..00000000000
--- a/jstests/orl.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// SERVER-3445 Test using coarse multikey bounds for or range elimination.
-
-t = db.jstests_orl;
-t.drop();
-
-t.ensureIndex( {'a.b':1,'a.c':1} );
-// make the index multikey
-t.save( {a:{b:[1,2]}} );
-
-// SERVER-3445
-if ( 0 ) {
-assert( !t.find( {$or:[{'a.b':2,'a.c':3},{'a.b':2,'a.c':4}]} ).explain().clauses );
-} \ No newline at end of file
diff --git a/jstests/oro.js b/jstests/oro.js
deleted file mode 100644
index ae1b6f53552..00000000000
--- a/jstests/oro.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Test $or query with several clauses on separate indexes.
-
-t = db.jstests_oro;
-t.drop();
-
-orClauses = [];
-for( idxKey = 'a'; idxKey <= 'aaaaaaaaaa'; idxKey += 'a' ) {
- idx = {}
- idx[ idxKey ] = 1;
- t.ensureIndex( idx );
- for( i = 0; i < 200; ++i ) {
- t.insert( idx );
- }
- orClauses.push( idx );
-}
-
-printjson( t.find({$or:orClauses}).explain() );
-c = t.find({$or:orClauses}).batchSize( 100 );
-count = 0;
-
-while( c.hasNext() ) {
- for( i = 0; i < 50 && c.hasNext(); ++i, c.next(), ++count );
- // Interleave with another operation.
- t.stats();
-}
-
-assert.eq( 10 * 200, count );
diff --git a/jstests/orp.js b/jstests/orp.js
deleted file mode 100644
index 18abdfbc63a..00000000000
--- a/jstests/orp.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// $or clause deduping with result set sizes > 101 (smaller result sets are now also deduped by the
-// query optimizer cursor).
-
-t = db.jstests_orp;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-t.ensureIndex( { c:1 } );
-
-for( i = 0; i < 200; ++i ) {
- t.save( { a:1, b:1 } );
-}
-
-// Deduping results from the previous clause.
-assert.eq( 200, t.count( { $or:[ { a:1 }, { b:1 } ] } ) );
-
-// Deduping results from a prior clause.
-assert.eq( 200, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
-t.save( { c:1 } );
-assert.eq( 201, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
-
-// Deduping results that would normally be index only matches on overlapping and double scanned $or
-// field regions.
-t.drop();
-t.ensureIndex( { a:1, b:1 } );
-for( i = 0; i < 16; ++i ) {
- for( j = 0; j < 16; ++j ) {
- t.save( { a:i, b:j } );
- }
-}
-assert.eq( 16 * 16,
- t.count( { $or:[ { a:{ $gte:0 }, b:{ $gte:0 } }, { a:{ $lte:16 }, b:{ $lte:16 } } ] } ) );
-
-// Deduping results from a clause that completed before the multi cursor takeover.
-t.drop();
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-t.save( { a:1,b:200 } );
-for( i = 0; i < 200; ++i ) {
- t.save( { b:i } );
-}
-assert.eq( 201, t.count( { $or:[ { a:1 }, { b:{ $gte:0 } } ] } ) );
diff --git a/jstests/padding.js b/jstests/padding.js
deleted file mode 100644
index 1872574d80f..00000000000
--- a/jstests/padding.js
+++ /dev/null
@@ -1,66 +0,0 @@
-p = db.getCollection("padding");
-p.drop();
-
-// this test requires usePowerOf2Sizes to be off
-db.createCollection( p.getName(), { "usePowerOf2Sizes" : false } );
-assert.eq(0, p.stats().userFlags);
-
-for (var i = 0; i < 1000; i++) {
- p.insert({ x: 1, y: "aaaaaaaaaaaaaaa" });
-}
-
-assert.eq(p.stats().paddingFactor, 1, "Padding Not 1");
-
-for (var i = 0; i < 1000; i++) {
- var x = p.findOne();
- x.y = x.y + "aaaaaaaaaaaaaaaa";
- p.update({}, x);
- if (i % 100 == 0)
-
- print(p.stats().paddingFactor);
-}
-
-assert.gt(p.stats().paddingFactor, 1.9, "Padding not > 1.9");
-
-// this should make it go down
-for (var i = 0; i < 1000; i++) {
- p.update({}, { $inc: { x: 1} });
- if (i % 100 == 0)
- print(p.stats().paddingFactor);
-}
-assert.lt(p.stats().paddingFactor, 1.7, "Padding not < 1.7");
-
-for (var i = 0; i < 1000; i++) {
- if (i % 2 == 0) {
- p.update({}, { $inc: { x: 1} });
- }
- else {
- var x = p.findOne();
- x.y = x.y + "aaaaaaaaaaaaaaaa";
- p.update({}, x);
- }
- if( i % 100 == 0 )
- print(p.stats().paddingFactor);
-}
-var ps = p.stats().paddingFactor;
-assert.gt(ps, 1.7, "Padding not greater than 1.7");
-assert.lt(ps, 1.9, "Padding not less than 1.9");
-
-// 50/50 inserts and nonfitting updates
-for (var i = 0; i < 1000; i++) {
- if (i % 2 == 0) {
- p.insert({});
- }
- else {
- var x = p.findOne();
- x.y = x.y + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
- p.update({}, x);
- }
- if (i % 100 == 0)
- print(p.stats().paddingFactor);
-}
-
-// should have trended somewhat higher over the above.
-// speed of increase would be higher with more indexes.
-assert.gt(p.stats().paddingFactor, ps + 0.02 , "padding factor not greater than value (+.02)");
-p.drop();
diff --git a/jstests/plan_cache_commands.js b/jstests/plan_cache_commands.js
deleted file mode 100644
index 613d436aa15..00000000000
--- a/jstests/plan_cache_commands.js
+++ /dev/null
@@ -1,433 +0,0 @@
-/**
- * Plan cache commands
- *
- * Cache-wide Commands
- * - planCacheListQueryShapes
- * - planCacheClear
- * Removes plans for one or all query shapes.
- * - planCacheListPlans
- */
-
-var t = db.jstests_plan_cache_commands;
-t.drop();
-
-// Insert some data so we don't go to EOF.
-t.save({a: 1, b: 1});
-t.save({a: 2, b: 2});
-
-// We need two indices so that the MultiPlanRunner is executed.
-t.ensureIndex({a: 1});
-t.ensureIndex({a: 1, b:1});
-
-// Run the query.
-var queryA1 = {a: 1, b:1};
-var projectionA1 = {_id: 0, a: 1};
-var sortA1 = {a: -1};
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-// We now expect the two indices to be compared and a cache entry to exist.
-
-
-//
-// tests for planCacheListQueryShapes
-// Returns a list of query shapes for the queries currently cached in the collection.
-//
-
-// Utility function to list query shapes in cache.
-function getShapes(collection) {
- if (collection == undefined) {
-
- collection = t;
- }
- var res = collection.runCommand('planCacheListQueryShapes');
- print('planCacheListQueryShapes() = ' + tojson(res));
- assert.commandWorked(res, 'planCacheListQueryShapes failed');
- assert(res.hasOwnProperty('shapes'), 'shapes missing from planCacheListQueryShapes result');
- return res.shapes;
-
-}
-
-// Attempting to retrieve cache information on non-existent collection is not an error
-// and should return an empty array of query shapes.
-var missingCollection = db.jstests_query_cache_missing;
-missingCollection.drop();
-assert.eq(0, getShapes(missingCollection).length,
- 'planCacheListQueryShapes should return empty array on non-existent collection');
-
-// Retrieve query shapes from the test collection
-// Number of shapes should match queries executed by multi-plan runner.
-var shapes = getShapes();
-assert.eq(1, shapes.length, 'unexpected number of shapes in planCacheListQueryShapes result');
-assert.eq({query: queryA1, sort: sortA1, projection: projectionA1}, shapes[0],
- 'unexpected query shape returned from planCacheListQueryShapes');
-
-
-
-//
-// Tests for planCacheClear (one query shape)
-//
-
-// Invalid key should be a no-op.
-t.runCommand('planCacheClear', {query: {unknownfield: 1}});
-assert.eq(1, getShapes().length, 'removing unknown query should not affecting exisiting entries');
-
-// Run a new query shape and drop it from the cache
-assert.eq(1, t.find({a: 2, b: 2}).itcount(), 'unexpected document count');
-assert.eq(2, getShapes().length, 'unexpected cache size after running 2nd query');
-assert.commandWorked(t.runCommand('planCacheClear', {query: {a: 1, b: 1}}));
-assert.eq(1, getShapes().length, 'unexpected cache size after dropping 2nd query from cache');
-
-
-
-//
-// Tests for planCacheListPlans
-//
-
-// Utility function to list plans for a query.
-function getPlans(query, sort, projection) {
- var key = {query: query, sort: sort, projection: projection};
- var res = t.runCommand('planCacheListPlans', key);
- assert.commandWorked(res, 'planCacheListPlans(' + tojson(key, '', true) + ' failed');
- assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
- tojson(key, '', true) + ') result');
- return res.plans;
-}
-
-// Invalid key should be an error.
-assert.eq(0, getPlans({unknownfield: 1}, {}, {}),
- 'planCacheListPlans should return empty results on unknown query shape');
-
-// Retrieve plans for valid cache entry.
-var plans = getPlans(queryA1, sortA1, projectionA1);
-assert.eq(2, plans.length, 'unexpected number of plans cached for query');
-
-// Print every plan
-// Plan details/feedback verified separately in section after Query Plan Revision tests.
-print('planCacheListPlans result:');
-for (var i = 0; i < plans.length; i++) {
- print('plan ' + i + ': ' + tojson(plans[i]));
-}
-
-
-
-//
-// Tests for planCacheClear
-//
-
-// Drop query cache. This clears all cached queries in the collection.
-res = t.runCommand('planCacheClear');
-print('planCacheClear() = ' + tojson(res));
-assert.commandWorked(res, 'planCacheClear failed');
-assert.eq(0, getShapes().length, 'plan cache should be empty after successful planCacheClear()');
-
-
-
-//
-// Query Plan Revision
-// http://docs.mongodb.org/manual/core/query-plans/#query-plan-revision
-// As collections change over time, the query optimizer deletes the query plan and re-evaluates
-// after any of the following events:
-// - The collection receives 1,000 write operations.
-// - The reIndex rebuilds the index.
-// - You add or drop an index.
-// - The mongod process restarts.
-//
-
-// Case 1: The collection receives 1,000 write operations.
-// Steps:
-// Populate cache. Cache should contain 1 key after running query.
-// Insert 1000 documents.
-// Cache should be cleared.
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
-for (var i = 0; i < 1000; i++) {
- t.save({b: i});
-}
-assert.eq(0, getShapes().length, 'plan cache should be empty after adding 1000 documents.');
-
-// Case 2: The reIndex rebuilds the index.
-// Steps:
-// Populate the cache with 1 entry.
-// Run reIndex on the collection.
-// Confirm that cache is empty.
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
-res = t.reIndex();
-print('reIndex result = ' + tojson(res));
-assert.eq(0, getShapes().length, 'plan cache should be empty after reIndex operation');
-
-// Case 3: You add or drop an index.
-// Steps:
-// Populate the cache with 1 entry.
-// Add an index.
-// Confirm that cache is empty.
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-assert.eq(1, getShapes().length, 'plan cache should not be empty after query');
-t.ensureIndex({b: 1});
-assert.eq(0, getShapes().length, 'plan cache should be empty after adding index');
-
-// Case 4: The mongod process restarts
-// Not applicable.
-
-
-
-//
-// Tests for plan reason and feedback in planCacheListPlans
-//
-
-// Generate more plans for test query by adding indexes (compound and sparse).
-// This will also clear the plan cache.
-t.ensureIndex({a: -1}, {sparse: true});
-t.ensureIndex({a: 1, b: 1});
-
-// Implementation note: feedback stats is calculated after 20 executions.
-// See PlanCacheEntry::kMaxFeedback.
-var numExecutions = 100;
-var queryA3B3 = {a: 3, b: 3};
-for (var i = 0; i < numExecutions; i++) {
- assert.eq(0, t.find(queryA3B3, projectionA1).sort(sortA1).itcount(), 'query failed');
-}
-
-plans = getPlans(queryA3B3, sortA1, projectionA1);
-
-// This should be obvious but feedback is available only for the first (winning) plan.
-print('planCacheListPlans result (after adding indexes and completing 20 executions):');
-for (var i = 0; i < plans.length; i++) {
- print('plan ' + i + ': ' + tojson(plans[i]));
- assert.gt(plans[i].reason.score, 0, 'plan ' + i + ' score is invalid');
- if (i > 0) {
- assert.lte(plans[i].reason.score, plans[i-1].reason.score,
- 'plans not sorted by score in descending order. ' +
- 'plan ' + i + ' has a score that is greater than that of the previous plan');
- }
- assert(plans[i].reason.stats.hasOwnProperty('type'), 'no stats inserted for plan ' + i);
-}
-
-// feedback meaningful only for plan 0
-// feedback is capped at 20
-//
-// This assertion relies on the condition that the plan cache feedback mechanism
-// has not evicted the cache entry. In order for this to be reliable, we must be
-// sure that the plan scores the same each time it is run. We can be sure of this
-// because:
-// 1) The plan will produce zero results. This means that the productivity will
-// always be zero, and in turn the score will always be the same.
-// 2) The plan hits EOF quickly. This means that it will be cached despite
-// returning zero results.
-assert.eq(20, plans[0].feedback.nfeedback, 'incorrect nfeedback');
-assert.gt(plans[0].feedback.averageScore, 0, 'invalid average score');
-
-
-
-//
-// Tests for shell helpers
-//
-
-// Reset collection data and indexes.
-t.drop();
-var n = 200;
-for (var i = 0; i < n; i++) {
- t.save({a:i, b: i});
-}
-t.ensureIndex({a: 1});
-t.ensureIndex({b: 1});
-t.ensureIndex({a: 1, b: 1});
-
-// Repopulate plan cache with 3 query shapes.
-var queryB = {a: {$gte: 0}, b: {$gte: 0}};
-var projectionB = {_id: 0, b: 1};
-var sortB = {b: -1};
-assert.eq(n, t.find(queryB, projectionB).sort(sortB).itcount(), 'unexpected document count');
-assert.eq(n, t.find(queryB, projectionB).itcount(), 'unexpected document count');
-assert.eq(n, t.find(queryB).sort(sortB).itcount(), 'unexpected document count');
-assert.eq(n, t.find(queryB).itcount(), 'unexpected document count');
-assert.eq(4, getShapes().length, 'unexpected number of query shapes in plan cache');
-
-//
-// PlanCache.getName
-//
-
-var planCache = t.getPlanCache();
-assert.eq(t.getName(), planCache.getName(), 'name of plan cache should match collection');
-
-//
-// PlanCache.help
-//
-planCache.help();
-
-//
-// shellPrint
-//
-
-print('plan cache:');
-print(planCache);
-
-//
-// collection.getPlanCache().listQueryShapes
-//
-
-missingCollection.drop();
-// should return empty array on non-existent collection.
-assert.eq(0, missingCollection.getPlanCache().listQueryShapes().length,
- 'collection.getPlanCache().listQueryShapes() should return empty results ' +
- 'on non-existent collection');
-assert.eq(getShapes(), planCache.listQueryShapes(),
- 'unexpected collection.getPlanCache().listQueryShapes() shell helper result');
-
-//
-// collection.getPlanCache().getPlansByQuery
-//
-
-// should return empty array on non-existent query shape.
-assert.eq(0, planCache.getPlansByQuery({unknownfield: 1}).length,
- 'collection.getPlanCache().getPlansByQuery() should return empty results ' +
- 'on non-existent collection');
-// should error on missing required field query.
-assert.throws(function() { planCache.getPlansByQuery() });
-
-// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
-assert.eq(getPlans(queryB, sortB, projectionB), planCache.getPlansByQuery(queryB, projectionB,
- sortB),
- 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, {}, projectionB), planCache.getPlansByQuery(queryB, projectionB),
- 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, sortB, {}), planCache.getPlansByQuery(queryB, undefined, sortB),
- 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, {}, {}), planCache.getPlansByQuery(queryB),
- 'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-
-// getPlansByQuery() will also accept a single argument with the query shape object
-// as an alternative to specifying the query, sort and projection parameters separately.
-// Format of query shape object:
-// {
-// query: <query>,
-// projection: <projection>,
-// sort: <sort>
-// }
-var shapeB = {query: queryB, projection: projectionB, sort: sortB};
-assert.eq(getPlans(queryB, sortB, projectionB),
- planCache.getPlansByQuery(shapeB),
- 'collection.getPlanCache().getPlansByQuery() did not accept query shape object');
-
-// Should return empty array on missing or extra fields in query shape object.
-// The entire invalid query shape object will be passed to the command
-// as the 'query' component which will result in the server returning an empty
-// array of plans.
-assert.eq(0, planCache.getPlansByQuery({query: queryB}).length,
- 'collection.getPlanCache.getPlansByQuery should return empty results on ' +
- 'incomplete query shape');
-assert.eq(0, planCache.getPlansByQuery({query: queryB, sort: sortB,
- projection: projectionB,
- unknown_field: 1}).length,
- 'collection.getPlanCache.getPlansByQuery should return empty results on ' +
- 'invalid query shape');
-
-
-
-//
-// collection.getPlanCache().clearPlansByQuery
-//
-
-// should not error on non-existent query shape.
-planCache.clearPlansByQuery({unknownfield: 1});
-// should error on missing required field query.
-assert.throws(function() { planCache.clearPlansByQuery() });
-
-// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
-planCache.clearPlansByQuery(queryB, projectionB, sortB);
-assert.eq(3, getShapes().length,
- 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
-
-planCache.clearPlansByQuery(queryB, projectionB);
-assert.eq(2, getShapes().length,
- 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
-
-planCache.clearPlansByQuery(queryB, undefined, sortB);
-assert.eq(1, getShapes().length,
- 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
-
-planCache.clearPlansByQuery(queryB);
-assert.eq(0, getShapes().length,
- 'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
-
-// clearPlansByQuery() will also accept a single argument with the query shape object
-// as an alternative to specifying the query, sort and projection parameters separately.
-// Format of query shape object:
-// {
-// query: <query>,
-// projection: <projection>,
-// sort: <sort>
-// }
-
-// Repopulate cache
-assert.eq(n, t.find(queryB, projectionB).sort(sortB).itcount(), 'unexpected document count');
-
-// Clear using query shape object.
-planCache.clearPlansByQuery(shapeB);
-assert.eq(0, getShapes().length,
- 'collection.getPlanCache().clearPlansByQuery() did not accept query shape object');
-
-// Should not error on missing or extra fields in query shape object.
-planCache.clearPlansByQuery({query: queryB});
-planCache.clearPlansByQuery({query: queryB, sort: sortB, projection: projectionB,
- unknown_field: 1});
-
-
-
-//
-// collection.getPlanCache().clear
-//
-
-// Should not error on non-existent collection.
-missingCollection.getPlanCache().clear();
-// Re-populate plan cache with 1 query shape.
-assert.eq(n, t.find(queryB, projectionB).sort(sortB).itcount(), 'unexpected document count');
-assert.eq(1, getShapes().length, 'plan cache should not be empty after running cacheable query');
-// Clear cache.
-planCache.clear();
-assert.eq(0, getShapes().length, 'plan cache not empty after clearing');
-
-
-
-//
-// explain and plan cache
-// Running explain should not mutate the plan cache.
-//
-
-planCache.clear();
-
-// MultiPlanRunner explain
-var multiPlanRunnerExplain = t.find(queryB, projectionB).sort(sortB).explain(true);
-
-print('multi plan runner explain = ' + tojson(multiPlanRunnerExplain));
-
-assert.eq(0, getShapes().length, 'explain should not mutate plan cache');
-
-
-
-
-//
-// SERVER-12796: Plans for queries that return zero
-// results should not be cached.
-//
-
-t.drop();
-
-t.ensureIndex({a: 1});
-t.ensureIndex({b: 1});
-
-for (var i = 0; i < 200; i++) {
- t.save({a: 1, b: 1});
-}
-t.save({a: 2, b: 2});
-
-// A query with zero results that does not hit EOF should not be cached...
-assert.eq(0, t.find({c: 0}).itcount(), 'unexpected count');
-assert.eq(0, getShapes().length, 'unexpected number of query shapes in plan cache');
-
-// ...but a query with zero results that hits EOF will be cached.
-assert.eq(0, t.find({a: 3, b: 3}).itcount(), 'unexpected count');
-assert.eq(1, getShapes().length, 'unexpected number of query shapes in plan cache');
-
-// A query that returns results but does not hit EOF will also be cached.
-assert.eq(200, t.find({a: {$gte: 0}, b:1}).itcount(), 'unexpected count');
-assert.eq(2, getShapes().length, 'unexpected number of query shapes in plan cache');
diff --git a/jstests/profile1.js b/jstests/profile1.js
deleted file mode 100644
index 7c168dea0ab..00000000000
--- a/jstests/profile1.js
+++ /dev/null
@@ -1,170 +0,0 @@
-// This test is inherently a race between the client and the server, and the test is unreliable.
-// We compare the duration of a query as seen by the server with the duration as seen by the
-// client, and if the client is delayed by a few milliseconds, or, in extreme cases, by even
-// 1 millisecond, it may think that there is a problem when in fact it's just a race, and the
-// client lost the race.
-// Windows seems to experience this more than the other platforms, so, to "fix" SERVER-5373,
-// disable the test for Windows.
-
-if (!_isWindows()) {
-
-print("profile1.js BEGIN");
-
-// special db so that it can be run in parallel tests
-var stddb = db;
-var db = db.getSisterDB("profile1");
-var username = "jstests_profile1_user";
-
-db.dropUser(username)
-db.dropDatabase();
-
-try {
-
- db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
- db.auth( username, "password" );
-
- function profileCursor( query ) {
- query = query || {};
- Object.extend( query, { user:username + "@" + db.getName() } );
- return db.system.profile.find( query );
- }
-
- function getProfileAString() {
- var s = "\n";
- profileCursor().forEach( function(z){
- s += tojson( z ) + " ,\n" ;
- } );
- return s;
- }
-
- /* With pre-created system.profile (capped) */
- db.runCommand({profile: 0});
- db.getCollection("system.profile").drop();
- assert(!db.getLastError(), "Z");
- assert.eq(0, db.runCommand({profile: -1}).was, "A");
-
- // Create 32MB profile (capped) collection
- db.system.profile.drop();
- db.createCollection("system.profile", {capped: true, size: 32 * 1024 * 1024});
- db.runCommand({profile: 2});
- assert.eq(2, db.runCommand({profile: -1}).was, "B");
- assert.eq(1, db.system.profile.stats().capped, "C");
- var capped_size = db.system.profile.storageSize();
- assert.gt(capped_size, 31 * 1024 * 1024, "D");
- assert.lt(capped_size, 65 * 1024 * 1024, "E");
-
- db.foo.findOne()
-
- var profileItems = profileCursor().toArray();
-
- // create a msg for later if there is a failure.
- var msg = "";
- profileItems.forEach(function(d) {msg += "profile doc: " + d.ns + " " + d.op + " " + tojson(d.query ? d.query : d.command)});
- msg += tojson(db.system.profile.stats());
-
- // If these nunmbers don't match, it is possible the collection has rolled over (set to 32MB above in the hope this doesn't happen)
- assert.eq( 4 , profileItems.length , "E2 -- " + msg );
-
- /* Make sure we can't drop if profiling is still on */
- assert.throws( function(z){ db.getCollection("system.profile").drop(); } )
-
- /* With pre-created system.profile (un-capped) */
- db.runCommand({profile: 0});
- db.getCollection("system.profile").drop();
- assert.eq(0, db.runCommand({profile: -1}).was, "F");
-
- db.createCollection("system.profile");
- assert.eq( 0, db.runCommand({profile: 2}).ok );
- assert.eq( 0, db.runCommand({profile: -1}).was, "G");
- assert.eq(null, db.system.profile.stats().capped, "G1");
-
- /* With no system.profile collection */
- db.runCommand({profile: 0});
- db.getCollection("system.profile").drop();
- assert.eq(0, db.runCommand({profile: -1}).was, "H");
-
- db.runCommand({profile: 2});
- assert.eq(2, db.runCommand({profile: -1}).was, "I");
- assert.eq(1, db.system.profile.stats().capped, "J");
- var auto_size = db.system.profile.storageSize();
- assert.lt(auto_size, capped_size, "K");
-
-
- db.eval("sleep(1)") // pre-load system.js
-
- function resetProfile( level , slowms ) {
- db.setProfilingLevel(0);
- db.system.profile.drop();
- db.setProfilingLevel(level,slowms);
- }
-
- resetProfile(2);
-
- db.eval( "sleep(25)" )
- db.eval( "sleep(120)" )
-
- assert.eq( 2 , profileCursor( { "command.$eval" : /^sleep/ } ).count() );
-
- assert.lte( 119 , profileCursor( { "command.$eval" : "sleep(120)" } )[0].millis );
- assert.lte( 24 , profileCursor( { "command.$eval" : "sleep(25)" } )[0].millis );
-
- /* sleep() could be inaccurate on certain platforms. let's check */
- print("\nsleep 2 time actual:");
- for (var i = 0; i < 4; i++) {
- print(db.eval("var x = new Date(); sleep(2); return new Date() - x;"));
- }
- print();
- print("\nsleep 20 times actual:");
- for (var i = 0; i < 4; i++) {
- print(db.eval("var x = new Date(); sleep(20); return new Date() - x;"));
- }
- print();
- print("\nsleep 120 times actual:");
- for (var i = 0; i < 4; i++) {
- print(db.eval("var x = new Date(); sleep(120); return new Date() - x;"));
- }
- print();
-
- function evalSleepMoreThan(millis,max){
- var start = new Date();
- db.eval("sleep("+millis+")");
- var end = new Date();
- var actual = end.getTime() - start.getTime();
- if ( actual > ( millis + 5 ) ) {
- print( "warning wanted to sleep for: " + millis + " but took: " + actual );
- }
- return actual >= max ? 1 : 0;
- }
-
- resetProfile(1,100);
- var delta = 0;
- delta += evalSleepMoreThan( 15 , 100 );
- delta += evalSleepMoreThan( 120 , 100 );
- assert.eq( delta , profileCursor( { "command.$eval" : /^sleep/ } ).count() , "X2 : " + getProfileAString() )
-
- resetProfile(1,20);
- delta = 0;
- delta += evalSleepMoreThan( 5 , 20 );
- delta += evalSleepMoreThan( 120 , 20 );
- assert.eq( delta , profileCursor( { "command.$eval" : /^sleep/ } ).count() , "X3 : " + getProfileAString() )
-
- resetProfile(2);
- db.profile1.drop();
- var q = { _id : 5 };
- var u = { $inc : { x : 1 } };
- db.profile1.update( q , u );
- var r = profileCursor( { ns : db.profile1.getFullName() } ).sort( { $natural : -1 } )[0]
- assert.eq( q , r.query , "Y1: " + tojson(r) );
- assert.eq( u , r.updateobj , "Y2" );
- assert.eq( "update" , r.op , "Y3" );
- assert.eq("profile1.profile1", r.ns, "Y4");
-
- print("profile1.js SUCCESS OK");
-
-} finally {
- // disable profiling for subsequent tests
- assert.commandWorked( db.runCommand( {profile:0} ) );
- db = stddb;
-}
-
-} // !_isWindows()
diff --git a/jstests/profile2.js b/jstests/profile2.js
deleted file mode 100644
index 1006c03a40d..00000000000
--- a/jstests/profile2.js
+++ /dev/null
@@ -1,25 +0,0 @@
-print("profile2.js BEGIN");
-
-// special db so that it can be run in parallel tests
-var stddb = db;
-var db = db.getSisterDB("profile2");
-
-try {
-
- assert.commandWorked( db.runCommand( {profile:2} ) );
-
- var str = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
- huge = str;
- while (huge.length < 2*1024*1024){
- huge += str;
- }
-
- db.profile2.count({huge:huge}) // would make a huge entry in db.system.profile
-
- print("profile2.js SUCCESS OK");
-
-} finally {
- // disable profiling for subsequent tests
- assert.commandWorked( db.runCommand( {profile:0} ) );
- db = stddb;
-}
diff --git a/jstests/profile3.js b/jstests/profile3.js
deleted file mode 100644
index 89fa0a33269..00000000000
--- a/jstests/profile3.js
+++ /dev/null
@@ -1,54 +0,0 @@
-
-// special db so that it can be run in parallel tests
-var stddb = db;
-var db = db.getSisterDB("profile3");
-
-db.dropAllUsers();
-t = db.profile3;
-t.drop();
-
-profileCursor = function( query ) {
- print( "----" );
- query = query || {};
- Object.extend( query, { user: username + "@" + db.getName() } );
- return db.system.profile.find( query );
-}
-
-try {
- username = "jstests_profile3_user";
- db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
- db.auth( username, "password" );
-
- db.setProfilingLevel(0);
-
- db.system.profile.drop();
- assert.eq( 0 , profileCursor().count() )
-
- db.setProfilingLevel(2);
-
- db.createCollection(t.getName(), {usePowerOf2Sizes: false});
- t.insert( { x : 1 } );
- t.findOne( { x : 1 } );
- t.find( { x : 1 } ).count();
- t.update( { x : 1 }, {$inc:{a:1}} );
- t.update( { x : 1 }, {$inc:{a:1}} );
- t.update( { x : 0 }, {$inc:{a:1}} );
-
- profileCursor().forEach( printjson )
-
- db.setProfilingLevel(0);
-
-
- assert.eq(profileCursor({nMatched: {$exists:1}}).count(), 3)
- assert.eq(profileCursor({nMatched: 1}).count(), 2)
- assert.eq(profileCursor({nMatched: 0}).count(), 1)
- assert.eq(profileCursor({nmoved: 1}).count(), 1)
-
- db.system.profile.drop();
-
-}
-finally {
- db.setProfilingLevel(0);
- db = stddb;
-}
-
diff --git a/jstests/profile4.js b/jstests/profile4.js
deleted file mode 100644
index 25d71acba6e..00000000000
--- a/jstests/profile4.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Check debug information recorded for a query.
-
-// special db so that it can be run in parallel tests
-var stddb = db;
-var db = db.getSisterDB("profile4");
-
-db.dropAllUsers();
-t = db.profile4;
-t.drop();
-
-function profileCursor() {
- return db.system.profile.find( { user:username + "@" + db.getName() } );
-}
-
-function lastOp() {
- p = profileCursor().sort( { $natural:-1 } ).next();
-// printjson( p );
- return p;
-}
-
-function checkLastOp( spec ) {
- p = lastOp();
- for( i in spec ) {
- s = spec[ i ];
- assert.eq( s[ 1 ], p[ s[ 0 ] ], s[ 0 ] );
- }
-}
-
-try {
- username = "jstests_profile4_user";
- db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
- db.auth( username, "password" );
-
- db.setProfilingLevel(0);
-
- db.system.profile.drop();
- assert.eq( 0 , profileCursor().count() )
-
- db.setProfilingLevel(2);
-
- t.find().itcount();
- checkLastOp( [ [ "op", "query" ],
- [ "ns", "profile4.profile4" ],
- [ "query", {} ],
- [ "ntoreturn", 0 ],
- [ "ntoskip", 0 ],
- [ "nscanned", 0 ],
- [ "keyUpdates", 0 ],
- [ "nreturned", 0 ],
- [ "responseLength", 20 ] ] );
-
- t.save( {} );
-
- // check write lock stats are set
- o = lastOp();
-
- assert.eq('insert', o.op);
- assert.eq( 0, o.lockStats.timeLockedMicros.r );
- assert.lt( 0, o.lockStats.timeLockedMicros.w );
- assert.eq( 0, o.lockStats.timeAcquiringMicros.r );
- //assert.lt( 0, o.lockStats.timeAcquiringMicros.w ); // Removed due to SERVER-8331
-
- // check read lock stats are set
- t.find();
- o = lastOp();
- assert.eq('query', o.op);
- assert.lt( 0, o.lockStats.timeLockedMicros.r );
- assert.eq( 0, o.lockStats.timeLockedMicros.w );
- //assert.lt( 0, o.lockStats.timeAcquiringMicros.r ); // Removed due to SERVER-8331
- //assert.lt( 0, o.lockStats.timeAcquiringMicros.w ); // Removed due to SERVER-8331
-
- t.save( {} );
- t.save( {} );
- t.find().skip( 1 ).limit( 4 ).itcount();
- checkLastOp( [ [ "ntoreturn", 4 ],
- [ "ntoskip", 1 ],
- [ "nscanned", 3 ],
- [ "nreturned", 2 ] ] );
-
- t.find().batchSize( 2 ).next();
- o = lastOp();
- assert.lt( 0, o.cursorid );
-
- t.find( {a:1} ).itcount();
- checkLastOp( [ [ "query", {a:1} ] ] );
-
- t.find( {_id:0} ).itcount();
- checkLastOp( [ [ "idhack", true ] ] );
-
- t.find().sort( {a:1} ).itcount();
- checkLastOp( [ [ "scanAndOrder", true ] ] );
-
- t.ensureIndex( {a:1} );
- t.find( {a:1} ).itcount();
- o = lastOp();
- assert.eq( "FETCH", o.execStats.type, tojson( o.execStats ) );
- assert.eq( "IXSCAN", o.execStats.children[0].type, tojson( o.execStats ) );
-
- // For queries with a lot of stats data, the execution stats in the profile
- // is replaced by the plan summary.
- var orClauses = 32;
- var bigOrQuery = { $or: [] };
- for ( var i = 0; i < orClauses; ++i ) {
- var indexSpec = {};
- indexSpec[ "a" + i ] = 1;
- t.ensureIndex( indexSpec );
- bigOrQuery[ "$or" ].push( indexSpec );
- }
- t.find( bigOrQuery ).itcount();
- o = lastOp();
- assert.neq( undefined, o.execStats.summary, tojson( o.execStats ) );
-
- db.setProfilingLevel(0);
- db.system.profile.drop();
-}
-finally {
- db.setProfilingLevel(0);
- db = stddb;
-}
diff --git a/jstests/proj_key1.js b/jstests/proj_key1.js
deleted file mode 100644
index ad944f71827..00000000000
--- a/jstests/proj_key1.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.proj_key1;
-t.drop();
-
-as = []
-
-for ( i=0; i<10; i++ ){
- as.push( { a : i } )
- t.insert( { a : i , b : i } );
-}
-
-assert( ! t.find( {} , { a : 1 } ).explain().indexOnly , "A1" )
-
-t.ensureIndex( { a : 1 } )
-
-assert( t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).explain().indexOnly , "A2" )
-
-assert( ! t.find( { a : { $gte : 0 } } , { a : 1 } ).explain().indexOnly , "A3" ) // because id _id
-
-// assert( t.find( {} , { a : 1 , _id : 0 } ).explain().indexOnly , "A4" ); // TODO: need to modify query optimier SERVER-2109
-
-assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).toArray() , "B1" )
-assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).batchSize(2).toArray() , "B1" )
-
-
-
-
-
diff --git a/jstests/pull.js b/jstests/pull.js
deleted file mode 100644
index 3cb6328e2de..00000000000
--- a/jstests/pull.js
+++ /dev/null
@@ -1,33 +0,0 @@
-t = db.jstests_pull;
-t.drop();
-
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 6 } } );
-assert.eq( [ 1, 3 ], t.findOne().a );
-
-t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 2 } } );
-assert.eq( [ 1, 3 ], t.findOne().a );
-
-t.drop();
-t.save( { a: [ 2 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 6 } } );
-assert.eq( [], t.findOne().a );
-
-// SERVER-6047: $pull creates empty nested docs for dotted fields
-// that don't exist.
-t.drop()
-t.save({ m : 1 } );
-t.update( { m : 1 }, { $pull : { 'a.b' : [ 1 ] } } );
-assert( ('a' in t.findOne()) == false );
-// Non-obvious bit: the implementation of non-in-place update
-// might do different things depending on whether the "new" field
-// comes before or after existing fields in the document.
-// So for now it's worth testing that too. Sorry, future; blame the past.
-t.update( { m : 1 }, { $pull : { 'x.y' : [ 1 ] } } );
-assert( ('z' in t.findOne()) == false );
-// End SERVER-6047
diff --git a/jstests/pull2.js b/jstests/pull2.js
deleted file mode 100644
index ca13fc2e726..00000000000
--- a/jstests/pull2.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-t = db.pull2;
-t.drop();
-
-t.save( { a : [ { x : 1 } , { x : 1 , b : 2 } ] } );
-assert.eq( 2 , t.findOne().a.length , "A" );
-
-t.update( {} , { $pull : { a : { x : 1 } } } );
-assert.eq( 0 , t.findOne().a.length , "B" );
-
-assert.eq( 1 , t.find().count() , "C1" )
-
-t.update( {} , { $push : { a : { x : 1 } } } )
-t.update( {} , { $push : { a : { x : 1 , b : 2 } } } )
-assert.eq( 2 , t.findOne().a.length , "C" );
-
-t.update( {} , { $pullAll : { a : [ { x : 1 } ] } } );
-assert.eq( 1 , t.findOne().a.length , "D" );
-
-t.update( {} , { $push : { a : { x : 2 , b : 2 } } } )
-t.update( {} , { $push : { a : { x : 3 , b : 2 } } } )
-t.update( {} , { $push : { a : { x : 4 , b : 2 } } } )
-assert.eq( 4 , t.findOne().a.length , "E" );
-
-assert.eq( 1 , t.find().count() , "C2" )
-
-
-t.update( {} , { $pull : { a : { x : { $lt : 3 } } } } );
-assert.eq( 2 , t.findOne().a.length , "F" );
-assert.eq( [ 3 , 4 ] , t.findOne().a.map( function(z){ return z.x; } ) , "G" )
-
diff --git a/jstests/pull_or.js b/jstests/pull_or.js
deleted file mode 100644
index 905c7a87060..00000000000
--- a/jstests/pull_or.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.pull_or;
-t.drop();
-
-doc = { _id : 1 , a : { b : [ { x : 1 },
- { y : 'y' },
- { x : 2 },
- { z : 'z' } ] } };
-
-t.insert( doc );
-
-t.update({}, { $pull : { 'a.b' : { 'y' : { $exists : true } } } } );
-
-assert.eq( [ { x : 1 }, { x : 2 }, { z : 'z' } ], t.findOne().a.b );
-
-t.drop();
-t.insert( doc );
-t.update({}, { $pull : { 'a.b' : { $or : [ { 'y' : { $exists : true } },
- { 'z' : { $exists : true } } ] } } } );
-
-assert.eq( [ { x : 1 }, { x : 2 } ], t.findOne().a.b );
diff --git a/jstests/pull_remove1.js b/jstests/pull_remove1.js
deleted file mode 100644
index 379f3f2832b..00000000000
--- a/jstests/pull_remove1.js
+++ /dev/null
@@ -1,14 +0,0 @@
-
-t = db.pull_remove1
-t.drop()
-
-o = { _id : 1 , a : [ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ] }
-t.insert( o )
-
-assert.eq( o , t.findOne() , "A1" )
-
-o.a = o.a.filter( function(z){ return z >= 6; } )
-t.update( {} , { $pull : { a : { $lt : 6 } } } )
-
-assert.eq( o.a , t.findOne().a , "A2" )
-
diff --git a/jstests/pullall.js b/jstests/pullall.js
deleted file mode 100644
index 7dd932c4bbf..00000000000
--- a/jstests/pullall.js
+++ /dev/null
@@ -1,31 +0,0 @@
-t = db.jstests_pullall;
-t.drop();
-
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pullAll: { a: [ 3 ] } } );
-assert.eq( [ 1, 2 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [ 3 ] } } );
-assert.eq( [ 1, 2 ], t.findOne().a );
-
-t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pullAll: { a: [ 2, 3 ] } } );
-assert.eq( [ 1 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [] } } );
-assert.eq( [ 1 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [ 1, 5 ] } } );
-assert.eq( [], t.findOne().a );
-
-// SERVER-6047: $pullAll creates empty nested docs for dotted fields
-// that don't exist.
-t.drop()
-t.save({ m : 1 } );
-t.update( { m : 1 }, { $pullAll : { 'a.b' : [ 1 ] } } );
-assert( ('a' in t.findOne()) == false );
-// Non-obvious bit: the implementation of non-in-place update
-// might do different things depending on whether the "new" field
-// comes before or after existing fields in the document.
-// So for now it's worth testing that too. Sorry, future; blame the past.
-t.update( { m : 1 }, { $pullAll : { 'x.y' : [ 1 ] } } );
-assert( ('z' in t.findOne()) == false );
-// End SERVER-6047
diff --git a/jstests/pullall2.js b/jstests/pullall2.js
deleted file mode 100644
index 61369badaa4..00000000000
--- a/jstests/pullall2.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.pullall2
-t.drop()
-
-o = { _id : 1 , a : [] }
-for ( i=0; i<5; i++ )
- o.a.push( { x : i , y : i } )
-
-t.insert( o )
-
-assert.eq( o , t.findOne() , "A" );
-
-t.update( {} , { $pull : { a : { x : 3 } } } )
-o.a = o.a.filter( function(z){ return z.x != 3 } )
-assert.eq( o , t.findOne() , "B" );
-
-t.update( {} , { $pull : { a : { x : { $in : [ 1 , 4 ] } } } } );
-o.a = o.a.filter( function(z){ return z.x != 1 } )
-o.a = o.a.filter( function(z){ return z.x != 4 } )
-assert.eq( o , t.findOne() , "C" );
diff --git a/jstests/push.js b/jstests/push.js
deleted file mode 100644
index 9bcaa2ffb6b..00000000000
--- a/jstests/push.js
+++ /dev/null
@@ -1,54 +0,0 @@
-
-t = db.push
-t.drop();
-
-t.save( { _id : 2 , a : [ 1 ] } );
-t.update( { _id : 2 } , { $push : { a : 2 } } );
-assert.eq( "1,2" , t.findOne().a.toString() , "A" );
-t.update( { _id : 2 } , { $push : { a : 3 } } );
-assert.eq( "1,2,3" , t.findOne().a.toString() , "B" );
-
-t.update( { _id : 2 } , { $pop : { a : 1 } } );
-assert.eq( "1,2" , t.findOne().a.toString() , "C" );
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "2" , t.findOne().a.toString() , "D" );
-
-
-t.update( { _id : 2 } , { $push : { a : 3 } } );
-t.update( { _id : 2 } , { $push : { a : 4 } } );
-t.update( { _id : 2 } , { $push : { a : 5 } } );
-assert.eq( "2,3,4,5" , t.findOne().a.toString() , "E1" );
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "3,4,5" , t.findOne().a.toString() , "E2" );
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "4,5" , t.findOne().a.toString() , "E3" );
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.isnull( db.getLastError() , "E4a" )
-assert.eq( "5" , t.findOne().a.toString() , "E4" );
-
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.isnull( db.getLastError() , "E5a")
-assert.eq( "" , t.findOne().a.toString() , "E5" );
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.isnull( db.getLastError() , "E6a" )
-assert.eq( "" , t.findOne().a.toString() , "E6" );
-
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.isnull( db.getLastError() , "E7a" )
-assert.eq( "" , t.findOne().a.toString() , "E7" );
-
-t.update( { _id : 2 } , { $pop : { a : 1 } } );
-assert.isnull( db.getLastError() , "E8a" )
-assert.eq( "" , t.findOne().a.toString() , "E8" );
-
-t.update( { _id : 2 } , { $pop : { b : -1 } } );
-assert.isnull( db.getLastError() , "E4a" )
-
-t.update( { _id : 2 } , { $pop : { b : 1 } } );
-assert.isnull( db.getLastError() , "E4a" )
-
diff --git a/jstests/push2.js b/jstests/push2.js
deleted file mode 100644
index e8bcff6760c..00000000000
--- a/jstests/push2.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.push2
-t.drop()
-
-t.save( { _id : 1 , a : [] } )
-
-s = new Array(700000).toString();
-
-gotError = null;
-
-for ( x=0; x<100; x++ ){
- print (x + " pushes");
- t.update( {} , { $push : { a : s } } );
- gotError = db.getLastError();
- if ( gotError )
- break;
-}
-
-assert( gotError , "should have gotten error" );
-
-t.drop();
diff --git a/jstests/push_sort.js b/jstests/push_sort.js
deleted file mode 100644
index 87916d5ea6b..00000000000
--- a/jstests/push_sort.js
+++ /dev/null
@@ -1,96 +0,0 @@
-//
-// $push acquired the possibility of sorting the resulting array as part of SERVER-8008. This
-// test exercises such $sort clause from the shell user's perspective.
-//
-
-t = db.push_sort;
-t.drop();
-
-//
-// Valid Cases
-//
-
-// $slice amount is too large to kick in.
-t.save( { _id: 1, x: [ {a:1}, {a:2} ] } );
-t.update( {_id:1}, { $push: { x: { $each: [ {a:3} ], $slice:-5, $sort: {a:1} } } } )
-assert.eq( [{a:1}, {a:2}, {a:3}] , t.findOne( {_id:1} ).x );
-
-// $slice amount kicks in using values of both the base doc and of the $each clause.
-t.save({ _id: 2, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:2}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:1} } } } )
-assert.eq( [{a:2}, {a:3}], t.findOne( {_id:2} ).x );
-
-// $sort is descending and $slice is too large to kick in.
-t.save({ _id: 3, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:3}, { $push: { x: { $each: [ {a:2} ], $slice:-5, $sort: {a:-1} } } } )
-assert.eq( [{a:3}, {a:2}, {a:1}], t.findOne( {_id:3} ).x );
-
-// $sort is descending and $slice kicks in using values of both the base doc and of
-// the $each clause.
-t.save({ _id: 4, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:4}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:-1} } } } )
-assert.eq( [{a:2}, {a:1}], t.findOne( {_id:4} ).x );
-
-// $sort over only a portion of the array's elements objects and #slice kicking in
-// using values of both the base doc and of the $each clause.
-t.save({ _id: 5, x: [ {a:1,b:2}, {a:3,b:1} ] } );
-t.update( {_id:5}, { $push: { x: { $each: [ {a:2,b:3} ], $slice:-2, $sort: {b:1} } } } )
-assert.eq( [{a:1, b:2}, {a:2,b:3}], t.findOne( {_id:5} ).x );
-
-// $sort over an array of nested objects and $slice too large to kick in.
-t.save({ _id: 6, x: [ {a:{b:2}}, {a:{b:1}} ] } );
-t.update( {_id:6}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-5, $sort: {'a.b':1} } } } )
-assert.eq( [{a:{b:1}}, {a:{b:2}}, {a:{b:3}}], t.findOne( {_id:6} ).x );
-
-// $sort over an array of nested objects and $slice kicking in using values of both the
-// base doc and of the $each clause.
-t.save({ _id: 7, x: [ {a:{b:2}}, {a:{b:1}} ] } );
-t.update( {_id:7}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-2, $sort: {'a.b':1} } } } )
-assert.eq( [{a:{b:2}}, {a:{b:3}}], t.findOne( {_id:7} ).x );
-
-//
-// Invalid Cases
-//
-
-t.save({ _id: 100, x: [ {a:1} ] } );
-
-// For now, elements of the $each vector need to be objects. In here, '2' is an invalide $each.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [ 2 ], $slice:-2, $sort:{a:1} } } } ) )
-
-// For the same reason as above, '1' is an invalid $each element.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2},1], $slice:-2, $sort:{a:1} } } }))
-
-// The sort key pattern cannot be empty.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort:{} } } } ) )
-
-// For now, we do not support positive $slice's (ie, trimming from the array's front).
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:2, $sort: {a:1} } } }))
-
-// A $slice cannot be a fractional value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2.1, $sort: {a:1} } }}))
-
-// The sort key pattern's value must be either 1 or -1. In here, {a:-2} is an invalid value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:-2} } } }))
-
-// For now, we are not supporting sorting of basic elements (non-object, non-arrays). In here,
-// the $sort clause would need to have a key pattern value rather than 1.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: 1 } } } ) )
-
-// The key pattern 'a.' is an invalid value for $sort.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'a.':1} }}}))
-
-// An empty key pattern is not a valid $sort value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'':1} } } }))
-
-// If a $slice is used, the only other $sort clause that's accepted is $sort. In here, $xxx
-// is not a valid clause.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $xxx: {s:1} } } } ) )
-
-t.remove({})
-
-// Ensure that existing values are validated in the array as objects during a $sort with $each,
-// not only the elements in the $each array.
-t.save({ _id: 100, x: [ 1, "foo" ] } );
-assert.throws(t.update(
- {_id: 100},
- { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:1} } } } ) )
diff --git a/jstests/pushall.js b/jstests/pushall.js
deleted file mode 100644
index eda68203ed3..00000000000
--- a/jstests/pushall.js
+++ /dev/null
@@ -1,20 +0,0 @@
-t = db.jstests_pushall;
-t.drop();
-
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pushAll: { a: [ 4 ] } } );
-assert.eq( [ 1, 2, 3, 4 ], t.findOne().a );
-t.update( {}, { $pushAll: { a: [ 4 ] } } );
-assert.eq( [ 1, 2, 3, 4, 4 ], t.findOne().a );
-
-t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pushAll: { a: [ 4, 5 ] } } );
-assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
-t.update( {}, { $pushAll: { a: [] } } );
-assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
-
-t.drop();
-t.save( {} );
-t.update( {}, { $pushAll: { a: [ 1, 2 ] } } );
-assert.eq( [ 1, 2 ], t.findOne().a );
diff --git a/jstests/query1.js b/jstests/query1.js
deleted file mode 100644
index 8fa402cda65..00000000000
--- a/jstests/query1.js
+++ /dev/null
@@ -1,26 +0,0 @@
-
-t = db.query1;
-t.drop();
-
-t.save( { num : 1 } );
-t.save( { num : 3 } )
-t.save( { num : 4 } );
-
-num = 0;
-total = 0;
-
-t.find().forEach(
- function(z){
- num++;
- total += z.num;
- }
-);
-
-assert.eq( num , 3 , "num" )
-assert.eq( total , 8 , "total" )
-
-assert.eq( 3 , t.find()._addSpecial( "$comment" , "this is a test" ).itcount() , "B1" )
-assert.eq( 3 , t.find()._addSpecial( "$comment" , "this is a test" ).count() , "B2" )
-
-assert.eq( 3 , t.find( { "$comment" : "yo ho ho" } ).itcount() , "C1" )
-assert.eq( 3 , t.find( { "$comment" : "this is a test" } ).count() , "C2" )
diff --git a/jstests/queryoptimizer3.js b/jstests/queryoptimizer3.js
deleted file mode 100644
index a90c7985839..00000000000
--- a/jstests/queryoptimizer3.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Check cases where index scans are aborted due to the collection being dropped. SERVER-4400
-
-t = db.jstests_queryoptimizer3;
-t.drop();
-
-p = startParallelShell( 'for( i = 0; i < 400; ++i ) { sleep( 50 ); db.jstests_queryoptimizer3.drop(); }' );
-
-for( i = 0; i < 100; ++i ) {
- t.drop();
- t.ensureIndex({a:1});
- t.ensureIndex({b:1});
- for( j = 0; j < 100; ++j ) {
- t.save({a:j,b:j});
- }
- m = i % 5;
- if ( m == 0 ) {
- t.count({a:{$gte:0},b:{$gte:0}});
- }
- else if ( m == 1 ) {
- t.find({a:{$gte:0},b:{$gte:0}}).itcount();
- }
- else if ( m == 2 ) {
- t.remove({a:{$gte:0},b:{$gte:0}});
- }
- else if ( m == 3 ) {
- t.update({a:{$gte:0},b:{$gte:0}},{});
- }
- else if ( m == 4 ) {
- t.distinct('x',{a:{$gte:0},b:{$gte:0}});
- }
-}
-
-p();
diff --git a/jstests/queryoptimizer6.js b/jstests/queryoptimizer6.js
deleted file mode 100644
index 32efccbdb0b..00000000000
--- a/jstests/queryoptimizer6.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// Test that $ne constraints are accounted for in QueryPattern. SERVER-4665
-
-t = db.jstests_queryoptimizer6;
-t.drop();
-
-t.save( {a:1} );
-
-// There is a bug in the 2.4.x indexing where the first query below returns 0 results with this
-// index, but 1 result without it.
-//
-// t.ensureIndex( {b:1}, {sparse:true} );
-
-// The sparse index will be used, and recorded for this query pattern.
-assert.eq( 1, t.find( {a:1,b:{$ne:1}} ).itcount() );
-// The query pattern should be different, and the sparse index should not be used.
-assert.eq( 1, t.find( {a:1} ).itcount() );
diff --git a/jstests/queryoptimizera.js b/jstests/queryoptimizera.js
deleted file mode 100644
index f26c2b0978c..00000000000
--- a/jstests/queryoptimizera.js
+++ /dev/null
@@ -1,92 +0,0 @@
-// Check that a warning message about doing a capped collection scan for a query with an _id
-// constraint is printed at appropriate times. SERVER-5353
-
-function numWarnings() {
- logs = db.adminCommand( { getLog:"global" } ).log
- ret = 0;
- logs.forEach( function( x ) {
- if ( x.match( warningMatchRegexp ) ) {
- ++ret;
- }
- } );
- return ret;
-}
-
-collectionNameIndex = 0;
-
-// Generate a collection name not already present in the log.
-do {
- testCollectionName = 'jstests_queryoptimizera__' + collectionNameIndex++;
- warningMatchString = 'unindexed _id query on capped collection.*collection: test.' +
- testCollectionName;
- warningMatchRegexp = new RegExp( warningMatchString );
-
-} while( numWarnings() > 0 );
-
-t = db[ testCollectionName ];
-t.drop();
-
-notCappedCollectionName = testCollectionName + '_notCapped';
-
-notCapped = db[ notCappedCollectionName ];
-notCapped.drop();
-
-db.createCollection( testCollectionName, { capped:true, size:1000 } );
-db.createCollection( notCappedCollectionName, { autoIndexId:false } );
-
-t.insert( {} );
-notCapped.insert( {} );
-
-oldNumWarnings = 0;
-
-function assertNoNewWarnings() {
- assert.eq( oldNumWarnings, numWarnings() );
-}
-
-function assertNewWarning() {
- newNumWarnings = numWarnings();
- // Ensure that newNumWarnings > oldNumWarnings. It's not safe to test that oldNumWarnings + 1
- // == newNumWarnings, because a (simulated) page fault exception may cause multiple messages to
- // be logged instead of only one.
- assert.lt( oldNumWarnings, newNumWarnings );
- oldNumWarnings = newNumWarnings;
-}
-
-// Simple _id query
-t.find( { _id:0 } ).itcount();
-assertNoNewWarnings();
-
-// Simple _id query without an _id index, on a non capped collection.
-notCapped.find( { _id:0 } ).itcount();
-assertNoNewWarnings();
-
-// A multi field query, including _id.
-t.find( { _id:0, a:0 } ).itcount();
-assertNoNewWarnings();
-
-// An unsatisfiable query.
-t.find( { _id:0, a:{$in:[]} } ).itcount();
-assertNoNewWarnings();
-
-// An hinted query.
-t.find( { _id:0 } ).hint( { $natural:1 } ).itcount();
-assertNoNewWarnings();
-
-// Retry a multi field query.
-t.find( { _id:0, a:0 } ).itcount();
-assertNoNewWarnings();
-
-// Warnings should not be printed when an index is added on _id.
-t.ensureIndex( { _id:1 } );
-
-t.find( { _id:0 } ).itcount();
-assertNoNewWarnings();
-
-t.find( { _id:0, a:0 } ).itcount();
-assertNoNewWarnings();
-
-t.find( { _id:0, a:0 } ).itcount();
-assertNoNewWarnings();
-
-t.drop(); // cleanup
-notCapped.drop(); \ No newline at end of file
diff --git a/jstests/ref.js b/jstests/ref.js
deleted file mode 100644
index 20fd6ca94f0..00000000000
--- a/jstests/ref.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// to run:
-// ./mongo jstests/ref.js
-
-db.otherthings.drop();
-db.things.drop();
-
-var other = { s : "other thing", n : 1};
-db.otherthings.save(other);
-
-db.things.save( { name : "abc" } );
-x = db.things.findOne();
-x.o = new DBPointer( "otherthings" , other._id );
-db.things.save(x);
-
-assert( db.things.findOne().o.fetch().n == 1, "dbref broken 2" );
-
-other.n++;
-db.otherthings.save(other);
-assert( db.things.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref2.js b/jstests/ref2.js
deleted file mode 100644
index 29640cd5da0..00000000000
--- a/jstests/ref2.js
+++ /dev/null
@@ -1,14 +0,0 @@
-
-t = db.ref2;
-t.drop();
-
-a = { $ref : "foo" , $id : 1 };
-b = { $ref : "foo" , $id : 2 };
-
-
-t.save( { name : "a" , r : a } );
-t.save( { name : "b" , r : b } );
-
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { r : a } ).count() , "B" );
-assert.eq( 1 , t.find( { r : b } ).count() , "C" );
diff --git a/jstests/ref3.js b/jstests/ref3.js
deleted file mode 100644
index 14037ee4cc8..00000000000
--- a/jstests/ref3.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// to run:
-// ./mongo jstests/ref3.js
-
-db.otherthings3.drop();
-db.things3.drop();
-
-var other = { s : "other thing", n : 1};
-db.otherthings3.save(other);
-
-db.things3.save( { name : "abc" } );
-x = db.things3.findOne();
-x.o = new DBRef( "otherthings3" , other._id );
-db.things3.save(x);
-
-assert( db.things3.findOne().o.fetch().n == 1, "dbref broken 2" );
-
-other.n++;
-db.otherthings3.save(other);
-assert( db.things3.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref4.js b/jstests/ref4.js
deleted file mode 100644
index 1c105ef2795..00000000000
--- a/jstests/ref4.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-a = db.ref4a;
-b = db.ref4b;
-
-a.drop();
-b.drop();
-
-var other = { s : "other thing", n : 17 };
-b.save(other);
-
-a.save( { name : "abc" , others : [ new DBRef( "ref4b" , other._id ) , new DBPointer( "ref4b" , other._id ) ] } );
-assert( a.findOne().others[0].fetch().n == 17 , "dbref broken 1" );
-
-x = Array.fetchRefs( a.findOne().others );
-assert.eq( 2 , x.length , "A" );
-assert.eq( 17 , x[0].n , "B" );
-assert.eq( 17 , x[1].n , "C" );
-
-
-assert.eq( 0 , Array.fetchRefs( a.findOne().others , "z" ).length , "D" );
diff --git a/jstests/regex.js b/jstests/regex.js
deleted file mode 100644
index f431d506ea6..00000000000
--- a/jstests/regex.js
+++ /dev/null
@@ -1,24 +0,0 @@
-t = db.jstests_regex;
-
-t.drop();
-t.save( { a: "bcd" } );
-assert.eq( 1, t.count( { a: /b/ } ) , "A" );
-assert.eq( 1, t.count( { a: /bc/ } ) , "B" );
-assert.eq( 1, t.count( { a: /bcd/ } ) , "C" );
-assert.eq( 0, t.count( { a: /bcde/ } ) , "D" );
-
-t.drop();
-t.save( { a: { b: "cde" } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "E" );
-
-t.drop();
-t.save( { a: { b: [ "cde" ] } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "F" );
-
-t.drop();
-t.save( { a: [ { b: "cde" } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "G" );
-
-t.drop();
-t.save( { a: [ { b: [ "cde" ] } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "H" );
diff --git a/jstests/regex2.js b/jstests/regex2.js
deleted file mode 100644
index 87d5cb47c05..00000000000
--- a/jstests/regex2.js
+++ /dev/null
@@ -1,70 +0,0 @@
-
-t = db.regex2;
-t.drop();
-
-t.save( { a : "test" } );
-t.save( { a : "Test" } );
-
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { a : "Test" } ).count() , "B" );
-assert.eq( 1 , t.find( { a : "test" } ).count() , "C" );
-assert.eq( 1 , t.find( { a : /Test/ } ).count() , "D" );
-assert.eq( 1 , t.find( { a : /test/ } ).count() , "E" );
-assert.eq( 2 , t.find( { a : /test/i } ).count() , "F" );
-
-
-t.drop();
-
-a = "\u0442\u0435\u0441\u0442";
-b = "\u0422\u0435\u0441\u0442";
-
-assert( ( new RegExp( a ) ).test( a ) , "B 1" );
-assert( ! ( new RegExp( a ) ).test( b ) , "B 2" );
-assert( ( new RegExp( a , "i" ) ).test( b ) , "B 3 " );
-
-t.save( { a : a } );
-t.save( { a : b } );
-
-
-assert.eq( 2 , t.find().count() , "C A" );
-assert.eq( 1 , t.find( { a : a } ).count() , "C B" );
-assert.eq( 1 , t.find( { a : b } ).count() , "C C" );
-assert.eq( 1 , t.find( { a : new RegExp( a ) } ).count() , "C D" );
-assert.eq( 1 , t.find( { a : new RegExp( b ) } ).count() , "C E" );
-assert.eq( 2 , t.find( { a : new RegExp( a , "i" ) } ).count() , "C F is spidermonkey built with UTF-8 support?" );
-
-
-// same tests as above but using {$regex: "a|b", $options: "imx"} syntax.
-t.drop();
-
-t.save( { a : "test" } );
-t.save( { a : "Test" } );
-
-assert.eq( 2 , t.find().count() , "obj A" );
-assert.eq( 1 , t.find( { a : {$regex:"Test"} } ).count() , "obj D" );
-assert.eq( 1 , t.find( { a : {$regex:"test"} } ).count() , "obj E" );
-assert.eq( 2 , t.find( { a : {$regex:"test", $options:"i"} } ).count() , "obj F" );
-assert.eq( 2 , t.find( { a : {$options:"i", $regex:"test"} } ).count() , "obj F rev" ); // both orders should work
-
-
-t.drop();
-
-a = "\u0442\u0435\u0441\u0442";
-b = "\u0422\u0435\u0441\u0442";
-
-t.save( { a : a } );
-t.save( { a : b } );
-
-
-assert.eq( 1 , t.find( { a : {$regex: a} } ).count() , "obj C D" );
-assert.eq( 1 , t.find( { a : {$regex: b} } ).count() , "obj C E" );
-assert.eq( 2 , t.find( { a : {$regex: a , $options: "i" } } ).count() , "obj C F is spidermonkey built with UTF-8 support?" );
-
-// Test s (DOT_ALL) option. Not supported with /regex/opts syntax
-t.drop();
-t.save({a:'1 2'})
-t.save({a:'1\n2'})
-assert.eq( 1 , t.find( { a : {$regex: '1.*2'} } ).count() );
-assert.eq( 2 , t.find( { a : {$regex: '1.*2', $options: 's'} } ).count() );
-
-
diff --git a/jstests/regex3.js b/jstests/regex3.js
deleted file mode 100644
index 5ac8fab4c40..00000000000
--- a/jstests/regex3.js
+++ /dev/null
@@ -1,36 +0,0 @@
-
-t = db.regex3;
-t.drop();
-
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
-
-assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "no index count" );
-assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
-t.ensureIndex( { name : 1 } );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "index count" );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
-
-t.drop();
-
-t.save( { name : "aa" } );
-t.save( { name : "ab" } );
-t.save( { name : "ac" } );
-t.save( { name : "c" } );
-
-assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B ni" );
-t.ensureIndex( { name : 1 } );
-assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B i 1" );
-assert.eq( 4 , t.find( { name : /^aa*/ } ).explain().nscanned , "B i 1 e" );
-
-assert.eq( 2 , t.find( { name : /^a[ab]/ } ).itcount() , "B i 2" );
-assert.eq( 2 , t.find( { name : /^a[bc]/ } ).itcount() , "B i 3" );
-
-t.drop();
-
-t.save( { name: "" } );
-assert.eq( 1, t.find( { name: /^a?/ } ).itcount() , "C 1" );
-t.ensureIndex( { name: 1 } );
-assert.eq( 1, t.find( { name: /^a?/ } ).itcount(), "C 2");
diff --git a/jstests/regex4.js b/jstests/regex4.js
deleted file mode 100644
index fc26d691c91..00000000000
--- a/jstests/regex4.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.regex4;
-t.drop();
-
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
-
-assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "no index count" );
-assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
-//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "no index count ne" ); // SERVER-251
-
-t.ensureIndex( { name : 1 } );
-
-assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "index count" );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
-//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "index count ne" ); // SERVER-251
diff --git a/jstests/regex5.js b/jstests/regex5.js
deleted file mode 100644
index 9f2549d7146..00000000000
--- a/jstests/regex5.js
+++ /dev/null
@@ -1,53 +0,0 @@
-
-t = db.regex5
-t.drop()
-
-// Add filler data to make sure that indexed solutions are
-// chosen over collection scans.
-for (var i = 0; i < 10; i++) {
- t.save({filler: "filler"});
-}
-
-t.save( { x : [ "abc" , "xyz1" ] } )
-t.save( { x : [ "ac" , "xyz2" ] } )
-
-a = /.*b.*c/
-x = /.*y.*/
-
-doit = function() {
-
- assert.eq( 1 , t.find( { x : a } ).count() , "A" );
- assert.eq( 2 , t.find( { x : x } ).count() , "B" );
- assert.eq( 2 , t.find( { x : { $in: [ x ] } } ).count() , "C" ); // SERVER-322
- assert.eq( 1 , t.find( { x : { $in: [ a, "xyz1" ] } } ).count() , "D" ); // SERVER-322
- assert.eq( 2 , t.find( { x : { $in: [ a, "xyz2" ] } } ).count() , "E" ); // SERVER-322
- assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "F" ); // SERVER-505
- assert.eq( 1 , t.find( { x : { $all : [ a , "abc" ] } } ).count() , "G" ); // SERVER-505
- assert.eq( 0 , t.find( { x : { $all : [ a , "ac" ] } } ).count() , "H" ); // SERVER-505
- assert.eq( 10 , t.find( { x : { $nin: [ x ] } } ).count() , "I" ); // SERVER-322
- assert.eq( 11 , t.find( { x : { $nin: [ a, "xyz1" ] } } ).count() , "J" ); // SERVER-322
- assert.eq( 10 , t.find( { x : { $nin: [ a, "xyz2" ] } } ).count() , "K" ); // SERVER-322
- assert.eq( 2 , t.find( { x : { $not: { $nin: [ x ] } } } ).count() , "L" ); // SERVER-322
- assert.eq( 11 , t.find( { x : { $nin: [ /^a.c/ ] } } ).count() , "M" ) // SERVER-322
-}
-
-doit();
-t.ensureIndex( {x:1} );
-print( "now indexed" );
-doit();
-
-// check bound unions SERVER-322
-assert.eq( {
- x:[[1,1],
- [2.5,2.5],
- ["a","a"],
- ["b","e"],
- [/^b/,/^b/],
- [/^c/,/^c/],
- [/^d/,/^d/]]
- },
- t.find( { x : { $in: [ 1, 2.5, "a", "b", /^b/, /^c/, /^d/ ] } } ).explain().indexBounds );
-
-// SERVER-505
-assert.eq( 0, t.find( { x : { $all: [ "a", /^a/ ] } } ).itcount());
-assert.eq( 2, t.find( { x : { $all: [ /^a/ ] } } ).itcount());
diff --git a/jstests/regex6.js b/jstests/regex6.js
deleted file mode 100644
index 54143248398..00000000000
--- a/jstests/regex6.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// contributed by Andrew Kempe
-t = db.regex6;
-t.drop();
-
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
-t.save( { name : "[with]some?symbols" } );
-
-t.ensureIndex( { name : 1 } );
-
-assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
-assert.eq( 1 , t.find( { name : /^\// } ).explain().nscanned , "index explain 1" );
-assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain 2" );
-assert.eq( 0 , t.find( { name : /^\é/ } ).explain().nscanned , "index explain 3" );
-assert.eq( 1 , t.find( { name : /^\./ } ).explain().nscanned , "index explain 4" );
-assert.eq( 5 , t.find( { name : /^./ } ).explain().nscanned , "index explain 5" );
-
-// SERVER-2862
-assert.eq( 0 , t.find( { name : /^\Qblah\E/ } ).count() , "index explain 6" );
-assert.eq( 1 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain 6" );
-assert.eq( 1 , t.find( { name : /^blah/ } ).explain().nscanned , "index explain 6" );
-assert.eq( 1 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).count() , "index explain 6" );
-assert.eq( 2 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).explain().nscanned , "index explain 6" );
-assert.eq( 2 , t.find( { name : /^bob/ } ).explain().nscanned , "index explain 6" ); // proof nscanned == count+1
-
-assert.eq( 1, t.find( { name : { $regex : "^e", $gte: "emily" } } ).explain().nscanned , "ie7" );
-assert.eq( 1, t.find( { name : { $gt : "a", $regex: "^emily" } } ).explain().nscanned , "ie7" );
diff --git a/jstests/regex7.js b/jstests/regex7.js
deleted file mode 100644
index ab4f6089f9b..00000000000
--- a/jstests/regex7.js
+++ /dev/null
@@ -1,26 +0,0 @@
-t = db.regex_matches_self;
-t.drop();
-
-t.insert({r:/^a/});
-t.insert({r:/^a/i});
-t.insert({r:/^b/});
-
-// no index
-assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 a')
-assert.eq( 1, t.count({r:/^a/}), '1 2')
-assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 a')
-assert.eq( 1, t.count({r:/^a/i}), '2 2 a')
-assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 a')
-assert.eq( 1, t.count({r:/^b/}), '3 2 a')
-
-// with index
-t.ensureIndex({r:1})
-assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 b')
-assert.eq( 1, t.count({r:/^a/}), '1 2 b')
-assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 b')
-assert.eq( 1, t.count({r:/^a/i}), '2 2 b')
-assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 b')
-assert.eq( 1, t.count({r:/^b/}), '3 2 b')
-
-t.insert( {r:"a"} );
-assert.eq( 2, t.count({r:/^a/}), 'c' ); \ No newline at end of file
diff --git a/jstests/regex8.js b/jstests/regex8.js
deleted file mode 100644
index 33dd74fb812..00000000000
--- a/jstests/regex8.js
+++ /dev/null
@@ -1,19 +0,0 @@
-
-t = db.regex8;
-t.drop()
-
-t.insert( { _id : 1 , a : "abc" } )
-t.insert( { _ud : 2 , a : "abc" } )
-t.insert( { _id : 3 , a : "bdc" } )
-
-function test( msg ){
- assert.eq( 3 , t.find().itcount() , msg + "1" )
- assert.eq( 2 , t.find( { a : /a.*/ } ).itcount() , msg + "2" )
- assert.eq( 3 , t.find( { a : /[ab].*/ } ).itcount() , msg + "3" )
- assert.eq( 3 , t.find( { a : /[a|b].*/ } ).itcount() , msg + "4" )
-}
-
-test( "A" );
-
-t.ensureIndex( { a : 1 } )
-test( "B" )
diff --git a/jstests/regex9.js b/jstests/regex9.js
deleted file mode 100644
index 896855c6dfb..00000000000
--- a/jstests/regex9.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.regex9;
-t.drop();
-
-t.insert( { _id : 1 , a : [ "a" , "b" , "c" ] } )
-t.insert( { _id : 2 , a : [ "a" , "b" , "c" , "d" ] } )
-t.insert( { _id : 3 , a : [ "b" , "c" , "d" ] } )
-
-assert.eq( 2 , t.find( { a : /a/ } ).itcount() , "A1" )
-assert.eq( 2 , t.find( { a : { $regex : "a" } } ).itcount() , "A2" )
-assert.eq( 2 , t.find( { a : { $regex : /a/ } } ).itcount() , "A3" )
diff --git a/jstests/regex_embed1.js b/jstests/regex_embed1.js
deleted file mode 100644
index 61b1b9a14f6..00000000000
--- a/jstests/regex_embed1.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-t = db.regex_embed1
-
-t.drop()
-
-t.insert( { _id : 1 , a : [ { x : "abc" } , { x : "def" } ] } )
-t.insert( { _id : 2 , a : [ { x : "ab" } , { x : "de" } ] } )
-t.insert( { _id : 3 , a : [ { x : "ab" } , { x : "de" } , { x : "abc" } ] } )
-
-function test( m ){
- assert.eq( 3 , t.find().itcount() , m + "1" );
- assert.eq( 2 , t.find( { "a.x" : "abc" } ).itcount() , m + "2" );
- assert.eq( 2 , t.find( { "a.x" : /.*abc.*/ } ).itcount() , m + "3" );
-
- assert.eq( 1 , t.find( { "a.0.x" : "abc" } ).itcount() , m + "4" );
- assert.eq( 1 , t.find( { "a.0.x" : /abc/ } ).itcount() , m + "5" );
-}
-
-test( "A" );
-
-t.ensureIndex( { "a.x" : 1 } )
-test( "B" );
-
-
-
diff --git a/jstests/regex_limit.js b/jstests/regex_limit.js
deleted file mode 100644
index e05dae8ab8b..00000000000
--- a/jstests/regex_limit.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var t = db.regex_limit;
-t.drop();
-
-var repeatStr = function(str, n){
- return new Array(n + 1).join(str);
-};
-
-t.insert({ z: repeatStr('c', 100000) });
-
-var maxOkStrLen = repeatStr('c', 32764);
-var strTooLong = maxOkStrLen + 'c';
-
-assert(t.findOne({ z: { $regex: maxOkStrLen }}) != null);
-assert.throws(function() {
- t.findOne({ z: { $regex: strTooLong }});
-});
-
-assert(t.findOne({ z: { $in: [ new RegExp(maxOkStrLen) ]}}) != null);
-assert.throws(function() {
- t.findOne({ z: { $in: [ new RegExp(strTooLong) ]}});
-});
-
diff --git a/jstests/regex_options.js b/jstests/regex_options.js
deleted file mode 100644
index 3febe2575ab..00000000000
--- a/jstests/regex_options.js
+++ /dev/null
@@ -1,7 +0,0 @@
-t = db.jstests_regex_options;
-
-t.drop();
-t.save( { a: "foo" } );
-assert.eq( 1, t.count( { a: { "$regex": /O/i } } ) );
-assert.eq( 1, t.count( { a: /O/i } ) );
-assert.eq( 1, t.count( { a: { "$regex": "O", "$options": "i" } } ) );
diff --git a/jstests/regex_util.js b/jstests/regex_util.js
deleted file mode 100644
index 86ba8036516..00000000000
--- a/jstests/regex_util.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Tests for RegExp.escape
-
-(function() {
- var TEST_STRINGS = [
- "[db]",
- "{ab}",
- "<c2>",
- "(abc)",
- "^first^",
- "&addr",
- "k@10gen.com",
- "#4",
- "!b",
- "<>3",
- "****word+",
- "\t| |\n\r",
- "Mongo-db",
- "[{(<>)}]!@#%^&*+\\"
- ];
-
- TEST_STRINGS.forEach(function (str) {
- var escaped = RegExp.escape(str);
- var regex = new RegExp(escaped);
- assert(regex.test(str), "Wrong escape for " + str);
- });
-})();
-
diff --git a/jstests/regexa.js b/jstests/regexa.js
deleted file mode 100644
index b0d47190e77..00000000000
--- a/jstests/regexa.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// Test simple regex optimization with a regex | (bar) present - SERVER-3298
-
-t = db.jstests_regexa;
-t.drop();
-
-function check() {
- assert.eq( 1, t.count( {a:/^(z|.)/} ) );
- assert.eq( 1, t.count( {a:/^z|./} ) );
- assert.eq( 0, t.count( {a:/^z(z|.)/} ) );
- assert.eq( 1, t.count( {a:/^zz|./} ) );
-}
-
-t.save( {a:'a'} );
-
-check();
-t.ensureIndex( {a:1} );
-if ( 1 ) { // SERVER-3298
-check();
-}
diff --git a/jstests/regexb.js b/jstests/regexb.js
deleted file mode 100644
index 169841239c8..00000000000
--- a/jstests/regexb.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// Test more than four regex expressions in a query -- SERVER-969
-
-t = db.jstests_regexb;
-t.drop();
-
-t.save( {a:'a',b:'b',c:'c',d:'d',e:'e'} );
-
-assert.eq( 1, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/e/} ) );
-assert.eq( 0, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/barf/} ) );
-
-
-
-
-
diff --git a/jstests/regexc.js b/jstests/regexc.js
deleted file mode 100644
index f7690c96496..00000000000
--- a/jstests/regexc.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Multiple regular expressions using the same index
-
-var t = db.jstests_regexc;
-
-// $and using same index twice
-t.drop();
-t.ensureIndex({a: 1});
-t.save({a: "0"});
-t.save({a: "1"});
-t.save({a: "10"});
-assert.eq( 1, t.find({$and: [{a: /0/}, {a: /1/}]}).itcount() );
-
-// implicit $and using compound index twice
-t.drop();
-t.ensureIndex({a: 1, b: 1});
-t.save({a: "0", b: "1"});
-t.save({a: "10", b: "10"});
-t.save({a: "10", b: "2"});
-assert.eq( 2, t.find({a: /0/, b: /1/}).itcount() );
-
-// $or using same index twice
-t.drop();
-t.ensureIndex({a: 1});
-t.save({a: "0"});
-t.save({a: "1"});
-t.save({a: "2"});
-t.save({a: "10"});
-assert.eq( 3, t.find({$or: [{a: /0/}, {a: /1/}]}).itcount() );
diff --git a/jstests/remove.js b/jstests/remove.js
deleted file mode 100644
index 6800a41fedc..00000000000
--- a/jstests/remove.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// remove.js
-// unit test for db remove
-
-t = db.removetest;
-
-function f(n,dir) {
- t.ensureIndex({x:dir||1});
- for( i = 0; i < n; i++ ) t.save( { x:3, z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- assert.eq( n , t.find().count() );
- t.remove({x:3});
-
- assert.eq( 0 , t.find().count() );
-
- assert( t.findOne() == null , "A:" + tojson( t.findOne() ) );
- assert( t.validate().valid , "B" );
-}
-
-t.drop();
-f(300, 1);
-
-f(500, -1);
-
-assert(t.validate().valid , "C" );
-
-// no query for remove() throws starting in 2.6
-assert.throws(function() { db.t.remove() });
diff --git a/jstests/remove2.js b/jstests/remove2.js
deleted file mode 100644
index 2b222d7ecac..00000000000
--- a/jstests/remove2.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// remove2.js
-// a unit test for db remove
-
-t = db.removetest2;
-
-function f() {
- t.save( { x:[3,3,3,3,3,3,3,3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
- t.save( { x: 9 } );
- t.save( { x: 1 } );
-
- t.remove({x:3});
-
- assert( t.findOne({x:3}) == null );
- assert( t.validate().valid );
-}
-
-x = 0;
-
-function g() {
- t.save( { x:[3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
- t.save( { x:[7,8,9], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- t.remove( {x : {$gte:3}, $atomic:x++ } );
-
- assert( !db.getLastError() );
- // $atomic within $and is not allowed.
- //t.remove( {x : {$gte:3}, $and:[{$atomic:true}] } );
- //assert( db.getLastError() );
-
- assert( t.findOne({x:3}) == null );
- assert( t.findOne({x:8}) == null );
- assert( t.validate().valid );
-}
-
-t.drop();
-f();
-t.drop();
-g();
-
-t.ensureIndex({x:1});
-t.remove({});
-f();
-t.drop();
-t.ensureIndex({x:1});
-g();
-
diff --git a/jstests/remove3.js b/jstests/remove3.js
deleted file mode 100644
index 2a51a6e0fd4..00000000000
--- a/jstests/remove3.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.remove3;
-t.drop();
-
-for ( i=1; i<=8; i++){
- t.save( { _id : i , x : i } );
-}
-
-assert.eq( 8 , t.count() , "A" );
-
-t.remove( { x : { $lt : 5 } } );
-assert.eq( 4 , t.count() , "B" );
-
-t.remove( { _id : 5 } );
-assert.eq( 3 , t.count() , "C" );
-
-t.remove( { _id : { $lt : 8 } } );
-assert.eq( 1 , t.count() , "D" );
diff --git a/jstests/remove4.js b/jstests/remove4.js
deleted file mode 100644
index bd007ed4d27..00000000000
--- a/jstests/remove4.js
+++ /dev/null
@@ -1,10 +0,0 @@
-t = db.remove4;
-t.drop();
-
-t.save ( { a : 1 , b : 1 } );
-t.save ( { a : 2 , b : 1 } );
-t.save ( { a : 3 , b : 1 } );
-
-assert.eq( 3 , t.find().length() );
-t.remove( { b : 1 } );
-assert.eq( 0 , t.find().length() );
diff --git a/jstests/remove5.js b/jstests/remove5.js
deleted file mode 100644
index be4f0b49ec1..00000000000
--- a/jstests/remove5.js
+++ /dev/null
@@ -1,24 +0,0 @@
-f = db.jstests_remove5;
-f.drop();
-
-getLastError = function() {
- return db.runCommand( { getlasterror : 1 } );
-}
-
-f.remove( {} );
-assert.eq( 0, getLastError().n );
-f.save( {a:1} );
-f.remove( {} );
-assert.eq( 1, getLastError().n );
-for( i = 0; i < 10; ++i ) {
- f.save( {i:i} );
-}
-f.remove( {} );
-assert.eq( 10, getLastError().n );
-assert.eq( 10, db.getPrevError().n );
-assert.eq( 1, db.getPrevError().nPrev );
-
-f.findOne();
-assert.eq( 0, getLastError().n );
-assert.eq( 10, db.getPrevError().n );
-assert.eq( 2, db.getPrevError().nPrev );
diff --git a/jstests/remove6.js b/jstests/remove6.js
deleted file mode 100644
index d843aeeec0f..00000000000
--- a/jstests/remove6.js
+++ /dev/null
@@ -1,38 +0,0 @@
-
-t = db.remove6;
-t.drop();
-
-N = 1000;
-
-function pop(){
- t.drop();
- for ( var i=0; i<N; i++ ){
- t.save( { x : 1 , tags : [ "a" , "b" , "c" ] } );
- }
-}
-
-function del(){
- t.remove( { tags : { $in : [ "a" , "c" ] } } );
-}
-
-function test( n , idx ){
- pop();
- assert.eq( N , t.count() , n + " A " + idx );
- if ( idx )
- t.ensureIndex( idx );
- del();
- var e = db.getLastError();
- assert( e == null , "error deleting: " + e );
- assert.eq( 0 , t.count() , n + " B " + idx );
-}
-
-test( "a" );
-test( "b" , { x : 1 } );
-test( "c" , { tags : 1 } );
-
-N = 5000
-
-test( "a2" );
-test( "b2" , { x : 1 } );
-test( "c2" , { tags : 1 } );
-
diff --git a/jstests/remove7.js b/jstests/remove7.js
deleted file mode 100644
index 50c6ac189bc..00000000000
--- a/jstests/remove7.js
+++ /dev/null
@@ -1,35 +0,0 @@
-
-t = db.remove7
-t.drop();
-
-
-
-function getTags( n ){
- n = n || 5;
- var a = [];
- for ( var i=0; i<n; i++ ){
- var v = Math.ceil( 20 * Math.random() );
- a.push( v );
- }
-
- return a;
-}
-
-for ( i=0; i<1000; i++ ){
- t.save( { tags : getTags() } );
-}
-
-t.ensureIndex( { tags : 1 } );
-
-for ( i=0; i<200; i++ ){
- for ( var j=0; j<10; j++ )
- t.save( { tags : getTags( 100 ) } );
- var q = { tags : { $in : getTags( 10 ) } };
- var before = t.find( q ).count();
- t.remove( q );
- var o = db.getLastErrorObj();
- var after = t.find( q ).count();
- assert.eq( 0 , after , "not zero after!" );
- assert.isnull( o.err , "error: " + tojson( o ) );
-}
-
diff --git a/jstests/remove8.js b/jstests/remove8.js
deleted file mode 100644
index 3ab53f3289a..00000000000
--- a/jstests/remove8.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-t = db.remove8;
-t.drop();
-
-N = 1000;
-
-function fill(){
- for ( var i=0; i<N; i++ ){
- t.save( { x : i } );
- }
-}
-
-fill();
-assert.eq( N , t.count() , "A" );
-t.remove( {} )
-assert.eq( 0 , t.count() , "B" );
-
-fill();
-assert.eq( N , t.count() , "C" );
-db.eval( function(){ db.remove8.remove( {} ); } )
-assert.eq( 0 , t.count() , "D" );
diff --git a/jstests/remove9.js b/jstests/remove9.js
deleted file mode 100644
index 655594afe8b..00000000000
--- a/jstests/remove9.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// SERVER-2009 Count odd numbered entries while updating and deleting even numbered entries.
-
-t = db.jstests_remove9;
-t.drop();
-t.ensureIndex( {i:1} );
-for( i = 0; i < 1000; ++i ) {
- t.save( {i:i} );
-}
-
-s = startParallelShell( 't = db.jstests_remove9; for( j = 0; j < 5000; ++j ) { i = Random.randInt( 499 ) * 2; t.update( {i:i}, {$set:{i:2000}} ); t.remove( {i:2000} ); t.save( {i:i} ); }' );
-
-for( i = 0; i < 1000; ++i ) {
- assert.eq( 500, t.find( {i:{$gte:0,$mod:[2,1]}} ).hint( {i:1} ).itcount() );
-}
-
-s();
diff --git a/jstests/remove_justone.js b/jstests/remove_justone.js
deleted file mode 100644
index e412a13483c..00000000000
--- a/jstests/remove_justone.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.remove_justone
-t.drop()
-
-t.insert( { x : 1 } )
-t.insert( { x : 1 } )
-t.insert( { x : 1 } )
-t.insert( { x : 1 } )
-
-assert.eq( 4 , t.count() )
-
-t.remove( { x : 1 } , true )
-assert.eq( 3 , t.count() )
-
-t.remove( { x : 1 } )
-assert.eq( 0 , t.count() )
diff --git a/jstests/remove_undefined.js b/jstests/remove_undefined.js
deleted file mode 100644
index d5344a3a562..00000000000
--- a/jstests/remove_undefined.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.drop_undefined.js
-
-t.insert( { _id : 1 } )
-t.insert( { _id : 2 } )
-t.insert( { _id : null } )
-
-z = { foo : 1 , x : null }
-
-t.remove( { x : z.bar } )
-assert.eq( 3 , t.count() , "A1" )
-
-t.remove( { x : undefined } )
-assert.eq( 3 , t.count() , "A2" )
-
-assert.throws( function(){ t.remove( { _id : z.bar } ) } , null , "B1" )
-assert.throws( function(){ t.remove( { _id : undefined } ) } , null , "B2" )
-
-
-t.remove( { _id : z.x } )
-assert.eq( 2 , t.count() , "C1" )
-
-t.insert( { _id : null } )
-assert.eq( 3 , t.count() , "C2" )
-
-assert.throws( function(){ t.remove( { _id : undefined } ) } , null, "C3" )
-assert.eq( 3 , t.count() , "C4" )
-
diff --git a/jstests/removea.js b/jstests/removea.js
deleted file mode 100644
index 703d8c4cf92..00000000000
--- a/jstests/removea.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Test removal of a substantial proportion of inserted documents. SERVER-3803
-// A complete test will only be performed against a DEBUG build.
-
-t = db.jstests_removea;
-
-Random.setRandomSeed();
-
-for( v = 0; v < 2; ++v ) { // Try each index version.
- t.drop();
- t.ensureIndex( { a:1 }, { v:v } );
- for( i = 0; i < 10000; ++i ) {
- t.save( { a:i } );
- }
-
- toDrop = [];
- for( i = 0; i < 10000; ++i ) {
- toDrop.push( Random.randInt( 10000 ) ); // Dups in the query will be ignored.
- }
- // Remove many of the documents; $atomic prevents use of a ClientCursor, which would invoke a
- // different bucket deallocation procedure than the one to be tested (see SERVER-4575).
- t.remove( { a:{ $in:toDrop }, $atomic:true } );
- assert( !db.getLastError() );
-}
diff --git a/jstests/removeb.js b/jstests/removeb.js
deleted file mode 100644
index b6634140081..00000000000
--- a/jstests/removeb.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// Test removal of Records that have been reused since the remove operation began. SERVER-5198
-
-t = db.jstests_removeb;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-
-// Make the index multikey to trigger cursor dedup checking.
-t.insert( { a:[ -1, -2 ] } );
-t.remove({});
-
-// Insert some data.
-for( i = 0; i < 20000; ++i ) {
- t.insert( { a:i } );
-}
-db.getLastError();
-
-p = startParallelShell(
- // Wait until the remove operation (below) begins running.
- 'while( db.jstests_removeb.count() == 20000 );' +
- // Insert documents with increasing 'a' values. These inserted documents may
- // reuse Records freed by the remove operation in progress and will be
- // visited by the remove operation if it has not completed.
- 'for( i = 20000; i < 40000; ++i ) {' +
- ' db.jstests_removeb.insert( { a:i } );' +
- ' db.getLastError();' +
- ' if (i % 1000 == 0) {' +
- ' print( i-20000 + \" of 20000 documents inserted\" );' +
- ' }' +
- '}'
- );
-
-// Remove using the a:1 index in ascending direction.
-t.remove( { a:{ $gte:0 } } );
-assert( !db.getLastError(), 'The remove operation failed.' );
-
-p();
-
-t.drop();
diff --git a/jstests/removec.js b/jstests/removec.js
deleted file mode 100644
index 539647c502e..00000000000
--- a/jstests/removec.js
+++ /dev/null
@@ -1,40 +0,0 @@
-// Sanity test for removing documents with adjacent index keys. SERVER-2008
-
-t = db.jstests_removec;
-t.drop();
-t.ensureIndex( { a:1 } );
-
-/** @return an array containing a sequence of numbers from i to i + 10. */
-function runStartingWith( i ) {
- ret = [];
- for( j = 0; j < 11; ++j ) {
- ret.push( i + j );
- }
- return ret;
-}
-
-// Insert some documents with adjacent index keys.
-for( i = 0; i < 1100; i += 11 ) {
- t.save( { a:runStartingWith( i ) } );
-}
-db.getLastError();
-
-// Remove and then reinsert random documents in the background.
-s = startParallelShell(
- 't = db.jstests_removec;' +
- 'for( j = 0; j < 1000; ++j ) {' +
- ' o = t.findOne( { a:Random.randInt( 1100 ) } );' +
- ' t.remove( { _id:o._id } );' +
- ' t.insert( o );' +
- '}'
- );
-
-// Find operations are error free. Note that the cursor throws if it detects the $err
-// field in the returned document.
-for( i = 0; i < 200; ++i ) {
- t.find( { a:{ $gte:0 } } ).hint( { a:1 } ).itcount();
-}
-
-s();
-
-t.drop();
diff --git a/jstests/rename.js b/jstests/rename.js
deleted file mode 100644
index 51b74047288..00000000000
--- a/jstests/rename.js
+++ /dev/null
@@ -1,56 +0,0 @@
-admin = db.getMongo().getDB( "admin" );
-
-a = db.jstests_rename_a;
-b = db.jstests_rename_b;
-c = db.jstests_rename_c;
-
-a.drop();
-b.drop();
-c.drop();
-
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.ensureIndex( {a:1} );
-a.ensureIndex( {b:1} );
-
-c.save( {a: 100} );
-assert.commandFailed( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_c"} ) );
-
-assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
-assert.eq( 0, a.find().count() );
-
-assert.eq( 2, b.find().count() );
-assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
-assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
-assert.eq( 3, db.system.indexes.find( {ns:"test.jstests_rename_b"} ).count() );
-assert.eq( 0, db.system.indexes.find( {ns:"test.jstests_rename_a"} ).count() );
-assert( b.find( {a:1} ).explain().cursor.match( /^BtreeCursor/ ) );
-
-// now try renaming a capped collection
-
-a.drop();
-b.drop();
-c.drop();
-
-// TODO: too many numbers hard coded here
-// this test depends precisely on record size and hence may not be very reliable
-// note we use floats to make sure numbers are represented as doubles for both SM and v8, since test relies on record size
-db.createCollection( "jstests_rename_a", {capped:true,size:10000} );
-for( i = 0.1; i < 10; ++i ) {
- a.save( { i: i } );
-}
-assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
-assert.eq( 1, b.count( {i:9.1} ) );
-for( i = 10.1; i < 250; ++i ) {
- b.save( { i: i } );
-}
-
-//res = b.find().sort({i:1});
-//while (res.hasNext()) printjson(res.next());
-
-assert.eq( 0, b.count( {i:9.1} ) );
-assert.eq( 1, b.count( {i:19.1} ) );
-
-assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
-assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
-assert.eq( true, db.system.namespaces.findOne( {name:"test.jstests_rename_b"} ).options.capped );
diff --git a/jstests/rename2.js b/jstests/rename2.js
deleted file mode 100644
index a06268f1bfb..00000000000
--- a/jstests/rename2.js
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-a = db.rename2a;
-b = db.rename2b;
-
-a.drop();
-b.drop();
-
-a.save( { x : 1 } )
-a.save( { x : 2 } )
-a.save( { x : 3 } )
-
-assert.eq( 3 , a.count() , "A" )
-assert.eq( 0 , b.count() , "B" )
-
-assert( a.renameCollection( "rename2b" ) , "the command" );
-
-assert.eq( 0 , a.count() , "C" )
-assert.eq( 3 , b.count() , "D" )
diff --git a/jstests/rename3.js b/jstests/rename3.js
deleted file mode 100644
index 5e1005f8176..00000000000
--- a/jstests/rename3.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-a = db.rename3a
-b = db.rename3b
-
-a.drop();
-b.drop()
-
-a.save( { x : 1 } );
-b.save( { x : 2 } );
-
-assert.eq( 1 , a.findOne().x , "before 1a" );
-assert.eq( 2 , b.findOne().x , "before 2a" );
-
-res = b.renameCollection( a._shortName );
-assert.eq( 0 , res.ok , "should fail: " + tojson( res ) );
-
-assert.eq( 1 , a.findOne().x , "before 1b" );
-assert.eq( 2 , b.findOne().x , "before 2b" );
-
-res = b.renameCollection( a._shortName , true )
-assert.eq( 1 , res.ok , "should succeed:" + tojson( res ) );
-
-assert.eq( 2 , a.findOne().x , "after 1" );
-assert.isnull( b.findOne() , "after 2" );
diff --git a/jstests/rename4.js b/jstests/rename4.js
deleted file mode 100644
index 508b8b9321b..00000000000
--- a/jstests/rename4.js
+++ /dev/null
@@ -1,145 +0,0 @@
-t = db.jstests_rename4;
-t.drop();
-
-function bad( f ) {
- //Ensure no error to start with
- var lstError = db.getLastError();
- if (lstError)
- assert( false, "Unexpected error : " + lstError );
-
- var docsBeforeUpdate = t.find().toArray();
- eval( f );
-
- //Ensure error
- var lstError = db.getLastErrorObj();
- if (!lstError.err) {
- print("Error:" + tojson(lstError));
- print("Existing docs (before)")
- printjson(docsBeforeUpdate);
- print("Existing docs (after)")
- printjson(t.find().toArray());
- assert( false, "Expected error but didn't get one for: " + f );
- }
-
- db.resetError();
-}
-
-bad( "t.update( {}, {$rename:{'a':'a'}} )" );
-bad( "t.update( {}, {$rename:{'':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a':''}} )" );
-bad( "t.update( {}, {$rename:{'.a':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'.b'}} )" );
-bad( "t.update( {}, {$rename:{'a.':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.'}} )" );
-bad( "t.update( {}, {$rename:{'a.b':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a.$':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.$'}} )" );
-
-// Only bad if input doc has field resulting in conflict
-t.save( {_id:1, a:2} );
-bad( "t.update( {}, {$rename:{'_id':'a'}} )" );
-bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{b:1}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{a:1}} )" );
-bad( "t.update( {}, {$set:{'b.c':1},$rename:{'a':'b'}} )" );
-bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b.c'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{'b.c':1}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.c'},$set:{b:1}} )" );
-
-
-t.remove({});
-t.save( {a:[1],b:{c:[2]},d:[{e:3}],f:4} );
-bad( "t.update( {}, {$rename:{'a.0':'f'}} )" );
-bad( "t.update( {}, {$rename:{'a.0':'g'}} )" );
-bad( "t.update( {}, {$rename:{'f':'a.0'}} )" );
-bad( "t.update( {}, {$rename:{'b.c.0':'f'}} )" );
-bad( "t.update( {}, {$rename:{'f':'b.c.0'}} )" );
-bad( "t.update( {}, {$rename:{'d.e':'d.f'}} )" );
-bad( "t.update( {}, {$rename:{'d.e':'f'}} )" );
-bad( "t.update( {}, {$rename:{'d.f':'d.e'}} )" );
-bad( "t.update( {}, {$rename:{'f':'d.e'}} )" );
-bad( "t.update( {}, {$rename:{'d.0.e':'d.f'}} )" );
-bad( "t.update( {}, {$rename:{'d.0.e':'f'}} )" );
-bad( "t.update( {}, {$rename:{'d.f':'d.0.e'}} )" );
-bad( "t.update( {}, {$rename:{'f':'d.0.e'}} )" );
-bad( "t.update( {}, {$rename:{'f.g':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a':'f.g'}} )" );
-
-function good( start, mod, expected ) {
- t.remove({});
- t.save( start );
- t.update( {}, mod );
- assert( !db.getLastError() );
- var got = t.findOne();
- delete got._id;
- assert.docEq( expected, got );
-}
-
-good( {a:1}, {$rename:{a:'b'}}, {b:1} );
-good( {a:1}, {$rename:{a:'bb'}}, {bb:1} );
-good( {b:1}, {$rename:{b:'a'}}, {a:1} );
-good( {bb:1}, {$rename:{bb:'a'}}, {a:1} );
-good( {a:{y:1}}, {$rename:{'a.y':'a.z'}}, {a:{z:1}} );
-good( {a:{yy:1}}, {$rename:{'a.yy':'a.z'}}, {a:{z:1}} );
-good( {a:{z:1}}, {$rename:{'a.z':'a.y'}}, {a:{y:1}} );
-good( {a:{zz:1}}, {$rename:{'a.zz':'a.y'}}, {a:{y:1}} );
-good( {a:{c:1}}, {$rename:{a:'b'}}, {b:{c:1}} );
-good( {aa:{c:1}}, {$rename:{aa:'b'}}, {b:{c:1}} );
-good( {a:1,b:2}, {$rename:{a:'b'}}, {b:1} );
-good( {aa:1,b:2}, {$rename:{aa:'b'}}, {b:1} );
-good( {a:1,bb:2}, {$rename:{a:'bb'}}, {bb:1} );
-good( {a:1}, {$rename:{a:'b.c'}}, {b:{c:1}} );
-good( {aa:1}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
-good( {a:1,b:{}}, {$rename:{a:'b.c'}}, {b:{c:1}} );
-good( {aa:1,b:{}}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
-good( {a:1}, {$rename:{b:'c'}}, {a:1} );
-good( {aa:1}, {$rename:{b:'c'}}, {aa:1} );
-good( {}, {$rename:{b:'c'}}, {} );
-good( {a:{b:1,c:2}}, {$rename:{'a.b':'d'}}, {a:{c:2},d:1} );
-good( {a:{bb:1,c:2}}, {$rename:{'a.bb':'d'}}, {a:{c:2},d:1} );
-good( {a:{b:1}}, {$rename:{'a.b':'d'}}, {a:{},d:1} );
-good( {a:[5]}, {$rename:{a:'b'}}, {b:[5]} );
-good( {aa:[5]}, {$rename:{aa:'b'}}, {b:[5]} );
-good( {'0':1}, {$rename:{'0':'5'}}, {'5':1} );
-good( {a:1,b:2}, {$rename:{a:'c'},$set:{b:5}}, {b:5,c:1} );
-good( {aa:1,b:2}, {$rename:{aa:'c'},$set:{b:5}}, {b:5,c:1} );
-good( {a:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {a:1,b:5} );
-good( {aa:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {aa:1,b:5} );
-
-// (formerly) rewriting single field
-good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
-good( {a:{z:1,tomato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
-good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
-good( {a:{z:1,tomato:1,potato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
-good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.cc'}}, {a:{cc:1,z:1}} );
-good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'aa.c'}}, {a:{c:1,z:1},aa:{c:1}} );
-
-// invalid target, but missing source
-good( {a:1,c:4}, {$rename:{b:'c.d'}}, {a:1,c:4} );
-
-// TODO: This should be supported, and it is with the new update framework, but not with the
-// old, and we currently don't have a good way to check which mode we are in. When we do have
-// that, add this test guarded under that condition. Or, when we remove the old update path
-// just enable this test.
-
-// valid to rename away from an invalid name
-// good( {x:1}, {$rename:{'$a.b':'a.b'}}, {x:1} );
-
-// check index
-t.drop();
-t.ensureIndex( {a:1} );
-
-function l( start, mod, query, expected ) {
- t.remove({});
- t.save( start );
- t.update( {}, mod );
- assert( !db.getLastError() );
- var got = t.find( query ).hint( {a:1} ).next();
- delete got._id;
- assert.docEq( expected, got );
-}
-
-l( {a:1}, {$rename:{a:'b'}}, {a:null}, {b:1} );
-l( {a:1}, {$rename:{a:'bb'}}, {a:null}, {bb:1} );
-l( {b:1}, {$rename:{b:'a'}}, {a:1}, {a:1} );
-l( {bb:1}, {$rename:{bb:'a'}}, {a:1}, {a:1} );
diff --git a/jstests/rename5.js b/jstests/rename5.js
deleted file mode 100644
index 927c767b981..00000000000
--- a/jstests/rename5.js
+++ /dev/null
@@ -1,46 +0,0 @@
-// Check some $rename cases with a missing source. SERVER-4845
-
-t = db.jstests_rename5;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-t.save( { b:1 } );
-
-t.update( {}, { $rename:{ a:'b' } } );
-assert.eq( 1, t.findOne().b );
-
-// Test with another modifier.
-t.update( {}, { $rename:{ a:'b' }, $set:{ x:1 } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().x );
-
-// Test with an in place modifier.
-t.update( {}, { $rename:{ a:'b' }, $inc:{ x:1 } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 2, t.findOne().x );
-
-// Check similar cases with upserts.
-t.drop();
-
-t.remove({});
-t.update( { b:1 }, { $rename:{ a:'b' } }, true );
-assert.eq( 1, t.findOne().b );
-
-t.remove({});
-t.update( { b:1 }, { $rename:{ a:'b' }, $set:{ c:1 } }, true );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().c );
-
-t.remove({});
-t.update( { b:1, c:2 }, { $rename:{ a:'b' }, $inc:{ c:1 } }, true );
-assert.eq( 1, t.findOne().b );
-assert.eq( 3, t.findOne().c );
-
-// Check a similar case with multiple renames of an unindexed document.
-t.drop();
-
-t.save( { b:1, x:1 } );
-t.update( {}, { $rename: { a:'b', x:'y' } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().y );
-assert( !t.findOne().x );
diff --git a/jstests/rename6.js b/jstests/rename6.js
deleted file mode 100644
index 17cbf4b80b1..00000000000
--- a/jstests/rename6.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Test for SERVER-7017
-// We shouldn't rename a collection when one of its indexes will generate a namespace
-// that is greater than 120 chars. To do this we create a long index name and try
-// and rename the collection to one with a much longer name. We use the test database
-// by default and we add this here to ensure we are using it
-testDB = db.getSiblingDB("test")
-c = "rename2c";
-dbc = testDB.getCollection(c);
-d = "dest4567890123456789012345678901234567890123456789012345678901234567890"
-dbd = testDB.getCollection(d);
-dbc.ensureIndex({ "name" : 1,
- "date" : 1,
- "time" : 1,
- "renameCollection" : 1,
- "mongodb" : 1,
- "testing" : 1,
- "data" : 1});
-//Checking for the newly created index and the _id index in original collection
-assert.eq(2, testDB.system.indexes.find( { "ns" : "test." + c } ).count(), "Long Rename Init");
-//Should fail to rename collection as the index namespace is too long
-assert.commandFailed( dbc.renameCollection( dbd ) , "Long Rename Exec" );
-//Since we failed we should have the 2 indexes unmoved and no indexes under the new collection name
-assert.eq(2, testDB.system.indexes.find( { "ns" : "test." + c } ).count(), "Long Rename Result 1");
-assert.eq(0, testDB.system.indexes.find( { "ns" : "test." + d } ).count(), "Long Rename Result 2");
diff --git a/jstests/rename7.js b/jstests/rename7.js
deleted file mode 100644
index 33899957755..00000000000
--- a/jstests/rename7.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// ***************************************************************
-// rename7.js
-// Test renameCollection functionality across different databases.
-// ***************************************************************
-
-// Set up namespaces a and b.
-admin = db.getMongo().getDB( "admin" );
-db_a = db.getMongo().getDB( "db_a" );
-db_b = db.getMongo().getDB( "db_b" );
-a = db_a.rename7;
-b = db_b.rename7;
-
-a.drop();
-b.drop();
-
-// Put some documents and indexes in a.
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.save( {a: 3} );
-a.ensureIndex( {a: 1} );
-a.ensureIndex( {b: 1} );
-
-assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7", to: "db_b.rename7"} ) );
-
-assert.eq( 0, a.find().count() );
-assert( !db_a.system.namespaces.findOne( {name: "db_a.rename7"} ) );
-
-assert.eq( 3, b.find().count() );
-assert( db_b.system.namespaces.findOne( {name: "db_b.rename7"} ) );
-assert( b.find( {a: 1} ).explain().cursor.match( /^BtreeCursor/ ) );
-
-a.drop();
-b.drop();
-
-// Capped collection testing.
-db_a.createCollection( "rename7_capped", {capped:true, size:10000} );
-a = db_a.rename7_capped;
-b = db_b.rename7_capped;
-
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.save( {a: 3} );
-
-assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7_capped",
- to: "db_b.rename7_capped"} ) );
-
-assert.eq( 0, a.find().count() );
-assert( !db_a.system.namespaces.findOne( {name: "db_a.rename7_capped"} ) );
-
-assert.eq( 3, b.find().count() );
-assert( db_b.system.namespaces.findOne( {name: "db_b.rename7_capped"} ) );
-assert.eq( true, db_b.system.namespaces.findOne( {name:"db_b.rename7_capped"} ).options.capped );
-assert.eq( 12288, b.stats().storageSize );
-
-a.drop();
-b.drop();
diff --git a/jstests/rename8.js b/jstests/rename8.js
deleted file mode 100644
index 8b955824ea8..00000000000
--- a/jstests/rename8.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// SERVER-12591: prevent renaming to arbitrary system collections.
-
-var testdb = db.getSiblingDB("rename8"); // to avoid breaking other tests when we touch system.users
-var coll = testdb.rename8;
-var systemNamespaces = testdb.system.namespaces;
-var systemFoo = testdb.system.foo;
-var systemUsers = testdb.system.users;
-
-systemFoo.drop();
-systemUsers.drop();
-coll.drop();
-coll.insert({});
-
-// system.foo isn't in the whitelist so it can't be renamed to or from
-assert.commandFailed(coll.renameCollection(systemFoo.getName()));
-assert.commandFailed(systemFoo.renameCollection(coll.getName()));
-
-// same with system.namespaces, even though it does exist
-assert.commandFailed(coll.renameCollection(systemNamespaces.getName()));
-assert.commandFailed(coll.renameCollection(systemNamespaces.getName(), /*dropTarget*/true));
-assert.commandFailed(systemNamespaces.renameCollection(coll.getName()));
-
-// system.users is whitelisted so these should work
-assert.commandWorked(coll.renameCollection(systemUsers.getName()));
-assert.commandWorked(systemUsers.renameCollection(coll.getName()));
diff --git a/jstests/rename_stayTemp.js b/jstests/rename_stayTemp.js
deleted file mode 100644
index afd77d1289c..00000000000
--- a/jstests/rename_stayTemp.js
+++ /dev/null
@@ -1,24 +0,0 @@
-orig = 'rename_stayTemp_orig'
-dest = 'rename_stayTemp_dest'
-
-db[orig].drop()
-db[dest].drop()
-
-function ns(coll){ return db[coll].getFullName() }
-
-db.runCommand({create: orig, temp:1})
-assert.eq(db.system.namespaces.findOne({name:ns(orig)}).options.temp, 1)
-
-db.adminCommand({renameCollection: ns(orig), to: ns(dest)});
-var options = db.system.namespaces.findOne({name:ns(dest)}).options || {};
-assert.eq(options.temp, undefined);
-
-db[dest].drop();
-
-db.runCommand({create: orig, temp:1})
-assert.eq(db.system.namespaces.findOne({name:ns(orig)}).options.temp, 1)
-
-db.adminCommand({renameCollection: ns(orig), to: ns(dest), stayTemp: true});
-assert.eq(db.system.namespaces.findOne({name:ns(dest)}).options.temp, 1)
-
-
diff --git a/jstests/repair.js b/jstests/repair.js
deleted file mode 100644
index 5026ec3bcbb..00000000000
--- a/jstests/repair.js
+++ /dev/null
@@ -1,28 +0,0 @@
-mydb = db.getSisterDB( "repair_test1" )
-
-t = mydb.jstests_repair;
-t.drop();
-
-t.save( { i:1 } );
-doc = t.findOne();
-t.ensureIndex( { i : 1 } );
-assert.eq( 2, t.getIndexes().length );
-ex = t.find( { i : 1 } ).explain();
-
-assert.commandWorked( mydb.repairDatabase() );
-
-v = t.validate();
-assert( v.valid , "not valid! " + tojson( v ) );
-
-assert.eq( 1, t.count() );
-assert.eq( doc, t.findOne() );
-
-assert.eq( 2, t.getIndexes().length, tojson( t.getIndexes() ) );
-var explainAfterRepair = t.find( { i : 1 } ).explain();
-
-// Remove "millis" field. We're interested in the other fields.
-// It's not relevant for both explain() operations to have
-// the same execution time.
-delete ex[ "millis" ];
-delete explainAfterRepair[ "millis" ];
-assert.eq( ex, explainAfterRepair );
diff --git a/jstests/repair_server12955.js b/jstests/repair_server12955.js
deleted file mode 100644
index 9582e4c7852..00000000000
--- a/jstests/repair_server12955.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-mydb = db.getSisterDB( "repair_server12955" );
-mydb.dropDatabase()
-
-mydb.foo.ensureIndex({a:"text"})
-mydb.foo.insert({a:"hello world"})
-
-before = mydb.stats().dataFileVersion;
-
-mydb.repairDatabase();
-
-after = mydb.stats().dataFileVersion;
-
-assert.eq( before, after );
-mydb.dropDatabase();
diff --git a/jstests/reversecursor.js b/jstests/reversecursor.js
deleted file mode 100644
index bb661952fc9..00000000000
--- a/jstests/reversecursor.js
+++ /dev/null
@@ -1,34 +0,0 @@
-// Test to make sure that a reverse cursor can correctly handle empty extents (SERVER-6980)
-
-// Create a collection with three small extents
-db.jstests_reversecursor.drop();
-db.runCommand({"create":"jstests_reversecursor", $nExtents: [4096,4096,4096]});
-
-// Function to check whether all three extents are non empty
-function extentsSpanned() {
- var extents = db.jstests_reversecursor.validate(true).extents;
- return (extents[0].firstRecord != "null" &&
- extents[1].firstRecord != "null" &&
- extents[2].firstRecord != "null");
-}
-
-// Insert enough documents to span all three extents
-a = 0;
-while (!extentsSpanned()) {
- db.jstests_reversecursor.insert({a:a++});
-}
-
-// Delete all the elements in the middle
-db.jstests_reversecursor.remove({a:{$gt:0,$lt:a-1}});
-
-// Make sure the middle extent is empty and that both end extents are not empty
-assert.eq(db.jstests_reversecursor.validate(true).extents[1].firstRecord, "null");
-assert.eq(db.jstests_reversecursor.validate(true).extents[1].lastRecord, "null");
-assert.neq(db.jstests_reversecursor.validate(true).extents[0].firstRecord, "null");
-assert.neq(db.jstests_reversecursor.validate(true).extents[0].lastRecord, "null");
-assert.neq(db.jstests_reversecursor.validate(true).extents[2].firstRecord, "null");
-assert.neq(db.jstests_reversecursor.validate(true).extents[2].lastRecord, "null");
-
-// Make sure that we get the same number of elements for both the forward and reverse cursors
-assert.eq(db.jstests_reversecursor.find().sort({$natural:1}).toArray().length, 2);
-assert.eq(db.jstests_reversecursor.find().sort({$natural:-1}).toArray().length, 2);
diff --git a/jstests/role_management_helpers.js b/jstests/role_management_helpers.js
deleted file mode 100644
index 1cb821975ef..00000000000
--- a/jstests/role_management_helpers.js
+++ /dev/null
@@ -1,137 +0,0 @@
-// This test is a basic sanity check of the shell helpers for manipulating role objects
-// It is not a comprehensive test of the functionality of the role manipulation commands
-
-function assertHasRole(rolesArray, roleName, roleDB) {
- for (i in rolesArray) {
- var curRole = rolesArray[i];
- if (curRole.role == roleName && curRole.db == roleDB) {
- return;
- }
- }
- assert(false, "role " + roleName + "@" + roleDB + " not found in array: " + tojson(rolesArray));
-}
-
-function assertHasPrivilege(privilegeArray, privilege) {
- for (i in privilegeArray) {
- var curPriv = privilegeArray[i];
- if (curPriv.resource.cluster == privilege.resource.cluster &&
- curPriv.resource.anyResource == privilege.resource.anyResource &&
- curPriv.resource.db == privilege.resource.db &&
- curPriv.resource.collection == privilege.resource.collection) {
- // Same resource
- assert.eq(curPriv.actions.length, privilege.actions.length);
- for (k in curPriv.actions) {
- assert.eq(curPriv.actions[k], privilege.actions[k]);
- }
- return;
- }
- }
- assert(false, "Privilege " + tojson(privilege) + " not found in privilege array: " +
- tojson(privilegeArray));
-}
-
-(function(db) {
- var db = db.getSiblingDB("role_management_helpers");
- db.dropDatabase();
- db.dropAllRoles();
-
- db.createRole({role:'roleA',
- roles: [],
- privileges: [{resource: {db:db.getName(), collection: "foo"},
- actions: ['find']}]});
- db.createRole({role:'roleB', privileges: [], roles: ["roleA"]});
- db.createRole({role:'roleC', privileges: [], roles: []});
-
- // Test getRole
- var roleObj = db.getRole("roleA");
- assert.eq(0, roleObj.roles.length);
- assert.eq(null, roleObj.privileges);
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(1, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(1, roleObj.inheritedPrivileges.length); // inherited from roleA
- assertHasPrivilege(roleObj.inheritedPrivileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleA", db.getName());
-
- // Test getRoles
- var roles = db.getRoles();
- assert.eq(3, roles.length);
- printjson(roles);
- assert(roles[0].role == 'roleA' || roles[1].role == 'roleA' || roles[2].role == 'roleA');
- assert(roles[0].role == 'roleB' || roles[1].role == 'roleB' || roles[2].role == 'roleB');
- assert(roles[0].role == 'roleC' || roles[1].role == 'roleC' || roles[2].role == 'roleC');
- assert.eq(null, roles[0].inheritedPrivileges);
- var roles = db.getRoles({showPrivileges: true, showBuiltinRoles: true});
- assert.eq(8, roles.length);
- assert.neq(null, roles[0].inheritedPrivileges);
-
-
- // Granting roles to nonexistent role fails
- assert.throws(function() { db.grantRolesToRole("fakeRole", ['dbAdmin']); });
- // Granting roles to built-in role fails
- assert.throws(function() { db.grantRolesToRole("readWrite", ['dbAdmin']); });
- // Granting non-existant role fails
- assert.throws(function() { db.grantRolesToRole("roleB", ['dbAdmin', 'fakeRole']); });
-
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(1, roleObj.inheritedPrivileges.length);
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleA", db.getName());
-
- // Granting a role you already have is no problem
- db.grantRolesToRole("roleB", ['readWrite', 'roleC']);
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.gt(roleObj.inheritedPrivileges.length, 1); // Got privileges from readWrite role
- assert.eq(3, roleObj.roles.length);
- assertHasRole(roleObj.roles, "readWrite", db.getName());
- assertHasRole(roleObj.roles, "roleA", db.getName());
- assertHasRole(roleObj.roles, "roleC", db.getName());
-
- // Revoking roles the role doesn't have is fine
- db.revokeRolesFromRole("roleB", ['roleA', 'readWrite', 'dbAdmin']);
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(0, roleObj.inheritedPrivileges.length);
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleC", db.getName());
-
- // Privileges on the same resource get collapsed
- db.grantPrivilegesToRole("roleA",
- [{resource: {db:db.getName(), collection:""}, actions:['dropDatabase']},
- {resource: {db:db.getName(), collection:"foo"}, actions:['insert']}]);
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(0, roleObj.roles.length);
- assert.eq(2, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find', 'insert']});
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:""}, actions:['dropDatabase']});
-
- // Update role
- db.updateRole("roleA", {roles:['roleB'],
- privileges:[{resource: {db: db.getName(), collection:"foo"},
- actions:['find']}]});
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleB", db.getName());
- assert.eq(1, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
-
- // Test dropRole
- db.dropRole('roleC');
- assert.throws(function() {db.getRole('roleC')});
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(0, roleObj.privileges.length);
- assert.eq(0, roleObj.roles.length);
-
- // Test dropAllRoles
- db.dropAllRoles();
- assert.throws(function() {db.getRole('roleA')});
- assert.throws(function() {db.getRole('roleB')});
- assert.throws(function() {db.getRole('roleC')});
-
-}(db)); \ No newline at end of file
diff --git a/jstests/run_program1.js b/jstests/run_program1.js
deleted file mode 100644
index 7a994b2171a..00000000000
--- a/jstests/run_program1.js
+++ /dev/null
@@ -1,19 +0,0 @@
-if ( ! _isWindows() ) {
-
- // note that normal program exit returns 0
- assert.eq (0, runProgram('true'))
- assert.neq(0, runProgram('false'))
- assert.neq(0, runProgram('this_program_doesnt_exit'));
-
- //verify output visually
- runProgram('echo', 'Hello', 'World.', 'How are you?');
- runProgram('bash', '-c', 'echo Hello World. "How are you?"'); // only one space is printed between Hello and World
-
- // numbers can be passed as numbers or strings
- runProgram('sleep', 0.5);
- runProgram('sleep', '0.5');
-
-} else {
-
- runProgram('cmd', '/c', 'echo hello windows');
-}
diff --git a/jstests/server1470.js b/jstests/server1470.js
deleted file mode 100644
index 0bb4d02c933..00000000000
--- a/jstests/server1470.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.server1470;
-t.drop();
-
-q = { "name" : "first" , "pic" : { "$ref" : "foo", "$id" : ObjectId("4c48d04cd33a5a92628c9af6") } };
-t.update( q , {$set:{ x : 1 } } , true, true );
-ref = t.findOne().pic
-assert.eq( "object", typeof( ref ) );
-assert.eq( q.pic["$ref"] , ref["$ref"] )
-assert.eq( q.pic["$id"] , ref["$id"] )
-
-// just make we haven't broken other update operators
-t.drop();
-t.update( { _id : 1 , x : { $gt : 5 } } , { $set : { y : 1 } } , true );
-assert.eq( { _id : 1 , y : 1 } , t.findOne() );
-
-
-
-
-
diff --git a/jstests/server5346.js b/jstests/server5346.js
deleted file mode 100644
index f4a692bd16a..00000000000
--- a/jstests/server5346.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.server5346;
-t.drop();
-
-x = { _id : 1 , versions : {} }
-t.insert( x )
-
-t.update({ _id : 1 }, { $inc : { "versions.2_01" : 1 } } )
-t.update({ _id : 1 }, { $inc : { "versions.2_1" : 2 } } )
-t.update({ _id : 1 }, { $inc : { "versions.01" : 3 } } )
-t.update({ _id : 1 }, { $inc : { "versions.1" : 4 } } )
-
-// Make sure the correct fields are set, without duplicates.
-assert.docEq( { "_id" : 1, "versions" : { "01" : 3, "1" : 4, "2_01" : 1, "2_1" : 2 } },
- t.findOne())
diff --git a/jstests/server7756.js b/jstests/server7756.js
deleted file mode 100644
index 5a7177ebcc9..00000000000
--- a/jstests/server7756.js
+++ /dev/null
@@ -1,12 +0,0 @@
-
-t = db.server7756;
-t.drop();
-
-t.save( { a:[ { 1:'x' }, 'y' ] } );
-
-assert.eq( 1, t.count( { 'a.1':'x' } ) );
-assert.eq( 1, t.count( { 'a.1':'y' } ) );
-
-assert.eq( 1, t.count( { 'a.1':/x/ } ) );
-assert.eq( 1, t.count( { 'a.1':/y/ } ) );
-
diff --git a/jstests/server9385.js b/jstests/server9385.js
deleted file mode 100644
index ee86891ce2a..00000000000
--- a/jstests/server9385.js
+++ /dev/null
@@ -1,16 +0,0 @@
-// SERVER-9385 ensure saving a document derived from bson->js conversion doesn't lose it's _id
-t = db.server9385;
-t.drop();
-
-t.insert( { _id : 1, x : 1 } );
-x = t.findOne();
-x._id = 2;
-t.save( x );
-
-t.find().forEach( printjson );
-
-assert.eq( 2, t.find().count() );
-assert.eq( 2, t.find().itcount() );
-
-assert( t.findOne( { _id : 1 } ), "original insert missing" );
-assert( t.findOne( { _id : 2 } ), "save didn't work?" );
diff --git a/jstests/server9547.js b/jstests/server9547.js
deleted file mode 100644
index 67cacfc22a7..00000000000
--- a/jstests/server9547.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// SERVER-9547
-// Test that sorting with .max() and .min() doesn't crash.
-
-var t = db.server9547;
-t.drop();
-
-for (var i=0; i<10; i++) {
- t.save({a: i});
-}
-
-t.ensureIndex({a: 1});
-
-// note: max() value is exclusive upper bound
-assert.eq(4, t.find({}).max({a: 4}).toArray().length, "no order");
-
-// Ascending order is fine.
-assert.eq(4, t.find({}).max({a: 4}).sort({a: 1}).toArray().length, "ascending");
-
-// Descending order is still broken.
-// This should really return the same # of results but doesn't.
-assert.eq(5, t.find({}).max({a: 4}).sort({a: -1}).toArray().length, "descending");
diff --git a/jstests/set1.js b/jstests/set1.js
deleted file mode 100644
index d741387af58..00000000000
--- a/jstests/set1.js
+++ /dev/null
@@ -1,9 +0,0 @@
-
-t = db.set1;
-t.drop();
-
-t.insert( { _id : 1, emb : {} });
-t.update( { _id : 1 }, { $set : { emb : { 'a.dot' : 'data'} }});
-assert.eq( { _id : 1 , emb : {} } , t.findOne() , "A" );
-
-
diff --git a/jstests/set2.js b/jstests/set2.js
deleted file mode 100644
index 221ee407759..00000000000
--- a/jstests/set2.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.set2;
-t.drop();
-
-t.save( { _id : 1 , x : true , y : { x : true } } );
-assert.eq( true , t.findOne().x );
-
-t.update( { _id : 1 } , { $set : { x : 17 } } );
-assert.eq( 17 , t.findOne().x );
-
-assert.eq( true , t.findOne().y.x );
-t.update( { _id : 1 } , { $set : { "y.x" : 17 } } );
-assert.eq( 17 , t.findOne().y.x );
-
-t.update( { _id : 1 } , { $set : { a : 2 , b : 3 } } );
-assert.eq( 2 , t.findOne().a );
-assert.eq( 3 , t.findOne().b );
-
diff --git a/jstests/set3.js b/jstests/set3.js
deleted file mode 100644
index 611abc4e6bf..00000000000
--- a/jstests/set3.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.set3;
-t.drop();
-
-t.insert( { "test1" : { "test2" : { "abcdefghijklmnopqrstu" : {"id":1} } } } );
-t.update( {}, {"$set":{"test1.test2.abcdefghijklmnopqrstuvwxyz":{"id":2}}})
-
-x = t.findOne();
-assert.eq( 1 , x.test1.test2.abcdefghijklmnopqrstu.id , "A" );
-assert.eq( 2 , x.test1.test2.abcdefghijklmnopqrstuvwxyz.id , "B" );
-
diff --git a/jstests/set4.js b/jstests/set4.js
deleted file mode 100644
index b37366cdb81..00000000000
--- a/jstests/set4.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.set4;
-t.drop();
-
-orig = { _id:1 , a : [ { x : 1 } ]}
-t.insert( orig );
-
-t.update( {}, { $set : { 'a.0.x' : 2, 'foo.bar' : 3 } } );
-orig.a[0].x = 2; orig.foo = { bar : 3 }
-assert.eq( orig , t.findOne() , "A" );
-
-t.update( {}, { $set : { 'a.0.x' : 4, 'foo.bar' : 5 } } );
-orig.a[0].x = 4; orig.foo.bar = 5;
-assert.eq( orig , t.findOne() , "B" );
-
diff --git a/jstests/set5.js b/jstests/set5.js
deleted file mode 100644
index afa0d014bde..00000000000
--- a/jstests/set5.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.set5;
-t.drop();
-
-function check( want , err ){
- var x = t.findOne();
- delete x._id;
- assert.docEq( want , x , err );
-}
-
-t.update( { a : 5 } , { $set : { a : 6 , b : null } } , true );
-check( { a : 6 , b : null } , "A" )
-
-t.drop();
-
-t.update( { z : 5 } , { $set : { z : 6 , b : null } } , true );
-check( { b : null , z : 6 } , "B" )
diff --git a/jstests/set6.js b/jstests/set6.js
deleted file mode 100644
index d41e7aba971..00000000000
--- a/jstests/set6.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.set6;
-t.drop();
-
-x = { _id : 1 , r : new DBRef( "foo" , new ObjectId() ) }
-t.insert( x )
-assert.eq( x , t.findOne() , "A" );
-
-x.r.$id = new ObjectId()
-t.update({}, { $set : { r : x.r } } );
-assert.eq( x , t.findOne() , "B");
-
-x.r2 = new DBRef( "foo2" , 5 )
-t.update( {} , { $set : { "r2" : x.r2 } } );
-assert.eq( x , t.findOne() , "C" )
-
-x.r.$id = 2;
-t.update( {} , { $set : { "r.$id" : 2 } } )
-assert.eq( x.r.$id , t.findOne().r.$id , "D");
-
diff --git a/jstests/set7.js b/jstests/set7.js
deleted file mode 100644
index 68c4d471f58..00000000000
--- a/jstests/set7.js
+++ /dev/null
@@ -1,67 +0,0 @@
-// test $set with array indicies
-
-t = db.jstests_set7;
-
-t.drop();
-
-t.save( {a:[0,1,2,3]} );
-t.update( {}, {$set:{"a.0":2}} );
-assert.eq( [2,1,2,3], t.findOne().a );
-
-t.update( {}, {$set:{"a.4":5}} );
-assert.eq( [2,1,2,3,5], t.findOne().a );
-
-t.update( {}, {$set:{"a.9":9}} );
-assert.eq( [2,1,2,3,5,null,null,null,null,9], t.findOne().a );
-
-t.drop();
-t.save( {a:[0,1,2,3]} );
-t.update( {}, {$set:{"a.9":9,"a.7":7}} );
-assert.eq( [0,1,2,3,null,null,null,7,null,9], t.findOne().a );
-
-t.drop();
-t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
-t.update( {}, {$set:{"a.11":11} } );
-assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
-
-t.drop();
-t.save( {} );
-t.update( {}, {$set:{"a.0":4}} );
-assert.eq( {"0":4}, t.findOne().a );
-
-t.drop();
-t.update( {"a.0":4}, {$set:{b:1}}, true );
-assert.eq( {"0":4}, t.findOne().a );
-
-t.drop();
-t.save( {a:[]} );
-t.update( {}, {$set:{"a.f":1}} );
-assert( db.getLastError() );
-assert.eq( [], t.findOne().a );
-
-// Test requiring proper ordering of multiple mods.
-t.drop();
-t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
-t.update( {}, {$set:{"a.11":11,"a.2":-2}} );
-assert.eq( [0,1,-2,3,4,5,6,7,8,9,10,11], t.findOne().a );
-
-// Test upsert case
-t.drop();
-t.update( {a:[0,1,2,3,4,5,6,7,8,9,10]}, {$set:{"a.11":11} }, true );
-assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
-
-// SERVER-3750
-t.drop();
-t.save( {a:[]} );
-t.update( {}, {$set:{"a.1500000":1}} ); // current limit
-assert( db.getLastError() == null );
-
-t.drop();
-t.save( {a:[]} );
-t.update( {}, {$set:{"a.1500001":1}} ); // 1 over limit
-assert.neq( db.getLastErrorObj(), null );
-
-t.drop();
-t.save( {a:[]} );
-t.update( {}, {$set:{"a.1000000000":1}} ); // way over limit
-assert.neq( db.getLastErrorObj(), null );
diff --git a/jstests/set_param1.js b/jstests/set_param1.js
deleted file mode 100644
index 555cb520306..00000000000
--- a/jstests/set_param1.js
+++ /dev/null
@@ -1,9 +0,0 @@
-
-old = db.adminCommand( { "getParameter" : "*" } )
-tmp1 = db.adminCommand( { "setParameter" : 1 , "logLevel" : 5 } )
-tmp2 = db.adminCommand( { "setParameter" : 1 , "logLevel" : old.logLevel } )
-now = db.adminCommand( { "getParameter" : "*" } )
-
-assert.eq( old , now , "A" )
-assert.eq( old.logLevel , tmp1.was , "B" )
-assert.eq( 5 , tmp2.was , "C" )
diff --git a/jstests/shell1.js b/jstests/shell1.js
deleted file mode 100644
index 2e6c7292374..00000000000
--- a/jstests/shell1.js
+++ /dev/null
@@ -1,15 +0,0 @@
-x = 1;
-
-shellHelper( "show", "tables;" )
-shellHelper( "show", "tables" )
-shellHelper( "show", "tables ;" )
-
-// test slaveOk levels
-assert(!db.getSlaveOk() && !db.test.getSlaveOk() && !db.getMongo().getSlaveOk(), "slaveOk 1");
-db.getMongo().setSlaveOk();
-assert(db.getSlaveOk() && db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 2");
-db.setSlaveOk(false);
-assert(!db.getSlaveOk() && !db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 3");
-db.test.setSlaveOk(true);
-assert(!db.getSlaveOk() && db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 4");
-
diff --git a/jstests/shell_writeconcern.js b/jstests/shell_writeconcern.js
deleted file mode 100644
index e59bd471294..00000000000
--- a/jstests/shell_writeconcern.js
+++ /dev/null
@@ -1,72 +0,0 @@
-"use strict"
-// check that shell writeconcern work correctly
-// 1.) tests that it can be set on each level and is inherited
-// 2.) tests that each operation (update/insert/remove/save) take and ensure a write concern
-
-var collA = db.shell_wc_a;
-var collB = db.shell_wc_b;
-collA.drop()
-collB.drop()
-
-// test inheritance
-db.setWriteConcern({w:1})
-assert.eq(1, db.getWriteConcern().toJSON().w)
-assert.eq(1, collB.getWriteConcern().toJSON().w)
-
-collA.setWriteConcern({w:2})
-assert.eq(2, collA.getWriteConcern().toJSON().w)
-collA.unsetWriteConcern()
-assert.eq(1, collA.getWriteConcern().toJSON().w)
-
-db.unsetWriteConcern()
-assert.eq(undefined, collA.getWriteConcern())
-assert.eq(undefined, collB.getWriteConcern())
-assert.eq(undefined, db.getWriteConcern())
-
-// test methods, by generating an error
-var res = assert.gleOK(collA.save({_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(1, res.n, tojson(res));
- assert.eq(1, res.upserted, tojson(res));
-} else {
- assert.eq(1, res.nUpserted, tojson(res));
-}
-
-var res = assert.gleOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(1, res.n, tojson(res));
-} else {
- assert.eq(1, res.nMatched, tojson(res));
-}
-var res = assert.gleOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(1, res.n, tojson(res));
-} else {
- assert.eq(1, res.nMatched, tojson(res));
-}
-
-var res = assert.gleOK(collA.insert({_id:2}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(0, res.n, tojson(res));
-} else {
- assert.eq(1, res.nInserted, tojson(res));
-}
-
-var res = assert.gleOK(collA.remove({_id:3}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(0, res.n, tojson(res));
-} else {
- assert.eq(0, res.nRemoved, tojson(res));
-}
-
-var res = assert.gleOK(collA.remove({}, {justOne:true, writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
- assert.eq(1, res.n, tojson(res));
-} else {
- assert.eq(1, res.nRemoved, tojson(res));
-}
-
-assert.gleError(collA.insert([{_id:1}, {_id:1}], {ordered:true, writeConcern:{w:1}}));
-assert.gleError(collA.insert([{_id:1}, {_id:1}], {ordered:false, writeConcern:{w:1}}));
-
-
diff --git a/jstests/shellkillop.js b/jstests/shellkillop.js
deleted file mode 100644
index d903f251f13..00000000000
--- a/jstests/shellkillop.js
+++ /dev/null
@@ -1,61 +0,0 @@
-baseName = "jstests_shellkillop";
-
-// 'retry' should be set to true in contexts where an exception should cause the test to be retried rather than to fail.
-retry = false;
-
-function testShellAutokillop() {
-
-if (true) { // toggle to disable test
- db[baseName].drop();
-
- print("shellkillop.js insert data");
- for (i = 0; i < 100000; ++i) {
- db[baseName].insert({ i: 1 });
- }
- assert.eq(100000, db[baseName].count());
-
- // mongo --autokillop suppressed the ctrl-c "do you want to kill current operation" message
- // it's just for testing purposes and thus not in the shell help
- var evalStr = "print('SKO subtask started'); db." + baseName + ".update( {}, {$set:{i:'abcdefghijkl'}}, false, true ); db." + baseName + ".count();";
- print("shellkillop.js evalStr:" + evalStr);
- spawn = startMongoProgramNoConnect("mongo", "--autokillop", "--port", myPort(), "--eval", evalStr);
-
- sleep(100);
- retry = true;
- assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test won't be valid");
- retry = false;
-
- stopMongoProgramByPid(spawn);
-
- sleep(100);
-
- print("count abcdefghijkl:" + db[baseName].find({ i: 'abcdefghijkl' }).count());
-
- var inprog = db.currentOp().inprog;
- for (i in inprog) {
- if (inprog[i].ns == "test." + baseName)
- throw "shellkillop.js op is still running: " + tojson( inprog[i] );
- }
-
- retry = true;
- assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test was not valid");
- retry = false;
-}
-
-}
-
-for( var nTries = 0; nTries < 10 && retry; ++nTries ) {
- try {
- testShellAutokillop();
- } catch (e) {
- if ( !retry ) {
- throw e;
- }
- printjson( e );
- print( "retrying..." );
- }
-}
-
-assert( !retry, "retried too many times" );
-
-print("shellkillop.js SUCCESS");
diff --git a/jstests/shellstartparallel.js b/jstests/shellstartparallel.js
deleted file mode 100644
index 59110296b26..00000000000
--- a/jstests/shellstartparallel.js
+++ /dev/null
@@ -1,17 +0,0 @@
-function f() {
- throw "intentional_throw_to_test_assert_throws";
-}
-assert.throws(f);
-
-// verify that join works
-db.sps.drop();
-join = startParallelShell("sleep(1000); db.sps.insert({x:1}); db.getLastError();");
-join();
-assert.eq(1, db.sps.count(), "join problem?");
-
-// test with a throw
-join = startParallelShell("db.sps.insert({x:1}); db.getLastError(); throw 'intentionally_uncaught';");
-join();
-assert.eq(2, db.sps.count(), "join2 problem?");
-
-print("shellstartparallel.js SUCCESS");
diff --git a/jstests/shelltypes.js b/jstests/shelltypes.js
deleted file mode 100644
index 3f109269b39..00000000000
--- a/jstests/shelltypes.js
+++ /dev/null
@@ -1,53 +0,0 @@
-// check that constructor also works without "new"
-var a;
-var b;
-a = new ObjectId();
-b = ObjectId(a.valueOf());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "oid");
-
-a = new DBRef("test", "theid");
-b = DBRef(a.getRef(), a.getId());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "dbref");
-
-a = new DBPointer("test", new ObjectId());
-b = DBPointer(a.getCollection(), a.getId());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "dbpointer");
-
-a = new Timestamp(10, 20);
-b = Timestamp(a.t, a.i);
-printjson(a);
-assert.eq(tojson(a), tojson(b), "timestamp");
-
-a = new BinData(3,"VQ6EAOKbQdSnFkRmVUQAAA==");
-b = BinData(a.type, a.base64());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "bindata");
-
-a = new UUID("550e8400e29b41d4a716446655440000");
-b = UUID(a.hex());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "uuid");
-
-a = new MD5("550e8400e29b41d4a716446655440000");
-b = MD5(a.hex());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "md5");
-
-a = new HexData(4, "550e8400e29b41d4a716446655440000");
-b = HexData(a.type, a.hex());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "hexdata");
-
-a = new NumberLong(100);
-b = NumberLong(a.toNumber());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "long");
-
-a = new NumberInt(100);
-b = NumberInt(a.toNumber());
-printjson(a);
-assert.eq(tojson(a), tojson(b), "int");
-
diff --git a/jstests/showdiskloc.js b/jstests/showdiskloc.js
deleted file mode 100644
index d1339c6d238..00000000000
--- a/jstests/showdiskloc.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Sanity check for the $showDiskLoc option.
-
-t = db.jstests_showdiskloc;
-t.drop();
-
-function checkResults( arr ) {
- for( i in arr ) {
- a = arr[ i ];
- assert( a['$diskLoc'] );
- }
-}
-
-// Check query.
-t.save( {} );
-checkResults( t.find()._addSpecial("$showDiskLoc" , true).toArray() );
-
-// Check query and get more.
-t.save( {} );
-t.save( {} );
-checkResults( t.find().batchSize( 2 )._addSpecial("$showDiskLoc" , true).toArray() );
-
-// Check with a covered index.
-t.ensureIndex( { a:1 } );
-checkResults
-( t.find( {}, { _id:0, a:1 } ).hint( { a:1 } )._addSpecial("$showDiskLoc" , true).toArray() );
diff --git a/jstests/skip1.js b/jstests/skip1.js
deleted file mode 100644
index c620fb01bca..00000000000
--- a/jstests/skip1.js
+++ /dev/null
@@ -1,15 +0,0 @@
-// SERVER-2845 When skipping objects without loading them, they shouldn't be
-// included in the nscannedObjects count.
-
-if ( 0 ) { // SERVER-2845
-t = db.jstests_skip1;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.save( {a:5} );
-t.save( {a:5} );
-t.save( {a:5} );
-
-assert.eq( 3, t.find( {a:5} ).skip( 2 ).explain().nscanned );
-assert.eq( 1, t.find( {a:5} ).skip( 2 ).explain().nscannedObjects );
-} \ No newline at end of file
diff --git a/jstests/slice1.js b/jstests/slice1.js
deleted file mode 100644
index b20e7e48b14..00000000000
--- a/jstests/slice1.js
+++ /dev/null
@@ -1,68 +0,0 @@
-t = db.slice1;
-t.drop();
-
-t.insert({_id:1, a:[0,1,2,3,4,5,-5,-4,-3,-2,-1], b:1, c:1});
-
-// first three
-out = t.findOne({}, {a:{$slice:3}});
-assert.eq(out.a , [0,1,2], '1');
-
-// last three
-out = t.findOne({}, {a:{$slice:-3}});
-assert.eq(out.a , [-3, -2, -1], '2');
-
-// skip 2, limit 3
-out = t.findOne({}, {a:{$slice:[2, 3]}});
-assert.eq(out.a , [2,3,4], '3');
-
-// skip to fifth from last, limit 4
-out = t.findOne({}, {a:{$slice:[-5, 4]}});
-assert.eq(out.a , [-5, -4, -3, -2], '4');
-
-// skip to fifth from last, limit 10
-out = t.findOne({}, {a:{$slice:[-5, 10]}});
-assert.eq(out.a , [-5, -4, -3, -2, -1], '5');
-
-
-// interaction with other fields
-
-out = t.findOne({}, {a:{$slice:3}});
-assert.eq(out.a , [0,1,2], 'A 1');
-assert.eq(out.b , 1, 'A 2');
-assert.eq(out.c , 1, 'A 3');
-
-out = t.findOne({}, {a:{$slice:3}, b:true});
-assert.eq(out.a , [0,1,2], 'B 1');
-assert.eq(out.b , 1, 'B 2');
-assert.eq(out.c , undefined);
-
-out = t.findOne({}, {a:{$slice:3}, b:false});
-assert.eq(out.a , [0,1,2]);
-assert.eq(out.b , undefined);
-assert.eq(out.c , 1);
-
-t.drop()
-t.insert({comments: [{id:0, text:'a'},{id:1, text:'b'},{id:2, text:'c'},{id:3, text:'d'}], title:'foo'})
-
-
-out = t.findOne({}, {comments:{$slice:2}, 'comments.id':true});
-assert.eq(out.comments , [{id:0}, {id:1}]);
-assert.eq(out.title , undefined);
-
-out = t.findOne({}, {comments:{$slice:2}, 'comments.id':false});
-assert.eq(out.comments , [{text: 'a'}, {text: 'b'}]);
-assert.eq(out.title , 'foo');
-
-//nested arrays
-t.drop();
-t.insert({_id:1, a:[[1,1,1], [2,2,2], [3,3,3]], b:1, c:1});
-
-out = t.findOne({}, {a:{$slice:1}});
-assert.eq(out.a , [[1,1,1]], 'n 1');
-
-out = t.findOne({}, {a:{$slice:-1}});
-assert.eq(out.a , [[3,3,3]], 'n 2');
-
-out = t.findOne({}, {a:{$slice:[0,2]}});
-assert.eq(out.a , [[1,1,1],[2,2,2]], 'n 2');
-
diff --git a/jstests/sort1.js b/jstests/sort1.js
deleted file mode 100644
index 12b97728e90..00000000000
--- a/jstests/sort1.js
+++ /dev/null
@@ -1,48 +0,0 @@
-debug = function( s ){
- //print( s );
-}
-
-t = db.sort1;
-t.drop();
-
-t.save({x:3,z:33});
-t.save({x:5,z:33});
-t.save({x:2,z:33});
-t.save({x:3,z:33});
-t.save({x:1,z:33});
-
-debug( "a" )
-for( var pass = 0; pass < 2; pass++ ) {
- assert( t.find().sort({x:1})[0].x == 1 );
- assert( t.find().sort({x:1}).skip(1)[0].x == 2 );
- assert( t.find().sort({x:-1})[0].x == 5 );
- assert( t.find().sort({x:-1})[1].x == 3 );
- assert.eq( t.find().sort({x:-1}).skip(0)[0].x , 5 );
- assert.eq( t.find().sort({x:-1}).skip(1)[0].x , 3 );
- t.ensureIndex({x:1});
-
-}
-
-debug( "b" )
-assert(t.validate().valid);
-
-t.drop();
-t.save({x:'a'});
-t.save({x:'aba'});
-t.save({x:'zed'});
-t.save({x:'foo'});
-
-debug( "c" )
-
-for( var pass = 0; pass < 2; pass++ ) {
- debug( tojson( t.find().sort( { "x" : 1 } ).limit(1).next() ) );
- assert.eq( "a" , t.find().sort({'x': 1}).limit(1).next().x , "c.1" );
- assert.eq( "a" , t.find().sort({'x': 1}).next().x , "c.2" );
- assert.eq( "zed" , t.find().sort({'x': -1}).limit(1).next().x , "c.3" );
- assert.eq( "zed" , t.find().sort({'x': -1}).next().x , "c.4" );
- t.ensureIndex({x:1});
-}
-
-debug( "d" )
-
-assert(t.validate().valid);
diff --git a/jstests/sort10.js b/jstests/sort10.js
deleted file mode 100644
index e9663f4a55d..00000000000
--- a/jstests/sort10.js
+++ /dev/null
@@ -1,48 +0,0 @@
-// signed dates check
-t = db.sort10;
-
-function checkSorting1(opts) {
- t.drop();
- t.insert({ x: new Date(50000) });
- t.insert({ x: new Date(-50) });
- var d = new Date(-50);
- for (var pass = 0; pass < 2; pass++) {
- assert(t.find().sort({x:1})[0].x.valueOf() == d.valueOf());
- t.ensureIndex({ x: 1 }, opts);
- t.insert({ x: new Date() });
- }
-}
-
-checkSorting1({})
-checkSorting1({"background":true})
-
-
-
-function checkSorting2(dates, sortOrder) {
- cur = t.find().sort({x:sortOrder});
- assert.eq(dates.length, cur.count(), "Incorrect number of results returned");
- index = 0;
- while (cur.hasNext()) {
- date = cur.next().x;
- assert.eq(dates[index].valueOf(), date.valueOf());
- index++;
- }
-}
-
-t.drop();
-dates = [new Date(-5000000000000), new Date(5000000000000), new Date(0), new Date(5), new Date(-5)];
-for (var i = 0; i < dates.length; i++) {
- t.insert({x:dates[i]});
-}
-dates.sort(function(a,b){return a - b});
-reverseDates = dates.slice(0).reverse()
-
-checkSorting2(dates, 1)
-checkSorting2(reverseDates, -1)
-t.ensureIndex({x:1})
-checkSorting2(dates, 1)
-checkSorting2(reverseDates, -1)
-t.dropIndexes()
-t.ensureIndex({x:-1})
-checkSorting2(dates, 1)
-checkSorting2(reverseDates, -1)
diff --git a/jstests/sort2.js b/jstests/sort2.js
deleted file mode 100644
index 6dfa8486201..00000000000
--- a/jstests/sort2.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// test sorting, mainly a test ver simple with no index
-
-t = db.sort2;
-
-t.drop();
-t.save({x:1, y:{a:5,b:4}});
-t.save({x:1, y:{a:7,b:3}});
-t.save({x:1, y:{a:2,b:3}});
-t.save({x:1, y:{a:9,b:3}});
-for( var pass = 0; pass < 2; pass++ ) {
- var res = t.find().sort({'y.a':1}).toArray();
- assert( res[0].y.a == 2 );
- assert( res[1].y.a == 5 );
- assert( res.length == 4 );
- t.ensureIndex({"y.a":1});
-}
-assert(t.validate().valid);
-
-t.drop();
-t.insert({ x: 1 })
-t.insert({ x: 5000000000 })
-t.insert({ x: NaN });
-t.insert({ x: Infinity });
-t.insert({ x: -Infinity });
-var good = [NaN, -Infinity, 1, 5000000000, Infinity];
-for (var pass = 0; pass < 2; pass++) {
- var res = t.find({}, { _id: 0 }).sort({ x: 1 }).toArray();
- for (var i = 0; i < good.length; i++) {
- assert(good[i].toString() == res[i].x.toString());
- }
- t.ensureIndex({ x : 1 });
-}
diff --git a/jstests/sort3.js b/jstests/sort3.js
deleted file mode 100644
index b79f1f60381..00000000000
--- a/jstests/sort3.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.sort3;
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 5 } );
-t.save( { a : 3 } );
-
-assert.eq( "1,5,3" , t.find().toArray().map( function(z){ return z.a; } ) );
-
-assert.eq( "1,3,5" , t.find().sort( { a : 1 } ).toArray().map( function(z){ return z.a; } ) );
-assert.eq( "5,3,1" , t.find().sort( { a : -1 } ).toArray().map( function(z){ return z.a; } ) );
-
-assert.eq( "1,3,5" , t.find( { query : {} , orderby : { a : 1 } } ).toArray().map( function(z){ return z.a; } ) );
-assert.eq( "5,3,1" , t.find( { query : {} , orderby : { a : -1 } } ).toArray().map( function(z){ return z.a; } ) );
-
diff --git a/jstests/sort4.js b/jstests/sort4.js
deleted file mode 100644
index 5174b46f41f..00000000000
--- a/jstests/sort4.js
+++ /dev/null
@@ -1,43 +0,0 @@
-t = db.sort4;
-t.drop();
-
-
-function nice( sort , correct , extra ){
- var c = t.find().sort( sort );
- var s = "";
- c.forEach(
- function(z){
- if ( s.length )
- s += ",";
- s += z.name;
- if ( z.prename )
- s += z.prename;
- }
- );
- print( tojson( sort ) + "\t" + s );
- if ( correct )
- assert.eq( correct , s , tojson( sort ) + "(" + extra + ")" );
- return s;
-}
-
-t.save({name: 'A', prename: 'B'})
-t.save({name: 'A', prename: 'C'})
-t.save({name: 'B', prename: 'B'})
-t.save({name: 'B', prename: 'D'})
-
-nice( { name:1 } , "AB,AC,BB,BD" , "s1" );
-nice( { prename : 1 } , "AB,BB,AC,BD" , "s2" );
-nice( {name:1, prename:1} , "AB,AC,BB,BD" , "s3" );
-
-t.save({name: 'A'})
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD" , "e1" );
-
-t.save({name: 'C'})
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2" ); // SERVER-282
-
-t.ensureIndex( { name : 1 , prename : 1 } );
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ia" ); // SERVER-282
-
-t.dropIndexes();
-t.ensureIndex( { name : 1 } );
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ib" ); // SERVER-282
diff --git a/jstests/sort5.js b/jstests/sort5.js
deleted file mode 100644
index b90256ef79d..00000000000
--- a/jstests/sort5.js
+++ /dev/null
@@ -1,21 +0,0 @@
-var t = db.sort5;
-t.drop();
-
-t.save({_id: 5, x: 1, y: {a: 5, b: 4}});
-t.save({_id: 7, x: 2, y: {a: 7, b: 3}});
-t.save({_id: 2, x: 3, y: {a: 2, b: 3}});
-t.save({_id: 9, x: 4, y: {a: 9, b: 3}});
-
-// test compound sorting
-
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A no index" );
-t.ensureIndex({"y.b": 1, "y.a": -1});
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A index" );
-assert(t.validate().valid, "A valid");
-
-// test sorting on compound key involving _id
-
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
-t.ensureIndex({"y.b": 1, "_id": -1});
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
-assert(t.validate().valid, "B valid");
diff --git a/jstests/sort6.js b/jstests/sort6.js
deleted file mode 100644
index 027ba7a01f5..00000000000
--- a/jstests/sort6.js
+++ /dev/null
@@ -1,38 +0,0 @@
-
-t = db.sort6;
-
-function get( x ){
- return t.find().sort( { c : x } ).map( function(z){ return z._id; } );
-}
-
-// part 1
-t.drop();
-
-t.insert({_id:1,c:null})
-t.insert({_id:2,c:1})
-t.insert({_id:3,c:2})
-
-
-assert.eq( [3,2,1] , get( -1 ) , "A1" ) // SERVER-635
-assert.eq( [1,2,3] , get( 1 ) , "A2" )
-
-t.ensureIndex( { c : 1 } );
-
-assert.eq( [3,2,1] , get( -1 ) , "B1" )
-assert.eq( [1,2,3] , get( 1 ) , "B2" )
-
-
-// part 2
-t.drop();
-
-t.insert({_id:1})
-t.insert({_id:2,c:1})
-t.insert({_id:3,c:2})
-
-assert.eq( [3,2,1] , get( -1 ) , "C1" ) // SERVER-635
-assert.eq( [1,2,3] , get( 1 ) , "C2" )
-
-t.ensureIndex( { c : 1 } );
-
-assert.eq( [3,2,1] , get( -1 ) , "D1" )
-assert.eq( [1,2,3] , get( 1 ) , "X2" )
diff --git a/jstests/sort7.js b/jstests/sort7.js
deleted file mode 100644
index 0b98734e5ff..00000000000
--- a/jstests/sort7.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Check sorting of array sub field SERVER-480.
-
-t = db.jstests_sort7;
-t.drop();
-
-// Compare indexed and unindexed sort order for an array embedded field.
-
-t.save( { a : [ { x : 2 } ] } );
-t.save( { a : [ { x : 1 } ] } );
-t.save( { a : [ { x : 3 } ] } );
-unindexed = t.find().sort( {"a.x":1} ).toArray();
-t.ensureIndex( { "a.x" : 1 } );
-indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
-assert.eq( unindexed, indexed );
-
-// Now check when there are two objects in the array.
-
-t.remove({});
-t.save( { a : [ { x : 2 }, { x : 3 } ] } );
-t.save( { a : [ { x : 1 }, { x : 4 } ] } );
-t.save( { a : [ { x : 3 }, { x : 2 } ] } );
-unindexed = t.find().sort( {"a.x":1} ).toArray();
-t.ensureIndex( { "a.x" : 1 } );
-indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
-assert.eq( unindexed, indexed );
diff --git a/jstests/sort8.js b/jstests/sort8.js
deleted file mode 100644
index 916075502d7..00000000000
--- a/jstests/sort8.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Check sorting of arrays indexed by key SERVER-2884
-
-t = db.jstests_sort8;
-t.drop();
-
-t.save( {a:[1,10]} );
-t.save( {a:5} );
-unindexedForward = t.find().sort( {a:1} ).toArray();
-unindexedReverse = t.find().sort( {a:-1} ).toArray();
-t.ensureIndex( {a:1} );
-indexedForward = t.find().sort( {a:1} ).hint( {a:1} ).toArray();
-indexedReverse = t.find().sort( {a:-1} ).hint( {a:1} ).toArray();
-
-assert.eq( unindexedForward, indexedForward );
-assert.eq( unindexedReverse, indexedReverse );
-
-// Sorting is based on array members, not the array itself.
-assert.eq( [1,10], unindexedForward[ 0 ].a );
-assert.eq( [1,10], unindexedReverse[ 0 ].a );
-
-// Now try with a bounds constraint.
-t.dropIndexes();
-unindexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).toArray();
-unindexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).toArray();
-t.ensureIndex( {a:1} );
-indexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).hint( {a:1} ).toArray();
-indexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).hint( {a:1} ).toArray();
-
-assert.eq( unindexedForward, indexedForward );
-assert.eq( unindexedReverse, indexedReverse );
diff --git a/jstests/sort9.js b/jstests/sort9.js
deleted file mode 100644
index 62407d6e96d..00000000000
--- a/jstests/sort9.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// Unindexed array sorting SERVER-2884
-
-t = db.jstests_sort9;
-t.drop();
-
-t.save( {a:[]} );
-t.save( {a:[[]]} );
-assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-
-t.drop();
-t.save( {} );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {'a.b':1} ).itcount() );
-
-t.drop();
-t.save( {a:{}} );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {'a.b':1} ).itcount() );
diff --git a/jstests/sort_numeric.js b/jstests/sort_numeric.js
deleted file mode 100644
index 807f23dfe8d..00000000000
--- a/jstests/sort_numeric.js
+++ /dev/null
@@ -1,35 +0,0 @@
-
-t = db.sort_numeric;
-t.drop();
-
-// there are two numeric types int he db; make sure it handles them right
-// for comparisons.
-
-t.save( { a : 3 } );
-t.save( { a : 3.1 } );
-t.save( { a : 2.9 } );
-t.save( { a : 1 } );
-t.save( { a : 1.9 } );
-t.save( { a : 5 } );
-t.save( { a : 4.9 } );
-t.save( { a : 2.91 } );
-
-for( var pass = 0; pass < 2; pass++ ) {
-
- var c = t.find().sort({a:1});
- var last = 0;
- while( c.hasNext() ) {
- current = c.next();
- assert( current.a > last );
- last = current.a;
- }
-
- assert( t.find({a:3}).count() == 1 );
- assert( t.find({a:3.0}).count() == 1 );
- assert( t.find({a:3.0}).length() == 1 );
-
- t.ensureIndex({a:1});
-}
-
-assert(t.validate().valid);
-
diff --git a/jstests/sorta.js b/jstests/sorta.js
deleted file mode 100644
index 7c82778a186..00000000000
--- a/jstests/sorta.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// SERVER-2905 sorting with missing fields
-
-t = db.jstests_sorta;
-t.drop();
-
-// Enable _allow_dot to try and bypass v8 field name checking.
-t.insert( {_id:0,a:MinKey}, true );
-t.save( {_id:3,a:null} );
-t.save( {_id:1,a:[]} );
-t.save( {_id:7,a:[2]} );
-t.save( {_id:4} );
-t.save( {_id:5,a:null} );
-t.save( {_id:2,a:[]} );
-t.save( {_id:6,a:1} );
-t.insert( {_id:8,a:MaxKey}, true );
-
-function sorted( arr ) {
- assert.eq( 9, arr.length );
- for( i = 1; i < arr.length; ++i ) {
- assert.lte( arr[ i-1 ]._id, arr[ i ]._id );
- }
-}
-
-sorted( t.find().sort( {a:1} ).toArray() );
-t.ensureIndex( {a:1} );
-sorted( t.find().sort( {a:1} ).hint( {a:1} ).toArray() );
diff --git a/jstests/sortb.js b/jstests/sortb.js
deleted file mode 100644
index e16c7d650e6..00000000000
--- a/jstests/sortb.js
+++ /dev/null
@@ -1,27 +0,0 @@
-// Test that the in memory sort capacity limit is checked for all "top N" sort candidates.
-// SERVER-4716
-
-t = db.jstests_sortb;
-t.drop();
-
-t.ensureIndex({b:1});
-
-for( i = 0; i < 100; ++i ) {
- t.save( {a:i,b:i} );
-}
-
-// These large documents will not be part of the initial set of "top 100" matches, and they will
-// not be part of the final set of "top 100" matches returned to the client. However, they are an
-// intermediate set of "top 100" matches and should trigger an in memory sort capacity exception.
-big = new Array( 1024 * 1024 ).toString();
-for( i = 100; i < 200; ++i ) {
- t.save( {a:i,b:i,big:big} );
-}
-
-for( i = 200; i < 300; ++i ) {
- t.save( {a:i,b:i} );
-}
-
-assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).limit( 100 ).itcount(); } );
-assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).showDiskLoc().limit( 100 ).itcount(); } );
-t.drop(); \ No newline at end of file
diff --git a/jstests/sortc.js b/jstests/sortc.js
deleted file mode 100644
index f9aa202508b..00000000000
--- a/jstests/sortc.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Test sorting with skipping and multiple candidate query plans.
-
-t = db.jstests_sortc;
-t.drop();
-
-t.save( {a:1} );
-t.save( {a:2} );
-
-function checkA( a, sort, skip, query ) {
- query = query || {};
- assert.eq( a, t.find( query ).sort( sort ).skip( skip )[ 0 ].a );
-}
-
-function checkSortAndSkip() {
- checkA( 1, {a:1}, 0 );
- checkA( 2, {a:1}, 1 );
-
- checkA( 1, {a:1}, 0, {a:{$gt:0},b:null} );
- checkA( 2, {a:1}, 1, {a:{$gt:0},b:null} );
-
- checkA( 2, {a:-1}, 0 );
- checkA( 1, {a:-1}, 1 );
-
- checkA( 2, {a:-1}, 0, {a:{$gt:0},b:null} );
- checkA( 1, {a:-1}, 1, {a:{$gt:0},b:null} );
-
- checkA( 1, {$natural:1}, 0 );
- checkA( 2, {$natural:1}, 1 );
-
- checkA( 2, {$natural:-1}, 0 );
- checkA( 1, {$natural:-1}, 1 );
-}
-
-checkSortAndSkip();
-
-t.ensureIndex( {a:1} );
-checkSortAndSkip();
diff --git a/jstests/sortd.js b/jstests/sortd.js
deleted file mode 100644
index 963d32b0ca4..00000000000
--- a/jstests/sortd.js
+++ /dev/null
@@ -1,70 +0,0 @@
-// Test sorting with dups and multiple candidate query plans.
-
-t = db.jstests_sortd;
-
-function checkNumSorted( n, query ) {
- docs = query.toArray();
- assert.eq( n, docs.length );
- for( i = 1; i < docs.length; ++i ) {
- assert.lte( docs[ i-1 ].a, docs[ i ].a );
- }
-}
-
-
-// Test results added by ordered and unordered plans, unordered plan finishes.
-
-t.drop();
-
-t.save( {a:[1,2,3,4,5]} );
-t.save( {a:10} );
-t.ensureIndex( {a:1} );
-
-assert.eq( 2, t.find( {a:{$gt:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( {a:{$gt:0},b:null} ).sort( {a:1} ).itcount() );
-
-// Test results added by ordered and unordered plans, ordered plan finishes.
-
-t.drop();
-
-t.save( {a:1} );
-t.save( {a:10} );
-for( i = 2; i <= 9; ++i ) {
- t.save( {a:i} );
-}
-for( i = 0; i < 30; ++i ) {
- t.save( {a:100} );
-}
-t.ensureIndex( {a:1} );
-
-checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10}} ).sort( {a:1} ) );
-checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10},b:null} ).sort( {a:1} ) );
-
-// Test results added by ordered and unordered plans, ordered plan finishes and continues with getmore.
-
-t.drop();
-
-t.save( {a:1} );
-t.save( {a:200} );
-for( i = 2; i <= 199; ++i ) {
- t.save( {a:i} );
-}
-for( i = 0; i < 30; ++i ) {
- t.save( {a:2000} );
-}
-t.ensureIndex( {a:1} );
-
-checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200}} ).sort( {a:1} ) );
-checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200},b:null} ).sort( {a:1} ) );
-
-// Test results added by ordered and unordered plans, with unordered results excluded during
-// getmore.
-
-t.drop();
-
-for( i = 399; i >= 0; --i ) {
- t.save( {a:i} );
-}
-t.ensureIndex( {a:1} );
-
-checkNumSorted( 400, t.find( {a:{$gte:0,$lte:400},b:null} ).batchSize( 50 ).sort( {a:1} ) );
-
diff --git a/jstests/sorte.js b/jstests/sorte.js
deleted file mode 100644
index fee513a298b..00000000000
--- a/jstests/sorte.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Check that getLastError is clear after a scan and order plan triggers an in memory sort
-// exception, but an in order plan continues running.
-// SERVER-5016
-
-if ( 0 ) { // SERVER-5016
-
-t = db.jstests_sorte;
-t.drop();
-
-big = new Array( 1000000 ).toString()
-
-for( i = 0; i < 300; ++i ) {
- t.save( { a:0, b:0 } );
-}
-
-for( i = 0; i < 40; ++i ) {
- t.save( { a:1, b:1, big:big } );
-}
-
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-
-c = t.find( { a:{ $gte:0 }, b:1 } ).sort( { a:1 } );
-c.next();
-assert( !db.getLastError() );
-count = 1;
-count += c.itcount();
-assert.eq( 40, count );
-
-} \ No newline at end of file
diff --git a/jstests/sortf.js b/jstests/sortf.js
deleted file mode 100644
index 615791e25a5..00000000000
--- a/jstests/sortf.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Unsorted plan on {a:1}, sorted plan on {b:1}. The unsorted plan exhausts its memory limit before
-// the sorted plan is chosen by the query optimizer.
-
-t = db.jstests_sortf;
-t.drop();
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-for( i = 0; i < 100; ++i ) {
- t.save( {a:0,b:0} );
-}
-
-big = new Array( 10 * 1000 * 1000 ).toString();
-for( i = 0; i < 5; ++i ) {
- t.save( {a:1,b:1,big:big} );
-}
-
-assert.eq( 5, t.find( {a:1} ).sort( {b:1} ).itcount() );
-t.drop(); \ No newline at end of file
diff --git a/jstests/sortg.js b/jstests/sortg.js
deleted file mode 100644
index bde4ad70061..00000000000
--- a/jstests/sortg.js
+++ /dev/null
@@ -1,64 +0,0 @@
-// Test that a memory exception is triggered for in memory sorts, but not for indexed sorts.
-
-t = db.jstests_sortg;
-t.drop();
-
-big = new Array( 1000000 ).toString()
-
-for( i = 0; i < 100; ++i ) {
- t.save( {b:0} );
-}
-
-for( i = 0; i < 40; ++i ) {
- t.save( {a:0,x:big} );
-}
-
-function memoryException( sortSpec, querySpec ) {
- querySpec = querySpec || {};
- var ex = assert.throws( function() {
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount()
- } );
- assert( ex.toString().match( /sort/ ) );
- assert.throws( function() {
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).explain( true )
- } );
- assert( ex.toString().match( /sort/ ) );
-}
-
-function noMemoryException( sortSpec, querySpec ) {
- querySpec = querySpec || {};
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount();
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).explain( true );
-}
-
-// Unindexed sorts.
-memoryException( {a:1} );
-memoryException( {b:1} );
-
-// Indexed sorts.
-noMemoryException( {_id:1} );
-noMemoryException( {$natural:1} );
-
-assert.eq( 1, t.getIndexes().length );
-
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-t.ensureIndex( {c:1} );
-
-assert.eq( 4, t.getIndexes().length );
-
-// These sorts are now indexed.
-noMemoryException( {a:1} );
-noMemoryException( {b:1} );
-
-// A memory exception is triggered for an unindexed sort involving multiple plans.
-memoryException( {d:1}, {b:null,c:null} );
-
-// With an indexed plan on _id:1 and an unindexed plan on b:1, the indexed plan
-// should succeed even if the unindexed one would exhaust its memory limit.
-noMemoryException( {_id:1}, {b:null} );
-
-// With an unindexed plan on b:1 recorded for a query, the query should be
-// retried when the unindexed plan exhausts its memory limit.
-noMemoryException( {_id:1}, {b:null} );
-t.drop();
diff --git a/jstests/sorth.js b/jstests/sorth.js
deleted file mode 100644
index 1072975a3ec..00000000000
--- a/jstests/sorth.js
+++ /dev/null
@@ -1,140 +0,0 @@
-// Tests for the $in/sort/limit optimization combined with inequality bounds. SERVER-5777
-
-
-t = db.jstests_sorth;
-t.drop();
-
-/** Assert that the 'a' and 'b' fields of the documents match. */
-function assertMatch( expectedMatch, match ) {
- if (undefined !== expectedMatch.a) {
- assert.eq( expectedMatch.a, match.a );
- }
- if (undefined !== expectedMatch.b) {
- assert.eq( expectedMatch.b, match.b );
- }
-}
-
-/** Assert an expected document or array of documents matches the 'matches' array. */
-function assertMatches( expectedMatches, matches ) {
- if ( expectedMatches.length == null ) {
- assertMatch( expectedMatches, matches[ 0 ] );
- }
- for( i = 0; i < expectedMatches.length; ++i ) {
- assertMatch( expectedMatches[ i ], matches[ i ] );
- }
-}
-
-/** Generate a cursor using global parameters. */
-function find( query ) {
- return t.find( query ).sort( _sort ).limit( _limit ).hint( _hint );
-}
-
-/** Check the expected matches for a query. */
-function checkMatches( expectedMatch, query ) {
- result = find( query ).toArray();
- assertMatches( expectedMatch, result );
- explain = find( query ).explain();
- assert.eq( expectedMatch.length || 1, explain.n );
-}
-
-/** Reset data, index, and _sort and _hint globals. */
-function reset( sort, index ) {
- t.drop();
- t.save( { a:1, b:1 } );
- t.save( { a:1, b:2 } );
- t.save( { a:1, b:3 } );
- t.save( { a:2, b:0 } );
- t.save( { a:2, b:3 } );
- t.save( { a:2, b:5 } );
- t.ensureIndex( index );
- _sort = sort;
- _hint = index;
-}
-
-function checkForwardDirection( sort, index ) {
- reset( sort, index );
-
- _limit = -1;
-
- // Lower bound checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:3 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:3 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:4 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
-
- // Upper bound checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:0 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
-
- // Lower and upper bounds checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:0 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lt:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:1 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0, $lte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2, $lt:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2.5, $lte:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2.5, $lte:3 } } );
-
- // Limit is -2.
- _limit = -2;
- checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
- // We omit 'a' here because it's not defined whether or not we will see
- // {a:2, b:3} or {a:1, b:3} first as our sort is over 'b'.
- checkMatches( [ { a:1, b:2 }, { b:3 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
-
- // With an additional document between the $in values.
- t.save( { a:1.5, b:3 } );
- checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
-}
-
-// Basic test with an index suffix order.
-checkForwardDirection( { b:1 }, { a:1, b:1 } );
-// With an additonal index field.
-checkForwardDirection( { b:1 }, { a:1, b:1, c:1 } );
-// With an additonal reverse direction index field.
-checkForwardDirection( { b:1 }, { a:1, b:1, c:-1 } );
-// With an additonal ordered index field.
-checkForwardDirection( { b:1, c:1 }, { a:1, b:1, c:1 } );
-// With an additonal reverse direction ordered index field.
-checkForwardDirection( { b:1, c:-1 }, { a:1, b:1, c:-1 } );
-
-function checkReverseDirection( sort, index ) {
- reset( sort, index );
- _limit = -1;
-
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
-
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:5 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.1 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3 } } );
-
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5, $gte:5 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:2, $gte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3, $gt:1 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5, $gte:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3, $gt:0 } } );
-}
-
-// With a descending order index.
-checkReverseDirection( { b:-1 }, { a:1, b:-1 } );
-checkReverseDirection( { b:-1 }, { a:1, b:-1, c:1 } );
-checkReverseDirection( { b:-1 }, { a:1, b:-1, c:-1 } );
-checkReverseDirection( { b:-1, c:1 }, { a:1, b:-1, c:1 } );
-checkReverseDirection( { b:-1, c:-1 }, { a:1, b:-1, c:-1 } );
diff --git a/jstests/sorti.js b/jstests/sorti.js
deleted file mode 100644
index 2e5cfe110d7..00000000000
--- a/jstests/sorti.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Check that a projection is applied after an in memory sort.
-
-t = db.jstests_sorti;
-t.drop();
-
-t.save( { a:1, b:0 } );
-t.save( { a:3, b:1 } );
-t.save( { a:2, b:2 } );
-t.save( { a:4, b:3 } );
-
-function checkBOrder( query ) {
- arr = query.toArray();
- order = [];
- for( i in arr ) {
- a = arr[ i ];
- order.push( a.b );
- }
- assert.eq( [ 0, 2, 1, 3 ], order );
-}
-
-checkBOrder( t.find().sort( { a:1 } ) );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
-t.ensureIndex( { b:1 } );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ).hint( { b:1 } ) );
diff --git a/jstests/sortj.js b/jstests/sortj.js
deleted file mode 100644
index 7a73829b94e..00000000000
--- a/jstests/sortj.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// Test an in memory sort memory assertion after a plan has "taken over" in the query optimizer
-// cursor.
-
-t = db.jstests_sortj;
-t.drop();
-
-t.ensureIndex( { a:1 } );
-
-big = new Array( 100000 ).toString();
-for( i = 0; i < 1000; ++i ) {
- t.save( { a:1, b:big } );
-}
-
-assert.throws( function() {
- t.find( { a:{ $gte:0 }, c:null } ).sort( { d:1 } ).itcount();
- } );
-t.drop(); \ No newline at end of file
diff --git a/jstests/sortk.js b/jstests/sortk.js
deleted file mode 100644
index 3895a34c3ac..00000000000
--- a/jstests/sortk.js
+++ /dev/null
@@ -1,140 +0,0 @@
-// End-to-end testing for index scan explosion + merge sort.
-// SERVER-5063 and SERVER-1205.
-t = db.jstests_sortk;
-t.drop();
-
-function resetCollection() {
- t.drop();
- t.save( { a:1, b:1 } );
- t.save( { a:1, b:2 } );
- t.save( { a:1, b:3 } );
- t.save( { a:2, b:4 } );
- t.save( { a:2, b:5 } );
- t.save( { a:2, b:0 } );
-}
-
-resetCollection();
-t.ensureIndex( { a:1, b:1 } );
-
-function simpleQuery( extraFields, sort, hint ) {
- query = { a:{ $in:[ 1, 2 ] } };
- Object.extend( query, extraFields );
- sort = sort || { b:1 };
- hint = hint || { a:1, b:1 };
- return t.find( query ).sort( sort ).hint( hint );
-}
-
-function simpleQueryWithLimit( limit ) {
- return simpleQuery().limit( limit );
-}
-
-// The limit is -1.
-assert.eq( 0, simpleQueryWithLimit( -1 )[ 0 ].b );
-
-// The limit is -2.
-assert.eq( 0, simpleQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 1, simpleQueryWithLimit( -2 )[ 1 ].b );
-
-// A skip is applied.
-assert.eq( 1, simpleQueryWithLimit( -1 ).skip( 1 )[ 0 ].b );
-
-// No limit is applied.
-assert.eq( 6, simpleQueryWithLimit( 0 ).itcount() );
-assert.eq( 6, simpleQueryWithLimit( 0 ).explain().nscanned );
-assert.eq( 5, simpleQueryWithLimit( 0 ).skip( 1 ).itcount() );
-
-// The query has additional constriants, preventing limit optimization.
-assert.eq( 2, simpleQuery( { $where:'this.b>=2' } ).limit( -1 )[ 0 ].b );
-
-// The sort order is the reverse of the index order.
-assert.eq( 5, simpleQuery( {}, { b:-1 } ).limit( -1 )[ 0 ].b );
-
-// The sort order is the reverse of the index order on a constrained field.
-assert.eq( 0, simpleQuery( {}, { a:-1, b:1 } ).limit( -1 )[ 0 ].b );
-
-// Without a hint, multiple cursors are attempted.
-assert.eq( 0, t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 )[ 0 ].b );
-explain = t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 ).explain( true );
-assert.eq( 1, explain.n );
-
-// The expected first result now comes from the first interval.
-t.remove( { b:0 } );
-assert.eq( 1, simpleQueryWithLimit( -1 )[ 0 ].b );
-
-// With three intervals.
-
-function inThreeIntervalQueryWithLimit( limit ) {
- return t.find( { a:{ $in: [ 1, 2, 3 ] } } ).sort( { b:1 } ).hint( { a:1, b:1 } ).limit( limit );
-}
-
-assert.eq( 1, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
-assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 2, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
-t.save( { a:3, b:0 } );
-assert.eq( 0, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
-assert.eq( 0, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
-
-// The index is multikey.
-t.remove({});
-t.save( { a:1, b:[ 0, 1, 2 ] } );
-t.save( { a:2, b:[ 0, 1, 2 ] } );
-t.save( { a:1, b:5 } );
-assert.eq( 3, simpleQueryWithLimit( -3 ).itcount() );
-
-// The index ordering is reversed.
-resetCollection();
-t.ensureIndex( { a:1, b:-1 } );
-
-// The sort order is consistent with the index order.
-assert.eq( 5, simpleQuery( {}, { b:-1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
-
-// The sort order is the reverse of the index order.
-assert.eq( 0, simpleQuery( {}, { b:1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
-
-// An equality constraint precedes the $in constraint.
-t.drop();
-t.ensureIndex( { a:1, b:1, c:1 } );
-t.save( { a:0, b:0, c:-1 } );
-t.save( { a:0, b:2, c:1 } );
-t.save( { a:1, b:1, c:1 } );
-t.save( { a:1, b:1, c:2 } );
-t.save( { a:1, b:1, c:3 } );
-t.save( { a:1, b:2, c:4 } );
-t.save( { a:1, b:2, c:5 } );
-t.save( { a:1, b:2, c:0 } );
-
-function eqInQueryWithLimit( limit ) {
- return t.find( { a:1, b:{ $in:[ 1, 2 ] } } ).sort( { c: 1 } ).hint( { a:1, b:1, c:1 } ).
- limit( limit );
-}
-
-function andEqInQueryWithLimit( limit ) {
- return t.find( { $and:[ { a:1 }, { b:{ $in:[ 1, 2 ] } } ] } ).sort( { c: 1 } ).
- hint( { a:1, b:1, c:1 } ).limit( limit );
-}
-
-// The limit is -1.
-assert.eq( 0, eqInQueryWithLimit( -1 )[ 0 ].c );
-assert.eq( 0, andEqInQueryWithLimit( -1 )[ 0 ].c );
-
-// The limit is -2.
-assert.eq( 0, eqInQueryWithLimit( -2 )[ 0 ].c );
-assert.eq( 1, eqInQueryWithLimit( -2 )[ 1 ].c );
-assert.eq( 0, andEqInQueryWithLimit( -2 )[ 0 ].c );
-assert.eq( 1, andEqInQueryWithLimit( -2 )[ 1 ].c );
-
-function inQueryWithLimit( limit, sort ) {
- sort = sort || { b:1 };
- return t.find( { a:{ $in:[ 0, 1 ] } } ).sort( sort ).hint( { a:1, b:1, c:1 } ).limit( limit );
-}
-
-// The index has two suffix fields unconstrained by the query.
-assert.eq( 0, inQueryWithLimit( -1 )[ 0 ].b );
-
-// The index has two ordered suffix fields unconstrained by the query.
-assert.eq( 0, inQueryWithLimit( -1, { b:1, c:1 } )[ 0 ].b );
-
-// The index has two ordered suffix fields unconstrained by the query and the limit is -2.
-assert.eq( 0, inQueryWithLimit( -2, { b:1, c:1 } )[ 0 ].b );
-assert.eq( 1, inQueryWithLimit( -2, { b:1, c:1 } )[ 1 ].b );
diff --git a/jstests/splitvector.js b/jstests/splitvector.js
deleted file mode 100644
index d239625de67..00000000000
--- a/jstests/splitvector.js
+++ /dev/null
@@ -1,309 +0,0 @@
-// -------------------------
-// SPLITVECTOR TEST UTILS
-// -------------------------
-
-// -------------------------
-// assertChunkSizes verifies that a given 'splitVec' divides the 'test.jstest_splitvector'
-// collection in 'maxChunkSize' approximately-sized chunks. Its asserts fail otherwise.
-// @param splitVec: an array with keys for field 'x'
-// e.g. [ { x : 1927 }, { x : 3855 }, ...
-// @param numDocs: domain of 'x' field
-// e.g. 20000
-// @param maxChunkSize is in MBs.
-//
-assertChunkSizes = function ( splitVec , numDocs , maxChunkSize , msg ){
- splitVec = [{ x: -1 }].concat( splitVec );
- splitVec.push( { x: numDocs+1 } );
- for ( i=0; i<splitVec.length-1; i++) {
- min = splitVec[i];
- max = splitVec[i+1];
- size = db.runCommand( { datasize: "test.jstests_splitvector" , min: min , max: max } ).size;
-
- // It is okay for the last chunk to be smaller. A collection's size does not
- // need to be exactly a multiple of maxChunkSize.
- if ( i < splitVec.length - 2 )
- assert.close( maxChunkSize , size , "A"+i , -3 );
- else
- assert.gt( maxChunkSize , size , "A"+i , msg + "b" );
- }
-}
-
-// Takes two documents and asserts that both contain exactly the same set of field names.
-// This is useful for checking that splitPoints have the same format as the original key pattern,
-// even when sharding on a prefix key.
-// Not very efficient, so only call when # of field names is small
-var assertFieldNamesMatch = function( splitPoint , keyPattern ){
- for ( var p in splitPoint ) {
- if( splitPoint.hasOwnProperty( p ) ) {
- assert( keyPattern.hasOwnProperty( p ) , "property " + p + " not in keyPattern" );
- }
- }
- for ( var p in keyPattern ) {
- if( keyPattern.hasOwnProperty( p ) ){
- assert( splitPoint.hasOwnProperty( p ) , "property " + p + " not in splitPoint" );
- }
- }
-}
-
-// turn off powerOf2, this test checks regular allocation
-var resetCollection = function() {
- f.drop();
- db.createCollection(f.getName(), {usePowerOf2Sizes: false});
-}
-
-// -------------------------
-// TESTS START HERE
-// -------------------------
-f = db.jstests_splitvector;
-resetCollection();
-
-// -------------------------
-// Case 1: missing parameters
-
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" } ).ok , "1a" );
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , maxChunkSize: 1} ).ok , "1b" );
-
-
-// -------------------------
-// Case 2: missing index
-
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).ok , "2");
-
-
-// -------------------------
-// Case 3: empty collection
-
-f.ensureIndex( { x: 1} );
-assert.eq( [], db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).splitKeys , "3");
-
-
-// -------------------------
-// Case 4: uniform collection
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case4 = function() {
- // Get baseline document size
- filler = "";
- while( filler.length < 500 ) filler += "a";
- f.save( { x: 0, y: filler } );
- docSize = db.runCommand( { datasize: "test.jstests_splitvector" } ).size;
- assert.gt( docSize, 500 , "4a" );
-
- // Fill collection and get split vector for 1MB maxChunkSize
- numDocs = 20000;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
- }
- db.getLastError();
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
-
- // splitVector aims at getting half-full chunks after split
- factor = 0.5;
-
- assert.eq( true , res.ok , "4b" );
- assert.close( numDocs*docSize / ((1<<20) * factor), res.splitKeys.length , "num split keys" , -1 );
- assertChunkSizes( res.splitKeys , numDocs, (1<<20) * factor , "4d" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
-}
-case4();
-
-// -------------------------
-// Case 5: limit number of split points
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case5 = function() {
- // Fill collection and get split vector for 1MB maxChunkSize
- numDocs = 10000;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
- }
- db.getLastError();
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxSplitPoints: 1} );
-
- assert.eq( true , res.ok , "5a" );
- assert.eq( 1 , res.splitKeys.length , "5b" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
-}
-case5();
-
-// -------------------------
-// Case 6: limit number of objects in a chunk
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case6 = function() {
- // Fill collection and get split vector for 1MB maxChunkSize
- numDocs = 10000;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
- }
- db.getLastError();
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxChunkObjects: 500} );
-
- assert.eq( true , res.ok , "6a" );
- assert.eq( 19 , res.splitKeys.length , "6b" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
-}
-case6();
-
-// -------------------------
-// Case 7: enough occurances of min key documents to pass the chunk limit
-// [1111111111111111,2,3)
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case7 = function() {
- // Fill collection and get split vector for 1MB maxChunkSize
- numDocs = 2100;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: 1, y: filler } );
- }
-
- for( i=1; i<10; i++ ){
- f.save( { x: 2, y: filler } );
- }
- db.getLastError();
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
-
- assert.eq( true , res.ok , "7a" );
- assert.eq( 2 , res.splitKeys[0].x, "7b");
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
-}
-case7();
-
-// -------------------------
-// Case 8: few occurrances of min key, and enough of some other that we cannot split it
-// [1, 22222222222222, 3)
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case8 = function() {
- for( i=1; i<10; i++ ){
- f.save( { x: 1, y: filler } );
- }
-
- numDocs = 2100;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: 2, y: filler } );
- }
-
- for( i=1; i<10; i++ ){
- f.save( { x: 3, y: filler } );
- }
-
- db.getLastError();
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
-
- assert.eq( true , res.ok , "8a" );
- assert.eq( 2 , res.splitKeys.length , "8b" );
- assert.eq( 2 , res.splitKeys[0].x , "8c" );
- assert.eq( 3 , res.splitKeys[1].x , "8d" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
-}
-case8();
-
-// -------------------------
-// Case 9: splitVector "force" mode, where we split (possible small) chunks in the middle
-//
-
-resetCollection();
-f.ensureIndex( { x: 1 } );
-
-var case9 = function() {
- f.save( { x: 1 } );
- f.save( { x: 2 } );
- f.save( { x: 3 } );
- db.getLastError();
-
- assert.eq( 3 , f.count() );
- print( f.getFullName() )
-
- res = db.runCommand( { splitVector: f.getFullName() , keyPattern: {x:1} , force : true } );
-
- assert.eq( true , res.ok , "9a" );
- assert.eq( 1 , res.splitKeys.length , "9b" );
- assert.eq( 2 , res.splitKeys[0].x , "9c" );
-
- if ( db.runCommand( "isMaster" ).msg != "isdbgrid" ) {
- res = db.adminCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , force : true } );
-
- assert.eq( true , res.ok , "9a: " + tojson(res) );
- assert.eq( 1 , res.splitKeys.length , "9b: " + tojson(res) );
- assert.eq( 2 , res.splitKeys[0].x , "9c: " + tojson(res) );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
- }
- }
-}
-case9();
-
-// -------------------------
-// Repeat all cases using prefix shard key.
-//
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case4();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case4();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case5();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case5();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case6();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case6();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case7();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case7();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case8();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case8();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
-case9();
-
-resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
-case9();
-
-print("PASSED");
diff --git a/jstests/stages_and_hash.js b/jstests/stages_and_hash.js
deleted file mode 100644
index 42ae0c8e34d..00000000000
--- a/jstests/stages_and_hash.js
+++ /dev/null
@@ -1,42 +0,0 @@
-t = db.stages_and_hashed;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i, baz: i});
-}
-
-t.ensureIndex({foo: 1})
-t.ensureIndex({bar: 1})
-t.ensureIndex({baz: 1})
-
-// Scan foo <= 20
-ixscan1 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{foo: 1},
- startKey: {"": 20}, endKey: {},
- endKeyInclusive: true, direction: -1}}};
-
-// Scan bar >= 40
-ixscan2 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{bar: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
-
-// bar = 50 - foo
-// Intersection is (foo=0 bar=50, foo=1 bar=49, ..., foo=10 bar=40)
-andix1ix2 = {andHash: {args: { nodes: [ixscan1, ixscan2]}}}
-res = db.runCommand({stageDebug: andix1ix2});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 11);
-
-// This should raise an error as we can't filter on baz since we haven't done a fetch and it's not
-// in the index data.
-andix1ix2badfilter = {andHash: {filter: {baz: 5}, args: {nodes: [ixscan1, ixscan2]}}};
-res = db.runCommand({stageDebug: andix1ix2badfilter});
-assert.eq(res.ok, 0);
-
-// Filter predicates from 2 indices. Tests that we union the idx info.
-andix1ix2filter = {andHash: {filter: {bar: {$in: [45, 46, 48]},
- foo: {$in: [4,5,6]}},
- args: {nodes: [ixscan1, ixscan2]}}};
-res = db.runCommand({stageDebug: andix1ix2filter});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 2);
diff --git a/jstests/stages_and_sorted.js b/jstests/stages_and_sorted.js
deleted file mode 100644
index fd96ab24153..00000000000
--- a/jstests/stages_and_sorted.js
+++ /dev/null
@@ -1,49 +0,0 @@
-t = db.stages_and_sorted;
-t.drop();
-
-var N = 10;
-for (var i = 0; i < N; ++i) {
- // These will show up in the index scans below but must not be outputted in the and.
- t.insert({foo: 1});
- t.insert({foo: 1, bar: 1});
- t.insert({baz: 12});
- t.insert({bar: 1});
- // This is the only thing that should be outputted in the and.
- t.insert({foo: 1, bar:1, baz: 12});
- t.insert({bar: 1});
- t.insert({bar:1, baz: 12})
- t.insert({baz: 12});
- t.insert({foo: 1, baz: 12});
- t.insert({baz: 12});
-}
-
-t.ensureIndex({foo: 1});
-t.ensureIndex({bar: 1});
-t.ensureIndex({baz: 1});
-
-// Scan foo == 1
-ixscan1 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{foo: 1},
- startKey: {"": 1}, endKey: {"": 1},
- endKeyInclusive: true, direction: 1}}};
-
-// Scan bar == 1
-ixscan2 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{bar: 1},
- startKey: {"": 1}, endKey: {"": 1},
- endKeyInclusive: true, direction: 1}}};
-
-// Scan baz == 12
-ixscan3 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{baz: 1},
- startKey: {"": 12}, endKey: {"": 12},
- endKeyInclusive: true, direction: 1}}};
-
-// Intersect foo==1 with bar==1 with baz==12.
-andix1ix2 = {andSorted: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
-res = db.runCommand({stageDebug: andix1ix2});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, N);
-
-// Might as well make sure that hashed does the same thing.
-andix1ix2hash = {andHash: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
-res = db.runCommand({stageDebug: andix1ix2hash});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, N);
diff --git a/jstests/stages_collection_scan.js b/jstests/stages_collection_scan.js
deleted file mode 100644
index d7de30cf8e7..00000000000
--- a/jstests/stages_collection_scan.js
+++ /dev/null
@@ -1,43 +0,0 @@
-// Test basic query stage collection scan functionality.
-t = db.stages_collection_scan;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i});
-}
-
-forward = {cscan: {args: {name: "stages_collection_scan", direction: 1}}}
-res = db.runCommand({stageDebug: forward});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, N);
-assert.eq(res.results[0].foo, 0);
-assert.eq(res.results[49].foo, 49);
-
-// And, backwards.
-backward = {cscan: {args: {name: "stages_collection_scan", direction: -1}}}
-res = db.runCommand({stageDebug: backward});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, N);
-assert.eq(res.results[0].foo, 49);
-assert.eq(res.results[49].foo, 0);
-
-forwardFiltered = {cscan: {args: {name: "stages_collection_scan", direction: 1},
- filter: {foo: {$lt: 25}}}}
-res = db.runCommand({stageDebug: forwardFiltered});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 25);
-assert.eq(res.results[0].foo, 0);
-assert.eq(res.results[24].foo, 24);
-
-backwardFiltered = {cscan: {args: {name: "stages_collection_scan", direction: -1},
- filter: {foo: {$lt: 25}}}}
-res = db.runCommand({stageDebug: backwardFiltered});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 25);
-assert.eq(res.results[0].foo, 24);
-assert.eq(res.results[24].foo, 0);
diff --git a/jstests/stages_fetch.js b/jstests/stages_fetch.js
deleted file mode 100644
index 3e2c01df91a..00000000000
--- a/jstests/stages_fetch.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Test basic fetch functionality.
-t = db.stages_fetch;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i, baz: i});
-}
-
-t.ensureIndex({foo: 1});
-
-// 20 <= foo <= 30
-// bar == 25 (not covered, should error.)
-ixscan1 = {ixscan: {args:{name: "stages_fetch", keyPattern:{foo:1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1},
- filter: {bar: 25}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(db.getLastError());
-assert.eq(res.ok, 0);
-
-// Now, add a fetch. We should be able to filter on the non-covered field since we fetched the obj.
-ixscan2 = {ixscan: {args:{name: "stages_fetch", keyPattern:{foo:1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1}}}
-fetch = {fetch: {args: {node: ixscan2}, filter: {bar: 25}}}
-res = db.runCommand({stageDebug: fetch});
-printjson(res);
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 1);
diff --git a/jstests/stages_ixscan.js b/jstests/stages_ixscan.js
deleted file mode 100644
index a7cd6bedc3a..00000000000
--- a/jstests/stages_ixscan.js
+++ /dev/null
@@ -1,76 +0,0 @@
-// Test basic query stage index scan functionality.
-t = db.stages_ixscan;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i, baz: i});
-}
-
-t.ensureIndex({foo: 1})
-t.ensureIndex({foo: 1, baz: 1});
-
-// foo <= 20
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 21);
-
-// 20 <= foo < 30
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: false,
- direction: 1}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 10);
-
-// 20 <= foo <= 30
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 11);
-
-// 20 <= foo <= 30
-// foo == 25
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1},
- filter: {foo: 25}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 1);
-
-// 20 <= foo <= 30
-// baz == 25 (in index so we can match against it.)
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo:1, baz: 1},
- startKey: {"": 20, "":MinKey},
- endKey: {"" : 30, "":MaxKey}, endKeyInclusive: true,
- direction: 1},
- filter: {baz: 25}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 1);
-
-// 20 <= foo <= 30
-// bar == 25 (not covered, should error.)
-ixscan1 = {ixscan: {args:{name: "stages_ixscan", keyPattern:{foo:1, baz: 1},
- startKey: {"": 20, "":MinKey},
- endKey: {"" : 30, "":MaxKey}, endKeyInclusive: true,
- direction: 1},
- filter: {bar: 25}}};
-res = db.runCommand({stageDebug: ixscan1});
-assert(db.getLastError());
-assert.eq(res.ok, 0);
diff --git a/jstests/stages_limit_skip.js b/jstests/stages_limit_skip.js
deleted file mode 100644
index 9441e4cd65b..00000000000
--- a/jstests/stages_limit_skip.js
+++ /dev/null
@@ -1,34 +0,0 @@
-// Test limit and skip
-t = db.stages_limit_skip;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i, baz: i});
-}
-
-t.ensureIndex({foo: 1})
-
-// foo <= 20, decreasing
-// Limit of 5 results.
-ixscan1 = {ixscan: {args:{name: "stages_limit_skip", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
-limit1 = {limit: {args: {node: ixscan1, num: 5}}}
-res = db.runCommand({stageDebug: limit1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 5);
-assert.eq(res.results[0].foo, 20);
-assert.eq(res.results[4].foo, 16);
-
-// foo <= 20, decreasing
-// Skip 5 results.
-skip1 = {skip: {args: {node: ixscan1, num: 5}}}
-res = db.runCommand({stageDebug: skip1});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 16);
-assert.eq(res.results[0].foo, 15);
-assert.eq(res.results[res.results.length - 1].foo, 0);
diff --git a/jstests/stages_mergesort.js b/jstests/stages_mergesort.js
deleted file mode 100644
index 394d60b5b20..00000000000
--- a/jstests/stages_mergesort.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// Test query stage merge sorting.
-t = db.stages_mergesort;
-t.drop();
-
-var N = 10;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: 1, bar: N - i - 1});
- t.insert({baz: 1, bar: i})
-}
-
-t.ensureIndex({foo: 1, bar:1})
-t.ensureIndex({baz: 1, bar:1})
-
-// foo == 1
-// We would (internally) use "": MinKey and "": MaxKey for the bar index bounds.
-ixscan1 = {ixscan: {args:{name: "stages_mergesort", keyPattern:{foo: 1, bar:1},
- startKey: {"": 1, "": 0},
- endKey: {"": 1, "": 100000}, endKeyInclusive: true,
- direction: 1}}};
-// baz == 1
-ixscan2 = {ixscan: {args:{name: "stages_mergesort", keyPattern:{baz: 1, bar:1},
- startKey: {"": 1, "": 0},
- endKey: {"": 1, "": 100000}, endKeyInclusive: true,
- direction: 1}}};
-
-mergesort = {mergeSort: {args: {nodes: [ixscan1, ixscan2], pattern: {bar: 1}}}};
-res = db.runCommand({stageDebug: mergesort});
-assert(!db.getLastError());
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 2 * N);
-assert.eq(res.results[0].bar, 0);
-assert.eq(res.results[2 * N - 1].bar, N - 1);
diff --git a/jstests/stages_or.js b/jstests/stages_or.js
deleted file mode 100644
index bb0e02b11d4..00000000000
--- a/jstests/stages_or.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Test basic OR functionality
-t = db.stages_or;
-t.drop();
-
-var N = 50;
-for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i, baz: i});
-}
-
-t.ensureIndex({foo: 1})
-t.ensureIndex({bar: 1})
-t.ensureIndex({baz: 1})
-
-// baz >= 40
-ixscan1 = {ixscan: {args:{name: "stages_or", keyPattern:{baz: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
-// foo >= 40
-ixscan2 = {ixscan: {args:{name: "stages_or", keyPattern:{foo: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
-
-// OR of baz and foo. Baz == foo and we dedup.
-orix1ix2 = {or: {args: {nodes: [ixscan1, ixscan2], dedup:true}}};
-res = db.runCommand({stageDebug: orix1ix2});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 10);
-
-// No deduping, 2x the results.
-orix1ix2nodd = {or: {args: {nodes: [ixscan1, ixscan2], dedup:false}}};
-res = db.runCommand({stageDebug: orix1ix2nodd});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 20);
diff --git a/jstests/stages_sort.js b/jstests/stages_sort.js
deleted file mode 100644
index f7200cbac03..00000000000
--- a/jstests/stages_sort.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Test query stage sorting.
-if (false) {
- t = db.stages_sort;
- t.drop();
-
- var N = 50;
- for (var i = 0; i < N; ++i) {
- t.insert({foo: i, bar: N - i});
- }
-
- t.ensureIndex({foo: 1})
-
- // Foo <= 20, descending.
- ixscan1 = {ixscan: {args:{name: "stages_sort", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
-
- // Sort with foo ascending.
- sort1 = {sort: {args: {node: ixscan1, pattern: {foo: 1}}}};
- res = db.runCommand({stageDebug: sort1});
- assert(!db.getLastError());
- assert.eq(res.ok, 1);
- assert.eq(res.results.length, 21);
- assert.eq(res.results[0].foo, 0);
- assert.eq(res.results[20].foo, 20);
-
- // Sort with a limit.
- //sort2 = {sort: {args: {node: ixscan1, pattern: {foo: 1}, limit: 2}}};
- //res = db.runCommand({stageDebug: sort2});
- //assert(!db.getLastError());
- //assert.eq(res.ok, 1);
- //assert.eq(res.results.length, 2);
- //assert.eq(res.results[0].foo, 0);
- //assert.eq(res.results[1].foo, 1);
-}
diff --git a/jstests/stages_text.js b/jstests/stages_text.js
deleted file mode 100644
index 8407ffe1e14..00000000000
--- a/jstests/stages_text.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// Test very basic functionality of text stage
-
-t = db.stages_text;
-t.drop();
-t.save({x: "az b x"})
-
-t.ensureIndex({x: "text"})
-
-// We expect to retrieve 'b'
-res = db.runCommand({stageDebug: {text: {args: {name: "test.stages_text", search: "b"}}}});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 1);
-
-// I have not been indexed yet.
-res = db.runCommand({stageDebug: {text: {args: {name: "test.stages_text", search: "hari"}}}});
-assert.eq(res.ok, 1);
-assert.eq(res.results.length, 0);
diff --git a/jstests/stats.js b/jstests/stats.js
deleted file mode 100644
index 08a74a00fb7..00000000000
--- a/jstests/stats.js
+++ /dev/null
@@ -1,23 +0,0 @@
-
-var statsDB = db.getSiblingDB( "stats" );
-statsDB.dropDatabase();
-var t = statsDB.stats1;
-
-t.save( { a : 1 } );
-
-assert.lt( 0 , t.dataSize() , "A" );
-assert.lt( t.dataSize() , t.storageSize() , "B" );
-assert.lt( 0 , t.totalIndexSize() , "C" );
-
-var stats = statsDB.stats();
-assert.gt( stats.fileSize, 0 );
-assert.eq( stats.dataFileVersion.major, 4 );
-assert.eq( stats.dataFileVersion.minor, 5 );
-
-// test empty database; should be no dataFileVersion
-statsDB.dropDatabase();
-var statsEmptyDB = statsDB.stats();
-assert.eq( statsEmptyDB.fileSize, 0 );
-assert.eq( {}, statsEmptyDB.dataFileVersion );
-
-statsDB.dropDatabase();
diff --git a/jstests/storageDetailsCommand.js b/jstests/storageDetailsCommand.js
deleted file mode 100644
index 8b144153c1b..00000000000
--- a/jstests/storageDetailsCommand.js
+++ /dev/null
@@ -1,99 +0,0 @@
-db.jstests_commands.drop();
-db.createCollection("jstests_commands");
-
-t = db.jstests_commands;
-
-for (var i = 0; i < 3000; ++i) {
- t.insert({i: i, d: i % 13});
-}
-
-function test() {
- var result = t.diskStorageStats({numberOfSlices: 100});
- if (result["bad cmd"]) {
- print("storageDetails command not available: skipping");
- return;
- }
-
- assert.commandWorked(result);
-
- function checkDiskStats(data) {
- assert(isNumber(data.extentHeaderBytes));
- assert(isNumber(data.recordHeaderBytes));
- assert(isNumber(data.numEntries));
- assert(data.bsonBytes instanceof NumberLong);
- assert(data.recBytes instanceof NumberLong);
- assert(data.onDiskBytes instanceof NumberLong);
- assert(isNumber(data.outOfOrderRecs));
- assert(isNumber(data.characteristicCount));
- assert(isNumber(data.characteristicAvg));
- assert(data.freeRecsPerBucket instanceof Array);
- }
-
- assert(result.extents && result.extents instanceof Array);
-
- var extents = result.extents;
-
- for (var i = 0; i < extents.length; ++i) {
- assert(isObject(extents[i]));
- assert.neq(extents[i], null);
- assert(extents[i].range instanceof Array);
- assert.eq(extents[i].range.length, 2);
- assert.eq(extents[i].isCapped, false);
- checkDiskStats(extents[i]);
- assert(extents[i].slices instanceof Array);
- for (var c = 0; c < extents[i].slices[c]; ++c) {
- assert(isObject(extents[i].slices[c]));
- assert.neq(extents[i].slices[c], null);
- checkStats(extents[i].slices[c]);
- }
- }
-
- result = t.pagesInRAM({numberOfSlices: 100});
- assert(result.ok);
-
- assert(result.extents instanceof Array);
- var extents = result.extents;
-
- for (var i = 0; i < result.extents.length; ++i) {
- assert(isObject(extents[i]));
- assert.neq(extents[i], null);
- assert(isNumber(extents[i].pageBytes));
- assert(isNumber(extents[i].onDiskBytes));
- assert(isNumber(extents[i].inMem));
-
- assert(extents[i].slices instanceof Array);
- for (var c = 0; c < extents[i].slices.length; ++c) {
- assert(isNumber(extents[i].slices[c]));
- }
- }
-
- function checkErrorConditions(helper) {
- var result = helper.apply(t, [{extent: 'a'}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/extent.*must be a number/));
-
- result = helper.apply(t, [{range: [2, 4]}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/range is only allowed.*extent/));
-
- result = helper.apply(t, [{extent: 3, range: [3, 'a']}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/must be an array.*numeric elements/));
-
- result = helper.apply(t, [{granularity: 'a'}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/granularity.*number/));
-
- result = helper.apply(t, [{numberOfSlices: 'a'}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/numberOfSlices.*number/));
-
- result = helper.apply(t, [{extent: 100}]);
- assert.commandFailed(result);
- assert(result.errmsg.match(/extent.*does not exist/));
- }
-
- checkErrorConditions(t.diskStorageStats);
- checkErrorConditions(t.pagesInRAM);
-}
-test();
diff --git a/jstests/storefunc.js b/jstests/storefunc.js
deleted file mode 100644
index f5d1c3be48a..00000000000
--- a/jstests/storefunc.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// Use a private sister database to avoid conflicts with other tests that use system.js
-var testdb = db.getSisterDB("storefunc");
-
-s = testdb.system.js;
-s.remove({});
-assert.eq( 0 , s.count() , "setup - A" );
-
-s.save( { _id : "x" , value : "3" } );
-assert.isnull( testdb.getLastError() , "setup - B" );
-assert.eq( 1 , s.count() , "setup - C" );
-
-s.remove( { _id : "x" } );
-assert.eq( 0 , s.count() , "setup - D" );
-s.save( { _id : "x" , value : "4" } );
-assert.eq( 1 , s.count() , "setup - E" );
-
-assert.eq( 4 , s.findOne( { _id : "x" } ).value , "E2 " );
-
-assert.eq( 4 , s.findOne().value , "setup - F" );
-s.update( { _id : "x" } , { $set : { value : 5 } } );
-assert.eq( 1 , s.count() , "setup - G" );
-assert.eq( 5 , s.findOne().value , "setup - H" );
-
-assert.eq( 5 , testdb.eval( "return x" ) , "exec - 1 " );
-
-s.update( { _id : "x" } , { $set : { value : 6 } } );
-assert.eq( 1 , s.count() , "setup2 - A" );
-assert.eq( 6 , s.findOne().value , "setup - B" );
-
-assert.eq( 6 , testdb.eval( "return x" ) , "exec - 2 " );
-
-
-
-s.insert( { _id : "bar" , value : function( z ){ return 17 + z; } } );
-assert.eq( 22 , testdb.eval( "return bar(5);" ) , "exec - 3 " );
-
-assert( s.getIndexKeys().length > 0 , "no indexes" );
-assert( s.getIndexKeys()[0]._id , "no _id index" );
-
-assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C1" );
-s.save( { _id : "zzz" , value : 5 } )
-assert.eq( "number" , testdb.eval( function(){ return typeof(zzz); } ) , "C2" );
-s.remove( { _id : "zzz" } );
-assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C3" );
diff --git a/jstests/string_with_nul_bytes.js b/jstests/string_with_nul_bytes.js
deleted file mode 100644
index a1f6e395dd2..00000000000
--- a/jstests/string_with_nul_bytes.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// SERVER-6649 - issues round-tripping strings with embedded NUL bytes
-
-t = db.string_with_nul_bytes.js;
-t.drop();
-
-string = "string with a NUL (\0) byte";
-t.insert({str:string});
-assert.eq(t.findOne().str, string);
-assert.eq(t.findOne().str.length, string.length); // just to be sure
diff --git a/jstests/sub1.js b/jstests/sub1.js
deleted file mode 100644
index 9e643f811fd..00000000000
--- a/jstests/sub1.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// sub1.js
-
-t = db.sub1;
-t.drop();
-
-x = { a : 1 , b : { c : { d : 2 } } }
-
-t.save( x );
-
-y = t.findOne();
-
-assert.eq( 1 , y.a );
-assert.eq( 2 , y.b.c.d );
-print( tojson( y ) );
diff --git a/jstests/temp_cleanup.js b/jstests/temp_cleanup.js
deleted file mode 100644
index e827083d605..00000000000
--- a/jstests/temp_cleanup.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-mydb = db.getSisterDB( "temp_cleanup_test" )
-
-t = mydb.tempCleanup
-t.drop()
-
-t.insert( { x : 1 } )
-
-res = t.mapReduce( function(){ emit(1,1); } , function(){ return 1; } , "xyz" );
-printjson( res );
-
-assert.eq( 1 , t.count() , "A1" )
-assert.eq( 1 , mydb[res.result].count() , "A2" )
-
-mydb.dropDatabase()
-
diff --git a/jstests/testminmax.js b/jstests/testminmax.js
deleted file mode 100644
index 803f1b48a0b..00000000000
--- a/jstests/testminmax.js
+++ /dev/null
@@ -1,14 +0,0 @@
-t = db.minmaxtest;
-t.drop();
-t.insert({"_id" : "IBM.N|00001264779918428889", "DESCRIPTION" : { "n" : "IBMSTK2", "o" : "IBM STK", "s" : "changed" } });
-t.insert({ "_id" : "VOD.N|00001264779918433344", "COMPANYNAME" : { "n" : "Vodafone Group PLC 2", "o" : "Vodafone Group PLC", "s" : "changed" } });
-t.insert({ "_id" : "IBM.N|00001264779918437075", "DESCRIPTION" : { "n" : "IBMSTK3", "o" : "IBM STK2", "s" : "changed" } });
-t.insert({ "_id" : "VOD.N|00001264779918441426", "COMPANYNAME" : { "n" : "Vodafone Group PLC 3", "o" : "Vodafone Group PLC 2", "s" : "changed" } });
-
-// temp:
-printjson( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).toArray() );
-
-// this should be 2!! add assertion when fixed
-// http://jira.mongodb.org/browse/SERVER-675
-print( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).count() );
-
diff --git a/jstests/touch1.js b/jstests/touch1.js
deleted file mode 100644
index f7a0878f2e6..00000000000
--- a/jstests/touch1.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.touch1;
-t.drop();
-
-t.insert( { x : 1 } );
-t.ensureIndex( { x : 1 } );
-
-res = t.runCommand( "touch" );
-assert( !res.ok, tojson( res ) );
-
-res = t.runCommand( "touch", { data : true, index : true } );
-assert.eq( 1, res.data.numRanges, tojson( res ) );
-assert.eq( 2, res.indexes.numRanges, tojson( res ) );
-
-
diff --git a/jstests/ts1.js b/jstests/ts1.js
deleted file mode 100644
index 30f7882e863..00000000000
--- a/jstests/ts1.js
+++ /dev/null
@@ -1,38 +0,0 @@
-t = db.ts1
-t.drop()
-
-N = 20
-
-for ( i=0; i<N; i++ ){
- t.insert( { _id : i , x : new Timestamp() } )
- sleep( 100 )
-}
-
-function get(i){
- return t.findOne( { _id : i } ).x;
-}
-
-function cmp( a , b ){
- if ( a.t < b.t )
- return -1;
- if ( a.t > b.t )
- return 1;
-
- return a.i - b.i;
-}
-
-for ( i=0; i<N-1; i++ ){
- a = get(i);
- b = get(i+1);
- //print( tojson(a) + "\t" + tojson(b) + "\t" + cmp(a,b) );
- assert.gt( 0 , cmp( a , b ) , "cmp " + i )
-}
-
-assert.eq( N , t.find( { x : { $type : 17 } } ).itcount() , "B1" )
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).itcount() , "B2" )
-
-t.insert( { _id : 100 , x : new Timestamp( 123456 , 50 ) } )
-x = t.findOne( { _id : 100 } ).x
-assert.eq( 123456 , x.t , "C1" )
-assert.eq( 50 , x.i , "C2" )
-
diff --git a/jstests/type1.js b/jstests/type1.js
deleted file mode 100644
index 518e36728e7..00000000000
--- a/jstests/type1.js
+++ /dev/null
@@ -1,24 +0,0 @@
-
-t = db.type1;
-t.drop();
-
-t.save( { x : 1.1 } );
-t.save( { x : "3" } );
-t.save( { x : "asd" } );
-t.save( { x : "foo" } );
-
-assert.eq( 4 , t.find().count() , "A1" );
-assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "A2" );
-assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "A3" );
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "A4" );
-assert.eq( 4 , t.find( { x : { $type : 1 } } ).explain().nscanned , "A5" );
-
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 4 , t.find().count() , "B1" );
-assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "B2" );
-assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "B3" );
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "B4" );
-assert.eq( 1 , t.find( { x : { $type : 1 } } ).explain().nscanned , "B5" );
-assert.eq( 1 , t.find( { x : { $regex:"f", $type : 2 } } ).count() , "B3" ); \ No newline at end of file
diff --git a/jstests/type2.js b/jstests/type2.js
deleted file mode 100644
index 820607e0b30..00000000000
--- a/jstests/type2.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// SERVER-1735 $type:10 matches null value, not missing value.
-
-t = db.jstests_type2;
-t.drop();
-
-t.save( {a:null} );
-t.save( {} );
-t.save( {a:'a'} );
-
-function test() {
- assert.eq( 2, t.count( {a:null} ) );
- assert.eq( 1, t.count( {a:{$type:10}} ) );
- assert.eq( 2, t.count( {a:{$exists:true}} ) );
- assert.eq( 1, t.count( {a:{$exists:false}} ) );
-}
-
-test();
-t.ensureIndex( {a:1} );
-test(); \ No newline at end of file
diff --git a/jstests/type3.js b/jstests/type3.js
deleted file mode 100644
index 82a8b8ae7fc..00000000000
--- a/jstests/type3.js
+++ /dev/null
@@ -1,68 +0,0 @@
-// Check query type bracketing SERVER-3222
-
-t = db.jstests_type3;
-t.drop();
-
-t.ensureIndex( {a:1} );
-
-// Type Object
-t.save( {a:{'':''}} );
-assert.eq( 1, t.find( {a:{$type:3}} ).hint( {a:1} ).itcount() );
-
-// Type Array
-t.remove({});
-t.save( {a:[['c']]} );
-assert.eq( 1, t.find( {a:{$type:4}} ).hint( {a:1} ).itcount() );
-
-// Type RegEx
-t.remove({});
-t.save( {a:/r/} );
-assert.eq( 1, t.find( {a:{$type:11}} ).hint( {a:1} ).itcount() );
-
-// Type jstNULL
-t.remove({});
-assert.eq( [[null,null]], t.find( {a:{$type:10}} ).hint( {a:1} ).explain().indexBounds.a );
-
-// Type Undefined
-t.remove({});
-// 'null' is the client friendly version of undefined.
-assert.eq( [[null,null]], t.find( {a:{$type:6}} ).hint( {a:1} ).explain().indexBounds.a );
-
-t.save( {a:undefined} );
-assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
-
-// This one won't be returned.
-t.save( {a:null} );
-assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
-
-t.remove({});
-// Type MinKey
-assert.eq( [[{$minElement:1},{$minElement:1}]], t.find( {a:{$type:-1}} ).hint( {a:1} ).explain().indexBounds.a );
-// Type MaxKey
-assert.eq( [[{$maxElement:1},{$maxElement:1}]], t.find( {a:{$type:127}} ).hint( {a:1} ).explain().indexBounds.a );
-
-// Type Timestamp
-t.remove({});
-t.save( {a:new Timestamp()} );
-assert.eq( 1, t.find( {a:{$type:17}} ).itcount() );
-if ( 0 ) { // SERVER-3304
-assert.eq( 0, t.find( {a:{$type:9}} ).itcount() );
-}
-
-// Type Date
-t.remove({});
-t.save( {a:new Date()} );
-if ( 0 ) { // SERVER-3304
-assert.eq( 0, t.find( {a:{$type:17}} ).itcount() );
-}
-assert.eq( 1, t.find( {a:{$type:9}} ).itcount() );
-
-// Type Code
-t.remove({});
-t.save( {a:function(){var a = 0;}} );
-assert.eq( 1, t.find( {a:{$type:13}} ).itcount() );
-
-// Type BinData
-t.remove({});
-t.save( {a:new BinData(0,'')} );
-assert.eq( 1, t.find( {a:{$type:5}} ).itcount() );
diff --git a/jstests/unique2.js b/jstests/unique2.js
deleted file mode 100644
index 0d3993630c3..00000000000
--- a/jstests/unique2.js
+++ /dev/null
@@ -1,112 +0,0 @@
-// Test unique and dropDups index options.
-
-function checkNprev( np ) {
- // getPrevError() is not available sharded.
- if ( typeof( myShardingTest ) == 'undefined' ) {
- assert.eq( np, db.getPrevError().nPrev );
- }
-}
-
-t = db.jstests_unique2;
-
-t.drop();
-
-/* test for good behavior when indexing multikeys */
-
-t.insert({k:3});
-t.insert({k:[2,3]});
-t.insert({k:[4,3]});
-t.insert({k:[4,3]}); // tests SERVER-4770
-
-t.ensureIndex({k:1}, {unique:true, dropDups:true});
-
-assert( t.count() == 1 ) ;
-assert( t.find().sort({k:1}).toArray().length == 1 ) ;
-assert( t.find().sort({k:1}).count() == 1 ) ;
-
-t.drop();
-
-/* same test wtih background:true*/
-
-t.insert({k:3});
-t.insert({k:[2,3]});
-t.insert({k:[4,3]});
-t.insert({k:[4,3]});
-
-t.ensureIndex({k:1}, {unique:true, dropDups:true, background:true});
-
-assert( t.count() == 1 ) ;
-assert( t.find().sort({k:1}).toArray().length == 1 ) ;
-assert( t.find().sort({k:1}).count() == 1 ) ;
-
-t.drop();
-
-/* */
-
-t.ensureIndex({k:1}, {unique:true});
-
-t.insert({k:3});
-t.insert({k:[2,3]});
-assert( db.getLastError() );
-t.insert({k:[4,3]});
-assert( db.getLastError() );
-
-assert( t.count() == 1 ) ;
-assert( t.find().sort({k:1}).toArray().length == 1 ) ;
-assert( t.find().sort({k:1}).count() == 1 ) ;
-
-t.dropIndexes();
-
-t.insert({k:[2,3]});
-t.insert({k:[4,3]});
-assert( t.count() == 3 ) ;
-
-// Trigger an error, so we can test n of getPrevError() later.
-t.update({ x : 1 }, { $invalid : true });
-assert.neq(null, db.getLastError());
-checkNprev( 1 );
-
-t.ensureIndex({k:1}, {unique:true, dropDups:true});
-// Check error flag was not set SERVER-2054.
-assert.eq(null, db.getLastError() );
-// Check that offset of previous error is correct.
-checkNprev( 2 );
-
-// Check the dups were dropped.
-assert( t.count() == 1 ) ;
-assert( t.find().sort({k:1}).toArray().length == 1 ) ;
-assert( t.find().sort({k:1}).count() == 1 ) ;
-
-// Check that a new conflicting insert will cause an error.
-t.insert({k:[2,3]});
-assert( db.getLastError() );
-
-t.drop();
-
-t.insert({k:3});
-t.insert({k:[2,3]});
-t.insert({k:[4,3]});
-assert( t.count() == 3 ) ;
-
-
-// Now try with a background index op.
-
-// Trigger an error, so we can test n of getPrevError() later.
-t.update({ x : 1 }, { $invalid : true });
-assert( db.getLastError() );
-checkNprev( 1 );
-
-t.ensureIndex({k:1}, {background:true, unique:true, dropDups:true});
-// Check error flag was not set SERVER-2054.
-assert( !db.getLastError() );
-// Check that offset of pervious error is correct.
-checkNprev( 2 );
-
-// Check the dups were dropped.
-assert( t.count() == 1 ) ;
-assert( t.find().sort({k:1}).toArray().length == 1 ) ;
-assert( t.find().sort({k:1}).count() == 1 ) ;
-
-// Check that a new conflicting insert will cause an error.
-t.insert({k:[2,3]});
-assert( db.getLastError() );
diff --git a/jstests/uniqueness.js b/jstests/uniqueness.js
deleted file mode 100644
index ce19ad08d82..00000000000
--- a/jstests/uniqueness.js
+++ /dev/null
@@ -1,58 +0,0 @@
-
-t = db.jstests_uniqueness;
-
-t.drop();
-
-// test uniqueness of _id
-
-t.save( { _id : 3 } );
-assert( !db.getLastError(), 1 );
-
-// this should yield an error
-t.insert( { _id : 3 } );
-assert( db.getLastError() , 2);
-assert( t.count() == 1, "hmmm");
-
-t.insert( { _id : 4, x : 99 } );
-assert( !db.getLastError() , 3);
-
-// this should yield an error
-t.update( { _id : 4 } , { _id : 3, x : 99 } );
-assert( db.getLastError() , 4);
-assert( t.findOne( {_id:4} ), 5 );
-
-// Check for an error message when we index and there are dups
-db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a:3});
-db.jstests_uniqueness2.insert({a:3});
-assert( db.jstests_uniqueness2.count() == 2 , 6) ;
-db.resetError();
-db.jstests_uniqueness2.ensureIndex({a:1}, true);
-assert( db.getLastError() , 7);
-assert( db.getLastError().match( /E11000/ ) );
-
-// Check for an error message when we index in the background and there are dups
-db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a:3});
-db.jstests_uniqueness2.insert({a:3});
-assert( db.jstests_uniqueness2.count() == 2 , 6) ;
-assert( !db.getLastError() );
-db.resetError();
-db.jstests_uniqueness2.ensureIndex({a:1}, {unique:true,background:true});
-assert( db.getLastError() , 7);
-assert( db.getLastError().match( /E11000/ ) );
-
-/* Check that if we update and remove _id, it gets added back by the DB */
-
-/* - test when object grows */
-t.drop();
-t.save( { _id : 'Z' } );
-t.update( {}, { k : 2 } );
-assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id" );
-
-/* - test when doesn't grow */
-t.drop();
-t.save( { _id : 'Z', k : 3 } );
-t.update( {}, { k : 2 } );
-assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id (2)" );
-
diff --git a/jstests/unset.js b/jstests/unset.js
deleted file mode 100644
index f3cdcf03deb..00000000000
--- a/jstests/unset.js
+++ /dev/null
@@ -1,19 +0,0 @@
-t = db.unset;
-t.drop();
-
-orig = { _id : 1, emb : {} };
-t.insert(orig);
-
-t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
-assert.eq( orig , t.findOne() , "A" );
-
-t.update( { _id : 1 }, { $set : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $set : { 'z' : 1 }});
-
-t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
-assert.eq( orig , t.findOne() , "B" ); // note that emb isn't removed
-
-t.update( { _id : 1 }, { $unset : { 'emb' : 1 }});
-assert.eq( {_id :1} , t.findOne() , "C" );
diff --git a/jstests/unset2.js b/jstests/unset2.js
deleted file mode 100644
index e1dc445fcb8..00000000000
--- a/jstests/unset2.js
+++ /dev/null
@@ -1,23 +0,0 @@
-t = db.unset2;
-t.drop();
-
-t.save( {a:["a","b","c","d"]} );
-t.update( {}, {$unset:{"a.3":1}} );
-assert.eq( ["a","b","c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.1":1}} );
-assert.eq( ["a",null,"c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.0":1}} );
-assert.eq( [null,null,"c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.4":1}} );
-assert.eq( [null,null,"c",null], t.findOne().a ); // no change
-
-t.drop();
-t.save( {a:["a","b","c","d","e"]} );
-t.update( {}, {$unset:{"a.2":1},$set:{"a.3":3,"a.4":4,"a.5":5}} );
-assert.eq( ["a","b",null,3,4,5], t.findOne().a );
-
-t.drop();
-t.save( {a:["a","b","c","d","e"]} );
-t.update( {}, {$unset:{"a.2":1},$set:{"a.2":4}} );
-assert( db.getLastError() );
-assert.eq( ["a","b","c","d","e"], t.findOne().a ); \ No newline at end of file
diff --git a/jstests/update.js b/jstests/update.js
deleted file mode 100644
index 37bf6378c64..00000000000
--- a/jstests/update.js
+++ /dev/null
@@ -1,40 +0,0 @@
-
-asdf = db.getCollection( "asdf" );
-asdf.drop();
-
-var txt = "asdf";
-for(var i=0; i<10; i++) {
- txt = txt + txt;
-}
-
-var iterations = _isWindows() ? 2500 : 5000
-
-// fill db
-for(var i=1; i<=iterations; i++) {
- var obj = {txt : txt};
- asdf.save(obj);
-
- var obj2 = {txt: txt, comments: [{num: i, txt: txt}, {num: [], txt: txt}, {num: true, txt: txt}]};
- asdf.update(obj, obj2);
-
- if(i%100 == 0) {
- var c = asdf.count();
- assert.eq(c , i);
- }
-}
-
-assert(asdf.validate().valid);
-
-var stats = db.runCommand({ collstats: "asdf" });
-
-// some checks. want to check that padding factor is working; in addition this lets us do a little basic
-// testing of the collstats command at the same time
-assert(stats.count == iterations);
-assert(stats.size < 140433012 * 5 && stats.size > 1000000);
-assert(stats.numExtents < 20);
-assert(stats.nindexes == 1);
-var pf = stats.paddingFactor;
-print("update.js padding factor: " + pf);
-assert(pf > 1.7 && pf <= 2);
-
-asdf.drop();
diff --git a/jstests/update2.js b/jstests/update2.js
deleted file mode 100644
index 654914c1f45..00000000000
--- a/jstests/update2.js
+++ /dev/null
@@ -1,18 +0,0 @@
-f = db.ed_db_update2;
-
-f.drop();
-f.save( { a: 4 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
-
-f.drop();
-f.save( { a: 4 } );
-f.ensureIndex( { a: 1 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
-
-// Verify that drop clears the index
-f.drop();
-f.save( { a: 4 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
diff --git a/jstests/update3.js b/jstests/update3.js
deleted file mode 100644
index 995c6e67b45..00000000000
--- a/jstests/update3.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// Update with mods corner cases.
-
-f = db.jstests_update3;
-
-f.drop();
-f.save( { a:1 } );
-f.update( {}, {$inc:{ a:1 }} );
-assert.eq( 2, f.findOne().a , "A" );
-
-f.drop();
-f.save( { a:{ b: 1 } } );
-f.update( {}, {$inc:{ "a.b":1 }} );
-assert.eq( 2, f.findOne().a.b , "B" );
-
-f.drop();
-f.save( { a:{ b: 1 } } );
-f.update( {}, {$set:{ "a.b":5 }} );
-assert.eq( 5, f.findOne().a.b , "C" );
-
-f.drop();
-f.save( {'_id':0} );
-f.update( {}, {$set:{'_id':5}} );
-assert.eq( 0, f.findOne()._id , "D" );
-
-f.drop();
-f.save({_id:1, a:1})
-f.update({}, {$unset:{"a":1, "b.c":1}})
-assert.docEq(f.findOne(), {_id:1}, "E") \ No newline at end of file
diff --git a/jstests/update4.js b/jstests/update4.js
deleted file mode 100644
index 1502f672a50..00000000000
--- a/jstests/update4.js
+++ /dev/null
@@ -1,33 +0,0 @@
-f = db.jstests_update4;
-f.drop();
-
-getLastError = function() {
- ret = db.runCommand( { getlasterror : 1 } );
-// printjson( ret );
- return ret;
-}
-
-f.save( {a:1} );
-f.update( {a:1}, {a:2} );
-assert.eq( true, getLastError().updatedExisting , "A" );
-assert.eq( 1, getLastError().n , "B" );
-f.update( {a:1}, {a:2} );
-assert.eq( false, getLastError().updatedExisting , "C" );
-assert.eq( 0, getLastError().n , "D" );
-
-f.update( {a:1}, {a:1}, true );
-assert.eq( false, getLastError().updatedExisting , "E" );
-assert.eq( 1, getLastError().n , "F" );
-f.update( {a:1}, {a:1}, true );
-assert.eq( true, getLastError().updatedExisting , "G" );
-assert.eq( 1, getLastError().n , "H" );
-assert.eq( true, db.getPrevError().updatedExisting , "I" );
-assert.eq( 1, db.getPrevError().nPrev , "J" );
-
-f.findOne();
-assert.eq( undefined, getLastError().updatedExisting , "K" );
-assert.eq( true, db.getPrevError().updatedExisting , "L" );
-assert.eq( 2, db.getPrevError().nPrev , "M" );
-
-db.forceError();
-assert.eq( undefined, getLastError().updatedExisting , "N" );
diff --git a/jstests/update5.js b/jstests/update5.js
deleted file mode 100644
index 2728000f2d4..00000000000
--- a/jstests/update5.js
+++ /dev/null
@@ -1,41 +0,0 @@
-
-t = db.update5;
-
-function go( key ){
-
- t.drop();
-
- function check( num , name ){
- assert.eq( 1 , t.find().count() , tojson( key ) + " count " + name );
- assert.eq( num , t.findOne().n , tojson( key ) + " value " + name );
- }
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 1 , "A" );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 2 , "B" );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 3 , "C" );
-
- var ik = {};
- for ( k in key )
- ik[k] = 1;
- t.ensureIndex( ik );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 4 , "D" );
-
-}
-
-go( { a : 5 } );
-go( { a : 5 } );
-
-go( { a : 5 , b : 7 } );
-go( { a : null , b : 7 } );
-
-go( { referer: 'blah' } );
-go( { referer: 'blah', lame: 'bar' } );
-go( { referer: 'blah', name: 'bar' } );
-go( { date: null, referer: 'blah', name: 'bar' } );
diff --git a/jstests/update6.js b/jstests/update6.js
deleted file mode 100644
index 05fc5b223d9..00000000000
--- a/jstests/update6.js
+++ /dev/null
@@ -1,46 +0,0 @@
-
-t = db.update6;
-t.drop();
-
-t.save( { a : 1 , b : { c : 1 , d : 1 } } );
-
-t.update( { a : 1 } , { $inc : { "b.c" : 1 } } );
-assert.eq( 2 , t.findOne().b.c , "A" );
-assert.eq( "c,d" , Object.keySet( t.findOne().b ).toString() , "B" );
-
-t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
-assert.eq( 1 , t.findOne().b["0e"] , "C" );
-assert.docEq( { "c" : 2, "d" : 1, "0e" : 1 }, t.findOne().b, "D" );
-
-// -----
-
-t.drop();
-
-t.save( {"_id" : 2 ,
- "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
- //"b323" : {"0720" : 1} ,
- }
- );
-
-
-assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks before" );
-t.update({_id:2},{$inc: { 'b3.0719' : 1}},true)
-assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks after" );
-
-
-// -----
-
-t.drop();
-
-t.save( {"_id" : 2 ,
- "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
- "b324" : {"0720" : 1} ,
- }
- );
-
-
-assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks before" );
-printjson( t.find({_id:2},{b3:1})[0].b3 )
-t.update({_id:2},{$inc: { 'b3.0719' : 1}} )
-printjson( t.find({_id:2},{b3:1})[0].b3 )
-assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks after" );
diff --git a/jstests/update7.js b/jstests/update7.js
deleted file mode 100644
index b893121080f..00000000000
--- a/jstests/update7.js
+++ /dev/null
@@ -1,138 +0,0 @@
-
-t = db.update7;
-t.drop();
-
-function s(){
- return t.find().sort( { _id : 1 } ).map( function(z){ return z.x; } );
-}
-
-t.save( { _id : 1 , x : 1 } );
-t.save( { _id : 2 , x : 5 } );
-
-assert.eq( "1,5" , s() , "A" );
-
-t.update( {} , { $inc : { x : 1 } } );
-assert.eq( "2,5" , s() , "B" );
-
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-assert.eq( "3,5" , s() , "C" );
-
-t.update( { _id : 2 } , { $inc : { x : 1 } } );
-assert.eq( "3,6" , s() , "D" );
-
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "E" );
-
-t.update( {} , { $set : { x : 2 } } , false , true );
-assert.eq( "2,2" , s() , "F" );
-
-// non-matching in cursor
-
-t.drop();
-
-t.save( { _id : 1 , x : 1 , a : 1 , b : 1 } );
-t.save( { _id : 2 , x : 5 , a : 1 , b : 2 } );
-assert.eq( "1,5" , s() , "B1" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,6" , s() , "B2" );
-
-t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "B3" );
-
-t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "B4" );
-
-t.ensureIndex( { a : 1 } );
-t.ensureIndex( { b : 1 } );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "B5" );
-
-t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,7" , s() , "B6" );
-
-t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,7" , s() , "B7" );
-
-t.update( { b : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "B7" );
-
-
-// multi-key
-
-t.drop();
-
-t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
-t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
-assert.eq( "1,5" , s() , "C1" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,5" , s() , "C2" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,5" , s() , "C3" );
-
-t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "C4" );
-
-t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "C5" );
-
-t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "C6" );
-
-
-t.drop();
-
-t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
-t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
-t.ensureIndex( { a : 1 } );
-assert.eq( "1,5" , s() , "D1" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,5" , s() , "D2" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,5" , s() , "D3" );
-
-t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "D4" );
-
-t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "D5" );
-
-t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "D6" );
-
-t.update( { a : { $lt : 10 } } , { $inc : { x : -1 } } , false , true );
-assert.eq( "4,7" , s() , "D7" );
-
-// ---
-
-t.save( { _id : 3 } );
-assert.eq( "4,7," , s() , "E1" );
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8,1" , s() , "E2" );
-
-for ( i = 4; i<8; i++ )
- t.save( { _id : i } );
-t.save( { _id : i , x : 1 } );
-assert.eq( "5,8,1,,,,,1" , s() , "E4" );
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "6,9,2,1,1,1,1,2" , s() , "E5" );
-
-
-// --- $inc indexed field
-
-t.drop();
-
-t.save( { x : 1 } );
-t.save( { x : 2 } );
-t.save( { x : 3 } );
-
-t.ensureIndex( { x : 1 } );
-
-assert.eq( "1,2,3" , s() , "F1" )
-t.update( { x : { $gt : 0 } } , { $inc : { x : 5 } } , false , true );
-assert.eq( "6,7,8" , s() , "F1" )
diff --git a/jstests/update8.js b/jstests/update8.js
deleted file mode 100644
index 2388ff85c9d..00000000000
--- a/jstests/update8.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.update8;
-t.drop();
-
-t.update( { _id : 1 , tags: {"$ne": "a"}}, {"$push": { tags : "a" } } , true )
-assert.eq( { _id : 1 , tags : [ "a" ] } , t.findOne() , "A" );
-
-t.drop()
-//SERVER-390
-//t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true );
-//printjson( t.findOne() );
diff --git a/jstests/update9.js b/jstests/update9.js
deleted file mode 100644
index 45b9e2d0e26..00000000000
--- a/jstests/update9.js
+++ /dev/null
@@ -1,19 +0,0 @@
-
-t = db.update9;
-t.drop()
-
-orig = { "_id" : 1 ,
- "question" : "a",
- "choices" : { "1" : { "choice" : "b" },
- "0" : { "choice" : "c" } } ,
-
- }
-
-t.save( orig );
-assert.eq( orig , t.findOne() , "A" );
-
-t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}})
-
-orig.choices["0"].votes = [ 1 ] ;
-assert.eq( orig.choices["0"] , t.findOne().choices["0"] , "B" );
-
diff --git a/jstests/update_addToSet.js b/jstests/update_addToSet.js
deleted file mode 100644
index da930555267..00000000000
--- a/jstests/update_addToSet.js
+++ /dev/null
@@ -1,58 +0,0 @@
-
-t = db.update_addToSet1;
-t.drop();
-
-o = { _id : 1 , a : [ 2 , 1 ] }
-t.insert( o );
-
-assert.eq( o , t.findOne() , "A1" );
-
-t.update( {} , { $addToSet : { a : 3 } } );
-o.a.push( 3 );
-assert.eq( o , t.findOne() , "A2" );
-
-t.update( {} , { $addToSet : { a : 3 } } );
-assert.eq( o , t.findOne() , "A3" );
-
-// SERVER-628
-t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
-o.a.push( 5 )
-o.a.push( 6 )
-assert.eq( o , t.findOne() , "B1" )
-
-t.drop()
-o = { _id : 1 , a : [ 3 , 5 , 6 ] }
-t.insert( o );
-t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
-assert.eq( o , t.findOne() , "B2" );
-
-t.drop();
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
-assert.eq( o , t.findOne() , "B3" );
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
-assert.eq( o , t.findOne() , "B4" );
-
-// SERVER-630
-t.drop();
-t.update( { _id : 2 } , { $addToSet : { a : 3 } } , true );
-assert.eq( 1 , t.count() , "C1" );
-assert.eq( { _id : 2 , a : [ 3 ] } , t.findOne() , "C2" );
-
-// SERVER-3245
-o = {_id: 1, a: [1,2]};
-t.drop();
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2]}}}, true );
-assert.eq( o, t.findOne(), "D1" );
-
-t.drop();
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,1,2]}}}, true );
-assert.eq( o, t.findOne(), "D2" );
-
-t.drop();
-t.insert( {_id: 1} );
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,2,1]}}} );
-assert.eq( o, t.findOne(), "D3" );
-
-t.update( {_id: 1}, {$addToSet: {a: {$each: [3,2,2,3,3]}}} );
-o.a.push( 3 );
-assert.eq( o, t.findOne(), "D4" );
diff --git a/jstests/update_addToSet2.js b/jstests/update_addToSet2.js
deleted file mode 100644
index cb168f8d15e..00000000000
--- a/jstests/update_addToSet2.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.update_addToSet2
-t.drop();
-
-o = { _id : 1 }
-t.insert( { _id : 1 } );
-
-t.update({},{$addToSet : {'kids' :{ 'name' : 'Bob', 'age': '4'}}})
-t.update({},{$addToSet : {'kids' :{ 'name' : 'Dan', 'age': '2'}}})
-
-printjson( t.findOne() );
diff --git a/jstests/update_addToSet3.js b/jstests/update_addToSet3.js
deleted file mode 100644
index e9da58eb6e0..00000000000
--- a/jstests/update_addToSet3.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.update_addToSet3
-t.drop()
-
-t.insert( { _id : 1 } )
-
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 6 , 5 , 4 ] } } } )
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 ] } , "A1" )
-
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 2 , 1 ] } } } )
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 ] } , "A2" )
-
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 4 , 7 , 9 , 2 ] } } } )
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 ] } , "A3" )
-
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 12 , 13 , 12 ] } } } )
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 , 12 , 13 ] } , "A4" )
-
diff --git a/jstests/update_arraymatch1.js b/jstests/update_arraymatch1.js
deleted file mode 100644
index 521271d7f85..00000000000
--- a/jstests/update_arraymatch1.js
+++ /dev/null
@@ -1,16 +0,0 @@
-
-t = db.update_arraymatch1
-t.drop();
-
-o = { _id : 1 , a : [ { x : 1 , y : 1 } , { x : 2 , y : 2 } , { x : 3 , y : 3 } ] }
-t.insert( o );
-assert.eq( o , t.findOne() , "A1" );
-
-q = { "a.x" : 2 }
-t.update( q , { $set : { b : 5 } } )
-o.b = 5
-assert.eq( o , t.findOne() , "A2" )
-
-t.update( { "a.x" : 2 } , { $inc : { "a.$.y" : 1 } } )
-o.a[1].y++;
-assert.eq( o , t.findOne() , "A3" );
diff --git a/jstests/update_arraymatch2.js b/jstests/update_arraymatch2.js
deleted file mode 100644
index c07a61c378c..00000000000
--- a/jstests/update_arraymatch2.js
+++ /dev/null
@@ -1,16 +0,0 @@
-t = db.update_arraymatch2;
-t.drop();
-
-t.insert( { } );
-t.insert( { x : [1,2,3] } );
-t.insert( { x : 99 } );
-t.update( {x : 2}, { $inc : { "x.$" : 1 } } , false, true );
-assert( t.findOne({x:1}).x[1] == 3, "A1" );
-
-t.insert( { x : { y : [8,7,6] } } )
-t.update( {'x.y' : 7}, { $inc : { "x.y.$" : 1 } } , false, true )
-assert.eq( 8 , t.findOne({"x.y" : 8}).x.y[1] , "B1" );
-
-t.insert( { x : [90,91,92], y : ['a', 'b', 'c'] } );
-t.update( { x : 92} , { $set : { 'y.$' : 'z' } }, false, true );
-assert.eq( 'z', t.findOne({x:92}).y[2], "B2" );
diff --git a/jstests/update_arraymatch3.js b/jstests/update_arraymatch3.js
deleted file mode 100644
index 116ac6be2e3..00000000000
--- a/jstests/update_arraymatch3.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.update_arraymatch3;
-t.drop();
-
-o = { _id : 1 ,
- title : "ABC",
- comments : [ { "by" : "joe", "votes" : 3 },
- { "by" : "jane", "votes" : 7 }
- ]
- }
-
-t.save( o );
-assert.eq( o , t.findOne() , "A1" );
-
-t.update( {'comments.by':'joe'}, {$inc:{'comments.$.votes':1}}, false, true )
-o.comments[0].votes++;
-assert.eq( o , t.findOne() , "A2" );
diff --git a/jstests/update_arraymatch4.js b/jstests/update_arraymatch4.js
deleted file mode 100644
index 5abd0aa3bf0..00000000000
--- a/jstests/update_arraymatch4.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.update_arraymatch4
-t.drop()
-
-x = { _id : 1 , arr : ["A1","B1","C1"] }
-t.insert( x )
-assert.eq( x , t.findOne() , "A1" )
-
-x.arr[0] = "A2"
-t.update( { arr : "A1" } , { $set : { "arr.$" : "A2" } } )
-assert.eq( x , t.findOne() , "A2" )
-
-t.ensureIndex( { arr : 1 } )
-x.arr[0] = "A3"
-t.update( { arr : "A2" } , { $set : { "arr.$" : "A3" } } )
-assert.eq( x , t.findOne() , "A3" ); // SERVER-1055
-
-
diff --git a/jstests/update_arraymatch5.js b/jstests/update_arraymatch5.js
deleted file mode 100644
index aff1a0323ef..00000000000
--- a/jstests/update_arraymatch5.js
+++ /dev/null
@@ -1,15 +0,0 @@
-
-t = db.update_arraymatch5
-t.drop();
-
-t.insert({abc:{visible:true}, testarray:[{foobar_id:316, visible:true, xxx: 1}]});
-t.ensureIndex({'abc.visible':1, 'testarray.visible':1 , 'testarray.xxx': 1});
-assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A1" )
-assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A2" );
-
-t.update({'testarray.foobar_id':316}, {'$set': {'testarray.$.visible': true, 'testarray.$.xxx': 2}}, false, true);
-
-assert( t.findOne() , "B1" );
-assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B2" )
-assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B3" );
-assert.eq( 1 , t.find().count() , "B4" );
diff --git a/jstests/update_arraymatch6.js b/jstests/update_arraymatch6.js
deleted file mode 100644
index 8892e6fcc68..00000000000
--- a/jstests/update_arraymatch6.js
+++ /dev/null
@@ -1,14 +0,0 @@
-t = db.jstests_update_arraymatch6;
-t.drop();
-
-function doTest() {
- t.save( {a: [{id: 1, x: [5,6,7]}, {id: 2, x: [8,9,10]}]} );
- t.update({'a.id': 1}, {$set: {'a.$.x': [1,1,1]}});
- assert.automsg( "!db.getLastError()" );
- assert.eq.automsg( "1", "t.findOne().a[ 0 ].x[ 0 ]" );
-}
-
-doTest();
-t.drop();
-t.ensureIndex( { 'a.id':1 } );
-doTest(); \ No newline at end of file
diff --git a/jstests/update_arraymatch7.js b/jstests/update_arraymatch7.js
deleted file mode 100644
index 5621f60c39e..00000000000
--- a/jstests/update_arraymatch7.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// Check that the positional operator works properly when an index only match is used for the update
-// query spec. SERVER-5067
-
-t = db.jstests_update_arraymatch7;
-t.drop();
-
-function testPositionalInc() {
- t.remove({});
- t.save( { a:[ { b:'match', count:0 } ] } );
- t.update( { 'a.b':'match' }, { $inc:{ 'a.$.count':1 } } );
- // Check that the positional $inc succeeded.
- assert( t.findOne( { 'a.count':1 } ) );
-}
-
-testPositionalInc();
-
-// Now check with a non multikey index.
-t.ensureIndex( { 'a.b' : 1 } );
-testPositionalInc();
diff --git a/jstests/update_arraymatch8.js b/jstests/update_arraymatch8.js
deleted file mode 100644
index 1e8ce377862..00000000000
--- a/jstests/update_arraymatch8.js
+++ /dev/null
@@ -1,158 +0,0 @@
-// Checking for positional array updates with either .$ or .0 at the end
-// SERVER-7511
-
-// array.$.name
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.$.name': 'new'}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
-
-// array.$ (failed in 2.2.2)
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.$': {'name':'new'}}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
-
-// array.0.name
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.0.name': 'new'}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
-
-// array.0 (failed in 2.2.2)
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.0': {'name':'new'}}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
-
-// // array.12.name
-t = db.jstests_update_arraymatch8;
-t.drop();
-arr = new Array();
-for (var i=0; i<20; i++) {
- arr.push({'name': 'old'});
-}
-t.ensureIndex( {'array.name': 1} );
-t.insert( {_id:0, 'array': arr} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {_id:0}, {$set: {'array.12.name': 'new'}} );
-// note: both documents now have to be in the array
-assert( t.findOne({'array.name': 'new'}) );
-assert( t.findOne({'array.name': 'old'}) );
-
-// array.12 (failed in 2.2.2)
-t = db.jstests_update_arraymatch8;
-t.drop();
-arr = new Array();
-for (var i=0; i<20; i++) {
- arr.push({'name': 'old'});
-}
-t.ensureIndex( {'array.name': 1} );
-t.insert( {_id:0, 'array': arr} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {_id:0}, {$set: {'array.12': {'name':'new'}}} );
-// note: both documents now have to be in the array
-assert( t.findOne({'array.name': 'new'}) );
-assert( t.findOne({'array.name': 'old'}) );
-
-// array.$.123a.name
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.123a.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a.name': 'new'}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
-
-// array.$.123a
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a': {'name': 'new'}}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
-
-// array.0.123a.name
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.123a.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a.name': 'new'}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
-
-// array.0.123a
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a': {'name': 'new'}}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
-
-// a.0.b
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'a.0.b': 1} );
-t.insert( {'a': [ [ { b:'old' } ] ] } );
-assert( t.findOne({'a.0.0.b': 'old'}) );
-assert( t.findOne({'a.0.b': 'old'}) );
-t.update( {}, {$set: {'a.0.0.b': 'new'}} );
-assert( t.findOne({'a.0.b': 'new'}) );
-assert( !t.findOne({'a.0.b': 'old'}) );
-
-// a.0.b.c
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'a.0.b.c': 1} );
-t.insert( {'a': [ { b:[ { c:'old' } ] } ] } );
-assert( t.findOne({'a.0.b.0.c': 'old'}) );
-assert( t.findOne({'a.b.0.c': 'old'}) );
-assert( t.findOne({'a.0.b.c': 'old'}) );
-assert( t.findOne({'a.b.c': 'old'}) );
-t.update( {}, {$set: {'a.0.b.0.c': 'new'}} );
-assert( t.findOne({'a.0.b.c': 'new'}) );
-assert( !t.findOne({'a.0.b.c': 'old'}) );
-
-// a.b.$ref
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'a.b.$ref': 1} );
-t.insert( {'a': [ { 'b':{ '$ref':'old', '$id':0 } } ] } );
-assert( t.findOne({'a.b.$ref': 'old'}) );
-assert( t.findOne({'a.0.b.$ref': 'old'}) );
-t.update( {}, {$set: {'a.0.b.$ref': 'new'}} );
-assert( t.findOne({'a.b.$ref': 'new'}) );
-assert( !t.findOne({'a.b.$ref': 'old'}) );
-
-// a.b and a-b
-t = db.jstests_update_arraymatch8;
-t.drop();
-t.ensureIndex( {'a.b': 1} );
-t.ensureIndex( {'a-b': 1} );
-t.insert( {'a':{'b':'old'}} );
-assert( t.findOne({'a.b': 'old'}) );
-t.update( {}, {$set: {'a': {'b': 'new'}}} );
-assert( t.findOne({'a.b': 'new'}) );
-assert( !t.findOne({'a.b': 'old'}) );
diff --git a/jstests/update_bit_examples.js b/jstests/update_bit_examples.js
deleted file mode 100644
index f277630a7da..00000000000
--- a/jstests/update_bit_examples.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Basic examples for $bit
-var coll = db.update_bit;
-coll.drop();
-
-// $bit and
-coll.remove({})
-coll.save({_id:1, a:NumberInt(2)});
-coll.update({}, {$bit: {a: {and: NumberInt(4)}}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, 0)
-
-// $bit or
-coll.remove({})
-coll.save({_id:1, a:NumberInt(2)});
-coll.update({}, {$bit: {a: {or: NumberInt(4)}}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, 6)
-
-// $bit xor
-coll.remove({})
-coll.save({_id:1, a:NumberInt(0)});
-coll.update({}, {$bit: {a: {xor: NumberInt(4)}}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, 4)
diff --git a/jstests/update_blank1.js b/jstests/update_blank1.js
deleted file mode 100644
index a2344035dc3..00000000000
--- a/jstests/update_blank1.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.update_blank1
-t.drop();
-
-orig = { "" : 1 , _id : 2 , "a" : 3 , "b" : 4 };
-t.insert( orig );
-t.update( {} , { $set : { "c" : 5 } } );
-print( db.getLastError() );
-orig["c"] = 5;
-assert.docEq( orig , t.findOne() , "after $set" ); // SERVER-2651 \ No newline at end of file
diff --git a/jstests/update_currentdate_examples.js b/jstests/update_currentdate_examples.js
deleted file mode 100644
index 055bd3089da..00000000000
--- a/jstests/update_currentdate_examples.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Basic examples for $currentDate
-var coll = db.update_currentdate;
-coll.drop();
-
-// $currentDate default
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$currentDate: {a: true}})
-assert.gleSuccess(coll.getDB())
-assert(coll.findOne().a.constructor == Date)
-
-// $currentDate type = date
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$currentDate: {a: {$type: "date"}}})
-assert.gleSuccess(coll.getDB())
-assert(coll.findOne().a.constructor == Date)
-
-// $currentDate type = timestamp
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$currentDate: {a: {$type: "timestamp"}}})
-assert.gleSuccess(coll.getDB())
-assert(coll.findOne().a.constructor == Timestamp)
diff --git a/jstests/update_dbref.js b/jstests/update_dbref.js
deleted file mode 100644
index bf31566fc28..00000000000
--- a/jstests/update_dbref.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Test that we can update DBRefs, but not dbref fields outside a DBRef
-
-t = db.jstests_update_dbref;
-t.drop();
-
-t.save({_id:1, a: new DBRef("a", "b")});
-assert.gleSuccess(db, "failed to save dbref");
-assert.docEq({_id:1, a: new DBRef("a", "b")}, t.findOne());
-
-t.update({}, {$set: {"a.$id": 2}});
-assert.gleSuccess(db, "a.$id update");
-assert.docEq({_id:1, a: new DBRef("a", 2)}, t.findOne());
-
-t.update({}, {$set: {"a.$ref": "b"}});
-assert.gleSuccess(db, "a.$ref update");
-
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
-
-// Bad updates
-t.update({}, {$set: {"$id": 3}});
-assert.gleErrorRegex(db, /\$id/, "expected bad update because of $id")
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
-
-t.update({}, {$set: {"$ref": "foo"}});
-assert.gleErrorRegex(db, /\$ref/, "expected bad update because of $ref")
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
-
-t.update({}, {$set: {"$db": "aDB"}});
-assert.gleErrorRegex(db, /\$db/, "expected bad update because of $db")
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
-
-t.update({}, {$set: {"b.$id": 2}});
-assert.gleError(db, function() { return "b.$id update -- doc:" + tojson(t.findOne())});
-
-t.update({}, {$set: {"b.$ref": 2}});
-assert.gleError(db, function() { return "b.$ref update -- doc:" + tojson(t.findOne())});
diff --git a/jstests/update_invalid1.js b/jstests/update_invalid1.js
deleted file mode 100644
index 7c94507f560..00000000000
--- a/jstests/update_invalid1.js
+++ /dev/null
@@ -1,6 +0,0 @@
-
-t = db.update_invalid1
-t.drop()
-
-t.update( { _id : 5 } , { $set : { $inc : { x : 5 } } } , true );
-assert.eq( 0 , t.count() , "A1" );
diff --git a/jstests/update_min_max_examples.js b/jstests/update_min_max_examples.js
deleted file mode 100644
index ef84cff3635..00000000000
--- a/jstests/update_min_max_examples.js
+++ /dev/null
@@ -1,31 +0,0 @@
-// Basic examples for $min/$max
-var coll = db.update_min_max;
-coll.drop();
-
-// $min for number
-coll.insert({_id:1, a:2});
-coll.update({_id:1}, {$min: {a: 1}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne({_id:1}).a, 1)
-
-// $max for number
-coll.insert({_id:2, a:2});
-coll.update({_id:2}, {$max: {a: 1}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne({_id:2}).a, 2)
-
-// $min for Date
-coll.insert({_id:3, a: new Date()});
-var origDoc = coll.findOne({_id:3})
-sleep(2)
-coll.update({_id:3}, {$min: {a: new Date()}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne({_id:3}).a, origDoc.a)
-
-// $max for Date
-coll.insert({_id:4, a: new Date()});
-sleep(2)
-var newDate = new Date();
-coll.update({_id:4}, {$max: {a: newDate}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne({_id:4}).a, newDate)
diff --git a/jstests/update_mul_examples.js b/jstests/update_mul_examples.js
deleted file mode 100644
index a57fa0a3380..00000000000
--- a/jstests/update_mul_examples.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Basic examples for $mul (multiply)
-var coll = db.update_mul;
-coll.drop();
-
-// $mul positive
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$mul: {a: 10}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, 20)
-
-// $mul negative
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$mul: {a: -10}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, -20)
-
-// $mul zero
-coll.remove({})
-coll.save({_id:1, a:2});
-coll.update({}, {$mul: {a: 0}})
-assert.gleSuccess(coll.getDB())
-assert.eq(coll.findOne().a, 0)
diff --git a/jstests/update_multi3.js b/jstests/update_multi3.js
deleted file mode 100644
index 903d8265b63..00000000000
--- a/jstests/update_multi3.js
+++ /dev/null
@@ -1,25 +0,0 @@
-
-t = db.update_multi3;
-
-function test( useIndex ){
- t.drop();
-
- if ( useIndex )
- t.ensureIndex({k:1})
-
- for (i=0; i<10; i++) {
- t.save({ _id : i , k: 'x', a: []});
- }
-
- t.update({k: 'x'}, {$push: {a: 'y'}}, false, true);
-
- t.find( { k : "x" } ).forEach(
- function(z){
- assert.eq( [ "y" ] , z.a , "useIndex: " + useIndex )
- }
- );
-
-}
-
-test( false )
-test( true )
diff --git a/jstests/update_multi4.js b/jstests/update_multi4.js
deleted file mode 100644
index e81a19a5feb..00000000000
--- a/jstests/update_multi4.js
+++ /dev/null
@@ -1,18 +0,0 @@
-
-t = db.update_mulit4;
-t.drop();
-
-for(i=0;i<1000;i++){
- t.insert( { _id:i ,
- k:i%12,
- v:"v"+i%12 } );
-}
-
-t.ensureIndex({k:1})
-
-assert.eq( 84 , t.count({k:2,v:"v2"} ) , "A0" );
-
-t.update({k:2},{$set:{v:"two v2"}},false,true)
-
-assert.eq( 0 , t.count({k:2,v:"v2"} ) , "A1" );
-assert.eq( 84 , t.count({k:2,v:"two v2"} ) , "A2" );
diff --git a/jstests/update_multi5.js b/jstests/update_multi5.js
deleted file mode 100644
index 46ef8f36da5..00000000000
--- a/jstests/update_multi5.js
+++ /dev/null
@@ -1,17 +0,0 @@
-
-t = db.update_multi5;
-
-t.drop()
-
-t.insert({path: 'r1', subscribers: [1,2]});
-t.insert({path: 'r2', subscribers: [3,4]});
-
-t.update({}, {$addToSet: {subscribers: 5}}, false, true);
-
-t.find().forEach(
- function(z){
- assert.eq( 3 , z.subscribers.length , z );
- }
-);
-
-
diff --git a/jstests/update_multi6.js b/jstests/update_multi6.js
deleted file mode 100644
index dcc1ff04034..00000000000
--- a/jstests/update_multi6.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.update_multi6
-t.drop();
-
-t.update( { _id : 1 } , { _id : 1 , x : 1 , y : 2 } , true , false );
-assert( t.findOne( { _id : 1 } ) , "A" )
-
-t.update( { _id : 2 } , { _id : 2 , x : 1 , y : 2 } , true , true );
-assert( db.getLastError() , "B: " + tojson(db.getLastErrorCmd()) );
-
diff --git a/jstests/update_replace.js b/jstests/update_replace.js
deleted file mode 100644
index 0f9ef8fbe39..00000000000
--- a/jstests/update_replace.js
+++ /dev/null
@@ -1,50 +0,0 @@
-// This test checks validation of the replaced doc (on the server) for dots, $prefix and _id
-
-// Create a new connection object so it won't affect the global connection when we modify
-// it's settings.
-var conn = new Mongo(db.getMongo().host);
-t = conn.getDB(db.getName()).jstests_update_replace;
-t.drop();
-
-var myDB = t.getDB();
-
-// Bypass validation in shell so we can test the server.
-conn._skipValidation = true;
-
-// Should not allow "." in field names
-t.save({_id:1, "a.a":1})
-assert.gleError(myDB, "a.a");
-
-// Should not allow "." in field names, embedded
-t.save({_id:1, a :{"a.a":1}})
-assert.gleError(myDB, "a: a.a");
-
-// Should not allow "$"-prefixed field names, caught before "." check
-t.save({_id:1, $a :{"a.a":1}})
-assert.gleError(myDB, "$a: a.a");
-
-// Should not allow "$"-prefixed field names
-t.save({_id:1, $a: 1})
-assert.gleError(myDB, "$a");
-
-// _id validation checks
-
-// Should not allow regex _id
-t.save({_id: /a/})
-assert.gleError(myDB, "_id regex");
-
-// Should not allow regex _id, even if not first
-t.save({a:2, _id: /a/})
-assert.gleError(myDB, "a _id regex");
-
-// Should not allow array _id
-t.save({_id: [9]})
-assert.gleError(myDB, "_id array");
-
-// This is fine since _id isn't a top level field
-t.save({a :{ _id: [9]}})
-assert.gleSuccess(myDB, "embedded _id array");
-
-// This is fine since _id isn't a top level field
-t.save({b:1, a :{ _id: [9]}})
-assert.gleSuccess(myDB, "b embedded _id array");
diff --git a/jstests/update_server-12848.js b/jstests/update_server-12848.js
deleted file mode 100644
index 467be7c2af3..00000000000
--- a/jstests/update_server-12848.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// In SERVER-12848, it was shown that validation fails for certain types of updates
-// because the validate code was attempting to validate field names in array. Mutable
-// doesn't offer guarantees about the examined field name of array elements, only of the
-// field name of array elements when serialized. This is a regression test to
-// check that the new validation logic doesn't attempt to validate field names.
-
-var t = db.update_server_12848;
-t.drop();
-
-var orig = { "_id" : 1, "a" : [ 1, [ ] ] };
-t.insert(orig);
-assert.gleSuccess(db, "insert");
-assert.eq(orig, t.findOne());
-
-t.update({ "_id" : 1 }, { $addToSet : { "a.1" : 1 } });
-assert.gleSuccess(db, "update");
-
-var updated = { "_id" : 1, "a" : [ 1, [ 1 ] ] };
-assert.eq(updated, t.findOne());
diff --git a/jstests/update_setOnInsert.js b/jstests/update_setOnInsert.js
deleted file mode 100644
index be215ab408d..00000000000
--- a/jstests/update_setOnInsert.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// This tests that $setOnInsert works and allow setting the _id
-t = db.update_setOnInsert;
-
-db.setProfilingLevel( 2 );
-
-function getLastOp() {
- var cursor = db.system.profile.find( { ns : t.getFullName() , op : "update" } );
- cursor = cursor.sort( { $natural : -1 } ).limit(1);
- return cursor[0];
-}
-
-function dotest( useIndex ) {
- t.drop();
- if ( useIndex ) {
- t.ensureIndex( { a : 1 } );
- }
-
- t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
- assert.docEq( { _id : 5, a: 3, x : 2 }, t.findOne() );
-
- t.update( { _id: 5 }, { $set : { a : 4 } }, true );
-
- t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
- assert.docEq( { _id : 5, a: 4, x : 4 }, t.findOne() );
-
- op = getLastOp();
- assert( op.fastmod );
-}
-
-dotest( false );
-dotest( true );
-
-
-// Cases for SERVER-9958 -- Allow _id $setOnInsert during insert (if upsert:true, and not doc found)
-t.drop();
-
-t.update( {_id: 1} , { $setOnInsert: { "_id.a": new Date() } } , true );
-assert.gleError(db, function(gle) {
- return "$setOnInsert _id.a - " + tojson(gle) + tojson(t.findOne()) } );
-
-t.update( {"_id.a": 4} , { $setOnInsert: { "_id.b": 1 } } , true );
-assert.gleError(db, function(gle) {
- return "$setOnInsert _id.b - " + tojson(gle) + tojson(t.findOne()) } );
-
-t.update( {"_id.a": 4} , { $setOnInsert: { "_id": {a:4, b:1} } } , true );
-assert.gleError(db, function(gle) {
- return "$setOnInsert _id 3 - " + tojson(gle) + tojson(t.findOne()) } );
diff --git a/jstests/updatea.js b/jstests/updatea.js
deleted file mode 100644
index 40b900d0c9d..00000000000
--- a/jstests/updatea.js
+++ /dev/null
@@ -1,67 +0,0 @@
-
-t = db.updatea;
-t.drop();
-
-orig = { _id : 1 , a : [ { x : 1 , y : 2 } , { x : 10 , y : 11 } ] }
-
-t.save( orig )
-assert.gleSuccess(db, "orig");
-
-// SERVER-181
-t.update( {} , { $set : { "a.0.x" : 3 } } )
-assert.gleSuccess(db, "a.0.x");
-orig.a[0].x = 3;
-assert.eq( orig , t.findOne() , "A1" );
-
-t.update( {} , { $set : { "a.1.z" : 17 } } )
-assert.gleSuccess(db, "a.1.z");
-orig.a[1].z = 17;
-assert.eq( orig , t.findOne() , "A2" );
-
-// SERVER-273
-t.update( {} , { $unset : { "a.1.y" : 1 } } )
-assert.gleSuccess(db, "a.1.y");
-delete orig.a[1].y
-assert.eq( orig , t.findOne() , "A3" );
-
-// SERVER-333
-t.drop();
-orig = { _id : 1 , comments : [ { name : "blah" , rate_up : 0 , rate_ups : [] } ] }
-t.save( orig );
-assert.gleSuccess(db, "save");
-
-
-t.update( {} , { $inc: { "comments.0.rate_up" : 1 } , $push: { "comments.0.rate_ups" : 99 } } )
-assert.gleSuccess(db, "comments.0.rate_up");
-orig.comments[0].rate_up++;
-orig.comments[0].rate_ups.push( 99 )
-assert.eq( orig , t.findOne() , "B1" )
-
-t.drop();
-orig = { _id : 1 , a : [] }
-for ( i=0; i<12; i++ )
- orig.a.push( i );
-
-
-t.save( orig );
-assert.gleSuccess(db, "C1");
-assert.eq( orig , t.findOne() , "C1" );
-
-t.update( {} , { $inc: { "a.0" : 1 } } );
-assert.gleSuccess(db, "C2");
-orig.a[0]++;
-assert.eq( orig , t.findOne() , "C2" );
-
-t.update( {} , { $inc: { "a.10" : 1 } } );
-assert.gleSuccess(db, "a.10");
-orig.a[10]++;
-
-
-// SERVER-3218
-t.drop()
-t.insert({"a":{"c00":1}, 'c':2})
-t.update({"c":2}, {'$inc':{'a.c000':1}})
-assert.gleSuccess(db, "D1");
-
-assert.eq( { "c00" : 1 , "c000" : 1 } , t.findOne().a , "D1" )
-
diff --git a/jstests/updateb.js b/jstests/updateb.js
deleted file mode 100644
index d85e19a36bc..00000000000
--- a/jstests/updateb.js
+++ /dev/null
@@ -1,11 +0,0 @@
-
-t = db.updateb;
-t.drop();
-
-t.update( { "x.y" : 2 } , { $inc : { a : 7 } } , true );
-
-correct = { a : 7 , x : { y : 2 } };
-got = t.findOne();
-delete got._id;
-assert.docEq( correct , got , "A" )
-
diff --git a/jstests/updatec.js b/jstests/updatec.js
deleted file mode 100644
index 0c77b8b3cda..00000000000
--- a/jstests/updatec.js
+++ /dev/null
@@ -1,14 +0,0 @@
-
-t = db.updatec;
-t.drop();
-
-t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
-t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
-
-assert.docEq(
- {
- "_id" : 123,
- "f" : [ 234, 234 ] ,
- "v" : { "i" : 123, "a" : 456 }
- } , t.findOne() );
-
diff --git a/jstests/updated.js b/jstests/updated.js
deleted file mode 100644
index c202e8d435f..00000000000
--- a/jstests/updated.js
+++ /dev/null
@@ -1,20 +0,0 @@
-
-t = db.updated;
-t.drop()
-
-o = { _id : Math.random() ,
- items:[null,null,null,null]
- };
-
-t.insert( o );
-assert.docEq( o , t.findOne() , "A1" );
-
-o.items[0] = {amount:9000,itemId:1};
-t.update({},{$set:{"items.0":o.items[0]}});
-assert.docEq( o , t.findOne() , "A2" );
-
-o.items[0].amount += 1000;
-o.items[1] = {amount:1,itemId:2};
-t.update({},{$inc:{"items.0.amount":1000},$set:{"items.1":o.items[1]}});
-assert.docEq( o , t.findOne() , "A3" );
-
diff --git a/jstests/updatee.js b/jstests/updatee.js
deleted file mode 100644
index 85ba37c5c05..00000000000
--- a/jstests/updatee.js
+++ /dev/null
@@ -1,71 +0,0 @@
-// big numeric updates (used to overflow)
-
-t = db.updatee;
-t.drop();
-
-var o = { "_id" : 1,
- "actual" : {
- "key1" : "val1",
- "key2" : "val2",
- "001" : "val3",
- "002" : "val4",
- "0020000000000000000000" : "val5"
- },
- "profile-id" : "test" };
-
-
-t.insert( o );
-assert.eq( o , t.findOne() , "A1" );
-
-t.update({"profile-id" : "test"}, {$set: {"actual.0030000000000000000000": "val6"}});
-
-var q = t.findOne();
-
-// server-1347
-assert.eq(q.actual["0020000000000000000000"], "val5", "A2");
-assert.eq(q.actual["0030000000000000000000"], "val6", "A3");
-
-t.update({"profile-id" : "test"}, {$set: {"actual.02": "v4"}});
-
-q = t.findOne();
-assert.eq(q.actual["02"], "v4", "A4");
-assert.eq(q.actual["002"], "val4", "A5");
-
-t.update({"_id" : 1}, {$set : {"actual.2139043290148390248219423941.b" : 4}});
-q = t.findOne();
-assert.eq(q.actual["2139043290148390248219423941"].b, 4, "A6");
-
-// non-nested
-t.update({"_id" : 1}, {$set : {"7213647182934612837492342341" : 1}});
-t.update({"_id" : 1}, {$set : {"7213647182934612837492342342" : 2}});
-
-q = t.findOne();
-assert.eq(q["7213647182934612837492342341"], 1, "A7 1");
-assert.eq(q["7213647182934612837492342342"], 2, "A7 2");
-
-// 0s
-t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
-q = t.findOne();
-assert.eq(q.actual["000"], "val000", "A8 zeros");
-
-t.update({"_id" : 1}, {$set : {"actual.00" : "val00"}});
-q = t.findOne();
-assert.eq(q.actual["00"], "val00", "A8 00");
-assert.eq(q.actual["000"], "val000", "A9");
-
-t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
-q = t.findOne();
-assert.eq(q.actual["000"], "val000", "A9");
-assert.eq(q.actual["00"], "val00", "A10");
-
-t.update({"_id" : 1}, {$set : {"actual.01" : "val01"}});
-q = t.findOne();
-assert.eq(q.actual["000"], "val000", "A11");
-assert.eq(q.actual["01"], "val01", "A12");
-
-// shouldn't work, but shouldn't do anything too heinous, either
-t.update({"_id" : 1}, {$set : {"0.." : "val01"}});
-t.update({"_id" : 1}, {$set : {"0..0" : "val01"}});
-t.update({"_id" : 1}, {$set : {".0" : "val01"}});
-t.update({"_id" : 1}, {$set : {"..0" : "val01"}});
-t.update({"_id" : 1}, {$set : {"0.0..0" : "val01"}});
diff --git a/jstests/updatef.js b/jstests/updatef.js
deleted file mode 100644
index 69425932f19..00000000000
--- a/jstests/updatef.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Test unsafe management of nsdt on update command yield SERVER-3208
-
-prefixNS = db.jstests_updatef;
-prefixNS.save( {} );
-
-t = db.jstests_updatef_actual;
-t.drop();
-
-t.save( {a:0,b:[]} );
-for( i = 0; i < 1000; ++i ) {
- t.save( {a:100} );
-}
-t.save( {a:0,b:[]} );
-
-db.getLastError();
-// Repeatedly rename jstests_updatef to jstests_updatef_ and back. This will
-// invalidate the jstests_updatef_actual NamespaceDetailsTransient object.
-s = startParallelShell( "for( i=0; i < 100; ++i ) { db.jstests_updatef.renameCollection( 'jstests_updatef_' ); db.jstests_updatef_.renameCollection( 'jstests_updatef' ); }" );
-
-for( i=0; i < 20; ++i ) {
- t.update( {a:0}, {$push:{b:i}}, false, true );
-}
-
-s();
diff --git a/jstests/updateg.js b/jstests/updateg.js
deleted file mode 100644
index f8d452f71b2..00000000000
--- a/jstests/updateg.js
+++ /dev/null
@@ -1,17 +0,0 @@
-// SERVER-3370 check modifiers with field name characters comparing less than '.' character.
-
-t = db.jstests_updateg;
-
-t.drop();
-t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }}, true);
-assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
-
-t.drop();
-t.save({ 'all' : {}, 'all-copy' : {}});
-t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }});
-assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
-
-t.drop();
-t.save({ 'all11' : {}, 'all2' : {}});
-t.update({}, { '$inc' : { 'all11.t' : 1, 'all2.t' : 1 }});
-assert.eq( 1, t.count( {all11:{t:1},'all2':{t:1}} ) );
diff --git a/jstests/updateh.js b/jstests/updateh.js
deleted file mode 100644
index 2a39f6a0975..00000000000
--- a/jstests/updateh.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// Disallow $ in field names - SERVER-3730
-
-t = db.jstest_updateh
-t.drop()
-
-t.insert( {x:1} )
-
-t.update( {x:1}, {$set: {y:1}} ) // ok
-e = db.getLastErrorObj()
-assert.eq( e.err, null )
-
-t.update( {x:1}, {$set: {$z:1}} ) // not ok
-e = db.getLastErrorObj()
-assert( e.err != null )
-
-// TODO: This shouldn't be supported, and it isn't with the new update framework, but we
-// currently don't have a good way to check which mode we are in. When we do have that, add
-// this test guarded under that condition. Or, when we remove the old update path just enable
-// this test.
-//
-// t.update( {x:1}, {$set: {'a.$b':1}} ) // not ok
-// e = db.getLastErrorObj()
-// assert( e.err != null )
-
-t.update( {x:1}, {$unset: {$z:1}} ) // unset ok to remove bad fields
-e = db.getLastErrorObj()
-assert.eq( e.err, null )
-
-t.update( {x:1}, {$inc: {$z:1}} ) // not ok
-e = db.getLastErrorObj()
-assert( e.err != null )
-
-t.update( {x:1}, {$pushAll: {$z:[1,2,3]}} ) // not ok
-e = db.getLastErrorObj()
-assert( e.err != null )
-
-t.update( {x:1}, {$pushAll: {z:[1,2,3]}} ) // ok
-e = db.getLastErrorObj()
-assert.eq( e.err, null )
diff --git a/jstests/updatei.js b/jstests/updatei.js
deleted file mode 100644
index e45b3fde5bb..00000000000
--- a/jstests/updatei.js
+++ /dev/null
@@ -1,86 +0,0 @@
-// Test new (optional) update syntax
-// SERVER-4176
-t = db.updatei;
-
-// Using a multi update
-
-t.drop();
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "x" }, { $push: { a: "y" }}, { multi: true });
-t.find({ k : "x" }).forEach(function(z) {
- assert.eq([ "y" ], z.a, "multi update using object arg");
-});
-
-t.drop();
-
-// Using a single update
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "x" }, { $push: { a: "y" }}, { multi: false });
-assert.eq(1, t.find({ "a": "y" }).count(), "update using object arg");
-
-t.drop();
-
-// Using upsert, found
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true });
-assert.eq(1, t.find({ "k": "x", "a": "y" }).count(), "upsert (found) using object arg");
-
-t.drop();
-
-// Using upsert + multi, found
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true, multi: true });
-t.find({ k : "x" }).forEach(function(z) {
- assert.eq([ "y" ], z.a, "multi + upsert (found) using object arg");
-});
-
-t.drop();
-
-// Using upsert, not found
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: true });
-assert.eq(1, t.find({ "k": "y", "a": "y" }).count(), "upsert (not found) using object arg");
-
-t.drop();
-
-// Without upsert, found
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: false });
-assert.eq(1, t.find({ "a": "y" }).count(), "no upsert (found) using object arg");
-
-t.drop();
-
-// Without upsert, not found
-
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
-}
-
-t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: false });
-assert.eq(0, t.find({ "a": "y" }).count(), "no upsert (not found) using object arg");
-
-t.drop();
diff --git a/jstests/updatej.js b/jstests/updatej.js
deleted file mode 100644
index 6a70a4c2d51..00000000000
--- a/jstests/updatej.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// Test that update validation failure terminates the update without modifying subsequent
-// documents. SERVER-4779
-
-t = db.jstests_updatej;
-t.drop();
-
-t.save( {a:[]} );
-t.save( {a:1} );
-t.save( {a:[]} );
-
-t.update( {}, {$push:{a:2}}, false, true );
-assert.eq( 1, t.count( {a:2} ) );
diff --git a/jstests/updatek.js b/jstests/updatek.js
deleted file mode 100644
index b96f3138a81..00000000000
--- a/jstests/updatek.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// Test modifier operations on numerically equivalent string field names. SERVER-4776
-
-t = db.jstests_updatek;
-
-t.drop();
-t.save( { _id:0, '1':{}, '01':{} } );
-t.update( {}, { $set:{ '1.b':1, '1.c':2 } } );
-assert.docEq( { "01" : { }, "1" : { "b" : 1, "c" : 2 }, "_id" : 0 }, t.findOne() );
-
-t.drop();
-t.save( { _id:0, '1':{}, '01':{} } );
-t.update( {}, { $set:{ '1.b':1, '01.c':2 } } );
-assert.docEq( { "01" : { "c" : 2 }, "1" : { "b" : 1 }, "_id" : 0 }, t.findOne() );
-
diff --git a/jstests/updatel.js b/jstests/updatel.js
deleted file mode 100644
index be4b95cf99f..00000000000
--- a/jstests/updatel.js
+++ /dev/null
@@ -1,48 +0,0 @@
-// The positional operator allows an update modifier field path to contain a sentinel ('$') path
-// part that is replaced with the numeric position of an array element matched by the update's query
-// spec. <http://dochub.mongodb.org/core/positionaloperator>
-
-// If no array element position from a query is available to substitute for the positional operator
-// setinel ('$'), the update fails with an error. SERVER-6669 SERVER-4713
-
-t = db.jstests_updatel;
-t.drop();
-
-
-
-// The collection is empty, forcing an upsert. In this case the query has no array position match
-// to substiture for the positional operator. SERVER-4713
-t.update( {}, { $set:{ 'a.$.b':1 } }, true );
-assert( db.getLastError(), "An error is reported." );
-assert.eq( 0, t.count(), "No upsert occurred." );
-
-
-
-// Save a document to the collection so it is no longer empty.
-t.save( { _id:0 } );
-
-// Now, with an existing document, trigger an update rather than an upsert. The query has no array
-// position match to substiture for the positional operator. SERVER-6669
-t.update( {}, { $set:{ 'a.$.b':1 } } );
-assert( db.getLastError(), "An error is reported." );
-assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
-
-
-
-// Now, try with an update by _id (without a query array match).
-t.update( { _id:0 }, { $set:{ 'a.$.b':1 } } );
-assert( db.getLastError(), "An error is reported." );
-assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
-
-
-
-// Seed the collection with a document suitable for the following check.
-t.remove({});
-t.save( { _id:0, a:[ { b:{ c:1 } } ] } );
-
-// Now, attempt to apply an update with two nested positional operators. There is a positional
-// query match for the first positional operator but not the second. Note that dollar sign
-// substitution for multiple positional opertors is not implemented (SERVER-831).
-t.update( { 'a.b.c':1 }, { $set:{ 'a.$.b.$.c':2 } } );
-assert( db.getLastError(), "An error is reported" );
-assert.eq( [ { _id:0, a:[ { b:{ c:1 } } ] } ], t.find().toArray(), "No update occurred." );
diff --git a/jstests/updatem.js b/jstests/updatem.js
deleted file mode 100644
index 3d46d2a15f3..00000000000
--- a/jstests/updatem.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Tests that _id will exist in all updated docs.
-
-t = db.jstests_updatem;
-t.drop();
-
-// new _id from insert (upsert:true)
-t.update({a:1}, {$inc:{b:1}}, true)
-var doc = t.findOne({a:1});
-assert(doc["_id"], "missing _id")
-
-// new _id from insert (upsert:true)
-t.update({a:1}, {$inc:{b:1}}, true)
-var doc = t.findOne({a:1});
-assert(doc["_id"], "missing _id")
-
-// no _id on existing doc
-t.getDB().runCommand({godinsert:t.getName(), obj:{a:2}})
-t.update({a:2}, {$inc:{b:1}}, true)
-var doc = t.findOne({a:2});
-assert(doc["_id"], "missing _id after update")
diff --git a/jstests/upsert1.js b/jstests/upsert1.js
deleted file mode 100644
index 21f24ae8281..00000000000
--- a/jstests/upsert1.js
+++ /dev/null
@@ -1,59 +0,0 @@
-// tests to make sure that the new _id is returned after the insert
-t = db.upsert1;
-t.drop();
-
-// make sure the new _id is returned when $mods are used
-t.update( { x : 1 } , { $inc : { y : 1 } } , true );
-l = db.getLastErrorCmd();
-assert( l.upserted , "A1 - " + tojson(l) );
-assert.eq( l.upserted.str , t.findOne()._id.str , "A2" );
-
-// make sure the new _id is returned on a replacement (no $mod in update)
-t.update( { x : 2 } , { x : 2 , y : 3 } , true );
-l = db.getLastErrorCmd();
-assert( l.upserted , "B1 - " + tojson(l) );
-assert.eq( l.upserted.str , t.findOne( { x : 2 } )._id.str , "B2" );
-assert.eq( 2 , t.find().count() , "B3" );
-
-// use the _id from the query for the insert
-t.update({_id:3}, {$set: {a:'123'}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "C1 - " + tojson(l) );
-assert.eq( l.upserted , 3 , "C2 - " + tojson(l) );
-
-// test with an embedded doc for the _id field
-t.update({_id:{a:1}}, {$set: {a:123}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "D1 - " + tojson(l) );
-assert.eq( l.upserted , {a:1} , "D2 - " + tojson(l) );
-
-// test with a range query
-t.update({_id: {$gt:100}}, {$set: {a:123}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "E1 - " + tojson(l) );
-assert.neq( l.upserted , 100 , "E2 - " + tojson(l) );
-
-// test with an _id query
-t.update({_id: 1233}, {$set: {a:123}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "F1 - " + tojson(l) );
-assert.eq( l.upserted , 1233 , "F2 - " + tojson(l) );
-
-// test with an embedded _id query
-t.update({_id: {a:1, b:2}}, {$set: {a:123}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "G1 - " + tojson(l) );
-assert.eq( l.upserted , {a:1, b:2} , "G2 - " + tojson(l) );
-
-// test with no _id inserted
-db.no_id.drop();
-db.createCollection("no_id", {autoIndexId:false})
-db.no_id.update({foo:1}, {$set:{a:1}}, true)
-l = db.getLastErrorCmd();
-assert( l.upserted , "H1 - " + tojson(l) );
-assert( !l.err, "H1.5 No error expected - " + tojson(l) )
-assert.eq( 0, db.no_id.getIndexes().length, "H2" );
-assert.eq( 1, db.no_id.count(), "H3" );
-var newDoc = db.no_id.findOne();
-delete newDoc["_id"];
-assert.eq( { foo : 1, a : 1 }, newDoc, "H4" );
diff --git a/jstests/upsert2.js b/jstests/upsert2.js
deleted file mode 100644
index 7184ed807d1..00000000000
--- a/jstests/upsert2.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// A query field with a $not operator should be excluded when constructing the object to which mods
-// will be applied when performing an upsert. SERVER-8178
-
-t = db.jstests_upsert2;
-
-// The a:$not query operator does not cause an 'a' field to be added to the upsert document.
-t.drop();
-t.update( { a:{ $not:{ $lt:1 } } }, { $set:{ b:1 } }, true );
-assert( !t.findOne().a );
-
-// The a:$not query operator does not cause an 'a' field to be added to the upsert document.
-t.drop();
-t.update( { a:{ $not:{ $elemMatch:{ a:1 } } } }, { $set:{ b:1 } }, true );
-assert( !t.findOne().a );
-
-// The a:$not query operator does not cause an 'a' field to be added to the upsert document, and as
-// a result $push can be applied to the (missing) 'a' field.
-t.drop();
-t.update( { a:{ $not:{ $elemMatch:{ a:1 } } } }, { $push:{ a:{ b:1, c:0 } } }, true );
-assert.eq( [ { b:1, c:0 } ], t.findOne().a );
diff --git a/jstests/upsert3.js b/jstests/upsert3.js
deleted file mode 100644
index 34e37bde33d..00000000000
--- a/jstests/upsert3.js
+++ /dev/null
@@ -1,60 +0,0 @@
-// tests to make sure no dup fields are created when using query to do upsert
-t = db.upsert3;
-t.drop();
-
-//make sure we validate query
-t.update( {a: {"a.a": 1}} , {$inc: {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "a.a.a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-t.update( {a: {$a: 1}} , {$inc: {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "a.$a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-// make sure the new _id is not duplicated
-t.update( {"a.b": 1, a: {a: 1, b: 1}} , {$inc: {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "a.b-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-t.update( {"_id.a": 1, _id: {a: 1, b: 1}} , {$inc : {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "_id-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-t.update( {_id: {a: 1, b: 1}, "_id.a": 1} , { $inc: {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "_id-2 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-// Should be redundant, but including from SERVER-11363
-t.update( {_id: {a: 1, b: 1}, "_id.a": 1} , {$setOnInsert: {y: 1}} , true );
-assert.gleError(db, function(gle) {
- return "_id-3 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-//Should be redundant, but including from SERVER-11514
-t.update( {"a": {}, "a.c": 2} , {$set: {x: 1}}, true );
-assert.gleError(db, function(gle) {
- return "a.c-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-// Should be redundant, but including from SERVER-4830
-t.update( {'a': {b: 1}, 'a.c': 1}, {$inc: {z: 1}}, true );
-assert.gleError(db, function(gle) {
- return "a-1 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-// Should be redundant, but including from SERVER-4830
-t.update( {a: 1, "a.b": 1, a: [1, {b: 1}]}, {$inc: {z: 1}}, true );
-assert.gleError(db, function(gle) {
- return "a-2 - " + tojson(gle) + " doc:" + tojson(t.findOne()) });
-
-// Replacement tests
-// Query is ignored for replacements, except _id field.
-t.update( {r: {a: 1, b: 1}, "r.a": 1} , {y: 1} , true );
-assert.gleSuccess(db, "r-1");
-assert(t.findOne().y, 1, "inserted doc missing field")
-var docMinusId = t.findOne();
-delete docMinusId._id
-assert.docEq({y: 1}, docMinusId, "r-1")
-t.drop()
-
-t.update( {_id: {a:1, b:1}, "_id.a": 1} , {y: 1} , true );
-assert.gleSuccess(db, "_id-4");
-assert.docEq({_id: {a: 1, b: 1}, y: 1}, t.findOne(), "_id-4")
-t.drop() \ No newline at end of file
diff --git a/jstests/upsert4.js b/jstests/upsert4.js
deleted file mode 100644
index cbf7f2646f3..00000000000
--- a/jstests/upsert4.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// tests to ensure fields in $and conditions are created when using the query to do upsert
-coll = db.upsert4;
-coll.drop();
-
-coll.update({_id: 1, $and: [{c: 1}, {d: 1}], a: 12} , {$inc: {y: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, a: 12, y: 1})
-
-coll.remove({})
-coll.update({$and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
-
-coll.remove({})
-coll.update({$and: [{c: 1}, {d: 1}, {$or: [{x:1}]}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, x:1})
-
-coll.remove({})
-coll.update({$and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
-
-coll.remove({})
-coll.update({r: {$gt: 3}, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
-
-coll.remove({})
-coll.update({r: /s/, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleSuccess(db, "");
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1})
-
-coll.remove({})
-coll.update({c:2, $and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
-assert.gleError(db, "");
diff --git a/jstests/use_power_of_2.js b/jstests/use_power_of_2.js
deleted file mode 100644
index 3200c937452..00000000000
--- a/jstests/use_power_of_2.js
+++ /dev/null
@@ -1,86 +0,0 @@
-/* This test ensures that the usePowerOf2 user flag
- * effectively reuses space. The test repeatedly inserts and
- * then deletes a batch of variable-length strings, then checks
- * that doing so does not cause the storageSize to grow. */
-
-// A bunch of strings of length 0 to 100
-var var_length_strings =
- [ "" ,
- "aaaaa" ,
- "aaaaaaaaaa" ,
- "aaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ,
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ]
-
-//insert all the strings
-var batch_insert = function(coll){
- for ( i=0; i < var_length_strings.length; i++ ){
- coll.insert( { a : var_length_strings[i] } );
- }
-}
-
-//delete the same strings
-var batch_delete = function(coll){
- for ( i=0; i < var_length_strings.length; i++ ){
- coll.remove( { a : var_length_strings[i] } );
- }
-}
-
-//number of times to repeat batch inserts/deletes
-var numrepeats = 1000;
-
-var testStorageSize = function(ns){
- //insert docs and measure storage size
- batch_insert(ns);
- var oldSize = ns.stats().storageSize;
-
- //remove and add same docs a bunch of times
- for ( n=0 ; n < numrepeats ; n++ ){
- batch_delete(ns);
- batch_insert(ns);
- }
-
- //check size didn't change
- var newSize = ns.stats().storageSize;
- assert.eq( oldSize , newSize , "storage size changed");
-}
-
-/****************** TEST 1 *****************************/
-
-//create fresh collection, set flag to true, test storage size
-var coll = "usepower1"
-var t = db.getCollection(coll);
-t.drop();
-db.createCollection(coll);
-var res = db.runCommand( { "collMod" : coll , "usePowerOf2Sizes" : true } );
-assert.eq( res.ok , 1 , "collMod failed" );
-
-res = db.runCommand( { "collMod" : coll , "usePowerOf2Sizess" : true } )
-assert.eq( res.ok , 0 , "collMod should have failed: " + tojson( res ) )
-
-testStorageSize(t);
-
-/**************** Test 2 *****************************/
-
-//repeat previous test, but with flag set at creation time
-var coll = "usepower2"
-var t = db.getCollection(coll);
-t.drop();
-db.runCommand({"create" : coll, "flags" : 1 });
-
-testStorageSize(t);
diff --git a/jstests/useindexonobjgtlt.js b/jstests/useindexonobjgtlt.js
deleted file mode 100755
index 06e94a812f6..00000000000
--- a/jstests/useindexonobjgtlt.js
+++ /dev/null
@@ -1,15 +0,0 @@
-t = db.factories
-t.drop()
-t.insert( { name: "xyz", metro: { city: "New York", state: "NY" } } )
-t.ensureIndex( { metro : 1 } )
-
-assert( db.factories.find().count() )
-
-assert( db.factories.find( { metro: { city: "New York", state: "NY" } } ).count() )
-
-assert( db.factories.find( { metro: { city: "New York", state: "NY" } } ).explain().cursor == "BtreeCursor metro_1" )
-
-assert( db.factories.find( { metro: { $gte : { city: "New York" } } } ).explain().cursor == "BtreeCursor metro_1" )
-
-assert( db.factories.find( { metro: { $gte : { city: "New York" } } } ).count() == 1 )
-
diff --git a/jstests/user_management_helpers.js b/jstests/user_management_helpers.js
deleted file mode 100644
index 50707f584ab..00000000000
--- a/jstests/user_management_helpers.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// This test is a basic sanity check of the shell helpers for manipulating user objects
-// It is not a comprehensive test of the functionality of the user manipulation commands
-function assertHasRole(rolesArray, roleName, roleDB) {
- for (i in rolesArray) {
- var curRole = rolesArray[i];
- if (curRole.role == roleName && curRole.db == roleDB) {
- return;
- }
- }
- assert(false, "role " + roleName + "@" + roleDB + " not found in array: " + tojson(rolesArray));
-}
-
-
-(function(db) {
- var db = db.getSiblingDB("user_management_helpers");
- db.dropDatabase();
- db.dropAllUsers();
-
- db.createUser({user: "spencer", pwd: "password", roles: ['readWrite']});
- db.createUser({user: "andy", pwd: "password", roles: ['readWrite']});
-
- // Test getUser
- var userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Test getUsers
- var users = db.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user == 'spencer' || users[1].user == 'spencer');
- assert(users[0].user == 'andy' || users[1].user == 'andy');
- assert.eq(1, users[0].roles.length);
- assert.eq(1, users[1].roles.length);
- assertHasRole(users[0].roles, "readWrite", db.getName());
- assertHasRole(users[1].roles, "readWrite", db.getName());
-
- // Granting roles to nonexistent user fails
- assert.throws(function() { db.grantRolesToUser("fakeUser", ['dbAdmin']); });
- // Granting non-existant role fails
- assert.throws(function() { db.grantRolesToUser("spencer", ['dbAdmin', 'fakeRole']); });
-
- userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Granting a role you already have is no problem
- db.grantRolesToUser("spencer", ['readWrite', 'dbAdmin']);
- userObj = db.getUser('spencer');
- assert.eq(2, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
- assertHasRole(userObj.roles, "dbAdmin", db.getName());
-
- // Revoking roles the user doesn't have is fine
- db.revokeRolesFromUser("spencer", ['dbAdmin', 'read']);
- userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Update user
- db.updateUser("spencer", {customData: {hello: 'world'}, roles:['read']});
- userObj = db.getUser('spencer');
- assert.eq('world', userObj.customData.hello);
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "read", db.getName());
-
- // Test dropUser
- db.dropUser('andy');
- assert.throws(function() {printjson(db.getUser('andy'));});
-
- // Test dropAllUsers
- db.dropAllUsers()
- assert.eq(0, db.getUsers().length);
-
- // Test password digestion
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: true});});
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: false});});
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor: 'foo'});});
- db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor:"server"});
- db.createUser({user:'user2', pwd:'x', roles:[], passwordDigestor:"client"});
- assert(db.auth('user1', 'x'));
- assert(db.auth('user2', 'x'));
-
- assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: true});});
- assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: false});});
- assert.throws(function() { db.updateUser('user1', {pwd:'y', passwordDigestor: 'foo'});});
- db.updateUser('user1', {pwd:'y', passwordDigestor: 'server'});
- db.updateUser('user2', {pwd:'y', passwordDigestor: 'client'});
- assert(db.auth('user1', 'y'));
- assert(db.auth('user2', 'y'));
-
-}(db)); \ No newline at end of file
diff --git a/jstests/validate_cmd_ns.js b/jstests/validate_cmd_ns.js
deleted file mode 100644
index b13a0d98159..00000000000
--- a/jstests/validate_cmd_ns.js
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Tests that query against the $cmd namespace will error out when the request has
- * a number to return value other than 1 or -1. Note that users cannot have
- * collections named $cmd since $ is an illegal character.
- */
-
-// Note: _exec gives you the raw response from the server.
-var res = db.$cmd.find({ whatsmyuri: 1 })._exec().next();
-assert(res.$err != null);
-assert(res.$err.indexOf('bad numberToReturn') > -1);
-
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(0)._exec().next();
-assert(res.$err != null);
-assert(res.$err.indexOf('bad numberToReturn') > -1);
-
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(-2)._exec().next();
-assert(res.$err != null);
-assert(res.$err.indexOf('bad numberToReturn') > -1);
-
-var res = db.$cmd.find({ whatsmyuri: 1 }).limit(1).next();
-assert(res.ok);
-
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(-1).next();
-assert(res.ok);
-
diff --git a/jstests/validate_user_documents.js b/jstests/validate_user_documents.js
deleted file mode 100644
index 825e1e7de11..00000000000
--- a/jstests/validate_user_documents.js
+++ /dev/null
@@ -1,65 +0,0 @@
-// Ensure that inserts and updates of the system.users collection validate the schema of inserted
-// documents.
-
-mydb = db.getSisterDB( "validate_user_documents" );
-
-function assertGLEOK(status) {
- assert(status.ok && status.err === null,
- "Expected OK status object; found " + tojson(status));
-}
-
-function assertGLENotOK(status) {
- assert(status.ok && status.err !== null,
- "Expected not-OK status object; found " + tojson(status));
-}
-
-mydb.dropDatabase();
-mydb.dropAllUsers();
-
-//
-// Tests of the insert path
-//
-
-// V0 user document document; insert should fail.
-assert.commandFailed(mydb.runCommand({ createUser:1,
- user: "spencer",
- pwd: "password",
- readOnly: true }));
-
-// V1 user document; insert should fail.
-assert.commandFailed(mydb.runCommand({ createUser:1,
- user: "spencer",
- userSource: "test2",
- roles: ["dbAdmin"] }));
-
-// Valid V2 user document; insert should succeed.
-assert.commandWorked(mydb.runCommand({ createUser: "spencer",
- pwd: "password",
- roles: ["dbAdmin"] }));
-
-// Valid V2 user document; insert should succeed.
-assert.commandWorked(mydb.runCommand({ createUser: "andy",
- pwd: "password",
- roles: [{role: "dbAdmin",
- db: "validate_user_documents",
- hasRole: true,
- canDelegate: false}] }));
-
-// Non-existent role; insert should fail
-assert.commandFailed(mydb.runCommand({ createUser: "bob",
- pwd: "password",
- roles: ["fakeRole123"] }));
-
-//
-// Tests of the update path
-//
-
-// Update a document in a legal way, expect success.
-assert.commandWorked(mydb.runCommand({updateUser: 'spencer', roles: ['read']}));
-
-// Update a document in a way that is illegal, expect failure.
-assert.commandFailed(mydb.runCommand({updateUser: 'spencer', readOnly: true}));
-assert.commandFailed(mydb.runCommand({updateUser: 'spencer', pwd: ""}));
-assert.commandFailed(mydb.runCommand({updateUser: 'spencer', roles: ['fakeRole123']}));
-
-mydb.dropDatabase();
diff --git a/jstests/verify_update_mods.js b/jstests/verify_update_mods.js
deleted file mode 100644
index b31130ec6eb..00000000000
--- a/jstests/verify_update_mods.js
+++ /dev/null
@@ -1,82 +0,0 @@
-// Verify update mods exist
-t = db.update_mods;
-t.drop();
-
-t.save({_id:1});
-t.update({}, {$set:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$unset:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$inc:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$mul:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$push:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$pushAll:{a:[1]}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$addToSet:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$pull:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$pop:{a:true}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$rename:{a:"b"}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$bit:{a:{and:NumberLong(1)}}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-// SERVER-3223 test $bit can do an upsert
-t.update({_id:1}, {$bit:{a:{and:NumberLong(3)}}}, true);
-assert.eq(t.findOne({_id:1}).a, NumberLong(0), "$bit upsert with and");
-t.update({_id:2}, {$bit:{b:{or:NumberLong(3)}}}, true);
-assert.eq(t.findOne({_id:2}).b, NumberLong(3), "$bit upsert with or (long)");
-t.update({_id:3}, {$bit:{"c.d":{or:NumberInt(3)}}}, true);
-assert.eq(t.findOne({_id:3}).c.d, NumberInt(3), "$bit upsert with or (int)");
-t.remove({});
-
-t.save({_id:1});
-t.update({}, {$currentDate:{a:true}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$max:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
-
-t.save({_id:1});
-t.update({}, {$min:{a:1}})
-assert.automsg( "!db.getLastError()" );
-t.remove({})
diff --git a/jstests/where1.js b/jstests/where1.js
deleted file mode 100644
index 7ff20a53620..00000000000
--- a/jstests/where1.js
+++ /dev/null
@@ -1,28 +0,0 @@
-
-t = db.getCollection( "where1" );
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
-
-assert.eq( 1 , t.find( function(){ return this.a == 2; } ).length() , "A" );
-
-assert.eq( 1 , t.find( { $where : "return this.a == 2" } ).toArray().length , "B" );
-assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "C" );
-
-assert.eq( 1 , t.find( "this.a == 2" ).toArray().length , "D" );
-
-// SERVER-12117
-// positional $ projection should fail on a $where query
-assert.throws( function() { t.find( { $where : "return this.a;" }, { 'a.$' : 1 } ).itcount(); } );
-
-// SERVER-12439: $where must be top-level
-assert.throws( function() { t.find( { a: 1, b: { $where : "this.a;" } } ).itcount(); } );
-assert.throws( function() { t.find( { a: { $where : "this.a;" } } ).itcount(); } );
-assert.throws( function() {
- t.find( { a: { $elemMatch : { $where : "this.a;" } } } ).itcount();
-} );
-assert.throws( function() {
- t.find( { a: 3, "b.c": { $where : "this.a;" } } ).itcount();
-} );
diff --git a/jstests/where2.js b/jstests/where2.js
deleted file mode 100644
index 9262b3076b3..00000000000
--- a/jstests/where2.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.getCollection( "where2" );
-t.drop();
-
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
-
-assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "A" );
-assert.eq( 1 , t.find( { $where : "\nthis.a == 2" } ).toArray().length , "B" );
diff --git a/jstests/where3.js b/jstests/where3.js
deleted file mode 100644
index c062ed11513..00000000000
--- a/jstests/where3.js
+++ /dev/null
@@ -1,10 +0,0 @@
-
-t = db.where3;
-t.drop()
-
-t.save( { returned_date : 5 } );
-t.save( { returned_date : 6 } );
-
-assert.eq( 1 , t.find( function(){ return this.returned_date == 5; } ).count() , "A" );
-assert.eq( 1 , t.find( { $where : "return this.returned_date == 5;" } ).count() , "B" );
-assert.eq( 1 , t.find( { $where : "this.returned_date == 5;" } ).count() , "C" );
diff --git a/jstests/where4.js b/jstests/where4.js
deleted file mode 100644
index 61ec3771bed..00000000000
--- a/jstests/where4.js
+++ /dev/null
@@ -1,27 +0,0 @@
-
-db.where4.drop();
-
-db.system.js.insert( { _id : "w4" , value : "5" } )
-
-db.where4.insert( { x : 1 , y : 1 } )
-db.where4.insert( { x : 2 , y : 1 } )
-
-db.where4.update( { $where : function() { return this.x == 1; } } ,
- { $inc : { y : 1 } } , false , true );
-
-
-assert.eq( 2 , db.where4.findOne( { x : 1 } ).y )
-assert.eq( 1 , db.where4.findOne( { x : 2 } ).y )
-
-// Test that where queries work with stored javascript
-db.system.js.save( { _id : "where4_addOne" , value : function(x) { return x + 1; } } )
-
-db.where4.update( { $where : "where4_addOne(this.x) == 2" } ,
- { $inc : { y : 1 } } , false , true );
-
-assert.eq( 3 , db.where4.findOne( { x : 1 } ).y )
-assert.eq( 1 , db.where4.findOne( { x : 2 } ).y )
-
-db.system.js.remove( { _id : "where4_equalsOne" } )
-
-db.system.js.remove( { _id : "w4" } )