Diffstat (limited to 'jstests/core')
-rw-r--r--  jstests/core/all.js | 47
-rw-r--r--  jstests/core/all2.js | 86
-rw-r--r--  jstests/core/all3.js | 28
-rw-r--r--  jstests/core/all4.js | 28
-rw-r--r--  jstests/core/all5.js | 28
-rw-r--r--  jstests/core/and.js | 85
-rw-r--r--  jstests/core/and2.js | 27
-rw-r--r--  jstests/core/and3.js | 67
-rw-r--r--  jstests/core/andor.js | 99
-rw-r--r--  jstests/core/apitest_db.js | 77
-rw-r--r--  jstests/core/apitest_dbcollection.js | 115
-rw-r--r--  jstests/core/apply_ops1.js | 66
-rw-r--r--  jstests/core/apply_ops2.js | 71
-rw-r--r--  jstests/core/array1.js | 14
-rw-r--r--  jstests/core/array3.js | 8
-rw-r--r--  jstests/core/array4.js | 30
-rw-r--r--  jstests/core/array_match1.js | 31
-rw-r--r--  jstests/core/array_match2.js | 20
-rw-r--r--  jstests/core/array_match3.js | 13
-rw-r--r--  jstests/core/array_match4.js | 30
-rw-r--r--  jstests/core/arrayfind1.js | 40
-rw-r--r--  jstests/core/arrayfind2.js | 29
-rw-r--r--  jstests/core/arrayfind3.js | 16
-rw-r--r--  jstests/core/arrayfind4.js | 22
-rw-r--r--  jstests/core/arrayfind5.js | 23
-rw-r--r--  jstests/core/arrayfind6.js | 26
-rw-r--r--  jstests/core/arrayfind7.js | 52
-rw-r--r--  jstests/core/arrayfind8.js | 175
-rw-r--r--  jstests/core/arrayfind9.js | 34
-rw-r--r--  jstests/core/arrayfinda.js | 21
-rw-r--r--  jstests/core/auth1.js | 54
-rw-r--r--  jstests/core/auth2.js | 9
-rw-r--r--  jstests/core/auth_copydb.js | 19
-rw-r--r--  jstests/core/autoid.js | 11
-rw-r--r--  jstests/core/bad_index_plugin.js | 11
-rw-r--r--  jstests/core/basic1.js | 21
-rw-r--r--  jstests/core/basic2.js | 16
-rw-r--r--  jstests/core/basic3.js | 45
-rw-r--r--  jstests/core/basic4.js | 12
-rw-r--r--  jstests/core/basic5.js | 6
-rw-r--r--  jstests/core/basic6.js | 8
-rw-r--r--  jstests/core/basic7.js | 11
-rw-r--r--  jstests/core/basic8.js | 11
-rw-r--r--  jstests/core/basic9.js | 19
-rw-r--r--  jstests/core/basica.js | 33
-rw-r--r--  jstests/core/basicb.js | 6
-rw-r--r--  jstests/core/basicc.js | 21
-rw-r--r--  jstests/core/batch_size.js | 45
-rw-r--r--  jstests/core/bench_test1.js | 37
-rw-r--r--  jstests/core/bench_test2.js | 48
-rw-r--r--  jstests/core/bench_test3.js | 27
-rw-r--r--  jstests/core/big_object1.js | 55
-rw-r--r--  jstests/core/binData.js | 14
-rw-r--r--  jstests/core/block_check_supported.js | 118
-rw-r--r--  jstests/core/bulk_insert.js | 22
-rw-r--r--  jstests/core/capped.js | 11
-rw-r--r--  jstests/core/capped1.js | 11
-rw-r--r--  jstests/core/capped2.js | 62
-rw-r--r--  jstests/core/capped3.js | 45
-rw-r--r--  jstests/core/capped5.js | 40
-rw-r--r--  jstests/core/capped6.js | 109
-rw-r--r--  jstests/core/capped7.js | 89
-rw-r--r--  jstests/core/capped8.js | 108
-rw-r--r--  jstests/core/capped9.js | 27
-rw-r--r--  jstests/core/capped_empty.js | 24
-rw-r--r--  jstests/core/capped_max.js | 29
-rw-r--r--  jstests/core/capped_server2639.js | 27
-rw-r--r--  jstests/core/capped_server7543.js | 11
-rw-r--r--  jstests/core/cappeda.js | 32
-rw-r--r--  jstests/core/check_shard_index.js | 141
-rw-r--r--  jstests/core/collmod.js | 82
-rw-r--r--  jstests/core/compact.js | 76
-rw-r--r--  jstests/core/compact2.js | 52
-rw-r--r--  jstests/core/compactPreservePadding.js | 26
-rw-r--r--  jstests/core/connection_status.js | 27
-rw-r--r--  jstests/core/connection_string_validation.js | 106
-rw-r--r--  jstests/core/constructors.js | 313
-rw-r--r--  jstests/core/copydb.js | 20
-rw-r--r--  jstests/core/count.js | 25
-rw-r--r--  jstests/core/count10.js | 59
-rw-r--r--  jstests/core/count2.js | 28
-rw-r--r--  jstests/core/count3.js | 26
-rw-r--r--  jstests/core/count4.js | 17
-rw-r--r--  jstests/core/count5.js | 30
-rw-r--r--  jstests/core/count6.js | 61
-rw-r--r--  jstests/core/count7.js | 25
-rw-r--r--  jstests/core/count9.js | 28
-rw-r--r--  jstests/core/count_hint.js | 20
-rw-r--r--  jstests/core/counta.js | 14
-rw-r--r--  jstests/core/countb.js | 11
-rw-r--r--  jstests/core/countc.js | 124
-rw-r--r--  jstests/core/coveredIndex1.js | 64
-rw-r--r--  jstests/core/coveredIndex2.js | 18
-rw-r--r--  jstests/core/coveredIndex3.js | 54
-rw-r--r--  jstests/core/coveredIndex4.js | 40
-rw-r--r--  jstests/core/coveredIndex5.js | 70
-rw-r--r--  jstests/core/covered_index_compound_1.js | 45
-rw-r--r--  jstests/core/covered_index_geo_1.js | 18
-rw-r--r--  jstests/core/covered_index_geo_2.js | 22
-rw-r--r--  jstests/core/covered_index_negative_1.js | 61
-rw-r--r--  jstests/core/covered_index_simple_1.js | 55
-rw-r--r--  jstests/core/covered_index_simple_2.js | 43
-rw-r--r--  jstests/core/covered_index_simple_3.js | 57
-rw-r--r--  jstests/core/covered_index_simple_id.js | 42
-rw-r--r--  jstests/core/covered_index_sort_1.js | 34
-rw-r--r--  jstests/core/covered_index_sort_2.js | 17
-rw-r--r--  jstests/core/covered_index_sort_3.js | 16
-rw-r--r--  jstests/core/create_indexes.js | 83
-rw-r--r--  jstests/core/currentop.js | 79
-rw-r--r--  jstests/core/cursor1.js | 20
-rw-r--r--  jstests/core/cursor2.js | 24
-rw-r--r--  jstests/core/cursor3.js | 35
-rw-r--r--  jstests/core/cursor4.js | 47
-rw-r--r--  jstests/core/cursor5.js | 36
-rw-r--r--  jstests/core/cursor6.js | 100
-rw-r--r--  jstests/core/cursor7.js | 42
-rw-r--r--  jstests/core/cursora.js | 49
-rw-r--r--  jstests/core/cursorb.js | 17
-rw-r--r--  jstests/core/datasize.js | 35
-rw-r--r--  jstests/core/datasize2.js | 27
-rw-r--r--  jstests/core/datasize3.js | 34
-rw-r--r--  jstests/core/date1.js | 17
-rw-r--r--  jstests/core/date2.js | 13
-rw-r--r--  jstests/core/date3.js | 31
-rw-r--r--  jstests/core/db.js | 11
-rw-r--r--  jstests/core/dbadmin.js | 105
-rw-r--r--  jstests/core/dbcase.js | 27
-rw-r--r--  jstests/core/dbcase2.js | 9
-rw-r--r--  jstests/core/dbhash.js | 58
-rw-r--r--  jstests/core/dbhash2.js | 22
-rw-r--r--  jstests/core/dbref1.js | 10
-rw-r--r--  jstests/core/dbref2.js | 20
-rw-r--r--  jstests/core/dbref3.js | 45
-rw-r--r--  jstests/core/delx.js | 30
-rw-r--r--  jstests/core/depth_limit.js | 56
-rw-r--r--  jstests/core/distinct1.js | 40
-rw-r--r--  jstests/core/distinct2.js | 13
-rw-r--r--  jstests/core/distinct3.js | 35
-rw-r--r--  jstests/core/distinct_array1.js | 91
-rw-r--r--  jstests/core/distinct_index1.js | 72
-rw-r--r--  jstests/core/distinct_index2.js | 41
-rw-r--r--  jstests/core/distinct_speed1.js | 26
-rw-r--r--  jstests/core/drop.js | 25
-rw-r--r--  jstests/core/drop2.js | 52
-rw-r--r--  jstests/core/drop3.js | 25
-rw-r--r--  jstests/core/drop_index.js | 20
-rw-r--r--  jstests/core/dropdb.js | 25
-rw-r--r--  jstests/core/dropdb_race.js | 41
-rw-r--r--  jstests/core/elemMatchProjection.js | 265
-rw-r--r--  jstests/core/error2.js | 21
-rw-r--r--  jstests/core/error5.js | 8
-rw-r--r--  jstests/core/eval0.js | 8
-rw-r--r--  jstests/core/eval1.js | 17
-rw-r--r--  jstests/core/eval2.js | 28
-rw-r--r--  jstests/core/eval3.js | 21
-rw-r--r--  jstests/core/eval4.js | 23
-rw-r--r--  jstests/core/eval5.js | 23
-rw-r--r--  jstests/core/eval6.js | 15
-rw-r--r--  jstests/core/eval7.js | 3
-rw-r--r--  jstests/core/eval8.js | 19
-rw-r--r--  jstests/core/eval9.js | 22
-rw-r--r--  jstests/core/eval_nolock.js | 16
-rw-r--r--  jstests/core/evala.js | 9
-rw-r--r--  jstests/core/evalb.js | 40
-rw-r--r--  jstests/core/evalc.js | 25
-rw-r--r--  jstests/core/evald.js | 97
-rw-r--r--  jstests/core/evale.js | 5
-rw-r--r--  jstests/core/evalf.js | 27
-rw-r--r--  jstests/core/exists.js | 49
-rw-r--r--  jstests/core/exists2.js | 16
-rw-r--r--  jstests/core/exists3.js | 21
-rw-r--r--  jstests/core/exists4.js | 20
-rw-r--r--  jstests/core/exists5.js | 33
-rw-r--r--  jstests/core/exists6.js | 79
-rw-r--r--  jstests/core/exists7.js | 21
-rw-r--r--  jstests/core/exists8.js | 76
-rw-r--r--  jstests/core/exists9.js | 41
-rw-r--r--  jstests/core/existsa.js | 114
-rw-r--r--  jstests/core/existsb.js | 76
-rw-r--r--  jstests/core/explain1.js | 48
-rw-r--r--  jstests/core/explain2.js | 27
-rw-r--r--  jstests/core/explain3.js | 23
-rw-r--r--  jstests/core/explain4.js | 68
-rw-r--r--  jstests/core/explain5.js | 38
-rw-r--r--  jstests/core/explain6.js | 25
-rw-r--r--  jstests/core/explain7.js | 181
-rw-r--r--  jstests/core/explain8.js | 24
-rw-r--r--  jstests/core/explain9.js | 24
-rw-r--r--  jstests/core/explain_batch_size.js | 19
-rw-r--r--  jstests/core/explaina.js | 45
-rw-r--r--  jstests/core/explainb.js | 46
-rw-r--r--  jstests/core/extent.js | 11
-rw-r--r--  jstests/core/extent2.js | 34
-rw-r--r--  jstests/core/filemd5.js | 11
-rw-r--r--  jstests/core/find1.js | 54
-rw-r--r--  jstests/core/find2.js | 16
-rw-r--r--  jstests/core/find3.js | 10
-rw-r--r--  jstests/core/find4.js | 26
-rw-r--r--  jstests/core/find5.js | 51
-rw-r--r--  jstests/core/find6.js | 41
-rw-r--r--  jstests/core/find7.js | 8
-rw-r--r--  jstests/core/find8.js | 27
-rw-r--r--  jstests/core/find9.js | 28
-rw-r--r--  jstests/core/find_and_modify.js | 38
-rw-r--r--  jstests/core/find_and_modify2.js | 16
-rw-r--r--  jstests/core/find_and_modify3.js | 21
-rw-r--r--  jstests/core/find_and_modify4.js | 55
-rw-r--r--  jstests/core/find_and_modify_server6226.js | 7
-rw-r--r--  jstests/core/find_and_modify_server6254.js | 10
-rw-r--r--  jstests/core/find_and_modify_server6582.js | 18
-rw-r--r--  jstests/core/find_and_modify_server6588.js | 22
-rw-r--r--  jstests/core/find_and_modify_server6659.js | 7
-rw-r--r--  jstests/core/find_and_modify_server6909.js | 21
-rw-r--r--  jstests/core/find_and_modify_server6993.js | 9
-rw-r--r--  jstests/core/find_and_modify_server7660.js | 18
-rw-r--r--  jstests/core/find_and_modify_where.js | 10
-rw-r--r--  jstests/core/find_dedup.js | 35
-rw-r--r--  jstests/core/find_size.js | 26
-rw-r--r--  jstests/core/finda.js | 106
-rw-r--r--  jstests/core/fm1.js | 12
-rw-r--r--  jstests/core/fm2.js | 9
-rw-r--r--  jstests/core/fm3.js | 37
-rw-r--r--  jstests/core/fm4.js | 16
-rw-r--r--  jstests/core/fsync.js | 21
-rw-r--r--  jstests/core/fts1.js | 29
-rw-r--r--  jstests/core/fts2.js | 24
-rw-r--r--  jstests/core/fts3.js | 22
-rw-r--r--  jstests/core/fts4.js | 22
-rw-r--r--  jstests/core/fts5.js | 22
-rw-r--r--  jstests/core/fts_blog.js | 26
-rw-r--r--  jstests/core/fts_blogwild.js | 40
-rw-r--r--  jstests/core/fts_enabled.js | 5
-rw-r--r--  jstests/core/fts_explain.js | 18
-rw-r--r--  jstests/core/fts_index.js | 110
-rw-r--r--  jstests/core/fts_mix.js | 159
-rw-r--r--  jstests/core/fts_partition1.js | 23
-rw-r--r--  jstests/core/fts_partition_no_multikey.js | 13
-rw-r--r--  jstests/core/fts_phrase.js | 25
-rw-r--r--  jstests/core/fts_proj.js | 20
-rw-r--r--  jstests/core/fts_projection.js | 99
-rw-r--r--  jstests/core/fts_querylang.js | 93
-rw-r--r--  jstests/core/fts_score_sort.js | 28
-rw-r--r--  jstests/core/fts_spanish.js | 30
-rw-r--r--  jstests/core/geo1.js | 37
-rw-r--r--  jstests/core/geo10.js | 15
-rw-r--r--  jstests/core/geo2.js | 40
-rw-r--r--  jstests/core/geo3.js | 77
-rw-r--r--  jstests/core/geo4.js | 11
-rw-r--r--  jstests/core/geo5.js | 18
-rw-r--r--  jstests/core/geo6.js | 24
-rw-r--r--  jstests/core/geo7.js | 20
-rw-r--r--  jstests/core/geo8.js | 13
-rw-r--r--  jstests/core/geo9.js | 28
-rw-r--r--  jstests/core/geo_2d_explain.js | 29
-rw-r--r--  jstests/core/geo_2d_with_geojson_point.js | 20
-rw-r--r--  jstests/core/geo_allowedcomparisons.js | 95
-rw-r--r--  jstests/core/geo_array0.js | 26
-rw-r--r--  jstests/core/geo_array1.js | 38
-rw-r--r--  jstests/core/geo_array2.js | 161
-rw-r--r--  jstests/core/geo_borders.js | 162
-rw-r--r--  jstests/core/geo_box1.js | 43
-rw-r--r--  jstests/core/geo_box1_noindex.js | 32
-rw-r--r--  jstests/core/geo_box2.js | 52
-rw-r--r--  jstests/core/geo_box3.js | 36
-rw-r--r--  jstests/core/geo_center_sphere1.js | 98
-rw-r--r--  jstests/core/geo_center_sphere2.js | 160
-rw-r--r--  jstests/core/geo_circle1.js | 43
-rw-r--r--  jstests/core/geo_circle1_noindex.js | 29
-rw-r--r--  jstests/core/geo_circle2.js | 26
-rw-r--r--  jstests/core/geo_circle2a.js | 37
-rw-r--r--  jstests/core/geo_circle3.js | 28
-rw-r--r--  jstests/core/geo_circle4.js | 31
-rw-r--r--  jstests/core/geo_circle5.js | 28
-rw-r--r--  jstests/core/geo_distinct.js | 14
-rw-r--r--  jstests/core/geo_exactfetch.js | 17
-rw-r--r--  jstests/core/geo_fiddly_box.js | 46
-rw-r--r--  jstests/core/geo_fiddly_box2.js | 32
-rw-r--r--  jstests/core/geo_group.js | 37
-rw-r--r--  jstests/core/geo_haystack1.js | 59
-rw-r--r--  jstests/core/geo_haystack2.js | 60
-rw-r--r--  jstests/core/geo_haystack3.js | 28
-rw-r--r--  jstests/core/geo_invalid_polygon.js | 29
-rw-r--r--  jstests/core/geo_mapreduce.js | 56
-rw-r--r--  jstests/core/geo_mapreduce2.js | 36
-rw-r--r--  jstests/core/geo_max.js | 49
-rw-r--r--  jstests/core/geo_mindistance.js | 214
-rw-r--r--  jstests/core/geo_mindistance_boundaries.js | 124
-rw-r--r--  jstests/core/geo_multikey0.js | 26
-rw-r--r--  jstests/core/geo_multikey1.js | 19
-rw-r--r--  jstests/core/geo_multinest0.js | 60
-rw-r--r--  jstests/core/geo_multinest1.js | 36
-rw-r--r--  jstests/core/geo_near_random1.js | 12
-rw-r--r--  jstests/core/geo_near_random2.js | 21
-rw-r--r--  jstests/core/geo_nearwithin.js | 27
-rw-r--r--  jstests/core/geo_or.js | 62
-rw-r--r--  jstests/core/geo_poly_edge.js | 22
-rw-r--r--  jstests/core/geo_poly_line.js | 17
-rw-r--r--  jstests/core/geo_polygon1.js | 73
-rw-r--r--  jstests/core/geo_polygon1_noindex.js | 46
-rw-r--r--  jstests/core/geo_polygon2.js | 263
-rw-r--r--  jstests/core/geo_polygon3.js | 54
-rw-r--r--  jstests/core/geo_queryoptimizer.js | 27
-rw-r--r--  jstests/core/geo_regex0.js | 18
-rw-r--r--  jstests/core/geo_s2cursorlimitskip.js | 68
-rw-r--r--  jstests/core/geo_s2dedupnear.js | 11
-rw-r--r--  jstests/core/geo_s2descindex.js | 64
-rw-r--r--  jstests/core/geo_s2disjoint_holes.js | 81
-rw-r--r--  jstests/core/geo_s2dupe_points.js | 71
-rwxr-xr-x  jstests/core/geo_s2edgecases.js | 40
-rw-r--r--  jstests/core/geo_s2exact.js | 21
-rw-r--r--  jstests/core/geo_s2holesameasshell.js | 44
-rwxr-xr-x  jstests/core/geo_s2index.js | 114
-rwxr-xr-x  jstests/core/geo_s2indexoldformat.js | 28
-rw-r--r--  jstests/core/geo_s2indexversion1.js | 150
-rw-r--r--  jstests/core/geo_s2intersection.js | 141
-rw-r--r--  jstests/core/geo_s2largewithin.js | 45
-rw-r--r--  jstests/core/geo_s2meridian.js | 108
-rw-r--r--  jstests/core/geo_s2multi.js | 46
-rw-r--r--  jstests/core/geo_s2near.js | 84
-rw-r--r--  jstests/core/geo_s2nearComplex.js | 268
-rw-r--r--  jstests/core/geo_s2near_equator_opposite.js | 31
-rw-r--r--  jstests/core/geo_s2nearcorrect.js | 12
-rw-r--r--  jstests/core/geo_s2nearwithin.js | 41
-rw-r--r--  jstests/core/geo_s2nongeoarray.js | 26
-rwxr-xr-x  jstests/core/geo_s2nonstring.js | 22
-rw-r--r--  jstests/core/geo_s2nopoints.js | 7
-rw-r--r--  jstests/core/geo_s2oddshapes.js | 138
-rw-r--r--  jstests/core/geo_s2ordering.js | 52
-rw-r--r--  jstests/core/geo_s2overlappingpolys.js | 213
-rwxr-xr-x  jstests/core/geo_s2polywithholes.js | 46
-rw-r--r--  jstests/core/geo_s2selfintersectingpoly.js | 11
-rw-r--r--  jstests/core/geo_s2sparse.js | 113
-rw-r--r--  jstests/core/geo_s2twofields.js | 65
-rw-r--r--  jstests/core/geo_s2validindex.js | 26
-rw-r--r--  jstests/core/geo_s2within.js | 36
-rw-r--r--  jstests/core/geo_small_large.js | 158
-rw-r--r--  jstests/core/geo_sort1.js | 22
-rw-r--r--  jstests/core/geo_uniqueDocs.js | 40
-rw-r--r--  jstests/core/geo_uniqueDocs2.js | 80
-rw-r--r--  jstests/core/geo_update.js | 37
-rw-r--r--  jstests/core/geo_update1.js | 36
-rw-r--r--  jstests/core/geo_update2.js | 39
-rw-r--r--  jstests/core/geo_update_btree.js | 28
-rw-r--r--  jstests/core/geo_update_btree2.js | 71
-rw-r--r--  jstests/core/geo_update_dedup.js | 60
-rw-r--r--  jstests/core/geo_withinquery.js | 15
-rw-r--r--  jstests/core/geoa.js | 12
-rw-r--r--  jstests/core/geob.js | 35
-rw-r--r--  jstests/core/geoc.js | 24
-rw-r--r--  jstests/core/geod.js | 14
-rw-r--r--  jstests/core/geoe.js | 32
-rw-r--r--  jstests/core/geof.js | 19
-rw-r--r--  jstests/core/geonear_cmd_input_validation.js | 119
-rw-r--r--  jstests/core/geonear_validate.js | 8
-rw-r--r--  jstests/core/getlog1.js | 24
-rw-r--r--  jstests/core/getlog2.js | 46
-rw-r--r--  jstests/core/group1.js | 64
-rw-r--r--  jstests/core/group2.js | 38
-rw-r--r--  jstests/core/group3.js | 43
-rw-r--r--  jstests/core/group4.js | 45
-rw-r--r--  jstests/core/group5.js | 38
-rw-r--r--  jstests/core/group6.js | 32
-rw-r--r--  jstests/core/group7.js | 47
-rw-r--r--  jstests/core/group_empty.js | 8
-rw-r--r--  jstests/core/grow_hash_table.js | 42
-rw-r--r--  jstests/core/hashindex1.js | 94
-rw-r--r--  jstests/core/hashtest1.js | 78
-rw-r--r--  jstests/core/hint1.js | 16
-rw-r--r--  jstests/core/hostinfo.js | 33
-rw-r--r--  jstests/core/id1.js | 16
-rw-r--r--  jstests/core/idhack.js | 43
-rw-r--r--  jstests/core/in.js | 24
-rw-r--r--  jstests/core/in2.js | 33
-rw-r--r--  jstests/core/in3.js | 11
-rw-r--r--  jstests/core/in4.js | 42
-rw-r--r--  jstests/core/in5.js | 56
-rw-r--r--  jstests/core/in6.js | 13
-rw-r--r--  jstests/core/in8.js | 23
-rw-r--r--  jstests/core/in9.js | 35
-rw-r--r--  jstests/core/ina.js | 15
-rw-r--r--  jstests/core/inb.js | 19
-rw-r--r--  jstests/core/inc-SERVER-7446.js | 39
-rw-r--r--  jstests/core/inc1.js | 32
-rw-r--r--  jstests/core/inc2.js | 22
-rw-r--r--  jstests/core/inc3.js | 16
-rw-r--r--  jstests/core/index1.js | 24
-rw-r--r--  jstests/core/index10.js | 32
-rw-r--r--  jstests/core/index13.js | 147
-rw-r--r--  jstests/core/index2.js | 40
-rw-r--r--  jstests/core/index3.js | 16
-rw-r--r--  jstests/core/index4.js | 33
-rw-r--r--  jstests/core/index5.js | 24
-rw-r--r--  jstests/core/index6.js | 8
-rw-r--r--  jstests/core/index7.js | 67
-rw-r--r--  jstests/core/index8.js | 62
-rw-r--r--  jstests/core/index9.js | 25
-rw-r--r--  jstests/core/indexOtherNamespace.js | 27
-rw-r--r--  jstests/core/indexStatsCommand.js | 88
-rw-r--r--  jstests/core/index_arr1.js | 23
-rw-r--r--  jstests/core/index_arr2.js | 51
-rw-r--r--  jstests/core/index_big1.js | 38
-rwxr-xr-x  jstests/core/index_bigkeys.js | 59
-rw-r--r--  jstests/core/index_bigkeys_update.js | 18
-rw-r--r--  jstests/core/index_bounds_number_edge_cases.js | 50
-rw-r--r--  jstests/core/index_check1.js | 31
-rw-r--r--  jstests/core/index_check2.js | 41
-rw-r--r--  jstests/core/index_check3.js | 63
-rw-r--r--  jstests/core/index_check5.js | 17
-rw-r--r--  jstests/core/index_check6.js | 82
-rw-r--r--  jstests/core/index_check7.js | 15
-rw-r--r--  jstests/core/index_check8.js | 21
-rw-r--r--  jstests/core/index_diag.js | 50
-rw-r--r--  jstests/core/index_elemmatch1.js | 43
-rw-r--r--  jstests/core/index_filter_commands.js | 167
-rw-r--r--  jstests/core/index_many.js | 51
-rw-r--r--  jstests/core/index_many2.js | 31
-rw-r--r--  jstests/core/index_sparse1.js | 45
-rw-r--r--  jstests/core/index_sparse2.js | 23
-rw-r--r--  jstests/core/indexa.js | 22
-rw-r--r--  jstests/core/indexapi.js | 48
-rw-r--r--  jstests/core/indexb.js | 29
-rw-r--r--  jstests/core/indexc.js | 20
-rw-r--r--  jstests/core/indexd.js | 10
-rw-r--r--  jstests/core/indexe.js | 22
-rw-r--r--  jstests/core/indexes_on_indexes.js | 19
-rw-r--r--  jstests/core/indexf.js | 13
-rw-r--r--  jstests/core/indexg.js | 13
-rw-r--r--  jstests/core/indexh.js | 41
-rw-r--r--  jstests/core/indexi.js | 34
-rw-r--r--  jstests/core/indexj.js | 44
-rw-r--r--  jstests/core/indexl.js | 27
-rw-r--r--  jstests/core/indexm.js | 38
-rw-r--r--  jstests/core/indexn.js | 49
-rw-r--r--  jstests/core/indexo.js | 15
-rw-r--r--  jstests/core/indexp.js | 23
-rw-r--r--  jstests/core/indexq.js | 20
-rw-r--r--  jstests/core/indexr.js | 44
-rw-r--r--  jstests/core/indexs.js | 21
-rw-r--r--  jstests/core/indext.js | 21
-rw-r--r--  jstests/core/indexu.js | 108
-rw-r--r--  jstests/core/indexv.js | 18
-rw-r--r--  jstests/core/indexw.js | 15
-rw-r--r--  jstests/core/insert1.js | 34
-rw-r--r--  jstests/core/insert2.js | 13
-rw-r--r--  jstests/core/insert_id_undefined.js | 5
-rw-r--r--  jstests/core/insert_illegal_doc.js | 22
-rw-r--r--  jstests/core/insert_long_index_key.js | 10
-rw-r--r--  jstests/core/ismaster.js | 28
448 files changed, 19250 insertions(+), 0 deletions(-)
diff --git a/jstests/core/all.js b/jstests/core/all.js
new file mode 100644
index 00000000000..50087882ad1
--- /dev/null
+++ b/jstests/core/all.js
@@ -0,0 +1,47 @@
+t = db.jstests_all;
+t.drop();
+
+doTest = function() {
+
+ t.save( { a:[ 1,2,3 ] } );
+ t.save( { a:[ 1,2,4 ] } );
+ t.save( { a:[ 1,8,5 ] } );
+ t.save( { a:[ 1,8,6 ] } );
+ t.save( { a:[ 1,9,7 ] } );
+ t.save( { a : [] } );
+ t.save( {} );
+
+ assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() );
+
+ t.save( { a: [ 2, 2 ] } );
+ assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() );
+
+ t.save( { a: [ [ 2 ] ] } );
+ assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() );
+
+ t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() );
+
+ t.save( { a: { b: [ 20, 30 ] } } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() );
+
+
+ assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" );
+ assert.eq( 0 , t.find( { a : { $all : [19] } } ).count() , "E2" );
+ assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" );
+
+
+}
+
+doTest();
+t.drop();
+t.ensureIndex( {a:1} );
+doTest();
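
[Editor's note] For readers skimming the test above: { $all : [ x, y ] } matches
documents whose array field contains every listed value, in any order; for
scalar values it behaves like an $and of single-value matches. A minimal sketch
of that equivalence (the collection name is hypothetical, not part of this patch):

    var c = db.all_example; // hypothetical scratch collection
    c.drop();
    c.save( { a : [ 1, 2, 3 ] } );
    // $all:[2,1] and $and:[{a:2},{a:1}] select the same documents here.
    assert.eq( c.count( { a : { $all : [ 2, 1 ] } } ),
               c.count( { $and : [ { a : 2 }, { a : 1 } ] } ) );
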
diff --git a/jstests/core/all2.js b/jstests/core/all2.js
new file mode 100644
index 00000000000..64372ca5e97
--- /dev/null
+++ b/jstests/core/all2.js
@@ -0,0 +1,86 @@
+
+t = db.all2;
+t.drop();
+
+t.save( { a : [ { x : 1 } , { x : 2 } ] } )
+t.save( { a : [ { x : 2 } , { x : 3 } ] } )
+t.save( { a : [ { x : 3 } , { x : 4 } ] } )
+
+state = "no index";
+
+function check( n , q , e ){
+ assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state );
+ assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount " + state );
+}
+
+check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+
+t.ensureIndex( { "a.x" : 1 } );
+state = "index";
+
+check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+
+// --- more
+
+t.drop();
+
+t.save( { a : [ 1 , 2 ] } )
+t.save( { a : [ 2 , 3 ] } )
+t.save( { a : [ 3 , 4 ] } )
+
+state = "more no index";
+
+check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+
+t.ensureIndex( { "a" : 1 } );
+state = "more index";
+
+check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+
+
+// more 2
+
+state = "more 2"
+
+t.drop();
+t.save( { name : [ "harry","jack","tom" ] } )
+check( 0 , { name : { $all : ["harry","john"] } } , "A" );
+t.ensureIndex( { name : 1 } );
+check( 0 , { name : { $all : ["harry","john"] } } , "B" );
+
diff --git a/jstests/core/all3.js b/jstests/core/all3.js
new file mode 100644
index 00000000000..b7a05321bbf
--- /dev/null
+++ b/jstests/core/all3.js
@@ -0,0 +1,28 @@
+// Check that $all matching null is consistent with $in - SERVER-3820
+
+t = db.jstests_all3;
+t.drop();
+
+t.save({});
+
+assert.eq( 1, t.count( {foo:{$in:[null]}} ) );
+assert.eq( 1, t.count( {foo:{$all:[null]}} ) );
+assert.eq( 0, t.count( {foo:{$not:{$all:[null]}}} ) );
+assert.eq( 0, t.count( {foo:{$not:{$in:[null]}}} ) );
+
+t.remove({});
+t.save({foo:1});
+assert.eq( 0, t.count( {foo:{$in:[null]}} ) );
+assert.eq( 0, t.count( {foo:{$all:[null]}} ) );
+assert.eq( 1, t.count( {foo:{$not:{$in:[null]}}} ) );
+assert.eq( 1, t.count( {foo:{$not:{$all:[null]}}} ) );
+
+t.remove({});
+t.save( {foo:[0,1]} );
+assert.eq( 1, t.count( {foo:{$in:[[0,1]]}} ) );
+assert.eq( 1, t.count( {foo:{$all:[[0,1]]}} ) );
+
+t.remove({});
+t.save( {foo:[]} );
+assert.eq( 1, t.count( {foo:{$in:[[]]}} ) );
+assert.eq( 1, t.count( {foo:{$all:[[]]}} ) );
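
[Editor's note] The counts in all3.js rest on a rule worth spelling out: matching
a field against null also matches documents where the field is missing entirely.
A minimal sketch (hypothetical collection name):

    var c = db.all3_example; // hypothetical
    c.drop();
    c.save( {} );                // foo missing
    c.save( { foo : null } );    // foo explicitly null
    c.save( { foo : 1 } );
    assert.eq( 2, c.count( { foo : { $in : [ null ] } } ) ); // missing + null
    assert.eq( 1, c.count( { foo : { $in : [ 1 ] } } ) );
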
diff --git a/jstests/core/all4.js b/jstests/core/all4.js
new file mode 100644
index 00000000000..109795754bc
--- /dev/null
+++ b/jstests/core/all4.js
@@ -0,0 +1,28 @@
+// Test $all/$elemMatch with missing field - SERVER-4492
+
+t = db.jstests_all4;
+t.drop();
+
+function checkQuery( query, val ) {
+ assert.eq( val, t.count(query) );
+ assert.eq( val, t.find(query).itcount() );
+}
+
+checkQuery( {a:{$all:[]}}, 0 );
+checkQuery( {a:{$all:[1]}}, 0 );
+checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+
+t.save({});
+checkQuery( {a:{$all:[]}}, 0 );
+checkQuery( {a:{$all:[1]}}, 0 );
+checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+
+t.save({a:1});
+checkQuery( {a:{$all:[]}}, 0 );
+checkQuery( {a:{$all:[1]}}, 1 );
+checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+
+t.save({a:[{b:1}]});
+checkQuery( {a:{$all:[]}}, 0 );
+checkQuery( {a:{$all:[1]}}, 1 );
+checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 1 );
diff --git a/jstests/core/all5.js b/jstests/core/all5.js
new file mode 100644
index 00000000000..a5d9e312292
--- /dev/null
+++ b/jstests/core/all5.js
@@ -0,0 +1,28 @@
+// Test $all/$elemMatch/null matching - SERVER-4517
+
+t = db.jstests_all5;
+t.drop();
+
+function checkMatch( doc ) {
+ t.drop();
+ t.save( doc );
+ assert.eq( 1, t.count( {a:{$elemMatch:{b:null}}} ) );
+ assert.eq( 1, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
+}
+
+function checkNoMatch( doc ) {
+ t.drop();
+ t.save( doc );
+ assert.eq( 0, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
+}
+
+checkNoMatch( {} );
+checkNoMatch( {a:1} );
+
+checkNoMatch( {a:[]} );
+checkNoMatch( {a:[1]} );
+
+checkMatch( {a:[{}]} );
+checkMatch( {a:[{c:1}]} );
+checkMatch( {a:[{b:null}]} );
+checkNoMatch( {a:[{b:1}]} );
diff --git a/jstests/core/and.js b/jstests/core/and.js
new file mode 100644
index 00000000000..4d8c2cd7d49
--- /dev/null
+++ b/jstests/core/and.js
@@ -0,0 +1,85 @@
+// Some tests for $and SERVER-1089
+
+t = db.jstests_and;
+t.drop();
+
+t.save( {a:[1,2]} );
+t.save( {a:'foo'} );
+
+function check() {
+ // $and must be an array
+ assert.throws( function() { t.find( {$and:4} ).toArray() } );
+ // $and array must not be empty
+ assert.throws( function() { t.find( {$and:[]} ).toArray() } );
+ // $and elements must be objects
+ assert.throws( function() { t.find( {$and:[4]} ).toArray() } );
+
+ // Check equality matching
+ assert.eq( 1, t.count( {$and:[{a:1}]} ) );
+ assert.eq( 1, t.count( {$and:[{a:1},{a:2}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:1},{a:3}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:1},{a:2},{a:3}]} ) );
+ assert.eq( 1, t.count( {$and:[{a:'foo'}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:'foo'},{a:'g'}]} ) );
+
+ // Check $and with other fields
+ assert.eq( 1, t.count( {a:2,$and:[{a:1}]} ) );
+ assert.eq( 0, t.count( {a:0,$and:[{a:1}]} ) );
+ assert.eq( 0, t.count( {a:2,$and:[{a:0}]} ) );
+ assert.eq( 1, t.count( {a:1,$and:[{a:1}]} ) );
+
+ // Check recursive $and
+ assert.eq( 1, t.count( {a:2,$and:[{$and:[{a:1}]}]} ) );
+ assert.eq( 0, t.count( {a:0,$and:[{$and:[{a:1}]}]} ) );
+ assert.eq( 0, t.count( {a:2,$and:[{$and:[{a:0}]}]} ) );
+ assert.eq( 1, t.count( {a:1,$and:[{$and:[{a:1}]}]} ) );
+
+ assert.eq( 1, t.count( {$and:[{a:2},{$and:[{a:1}]}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:0},{$and:[{a:1}]}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:2},{$and:[{a:0}]}]} ) );
+ assert.eq( 1, t.count( {$and:[{a:1},{$and:[{a:1}]}]} ) );
+
+ // Some of these cases were more important with an alternative $and syntax
+ // that was rejected, but they're still valid checks.
+
+ // Check simple regex
+ assert.eq( 1, t.count( {$and:[{a:/foo/}]} ) );
+ // Check multiple regexes
+ assert.eq( 1, t.count( {$and:[{a:/foo/},{a:/^f/},{a:/o/}]} ) );
+ assert.eq( 0, t.count( {$and:[{a:/foo/},{a:/^g/}]} ) );
+ assert.eq( 1, t.count( {$and:[{a:/^f/},{a:'foo'}]} ) );
+ // Check regex flags
+ assert.eq( 0, t.count( {$and:[{a:/^F/},{a:'foo'}]} ) );
+ assert.eq( 1, t.count( {$and:[{a:/^F/i},{a:'foo'}]} ) );
+
+
+
+ // Check operator
+ assert.eq( 1, t.count( {$and:[{a:{$gt:0}}]} ) );
+
+ // Check where
+ assert.eq( 1, t.count( {a:'foo',$where:'this.a=="foo"'} ) );
+ assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
+ assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
+
+ // Nested where ok
+ assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}]}) );
+ assert.eq( 1, t.count({$and:[{a:'foo'},{$where:'this.a=="foo"'}]}) );
+ assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}],$where:'this.a=="foo"'}) );
+}
+
+check();
+t.ensureIndex( {a:1} );
+check();
+var e = t.find( {$and:[{a:1}]} ).explain();
+assert.eq( 'BtreeCursor a_1', e.cursor );
+assert.eq( [[1,1]], e.indexBounds.a );
+
+function checkBounds( query ) {
+ var e = t.find( query ).explain(true);
+ printjson(e);
+ assert.eq( 1, e.n );
+}
+
+checkBounds( {a:1,$and:[{a:2}]} );
+checkBounds( {$and:[{a:1},{a:2}]} );
diff --git a/jstests/core/and2.js b/jstests/core/and2.js
new file mode 100644
index 00000000000..0bd13eb7a1d
--- /dev/null
+++ b/jstests/core/and2.js
@@ -0,0 +1,27 @@
+// Test dollar sign operator with $and SERVER-1089
+
+t = db.jstests_and2;
+
+t.drop();
+t.save( {a:[1,2]} );
+t.update( {a:1}, {$set:{'a.$':5}} );
+assert.eq( [5,2], t.findOne().a );
+
+t.drop();
+t.save( {a:[1,2]} );
+t.update( {$and:[{a:1}]}, {$set:{'a.$':5}} );
+assert.eq( [5,2], t.findOne().a );
+
+// Make sure dollar sign operator with $and is consistent with no $and case
+t.drop();
+t.save( {a:[1,2],b:[3,4]} );
+t.update( {a:1,b:4}, {$set:{'a.$':5}} );
+// Probably not what we want here, just trying to make sure $and is consistent
+assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] );
+
+// Make sure dollar sign operator with $and is consistent with no $and case
+t.drop();
+t.save( {a:[1,2],b:[3,4]} );
+t.update( {a:1,$and:[{b:4}]}, {$set:{'a.$':5}} );
+// Probably not what we want here, just trying to make sure $and is consistent
+assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] );
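
[Editor's note] Why a.$ updated index 1 above: the positional operator resolves
to the position of the first array element matched while evaluating the query,
and here that position comes from matching b:4 (index 1), not a:1 (index 0).
A sketch of the same behavior in isolation (hypothetical collection name):

    var c = db.and2_example; // hypothetical
    c.drop();
    c.save( { a : [ 1, 2 ], b : [ 3, 4 ] } );
    c.update( { a : 1, b : 4 }, { $set : { 'a.$' : 5 } } );
    assert.eq( [ 1, 5 ], c.findOne().a ); // position taken from the b:4 match
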
diff --git a/jstests/core/and3.js b/jstests/core/and3.js
new file mode 100644
index 00000000000..036c63c02f0
--- /dev/null
+++ b/jstests/core/and3.js
@@ -0,0 +1,67 @@
+// Check key match with sub matchers - part of SERVER-3192
+
+t = db.jstests_and3;
+t.drop();
+
+t.save( {a:1} );
+t.save( {a:'foo'} );
+
+t.ensureIndex( {a:1} );
+
+function checkScanMatch( query, nscannedObjects, n ) {
+ var e = t.find( query ).hint( {a:1} ).explain();
+ assert.eq( nscannedObjects, e.nscannedObjects );
+ assert.eq( n, e.n );
+}
+
+checkScanMatch( {a:/o/}, 1, 1 );
+checkScanMatch( {a:/a/}, 0, 0 );
+checkScanMatch( {a:{$not:/o/}}, 2, 1 );
+checkScanMatch( {a:{$not:/a/}}, 2, 2 );
+
+checkScanMatch( {$and:[{a:/o/}]}, 1, 1 );
+checkScanMatch( {$and:[{a:/a/}]}, 0, 0 );
+checkScanMatch( {$and:[{a:{$not:/o/}}]}, 2, 1 );
+checkScanMatch( {$and:[{a:{$not:/a/}}]}, 2, 2 );
+checkScanMatch( {$and:[{a:/o/},{a:{$not:/o/}}]}, 1, 0 );
+checkScanMatch( {$and:[{a:/o/},{a:{$not:/a/}}]}, 1, 1 );
+checkScanMatch( {$or:[{a:/o/}]}, 1, 1 );
+checkScanMatch( {$or:[{a:/a/}]}, 0, 0 );
+checkScanMatch( {$nor:[{a:/o/}]}, 2, 1 );
+checkScanMatch( {$nor:[{a:/a/}]}, 2, 2 );
+
+checkScanMatch( {$and:[{$and:[{a:/o/}]}]}, 1, 1 );
+checkScanMatch( {$and:[{$and:[{a:/a/}]}]}, 0, 0 );
+checkScanMatch( {$and:[{$and:[{a:{$not:/o/}}]}]}, 2, 1 );
+checkScanMatch( {$and:[{$and:[{a:{$not:/a/}}]}]}, 2, 2 );
+checkScanMatch( {$and:[{$or:[{a:/o/}]}]}, 1, 1 );
+checkScanMatch( {$and:[{$or:[{a:/a/}]}]}, 0, 0 );
+checkScanMatch( {$or:[{a:{$not:/o/}}]}, 2, 1 );
+checkScanMatch( {$and:[{$or:[{a:{$not:/o/}}]}]}, 2, 1 );
+checkScanMatch( {$and:[{$or:[{a:{$not:/a/}}]}]}, 2, 2 );
+checkScanMatch( {$and:[{$nor:[{a:/o/}]}]}, 2, 1 );
+checkScanMatch( {$and:[{$nor:[{a:/a/}]}]}, 2, 2 );
+
+checkScanMatch( {$where:'this.a==1'}, 2, 1 );
+checkScanMatch( {$and:[{$where:'this.a==1'}]}, 2, 1 );
+
+checkScanMatch( {a:1,$where:'this.a==1'}, 1, 1 );
+checkScanMatch( {a:1,$and:[{$where:'this.a==1'}]}, 1, 1 );
+checkScanMatch( {$and:[{a:1},{$where:'this.a==1'}]}, 1, 1 );
+checkScanMatch( {$and:[{a:1,$where:'this.a==1'}]}, 1, 1 );
+checkScanMatch( {a:1,$and:[{a:1},{a:1,$where:'this.a==1'}]}, 1, 1 );
+
+function checkImpossibleMatch( query ) {
+ var e = t.find( query ).explain();
+ assert.eq( 0, e.n );
+ // The explain output should include the indexBounds field.
+ // The presence of the indexBounds field indicates that the
+ // query can make use of an index.
+ assert('indexBounds' in e, 'index bounds are missing');
+}
+
+// With a single key index, all bounds are utilized.
+assert.eq( [[1,1]], t.find( {$and:[{a:1}]} ).explain().indexBounds.a );
+assert.eq( [[1,1]], t.find( {a:1,$and:[{a:1}]} ).explain().indexBounds.a );
+checkImpossibleMatch( {a:1,$and:[{a:2}]} );
+checkImpossibleMatch( {$and:[{a:1},{a:2}]} );
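
[Editor's note] checkScanMatch above reads two fields of the legacy (pre-2.8)
explain format: n is the number of documents returned and nscannedObjects is the
number of documents examined; comparing them shows how much filtering the index
bounds did versus the matcher. A sketch of inspecting them directly
(hypothetical collection name):

    var c = db.and3_example; // hypothetical
    c.drop();
    c.save( { a : 'foo' } );
    c.ensureIndex( { a : 1 } );
    var e = c.find( { a : /o/ } ).hint( { a : 1 } ).explain();
    printjson( { returned : e.n, examined : e.nscannedObjects } );
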
diff --git a/jstests/core/andor.js b/jstests/core/andor.js
new file mode 100644
index 00000000000..f433ade8228
--- /dev/null
+++ b/jstests/core/andor.js
@@ -0,0 +1,99 @@
+// SERVER-1089 Test and/or nesting
+
+t = db.jstests_andor;
+t.drop();
+
+// Asserts that the given query matches exactly one document.
+function ok( q ) {
+ assert.eq( 1, t.find( q ).itcount() );
+}
+
+t.save( {a:1} );
+
+test = function() {
+
+ ok( {a:1} );
+
+ ok( {$and:[{a:1}]} );
+ ok( {$or:[{a:1}]} );
+
+ ok( {$and:[{$and:[{a:1}]}]} );
+ ok( {$or:[{$or:[{a:1}]}]} );
+
+ ok( {$and:[{$or:[{a:1}]}]} );
+ ok( {$or:[{$and:[{a:1}]}]} );
+
+ ok( {$and:[{$and:[{$or:[{a:1}]}]}]} );
+ ok( {$and:[{$or:[{$and:[{a:1}]}]}]} );
+ ok( {$or:[{$and:[{$and:[{a:1}]}]}]} );
+
+ ok( {$or:[{$and:[{$or:[{a:1}]}]}]} );
+
+ // now test $nor
+
+ ok( {$and:[{a:1}]} );
+ ok( {$nor:[{a:2}]} );
+
+ ok( {$and:[{$and:[{a:1}]}]} );
+ ok( {$nor:[{$nor:[{a:1}]}]} );
+
+ ok( {$and:[{$nor:[{a:2}]}]} );
+ ok( {$nor:[{$and:[{a:2}]}]} );
+
+ ok( {$and:[{$and:[{$nor:[{a:2}]}]}]} );
+ ok( {$and:[{$nor:[{$and:[{a:2}]}]}]} );
+ ok( {$nor:[{$and:[{$and:[{a:2}]}]}]} );
+
+ ok( {$nor:[{$and:[{$nor:[{a:1}]}]}]} );
+
+}
+
+test();
+t.ensureIndex( {a:1} );
+test();
+
+// Now run the same nesting tests with an inequality ($ne) as the base match.
+
+test = function() {
+
+ ok( {a:{$ne:2}} );
+
+ ok( {$and:[{a:{$ne:2}}]} );
+ ok( {$or:[{a:{$ne:2}}]} );
+
+ ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
+ ok( {$or:[{$or:[{a:{$ne:2}}]}]} );
+
+ ok( {$and:[{$or:[{a:{$ne:2}}]}]} );
+ ok( {$or:[{$and:[{a:{$ne:2}}]}]} );
+
+ ok( {$and:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
+ ok( {$and:[{$or:[{$and:[{a:{$ne:2}}]}]}]} );
+ ok( {$or:[{$and:[{$and:[{a:{$ne:2}}]}]}]} );
+
+ ok( {$or:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
+
+ // now test $nor
+
+ ok( {$and:[{a:{$ne:2}}]} );
+ ok( {$nor:[{a:{$ne:1}}]} );
+
+ ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
+ ok( {$nor:[{$nor:[{a:{$ne:2}}]}]} );
+
+ ok( {$and:[{$nor:[{a:{$ne:1}}]}]} );
+ ok( {$nor:[{$and:[{a:{$ne:1}}]}]} );
+
+ ok( {$and:[{$and:[{$nor:[{a:{$ne:1}}]}]}]} );
+ ok( {$and:[{$nor:[{$and:[{a:{$ne:1}}]}]}]} );
+ ok( {$nor:[{$and:[{$and:[{a:{$ne:1}}]}]}]} );
+
+ ok( {$nor:[{$and:[{$nor:[{a:{$ne:2}}]}]}]} );
+
+}
+
+t.drop();
+t.save( {a:1} );
+test();
+t.ensureIndex( {a:1} );
+test();
diff --git a/jstests/core/apitest_db.js b/jstests/core/apitest_db.js
new file mode 100644
index 00000000000..c734d67bba7
--- /dev/null
+++ b/jstests/core/apitest_db.js
@@ -0,0 +1,77 @@
+/**
+ * Tests for the db object enhancement
+ */
+
+assert( "test" == db, "wrong database currently not test" );
+
+dd = function( x ){
+ //print( x );
+}
+
+dd( "a" );
+
+
+dd( "b" );
+
+/*
+ * be sure the public collection API is complete
+ */
+assert(db.createCollection , "createCollection" );
+assert(db.getProfilingLevel , "getProfilingLevel" );
+assert(db.setProfilingLevel , "setProfilingLevel" );
+assert(db.dbEval , "dbEval" );
+assert(db.group , "group" );
+
+dd( "c" );
+
+/*
+ * test createCollection
+ */
+
+db.getCollection( "test" ).drop();
+db.getCollection( "system.namespaces" ).find().forEach( function(x) { assert(x.name != "test.test"); });
+
+dd( "d" );
+
+db.createCollection("test");
+var found = false;
+db.getCollection( "system.namespaces" ).find().forEach( function(x) { if (x.name == "test.test") found = true; });
+assert(found, "found test.test in system.namespaces");
+
+dd( "e" );
+
+/*
+ * profile level
+ */
+
+db.setProfilingLevel(0);
+assert(db.getProfilingLevel() == 0, "prof level 0");
+
+db.setProfilingLevel(1);
+assert(db.getProfilingLevel() == 1, "p1");
+
+db.setProfilingLevel(2);
+assert(db.getProfilingLevel() == 2, "p2");
+
+db.setProfilingLevel(0);
+assert(db.getProfilingLevel() == 0, "prof level 0");
+
+dd( "f" );
+asserted = false;
+try {
+ db.setProfilingLevel(10);
+ assert(false);
+}
+catch (e) {
+ asserted = true;
+ assert(e.dbSetProfilingException);
+}
+assert( asserted, "should have asserted" );
+
+dd( "g" );
+
+
+
+assert.eq( "foo" , db.getSisterDB( "foo" ).getName() )
+assert.eq( "foo" , db.getSiblingDB( "foo" ).getName() )
+
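
[Editor's note] setProfilingLevel is a shell wrapper around the server's
profile command, and level 10 fails because only 0 (off), 1 (slow operations),
and 2 (all operations) are valid. A sketch of the underlying command; its reply
reports the previous level in the 'was' field:

    var res = db.runCommand( { profile : 1 } );
    printjson( res );                 // { was : <previous level>, ..., ok : 1 }
    db.runCommand( { profile : 0 } ); // restore the default
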
diff --git a/jstests/core/apitest_dbcollection.js b/jstests/core/apitest_dbcollection.js
new file mode 100644
index 00000000000..0983b065477
--- /dev/null
+++ b/jstests/core/apitest_dbcollection.js
@@ -0,0 +1,115 @@
+/**
+ * Tests for the db collection
+ */
+
+
+
+/*
+ * test drop
+ */
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).find().length() == 0,1);
+
+db.getCollection( "test_db" ).save({a:1});
+assert(db.getCollection( "test_db" ).find().length() == 1,2);
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).find().length() == 0,3);
+
+/*
+ * test count
+ */
+
+assert(db.getCollection( "test_db" ).count() == 0,4);
+db.getCollection( "test_db" ).save({a:1});
+assert(db.getCollection( "test_db" ).count() == 1,5);
+for (i = 0; i < 100; i++) {
+ db.getCollection( "test_db" ).save({a:1});
+}
+assert(db.getCollection( "test_db" ).count() == 101,6);
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,7);
+
+/*
+ * test clean (not much to verify; just be sure it doesn't blow up)
+ */
+
+ db.getCollection( "test_db" ).clean();
+
+ /*
+ * test validate
+ */
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,8);
+
+for (i = 0; i < 100; i++) {
+ db.getCollection( "test_db" ).save({a:1});
+}
+
+var v = db.getCollection( "test_db" ).validate();
+if( v.ns != "test.test_db" ) {
+ print("Error: wrong ns name");
+ print(tojson(v));
+}
+assert (v.ns == "test.test_db",9);
+assert (v.ok == 1,10);
+
+assert.eq(100,v.nrecords,11)
+
+/*
+ * test deleteIndex, deleteIndexes
+ */
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,12);
+db.getCollection( "test_db" ).dropIndexes();
+assert(db.getCollection( "test_db" ).getIndexes().length == 0,13);
+
+db.getCollection( "test_db" ).save({a:10});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,14);
+
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).save({a:10});
+
+print( tojson( db.getCollection( "test_db" ).getIndexes() ) );
+assert.eq(db.getCollection( "test_db" ).getIndexes().length , 2,15);
+
+db.getCollection( "test_db" ).dropIndex({a:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,16);
+
+db.getCollection( "test_db" ).save({a:10});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).save({a:10});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 2,17);
+
+db.getCollection( "test_db" ).dropIndex("a_1");
+assert.eq( db.getCollection( "test_db" ).getIndexes().length , 1,18);
+
+db.getCollection( "test_db" ).save({a:10, b:11});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).ensureIndex({b:1});
+db.getCollection( "test_db" ).save({a:10, b:12});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 3,19);
+
+db.getCollection( "test_db" ).dropIndex({b:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 2,20);
+db.getCollection( "test_db" ).dropIndex({a:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,21);
+
+db.getCollection( "test_db" ).save({a:10, b:11});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).ensureIndex({b:1});
+db.getCollection( "test_db" ).save({a:10, b:12});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 3,22);
+
+db.getCollection( "test_db" ).dropIndexes();
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,23);
+
+db.getCollection( "test_db" ).find();
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).getIndexes().length == 0,24);
diff --git a/jstests/core/apply_ops1.js b/jstests/core/apply_ops1.js
new file mode 100644
index 00000000000..9e6cb39f7c7
--- /dev/null
+++ b/jstests/core/apply_ops1.js
@@ -0,0 +1,66 @@
+
+t = db.apply_ops1;
+t.drop();
+
+assert.eq( 0 , t.find().count() , "A0" );
+a = db.adminCommand( { applyOps : [ { "op" : "i" , "ns" : t.getFullName() , "o" : { _id : 5 , x : 17 } } ] } )
+assert.eq( 1 , t.find().count() , "A1a" );
+assert.eq( true, a.results[0], "A1b" );
+
+o = { _id : 5 , x : 17 }
+assert.eq( o , t.findOne() , "A2" );
+
+res = db.runCommand( { applyOps : [
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
+] } )
+
+o.x++;
+o.x++;
+
+assert.eq( 1 , t.find().count() , "A3" );
+assert.eq( o , t.findOne() , "A4" );
+assert.eq( true, res.results[0], "A1b" );
+assert.eq( true, res.results[1], "A1b" );
+
+
+res = db.runCommand( { applyOps :
+ [
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
+ ]
+ ,
+ preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ]
+ } );
+
+o.x++;
+o.x++;
+
+assert.eq( 1 , t.find().count() , "B1" );
+assert.eq( o , t.findOne() , "B2" );
+assert.eq( true, res.results[0], "B2a" );
+assert.eq( true, res.results[1], "B2b" );
+
+
+res = db.runCommand( { applyOps :
+ [
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } }
+ ]
+ ,
+ preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ] // now stale: x is 21, so the ops must not be applied
+ } );
+
+assert.eq( 1 , t.find().count() , "B3" );
+assert.eq( o , t.findOne() , "B4" );
+
+res = db.runCommand( { applyOps :
+ [
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } ,
+ { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 6 } , "o" : { $inc : { x : 1 } } }
+ ]
+ } );
+
+assert.eq( true, res.results[0], "B5" );
+assert.eq( true, res.results[1], "B6" );
+
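
[Editor's note] The preCondition array above gates the whole batch: each entry
runs query q against namespace ns, and the ops are applied only if the result
matches res. That is why the third applyOps (x is 21 by then, not 19) leaves
the document untouched. A sketch of a deliberately failing precondition
(hypothetical collection; the exact error reply varies by server version):

    var c = db.apply_ops_example; // hypothetical
    c.drop();
    c.save( { _id : 1, x : 1 } );
    db.runCommand( { applyOps : [ { op : "u", ns : c.getFullName(),
                                    o2 : { _id : 1 }, o : { $inc : { x : 1 } } } ],
                     preCondition : [ { ns : c.getFullName(),
                                        q : { _id : 1 }, res : { x : 99 } } ] } );
    assert.eq( 1, c.findOne().x ); // precondition failed; nothing was applied
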
diff --git a/jstests/core/apply_ops2.js b/jstests/core/apply_ops2.js
new file mode 100644
index 00000000000..1a5923c3465
--- /dev/null
+++ b/jstests/core/apply_ops2.js
@@ -0,0 +1,71 @@
+// Test applyOps upsert flag SERVER-7452
+
+var t = db.apply_ops2;
+t.drop();
+
+assert.eq(0, t.find().count(), "test collection not empty");
+
+t.insert({_id:1, x:"init"});
+
+// alwaysUpsert = true
+print("Testing applyOps with alwaysUpsert = true");
+
+var res = db.runCommand({ applyOps: [
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2 : { _id: 1 },
+ o: { $set: { x: "upsert=true existing" }}
+ },
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: { _id: 2 },
+ o: { $set : { x: "upsert=true non-existing" }}
+ }], alwaysUpsert: true });
+
+assert.eq(true, res.results[0], "upsert = true, existing doc update failed");
+assert.eq(true, res.results[1], "upsert = true, nonexisting doc not upserted");
+assert.eq(2, t.find().count(), "2 docs expected after upsert");
+
+// alwaysUpsert = false
+print("Testing applyOps with alwaysUpsert = false");
+
+res = db.runCommand({ applyOps: [
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: { _id: 1 },
+ o: { $set : { x: "upsert=false existing" }}
+ },
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: { _id: 3 },
+ o: { $set: { x: "upsert=false non-existing" }}
+ }], alwaysUpsert: false });
+
+assert.eq(true, res.results[0], "upsert = false, existing doc update failed");
+assert.eq(false, res.results[1], "upsert = false, nonexisting doc upserted");
+assert.eq(2, t.find().count(), "2 docs expected after upsert failure");
+
+// alwaysUpsert not specified; should default to true
+print("Testing applyOps with default alwaysUpsert");
+
+res = db.runCommand({ applyOps: [
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: { _id: 1 },
+ o: { $set: { x: "upsert=default existing" }}
+ },
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: { _id: 4 },
+ o: { $set: { x: "upsert=defaults non-existing" }}
+ }]});
+
+assert.eq(true, res.results[0], "default upsert, existing doc update failed");
+assert.eq(true, res.results[1], "default upsert, nonexisting doc not upserted");
+assert.eq(3, t.find().count(), "2 docs expected after upsert failure");
diff --git a/jstests/core/array1.js b/jstests/core/array1.js
new file mode 100644
index 00000000000..4409b7bb4d3
--- /dev/null
+++ b/jstests/core/array1.js
@@ -0,0 +1,14 @@
+t = db.array1
+t.drop()
+
+x = { a : [ 1 , 2 ] };
+
+t.save( { a : [ [1,2] ] } );
+assert.eq( 1 , t.find( x ).count() , "A" );
+
+t.save( x );
+delete x._id;
+assert.eq( 2 , t.find( x ).count() , "B" );
+
+t.ensureIndex( { a : 1 } );
+assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146
diff --git a/jstests/core/array3.js b/jstests/core/array3.js
new file mode 100644
index 00000000000..3d053f99417
--- /dev/null
+++ b/jstests/core/array3.js
@@ -0,0 +1,8 @@
+
+assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" )
+assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" )
+
+arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ]
+assert.eq( 5 , Array.avg( arr ) , "C" )
+assert.eq( 2 , Array.stdDev( arr ) , "D" )
+
diff --git a/jstests/core/array4.js b/jstests/core/array4.js
new file mode 100644
index 00000000000..1053e160f11
--- /dev/null
+++ b/jstests/core/array4.js
@@ -0,0 +1,30 @@
+
+t = db.array4;
+t.drop();
+
+t.insert({"a": ["1", "2", "3"]});
+t.insert({"a" : ["2", "1"]});
+
+var x = {'a.0' : /1/};
+
+assert.eq(t.count(x), 1);
+
+assert.eq(t.findOne(x).a[0], 1);
+assert.eq(t.findOne(x).a[1], 2);
+
+t.drop();
+
+t.insert({"a" : {"0" : "1"}});
+t.insert({"a" : ["2", "1"]});
+
+assert.eq(t.count(x), 1);
+assert.eq(t.findOne(x).a[0], 1);
+
+t.drop();
+
+t.insert({"a" : ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]});
+t.insert({"a" : ["2", "1"]});
+
+x = {"a.12" : /2/};
+assert.eq(t.count(x), 1);
+assert.eq(t.findOne(x).a[0], 0);
diff --git a/jstests/core/array_match1.js b/jstests/core/array_match1.js
new file mode 100644
index 00000000000..f764fb913b1
--- /dev/null
+++ b/jstests/core/array_match1.js
@@ -0,0 +1,31 @@
+
+t = db.array_match1
+t.drop();
+
+t.insert( { _id : 1 , a : [ 5 , 5 ] } )
+t.insert( { _id : 2 , a : [ 6 , 6 ] } )
+t.insert( { _id : 3 , a : [ 5 , 5 ] } )
+
+function test( f , m ){
+ var q = {};
+
+ q[f] = [5,5];
+ assert.eq( 2 , t.find( q ).itcount() , m + "1" )
+
+ q[f] = [6,6];
+ assert.eq( 1 , t.find( q ).itcount() , m + "2" )
+}
+
+test( "a" , "A" );
+t.ensureIndex( { a : 1 } )
+test( "a" , "B" );
+
+t.drop();
+
+t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } )
+t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } )
+t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } )
+
+test( "a.b" , "C" );
+t.ensureIndex( { a : 1 } )
+test( "a.b" , "D" );
diff --git a/jstests/core/array_match2.js b/jstests/core/array_match2.js
new file mode 100644
index 00000000000..d254b0a3fdd
--- /dev/null
+++ b/jstests/core/array_match2.js
@@ -0,0 +1,20 @@
+
+t = db.jstests_array_match2;
+t.drop();
+
+t.save( {a:[{1:4},5]} );
+// When the array index is the last field, both of these match types work.
+assert.eq( 1, t.count( {'a.1':4} ) );
+assert.eq( 1, t.count( {'a.1':5} ) );
+
+t.remove({});
+// When the array index is not the last field, both match types should still work.
+t.save( {a:[{1:{foo:4}},{foo:5}]} );
+assert.eq( 1, t.count( {'a.1.foo':4} ) );
+assert.eq( 1, t.count( {'a.1.foo':5} ) );
+
+// Same issue with the $exists operator
+t.remove({});
+t.save( {a:[{1:{foo:4}},{}]} );
+assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.1.foo':{$exists:true}} ) );
diff --git a/jstests/core/array_match3.js b/jstests/core/array_match3.js
new file mode 100644
index 00000000000..c8653430770
--- /dev/null
+++ b/jstests/core/array_match3.js
@@ -0,0 +1,13 @@
+// SERVER-2902 Test indexing of numerically referenced array elements.
+
+t = db.jstests_array_match3;
+t.drop();
+
+// Test matching a numerically referenced array element.
+t.save( {a:{'0':5}} );
+t.save( {a:[5]} );
+assert.eq( 2, t.count( {'a.0':5} ) );
+
+// Test with index.
+t.ensureIndex( {'a.0':1} );
+assert.eq( 2, t.count( {'a.0':5} ) );
diff --git a/jstests/core/array_match4.js b/jstests/core/array_match4.js
new file mode 100644
index 00000000000..b4cdec5143a
--- /dev/null
+++ b/jstests/core/array_match4.js
@@ -0,0 +1,30 @@
+var t = db.array_match4;
+
+t.drop();
+t.save({a: [1, 2]});
+
+var query_gte = {a: {$gte: [1, 2]}};
+
+//
+// without index
+//
+
+assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (without index)');
+assert.eq(1, t.find(query_gte).itcount(), '$gte (without index)');
+
+//
+// with index
+//
+
+t.ensureIndex({a: 1});
+assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (with index)');
+
+// display explain output (for index bounds)
+var explain = t.find(query_gte).explain();
+print('explain for ' + tojson(query_gte, '', true) + ' = ' + tojson(explain));
+
+// The number of documents returned by the indexed query should be
+// consistent with the non-indexed case.
+// XXX: The following assertion documents current behavior.
+// XXX: 2.4 and 2.6 both return 0 documents.
+assert.eq(0, t.find(query_gte).itcount(), '$gte (with index)');
diff --git a/jstests/core/arrayfind1.js b/jstests/core/arrayfind1.js
new file mode 100644
index 00000000000..539fa6193a1
--- /dev/null
+++ b/jstests/core/arrayfind1.js
@@ -0,0 +1,40 @@
+
+t = db.arrayfind1;
+t.drop();
+
+t.save( { a : [ { x : 1 } ] } )
+t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } )
+t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } )
+
+function test( expected , q , name ){
+ assert.eq( expected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" );
+ assert.eq( expected , t.find( q ).count() , name + " " + tojson( q ) + " count" );
+}
+
+test( 3 , {} , "A1" );
+test( 1 , { "a.y" : 2 } , "A2" );
+test( 1 , { "a" : { x : 1 } } , "A3" );
+test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377
+
+
+t.save( { a : [ { x : 2 } ] } )
+t.save( { a : [ { x : 3 } ] } )
+t.save( { a : [ { x : 4 } ] } )
+
+assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" );
+assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" );
+
+t.ensureIndex( { "a.x" : 1 } );
+assert( t.find( { "a" : { $elemMatch : { x : 1 } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "C1" );
+
+assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" );
+
+t.find( { "a.x" : 1 } ).count();
+t.find( { "a.x" : { $gt : 1 } } ).count();
+
+res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain()
+assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" );
+assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" );
+
+assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" );
+assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" );
diff --git a/jstests/core/arrayfind2.js b/jstests/core/arrayfind2.js
new file mode 100644
index 00000000000..c6a78042c3d
--- /dev/null
+++ b/jstests/core/arrayfind2.js
@@ -0,0 +1,29 @@
+
+t = db.arrayfind2;
+t.drop();
+
+function go( prefix ){
+ assert.eq( 3 , t.count() , prefix + " A1" );
+ assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" );
+ assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" );
+ assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } ,
+ { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" );
+
+ assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ) } );
+ assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ) } );
+
+}
+
+t.save( { a : [ { x : 1 } , { x : 5 } ] } )
+t.save( { a : [ { x : 3 } , { x : 5 } ] } )
+t.save( { a : [ { x : 3 } , { x : 6 } ] } )
+
+go( "no index" );
+t.ensureIndex( { a : 1 } );
+go( "index(a)" );
+
+t.ensureIndex( { "a.x": 1 } );
+
+assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds );
+// only first $elemMatch used to find bounds
+assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } }, { $elemMatch : { y : 5 } } ] } } ).explain().indexBounds );
diff --git a/jstests/core/arrayfind3.js b/jstests/core/arrayfind3.js
new file mode 100644
index 00000000000..de038c84264
--- /dev/null
+++ b/jstests/core/arrayfind3.js
@@ -0,0 +1,16 @@
+
+t = db.arrayfind3;
+t.drop()
+
+t.save({a:[1,2]})
+t.save({a:[1, 2, 6]})
+t.save({a:[1, 4, 6]})
+
+
+assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "A1" )
+assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "A2" )
+
+t.ensureIndex( { a : 1 } )
+
+assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "B1" );
+assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "B2" )
diff --git a/jstests/core/arrayfind4.js b/jstests/core/arrayfind4.js
new file mode 100644
index 00000000000..17b02c8886b
--- /dev/null
+++ b/jstests/core/arrayfind4.js
@@ -0,0 +1,22 @@
+// Test query empty array SERVER-2258
+
+t = db.jstests_arrayfind4;
+t.drop();
+
+t.save( {a:[]} );
+t.ensureIndex( {a:1} );
+
+assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
+
+assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
+
+t.remove({});
+t.save( {a:[[]]} );
+
+assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
+
+assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
diff --git a/jstests/core/arrayfind5.js b/jstests/core/arrayfind5.js
new file mode 100644
index 00000000000..9ff6e2b8a5f
--- /dev/null
+++ b/jstests/core/arrayfind5.js
@@ -0,0 +1,23 @@
+// Test indexed elemmatch of missing field.
+
+t = db.jstests_arrayfind5;
+t.drop();
+
+function check( nullElemMatch ) {
+ assert.eq( 1, t.find( {'a.b':1} ).itcount() );
+ assert.eq( 1, t.find( {a:{$elemMatch:{b:1}}} ).itcount() );
+ assert.eq( nullElemMatch ? 1 : 0 , t.find( {'a.b':null} ).itcount() );
+ assert.eq( nullElemMatch ? 1 : 0, t.find( {a:{$elemMatch:{b:null}}} ).itcount() ); // see SERVER-3377
+}
+
+t.save( {a:[{},{b:1}]} );
+check( true );
+t.ensureIndex( {'a.b':1} );
+check( true );
+
+t.drop();
+
+t.save( {a:[5,{b:1}]} );
+check( false );
+t.ensureIndex( {'a.b':1} );
+check( false );
diff --git a/jstests/core/arrayfind6.js b/jstests/core/arrayfind6.js
new file mode 100644
index 00000000000..f4531cea96a
--- /dev/null
+++ b/jstests/core/arrayfind6.js
@@ -0,0 +1,26 @@
+// Check index bound determination for $not:$elemMatch queries. SERVER-5740
+
+t = db.jstests_arrayfind6;
+t.drop();
+
+t.save( { a:[ { b:1, c:2 } ] } );
+
+function checkElemMatchMatches() {
+ assert.eq( 1, t.count( { a:{ $elemMatch:{ b:1, c:2 } } } ) );
+ assert.eq( 0, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:2 } } } } ) );
+ assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:3 } } } } ) );
+ assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:1 }, c:3 } } } } ) );
+ // Index bounds must be determined for $not:$elemMatch, not $not:$ne. In this case if index
+ // bounds are determined for $not:$ne, the a.b index will be constrained to the interval [2,2]
+ // and the saved document will not be matched as it should.
+ assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ) );
+}
+
+checkElemMatchMatches();
+t.ensureIndex( { 'a.b':1 } );
+checkElemMatchMatches();
+
+// We currently never use an index for negations of
+// ELEM_MATCH_OBJECT expressions.
+var explain = t.find( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ).explain();
+assert.eq( "BasicCursor", explain.cursor );
diff --git a/jstests/core/arrayfind7.js b/jstests/core/arrayfind7.js
new file mode 100644
index 00000000000..7c44de1dc1d
--- /dev/null
+++ b/jstests/core/arrayfind7.js
@@ -0,0 +1,52 @@
+// Nested $elemMatch clauses. SERVER-5741
+
+t = db.jstests_arrayfind7;
+t.drop();
+
+t.save( { a:[ { b:[ { c:1, d:2 } ] } ] } );
+
+function checkElemMatchMatches() {
+ assert.eq( 1, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:1, d:2 } } } } } ) );
+}
+
+// The document is matched using nested $elemMatch expressions, with and without an index.
+checkElemMatchMatches();
+t.ensureIndex( { 'a.b.c':1 } );
+checkElemMatchMatches();
+
+function checkElemMatch( index, document, query ) {
+ // The document is matched without an index, and with single and multi key indexes.
+ t.drop();
+ t.save( document );
+ assert.eq( 1, t.count( query ) );
+ t.ensureIndex( index );
+ assert.eq( 1, t.count( query ) );
+ t.save( { a:{ b:{ c:[ 10, 11 ] } } } ); // Make the index multikey.
+ assert.eq( 1, t.count( query ) );
+}
+
+// Two constraints within a nested $elemMatch expression.
+checkElemMatch( { 'a.b.c':1 },
+ { a:[ { b:[ { c:1 } ] } ] },
+ { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } });
+
+// Two constraints within a nested $elemMatch expression, one of which contains the other.
+checkElemMatch( { 'a.b.c':1 },
+ { a:[ { b:[ { c:2 } ] } ] },
+ { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $in:[2] } } } } } });
+
+// Two nested $elemMatch expressions.
+checkElemMatch( { 'a.d.e':1, 'a.b.c':1 },
+ { a:[ { b:[ { c:1 } ], d:[ { e:1 } ] } ] },
+ { a:{ $elemMatch:{ d:{ $elemMatch:{ e:{ $lte:1 } } },
+ b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
+
+// A non $elemMatch expression and a nested $elemMatch expression.
+checkElemMatch( { 'a.x':1, 'a.b.c':1 },
+ { a:[ { b:[ { c:1 } ], x:1 } ] },
+ { 'a.x':1, a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
+
+// $elemMatch is applied directly to a top level field.
+checkElemMatch( { 'a.b.c':1 },
+ { a:[ { b:[ { c:[ 1 ] } ] } ] },
+ { a:{ $elemMatch:{ 'b.c':{ $elemMatch:{ $gte:1, $lte:1 } } } } });
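+
+// An added negative sketch: a nested $elemMatch requires a single element of
+// the inner array to satisfy all constraints at once, so a document that
+// spreads the c values across elements should not match.
+t.drop();
+t.save( { a:[ { b:[ { c:0 }, { c:2 } ] } ] } );
+assert.eq( 0, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } } ) );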
diff --git a/jstests/core/arrayfind8.js b/jstests/core/arrayfind8.js
new file mode 100644
index 00000000000..07d44ace26e
--- /dev/null
+++ b/jstests/core/arrayfind8.js
@@ -0,0 +1,175 @@
+// Matching behavior for $elemMatch applied to a top level element.
+// SERVER-1264
+// SERVER-4180
+
+t = db.jstests_arrayfind8;
+t.drop();
+
+var debuggingEnabled = false;
+
+function debug( x ) {
+    if ( debuggingEnabled ) {
+ printjson( x );
+ }
+}
+
+/** Set index state for the test. */
+function setIndexKey( key ) {
+ indexKey = key;
+ indexSpec = {};
+ indexSpec[ key ] = 1;
+}
+
+setIndexKey( 'a' );
+
+function indexBounds( query ) {
+ debug( query );
+ debug( t.find( query ).hint( indexSpec ).explain() );
+ return t.find( query ).hint( indexSpec ).explain().indexBounds[ indexKey ];
+}
+
+/** Check that the query results match the documents in the 'expected' array. */
+function assertResults( expected, query, context ) {
+ debug( query );
+ assert.eq( expected.length, t.count( query ), 'unexpected count in ' + context );
+ results = t.find( query ).toArray();
+ for( i in results ) {
+ found = false;
+ for( j in expected ) {
+ if ( friendlyEqual( expected[ j ], results[ i ].a ) ) {
+ found = true;
+ }
+ }
+ assert( found, 'unexpected result ' + results[ i ] + ' in ' + context );
+ }
+}
+
+/**
+ * Check matching for different query types.
+ * @param bothMatch - document matched by both standardQuery and elemMatchQuery
+ * @param elemMatch - document matched by elemMatchQuery but not standardQuery
+ * @param nonElemMatch - document matched by standardQuery but not elemMatchQuery
+ */
+function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context ) {
+
+ function mayPush( arr, elt ) {
+ if ( elt ) {
+ arr.push( elt );
+ }
+ }
+
+ expectedStandardQueryResults = [];
+ mayPush( expectedStandardQueryResults, bothMatch );
+ mayPush( expectedStandardQueryResults, nonElemMatch );
+ assertResults( expectedStandardQueryResults, standardQuery, context + ' standard query' );
+
+ expectedElemMatchQueryResults = [];
+ mayPush( expectedElemMatchQueryResults, bothMatch );
+ mayPush( expectedElemMatchQueryResults, elemMatch );
+ assertResults( expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query' );
+}
+
+/**
+ * Check matching for different query types.
+ * @param subQuery - part of a query, to be provided as is for a standard query and within a
+ * $elemMatch clause for a $elemMatch query
+ * @param bothMatch - document matched by both standardQuery and elemMatchQuery
+ * @param elemMatch - document matched by elemMatchQuery but not standardQuery
+ * @param nonElemMatch - document matched by standardQuery but not elemMatchQuery
+ * @param additionalConstraints - additional query parameters not generated from @param subQuery
+ */
+function checkQuery( subQuery, bothMatch, elemMatch, nonElemMatch,
+ additionalConstraints ) {
+ t.drop();
+ additionalConstraints = additionalConstraints || {};
+
+ // Construct standard and elemMatch queries from subQuery.
+ firstSubQueryKey = Object.keySet( subQuery )[ 0 ];
+ if ( firstSubQueryKey[ 0 ] == '$' ) {
+ standardQuery = { $and:[ { a:subQuery }, additionalConstraints ] };
+ }
+ else {
+ // If the subQuery contains a field rather than operators, append to the 'a' field.
+ modifiedSubQuery = {};
+ modifiedSubQuery[ 'a.' + firstSubQueryKey ] = subQuery[ firstSubQueryKey ];
+ standardQuery = { $and:[ modifiedSubQuery, additionalConstraints ] };
+ }
+ elemMatchQuery = { $and:[ { a:{ $elemMatch:subQuery } }, additionalConstraints ] };
+ debug( elemMatchQuery );
+
+ function maySave( aValue ) {
+ if ( aValue ) {
+ debug( { a:aValue } );
+ t.save( { a:aValue } );
+ }
+ }
+
+ // Save all documents and check matching without indexes.
+ maySave( bothMatch );
+ maySave( elemMatch );
+ maySave( nonElemMatch );
+
+ checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed' );
+
+ // Check matching and index bounds for a single key index.
+
+ t.drop();
+ maySave( bothMatch );
+ maySave( elemMatch );
+ // The nonElemMatch document is not tested here, as it will often make the index multikey.
+ t.ensureIndex( indexSpec );
+ checkMatch( bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index' );
+
+ // Check matching and index bounds for a multikey index.
+
+ // Now the nonElemMatch document is tested.
+ maySave( nonElemMatch );
+ // Force the index to be multikey.
+ t.save( { a:[ -1, -2 ] } );
+ t.save( { a:{ b:[ -1, -2 ] } } );
+ checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery,
+ 'multikey index' );
+}
+
+maxNumber = Infinity;
+
+// Basic test.
+checkQuery( { $gt:4 }, [ 5 ] );
+
+// Multiple constraints within a $elemMatch clause.
+checkQuery( { $gt:4, $lt:6 }, [ 5 ], null, [ 3, 7 ] );
+checkQuery( { $gt:4, $not:{ $gte:6 } }, [ 5 ] );
+checkQuery( { $gt:4, $not:{ $ne:6 } }, [ 6 ] );
+checkQuery( { $gte:5, $lte:5 }, [ 5 ], null, [ 4, 6 ] );
+checkQuery( { $in:[ 4, 6 ], $gt:5 }, [ 6 ], null, [ 4, 7 ] );
+checkQuery( { $regex:'^a' }, [ 'a' ] );
+
+// Some constraints within a $elemMatch clause and other constraints outside of it.
+checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $lt:6 } } );
+checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $lte:5 } } );
+checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $gt:5 } } );
+
+// Constraints in different $elemMatch clauses.
+checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
+checkQuery( { $gt:4 }, [ 3, 7 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
+checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lte:5 } } } );
+checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $elemMatch:{ $gt:5 } } } );
+
+// TODO SERVER-1264
+if ( 0 ) {
+    checkQuery( { $elemMatch:{ $in:[ 5 ] } }, null, [[ 5 ]], [ 5 ], null );
+}
+
+setIndexKey( 'a.b' );
+checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:1 } ]],
+ [ { b:1 } ], null );
+checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:[ 0, 2 ] } ]],
+ [ { b:[ 0, 2 ] } ], null );
+
+// Constraints for a top level (SERVER-1264 style) $elemMatch nested within a non top level
+// $elemMatch.
+checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:1 } } }, [ { b:[ 1 ] } ] );
+checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 1 ] } ] );
+
+checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 2 ] } ], null,
+ null, { 'a.b':{ $in:[ 2, 5 ] } } );
+checkQuery( { b:{ $elemMatch:{ $in:[ 1, 2 ] }, $in:[ 2, 3 ] } },
+ [ { b:[ 2 ] } ], null, [ { b:[ 1 ] }, { b:[ 3 ] } ], null );
diff --git a/jstests/core/arrayfind9.js b/jstests/core/arrayfind9.js
new file mode 100644
index 00000000000..4ee14c56580
--- /dev/null
+++ b/jstests/core/arrayfind9.js
@@ -0,0 +1,34 @@
+// Assorted $elemMatch behavior checks.
+
+t = db.jstests_arrayfind9;
+t.drop();
+
+// Top level field $elemMatch:$not matching
+t.save( { a:[ 1 ] } );
+assert.eq( 1, t.count( { a:{ $elemMatch:{ $not:{ $ne:1 } } } } ) );
+
+// Top level field object $elemMatch matching.
+t.drop();
+t.save( { a:[ {} ] } );
+assert.eq( 1, t.count( { a:{ $elemMatch:{ $gte:{} } } } ) );
+
+// Top level field array $elemMatch matching.
+t.drop();
+t.save( { a:[ [] ] } );
+assert.eq( 1, t.count( { a:{ $elemMatch:{ $in:[ [] ] } } } ) );
+
+// Matching by array index.
+t.drop();
+t.save( { a:[ [ 'x' ] ] } );
+assert.eq( 1, t.count( { a:{ $elemMatch:{ '0':'x' } } } ) );
+
+// Matching multiple values of a nested array.
+t.drop();
+t.save( { a:[ { b:[ 0, 2 ] } ] } );
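+// Note: the two range constraints may be satisfied by different elements of
+// the nested array b (2 satisfies $gte:1 and 0 satisfies $lte:1).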
+t.ensureIndex( { a:1 } );
+t.ensureIndex( { 'a.b':1 } );
+plans = [ { $natural:1 }, { a:1 }, { 'a.b':1 } ];
+for( i in plans ) {
+ p = plans[ i ];
+ assert.eq( 1, t.find( { a:{ $elemMatch:{ b:{ $gte:1, $lte:1 } } } } ).hint( p ).itcount() );
+}
diff --git a/jstests/core/arrayfinda.js b/jstests/core/arrayfinda.js
new file mode 100644
index 00000000000..179d3985580
--- /dev/null
+++ b/jstests/core/arrayfinda.js
@@ -0,0 +1,21 @@
+// Assorted $elemMatch matching behavior checks.
+
+t = db.jstests_arrayfinda;
+t.drop();
+
+// $elemMatch only matches elements within arrays (a descriptive, not a normative test).
+t.save( { a:[ { b:1 } ] } );
+t.save( { a:{ b:1 } } );
+
+function assertExpectedMatch( cursor ) {
+ assert.eq( [ { b:1 } ], cursor.next().a );
+ assert( !cursor.hasNext() );
+}
+
+assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ) );
+assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ) );
+
+// $elemMatch is not used to perform key matching. SERVER-6001
+t.ensureIndex( { a:1 } );
+assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ).hint( { a:1 } ) );
+assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ).hint( { a:1 } ) );
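+
+// Added contrast: the plain dotted query does not require an array value, so
+// it matches both of the documents saved above.
+assert.eq( 2, t.find( { 'a.b':1 } ).itcount() );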
diff --git a/jstests/core/auth1.js b/jstests/core/auth1.js
new file mode 100644
index 00000000000..4ab26e8d2e5
--- /dev/null
+++ b/jstests/core/auth1.js
@@ -0,0 +1,54 @@
+var mydb = db.getSiblingDB('auth1_db');
+mydb.dropAllUsers();
+
+pass = "a" + Math.random();
+//print( "password [" + pass + "]" );
+
+mydb.createUser({user: "eliot" ,pwd: pass, roles: jsTest.basicUserRoles});
+
+assert( mydb.auth( "eliot" , pass ) , "auth failed" );
+assert( ! mydb.auth( "eliot" , pass + "a" ) , "auth should have failed" );
+
+pass2 = "b" + Math.random();
+mydb.changeUserPassword("eliot", pass2);
+
+assert( ! mydb.auth( "eliot" , pass ) , "failed to change password failed" );
+assert( mydb.auth( "eliot" , pass2 ) , "new password didn't take" );
+
+assert( mydb.auth( "eliot" , pass2 ) , "what?" );
+mydb.dropUser( "eliot" );
+assert( ! mydb.auth( "eliot" , pass2 ) , "didn't drop user" );
+
+
+var a = mydb.getMongo().getDB( "admin" );
+a.dropAllUsers();
+pass = "c" + Math.random();
+a.createUser({user: "super", pwd: pass, roles: jsTest.adminUserRoles});
+assert( a.auth( "super" , pass ) , "auth failed" );
+assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" );
+
+mydb.dropAllUsers();
+pass = "a" + Math.random();
+
+mydb.createUser({user: "eliot" , pwd: pass, roles: jsTest.basicUserRoles});
+
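+// Authenticating via the raw command with a bogus nonce/key pair must fail,
+// covering the command path rather than the shell's auth() helper.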
+assert.commandFailed( mydb.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) );
+
+// Sanity check that invalid user creation does not add users. SERVER-3003
+
+var before = a.system.users.count({db: mydb.getName()});
+
+assert.throws( function(){
+ mydb.createUser({ user: "" , pwd: "abc", roles: jsTest.basicUserRoles});
+} , null , "C1" )
+assert.throws( function(){
+ mydb.createUser({ user: "abc" , pwd: "", roles: jsTest.basicUserRoles});
+} , null , "C2" )
+
+
+var after = a.system.users.count({db: mydb.getName()});
+assert( before > 0 , "C3" )
+assert.eq( before , after , "C4" )
+
+// Clean up after ourselves so other tests using authentication don't get messed up.
+mydb.dropAllUsers()
diff --git a/jstests/core/auth2.js b/jstests/core/auth2.js
new file mode 100644
index 00000000000..9c2b38f682d
--- /dev/null
+++ b/jstests/core/auth2.js
@@ -0,0 +1,9 @@
+// just make sure logout doesn't break anything
+
+// SERVER-724
+db.runCommand({logout : 1});
+x = db.runCommand({logout : 1});
+assert.eq( 1 , x.ok , "A" )
+
+x = db.logout();
+assert.eq( 1 , x.ok , "B" )
diff --git a/jstests/core/auth_copydb.js b/jstests/core/auth_copydb.js
new file mode 100644
index 00000000000..f04cd0b0d29
--- /dev/null
+++ b/jstests/core/auth_copydb.js
@@ -0,0 +1,19 @@
+a = db.getSisterDB( "copydb2-test-a" );
+b = db.getSisterDB( "copydb2-test-b" );
+
+a.dropDatabase();
+b.dropDatabase();
+a.dropAllUsers();
+b.dropAllUsers();
+
+a.foo.save( { a : 1 } );
+
+a.createUser({user: "chevy" , pwd: "chase", roles: jsTest.basicUserRoles});
+
+assert.eq( 1 , a.foo.count() , "A" );
+assert.eq( 0 , b.foo.count() , "B" );
+
+// SERVER-727
+a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" );
+assert.eq( 1 , a.foo.count() , "C" );
+assert.eq( 1 , b.foo.count() , "D" );
diff --git a/jstests/core/autoid.js b/jstests/core/autoid.js
new file mode 100644
index 00000000000..6c8062fd093
--- /dev/null
+++ b/jstests/core/autoid.js
@@ -0,0 +1,11 @@
+f = db.jstests_autoid;
+f.drop();
+
+f.save( {z:1} );
+a = f.findOne( {z:1} );
+f.update( {z:1}, {z:2} );
+b = f.findOne( {z:2} );
+assert.eq( a._id.str, b._id.str );
+f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} );
+c = f.findOne( {} );
+assert.eq( a._id.str, c._id.str );
diff --git a/jstests/core/bad_index_plugin.js b/jstests/core/bad_index_plugin.js
new file mode 100644
index 00000000000..98ebdb9bb28
--- /dev/null
+++ b/jstests/core/bad_index_plugin.js
@@ -0,0 +1,11 @@
+// SERVER-5826 ensure you can't build an index with a non-existent plugin
+t = db.bad_index_plugin;
+
+assert.writeOK(t.ensureIndex({good: 1}));
+assert.eq(t.getIndexes().length, 2); // good + _id
+
+var err = t.ensureIndex({bad: 'bad'});
+assert.writeError(err);
+assert(err.getWriteError().code >= 0);
+
+assert.eq(t.getIndexes().length, 2); // good + _id (no bad)
diff --git a/jstests/core/basic1.js b/jstests/core/basic1.js
new file mode 100644
index 00000000000..e5fa577f0b2
--- /dev/null
+++ b/jstests/core/basic1.js
@@ -0,0 +1,21 @@
+
+t = db.getCollection( "basic1" );
+t.drop();
+
+o = { a : 1 };
+t.save( o );
+
+assert.eq( 1 , t.findOne().a , "first" );
+assert( o._id , "now had id" );
+assert( o._id.str , "id not a real id" );
+
+o.a = 2;
+t.save( o );
+
+assert.eq( 2 , t.findOne().a , "second" );
+
+assert(t.validate().valid);
+
+// not a thorough test of currentOp, but it verifies the command is
+// at least present and returns an inprog array:
+assert( db.currentOp().inprog != null );
diff --git a/jstests/core/basic2.js b/jstests/core/basic2.js
new file mode 100644
index 00000000000..aaa3de4366e
--- /dev/null
+++ b/jstests/core/basic2.js
@@ -0,0 +1,16 @@
+
+t = db.getCollection( "basic2" );
+t.drop();
+
+o = { n : 2 };
+t.save( o );
+
+assert.eq( 1 , t.find().count() );
+
+assert.eq( 2 , t.find( o._id ).toArray()[0].n );
+assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n );
+
+t.remove( o._id );
+assert.eq( 0 , t.find().count() );
+
+assert(t.validate().valid);
diff --git a/jstests/core/basic3.js b/jstests/core/basic3.js
new file mode 100644
index 00000000000..5fb5581a252
--- /dev/null
+++ b/jstests/core/basic3.js
@@ -0,0 +1,45 @@
+// Tests that "." cannot be in field names
+t = db.getCollection( "foo_basic3" );
+t.drop()
+
+//more diagnostics on bad save, if exception fails
+doBadSave = function(param) {
+ print("doing save with " + tojson(param))
+ var res = t.save(param);
+ // Should not get here.
+ printjson(res);
+}
+
+//more diagnostics on bad save, if exception fails
+doBadUpdate = function(query, update) {
+ print("doing update with " + tojson(query) + " " + tojson(update))
+ var res = t.update(query, update);
+ // Should not get here.
+ printjson(res);
+}
+
+assert.throws(doBadSave, [{"a.b":5}], ". in names aren't allowed doesn't work");
+
+assert.throws(doBadSave,
+ [{ "x" : { "a.b" : 5 } }],
+ ". in embedded names aren't allowed doesn't work");
+
+// following tests make sure update keys are checked
+t.save({"a": 0,"b": 1})
+
+assert.throws(doBadUpdate, [{a:0}, { "b.b" : 1 }],
+ "must deny '.' in key of update");
+
+// upsert with embedded doc
+assert.throws(doBadUpdate, [{a:10}, { c: {"b.b" : 1 }}],
+ "must deny embedded '.' in key of update");
+
+// if it is a modifier, it should still go through
+t.update({"a": 0}, {$set: { "c.c": 1}})
+t.update({"a": 0}, {$inc: { "c.c": 1}})
+
+// edge cases
+assert.throws(doBadUpdate, [{a:0}, { "":{"b.b" : 1} }],
+ "must deny '' embedded '.' in key of update");
+t.update({"a": 0}, {})
+
diff --git a/jstests/core/basic4.js b/jstests/core/basic4.js
new file mode 100644
index 00000000000..0cf7a261e63
--- /dev/null
+++ b/jstests/core/basic4.js
@@ -0,0 +1,12 @@
+t = db.getCollection( "basic4" );
+t.drop();
+
+t.save( { a : 1 , b : 1.0 } );
+
+assert( t.findOne() );
+assert( t.findOne( { a : 1 } ) );
+assert( t.findOne( { a : 1.0 } ) );
+assert( t.findOne( { b : 1 } ) );
+assert( t.findOne( { b : 1.0 } ) );
+
+assert( ! t.findOne( { b : 2.0 } ) );
diff --git a/jstests/core/basic5.js b/jstests/core/basic5.js
new file mode 100644
index 00000000000..bfa40fb8f5e
--- /dev/null
+++ b/jstests/core/basic5.js
@@ -0,0 +1,6 @@
+t = db.getCollection( "basic5" );
+t.drop();
+
+t.save( { a : 1 , b : [ 1 , 2 , 3 ] } );
+assert.eq( 3 , t.findOne().b.length );
+
diff --git a/jstests/core/basic6.js b/jstests/core/basic6.js
new file mode 100644
index 00000000000..e0cd6f1586e
--- /dev/null
+++ b/jstests/core/basic6.js
@@ -0,0 +1,8 @@
+
+t = db.basic6;
+
+t.findOne();
+t.a.findOne();
+
+assert.eq( "test.basic6" , t.toString() );
+assert.eq( "test.basic6.a" , t.a.toString() );
diff --git a/jstests/core/basic7.js b/jstests/core/basic7.js
new file mode 100644
index 00000000000..7bb0d470e82
--- /dev/null
+++ b/jstests/core/basic7.js
@@ -0,0 +1,11 @@
+
+t = db.basic7;
+t.drop();
+
+t.save( { a : 1 } )
+t.ensureIndex( { a : 1 } );
+
+assert.eq( t.find().toArray()[0].a , 1 );
+assert.eq( t.find().arrayAccess(0).a , 1 );
+assert.eq( t.find()[0].a , 1 );
+
diff --git a/jstests/core/basic8.js b/jstests/core/basic8.js
new file mode 100644
index 00000000000..513da0d15d1
--- /dev/null
+++ b/jstests/core/basic8.js
@@ -0,0 +1,11 @@
+
+t = db.basic8;
+t.drop();
+
+t.save( { a : 1 } );
+o = t.findOne();
+o.b = 2;
+t.save( o );
+
+assert.eq( 1 , t.find().count() , "A" );
+assert.eq( 2 , t.findOne().b , "B" );
diff --git a/jstests/core/basic9.js b/jstests/core/basic9.js
new file mode 100644
index 00000000000..814b72b2ae7
--- /dev/null
+++ b/jstests/core/basic9.js
@@ -0,0 +1,19 @@
+// Tests that $<prefix> field names are not allowed, but you can use a $ anywhere else.
+t = db.getCollection( "foo_basic9" );
+t.drop()
+
+// more diagnostics on bad save, if exception fails
+doBadSave = function(param) {
+ print("doing save with " + tojson(param))
+ var res = t.save(param);
+ // Should not get here.
+ print('Should have errored out: ' + tojson(res));
+}
+
+t.save({foo$foo:5});
+t.save({foo$:5});
+
+assert.throws(doBadSave, [{$foo:5}], "save with a field name starting with $ should throw");
+assert.throws(doBadSave,
+              [{x:{$foo:5}}],
+              "save with an embedded field name starting with $ should throw");
diff --git a/jstests/core/basica.js b/jstests/core/basica.js
new file mode 100644
index 00000000000..0cc364beb42
--- /dev/null
+++ b/jstests/core/basica.js
@@ -0,0 +1,33 @@
+
+t = db.basica;
+
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].x = 4;
+x.b["0"].z = 4;
+x.b[0].m = 9;
+x.b[0]["asd"] = 11;
+x.a = 2;
+x.z = 11;
+
+tojson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" );
+
+// -----
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].z = 4;
+
+//printjson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" );
+
diff --git a/jstests/core/basicb.js b/jstests/core/basicb.js
new file mode 100644
index 00000000000..95eb60151af
--- /dev/null
+++ b/jstests/core/basicb.js
@@ -0,0 +1,6 @@
+
+t = db.basicb;
+t.drop();
+
+assert.throws( function() { t.insert( { '$a' : 5 } ); });
+
diff --git a/jstests/core/basicc.js b/jstests/core/basicc.js
new file mode 100644
index 00000000000..8da8c68a8b0
--- /dev/null
+++ b/jstests/core/basicc.js
@@ -0,0 +1,21 @@
+// test writing to two db's at the same time.
+
+t1 = db.jstests_basicc;
+var db = db.getSisterDB("test_basicc");
+t2 = db.jstests_basicc;
+t1.drop();
+t2.drop();
+
+js = "while( 1 ) { db.jstests.basicc1.save( {} ); }";
+pid = startMongoProgramNoConnect( "mongo" , "--eval" , js , db.getMongo().host );
+
+for( var i = 0; i < 1000; ++i ) {
+ assert.writeOK(t2.save( {} ));
+}
+
+stopMongoProgramByPid( pid );
+// put things back the way we found it
+t1.drop();
+t2.drop();
+db.dropDatabase();
+db = db.getSisterDB("test");
diff --git a/jstests/core/batch_size.js b/jstests/core/batch_size.js
new file mode 100644
index 00000000000..2bc144cd554
--- /dev/null
+++ b/jstests/core/batch_size.js
@@ -0,0 +1,45 @@
+// Test subtleties of batchSize and limit.
+
+var t = db.jstests_batch_size;
+t.drop();
+
+for (var i = 0; i < 4; i++) {
+ t.save({_id: i, a: i});
+}
+
+function runIndexedTests() {
+ // With limit, indexed.
+ assert.eq(2, t.find().limit(2).itcount(), 'G');
+ assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'H');
+
+ // With batchSize, indexed.
+ // SERVER-12438: If there is an index that provides the sort,
+ // then a plan with an unindexed sort should never be used.
+ // Consequently, batchSize will NOT be a hard limit in this case.
+ // WARNING: the behavior described above may change in the future.
+ assert.eq(4, t.find().batchSize(2).itcount(), 'I');
+ assert.eq(4, t.find().sort({a: 1}).batchSize(2).itcount(), 'J');
+}
+
+// Without batch size or limit, unindexed.
+assert.eq(4, t.find().itcount(), 'A');
+assert.eq(4, t.find().sort({a: 1}).itcount(), 'B');
+
+// With limit, unindexed.
+assert.eq(2, t.find().limit(2).itcount(), 'C');
+assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'D');
+
+// With batchSize, unindexed.
+// SERVER-12438: in general batch size does not mean a hard
+// limit. With an unindexed sort, however, the server interprets
+// batch size as a hard limit so that it can do a top k sort.
+// WARNING: this behavior may change in the future.
+assert.eq(4, t.find().batchSize(2).itcount(), 'E');
+assert.eq(2, t.find().sort({a: 1}).batchSize(2).itcount(), 'F');
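+
+// Illustrative sketch (not part of the original assertions; assumes the
+// shell's cursor.objsLeftInBatch() helper): batchSize controls how many
+// documents come back per batch, not how many come back in total, so after
+// pulling one document from a batch of two, exactly one should remain in the
+// current batch.
+var cur = t.find().batchSize(2);
+cur.next();
+assert.eq(1, cur.objsLeftInBatch());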
+
+// Run the tests with the index twice in order to double check plan caching.
+t.ensureIndex({a: 1});
+for (var i = 0; i < 2; i++) {
+ runIndexedTests();
+}
+
diff --git a/jstests/core/bench_test1.js b/jstests/core/bench_test1.js
new file mode 100644
index 00000000000..bb1423ee8b8
--- /dev/null
+++ b/jstests/core/bench_test1.js
@@ -0,0 +1,37 @@
+
+t = db.bench_test1;
+t.drop();
+
+t.insert( { _id : 1 , x : 1 } )
+t.insert( { _id : 2 , x : 1 } )
+
+ops = [
+ { op : "findOne" , ns : t.getFullName() , query : { _id : 1 } } ,
+ { op : "update" , ns : t.getFullName() , query : { _id : 1 } , update : { $inc : { x : 1 } } }
+]
+
+seconds = .7
+
+benchArgs = { ops : ops , parallel : 2 , seconds : seconds , host : db.getMongo().host };
+
+if (jsTest.options().auth) {
+ benchArgs['db'] = 'admin';
+ benchArgs['username'] = jsTest.options().adminUser;
+ benchArgs['password'] = jsTest.options().adminPassword;
+}
+res = benchRun( benchArgs );
+
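+// res.update is a per-second rate, so seconds * res.update approximates the
+// total number of $inc updates applied to { _id : 1 }; the assertion below
+// allows 5% slack on that total.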
+assert.lte( seconds * res.update , t.findOne( { _id : 1 } ).x * 1.05 , "A1" )
+
+
+assert.eq( 1 , t.getIndexes().length , "B1" )
+benchArgs['ops']=[ { op : "createIndex" , ns : t.getFullName() , key : { x : 1 } } ];
+benchArgs['parallel']=1;
+benchArgs['seconds']=1;
+benchRun( benchArgs );
+assert.eq( 2 , t.getIndexes().length , "B2" )
+benchArgs['ops']=[ { op : "dropIndex" , ns : t.getFullName() , key : { x : 1 } } ];
+benchRun( benchArgs );
+assert.soon( function(){ return t.getIndexes().length == 1; } );
+
+
diff --git a/jstests/core/bench_test2.js b/jstests/core/bench_test2.js
new file mode 100644
index 00000000000..871b24ca051
--- /dev/null
+++ b/jstests/core/bench_test2.js
@@ -0,0 +1,48 @@
+
+t = db.bench_test2
+t.drop();
+
+for ( i=0; i<100; i++ )
+ t.insert( { _id : i , x : 0 } );
+
+benchArgs = { ops : [ { ns : t.getFullName() ,
+ op : "update" ,
+ query : { _id : { "#RAND_INT" : [ 0 , 100 ] } } ,
+ update : { $inc : { x : 1 } } } ] ,
+ parallel : 2 ,
+ seconds : 1 ,
+ totals : true ,
+ host : db.getMongo().host }
+
+if (jsTest.options().auth) {
+ benchArgs['db'] = 'admin';
+ benchArgs['username'] = jsTest.options().adminUser;
+ benchArgs['password'] = jsTest.options().adminPassword;
+}
+
+res = benchRun( benchArgs )
+printjson( res );
+
+sumsq = 0
+sum = 0
+
+min = 1000
+max = 0;
+t.find().forEach(
+ function(z){
+ sum += z.x;
+ sumsq += Math.pow( ( res.update / 100 ) - z.x , 2 );
+ min = Math.min( z.x , min );
+ max = Math.max( z.x , max );
+ }
+)
+
+avg = sum / 100
+std = Math.sqrt( sumsq / 100 )
+
+print( "Avg: " + avg )
+print( "Std: " + std )
+print( "Min: " + min )
+print( "Max: " + max )
+
+
diff --git a/jstests/core/bench_test3.js b/jstests/core/bench_test3.js
new file mode 100644
index 00000000000..4bc21ed2505
--- /dev/null
+++ b/jstests/core/bench_test3.js
@@ -0,0 +1,27 @@
+t = db.bench_test3
+t.drop();
+
+
+benchArgs = { ops : [ { ns : t.getFullName() ,
+ op : "update" ,
+ upsert : true ,
+ query : { _id : { "#RAND_INT" : [ 0 , 5 , 4 ] } } ,
+ update : { $inc : { x : 1 } } } ] ,
+ parallel : 2 ,
+ seconds : 1 ,
+ totals : true ,
+ host : db.getMongo().host }
+
+if (jsTest.options().auth) {
+ benchArgs['db'] = 'admin';
+ benchArgs['username'] = jsTest.options().adminUser;
+ benchArgs['password'] = jsTest.options().adminPassword;
+}
+
+res = benchRun( benchArgs )
+printjson( res );
+
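+// The third element of #RAND_INT acts as a multiplier: _id is 4 * randInt(0..4),
+// so only the keys 0, 4, 8, 12 and 16 can exist after the upserts.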
+var keys = []
+var totals = {}
+db.bench_test3.find().sort( { _id : 1 } ).forEach( function(z){ keys.push( z._id ); totals[z._id] = z.x } );
+assert.eq( [ 0 , 4 , 8 , 12 , 16 ] , keys )
diff --git a/jstests/core/big_object1.js b/jstests/core/big_object1.js
new file mode 100644
index 00000000000..be61dbd3041
--- /dev/null
+++ b/jstests/core/big_object1.js
@@ -0,0 +1,55 @@
+
+t = db.big_object1
+t.drop();
+
+if ( db.adminCommand( "buildinfo" ).bits == 64 ){
+
+ var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ var s = large;
+ while ( s.length < 850 * 1024 ){
+ s += large;
+ }
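+    // Each pass adds one more ~850KB string to the document's array, so the
+    // documents grow until one exceeds the maximum BSON size and the insert
+    // fails (or reports a write error).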
+ x = 0;
+ while ( true ){
+ var result;
+ n = { _id : x , a : [] }
+ for ( i=0; i<14+x; i++ )
+ n.a.push( s )
+ try {
+ result = t.insert( n )
+ o = n
+ }
+ catch ( e ){
+ break;
+ }
+
+ if ( result.hasWriteErrors() )
+ break;
+ x++;
+ }
+
+ printjson( t.stats(1024*1024) )
+
+ assert.lt( 15 * 1024 * 1024 , Object.bsonsize( o ) , "A1" )
+ assert.gt( 17 * 1024 * 1024 , Object.bsonsize( o ) , "A2" )
+
+ assert.eq( x , t.count() , "A3" )
+
+ for ( i=0; i<x; i++ ){
+ o = t.findOne( { _id : i } )
+ try {
+ // test large mongo -> js conversion
+ var a = o.a;
+ } catch(e) {
+ assert(false, "Caught exception trying to insert during iteration " + i + ": " + e);
+ }
+ assert( o , "B" + i );
+ }
+
+ t.drop()
+}
+else {
+ print( "skipping big_object1 b/c not 64-bit" )
+}
+
+print("SUCCESS");
diff --git a/jstests/core/binData.js b/jstests/core/binData.js
new file mode 100644
index 00000000000..3f037650e05
--- /dev/null
+++ b/jstests/core/binData.js
@@ -0,0 +1,14 @@
+
+var x = new BinData(3, "OEJTfmD8twzaj/LPKLIVkA==");
+assert.eq(x.hex(), "3842537e60fcb70cda8ff2cf28b21590", "bad hex");
+assert.eq(x.base64(), "OEJTfmD8twzaj/LPKLIVkA==", "bad base64");
+assert.eq(x.type, 3, "bad type");
+assert.eq(x.length(), 16, "bad length");
+
+x = new BinData(0, "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=");
+assert.eq(x.hex(), "4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e", "bad hex");
+assert.eq(x.base64(), "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=", "bad base64");
+assert.eq(x.type, 0, "bad type");
+assert.eq(x.length(), 269, "bad length");
+
+
diff --git a/jstests/core/block_check_supported.js b/jstests/core/block_check_supported.js
new file mode 100644
index 00000000000..21d04ca93c7
--- /dev/null
+++ b/jstests/core/block_check_supported.js
@@ -0,0 +1,118 @@
+// Test that serverStatus() features dependent on the ProcessInfo::blockCheckSupported() routine
+// work correctly. These features are db.serverStatus({workingSet:1}).workingSet and
+// db.serverStatus().indexCounters.
+// Related to SERVER-9242, SERVER-6450.
+
+// Check that an object contains a specific set of fields and only those fields
+// NOTE: destroys 'item'
+//
+var testExpectedFields = function(itemString, item, fieldList) {
+ print('Testing ' + itemString + ' for expected fields');
+ for (var i = 0; i < fieldList.length; ++i) {
+ var field = fieldList[i];
+ if (typeof item[field] == 'undefined') {
+ doassert('Test FAILED: missing "' + field + '" field');
+ }
+ delete item[field];
+ }
+ if (!friendlyEqual({}, item)) {
+ doassert('Test FAILED: found unexpected field(s): ' + tojsononeline(item));
+ }
+};
+
+// Run test as function to keep cruft out of global namespace
+//
+var doTest = function () {
+
+ print('Testing workingSet and indexCounters portions of serverStatus');
+ var hostInfo = db.hostInfo();
+    var isXP = (hostInfo.os.name == 'Windows XP');
+    var isEmpty = (hostInfo.os.name == '');
+
+ // Check that the serverStatus command returns something for these sub-documents
+ //
+ var serverStatus = db.serverStatus({ workingSet: 1 });
+ if (!serverStatus) {
+ doassert('Test FAILED: db.serverStatus({workingSet:1}) did not return a value');
+ }
+ if (!serverStatus.workingSet) {
+ doassert('Test FAILED: db.serverStatus({workingSet:1}).workingSet was not returned');
+ }
+ if (!serverStatus.indexCounters) {
+ doassert('Test FAILED: db.serverStatus().indexCounters was not returned');
+ }
+ var workingSet_1 = serverStatus.workingSet;
+ var indexCounters_1 = serverStatus.indexCounters;
+
+ if (isXP) {
+ // Windows XP is the only supported platform that should be missing this data; make sure
+ // that we don't get bogus data back
+ //
+ var expectedResult = { info: 'not supported' };
+ print('Testing db.serverStatus({workingSet:1}).workingSet on Windows XP -- expecting ' +
+ tojsononeline(expectedResult));
+ assert.eq(expectedResult, workingSet_1,
+ 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' +
+ ' did not return the expected value');
+ expectedResult = { note: 'not supported on this platform' };
+ print('Testing db.serverStatus().indexCounters on Windows XP -- expecting ' +
+ tojsononeline(expectedResult));
+ assert.eq(expectedResult, indexCounters_1,
+ 'Test FAILED: db.serverStatus().indexCounters' +
+ ' did not return the expected value');
+ }
+ else if (isEmpty) {
+ // Until SERVER-9325 is fixed, Solaris/SmartOS will also be missing this data; make sure
+ // that we don't get bogus data back
+ //
+ expectedResult = { info: 'not supported' };
+ print('Testing db.serverStatus({workingSet:1}).workingSet on "" (Solaris?) -- expecting ' +
+ tojsononeline(expectedResult));
+ assert.eq(expectedResult, workingSet_1,
+ 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' +
+ ' did not return the expected value');
+ expectedResult = { note: 'not supported on this platform' };
+ print('Testing db.serverStatus().indexCounters on "" (Solaris?) -- expecting ' +
+ tojsononeline(expectedResult));
+ assert.eq(expectedResult, indexCounters_1,
+ 'Test FAILED: db.serverStatus().indexCounters' +
+ ' did not return the expected value');
+ }
+ else {
+ // Check that we get both workingSet and indexCounters and that all expected
+ // fields are present with no unexpected fields
+ //
+ testExpectedFields('db.serverStatus({workingSet:1}).workingSet',
+ workingSet_1,
+ ['note', 'pagesInMemory', 'computationTimeMicros', 'overSeconds']);
+ testExpectedFields('db.serverStatus().indexCounters',
+ indexCounters_1,
+ ['accesses', 'hits', 'misses', 'resets', 'missRatio']);
+
+ if (0) { // comment out until SERVER-9284 is fixed
+ // See if we can make the index counters values change
+ //
+ print('Testing that indexCounters accesses and hits increase by 1 on indexed find()');
+ var blockDB = db.getSiblingDB('block_check_supported');
+ blockDB.dropDatabase();
+ blockDB.coll.insert({ a: 1 });
+ blockDB.coll.ensureIndex({ a: 1 });
+ indexCounters_1 = db.serverStatus().indexCounters;
+ var doc = blockDB.coll.findOne({ a: 1 });
+ var indexCounters_2 = db.serverStatus().indexCounters;
+ assert.gt(indexCounters_2.accesses, indexCounters_1.accesses,
+ 'Test FAILED: db.serverStatus().indexCounters.accesses' +
+ ' should have had a value greater than ' + indexCounters_1.accesses +
+ ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) +
+ ', after find(): ' + tojsononeline(indexCounters_2));
+ assert.gt(indexCounters_2.hits, indexCounters_1.hits,
+ 'Test FAILED: db.serverStatus().indexCounters.hits' +
+ ' should have had a value greater than ' + indexCounters_1.hits +
+ ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) +
+ ', after find(): ' + tojsononeline(indexCounters_2));
+ } // comment out until SERVER-9284 is fixed
+ }
+ print('Test PASSED!');
+};
+
+doTest();
diff --git a/jstests/core/bulk_insert.js b/jstests/core/bulk_insert.js
new file mode 100644
index 00000000000..e26b323c6d9
--- /dev/null
+++ b/jstests/core/bulk_insert.js
@@ -0,0 +1,22 @@
+// Tests bulk insert of docs from the shell
+
+var coll = db.bulkInsertTest
+coll.drop()
+
+Random.srand( new Date().getTime() )
+
+var bulkSize = Math.floor( Random.rand() * 200 ) + 1
+var numInserts = Math.floor( Random.rand() * 300 ) + 1
+
+print( "Inserting " + numInserts + " bulks of " + bulkSize + " documents." )
+
+for( var i = 0; i < numInserts; i++ ){
+ var bulk = []
+ for( var j = 0; j < bulkSize; j++ ){
+ bulk.push({ hi : "there", i : i, j : j })
+ }
+
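+    // Passing an array to insert() sends the whole batch as one bulk insert.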
+ coll.insert( bulk )
+}
+
+assert.eq( coll.count(), bulkSize * numInserts )
diff --git a/jstests/core/capped.js b/jstests/core/capped.js
new file mode 100644
index 00000000000..421132b6f75
--- /dev/null
+++ b/jstests/core/capped.js
@@ -0,0 +1,11 @@
+db.jstests_capped.drop();
+db.createCollection("jstests_capped", {capped:true, size:30000});
+
+assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_capped"} ).count(), "expected a count of one index for new capped collection" );
+t = db.jstests_capped;
+
+t.save({x:1});
+t.save({x:2});
+
+assert( t.find().sort({$natural:1})[0].x == 1 , "expected obj.x==1");
+assert( t.find().sort({$natural:-1})[0].x == 2, "expected obj.x == 2");
diff --git a/jstests/core/capped1.js b/jstests/core/capped1.js
new file mode 100644
index 00000000000..0bbeaa40894
--- /dev/null
+++ b/jstests/core/capped1.js
@@ -0,0 +1,11 @@
+
+t = db.capped1;
+t.drop();
+
+db.createCollection("capped1" , {capped:true, size:1024 });
+v = t.validate();
+assert( v.valid , "A : " + tojson( v ) ); // SERVER-485
+
+t.save( { x : 1 } )
+assert( t.validate().valid , "B" )
+
diff --git a/jstests/core/capped2.js b/jstests/core/capped2.js
new file mode 100644
index 00000000000..65bb82f4c07
--- /dev/null
+++ b/jstests/core/capped2.js
@@ -0,0 +1,62 @@
+db.capped2.drop();
+db._dbCommand( { create: "capped2", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
+tzz = db.capped2;
+
+function debug( x ) {
+// print( x );
+}
+
+var val = new Array( 2000 );
+var c = "";
+for( i = 0; i < 2000; ++i, c += "---" ) { // bigger and bigger objects through the array...
+ val[ i ] = { a: c };
+}
+
+function checkIncreasing( i ) {
+ res = tzz.find().sort( { $natural: -1 } );
+ assert( res.hasNext(), "A" );
+ var j = i;
+ while( res.hasNext() ) {
+ try {
+ assert.eq( val[ j-- ].a, res.next().a, "B" );
+ } catch( e ) {
+ debug( "capped2 err " + j );
+ throw e;
+ }
+ }
+ res = tzz.find().sort( { $natural: 1 } );
+ assert( res.hasNext(), "C" );
+ while( res.hasNext() )
+ assert.eq( val[ ++j ].a, res.next().a, "D" );
+ assert.eq( j, i, "E" );
+}
+
+function checkDecreasing( i ) {
+ res = tzz.find().sort( { $natural: -1 } );
+ assert( res.hasNext(), "F" );
+ var j = i;
+ while( res.hasNext() ) {
+ assert.eq( val[ j++ ].a, res.next().a, "G" );
+ }
+ res = tzz.find().sort( { $natural: 1 } );
+ assert( res.hasNext(), "H" );
+ while( res.hasNext() )
+ assert.eq( val[ --j ].a, res.next().a, "I" );
+ assert.eq( j, i, "J" );
+}
+
+for( i = 0 ;; ++i ) {
+ debug( "capped 2: " + i );
+ tzz.insert( val[ i ] );
+ if ( tzz.count() == 0 ) {
+ assert( i > 100, "K" );
+ break;
+ }
+ checkIncreasing( i );
+}
+
+for( i = 600 ; i >= 0 ; --i ) {
+ debug( "capped 2: " + i );
+ tzz.insert( val[ i ] );
+ checkDecreasing( i );
+}
diff --git a/jstests/core/capped3.js b/jstests/core/capped3.js
new file mode 100644
index 00000000000..2e5e6790cb7
--- /dev/null
+++ b/jstests/core/capped3.js
@@ -0,0 +1,45 @@
+t = db.jstests_capped3;
+t2 = db.jstests_capped3_clone;
+t.drop();
+t2.drop();
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ), "A" );
+c = t2.find();
+for( i = 0; i < 1000; ++i ) {
+ assert.eq( i, c.next().i, "B" );
+}
+assert( !c.hasNext(), "C" );
+
+t.drop();
+t2.drop();
+
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ), "D" );
+c = t2.find().sort( {$natural:-1} );
+i = 999;
+while( c.hasNext() ) {
+ assert.eq( i--, c.next().i, "E" );
+}
+//print( "i: " + i );
+var str = tojson( t2.stats() );
+//print( "stats: " + tojson( t2.stats() ) );
+assert( i < 990, "F" );
+
+t.drop();
+t2.drop();
+
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( t.convertToCapped( 1000 ), "G" );
+c = t.find().sort( {$natural:-1} );
+i = 999;
+while( c.hasNext() ) {
+ assert.eq( i--, c.next().i, "H" );
+}
+assert( i < 990, "I" );
+assert( i > 900, "J" );
diff --git a/jstests/core/capped5.js b/jstests/core/capped5.js
new file mode 100644
index 00000000000..37b776ee1ca
--- /dev/null
+++ b/jstests/core/capped5.js
@@ -0,0 +1,40 @@
+
+tn = "capped5"
+
+t = db[tn]
+t.drop();
+
+
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 , z : 52 } );
+assert.eq( 1 , t.getIndexKeys().length , "A0" ) //now we assume _id index even on capped coll
+assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" );
+
+t.ensureIndex( { _id : 1 } )
+t.ensureIndex( { x : 1 } )
+
+assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" );
+assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" );
+
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 5 , x : 12 } );
+assert.eq( 1, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index
+assert.eq( 1, t.find().toArray().length ); //_id index unique, so second insert fails
+
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 6 , x : 12 } );
+t.ensureIndex( { x:1 }, {unique:true} );
+assert.eq( 2, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index
+assert.eq( 2, t.find().hint( {x:1} ).toArray().length );
+
+// SERVER-525 (closed) unique indexes in capped collection
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.ensureIndex( { _id:1 } ); // note we assume will be automatically unique because it is _id
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 5 , x : 12 } );
+assert.eq( 1, t.find().toArray().length );
diff --git a/jstests/core/capped6.js b/jstests/core/capped6.js
new file mode 100644
index 00000000000..5db12b2fcf9
--- /dev/null
+++ b/jstests/core/capped6.js
@@ -0,0 +1,109 @@
+// Test NamespaceDetails::cappedTruncateAfter via 'captrunc' command
+
+Random.setRandomSeed();
+
+db.capped6.drop();
+db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
+tzz = db.capped6;
+
+function debug( x ) {
+// print( x );
+}
+
+/**
+ * Check that documents in the collection are in order according to the value
+ * of a, which corresponds to the insert order. This is a check that the oldest
+ * document(s) is/are deleted when space is needed for the newest document. The
+ * check is performed in both forward and reverse directions.
+ */
+function checkOrder( i ) {
+ res = tzz.find().sort( { $natural: -1 } );
+ assert( res.hasNext(), "A" );
+ var j = i;
+ while( res.hasNext() ) {
+ try {
+ assert.eq( val[ j-- ].a, res.next().a, "B" );
+ } catch( e ) {
+ debug( "capped6 err " + j );
+ throw e;
+ }
+ }
+ res = tzz.find().sort( { $natural: 1 } );
+ assert( res.hasNext(), "C" );
+ while( res.hasNext() )
+ assert.eq( val[ ++j ].a, res.next().a, "D" );
+ assert.eq( j, i, "E" );
+}
+
+var val = new Array( 500 );
+var c = "";
+for( i = 0; i < 500; ++i, c += "-" ) {
+ // The a values are strings of increasing length.
+ val[ i ] = { a: c };
+}
+
+var oldMax = Random.randInt( 500 );
+var max = 0;
+
+/**
+ * Insert new documents until there are 'oldMax' documents in the collection,
+ * then remove a random number of documents (often all but one) via one or more
+ * 'captrunc' requests.
+ */
+function doTest() {
+ for( var i = max; i < oldMax; ++i ) {
+ tzz.insert( val[ i ] );
+ }
+ max = oldMax;
+ count = tzz.count();
+
+ var min = 1;
+ if ( Random.rand() > 0.3 ) {
+ min = Random.randInt( count ) + 1;
+ }
+
+ // Iteratively remove a random number of documents until we have no more
+ // than 'min' documents.
+ while( count > min ) {
+ // 'n' is the number of documents to remove - we must account for the
+ // possibility that 'inc' will be true, and avoid removing all documents
+ // from the collection in that case, as removing all documents is not
+ // allowed by 'captrunc'
+        var n = Random.randInt( count - min - 1 ); // 0 <= n < count - min - 1
+ var inc = Random.rand() > 0.5;
+ debug( count + " " + n + " " + inc );
+ assert.commandWorked( db.runCommand( { captrunc:"capped6", n:n, inc:inc } ) );
+ if ( inc ) {
+ n += 1;
+ }
+ count -= n;
+ max -= n;
+ // Validate the remaining documents.
+ checkOrder( max - 1 );
+ }
+}
+
+// Repeatedly add up to 'oldMax' documents and then truncate the newest
+// documents. Newer documents take up more space than older documents.
+for( var i = 0; i < 10; ++i ) {
+ doTest();
+}
+
+// reverse order of values
+var val = new Array( 500 );
+
+var c = "";
+for( i = 499; i >= 0; --i, c += "-" ) {
+ val[ i ] = { a: c };
+}
+db.capped6.drop();
+db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
+tzz = db.capped6;
+
+// Same test as above, but now the newer documents take less space than the
+// older documents instead of more.
+for( var i = 0; i < 10; ++i ) {
+ doTest();
+}
+
+tzz.drop();
diff --git a/jstests/core/capped7.js b/jstests/core/capped7.js
new file mode 100644
index 00000000000..693828da85f
--- /dev/null
+++ b/jstests/core/capped7.js
@@ -0,0 +1,89 @@
+// Test NamespaceDetails::emptyCappedCollection via 'emptycapped' command
+
+Random.setRandomSeed();
+
+db.capped7.drop();
+db._dbCommand( { create: "capped7", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
+tzz = db.capped7;
+
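+// new Array( 11 ).toString() produces ten commas, so 'ten' is a string of ten
+// '-' characters.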
+var ten = new Array( 11 ).toString().replace( /,/g, "-" );
+
+count = 0;
+
+/**
+ * Insert new documents until the capped collection loops and the document
+ * count doesn't increase on insert.
+ */
+function insertUntilFull() {
+    count = tzz.count();
+    var j = 0;
+    while( 1 ) {
+        tzz.save( {i:ten,j:j++} );
+        var newCount = tzz.count();
+        if ( count == newCount ) {
+            break;
+        }
+        count = newCount;
+    }
+}
+
+insertUntilFull();
+
+// oldCount == count before empty
+oldCount = count;
+
+assert.eq.automsg( "11", "tzz.stats().numExtents" );
+
+// oldSize == size before empty
+var oldSize = tzz.stats().storageSize;
+
+assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) );
+
+// check that collection storage parameters are the same after empty
+assert.eq.automsg( "11", "tzz.stats().numExtents" );
+assert.eq.automsg( "oldSize", "tzz.stats().storageSize" );
+
+// check that the collection is empty after empty
+assert.eq.automsg( "0", "tzz.find().itcount()" );
+assert.eq.automsg( "0", "tzz.count()" );
+
+// check that we can reuse the empty collection, inserting as many documents
+// as we were able to the first time through.
+insertUntilFull();
+assert.eq.automsg( "oldCount", "count" );
+assert.eq.automsg( "oldCount", "tzz.find().itcount()" );
+assert.eq.automsg( "oldCount", "tzz.count()" );
+
+assert.eq.automsg( "11", "tzz.stats().numExtents" );
+var oldSize = tzz.stats().storageSize;
+
+assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) );
+
+// check that the collection storage parameters are unchanged after another empty
+assert.eq.automsg( "11", "tzz.stats().numExtents" );
+assert.eq.automsg( "oldSize", "tzz.stats().storageSize" );
+
+// insert an arbitrary number of documents
+var total = Random.randInt( 2000 );
+for( var j = 1; j <= total; ++j ) {
+ tzz.save( {i:ten,j:j} );
+ // occasionally check that only the oldest documents are removed to make room
+ // for the newest documents
+ if ( Random.rand() > 0.95 ) {
+ assert.automsg( "j >= tzz.count()" );
+ assert.eq.automsg( "tzz.count()", "tzz.find().itcount()" );
+ var c = tzz.find().sort( {$natural:-1} );
+ var k = j;
+ assert.automsg( "c.hasNext()" );
+ while( c.hasNext() ) {
+ assert.eq.automsg( "c.next().j", "k--" );
+ }
+ // check the same thing with a reverse iterator as well
+ var c = tzz.find().sort( {$natural:1} );
+ assert.automsg( "c.hasNext()" );
+ while( c.hasNext() ) {
+ assert.eq.automsg( "c.next().j", "++k" );
+ }
+ assert.eq.automsg( "j", "k" );
+ }
+}
\ No newline at end of file
diff --git a/jstests/core/capped8.js b/jstests/core/capped8.js
new file mode 100644
index 00000000000..0f30e37aebf
--- /dev/null
+++ b/jstests/core/capped8.js
@@ -0,0 +1,108 @@
+// Test NamespaceDetails::cappedTruncateAfter with empty extents
+
+Random.setRandomSeed();
+
+t = db.jstests_capped8;
+
+function debug( x ) {
+// printjson( x );
+}
+
+/** Generate an object with a string field of specified length */
+function obj( size, x ) {
+    return {X:x, a:new Array( size + 1 ).toString()};
+}
+
+function withinOne( a, b ) {
+ assert( Math.abs( a - b ) <= 1, "not within one: " + a + ", " + b )
+}
+
+var X = 0;
+
+/**
+ * Insert enough documents of the given size spec that the collection will
+ * contain only documents having this size spec.
+ */
+function insertManyRollingOver( objsize ) {
+ // Add some variability, as the precise number can trigger different cases.
+ X++;
+ n = 250 + Random.randInt(10);
+
+ assert(t.count() == 0 || t.findOne().X != X);
+
+ for( i = 0; i < n; ++i ) {
+ t.save( obj( objsize, X ) );
+ debug( t.count() );
+ }
+
+ if (t.findOne().X != X) {
+ printjson(t.findOne());
+ print("\n\nERROR didn't roll over in insertManyRollingOver " + objsize);
+ print("approx amountwritten: " + (objsize * n));
+ printjson(t.stats());
+ assert(false);
+ }
+}
+
+/**
+ * Insert some documents in such a way that there may be an empty extent, then
+ * truncate the capped collection.
+ */
+function insertAndTruncate( first ) {
+ myInitialCount = t.count();
+ // Insert enough documents to make the capped allocation loop over.
+ insertManyRollingOver( 150 );
+ myFiftyCount = t.count();
+ // Insert documents that are too big to fit in the smaller extents.
+ insertManyRollingOver( 5000 );
+ myTwokCount = t.count();
+ if ( first ) {
+ initialCount = myInitialCount;
+ fiftyCount = myFiftyCount;
+ twokCount = myTwokCount;
+ // Sanity checks for collection count
+ assert( fiftyCount > initialCount );
+ assert( fiftyCount > twokCount );
+ } else {
+ // Check that we are able to insert roughly the same number of documents
+ // after truncating. The exact values are slightly variable as a result
+ // of the capped allocation algorithm.
+ withinOne( initialCount, myInitialCount );
+ withinOne( fiftyCount, myFiftyCount );
+ withinOne( twokCount, myTwokCount );
+ }
+ count = t.count();
+ // Check that we can truncate the collection successfully.
+ assert.commandWorked( db.runCommand( { captrunc:"jstests_capped8", n:count - 1, inc:false } ) );
+}
+
+/** Test truncating and subsequent inserts */
+function testTruncate() {
+ insertAndTruncate( true );
+ insertAndTruncate( false );
+ insertAndTruncate( false );
+}
+
+var pass = 1;
+
+print("pass " + pass++);
+t.drop();
+db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 10000, 4000 ] } );
+testTruncate();
+
+print("pass " + pass++);
+t.drop();
+db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 1000, 4000 ] } );
+testTruncate();
+
+print("pass " + pass++);
+t.drop();
+db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 4000 ] } );
+testTruncate();
+
+print("pass " + pass++);
+t.drop();
+db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000 ] } );
+testTruncate();
+
+t.drop();
diff --git a/jstests/core/capped9.js b/jstests/core/capped9.js
new file mode 100644
index 00000000000..2e0c2c74640
--- /dev/null
+++ b/jstests/core/capped9.js
@@ -0,0 +1,27 @@
+
+t = db.capped9;
+t.drop();
+
+db.createCollection("capped9" , {capped:true, size:1024*50 });
+
+t.insert( { _id : 1 , x : 2 , y : 3 } )
+
+assert.eq( 1 , t.find( { x : 2 } ).itcount() , "A1" )
+assert.eq( 1 , t.find( { y : 3 } ).itcount() , "A2" )
+//assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064
+
+t.update( { _id : 1 } , { $set : { y : 4 } } )
+//assert( db.getLastError() , "B1" ); // SERVER-3064
+//assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064
+
+t.ensureIndex( { _id : 1 } )
+
+assert.eq( 1 , t.find( { _id : 1 } ).itcount() , "D1" )
+
+assert.writeOK( t.update( { _id: 1 }, { $set: { y: 4 } } ));
+assert.eq( 4 , t.findOne().y , "D2" )
+
+
+
+
+
diff --git a/jstests/core/capped_empty.js b/jstests/core/capped_empty.js
new file mode 100644
index 00000000000..5b0fb6b8f8e
--- /dev/null
+++ b/jstests/core/capped_empty.js
@@ -0,0 +1,24 @@
+
+t = db.capped_empty;
+t.drop();
+
+db.createCollection( t.getName() , { capped : true , size : 100 } )
+
+t.insert( { x : 1 } );
+t.insert( { x : 2 } );
+t.insert( { x : 3 } );
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 3 , t.count() );
+assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned );
+
+t.runCommand( "emptycapped" );
+
+assert.eq( 0 , t.count() );
+
+t.insert( { x : 1 } );
+t.insert( { x : 2 } );
+t.insert( { x : 3 } );
+
+assert.eq( 3 , t.count() );
+assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned );
diff --git a/jstests/core/capped_max.js b/jstests/core/capped_max.js
new file mode 100644
index 00000000000..1d7cbc3ef23
--- /dev/null
+++ b/jstests/core/capped_max.js
@@ -0,0 +1,29 @@
+
+t = db.capped_max;
+sz = 1024 * 16;
+
+t.drop();
+db.createCollection( t.getName() , {capped: true, size: sz } );
+assert.lt( Math.pow( 2, 62 ), t.stats().max.floatApprox )
+
+t.drop();
+db.createCollection( t.getName() , {capped: true, size: sz, max: 123456 } );
+assert.eq( 123456, t.stats().max );
+
+// create a collection with the max possible doc cap (2^31-2 docs)
+t.drop();
+mm = Math.pow(2, 31) - 2;
+db.createCollection( t.getName() , {capped: true, size: sz, max: mm } );
+assert.eq( mm, t.stats().max );
+
+// create a collection with the 'no max' value (2^31-1 docs)
+t.drop();
+mm = Math.pow(2, 31) - 1;
+db.createCollection( t.getName() , {capped: true, size: sz, max: mm } );
+assert.eq(NumberLong("9223372036854775807"), t.stats().max );
+
+t.drop();
+res = db.createCollection( t.getName() , {capped: true, size: sz, max: Math.pow(2, 31) } );
+assert.eq( 0, res.ok, tojson(res) );
+assert.eq( 0, t.stats().ok )
+
diff --git a/jstests/core/capped_server2639.js b/jstests/core/capped_server2639.js
new file mode 100644
index 00000000000..adc6f994163
--- /dev/null
+++ b/jstests/core/capped_server2639.js
@@ -0,0 +1,27 @@
+
+name = "server2639"
+
+t = db.getCollection( name );
+t.drop();
+
+
+db.createCollection( name , { capped : true , size : 1 } );
+
+size = t.stats().storageSize;
+
+bigString = "";
+while ( bigString.length < size )
+ bigString += ".";
+
+t.insert( { x : 1 } );
+
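+// A document at least as large as the capped collection itself can never fit,
+// so this insert must fail outright instead of evicting the existing document.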
+var res = t.insert( { x : 2 , bigString : bigString } );
+assert.writeError( res );
+assert.eq( 16328, res.getWriteError().code, res.getWriteError().toString() );
+
+assert.eq( 1 , t.count() ); // make sure small doc didn't get deleted
+assert.eq( 1 , t.findOne().x );
+
+// make sure can still insert
+t.insert( { x : 2 } );
+assert.eq( 2 , t.count() );
diff --git a/jstests/core/capped_server7543.js b/jstests/core/capped_server7543.js
new file mode 100644
index 00000000000..514cd7964b2
--- /dev/null
+++ b/jstests/core/capped_server7543.js
@@ -0,0 +1,11 @@
+
+mydb = db.getSisterDB( "capped_server7543" );
+mydb.dropDatabase();
+
+mydb.createCollection( "foo" , { capped : true , size : 12288 } );
+
+assert.eq( 12288, mydb.foo.stats().storageSize );
+assert.eq( 1, mydb.foo.validate(true).extentCount );
+
+mydb.dropDatabase();
+
diff --git a/jstests/core/cappeda.js b/jstests/core/cappeda.js
new file mode 100644
index 00000000000..3244ffae84f
--- /dev/null
+++ b/jstests/core/cappeda.js
@@ -0,0 +1,32 @@
+
+t = db.scan_capped_id;
+t.drop()
+
+x = t.runCommand( "create" , { capped : true , size : 10000 } )
+assert( x.ok )
+
+for ( i=0; i<100; i++ )
+ t.insert( { _id : i , x : 1 } )
+
+function q() {
+ return t.findOne( { _id : 5 } )
+}
+
+function u() {
+ var res = t.update( { _id : 5 } , { $set : { x : 2 } } );
+ if ( res.hasWriteErrors() )
+ throw res;
+}
+
+
+// SERVER-3064
+//assert.throws( q , [] , "A1" );
+//assert.throws( u , [] , "B1" );
+
+t.ensureIndex( { _id : 1 } )
+
+assert.eq( 1 , q().x )
+q()
+u()
+
+assert.eq( 2 , q().x )
diff --git a/jstests/core/check_shard_index.js b/jstests/core/check_shard_index.js
new file mode 100644
index 00000000000..f85071124fb
--- /dev/null
+++ b/jstests/core/check_shard_index.js
@@ -0,0 +1,141 @@
+// -------------------------
+// CHECKSHARDINGINDEX TEST UTILS
+// -------------------------
+
+f = db.jstests_shardingindex;
+f.drop();
+
+
+// -------------------------
+// Case 1: all entries filled or empty should make a valid index
+//
+
+f.drop();
+f.ensureIndex( { x: 1 , y: 1 } );
+assert.eq( 0 , f.count() , "1. initial count should be zero" );
+
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( true , res.ok, "1a" );
+
+f.save( { x: 1 , y : 1 } );
+assert.eq( 1 , f.count() , "1. count after initial insert should be 1" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( true , res.ok , "1b" );
+
+
+// -------------------------
+// Case 2: an entry missing one of the key fields makes an index unsuitable
+//         (an explicit null value is accepted; compare cases 2a and 2b below)
+//
+
+f.drop();
+f.ensureIndex( { x: 1 , y: 1 } );
+assert.eq( 0 , f.count() , "2. initial count should be zero" );
+
+f.save( { x: 1 , y : 1 } );
+f.save( { x: null , y : 1 } );
+
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( true , res.ok , "2a " + tojson(res) );
+
+f.save( { y: 2 } );
+assert.eq( 3 , f.count() , "2. count after initial insert should be 3" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "2b " + tojson(res) );
+
+// Check _id index
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {_id:1} });
+assert.eq( true , res.ok , "2c " + tojson(res) );
+assert( res.idskip , "2d " + tojson(res) )
+
+// -------------------------
+// Case 3: entry with array values would make an index unsuitable
+//
+
+f.drop();
+f.ensureIndex( { x: 1 , y: 1 } );
+assert.eq( 0 , f.count() , "3. initial count should be zero" );
+
+f.save( { x: 1 , y : 1 } );
+f.save( { x: [1, 2] , y : 2 } );
+
+assert.eq( 2 , f.count() , "3. count after initial insert should be 2" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "3a " + tojson(res) );
+
+f.remove( { y : 2 } );
+f.reIndex();
+
+assert.eq( 1 , f.count() , "3. count after removing array value should be 1" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( true , res.ok , "3b " + tojson(res) );
+
+f.save( { x : 2, y : [1, 2] } )
+
+assert.eq( 2 , f.count() , "3. count after adding array value should be 2" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "3c " + tojson(res) );
+
+// -------------------------
+// Case 4: Handles prefix shard key indexes.
+//
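+// A shard key may be a prefix of a compound index (e.g. key { x:1 } backed by the index
+// { x:1, y:1, z:1 } below), but an array value in any indexed field, including the
+// suffix fields outside the key, disqualifies the index.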
+
+f.drop();
+f.ensureIndex( { x: 1 , y: 1, z: 1 } );
+assert.eq( 0 , f.count() , "4. initial count should be zero" );
+
+f.save( { x: 1 , y : 1, z : 1 } );
+
+assert.eq( 1 , f.count() , "4. count after initial insert should be 1" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
+assert.eq( true , res.ok , "4a " + tojson(res) );
+
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( true , res.ok , "4b " + tojson(res) );
+
+f.save( { x: [1, 2] , y : 2, z : 2 } );
+
+assert.eq( 2 , f.count() , "4. count after adding array value should be 2" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
+assert.eq( false , res.ok , "4c " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "4d " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
+assert.eq( false , res.ok , "4e " + tojson(res) );
+
+
+f.remove( { y : 2 } );
+f.reIndex();
+
+assert.eq( 1 , f.count() , "4. count after removing array value should be 1" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
+assert.eq( true , res.ok , "4f " + tojson(res) );
+
+f.save( { x : 3, y : [1, 2], z : 3 } )
+
+assert.eq( 2 , f.count() , "4. count after adding array value on second key should be 2" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
+assert.eq( false , res.ok , "4g " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "4h " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
+assert.eq( false , res.ok , "4i " + tojson(res) );
+
+f.remove( { x : 3 } );
+f.reIndex(); // Necessary so that the index is no longer marked as multikey
+
+assert.eq( 1 , f.count() , "4. count after removing array value should be 1 again" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
+assert.eq( true , res.ok , "4e " + tojson(res) );
+
+f.save( { x : 4, y : 4, z : [1, 2] } )
+
+assert.eq( 2 , f.count() , "4. count after adding array value on third key should be 2" );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
+assert.eq( false , res.ok , "4c " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
+assert.eq( false , res.ok , "4d " + tojson(res) );
+res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
+assert.eq( false , res.ok , "4e " + tojson(res) );
+
+
+print("PASSED");
diff --git a/jstests/core/collmod.js b/jstests/core/collmod.js
new file mode 100644
index 00000000000..2dc5555f3ec
--- /dev/null
+++ b/jstests/core/collmod.js
@@ -0,0 +1,82 @@
+// Basic js tests for the collMod command.
+// Test setting the usePowerOf2Sizes flag, and modifying TTL indexes.
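+//
+// General command shape used below (a sketch): collMod takes the collection name plus
+// one or more modifiers, e.g.
+//   db.runCommand( { collMod : "collModTest" ,
+//                    usePowerOf2Sizes : false ,
+//                    index : { keyPattern : { a : 1 } , expireAfterSeconds : 100 } } );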
+
+function debug( x ) {
+ //printjson( x );
+}
+
+var coll = "collModTest";
+var t = db.getCollection( coll );
+t.drop();
+
+db.createCollection( coll );
+
+
+// Verify the new collection has userFlags set to 1
+printjson(t.stats());
+assert.eq( t.stats().userFlags , 1 , "fresh collection doesn't have userFlags = 1 ");
+
+// Modify the collection with the usePowerOf2Sizes flag. Verify userFlags now = 0.
+var res = db.runCommand( { "collMod" : coll, "usePowerOf2Sizes" : false } );
+debug( res );
+assert.eq( res.ok , 1 , "collMod failed" );
+assert.eq( t.stats().userFlags , 0 , "modified collection should have userFlags = 0 ");
+var nso = db.system.namespaces.findOne( { name : t.getFullName() } );
+debug( nso );
+assert.eq( 0, nso.options.flags, "options didn't sync to system.namespaces: " + tojson( nso ) );
+
+// Try to modify it with some unrecognized value
+var res = db.runCommand( { "collMod" : coll, "unrecognized" : true } );
+debug( res );
+assert.eq( res.ok , 0 , "collMod shouldn't return ok with unrecognized value" );
+
+// add a TTL index
+t.ensureIndex( {a : 1}, { "expireAfterSeconds": 50 } )
+assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 50 } ),
+ "TTL index not added" );
+
+// try to modify it with a bad key pattern
+var res = db.runCommand( { "collMod" : coll,
+ "index" : { "keyPattern" : "bad" , "expireAfterSeconds" : 100 } } );
+debug( res );
+assert.eq( 0 , res.ok , "mod shouldn't work with bad keypattern");
+
+// try to modify it without expireAfterSeconds field
+var res = db.runCommand( { "collMod" : coll,
+ "index" : { "keyPattern" : {a : 1} } } );
+debug( res );
+assert.eq( 0 , res.ok , "TTL mod shouldn't work without expireAfterSeconds");
+
+// try to modify it with a non-numeric expireAfterSeconds field
+var res = db.runCommand( { "collMod" : coll,
+ "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : "100" } } );
+debug( res );
+assert.eq( 0 , res.ok , "TTL mod shouldn't work with non-numeric expireAfterSeconds");
+
+// this time modifying should finally work
+var res = db.runCommand( { "collMod" : coll,
+ "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : 100 } } );
+debug( res );
+assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ),
+ "TTL index not modified" );
+
+// try to modify a faulty TTL index with a non-numeric expireAfterSeconds field
+t.dropIndex( {a : 1 } );
+t.ensureIndex( {a : 1} , { "expireAfterSeconds": "50" } )
+var res = db.runCommand( { "collMod" : coll,
+ "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
+debug( res );
+assert.eq( 0, res.ok, "shouldn't be able to modify faulty index spec" );
+
+// try with new index, this time set both expireAfterSeconds and the usePowerOf2Sizes flag
+t.dropIndex( {a : 1 } );
+t.ensureIndex( {a : 1} , { "expireAfterSeconds": 50 } )
+var res = db.runCommand( { "collMod" : coll ,
+ "usePowerOf2Sizes" : true,
+ "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
+debug( res );
+assert.eq( 1, res.ok, "should be able to modify both userFlags and expireAfterSeconds" );
+assert.eq( t.stats().userFlags , 1 , "userflags should be 1 now");
+assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ),
+ "TTL index should be 100 now" );
+
diff --git a/jstests/core/compact.js b/jstests/core/compact.js
new file mode 100644
index 00000000000..2121debc17e
--- /dev/null
+++ b/jstests/core/compact.js
@@ -0,0 +1,76 @@
+// compact.js
+
+var mydb = db.getSiblingDB('compact');
+t = mydb.compacttest;
+t.drop();
+t.insert({ x: 3 });
+t.insert({ x: 3 });
+t.insert({ x: 5 });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.insert({ x: 4, z: 2, k: 'aaa' });
+t.ensureIndex({ x: 1 });
+
+print("1");
+
+var res = mydb.runCommand({ compact: 'compacttest', dev: true, force: true });
+printjson(res);
+assert(res.ok);
+assert(t.count() == 9);
+var v = t.validate(true);
+assert(v.ok);
+assert(v.extentCount == 1);
+assert(v.deletedCount == 1);
+assert(t.getIndexes().length == 2);
+var ssize = t.stats().storageSize;
+
+print("2");
+res = mydb.runCommand({ compact: 'compacttest', dev: true,paddingBytes:1000, force:true });
+assert(res.ok);
+assert(t.count() == 9);
+var v = t.validate(true);
+assert(v.ok);
+assert(t.stats().storageSize > ssize, "expected more storage use since the requested padding is higher; sizes are rounded to allocation boundaries, so an unrelated allocation change could break this assertion");
+//printjson(t.stats());
+
+print("z");
+
+t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
+t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
+t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
+t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
+t.insert({ x: 4, z: null, k: { f: "", b: ""} });
+t.insert({ x: 4, z: null, k: { c: ""} });
+t.insert({ x: 4, z: null, k: { h: ""} });
+t.insert({ x: 4, z: null });
+t.insert({ x: 4, z: 3});
+t.insert({ x: 4, z: 2, k: { a: "", b: ""} });
+t.insert({ x: 4, z: null, k: { c: ""} });
+t.insert({ x: 4, z: null, k: { c: ""} });
+t.insert({ x: 4, z: 3, k: { c: ""} });
+
+t.ensureIndex({ z: 1, k: 1 });
+//t.ensureIndex({ z: 1, k: 1 }, { unique: true });
+//t.ensureIndex({ z: 1, k: 1 }, { dropDups: true, unique:true });
+
+res = mydb.runCommand({ compact: 'compacttest', dev: true, paddingFactor: 1.2, force:true });
+printjson(res);
+assert(res.ok);
+assert(t.count() > 13);
+var v = t.validate(true);
+assert(v.ok);
+
+print("3");
+
+// works on an empty collection?
+t.remove({});
+assert(mydb.runCommand({ compact: 'compacttest', dev: true, force:true }).ok);
+assert(t.count() == 0);
+v = t.validate(true);
+assert(v.ok);
+assert(v.extentCount == 1);
+assert(t.getIndexes().length == 3);
+
diff --git a/jstests/core/compact2.js b/jstests/core/compact2.js
new file mode 100644
index 00000000000..0a7c343a3f9
--- /dev/null
+++ b/jstests/core/compact2.js
@@ -0,0 +1,52 @@
+// Compaction of a v0 index converts it to a v1 index using a v1 index comparator during external
+// sort. SERVER-6499
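+//
+// For reference, checkCompact() below builds the v0 index explicitly via the 'v' index
+// option, in simplified form:
+//   t.ensureIndex( { date:1 }, { v:0 } );
+// and then relies on compact to rebuild it at the current default version (v1).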
+
+t = db.jstests_compact2;
+t.drop();
+
+/**
+ * Assert that the index is of the expected version and its keys are ordered consistently with this
+ * version, and that the unique and background fields are set correctly.
+ */
+function assertIndex( expectedVersion, unique, background ) {
+ indexSpec = db.system.indexes.findOne( { ns:t.toString(), key:{ date:1 } } );
+ // The index version is as expected.
+ assert.eq( expectedVersion, indexSpec.v );
+ // The index uniqueness is as expected (treat missing and false unique specs as equivalent).
+ assert.eq( !unique, !indexSpec.unique );
+ // Background is as expected.
+ assert.eq( !background, !indexSpec.background );
+ // Check that 'date' key ordering is consistent with the index version.
+ dates = t.find().hint( { date:1 } ).toArray().map( function( x ) { return x.date; } );
+ if ( expectedVersion == 0 ) {
+ // Under v0 index comparison, new Date( -1 ) > new Date( 1 ).
+ assert.eq( [ new Date( 1 ), new Date( -1 ) ], dates );
+ }
+ else {
+        // Under v1 index comparison, new Date( -1 ) < new Date( 1 ).
+ assert.eq( [ new Date( -1 ), new Date( 1 ) ], dates );
+ }
+}
+
+/** Compact a collection and check the resulting indexes. */
+function checkCompact( originalVersion, unique, background ) {
+ t.drop();
+ t.save( { date:new Date( 1 ) } );
+ t.save( { date:new Date( -1 ) } );
+ t.ensureIndex( { date:1 }, { unique:unique, v:originalVersion, background:background } );
+ assertIndex( originalVersion, unique, background );
+
+ // Under SERVER-6499, compact fails when a v0 index is converted to a v1 index and key
+ // comparisons are inconsistent, as with the date values in this test.
+ assert.commandWorked( t.runCommand( "compact" ) );
+ assert( !db.getLastError() );
+
+ // Compact built an index with the default index version (v1). Uniqueness is maintained, but
+ // background always becomes false.
+ assertIndex( 1, unique, false );
+}
+
+checkCompact( 0, true, true );
+checkCompact( 0, false, false );
+checkCompact( 1, true, false );
+checkCompact( 1, false, true );
diff --git a/jstests/core/compactPreservePadding.js b/jstests/core/compactPreservePadding.js
new file mode 100644
index 00000000000..4748afb9a82
--- /dev/null
+++ b/jstests/core/compactPreservePadding.js
@@ -0,0 +1,26 @@
+// test preservePadding
+
+var mydb = db.getSiblingDB('compactPreservePadding');
+var collName = "compactPreservePadding";
+var t = mydb.getCollection(collName);
+t.drop();
+
+// use larger keyname to avoid hitting an edge case with extents
+for (i = 0; i < 10000; i++) {
+ t.insert({useLargerKeyName:i});
+}
+
+// remove half the entries
+t.remove({useLargerKeyName:{$mod:[2,0]}})
+printjson(t.stats());
+originalSize = t.stats().size;
+originalStorage = t.stats().storageSize;
+
+// compact!
+mydb.runCommand({compact: collName, preservePadding: true});
+printjson(t.stats());
+
+// object sizes ('size') should be the same (unless we hit an edge case involving extents, which
+// this test doesn't) and storage size should shrink
+assert(originalSize == t.stats().size);
+assert(originalStorage > t.stats().storageSize);
diff --git a/jstests/core/connection_status.js b/jstests/core/connection_status.js
new file mode 100644
index 00000000000..08d05cbf28d
--- /dev/null
+++ b/jstests/core/connection_status.js
@@ -0,0 +1,27 @@
+// Tests the connectionStatus command
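+//
+// An abridged reply shape (assumed here for illustration; only authenticatedUsers is
+// inspected below):
+//   { authInfo : { authenticatedUsers : [ { user : "someone" , db : "connection_status" } ] },
+//     ok : 1 }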
+
+var dbName = 'connection_status';
+var myDB = db.getSiblingDB(dbName);
+myDB.dropAllUsers();
+
+function test(userName) {
+ myDB.createUser({user: userName, pwd: "weak password", roles: jsTest.basicUserRoles});
+ myDB.auth(userName, "weak password");
+
+ var output = myDB.runCommand("connectionStatus");
+ assert.commandWorked(output);
+ var users = output.authInfo.authenticatedUsers;
+
+ var matches = 0;
+ for (var i=0; i < users.length; i++) {
+ if (users[i].db != dbName)
+ continue;
+
+ assert.eq(users[i].user, userName);
+ matches++;
+ }
+ assert.eq(matches, 1);
+}
+
+test("someone");
+test("someone else"); // replaces someone
diff --git a/jstests/core/connection_string_validation.js b/jstests/core/connection_string_validation.js
new file mode 100644
index 00000000000..4ecd1f926ee
--- /dev/null
+++ b/jstests/core/connection_string_validation.js
@@ -0,0 +1,106 @@
+// Test validation of connection strings passed to the JavaScript "connect()" function.
+// Related to SERVER-8030.
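+//
+// connect() is the shell helper under test; a typical successful call looks like
+// (default port assumed):
+//   var testDB = connect( "localhost:27017/test" ); // host:port/dbname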
+
+port = "27017"
+
+if ( db.getMongo().host.indexOf( ":" ) >= 0 ) {
+ var idx = db.getMongo().host.indexOf( ":" );
+ port = db.getMongo().host.substring( idx + 1 );
+}
+
+var goodStrings = [
+ "localhost:" + port + "/test",
+ "127.0.0.1:" + port + "/test"
+ ];
+
+var badStrings = [
+ { s: undefined, r: /^Missing connection string$/ },
+ { s: 7, r: /^Incorrect type/ },
+ { s: null, r: /^Incorrect type/ },
+ { s: "", r: /^Empty connection string$/ },
+ { s: " ", r: /^Empty connection string$/ },
+ { s: ":", r: /^Missing host name/ },
+ { s: "/", r: /^Missing host name/ },
+ { s: ":/", r: /^Missing host name/ },
+ { s: ":/test", r: /^Missing host name/ },
+ { s: ":" + port + "/", r: /^Missing host name/ },
+ { s: ":" + port + "/test", r: /^Missing host name/ },
+ { s: "/test", r: /^Missing host name/ },
+ { s: "localhost:/test", r: /^Missing port number/ },
+ { s: "127.0.0.1:/test", r: /^Missing port number/ },
+ { s: "127.0.0.1:cat/test", r: /^Invalid port number/ },
+ { s: "127.0.0.1:1cat/test", r: /^Invalid port number/ },
+ { s: "127.0.0.1:123456/test", r: /^Invalid port number/ },
+ { s: "127.0.0.1:65536/test", r: /^Invalid port number/ },
+ { s: "::1:65536/test", r: /^Invalid port number/ },
+ { s: "127.0.0.1:" + port + "/", r: /^Missing database name/ },
+ { s: "::1:" + port + "/", r: /^Missing database name/ }
+ ];
+
+function testGood(i, connectionString) {
+ print("\nTesting good connection string " + i + " (\"" + connectionString + "\") ...");
+ var gotException = false;
+ var exception;
+ try {
+ var connectDB = connect(connectionString);
+ connectDB = null;
+ }
+ catch (e) {
+ gotException = true;
+ exception = e;
+ }
+ if (!gotException) {
+ print("Good connection string " + i +
+ " (\"" + connectionString + "\") correctly validated");
+ return;
+ }
+ var message = "FAILED to correctly validate goodString " + i +
+ " (\"" + connectionString + "\"): exception was \"" + tojson(exception) + "\"";
+ doassert(message);
+}
+
+function testBad(i, connectionString, errorRegex) {
+ print("\nTesting bad connection string " + i + " (\"" + connectionString + "\") ...");
+ var gotException = false;
+ var gotCorrectErrorText = false;
+ var exception;
+ try {
+ var connectDB = connect(connectionString);
+ connectDB = null;
+ }
+ catch (e) {
+ gotException = true;
+ exception = e;
+ if (errorRegex.test(e.message)) {
+ gotCorrectErrorText = true;
+ }
+ }
+ if (gotCorrectErrorText) {
+ print("Bad connection string " + i + " (\"" + connectionString +
+ "\") correctly rejected:\n" + tojson(exception));
+ return;
+ }
+ var message = "FAILED to generate correct exception for badString " + i +
+ " (\"" + connectionString + "\"): ";
+ if (gotException) {
+ message += "exception was \"" + tojson(exception) +
+ "\", it should have matched \"" + errorRegex.toString() + "\"";
+ }
+ else {
+ message += "no exception was thrown";
+ }
+ doassert(message);
+}
+
+var i;
+jsTest.log("TESTING " + goodStrings.length + " good connection strings");
+for (i = 0; i < goodStrings.length; ++i) {
+ testGood(i, goodStrings[i]);
+}
+
+jsTest.log("TESTING " + badStrings.length + " bad connection strings");
+for (i = 0; i < badStrings.length; ++i) {
+ testBad(i, badStrings[i].s, badStrings[i].r);
+}
+
+jsTest.log("SUCCESSFUL test completion");
diff --git a/jstests/core/constructors.js b/jstests/core/constructors.js
new file mode 100644
index 00000000000..ac8ae08d7c0
--- /dev/null
+++ b/jstests/core/constructors.js
@@ -0,0 +1,313 @@
+// Tests of the validity checks performed for 10gen-specific object constructors
+
+// Takes a list of constructors and returns a new list with an extra entry for each constructor with
+// "new" prepended
+function addConstructorsWithNew (constructorList) {
+ function prependNew (constructor) {
+ return "new " + constructor;
+ }
+
+ var valid = constructorList.valid;
+ var invalid = constructorList.invalid;
+ // We use slice(0) here to make a copy of our lists
+ var validWithNew = valid.concat(valid.slice(0).map(prependNew));
+ var invalidWithNew = invalid.concat(invalid.slice(0).map(prependNew));
+ return { "valid" : validWithNew, "invalid" : invalidWithNew };
+}
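+// For example (illustrative):
+//   addConstructorsWithNew( { valid : [ "ObjectId()" ] , invalid : [ "ObjectId(5)" ] } )
+// returns { valid : [ "ObjectId()" , "new ObjectId()" ] ,
+//           invalid : [ "ObjectId(5)" , "new ObjectId(5)" ] }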
+
+function clientEvalConstructorTest (constructorList) {
+ constructorList = addConstructorsWithNew(constructorList);
+ constructorList.valid.forEach(function (constructor) {
+ try {
+ eval(constructor);
+ }
+ catch (e) {
+ throw ("valid constructor: " + constructor + " failed in eval context: " + e);
+ }
+ });
+ constructorList.invalid.forEach(function (constructor) {
+ assert.throws(function () { eval(constructor) },
+ [], "invalid constructor did not throw error in eval context: " + constructor);
+ });
+}
+
+function dbEvalConstructorTest (constructorList) {
+ constructorList = addConstructorsWithNew(constructorList);
+ constructorList.valid.forEach(function (constructor) {
+ try {
+ db.eval(constructor);
+ }
+ catch (e) {
+ throw ("valid constructor: " + constructor + " failed in db.eval context: " + e);
+ }
+ });
+ constructorList.invalid.forEach(function (constructor) {
+ assert.throws(function () { db.eval(constructor) },
+ [], "invalid constructor did not throw error in db.eval context: " + constructor);
+ });
+}
+
+function mapReduceConstructorTest (constructorList) {
+ constructorList = addConstructorsWithNew(constructorList);
+ t = db.mr_constructors;
+ t.drop();
+
+ t.save( { "partner" : 1, "visits" : 9 } )
+ t.save( { "partner" : 2, "visits" : 9 } )
+ t.save( { "partner" : 1, "visits" : 11 } )
+ t.save( { "partner" : 1, "visits" : 30 } )
+ t.save( { "partner" : 2, "visits" : 41 } )
+ t.save( { "partner" : 2, "visits" : 41 } )
+
+ constructorList.valid.forEach(function (constructor) {
+ try {
+ m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
+
+ r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
+
+ res = t.mapReduce( m , r , { out : "mr_constructors_out" , scope : { xx : 1 } } );
+ }
+ catch (e) {
+ throw ("valid constructor: " + constructor + " failed in mapReduce context: " + e);
+ }
+ });
+ constructorList.invalid.forEach(function (constructor) {
+ m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
+
+ r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
+
+ assert.throws(function () { res = t.mapReduce( m , r ,
+ { out : "mr_constructors_out" , scope : { xx : 1 } } ) },
+ [], "invalid constructor did not throw error in mapReduce context: " + constructor);
+ });
+
+ db.mr_constructors_out.drop();
+ t.drop();
+}
+
+function whereConstructorTest (constructorList) {
+ constructorList = addConstructorsWithNew(constructorList);
+ t = db.where_constructors;
+ t.drop();
+ assert.writeOK( t.insert({ x : 1 }));
+
+ constructorList.valid.forEach(function (constructor) {
+ try {
+ t.findOne({ $where : constructor });
+ }
+ catch (e) {
+ throw ("valid constructor: " + constructor + " failed in $where query: " + e);
+ }
+ });
+ constructorList.invalid.forEach(function (constructor) {
+ assert.throws(function () { t.findOne({ $where : constructor }) },
+ [], "invalid constructor did not throw error in $where query: " + constructor);
+ });
+}
+
+var dbrefConstructors = {
+ "valid" : [
+ "DBRef(\"namespace\", 0)",
+ "DBRef(\"namespace\", \"test\")",
+ "DBRef(\"namespace\", ObjectId())",
+ "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))",
+ ],
+ "invalid" : [
+ "DBRef()",
+ "DBRef(true, ObjectId())",
+ "DBRef(\"namespace\")",
+ "DBRef(\"namespace\", ObjectId(), true)",
+ ]
+}
+
+var dbpointerConstructors = {
+ "valid" : [
+ "DBPointer(\"namespace\", ObjectId())",
+ "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))",
+ ],
+ "invalid" : [
+ "DBPointer()",
+ "DBPointer(true, ObjectId())",
+ "DBPointer(\"namespace\", 0)",
+ "DBPointer(\"namespace\", \"test\")",
+ "DBPointer(\"namespace\")",
+ "DBPointer(\"namespace\", ObjectId(), true)",
+ ]
+}
+
+
+var objectidConstructors = {
+ "valid" : [
+ 'ObjectId()',
+ 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")',
+ ],
+ "invalid" : [
+ 'ObjectId(5)',
+ 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")',
+ ]
+}
+
+var timestampConstructors = {
+ "valid" : [
+ 'Timestamp()',
+ 'Timestamp(0,0)',
+ 'Timestamp(1.0,1.0)',
+ ],
+ "invalid" : [
+ 'Timestamp(0)',
+ 'Timestamp(0,0,0)',
+ 'Timestamp("test","test")',
+ 'Timestamp("test",0)',
+ 'Timestamp(0,"test")',
+ 'Timestamp(true,true)',
+ 'Timestamp(true,0)',
+ 'Timestamp(0,true)',
+ ]
+}
+
+var bindataConstructors = {
+ "valid" : [
+ 'BinData(0,"test")',
+ ],
+ "invalid" : [
+ 'BinData(0,"test", "test")',
+ 'BinData()',
+ 'BinData(-1, "")',
+ 'BinData(256, "")',
+ 'BinData("string","aaaa")',
+ // SERVER-10152
+ //'BinData(0, true)',
+ //'BinData(0, null)',
+ //'BinData(0, undefined)',
+ //'BinData(0, {})',
+ //'BinData(0, [])',
+ //'BinData(0, function () {})',
+ ]
+}
+
+var uuidConstructors = {
+ "valid" : [
+ 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
+ ],
+ "invalid" : [
+ 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
+ 'UUID()',
+ 'UUID("aa")',
+ 'UUID("invalidhex")',
+ // SERVER-9686
+ //'UUID("invalidhexbutstilltherequiredlen")',
+ 'UUID(true)',
+ 'UUID(null)',
+ 'UUID(undefined)',
+ 'UUID({})',
+ 'UUID([])',
+ 'UUID(function () {})',
+ ]
+}
+
+var md5Constructors = {
+ "valid" : [
+ 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
+ ],
+ "invalid" : [
+ 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
+ 'MD5()',
+ 'MD5("aa")',
+ 'MD5("invalidhex")',
+ // SERVER-9686
+ //'MD5("invalidhexbutstilltherequiredlen")',
+ 'MD5(true)',
+ 'MD5(null)',
+ 'MD5(undefined)',
+ 'MD5({})',
+ 'MD5([])',
+ 'MD5(function () {})',
+ ]
+}
+
+var hexdataConstructors = {
+ "valid" : [
+ 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
+ // Numbers as the payload are converted to strings, so HexData(0, 100) == HexData(0, "100")
+ 'HexData(0, 100)',
+ 'HexData(0, "")',
+ 'HexData(0, "aaa")',
+ 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
+ 'HexData(0, "000000000000000000000005")', // SERVER-9605
+ ],
+ "invalid" : [
+ 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
+ 'HexData()',
+ 'HexData(0)',
+ 'HexData(-1, "")',
+ 'HexData(256, "")',
+ 'HexData("string","aaaa")',
+ // SERVER-10152
+ //'HexData(0, true)',
+ //'HexData(0, null)',
+ //'HexData(0, undefined)',
+ //'HexData(0, {})',
+ //'HexData(0, [])',
+ //'HexData(0, function () {})',
+ // SERVER-9686
+ //'HexData(0, "invalidhex")',
+ ]
+}
+
+var dateConstructors = {
+ "valid" : [
+ 'Date()',
+ 'Date(0)',
+ 'Date(0,0)',
+ 'Date(0,0,0)',
+ 'Date("foo")',
+ ],
+ "invalid" : [
+ ]
+}
+
+clientEvalConstructorTest(dbrefConstructors);
+clientEvalConstructorTest(dbpointerConstructors);
+clientEvalConstructorTest(objectidConstructors);
+clientEvalConstructorTest(timestampConstructors);
+clientEvalConstructorTest(bindataConstructors);
+clientEvalConstructorTest(uuidConstructors);
+clientEvalConstructorTest(md5Constructors);
+clientEvalConstructorTest(hexdataConstructors);
+clientEvalConstructorTest(dateConstructors);
+
+dbEvalConstructorTest(dbrefConstructors);
+dbEvalConstructorTest(dbpointerConstructors);
+dbEvalConstructorTest(objectidConstructors);
+dbEvalConstructorTest(timestampConstructors);
+dbEvalConstructorTest(bindataConstructors);
+dbEvalConstructorTest(uuidConstructors);
+dbEvalConstructorTest(md5Constructors);
+dbEvalConstructorTest(hexdataConstructors);
+dbEvalConstructorTest(dateConstructors);
+
+// SERVER-8963
+if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") {
+ mapReduceConstructorTest(dbrefConstructors);
+ mapReduceConstructorTest(dbpointerConstructors);
+ mapReduceConstructorTest(objectidConstructors);
+ mapReduceConstructorTest(timestampConstructors);
+ mapReduceConstructorTest(bindataConstructors);
+ mapReduceConstructorTest(uuidConstructors);
+ mapReduceConstructorTest(md5Constructors);
+ mapReduceConstructorTest(hexdataConstructors);
+}
+mapReduceConstructorTest(dateConstructors);
+
+// SERVER-8963
+if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") {
+ whereConstructorTest(dbrefConstructors);
+ whereConstructorTest(dbpointerConstructors);
+ whereConstructorTest(objectidConstructors);
+ whereConstructorTest(timestampConstructors);
+ whereConstructorTest(bindataConstructors);
+ whereConstructorTest(uuidConstructors);
+ whereConstructorTest(md5Constructors);
+ whereConstructorTest(hexdataConstructors);
+}
+whereConstructorTest(dateConstructors);
diff --git a/jstests/core/copydb.js b/jstests/core/copydb.js
new file mode 100644
index 00000000000..7c7c02542a4
--- /dev/null
+++ b/jstests/core/copydb.js
@@ -0,0 +1,20 @@
+
+
+
+
+a = db.getSisterDB( "copydb-test-a" );
+b = db.getSisterDB( "copydb-test-b" );
+
+a.dropDatabase();
+b.dropDatabase();
+
+a.foo.save( { a : 1 } );
+
+assert.eq( 1 , a.foo.count() , "A" );
+assert.eq( 0 , b.foo.count() , "B" );
+
+a.copyDatabase( a._name , b._name );
+
+assert.eq( 1 , a.foo.count() , "C" );
+assert.eq( 1 , b.foo.count() , "D" );
+
diff --git a/jstests/core/count.js b/jstests/core/count.js
new file mode 100644
index 00000000000..5502d7176c1
--- /dev/null
+++ b/jstests/core/count.js
@@ -0,0 +1,25 @@
+t = db.jstests_count;
+
+t.drop();
+t.save( { i: 1 } );
+t.save( { i: 2 } );
+assert.eq( 1, t.find( { i: 1 } ).count(), "A" );
+assert.eq( 1, t.count( { i: 1 } ) , "B" );
+assert.eq( 2, t.find().count() , "C" );
+assert.eq( 2, t.find( undefined ).count() , "D" );
+assert.eq( 2, t.find( null ).count() , "E" );
+assert.eq( 2, t.count() , "F" );
+
+t.drop();
+t.save( {a:true,b:false} );
+t.ensureIndex( {b:1,a:1} );
+assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" );
+assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" );
+
+t.drop();
+t.save( {a:true,b:false} );
+t.ensureIndex( {b:1,a:1,c:1} );
+
+assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" );
+assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" );
+
diff --git a/jstests/core/count10.js b/jstests/core/count10.js
new file mode 100644
index 00000000000..21243b3151d
--- /dev/null
+++ b/jstests/core/count10.js
@@ -0,0 +1,59 @@
+// Test that interrupting a count returns an error code.
+
+t = db.count10;
+t.drop();
+
+for ( i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+// Start a parallel shell which repeatedly checks for a count
+// query using db.currentOp(). As soon as the op is found,
+// kill it via db.killOp().
+s = startParallelShell(
+ 'assert.soon(function() {' +
+ ' current = db.currentOp({"ns": db.count10.getFullName(), ' +
+ ' "query.count": db.count10.getName()}); ' +
+
+ // Check that we found the count op. If not, return false so
+ // that assert.soon will retry.
+ ' assert("inprog" in current); ' +
+ ' if (current.inprog.length === 0) { ' +
+ ' jsTest.log("count10.js: did not find count op, retrying"); ' +
+ ' printjson(current); ' +
+ ' return false; ' +
+ ' } ' +
+ ' countOp = current.inprog[0]; ' +
+ ' if (!countOp) { ' +
+ ' jsTest.log("count10.js: did not find count op, retrying"); ' +
+ ' printjson(current); ' +
+ ' return false; ' +
+ ' } ' +
+
+ // Found the count op. Try to kill it.
+ ' jsTest.log("count10.js: found count op:"); ' +
+ ' printjson(current); ' +
+ ' printjson(db.killOp(countOp.opid)); ' +
+ ' return true; ' +
+ '}, "count10.js: could not find count op after retrying, gave up");'
+);
+
+function getKilledCount() {
+ try {
+ db.count10.find("sleep(1000)").count();
+ jsTest.log("count10.js: count op completed without being killed");
+ } catch (e) {
+ return e;
+ }
+}
+
+var res = getKilledCount();
+jsTest.log("count10.js: killed count output start");
+printjson(res);
+jsTest.log("count10.js: killed count output end");
+assert(res);
+assert(res.match(/count failed/) !== null);
+assert(res.match(/\"code\"/) !== null);
+
+s();
+
diff --git a/jstests/core/count2.js b/jstests/core/count2.js
new file mode 100644
index 00000000000..4d060aaac20
--- /dev/null
+++ b/jstests/core/count2.js
@@ -0,0 +1,28 @@
+t = db.count2;
+t.drop();
+
+for ( var i=0; i<1000; i++ ){
+ t.save( { num : i , m : i % 20 } );
+}
+
+assert.eq( 1000 , t.count() , "A" )
+assert.eq( 1000 , t.find().count() , "B" )
+assert.eq( 1000 , t.find().toArray().length , "C" )
+
+assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" )
+assert.eq( 50 , t.find( { m : 5 } ).count() , "E" )
+
+assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" )
+assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" )
+assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" )
+
+assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" )
+assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" )
+assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" )
+
+assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" )
+
+// Negative skip values should return error
+var negSkipResult = db.runCommand({ count: 't', skip : -2 });
+assert( ! negSkipResult.ok , "negative skip value shouldn't work, n = " + negSkipResult.n );
+assert( negSkipResult.errmsg.length > 0 , "no error msg for negative skip" );
diff --git a/jstests/core/count3.js b/jstests/core/count3.js
new file mode 100644
index 00000000000..a8c3ef5faad
--- /dev/null
+++ b/jstests/core/count3.js
@@ -0,0 +1,26 @@
+
+t = db.count3;
+
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 1 , b : 2 } );
+
+assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" );
+assert.eq( 2 , t.find( { a : 1 } ).count() , "B" );
+
+assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" );
+assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" );
+
+t.drop();
+
+t.save( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" );
+assert.eq( 1 , t.find( { a : 1 } ).count() , "F" );
+
+assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" );
+assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" );
+
+
+
diff --git a/jstests/core/count4.js b/jstests/core/count4.js
new file mode 100644
index 00000000000..7be74362603
--- /dev/null
+++ b/jstests/core/count4.js
@@ -0,0 +1,17 @@
+
+t = db.count4;
+t.drop();
+
+for ( i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 25 , $lte : 75 } }
+
+assert.eq( 50 , t.find( q ).count() , "A" );
+assert.eq( 50 , t.find( q ).itcount() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 50 , t.find( q ).count() , "C" );
+assert.eq( 50 , t.find( q ).itcount() , "D" );
diff --git a/jstests/core/count5.js b/jstests/core/count5.js
new file mode 100644
index 00000000000..b6bbc543352
--- /dev/null
+++ b/jstests/core/count5.js
@@ -0,0 +1,30 @@
+
+t = db.count5;
+t.drop();
+
+for ( i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 25 , $lte : 75 } };
+
+assert.eq( 50 , t.find( q ).count() , "A" );
+assert.eq( 50 , t.find( q ).itcount() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 50 , t.find( q ).count() , "C" );
+assert.eq( 50 , t.find( q ).itcount() , "D" );
+
+assert.eq( 50 , t.find( q ).limit(1).count() , "E" );
+assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" );
+
+assert.eq( 5 , t.find( q ).limit(5).size() , "G" );
+assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" );
+assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" );
+
+assert.eq( 20 , t.find().limit(20).size() , "J" );
+
+assert.eq( 0 , t.find().skip(120).size() , "K" );
+assert.eq( 1 , db.$cmd.findOne( { count: "count5" } )["ok"] , "L" );
+assert.eq( 1 , db.$cmd.findOne( { count: "count5", skip: 120 } )["ok"] , "M" );
diff --git a/jstests/core/count6.js b/jstests/core/count6.js
new file mode 100644
index 00000000000..44c5fa33bc7
--- /dev/null
+++ b/jstests/core/count6.js
@@ -0,0 +1,61 @@
+// Some correctness checks for fast and normal count modes, including with skip and limit.
+
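+// The identity verified by checkCounts() below: for a predicate matching n documents,
+//   count with skip s and limit l == Math.min( Math.max( n - s, 0 ), l )
+// where limit 0 means "no limit" and, for counting, limit(-l) matches limit(l).
+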
+t = db.jstests_count6;
+
+function checkCountForObject( obj ) {
+ t.drop();
+ t.ensureIndex( {b:1,a:1} );
+
+ function checkCounts( query, expected ) {
+ assert.eq( expected, t.count( query ) , "A1" );
+ assert.eq( expected, t.find( query ).skip( 0 ).limit( 0 ).count( true ) , "A2" );
+ // Check proper counts with various skip and limit specs.
+ for( var skip = 1; skip <= 2; ++skip ) {
+ for( var limit = 1; limit <= 2; ++limit ) {
+ assert.eq( Math.max( expected - skip, 0 ), t.find( query ).skip( skip ).count( true ) , "B1" );
+ assert.eq( Math.min( expected, limit ), t.find( query ).limit( limit ).count( true ) , "B2" );
+ assert.eq( Math.min( Math.max( expected - skip, 0 ), limit ), t.find( query ).skip( skip ).limit( limit ).count( true ) , "B4" );
+
+ // Check limit(x) = limit(-x)
+ assert.eq( t.find( query ).limit( limit ).count( true ),
+ t.find( query ).limit( -limit ).count( true ) , "C1" );
+ assert.eq( t.find( query ).skip( skip ).limit( limit ).count( true ),
+ t.find( query ).skip( skip ).limit( -limit ).count( true ) , "C2" );
+ }
+ }
+
+ // Check limit(0) has no effect
+ assert.eq( expected, t.find( query ).limit( 0 ).count( true ) , "D1" );
+ assert.eq( Math.max( expected - skip, 0 ),
+ t.find( query ).skip( skip ).limit( 0 ).count( true ) , "D2" );
+ assert.eq( expected, t.getDB().runCommand({ count: t.getName(),
+ query: query, limit: 0 }).n , "D3" );
+ assert.eq( Math.max( expected - skip, 0 ),
+ t.getDB().runCommand({ count: t.getName(),
+ query: query, limit: 0, skip: skip }).n , "D4" );
+ }
+
+ for( var i = 0; i < 5; ++i ) {
+ checkCounts( {a:obj.a,b:obj.b}, i );
+ checkCounts( {b:obj.b,a:obj.a}, i );
+ t.insert( obj );
+ }
+
+ t.insert( {a:true,b:true} );
+ t.insert( {a:true,b:1} );
+ t.insert( {a:false,b:1} );
+ t.insert( {a:false,b:true} );
+ t.insert( {a:false,b:false} );
+
+ checkCounts( {a:obj.a,b:obj.b}, i );
+ checkCounts( {b:obj.b,a:obj.a}, i );
+
+ // Check with no query
+ checkCounts( {}, 10 );
+}
+
+// Check fast count mode.
+checkCountForObject( {a:true,b:false} );
+
+// Check normal count mode.
+checkCountForObject( {a:1,b:0} );
diff --git a/jstests/core/count7.js b/jstests/core/count7.js
new file mode 100644
index 00000000000..c2c1260d49b
--- /dev/null
+++ b/jstests/core/count7.js
@@ -0,0 +1,25 @@
+// Check normal count matching and deduping.
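+// Deduping example: { a:[1,2,3] } produces index keys 1, 2 and 3, but a count for
+// { a:{$gt:1} } must count such a document once, not once per matching key.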
+
+t = db.jstests_count7;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:'algebra'} );
+t.save( {a:'apple'} );
+t.save( {a:'azores'} );
+t.save( {a:'bumper'} );
+t.save( {a:'supper'} );
+t.save( {a:'termite'} );
+t.save( {a:'zeppelin'} );
+t.save( {a:'ziggurat'} );
+t.save( {a:'zope'} );
+
+assert.eq( 5, t.count( {a:/p/} ) );
+
+t.remove({});
+
+t.save( {a:[1,2,3]} );
+t.save( {a:[1,2,3]} );
+t.save( {a:[1]} );
+
+assert.eq( 2, t.count( {a:{$gt:1}} ) );
diff --git a/jstests/core/count9.js b/jstests/core/count9.js
new file mode 100644
index 00000000000..888ffe3b544
--- /dev/null
+++ b/jstests/core/count9.js
@@ -0,0 +1,28 @@
+// Test fast mode count with multikey entries.
+
+t = db.jstests_count9;
+t.drop();
+
+t.ensureIndex( {a:1} );
+
+t.save( {a:['a','b','a']} );
+assert.eq( 1, t.count( {a:'a'} ) );
+
+t.save( {a:['a','b','a']} );
+assert.eq( 2, t.count( {a:'a'} ) );
+
+t.drop();
+t.ensureIndex( {a:1,b:1} );
+
+t.save( {a:['a','b','a'],b:'r'} );
+assert.eq( 1, t.count( {a:'a',b:'r'} ) );
+assert.eq( 1, t.count( {a:'a'} ) );
+
+t.save( {a:['a','b','a'],b:'r'} );
+assert.eq( 2, t.count( {a:'a',b:'r'} ) );
+assert.eq( 2, t.count( {a:'a'} ) );
+
+t.drop();
+t.ensureIndex( {'a.b':1,'a.c':1} );
+t.save( {a:[{b:'b',c:'c'},{b:'b',c:'c'}]} );
+assert.eq( 1, t.count( {'a.b':'b','a.c':'c'} ) );
diff --git a/jstests/core/count_hint.js b/jstests/core/count_hint.js
new file mode 100644
index 00000000000..93322d282db
--- /dev/null
+++ b/jstests/core/count_hint.js
@@ -0,0 +1,20 @@
+// test passing hint to the count cmd
+// hints are ignored if there is no query predicate
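+// e.g. assertion D below: the hinted sparse index x_1 contains no entries, yet
+// t.find().hint( "x_1" ).count() still returns the full collection count of 2.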
+t = db.jstests_count_hint;
+t.drop();
+
+t.save( { i: 1 } );
+t.save( { i: 2 } );
+assert.eq( 2, t.find().count() );
+
+t.ensureIndex( { i:1 } );
+
+assert.eq( 1, t.find( { i: 1 } ).hint( "_id_" ).count(), "A" );
+assert.eq( 2, t.find().hint( "_id_" ).count(), "B" );
+assert.throws( function() { t.find( { i: 1 } ).hint( "BAD HINT" ).count(); } );
+
+// create a sparse index which should have no entries
+t.ensureIndex( { x:1 }, { sparse:true } );
+
+assert.eq( 0, t.find( { i: 1 } ).hint( "x_1" ).count(), "C" );
+assert.eq( 2, t.find().hint( "x_1" ).count(), "D" );
diff --git a/jstests/core/counta.js b/jstests/core/counta.js
new file mode 100644
index 00000000000..f0834d455dd
--- /dev/null
+++ b/jstests/core/counta.js
@@ -0,0 +1,14 @@
+// Check that count errors out in some exception cases.
+
+t = db.jstests_counta;
+t.drop();
+
+for( i = 0; i < 10; ++i ) {
+ t.save( {a:i} );
+}
+
+// f() is undefined, causing an assertion
+assert.throws(
+ function(){
+ t.count( { $where:function() { if ( this.a < 5 ) { return true; } else { f(); } } } );
+ } );
diff --git a/jstests/core/countb.js b/jstests/core/countb.js
new file mode 100644
index 00000000000..8f7131a5a6c
--- /dev/null
+++ b/jstests/core/countb.js
@@ -0,0 +1,11 @@
+// Test fast count mode on a multikey index, with constraints that would be unsatisfiable
+// against a single index key.
+
+t = db.jstests_countb;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:['a','b']} );
+assert.eq( 0, t.find( {a:{$in:['a'],$gt:'b'}} ).count() );
+assert.eq( 0, t.find( {$and:[{a:'a'},{a:{$gt:'b'}}]} ).count() );
+assert.eq( 1, t.find( {$and:[{a:'a'},{$where:"this.a[1]=='b'"}]} ).count() );
+assert.eq( 0, t.find( {$and:[{a:'a'},{$where:"this.a[1]!='b'"}]} ).count() );
diff --git a/jstests/core/countc.js b/jstests/core/countc.js
new file mode 100644
index 00000000000..260dbb1f264
--- /dev/null
+++ b/jstests/core/countc.js
@@ -0,0 +1,124 @@
+// In fast count mode the Matcher is bypassed when matching can be performed by a BtreeCursor and
+// its delegate FieldRangeVector or an IntervalBtreeCursor. The tests below check that fast count
+// mode is implemented appropriately in specific cases.
+//
+// SERVER-1752
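+//
+// Illustrative case (the first test below): for { a:{ $in:[ 0, 1, ..., 99 ] } } the index
+// cursor can decide membership from each key alone, so the two in-bounds keys are counted
+// without fetching any documents.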
+
+t = db.jstests_countc;
+t.drop();
+
+
+// Match a subset of inserted values within a $in operator.
+t.drop();
+t.ensureIndex( { a:1 } );
+// Save 'a' values 0, 0.5, 1.5, 2.5 ... 97.5, 98.5, 99.
+t.save( { a:0 } );
+t.save( { a:99 } );
+for( i = 0; i < 99; ++i ) {
+ t.save( { a:( i + 0.5 ) } );
+}
+// Query 'a' values $in 0, 1, 2, ..., 99.
+vals = [];
+for( i = 0; i < 100; ++i ) {
+ vals.push( i );
+}
+// Only values 0 and 99 of the $in set are present in the collection, so the expected count is 2.
+assert.eq( 2, t.count( { a:{ $in:vals } } ) );
+
+
+// Match 'a' values within upper and lower limits.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:[ 1, 2 ] } ); // Will match because 'a' is in range.
+t.save( { a:9 } ); // Will not match because 'a' is not in range.
+// Only one document matches.
+assert.eq( 1, t.count( { a:{ $gt:0, $lt:5 } } ) );
+
+
+// Match two nested fields within an array.
+t.drop();
+t.ensureIndex( { 'a.b':1, 'a.c':1 } );
+t.save( { a:[ { b:2, c:3 }, {} ] } );
+// The document does not match because its c value is 3.
+assert.eq( 0, t.count( { 'a.b':2, 'a.c':2 } ) );
+
+
+// $gt:string only matches strings.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:'a' } ); // Will match.
+t.save( { a:{} } ); // Will not match because {} is not a string.
+// Only one document matches.
+assert.eq( 1, t.count( { a:{ $gte:'' } } ) );
+
+
+// $lte:date only matches dates.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:new Date( 1 ) } ); // Will match.
+t.save( { a:true } ); // Will not match because 'true' is not a date.
+// Only one document matches.
+assert.eq( 1, t.count( { a:{ $lte:new Date( 1 ) } } ) );
+
+
+// Querying for 'undefined' triggers an error.
+t.drop();
+t.ensureIndex( { a:1 } );
+assert.throws( function() { t.count( { a:undefined } ); } );
+
+
+// Count using a descending order index.
+t.drop();
+t.ensureIndex( { a:-1 } );
+t.save( { a:1 } );
+t.save( { a:2 } );
+t.save( { a:3 } );
+assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
+assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
+assert.eq( 2, t.count( { a:{ $lte:2 } } ) );
+assert.eq( 2, t.count( { a:{ $lt:3 } } ) );
+
+
+// Count using a compound index.
+t.drop();
+t.ensureIndex( { a:1, b:1 } );
+t.save( { a:1, b:2 } );
+t.save( { a:2, b:1 } );
+t.save( { a:2, b:3 } );
+t.save( { a:3, b:4 } );
+assert.eq( 1, t.count( { a:1 }));
+assert.eq( 2, t.count( { a:2 }));
+assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
+assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
+assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
+assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
+assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
+
+
+// Count using a compound descending order index.
+t.drop();
+t.ensureIndex( { a:1, b:-1 } );
+t.save( { a:1, b:2 } );
+t.save( { a:2, b:1 } );
+t.save( { a:2, b:3 } );
+t.save( { a:3, b:4 } );
+assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
+assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
+assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
+assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
+assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
+
+
+// Count with a multikey value.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:[ 1, 2 ] } );
+assert.eq( 1, t.count( { a:{ $gt:0, $lte:2 } } ) );
+
+
+// Count with a match constraint on an unindexed field.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:1, b:1 } );
+t.save( { a:1, b:2 } );
+assert.eq( 1, t.count( { a:1, $where:'this.b == 1' } ) );
diff --git a/jstests/core/coveredIndex1.js b/jstests/core/coveredIndex1.js
new file mode 100644
index 00000000000..ce11f89ceed
--- /dev/null
+++ b/jstests/core/coveredIndex1.js
@@ -0,0 +1,64 @@
+
+t = db["jstests_coveredIndex1"];
+t.drop();
+
+t.save({fn: "john", ln: "doe"})
+t.save({fn: "jack", ln: "doe"})
+t.save({fn: "john", ln: "smith"})
+t.save({fn: "jack", ln: "black"})
+t.save({fn: "bob", ln: "murray"})
+t.save({fn: "aaa", ln: "bbb", obj: {a: 1, b: "blah"}})
+assert.eq( t.findOne({ln: "doe"}).fn, "john", "Cannot find right record" );
+assert.eq( t.count(), 6, "Not right length" );
+
+// use simple index
+t.ensureIndex({ln: 1});
+assert.eq( t.find({ln: "doe"}).explain().indexOnly, false, "Find using covered index but all fields are returned");
+assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find using covered index but _id is returned");
+assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+
+// this time, without a query spec
+// SERVER-2109
+//assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+assert.eq( t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().indexOnly, true, "Find is not using covered index");
+
+// use compound index
+t.dropIndex({ln: 1})
+t.ensureIndex({ln: 1, fn: 1});
+// return 1 field
+assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+// return both fields, multiple docs returned
+assert.eq( t.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+// match 1 record using both fields
+assert.eq( t.find({ln: "doe", fn: "john"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+// change ordering
+assert.eq( t.find({fn: "john", ln: "doe"}, {fn: 1, ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+// ask from 2nd index key
+assert.eq( t.find({fn: "john"}, {fn: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key");
+
+// repeat above but with _id field
+t.dropIndex({ln: 1, fn: 1})
+t.ensureIndex({_id: 1, ln: 1});
+// return 1 field
+assert.eq( t.find({_id: 123, ln: "doe"}, {_id: 1}).explain().indexOnly, true, "Find is not using covered index");
+// match 1 record using both fields
+assert.eq( t.find({_id: 123, ln: "doe"}, {ln: 1}).explain().indexOnly, true, "Find is not using covered index");
+// change ordering
+assert.eq( t.find({ln: "doe", _id: 123}, {ln: 1, _id: 1}).explain().indexOnly, true, "Find is not using covered index");
+// ask from 2nd index key
+assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key");
+
+// repeat above but with embedded obj
+t.dropIndex({_id: 1, ln: 1})
+t.ensureIndex({obj: 1});
+assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object");
+assert.eq( t.find({obj: {a: 1, b: "blah"}}).explain().indexOnly, false, "Index doesnt have all fields to cover");
+assert.eq( t.find({obj: {a: 1, b: "blah"}}, {obj: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+
+// repeat above but with index on sub obj field
+t.dropIndex({obj: 1});
+t.ensureIndex({"obj.a": 1, "obj.b": 1})
+assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object");
+
+assert(t.validate().valid);
+
diff --git a/jstests/core/coveredIndex2.js b/jstests/core/coveredIndex2.js
new file mode 100644
index 00000000000..56a23f43211
--- /dev/null
+++ b/jstests/core/coveredIndex2.js
@@ -0,0 +1,18 @@
+t = db["jstests_coveredIndex2"];
+t.drop();
+
+t.save({a: 1})
+t.save({a: 2})
+assert.eq( t.findOne({a: 1}).a, 1, "Cannot find right record" );
+assert.eq( t.count(), 2, "Not right length" );
+
+// use simple index
+t.ensureIndex({a: 1});
+assert.eq( t.find({a:1}).explain().indexOnly, false, "Find using covered index but all fields are returned");
+assert.eq( t.find({a:1}, {a: 1}).explain().indexOnly, false, "Find using covered index but _id is returned");
+assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
+
+// add multikey
+t.save({a:[3,4]})
+assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index even after multikey insert");
+
diff --git a/jstests/core/coveredIndex3.js b/jstests/core/coveredIndex3.js
new file mode 100644
index 00000000000..66180342605
--- /dev/null
+++ b/jstests/core/coveredIndex3.js
@@ -0,0 +1,54 @@
+// Check proper covered index handling when query and processGetMore yield.
+// SERVER-4975
+
+if ( 0 ) { // SERVER-4975
+
+t = db.jstests_coveredIndex3;
+t2 = db.jstests_coveredIndex3_other;
+t.drop();
+t2.drop();
+
+function doTest( batchSize ) {
+
+ // Insert an array, which will make the { a:1 } index multikey and should disable covered index
+ // matching.
+ p1 = startParallelShell(
+ 'for( i = 0; i < 60; ++i ) { \
+ db.jstests_coveredIndex3.save( { a:[ 2000, 2001 ] } ); \
+ sleep( 300 ); \
+ }'
+ );
+
+ // Frequent writes cause the find operation to yield.
+ p2 = startParallelShell(
+ 'for( i = 0; i < 1800; ++i ) { \
+ db.jstests_coveredIndex3_other.save( {} ); \
+ sleep( 10 ); \
+ }'
+ );
+
+ for( i = 0; i < 30; ++i ) {
+ t.drop();
+ t.ensureIndex( { a:1 } );
+
+ for( j = 0; j < 1000; ++j ) {
+ t.save( { a:j } );
+ }
+
+ c = t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).batchSize( batchSize );
+ while( c.hasNext() ) {
+ o = c.next();
+ // If o contains a high numeric 'a' value, it must come from an array saved in p1.
+ assert( !( o.a > 1500 ), 'improper object returned ' + tojson( o ) );
+ }
+ }
+
+ p1();
+ p2();
+
+}
+
+doTest( 2000 ); // Test query.
+doTest( 500 ); // Try to test getMore - not clear if this will actually trigger the getMore issue.
+
+}
diff --git a/jstests/core/coveredIndex4.js b/jstests/core/coveredIndex4.js
new file mode 100644
index 00000000000..136eba603cf
--- /dev/null
+++ b/jstests/core/coveredIndex4.js
@@ -0,0 +1,40 @@
+// Test covered index projection with $or clause, specifically in getMore
+// SERVER-4980
+
+t = db.jstests_coveredIndex4;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+t.ensureIndex( { b:1 } );
+
+orClause = [];
+for( i = 0; i < 200; ++i ) {
+ if ( i % 2 == 0 ) {
+ t.save( { a:i } );
+ orClause.push( { a:i } );
+ }
+ else {
+ t.save( { b:i } );
+ orClause.push( { b:i } );
+ }
+}
+
+c = t.find( { $or:orClause }, { _id:0, a:1 } );
+
+// No odd values of a were saved, so we should not see any in the results.
+while( c.hasNext() ) {
+ o = c.next();
+ if ( o.a ) {
+ assert.eq( 0, o.a % 2, 'unexpected result: ' + tojson( o ) );
+ }
+}
+
+c = t.find( { $or:orClause }, { _id:0, b:1 } );
+
+// No even values of b were saved, so we should not see any in the results.
+while( c.hasNext() ) {
+ o = c.next();
+ if ( o.b ) {
+ assert.eq( 1, o.b % 2, 'unexpected result: ' + tojson( o ) );
+ }
+}
diff --git a/jstests/core/coveredIndex5.js b/jstests/core/coveredIndex5.js
new file mode 100644
index 00000000000..ee383cd93e2
--- /dev/null
+++ b/jstests/core/coveredIndex5.js
@@ -0,0 +1,70 @@
+// Test use of covered indexes when there are multiple candidate indexes.
+
+t = db.jstests_coveredIndex5;
+t.drop();
+
+t.ensureIndex( { a:1, b:1 } );
+t.ensureIndex( { a:1, c:1 } );
+
+function checkFields( query, projection ) {
+ t.ensureIndex( { z:1 } ); // clear query patterns
+ t.dropIndex( { z:1 } );
+
+ results = t.find( query, projection ).toArray();
+
+ expectedFields = [];
+ for ( k in projection ) {
+ if ( k != '_id' ) {
+ expectedFields.push( k );
+ }
+ }
+
+ vals = [];
+ for ( i in results ) {
+ r = results[ i ];
+ printjson(r);
+ assert.eq( 0, r.a );
+ assert.eq( expectedFields, Object.keySet( r ) );
+ for ( k in projection ) {
+ if ( k != '_id' && k != 'a' ) {
+ vals.push( r[ k ] );
+ }
+ }
+ }
+
+ if ( vals.length != 0 ) {
+ vals.sort();
+ assert.eq( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], vals );
+ }
+}
+
+function checkCursorCovered( cursor, covered, count, query, projection ) {
+ checkFields( query, projection );
+ explain = t.find( query, projection ).explain( true );
+ if (covered) {
+ assert.eq( cursor, explain.cursor );
+ }
+ assert.eq( covered, explain.indexOnly );
+ assert.eq( count, explain.n );
+}
+
+for( i = 0; i < 10; ++i ) {
+ t.save( { a:0, b:i, c:9-i } );
+}
+
+checkCursorCovered( 'BtreeCursor a_1_b_1', true, 10, { a:0 }, { _id:0, a:1 } );
+
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1 } );
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, b:1 } );
+
+// Covered index on a,c not preferentially selected.
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, c:1 } );
+
+t.save( { a:0, c:[ 1, 2 ] } );
+t.save( { a:1 } );
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 11, { a:0, d:null }, { _id:0, a:1 } );
+
+t.save( { a:0, b:[ 1, 2 ] } );
+t.save( { a:1 } );
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 12, { a:0, d:null }, { _id:0, a:1 } );
+
diff --git a/jstests/core/covered_index_compound_1.js b/jstests/core/covered_index_compound_1.js
new file mode 100644
index 00000000000..7e529785d12
--- /dev/null
+++ b/jstests/core/covered_index_compound_1.js
@@ -0,0 +1,45 @@
+// Compound index covered query tests
+
+var coll = db.getCollection("covered_compound_1")
+coll.drop()
+for (i=0;i<100;i++) {
+ coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)})
+}
+coll.ensureIndex({a:1,b:-1,c:1})
+
+// Test equality - all indexed fields queried and projected
+var plan = coll.find({a:10, b:"strvar_10", c:0}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query")
+
+// Test query on subset of fields queried and project all
+var plan = coll.find({a:26, b:"strvar_0"}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.2 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.2 - nscannedObjects should be 0 for covered query")
+
+// Test query on all fields queried and project subset
+var plan = coll.find({a:38, b:"strvar_12", c: 8}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.3 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.3 - nscannedObjects should be 0 for covered query")
+
+// Test no query
+var plan = coll.find({}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.4 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.4 - nscannedObjects should be 0 for covered query")
+
+// Test range query
+var plan = coll.find({a:{$gt:25,$lt:43}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.5 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.5 - nscannedObjects should be 0 for covered query")
+
+// Test in query
+var plan = coll.find({a:38, b:"strvar_12", c:{$in:[5,8]}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.6 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.6 - nscannedObjects should be 0 for covered query")
+
+// Test no result
+var plan = coll.find({a:38, b:"strvar_12", c:55},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.7 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.7 - nscannedObjects should be 0 for covered query")
+
+print('all tests passed')
diff --git a/jstests/core/covered_index_geo_1.js b/jstests/core/covered_index_geo_1.js
new file mode 100644
index 00000000000..1d647dfa94c
--- /dev/null
+++ b/jstests/core/covered_index_geo_1.js
@@ -0,0 +1,18 @@
+var coll = db.getCollection("covered_geo_1")
+coll.drop()
+
+coll.insert({_id : 1, loc : [ 5 , 5 ], type : "type1"})
+coll.insert({_id : 2, loc : [ 6 , 6 ], type : "type2"})
+coll.insert({_id : 3, loc : [ 7 , 7 ], type : "type3"})
+
+coll.ensureIndex({loc : "2d", type : 1});
+
+var plan = coll.find({loc : [ 6 , 6 ]}, {loc:1, type:1, _id:0}).hint({loc:"2d", type:1}).explain();
+assert.eq(false, plan.indexOnly, "geo.1.1 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "geo.1.1 - nscannedObjects should not be 0 for a non covered query")
+
+var plan = coll.find({loc : [ 6 , 6 ]}, {type:1, _id:0}).hint({loc:"2d", type:1}).explain();
+assert.eq(false, plan.indexOnly, "geo.1.2 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "geo.1.2 - nscannedObjects should not be 0 for a non covered query")
+
+print("all tests passed") \ No newline at end of file
diff --git a/jstests/core/covered_index_geo_2.js b/jstests/core/covered_index_geo_2.js
new file mode 100644
index 00000000000..52f610b7e64
--- /dev/null
+++ b/jstests/core/covered_index_geo_2.js
@@ -0,0 +1,22 @@
+var coll = db.getCollection("covered_geo_2")
+coll.drop()
+
+coll.insert({_id : 1, loc1 : [ 5 , 5 ], type1 : "type1",
+ loc2 : [ 5 , 5 ], type2 : 1})
+coll.insert({_id : 2, loc1 : [ 6 , 6 ], type1 : "type2",
+ loc2 : [ 5 , 5 ], type2 : 2})
+coll.insert({_id : 3, loc1 : [ 7 , 7 ], type1 : "type3",
+ loc2 : [ 5 , 5 ], type2 : 3})
+
+coll.ensureIndex({loc1 : "2dsphere", type1 : 1});
+coll.ensureIndex({type2: 1, loc2 : "2dsphere"});
+
+var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {loc1:1, type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain();
+assert.eq(false, plan.indexOnly, "geo.2.1 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "geo.2.1 - nscannedObjects should not be 0 for a non covered query")
+
+var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain();
+assert.eq(false, plan.indexOnly, "geo.2.2 - indexOnly should be false for a non covered query")
+assert.neq(0, plan.nscannedObjects, "geo.2.2 - nscannedObjects should not be 0 for a non covered query")
+
+print("all tests passed")
diff --git a/jstests/core/covered_index_negative_1.js b/jstests/core/covered_index_negative_1.js
new file mode 100644
index 00000000000..ab03e7566f6
--- /dev/null
+++ b/jstests/core/covered_index_negative_1.js
@@ -0,0 +1,61 @@
+// Miscellaneous covered index query tests, mostly negative: cases where we do
+// not expect the query to be covered by the index, and hence expect
+// indexOnly=false and nscannedObjects > 0.
+
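+// The checks below all follow the same pattern; a minimal helper capturing it
+// might look like this (an illustrative sketch only -- the tests below stay
+// explicit so that each failure message carries its own label):
+function assertNotCovered(plan, label) {
+    assert.eq(false, plan.indexOnly, label + " - indexOnly should be false on a non covered query");
+    assert.neq(0, plan.nscannedObjects, label + " - nscannedObjects should not be 0 for a non covered query");
+}
+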
+var coll = db.getCollection("covered_negative_1")
+coll.drop()
+for (i=0;i<100;i++) {
+ coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10), d: i*10, e: [i, i%10],
+ f:i})
+}
+coll.ensureIndex({a:1,b:-1,c:1})
+coll.ensureIndex({e:1})
+coll.ensureIndex({d:1})
+coll.ensureIndex({f:"hashed"})
+
+// Test no projection
+var plan = coll.find({a:10, b:"strvar_10", c:0}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(false, plan.indexOnly, "negative.1.1 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.1 - nscannedObjects should not be 0 for a non covered query")
+
+// Test projection and not excluding _id
+var plan = coll.find({a:10, b:"strvar_10", c:0},{a:1, b:1, c:1}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(false, plan.indexOnly, "negative.1.2 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.2 - nscannedObjects should not be 0 for a non covered query")
+
+// Test projection of non-indexed field
+var plan = coll.find({d:100},{d:1, c:1, _id:0}).hint({d:1}).explain()
+assert.eq(false, plan.indexOnly, "negative.1.3 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.3 - nscannedObjects should not be 0 for a non covered query")
+
+// Test query and projection on a multi-key index
+var plan = coll.find({e:99},{e:1, _id:0}).hint({e:1}).explain()
+assert.eq(false, plan.indexOnly, "negative.1.4 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.4 - nscannedObjects should not be 0 for a non covered query")
+
+// Commenting out negative.1.5 and 1.6 pending fix in SERVER-8650
+// // Test projection and $natural sort
+// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, b:-1, c:1}).explain()
+// // indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8561
+// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered query")
+// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non covered query")
+
+// // Test sort on non-indexed field
+// var plan = coll.find({d:{$lt:1000}},{d:1, _id:0}).sort({c:1}).hint({d:1}).explain()
+// //indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562
+// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered query")
+// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non covered query")
+
+// Test query on non-indexed field
+var plan = coll.find({d:{$lt:1000}},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain()
+// indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562
+// assert.eq(true, plan.indexOnly, "negative.1.7 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.7 - nscannedObjects should not be 0 for a non covered query")
+
+// Test query on hashed indexed field
+var plan = coll.find({f:10},{f:1, _id:0}).hint({f:"hashed"}).explain()
+assert.eq(false, plan.indexOnly, "negative.1.8 - indexOnly should be false on a non covered query")
+assert.neq(0, plan.nscannedObjects, "negative.1.8 - nscannedObjects should not be 0 for a non covered query")
+
+print('all tests passed')
diff --git a/jstests/core/covered_index_simple_1.js b/jstests/core/covered_index_simple_1.js
new file mode 100644
index 00000000000..44e3c00a9f8
--- /dev/null
+++ b/jstests/core/covered_index_simple_1.js
@@ -0,0 +1,55 @@
+// Simple covered index query test
+
+var coll = db.getCollection("covered_simple_1")
+coll.drop()
+for (i=0;i<10;i++) {
+ coll.insert({foo:i})
+}
+for (i=0;i<10;i++) {
+ coll.insert({foo:i})
+}
+for (i=0;i<5;i++) {
+ coll.insert({bar:i})
+}
+coll.insert({foo:"string"})
+coll.insert({foo:{bar:1}})
+coll.insert({foo:null})
+coll.ensureIndex({foo:1})
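+
+// With the legacy explain() format asserted against below, a covered plan is
+// expected to look roughly like (illustrative shape, values vary):
+//   { "cursor" : "BtreeCursor foo_1", "indexOnly" : true, "nscannedObjects" : 0, ... }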
+
+// Test equality with int value
+var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.1 - nscannedObjects should be 0 for covered query")
+
+// Test equality with string value
+var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.2 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.2 - nscannedObjects should be 0 for covered query")
+
+// Test equality with doc value
+var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.3 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.3 - nscannedObjects should be 0 for covered query")
+
+// Test no query
+var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.4 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.4 - nscannedObjects should be 0 for covered query")
+
+// Test range query
+var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.5 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.5 - nscannedObjects should be 0 for covered query")
+
+// Test in query
+var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.6 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.6 - nscannedObjects should be 0 for covered query")
+
+// Test no result
+var plan = coll.find({foo:"2"}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.1.7 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.1.7 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
+
diff --git a/jstests/core/covered_index_simple_2.js b/jstests/core/covered_index_simple_2.js
new file mode 100644
index 00000000000..313cca439d8
--- /dev/null
+++ b/jstests/core/covered_index_simple_2.js
@@ -0,0 +1,43 @@
+// Simple covered index query test with unique index
+
+var coll = db.getCollection("covered_simple_2")
+coll.drop()
+for (i=0;i<10;i++) {
+ coll.insert({foo:i})
+}
+coll.insert({foo:"string"})
+coll.insert({foo:{bar:1}})
+coll.insert({foo:null})
+coll.ensureIndex({foo:1},{unique:true})
+
+// Test equality with int value
+var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.1 - nscannedObjects should be 0 for covered query")
+
+// Test equality with string value
+var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.2 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.2 - nscannedObjects should be 0 for covered query")
+
+// Test equality with doc value
+var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.3 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.3 - nscannedObjects should be 0 for covered query")
+
+// Test no query
+var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.4 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.4 - nscannedObjects should be 0 for covered query")
+
+// Test range query
+var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.5 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.5 - nscannedObjects should be 0 for covered query")
+
+// Test in query
+var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.2.6 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.2.6 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
diff --git a/jstests/core/covered_index_simple_3.js b/jstests/core/covered_index_simple_3.js
new file mode 100644
index 00000000000..ee586540ea4
--- /dev/null
+++ b/jstests/core/covered_index_simple_3.js
@@ -0,0 +1,57 @@
+// Simple covered index query test with a unique sparse index
+
+var coll = db.getCollection("covered_simple_3");
+coll.drop();
+for (i=0;i<10;i++) {
+ coll.insert({foo:i});
+}
+for (i=0;i<5;i++) {
+ coll.insert({bar:i});
+}
+coll.insert({foo:"string"});
+coll.insert({foo:{bar:1}});
+coll.insert({foo:null});
+coll.ensureIndex({foo:1}, {sparse:true, unique:true});
+
+// Test equality with int value
+var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.1 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.1 - nscannedObjects should be 0 for covered query");
+
+// Test equality with string value
+var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.2 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.2 - nscannedObjects should be 0 for covered query");
+
+// Test equality with doc value
+var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.3 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.3 - nscannedObjects should be 0 for covered query");
+
+// Test no query
+var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.4 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.4 - nscannedObjects should be 0 for covered query");
+
+// Test range query
+var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.5 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.5 - nscannedObjects should be 0 for covered query");
+
+// Test in query
+var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.6 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.6 - nscannedObjects should be 0 for covered query");
+
+// Test $exists true
+var plan = coll.find({foo:{$exists:true}}, {foo:1, _id:0}).hint({foo:1}).explain();
+assert.eq(true, plan.indexOnly, "simple.3.7 - indexOnly should be true on covered query");
+assert.eq(0, plan.nscannedObjects, "simple.3.7 - nscannedObjects should be 0 for covered query");
+
+// SERVER-12262: currently $nin will always use a collection scan
+//var plan = coll.find({foo:{$nin:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain()
+//assert.eq(true, plan.indexOnly, "simple.3.8 - indexOnly should be true on covered query")
+// this should be 0 but is not due to bug https://jira.mongodb.org/browse/SERVER-3187
+//assert.eq(13, plan.nscannedObjects, "simple.3.8 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
diff --git a/jstests/core/covered_index_simple_id.js b/jstests/core/covered_index_simple_id.js
new file mode 100644
index 00000000000..c7f6811a33c
--- /dev/null
+++ b/jstests/core/covered_index_simple_id.js
@@ -0,0 +1,42 @@
+// Simple covered index query test on the implicit _id index
+
+var coll = db.getCollection("covered_simple_id")
+coll.drop()
+for (i=0;i<10;i++) {
+ coll.insert({_id:i})
+}
+coll.insert({_id:"string"})
+coll.insert({_id:{bar:1}})
+coll.insert({_id:null})
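+
+// Note: no ensureIndex call is needed here -- the _id index is created
+// automatically with the collection, and it is what covers the queries below.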
+
+// Test equality with int value
+var plan = coll.find({_id:1}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.1 - nscannedObjects should be 0 for covered query")
+
+// Test equality with string value
+var plan = coll.find({_id:"string"}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.2 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.2 - nscannedObjects should be 0 for covered query")
+
+// Test equality with doc value
+var plan = coll.find({_id:{bar:1}}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.3 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.3 - nscannedObjects should be 0 for covered query")
+
+// Test no query
+var plan = coll.find({}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.4 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.4 - nscannedObjects should be 0 for covered query")
+
+// Test range query
+var plan = coll.find({_id:{$gt:2,$lt:6}}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.5 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.5 - nscannedObjects should be 0 for covered query")
+
+// Test in query
+var plan = coll.find({_id:{$in:[5,8]}}, {_id:1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "simple.id.6 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "simple.id.6 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
diff --git a/jstests/core/covered_index_sort_1.js b/jstests/core/covered_index_sort_1.js
new file mode 100644
index 00000000000..adfcb5c6cb6
--- /dev/null
+++ b/jstests/core/covered_index_sort_1.js
@@ -0,0 +1,34 @@
+// Simple covered index query test with sort
+
+var coll = db.getCollection("covered_sort_1")
+coll.drop()
+for (i=0;i<10;i++) {
+ coll.insert({foo:i})
+}
+for (i=0;i<10;i++) {
+ coll.insert({foo:i})
+}
+for (i=0;i<5;i++) {
+ coll.insert({bar:i})
+}
+coll.insert({foo:"1"})
+coll.insert({foo:{bar:1}})
+coll.insert({foo:null})
+coll.ensureIndex({foo:1})
+
+// Test no query and sort ascending
+var plan = coll.find({}, {foo:1, _id:0}).sort({foo:1}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "sort.1.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "sort.1.1 - nscannedObjects should be 0 for covered query")
+
+// Test no query and sort descending
+var plan = coll.find({}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "sort.1.2 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "sort.1.2 - nscannedObjects should be 0 for covered query")
+
+// Test range query with sort
+var plan = coll.find({foo:{$gt:2}}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain()
+assert.eq(true, plan.indexOnly, "sort.1.5 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "sort.1.5 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
\ No newline at end of file
diff --git a/jstests/core/covered_index_sort_2.js b/jstests/core/covered_index_sort_2.js
new file mode 100644
index 00000000000..e5dd48b47af
--- /dev/null
+++ b/jstests/core/covered_index_sort_2.js
@@ -0,0 +1,17 @@
+// Simple covered index query test with sort on _id
+
+var coll = db.getCollection("covered_sort_2")
+coll.drop()
+for (i=0;i<10;i++) {
+ coll.insert({_id:i})
+}
+coll.insert({_id:"1"})
+coll.insert({_id:{bar:1}})
+coll.insert({_id:null})
+
+// Test no query
+var plan = coll.find({}, {_id:1}).sort({_id:-1}).hint({_id:1}).explain()
+assert.eq(true, plan.indexOnly, "sort.2.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "sort.2.1 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
\ No newline at end of file
diff --git a/jstests/core/covered_index_sort_3.js b/jstests/core/covered_index_sort_3.js
new file mode 100644
index 00000000000..8f5986c4d76
--- /dev/null
+++ b/jstests/core/covered_index_sort_3.js
@@ -0,0 +1,16 @@
+// Compound index covered query tests with sort
+
+var coll = db.getCollection("covered_sort_3")
+coll.drop()
+for (i=0;i<100;i++) {
+ coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)})
+}
+
+coll.ensureIndex({a:1,b:-1,c:1})
+
+// Test no query, sort on all fields in index order
+var plan = coll.find({}, {b:1, c:1, _id:0}).sort({a:1,b:-1,c:1}).hint({a:1, b:-1, c:1}).explain()
+assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query")
+assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query")
+
+print ('all tests pass')
diff --git a/jstests/core/create_indexes.js b/jstests/core/create_indexes.js
new file mode 100644
index 00000000000..6c54c4de0cb
--- /dev/null
+++ b/jstests/core/create_indexes.js
@@ -0,0 +1,83 @@
+t = db.create_indexes;
+t.drop();
+
+// TODO: revisit this after createIndexes api stabilizes.
+var isMongos = ("isdbgrid" == db.runCommand("ismaster").msg);
+var extractResult = function(obj) {
+ if (!isMongos) return obj;
+
+ // Sample mongos format:
+ // {
+ // raw: {
+ // "localhost:30000": {
+ // createdCollectionAutomatically: false,
+ // numIndexesBefore: 3,
+ // numIndexesAfter: 5,
+ // ok: 1
+ // }
+ // },
+ // ok: 1
+ // }
+
+ var numFields = 0;
+ var result = null;
+ for (var field in obj.raw) {
+ result = obj.raw[field];
+ numFields++;
+ }
+
+ assert.neq(null, result);
+ assert.eq(1, numFields);
+ return result;
+};
+
+
+res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } );
+res = extractResult( res );
+assert( res.createdCollectionAutomatically );
+assert.eq( 1, res.numIndexesBefore );
+assert.eq( 2, res.numIndexesAfter );
+
+res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } );
+res = extractResult( res );
+assert.eq( 2, res.numIndexesBefore );
+assert( res.noChangesMade );
+
+res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" },
+ { key : { "y" : 1 }, name : "y_1" } ] } );
+res = extractResult( res );
+assert( !res.createdCollectionAutomatically );
+assert.eq( 2, res.numIndexesBefore );
+assert.eq( 3, res.numIndexesAfter );
+
+res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" },
+ { key : { "b" : 1 }, name : "b_1" } ] } );
+res = extractResult( res );
+assert( !res.createdCollectionAutomatically );
+assert.eq( 3, res.numIndexesBefore );
+assert.eq( 5, res.numIndexesAfter );
+
+res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" },
+ { key : { "b" : 1 }, name : "b_1" } ] } );
+res = extractResult( res );
+assert.eq( 5, res.numIndexesBefore );
+assert( res.noChangesMade );
+
+res = t.runCommand( "createIndexes", { indexes : [ {} ] } );
+assert( !res.ok );
+
+res = t.runCommand( "createIndexes", { indexes : [ {} , { key : { m : 1 }, name : "asd" } ] } );
+assert( !res.ok );
+
+assert.eq( 5, t.getIndexes().length );
+
+res = t.runCommand( "createIndexes",
+ { indexes : [ { key : { "c" : 1 }, sparse : true, name : "c_1" } ] } )
+assert.eq( 6, t.getIndexes().length );
+assert.eq( 1, t.getIndexes().filter( function(z){ return z.sparse; } ).length );
+
+res = t.runCommand( "createIndexes",
+ { indexes : [ { key : { "x" : "foo" }, name : "x_1" } ] } );
+assert( !res.ok )
+
+assert.eq( 6, t.getIndexes().length );
diff --git a/jstests/core/currentop.js b/jstests/core/currentop.js
new file mode 100644
index 00000000000..e1d2e73cc7a
--- /dev/null
+++ b/jstests/core/currentop.js
@@ -0,0 +1,79 @@
+print("BEGIN currentop.js");
+
+// test basic currentop functionality + querying of nested documents
+t = db.jstests_currentop
+t.drop();
+
+for(i=0;i<100;i++) {
+ t.save({ "num": i });
+}
+
+print("count:" + t.count());
+
+function ops(q) {
+ printjson( db.currentOp().inprog );
+ return db.currentOp(q).inprog;
+}
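+
+// In currentOp output, "locks.^test" is the per-database lock entry for the
+// "test" database; "r"/"R" are read lock modes and "w" a write mode.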
+
+print("start shell");
+
+// The $where function sleeps one second per document (100 documents); the
+// operation can be killed between documents, so the test should complete well
+// before 100 seconds.
+s1 = startParallelShell("db.jstests_currentop.count( { '$where': function() { sleep(1000); } } )");
+
+print("sleep");
+sleep(1000);
+
+print("inprog:");
+printjson(db.currentOp().inprog)
+print()
+sleep(1);
+print("inprog:");
+printjson(db.currentOp().inprog)
+print()
+
+// need to wait for read to start
+print("wait have some ops");
+assert.soon( function(){
+ return ops( { "locks.^test": "r", "ns": "test.jstests_currentop" } ).length +
+ ops({ "locks.^test": "R", "ns": "test.jstests_currentop" }).length >= 1;
+}, "have_some_ops");
+print("ok");
+
+s2 = startParallelShell( "db.jstests_currentop.update({ '$where': function() { sleep(150); } }," +
+ " { 'num': 1 }, false, true );" );
+
+o = [];
+
+function f() {
+ o = ops({ "ns": "test.jstests_currentop" });
+
+ printjson(o);
+
+ var writes = ops({ "locks.^test": "w", "ns": "test.jstests_currentop" }).length;
+
+ var readops = ops({ "locks.^test": "r", "ns": "test.jstests_currentop" });
+ print("readops:");
+ printjson(readops);
+ var reads = readops.length;
+
+ print("total: " + o.length + " w: " + writes + " r:" + reads);
+
+ return o.length > writes && o.length > reads;
+}
+
+print("go");
+
+assert.soon( f, "f" );
+
+// avoid waiting for the operations to complete (if soon succeeded)
+for(var i in o) {
+ db.killOp(o[i].opid);
+}
+
+start = new Date();
+
+s1();
+s2();
+
+// don't want to pass if timeout killed the js function
+assert( ( new Date() ) - start < 30000 );
diff --git a/jstests/core/cursor1.js b/jstests/core/cursor1.js
new file mode 100644
index 00000000000..8448752bb0c
--- /dev/null
+++ b/jstests/core/cursor1.js
@@ -0,0 +1,20 @@
+
+t = db.cursor1
+t.drop();
+
+big = "";
+while ( big.length < 50000 )
+ big += "asdasdasdasdsdsdadsasdasdasD";
+
+num = Math.ceil( 10000000 / big.length );
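+// num works out to roughly 200 documents of ~50KB each, i.e. about 10MB of data.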
+
+for ( var i=0; i<num; i++ ){
+ t.save( { num : i , str : big } );
+}
+
+assert.eq( num , t.find().count() );
+assert.eq( num , t.find().itcount() );
+
+assert.eq( num / 2 , t.find().limit(num/2).itcount() );
+
+t.drop(); // save some space
diff --git a/jstests/core/cursor2.js b/jstests/core/cursor2.js
new file mode 100644
index 00000000000..2389a6a5d74
--- /dev/null
+++ b/jstests/core/cursor2.js
@@ -0,0 +1,24 @@
+
+/**
+ * test that the count returned from the cursor equals the number of objects actually returned
+ *
+ * BUG 884
+ */
+function testCursorCountVsArrLen(dbConn) {
+
+ var coll = dbConn.ed_db_cursor2_ccvsal;
+
+ coll.drop();
+
+ coll.save({ a: 1, b : 1});
+ coll.save({ a: 2, b : 1});
+ coll.save({ a: 3});
+
+ var fromCount = coll.find({}, {b:1}).count();
+ var fromArrLen = coll.find({}, {b:1}).toArray().length;
+
+ assert(fromCount == fromArrLen, "count from cursor [" + fromCount + "] != count from arrlen [" + fromArrLen + "]");
+}
+
+
+testCursorCountVsArrLen(db);
diff --git a/jstests/core/cursor3.js b/jstests/core/cursor3.js
new file mode 100644
index 00000000000..d23264c94ba
--- /dev/null
+++ b/jstests/core/cursor3.js
@@ -0,0 +1,35 @@
+// Test inequality bounds combined with ordering for a single-field index.
+// BUG 1079 (fixed)
+
+testNum = 1;
+
+function checkResults( expected, cursor , testNum ) {
+ assert.eq( expected.length, cursor.count() , "testNum: " + testNum + " A : " + tojson( cursor.toArray() ) + " " + tojson( cursor.explain() ) );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ], cursor[ i ][ "a" ] , "testNum: " + testNum + " B" );
+ }
+}
+
+t = db.cursor3;
+t.drop()
+
+t.save( { a: 0 } );
+t.save( { a: 1 } );
+t.save( { a: 2 } );
+
+t.ensureIndex( { a: 1 } );
+
+
+
+checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+
+checkResults( [ 1, 2 ], t.find( { a: { $gt: 0 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 2, 1 ], t.find( { a: { $gt: 0 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 2 ], t.find( { a: { $gte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 2, 1 ], t.find( { a: { $gte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+
+checkResults( [ 0, 1 ], t.find( { a: { $lt: 2 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 0 ], t.find( { a: { $lt: 2 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 0, 1 ], t.find( { a: { $lte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 0 ], t.find( { a: { $lte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
diff --git a/jstests/core/cursor4.js b/jstests/core/cursor4.js
new file mode 100644
index 00000000000..b08a72f62e5
--- /dev/null
+++ b/jstests/core/cursor4.js
@@ -0,0 +1,47 @@
+// Test inequality bounds with multi-field sorting
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a, cursor[ i ].a );
+ assert.eq( expected[ i ].b, cursor[ i ].b );
+ }
+}
+
+function testConstrainedFindMultiFieldSorting( db ) {
+ r = db.ed_db_cursor4_cfmfs;
+ r.drop();
+
+ entries = [ { a: 0, b: 0 },
+ { a: 0, b: 1 },
+ { a: 1, b: 1 },
+ { a: 1, b: 1 },
+ { a: 2, b: 0 } ];
+ for( i = 0; i < entries.length; ++i )
+ r.save( entries[ i ] );
+ r.ensureIndex( { a: 1, b: 1 } );
+ reverseEntries = entries.slice();
+ reverseEntries.reverse();
+
+ checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 2, 5 ), r.find( { a: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 0, 3 ), r.find( { a: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 0, 4 ), r.find( { a: { $lt: 2 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 5 ), r.find( { a: { $lt: 2 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 4, 5 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 2, 4 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( reverseEntries.slice( 0, 1 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 3 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 0, 1 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( reverseEntries.slice( 4, 5 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+}
+
+testConstrainedFindMultiFieldSorting( db );
diff --git a/jstests/core/cursor5.js b/jstests/core/cursor5.js
new file mode 100644
index 00000000000..6434d2b3887
--- /dev/null
+++ b/jstests/core/cursor5.js
@@ -0,0 +1,36 @@
+// Test bounds with subobject indexes.
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a.b, cursor[ i ].a.b );
+ assert.eq( expected[ i ].a.c, cursor[ i ].a.c );
+ assert.eq( expected[ i ].a.d, cursor[ i ].a.d );
+ assert.eq( expected[ i ].e, cursor[ i ].e );
+ }
+}
+
+function testBoundsWithSubobjectIndexes( db ) {
+ r = db.ed_db_cursor5_bwsi;
+ r.drop();
+
+ z = [ { a: { b: 1, c: 2, d: 3 }, e: 4 },
+ { a: { b: 1, c: 2, d: 3 }, e: 5 },
+ { a: { b: 1, c: 2, d: 4 }, e: 4 },
+ { a: { b: 1, c: 2, d: 4 }, e: 5 },
+ { a: { b: 2, c: 2, d: 3 }, e: 4 },
+ { a: { b: 2, c: 2, d: 3 }, e: 5 } ];
+ for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+ idx = { "a.d": 1, a: 1, e: -1 };
+ rIdx = { "a.d": -1, a: -1, e: 1 };
+ r.ensureIndex( idx );
+
+ checkResults( [ z[ 0 ], z[ 4 ], z[ 2 ] ], r.find( { e: 4 } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 1 ], z[ 3 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( idx ).hint( idx ) );
+
+ checkResults( [ z[ 2 ], z[ 4 ], z[ 0 ] ], r.find( { e: 4 } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 3 ], z[ 1 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( rIdx ).hint( idx ) );
+}
+
+testBoundsWithSubobjectIndexes( db );
diff --git a/jstests/core/cursor6.js b/jstests/core/cursor6.js
new file mode 100644
index 00000000000..33944eafd3a
--- /dev/null
+++ b/jstests/core/cursor6.js
@@ -0,0 +1,100 @@
+// Test different directions for compound indexes
+
+function eq( one, two ) {
+ assert.eq( one.a, two.a );
+ assert.eq( one.b, two.b );
+}
+
+function checkExplain( e, idx, reverse, nScanned ) {
+ if ( !reverse ) {
+ if ( idx ) {
+ assert.eq( "BtreeCursor a_1_b_-1", e.cursor );
+ } else {
+ assert.eq( "BasicCursor", e.cursor );
+ }
+ } else {
+ if ( idx ) {
+ assert.eq( "BtreeCursor a_1_b_-1 reverse", e.cursor );
+ } else {
+ assert( false );
+ }
+ }
+ assert.eq( nScanned, e.nscanned );
+}
+
+function check( indexed ) {
+ var hint;
+ if ( indexed ) {
+ hint = { a: 1, b: -1 };
+ } else {
+ hint = { $natural: 1 };
+ }
+
+ e = r.find().sort( { a: 1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: 1, b: 1 } ).hint( hint );
+ eq( z[ 0 ], f[ 0 ] );
+ eq( z[ 1 ], f[ 1 ] );
+ eq( z[ 2 ], f[ 2 ] );
+ eq( z[ 3 ], f[ 3 ] );
+
+ e = r.find().sort( { a: 1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: 1, b: -1 } ).hint( hint );
+ eq( z[ 1 ], f[ 0 ] );
+ eq( z[ 0 ], f[ 1 ] );
+ eq( z[ 3 ], f[ 2 ] );
+ eq( z[ 2 ], f[ 3 ] );
+
+ e = r.find().sort( { a: -1, b: 1 } ).hint( hint ).explain();
+    checkExplain( e, indexed, indexed, 4 );
+ f = r.find().sort( { a: -1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+ eq( z[ 0 ], f[ 2 ] );
+ eq( z[ 1 ], f[ 3 ] );
+
+ e = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint );
+ eq( z[ 3 ], f[ 0 ] );
+ eq( z[ 2 ], f[ 1 ] );
+
+ e = r.find( { a : { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint ).explain();
+    checkExplain( e, indexed, indexed, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+
+ e = r.find( { a : { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+
+ e = r.find().sort( { a: -1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: -1, b: -1 } ).hint( hint );
+ eq( z[ 3 ], f[ 0 ] );
+ eq( z[ 2 ], f[ 1 ] );
+ eq( z[ 1 ], f[ 2 ] );
+ eq( z[ 0 ], f[ 3 ] );
+}
+
+db.setProfilingLevel( 1 );
+r = db.ed_db_cursor6;
+r.drop();
+
+z = [ { a: 1, b: 1 },
+ { a: 1, b: 2 },
+ { a: 2, b: 1 },
+ { a: 2, b: 2 } ];
+for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+
+r.ensureIndex( { a: 1, b: -1 } );
+
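+// Run the same checks twice: first hinting $natural (a collection scan), then
+// hinting the index.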
+check( false );
+check( true );
+
+assert.eq( "BasicCursor", r.find().sort( { a: 1, b: -1, z: 1 } ).hint( { $natural: -1 } ).explain().cursor );
diff --git a/jstests/core/cursor7.js b/jstests/core/cursor7.js
new file mode 100644
index 00000000000..97cfbb738b3
--- /dev/null
+++ b/jstests/core/cursor7.js
@@ -0,0 +1,42 @@
+// Test bounds with multiple inequalities and sorting.
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a, cursor[ i ].a );
+ assert.eq( expected[ i ].b, cursor[ i ].b );
+ }
+}
+
+function testMultipleInequalities( db ) {
+ r = db.ed_db_cursor_mi;
+ r.drop();
+
+ z = [ { a: 1, b: 2 },
+ { a: 3, b: 4 },
+ { a: 5, b: 6 },
+ { a: 7, b: 8 } ];
+ for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+ idx = { a: 1, b: 1 };
+ rIdx = { a: -1, b: -1 };
+ r.ensureIndex( idx );
+
+ checkResults( [ z[ 2 ], z[ 3 ] ], r.find( { a: { $gt: 3 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( idx ).hint( idx ) );
+
+ checkResults( [ z[ 3 ], z[ 2 ] ], r.find( { a: { $gt: 3 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( rIdx ).hint( idx ) );
+
+ checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( rIdx ).hint( idx ) );
+
+ checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
+}
+
+testMultipleInequalities( db );
diff --git a/jstests/core/cursora.js b/jstests/core/cursora.js
new file mode 100644
index 00000000000..6710c1e9dc6
--- /dev/null
+++ b/jstests/core/cursora.js
@@ -0,0 +1,49 @@
+t = db.cursora
+
+function run( n , atomic ){
+ if( !isNumber(n) ) {
+ print("n:");
+ printjson(n);
+ assert(isNumber(n), "cursora.js isNumber");
+ }
+ t.drop()
+
+ var bulk = t.initializeUnorderedBulkOp();
+ for ( i=0; i<n; i++ )
+ bulk.insert( { _id : i } );
+ assert.writeOK(bulk.execute());
+
+ print("cursora.js startParallelShell n:"+n+" atomic:"+atomic)
+ join = startParallelShell( "sleep(50);" +
+ "db.cursora.remove({" + ( atomic ? "$atomic:true" : "" ) + "});" );
+
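+    // With $atomic:true, the remove does not yield while it runs, exercising a
+    // different interleaving against the reading cursor below.
+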
+ var start = null;
+ var ex = null;
+ var num = null;
+ var end = null;
+ try {
+ start = new Date()
+ ex = t.find(function () { num = 2; for (var x = 0; x < 1000; x++) num += 2; return num > 0; }).sort({ _id: -1 }).explain()
+ num = ex.n
+ end = new Date()
+ }
+ catch (e) {
+ print("cursora.js FAIL " + e);
+ join();
+ throw e;
+ }
+
+ join()
+
+ //print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) )
+ assert.eq( 0 , t.count() , "after remove: " + tojson( ex ) )
+    // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more reliable so we can re-enable the assert
+ if ( n == num )
+ print( "cursora.js warning: shouldn't have counted all n: " + n + " num: " + num );
+}
+
+run( 1500 )
+run( 5000 )
+run( 1500 , true )
+run( 5000 , true )
+print("cursora.js SUCCESS")
diff --git a/jstests/core/cursorb.js b/jstests/core/cursorb.js
new file mode 100644
index 00000000000..65e356e89cb
--- /dev/null
+++ b/jstests/core/cursorb.js
@@ -0,0 +1,17 @@
+// The 'cursor not found in map -1' warning is not logged when a getMore exhausts a client cursor.
+// SERVER-6931
+
+t = db.jstests_cursorb;
+t.drop();
+
+// Exhaust a client cursor in get more.
+for( i = 0; i < 200; ++i ) {
+ t.save( { a:i } );
+}
+t.find().itcount();
+
+// Check that the 'cursor not found in map -1' message is not printed. This message indicates an
+// attempt to look up a cursor with an invalid id and should never appear in the log.
+log = db.adminCommand( { getLog:'global' } ).log
+log.forEach( function( line ) { assert( !line.match( /cursor not found in map -1 / ),
+ 'Cursor map lookup with id -1.' ); } );
diff --git a/jstests/core/datasize.js b/jstests/core/datasize.js
new file mode 100644
index 00000000000..13e9f11bf0c
--- /dev/null
+++ b/jstests/core/datasize.js
@@ -0,0 +1,35 @@
+// test end-to-end data allocation without powerOf2Sizes enabled
+f = db.jstests_datasize;
+f.drop();
+
+// this test requires usePowerOf2Sizes to be off
+db.createCollection( f.getName(), { usePowerOf2Sizes: false } );
+assert.eq(0, f.stats().userFlags);
+
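+// The sizes asserted below are record data sizes under this allocation
+// strategy: the document's BSON size rounded up to a 4-byte boundary
+// ({_id: ObjectId(), qq: 'c'} is exactly 32 bytes; {_id: ObjectId(), qq: 'fg'}
+// is 33 bytes of BSON, stored as 36 -- hence 68 total). This is why the test
+// requires usePowerOf2Sizes to be off.
+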
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'c'} );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'fg'} );
+assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+
+f.drop();
+db.createCollection( f.getName(), { usePowerOf2Sizes: false} );
+
+f.ensureIndex( {qq:1} );
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'c'} );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'fg'} );
+assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}} ).ok );
+
+assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'z' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{qq:1}} ).size );
+assert.eq( 36, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'d'}, max:{qq:'z' }, keyPattern:{qq:1}} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'c' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'d' }} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{a:1}} ).ok );
diff --git a/jstests/core/datasize2.js b/jstests/core/datasize2.js
new file mode 100644
index 00000000000..103cb2096ee
--- /dev/null
+++ b/jstests/core/datasize2.js
@@ -0,0 +1,27 @@
+
+t = db.datasize2
+t.drop();
+
+N = 1000
+for ( i=0; i<N; i++ ){
+ t.insert( { _id : i , s : "asdasdasdasdasdasdasd" } );
+}
+
+c = { dataSize : "test.datasize2" ,
+ "keyPattern" : {
+ "_id" : 1
+ },
+ "min" : {
+ "_id" : 0
+ },
+ "max" : {
+ "_id" : N
+ }
+ };
+
+
+assert.eq( N , db.runCommand( c ).numObjects , "A" )
+
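+// When maxObjects is set, the scan stops once the limit is exceeded, so the
+// command reports maxObjects + 1 objects -- hence 101 below.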
+c.maxObjects = 100;
+assert.eq( 101 , db.runCommand( c ).numObjects , "B" )
+
diff --git a/jstests/core/datasize3.js b/jstests/core/datasize3.js
new file mode 100644
index 00000000000..df79e6d9b04
--- /dev/null
+++ b/jstests/core/datasize3.js
@@ -0,0 +1,34 @@
+
+t = db.datasize3;
+t.drop()
+
+function run( options ){
+ var c = { dataSize : "test.datasize3" };
+ if ( options )
+ Object.extend( c , options );
+ return db.runCommand( c );
+}
+
+t.insert( { x : 1 } )
+
+a = run()
+b = run( { estimate : true } )
+
+assert.eq( a.size , b.size );
+
+
+t.ensureIndex( { x : 1 } )
+
+for ( i=2; i<100; i++ )
+ t.insert( { x : i } )
+
+a = run( { min : { x : 20 } , max : { x : 50 } } ).size
+b = run( { min : { x : 20 } , max : { x : 50 } , estimate : true } ).size
+
+ratio = Math.min( a , b ) / Math.max( a , b );
+
+assert.lt( 0.97 , ratio , "sizes not equal a: " + a + " b: " + b );
+
+
+
+
diff --git a/jstests/core/date1.js b/jstests/core/date1.js
new file mode 100644
index 00000000000..e6fc147f9f4
--- /dev/null
+++ b/jstests/core/date1.js
@@ -0,0 +1,17 @@
+
+t = db.date1;
+
+
+function go( d , msg ){
+ t.drop();
+ t.save({ a: 1, d: d });
+// printjson(d);
+// printjson(t.findOne().d);
+ assert.eq( d , t.findOne().d , msg )
+}
+
+go( new Date() , "A" )
+go( new Date( 1 ) , "B")
+go( new Date( 0 ) , "C (old spidermonkey lib fails this test)")
+go(new Date(-10), "neg")
+
diff --git a/jstests/core/date2.js b/jstests/core/date2.js
new file mode 100644
index 00000000000..ec13865a862
--- /dev/null
+++ b/jstests/core/date2.js
@@ -0,0 +1,13 @@
+// Check that it's possible to compare a Date to a Timestamp - SERVER-3304
+// Check Date / Timestamp comparison equivalence - SERVER-3222
+
+t = db.jstests_date2;
+t.drop();
+
+t.ensureIndex( {a:1} );
+
+t.save( {a:new Timestamp()} );
+
+if ( 0 ) { // SERVER-3304
+assert.eq( 1, t.find( {a:{$gt:new Date(0)}} ).itcount() );
+}
diff --git a/jstests/core/date3.js b/jstests/core/date3.js
new file mode 100644
index 00000000000..e7ddf717c73
--- /dev/null
+++ b/jstests/core/date3.js
@@ -0,0 +1,31 @@
+// Check dates before Unix epoch - SERVER-405
+
+t = db.date3;
+t.drop()
+
+d1 = new Date(-1000)
+dz = new Date(0)
+d2 = new Date(1000)
+
+t.save( {x: 3, d: dz} )
+t.save( {x: 2, d: d2} )
+t.save( {x: 1, d: d1} )
+
+function test () {
+ var list = t.find( {d: {$lt: dz}} )
+ assert.eq ( 1, list.size() )
+ assert.eq ( 1, list[0].x )
+ assert.eq ( d1, list[0].d )
+ var list = t.find( {d: {$gt: dz}} )
+ assert.eq ( 1, list.size() )
+ assert.eq ( 2, list[0].x )
+ var list = t.find().sort( {d:1} )
+ assert.eq ( 3, list.size() )
+ assert.eq ( 1, list[0].x )
+ assert.eq ( 3, list[1].x )
+ assert.eq ( 2, list[2].x )
+}
+
+test()
+t.ensureIndex( {d: 1} )
+test()
diff --git a/jstests/core/db.js b/jstests/core/db.js
new file mode 100644
index 00000000000..66a0bd73ede
--- /dev/null
+++ b/jstests/core/db.js
@@ -0,0 +1,11 @@
+function testInvalidDBNameThrowsExceptionWithConstructor() {
+ assert.throws( function() { return new DB( null, "/\\" ); } );
+}
+
+function testInvalidDBNameThrowsExceptionWithSibling() {
+ assert.throws( function() { return db.getSiblingDB( "/\\" ); } );
+}
+
+testInvalidDBNameThrowsExceptionWithConstructor();
+testInvalidDBNameThrowsExceptionWithSibling();
+
diff --git a/jstests/core/dbadmin.js b/jstests/core/dbadmin.js
new file mode 100644
index 00000000000..bab348d5700
--- /dev/null
+++ b/jstests/core/dbadmin.js
@@ -0,0 +1,105 @@
+load('jstests/aggregation/extras/utils.js');
+
+// Check that smallArray is entirely contained by largeArray
+// returns false if a member of smallArray is not in largeArray
+function arrayIsSubset(smallArray, largeArray) {
+
+ for(var i = 0; i < smallArray.length; i++) {
+ if(!Array.contains(largeArray, smallArray[i])) {
+ print("Could not find " + smallArray[i] + " in largeArray");
+ return false;
+ }
+ }
+
+ return true;
+}
+
+t = db.dbadmin;
+t.save( { x : 1 } );
+
+before = db._adminCommand( "serverStatus" )
+if ( before.mem.supported ){
+ cmdres = db._adminCommand( "closeAllDatabases" );
+ after = db._adminCommand( "serverStatus" );
+ assert( before.mem.mapped > after.mem.mapped , "closeAllDatabases does something before:" + tojson( before.mem ) + " after:" + tojson( after.mem ) + " cmd res:" + tojson( cmdres ) );
+ print( before.mem.mapped + " -->> " + after.mem.mapped );
+}
+else {
+ print( "can't test serverStatus on this machine" );
+}
+
+t.save( { x : 1 } );
+
+res = db._adminCommand( "listDatabases" );
+assert( res.databases && res.databases.length > 0 , "listDatabases 1 " + tojson(res) );
+
+now = new Date();
+x = db._adminCommand( "ismaster" );
+assert( x.ismaster , "ismaster failed: " + tojson( x ) )
+assert( x.localTime, "ismaster didn't include time: " + tojson(x))
+localTimeSkew = x.localTime - now
+if ( localTimeSkew >= 50 ) {
+ print( "Warning: localTimeSkew " + localTimeSkew + " > 50ms." )
+}
+assert.lt( localTimeSkew, 500, "isMaster.localTime" )
+
+before = db.runCommand( "serverStatus" )
+print(before.uptimeEstimate);
+sleep( 5000 )
+after = db.runCommand( "serverStatus" )
+print(after.uptimeEstimate);
+assert.lt( 2 , after.uptimeEstimate , "up1" )
+assert.gt( after.uptimeEstimate , before.uptimeEstimate , "up2" )
+
+// Test startup_log
+var stats = db.getSisterDB( "local" ).startup_log.stats();
+assert(stats.capped);
+
+var latestStartUpLog = db.getSisterDB( "local" ).startup_log.find().sort( { $natural: -1 } ).limit(1).next();
+var serverStatus = db._adminCommand( "serverStatus" );
+var cmdLine = db._adminCommand( "getCmdLineOpts" ).parsed;
+
+// Test that the startup log has the expected keys
+var verbose = false;
+var expectedKeys = ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"];
+var keys = Object.keySet(latestStartUpLog);
+assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed');
+
+// Tests _id implicitly - it should consist of host-timestamp
+// Setup expected startTime and startTimeLocal from the supplied timestamp
+var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp
+var _idUptime = _id.pop();
+var _idHost = _id.join('-');
+var uptimeSinceEpochRounded = Math.floor(_idUptime/1000) * 1000;
+var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime
+
+assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id");
+assert.eq(serverStatus.host.split(':')[0], latestStartUpLog.hostname, "Hostname doesn't match one in server status");
+assert.closeWithinMS(startTime, latestStartUpLog.startTime,
+ "StartTime doesn't match one from _id", 2000); // Expect less than 2 sec delta
+assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts");
+assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus");
+
+// Test buildinfo
+var buildinfo = db.runCommand( "buildinfo" );
+delete buildinfo.ok; // Delete extra meta info not in startup_log
+var isMaster = db._adminCommand( "ismaster" );
+
+// Test buildinfo has the expected keys
+var expectedKeys = ["version", "gitVersion", "OpenSSLVersion", "sysInfo", "loaderFlags", "compilerFlags", "allocator", "versionArray", "javascriptEngine", "bits", "debug", "maxBsonObjectSize"];
+var keys = Object.keySet(latestStartUpLog.buildinfo);
+// buildinfo may contain extra, platform-specific keys, so check that the
+// expected keys are a subset of the actual keys rather than an exact match.
+assert(arrayIsSubset(expectedKeys, keys), "buildinfo keys failed! \n expected:\t" + expectedKeys + "\n actual:\t" + keys);
+assert.eq(buildinfo, latestStartUpLog.buildinfo, "buildinfo doesn't match that from buildinfo command");
+
+// Test version and version Array
+var version = latestStartUpLog.buildinfo.version.split('-')[0];
+var versionArray = latestStartUpLog.buildinfo.versionArray;
+var versionArrayCleaned = [];
+// Only create a string with 2 dots (2.5.5, not 2.5.5.0)
+for (var i = 0; i < (versionArray.length - 1); i++) {
+    if (versionArray[i] >= 0) {
+        versionArrayCleaned.push(versionArray[i]);
+    }
+}
+
+assert.eq(serverStatus.version, latestStartUpLog.buildinfo.version, "Mongo version doesn't match that from ServerStatus");
+assert.eq(version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray");
+assert(["V8", "SpiderMonkey", "Unknown"].indexOf(latestStartUpLog.buildinfo.javascriptEngine) > -1);
+assert.eq(isMaster.maxBsonObjectSize, latestStartUpLog.buildinfo.maxBsonObjectSize, "maxBsonObjectSize doesn't match one from ismaster");
diff --git a/jstests/core/dbcase.js b/jstests/core/dbcase.js
new file mode 100644
index 00000000000..c3aa466ba17
--- /dev/null
+++ b/jstests/core/dbcase.js
@@ -0,0 +1,27 @@
+// Check db name duplication constraint SERVER-2111
+
+a = db.getSisterDB( "dbcasetest_dbnamea" )
+b = db.getSisterDB( "dbcasetest_dbnameA" )
+
+a.dropDatabase();
+b.dropDatabase();
+
+assert.writeOK( a.foo.save( { x : 1 } ));
+
+res = b.foo.save( { x : 1 } );
+assert.writeError( res );
+assert.eq( 13297, res.getWriteError().code, res.toString() );
+
+assert.neq( -1, db.getMongo().getDBNames().indexOf( a.getName() ) );
+assert.eq( -1, db.getMongo().getDBNames().indexOf( b.getName() ) );
+printjson( db.getMongo().getDBs().databases );
+
+a.dropDatabase();
+b.dropDatabase();
+
+ai = db.getMongo().getDBNames().indexOf( a.getName() );
+bi = db.getMongo().getDBNames().indexOf( b.getName() );
+// One of these dbs may exist if there is a slave active, but they must
+// not both exist.
+assert( ai == -1 || bi == -1 );
+printjson( db.getMongo().getDBs().databases );
diff --git a/jstests/core/dbcase2.js b/jstests/core/dbcase2.js
new file mode 100644
index 00000000000..f9973d98837
--- /dev/null
+++ b/jstests/core/dbcase2.js
@@ -0,0 +1,9 @@
+// SERVER-2111 Check that an in-memory db name will block creation of a db with a similar but differently cased name.
+
+a = db.getSisterDB( "dbcase2test_dbnamea" )
+b = db.getSisterDB( "dbcase2test_dbnameA" )
+
+a.c.count();
+assert.throws( function() { b.c.count() } );
+
+assert.eq( -1, db.getMongo().getDBNames().indexOf( "dbcase2test_dbnameA" ) );
diff --git a/jstests/core/dbhash.js b/jstests/core/dbhash.js
new file mode 100644
index 00000000000..7fea4b4d50c
--- /dev/null
+++ b/jstests/core/dbhash.js
@@ -0,0 +1,58 @@
+
+a = db.dbhasha;
+b = db.dbhashb;
+
+a.drop();
+b.drop();
+
+// debug SERVER-761
+db.getCollectionNames().forEach( function( x ) {
+ v = db[ x ].validate();
+ if ( !v.valid ) {
+ print( x );
+ printjson( v );
+ }
+ } );
+
+function dbhash( mydb ) {
+ var ret = mydb.runCommand( "dbhash" );
+ assert.commandWorked( ret, "dbhash failure" );
+ return ret;
+}
+
+function gh( coll , mydb ){
+ if ( ! mydb ) mydb = db;
+ var x = dbhash( mydb ).collections[coll.getName()];
+ if ( ! x )
+ return "";
+ return x;
+}
+
+function dbh( mydb ){
+ return dbhash( mydb ).md5;
+}
+
+assert.eq( gh( a ) , gh( b ) , "A1" );
+
+a.insert( { _id : 5 } );
+assert.neq( gh( a ) , gh( b ) , "A2" );
+
+b.insert( { _id : 5 } );
+assert.eq( gh( a ) , gh( b ) , "A3" );
+
+dba = db.getSisterDB( "dbhasha" );
+dbb = db.getSisterDB( "dbhashb" );
+
+dba.dropDatabase();
+dbb.dropDatabase();
+
+assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" );
+assert.eq( dbh( dba ) , dbh( dbb ) , "C1" );
+
+dba.foo.insert( { _id : 5 } );
+assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" );
+assert.neq( dbh( dba ) , dbh( dbb ) , "C2" );
+
+dbb.foo.insert( { _id : 5 } );
+assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" );
+assert.eq( dbh( dba ) , dbh( dbb ) , "C3" );
diff --git a/jstests/core/dbhash2.js b/jstests/core/dbhash2.js
new file mode 100644
index 00000000000..ac491291c2b
--- /dev/null
+++ b/jstests/core/dbhash2.js
@@ -0,0 +1,22 @@
+
+mydb = db.getSisterDB( "config" );
+
+t = mydb.foo;
+t.drop();
+
+t.insert( { x : 1 } );
+res1 = mydb.runCommand( "dbhash" );
+assert( res1.fromCache.indexOf( "config.foo" ) == -1 );
+
+res2 = mydb.runCommand( "dbhash" );
+assert( res2.fromCache.indexOf( "config.foo" ) >= 0 );
+assert.eq( res1.collections.foo, res2.collections.foo );
+
+t.insert( { x : 2 } );
+res3 = mydb.runCommand( "dbhash" );
+assert( res3.fromCache.indexOf( "config.foo" ) < 0 );
+assert.neq( res1.collections.foo, res3.collections.foo );
+
+
+
+
diff --git a/jstests/core/dbref1.js b/jstests/core/dbref1.js
new file mode 100644
index 00000000000..4a827662c1a
--- /dev/null
+++ b/jstests/core/dbref1.js
@@ -0,0 +1,10 @@
+
+a = db.dbref1a;
+b = db.dbref1b;
+
+a.drop();
+b.drop();
+
+a.save( { name : "eliot" } );
+b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } );
+assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
diff --git a/jstests/core/dbref2.js b/jstests/core/dbref2.js
new file mode 100644
index 00000000000..d1b4870322d
--- /dev/null
+++ b/jstests/core/dbref2.js
@@ -0,0 +1,20 @@
+
+a = db.dbref2a;
+b = db.dbref2b;
+c = db.dbref2c;
+
+a.drop();
+b.drop();
+c.drop();
+
+a.save( { name : "eliot" } );
+b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } );
+c.save( { num : 1 , links : [ new DBRef( "dbref2a" , a.findOne()._id ) ] } );
+
+assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
+assert.neq( "el" , b.findOne().link.fetch().name , "B" );
+
+// $elemMatch value
+var doc = c.findOne( { links: { $elemMatch: { $ref : "dbref2a", $id : a.findOne()._id } } } );
+assert.eq( "eliot" , doc.links[0].fetch().name , "C" );
+assert.neq( "el" , doc.links[0].fetch().name , "D" );
diff --git a/jstests/core/dbref3.js b/jstests/core/dbref3.js
new file mode 100644
index 00000000000..2f3ab8fa79c
--- /dev/null
+++ b/jstests/core/dbref3.js
@@ -0,0 +1,45 @@
+// Make sure we only make a DBRef object for objects where the first field is a string named $ref
+// and the second field is $id with any type. Only the first two fields matter for deciding if it
+// is a DBRef. See http://docs.mongodb.org/manual/reference/database-references/#dbrefs.
+
+var t = db.dbref3;
+
+t.drop();
+
+// true cases
+t.insert({sub: {$ref: "foo", $id: "bar"}, dbref: true});
+t.insert({sub: {$ref: "foo", $id: "bar", $db: "baz"}, dbref: true});
+t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted
+t.insert({sub: {$ref: "foo", $id: ObjectId()}, dbref: true});
+t.insert({sub: {$ref: "foo", $id: 1}, dbref: true});
+
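+// false cases: first field not a string named $ref, or fields missing/out of order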
+t.insert({sub: {$ref: 123/*not a string*/, $id: "bar"}, dbref: false});
+t.insert({sub: {$id: "bar", $ref: "foo"}, dbref: false});
+t.insert({sub: {$ref: "foo"}, dbref: false});
+t.insert({sub: {$id: "foo"}, dbref: false});
+t.insert({sub: {other: 1, $ref: "foo", $id: "bar"}, dbref: false});
+
+t.find().forEach(function(obj) {
+ assert.eq(obj.sub.constructor == DBRef, obj.dbref, tojson(obj));
+});
+
+// We should be able to run distinct against DBRef fields.
+var distinctRefs = t.distinct('sub.$ref');
+print('distinct $ref = ' + distinctRefs);
+
+var distinctIDs = t.distinct('sub.$id');
+print('distinct $id = ' + distinctIDs);
+
+var distinctDBs = t.distinct('sub.$db');
+print('distinct $db = ' + distinctDBs);
+
+// Confirm number of unique values in each DBRef field.
+assert.eq(2, distinctRefs.length);
+assert.eq(4, distinctIDs.length);
+assert.eq(1, distinctDBs.length);
+
+// $id is an array. perform positional projection on $id.
+t.insert({sub: {$ref: "foo", $id: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]}});
+var k = t.findOne({'sub.$id': {$elemMatch: {x: 2}}}, {_id: 0, 'sub.$id.$': 1});
+print('k = ' + tojson(k));
+assert.eq({sub: {$id: [{x: 2, y:2}]}}, k);
\ No newline at end of file
diff --git a/jstests/core/delx.js b/jstests/core/delx.js
new file mode 100644
index 00000000000..d28b2063898
--- /dev/null
+++ b/jstests/core/delx.js
@@ -0,0 +1,30 @@
+
+a = db.getSisterDB("delxa" )
+b = db.getSisterDB("delxb" )
+
+function setup( mydb ){
+ mydb.dropDatabase();
+ for ( i=0; i<100; i++ ){
+ mydb.foo.insert( { _id : i } );
+ }
+}
+
+setup( a );
+setup( b );
+
+assert.eq( 100 , a.foo.find().itcount() , "A1" )
+assert.eq( 100 , b.foo.find().itcount() , "A2" )
+
+x = a.foo.find().sort( { _id : 1 } ).batchSize( 60 )
+y = b.foo.find().sort( { _id : 1 } ).batchSize( 60 )
+
+x.next();
+y.next();
+
+a.foo.remove( { _id : { $gt : 50 } } );
+
+assert.eq( 51 , a.foo.find().itcount() , "B1" )
+assert.eq( 100 , b.foo.find().itcount() , "B2" )
+
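+// x already buffered its first batch of 60 documents client-side (one was
+// consumed above), so the remove does not affect them: 59 buffered documents
+// remain for x, while y's database was left untouched.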
+assert.eq( 59 , x.itcount() , "C1" )
+assert.eq( 99 , y.itcount() , "C2" ); // this was asserting because ClientCursor byLoc doesn't take db into consideration
diff --git a/jstests/core/depth_limit.js b/jstests/core/depth_limit.js
new file mode 100644
index 00000000000..7523a1fc9fe
--- /dev/null
+++ b/jstests/core/depth_limit.js
@@ -0,0 +1,56 @@
+// SERVER-11781 Don't crash when converting deeply nested or cyclical JS objects to BSON.
+
+function test() {
+ function assertTooBig(obj) {
+ // This used to crash rather than throwing an exception.
+ assert.throws(function(){Object.bsonsize(obj)});
+ }
+
+ function assertNotTooBig(obj) {
+ assert.doesNotThrow(function(){Object.bsonsize(obj)});
+ }
+
+ function objWithDepth(depth) {
+ var out = 1;
+ while (depth--) {
+ out = {o: out};
+ }
+ return out;
+ }
+
+ function arrayWithDepth(depth) {
+ var out = 1;
+ while (depth--) {
+ out = [out];
+ }
+ return out;
+ }
+
+ assertNotTooBig({});
+ assertNotTooBig({array: []});
+
+ var objCycle = {};
+ objCycle.cycle = objCycle;
+ assertTooBig(objCycle);
+
+ var arrayCycle = [];
+ arrayCycle.push(arrayCycle);
+ assertTooBig({array: arrayCycle});
+
+ var objDepthLimit = 150;
+ assertNotTooBig(objWithDepth(objDepthLimit - 1));
+ assertTooBig(objWithDepth(objDepthLimit));
+
+
+ var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object
+ assertNotTooBig({array: arrayWithDepth(arrayDepthLimit - 1)});
+ assertTooBig({array: arrayWithDepth(arrayDepthLimit)});
+}
+
+// test in shell
+test();
+
+// test on server
+db.depth_limit.drop();
+db.depth_limit.insert({});
+db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server
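+// (Hedged note) The limits above cover shell-side BSON conversion; documents stored on
+// the server are capped separately at 100 nesting levels, which this test does not probe.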
diff --git a/jstests/core/distinct1.js b/jstests/core/distinct1.js
new file mode 100644
index 00000000000..03e425af761
--- /dev/null
+++ b/jstests/core/distinct1.js
@@ -0,0 +1,40 @@
+
+t = db.distinct1;
+t.drop();
+
+assert.eq( 0 , t.distinct( "a" ).length , "test empty" );
+
+t.save( { a : 1 } )
+t.save( { a : 2 } )
+t.save( { a : 2 } )
+t.save( { a : 2 } )
+t.save( { a : 3 } )
+
+
+res = t.distinct( "a" );
+assert.eq( "1,2,3" , res.toString() , "A1" );
+
+assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" );
+
+t.drop();
+
+t.save( { a : { b : "a" } , c : 12 } );
+t.save( { a : { b : "b" } , c : 12 } );
+t.save( { a : { b : "c" } , c : 12 } );
+t.save( { a : { b : "c" } , c : 12 } );
+
+res = t.distinct( "a.b" );
+assert.eq( "a,b,c" , res.toString() , "B1" );
+printjson(t._distinct( "a.b" ).stats);
+assert.eq( "BasicCursor" , t._distinct( "a.b" ).stats.cursor , "B2" )
+
+t.drop();
+
+t.save({_id: 1, a: 1});
+t.save({_id: 2, a: 2});
+
+// Test distinct with _id.
+res = t.distinct( "_id" );
+assert.eq( "1,2", res.toString(), "C1" );
+res = t.distinct( "a", {_id: 1} );
+assert.eq( "1", res.toString(), "C2" );
diff --git a/jstests/core/distinct2.js b/jstests/core/distinct2.js
new file mode 100644
index 00000000000..41ee78c5117
--- /dev/null
+++ b/jstests/core/distinct2.js
@@ -0,0 +1,13 @@
+
+t = db.distinct2;
+t.drop();
+
+t.save({a:null});
+assert.eq( 0 , t.distinct('a.b').length , "A" );
+
+t.drop();
+t.save( { a : 1 } );
+assert.eq( [1] , t.distinct( "a" ) , "B" );
+t.save( {} )
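+// Documents missing the field contribute nothing to distinct, so the result is unchanged.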
+assert.eq( [1] , t.distinct( "a" ) , "C" );
+
diff --git a/jstests/core/distinct3.js b/jstests/core/distinct3.js
new file mode 100644
index 00000000000..0add7aeb95e
--- /dev/null
+++ b/jstests/core/distinct3.js
@@ -0,0 +1,35 @@
+// Yield and delete test case for query optimizer cursor. SERVER-4401
+
+t = db.jstests_distinct3;
+t.drop();
+
+t.ensureIndex({a:1});
+t.ensureIndex({b:1});
+
+var bulk = t.initializeUnorderedBulkOp();
+for( i = 0; i < 50; ++i ) {
+ for( j = 0; j < 2; ++j ) {
+ bulk.insert({a:i,c:i,d:j});
+ }
+}
+for( i = 0; i < 100; ++i ) {
+ bulk.insert({b:i,c:i+50});
+}
+assert.writeOK(bulk.execute());
+
+// Attempt to remove the last match for the {a:1} index scan while distinct is yielding.
+p = startParallelShell( 'for( i = 0; i < 100; ++i ) { ' +
+                        '    var bulk = db.jstests_distinct3.initializeUnorderedBulkOp(); ' +
+                        '    bulk.find( { a:49 } ).remove(); ' +
+                        '    for( j = 0; j < 20; ++j ) { ' +
+                        '        bulk.insert( { a:49, c:49, d:j } ); ' +
+                        '    } ' +
+                        '    bulk.execute(); ' +
+                        '} ' );
+
+for( i = 0; i < 100; ++i ) {
+ count = t.distinct( 'c', {$or:[{a:{$gte:0},d:0},{b:{$gte:0}}]} ).length;
+ assert.gt( count, 100 );
+}
+
+p();
diff --git a/jstests/core/distinct_array1.js b/jstests/core/distinct_array1.js
new file mode 100644
index 00000000000..2f289ad2e79
--- /dev/null
+++ b/jstests/core/distinct_array1.js
@@ -0,0 +1,91 @@
+t = db.distinct_array1;
+t.drop();
+
+t.save( { a : [1,2,3] } )
+t.save( { a : [2,3,4] } )
+t.save( { a : [3,4,5] } )
+t.save( { a : 9 } )
+
+
+// Without index.
+res = t.distinct( "a" ).sort();
+assert.eq( "1,2,3,4,5,9" , res.toString() , "A1" );
+
+// Array element 0 without index.
+res = t.distinct( "a.0" ).sort();
+assert.eq( "1,2,3" , res.toString() , "A2" );
+
+// Array element 1 without index.
+res = t.distinct( "a.1" ).sort();
+assert.eq( "2,3,4" , res.toString() , "A3" );
+
+// With index.
+t.ensureIndex( { a : 1 } );
+res = t.distinct( "a" ).sort();
+assert.eq( "1,2,3,4,5,9" , res.toString() , "A4" );
+
+// Array element 0 with index.
+res = t.distinct( "a.0" ).sort();
+assert.eq( "1,2,3" , res.toString() , "A5" );
+
+// Array element 1 with index.
+res = t.distinct( "a.1" ).sort();
+assert.eq( "2,3,4" , res.toString() , "A6" );
+
+//t.drop();
+
+t.save( { a : [{b:"a"}, {b:"d"}] , c : 12 } );
+t.save( { a : [{b:"b"}, {b:"d"}] , c : 12 } );
+t.save( { a : [{b:"c"}, {b:"e"}] , c : 12 } );
+t.save( { a : [{b:"c"}, {b:"f"}] , c : 12 } );
+t.save( { a : [] , c : 12 } );
+t.save( { a : { b : "z"} , c : 12 } );
+
+// Without index.
+res = t.distinct( "a.b" ).sort();
+assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B1" );
+
+// Array element 0 without index
+res = t.distinct( "a.0.b" ).sort();
+assert.eq( "a,b,c" , res.toString() , "B2" );
+
+// Array element 1 without index
+res = t.distinct( "a.1.b" ).sort();
+assert.eq( "d,e,f" , res.toString() , "B3" );
+
+// With index.
+t.ensureIndex( { "a.b" : 1 } );
+res = t.distinct( "a.b" );
+res.sort();
+assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B4" );
+
+// _id as a document containing an array
+t.save( { _id : { a : [1,2,3] } } )
+t.save( { _id : { a : [2,3,4] } } )
+t.save( { _id : { a : [3,4,5] } } )
+t.save( { _id : { a : 9 } } )
+
+// Without index.
+res = t.distinct( "_id.a" ).sort();
+assert.eq( "1,2,3,4,5,9" , res.toString() , "C1" );
+
+// Array element 0 without index.
+res = t.distinct( "_id.a.0" ).sort();
+assert.eq( "1,2,3" , res.toString() , "C2" );
+
+// Array element 1 without index.
+res = t.distinct( "_id.a.1" ).sort();
+assert.eq( "2,3,4" , res.toString() , "C3" );
+
+// With index.
+t.ensureIndex( { "_id.a" : 1 } );
+res = t.distinct( "_id.a" ).sort();
+assert.eq( "1,2,3,4,5,9" , res.toString() , "C4" );
+
+// Array element 0 with index.
+res = t.distinct( "_id.a.0" ).sort();
+assert.eq( "1,2,3" , res.toString() , "C5" );
+
+// Array element 1 with index.
+res = t.distinct( "_id.a.1" ).sort();
+assert.eq( "2,3,4" , res.toString() , "C6" );
diff --git a/jstests/core/distinct_index1.js b/jstests/core/distinct_index1.js
new file mode 100644
index 00000000000..6de1a7927e4
--- /dev/null
+++ b/jstests/core/distinct_index1.js
@@ -0,0 +1,72 @@
+
+t = db.distinct_index1
+t.drop();
+
+function r( x ){
+ return Math.floor( Math.sqrt( x * 123123 ) ) % 10;
+}
+
+function d( k , q ){
+ return t.runCommand( "distinct" , { key : k , query : q || {} } )
+}
+
+for ( i=0; i<1000; i++ ){
+ o = { a : r(i*5) , b : r(i) };
+ t.insert( o );
+}
+
+x = d( "a" );
+assert.eq( 1000 , x.stats.n , "AA1" )
+assert.eq( 1000 , x.stats.nscanned , "AA2" )
+assert.eq( 1000 , x.stats.nscannedObjects , "AA3" )
+
+x = d( "a" , { a : { $gt : 5 } } );
+assert.eq( 398 , x.stats.n , "AB1" )
+assert.eq( 1000 , x.stats.nscanned , "AB2" )
+assert.eq( 1000 , x.stats.nscannedObjects , "AB3" )
+
+x = d( "b" , { a : { $gt : 5 } } );
+assert.eq( 398 , x.stats.n , "AC1" )
+assert.eq( 1000 , x.stats.nscanned , "AC2" )
+assert.eq( 1000 , x.stats.nscannedObjects , "AC3" )
+
+
+
+t.ensureIndex( { a : 1 } )
+
+x = d( "a" );
+// There are only 10 values. We use the fast distinct hack and only examine each value once.
+assert.eq( 10 , x.stats.n , "BA1" )
+assert.eq( 10 , x.stats.nscanned , "BA2" )
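+// (Hedged addition) With the fast distinct hack the scan should be covered by the index;
+// print the fetch counter rather than asserting, since exact accounting may vary by version.
+print( "BA nscannedObjects: " + x.stats.nscannedObjects );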
+
+x = d( "a" , { a : { $gt : 5 } } );
+// Only 4 values of a are greater than 5, and we use the fast distinct hack.
+assert.eq(4, x.stats.n , "BB1" )
+assert.eq(4, x.stats.nscanned , "BB2" )
+assert.eq(0, x.stats.nscannedObjects , "BB3" )
+
+x = d( "b" , { a : { $gt : 5 } } );
+// We can't use the fast distinct hack here because the distinct is over 'b' while the index is on 'a'.
+assert.eq( 398 , x.stats.n , "BC1" )
+assert.eq( 398 , x.stats.nscanned , "BC2" )
+assert.eq( 398 , x.stats.nscannedObjects , "BC3" )
+
+// Check proper nscannedObjects count when using a query optimizer cursor.
+t.dropIndexes();
+t.ensureIndex( { a : 1, b : 1 } );
+x = d( "b" , { a : { $gt : 5 }, b : { $gt : 5 } } );
+printjson(x);
+// 171 is the # of results we happen to scan when we don't use a distinct
+// hack. When we use the distinct hack we scan 16, currently.
+assert.lte(x.stats.n, 171);
+assert.eq( 0 , x.stats.nscannedObjects , "CA1" )
+
+
+
+// Cursor name should not be empty when using $or with hashed index.
+//
+t.dropIndexes();
+t.ensureIndex( { a : "hashed" } );
+x = d( "a", { $or : [ { a : 3 }, { a : 5 } ] } );
+assert.eq( 188, x.stats.n, "DA1" );
+assert.neq( "", x.stats.cursor, "DA2" );
diff --git a/jstests/core/distinct_index2.js b/jstests/core/distinct_index2.js
new file mode 100644
index 00000000000..67d28b8b95e
--- /dev/null
+++ b/jstests/core/distinct_index2.js
@@ -0,0 +1,41 @@
+t = db.distinct_index2;
+t.drop();
+
+t.ensureIndex( { a : 1 , b : 1 } )
+t.ensureIndex( { c : 1 } )
+
+// Uniformly distributed dataset.
+// If we use a randomly generated dataset, we might not
+// generate all the distinct values in the range [0, 10).
+for ( var a=0; a<10; a++ ) {
+ for ( var b=0; b<10; b++ ) {
+ for ( var c=0; c<10; c++ ) {
+ t.insert( { a : a , b : b , c : c } );
+ }
+ }
+}
+
+correct = []
+for ( i=0; i<10; i++ )
+ correct.push( i )
+
+function check( field ){
+ res = t.distinct( field )
+ res = res.sort()
+ assert.eq( correct , res , "check: " + field );
+
+ if ( field != "a" ){
+ res = t.distinct( field , { a : 1 } )
+ res = res.sort()
+ assert.eq( correct , res , "check 2: " + field );
+ }
+}
+
+check( "a" )
+check( "b" )
+check( "c" )
+
+// A hashed index should produce the same results.
+t.dropIndexes();
+t.ensureIndex( { a : "hashed" } );
+check( "a" );
diff --git a/jstests/core/distinct_speed1.js b/jstests/core/distinct_speed1.js
new file mode 100644
index 00000000000..4cae5b0ae06
--- /dev/null
+++ b/jstests/core/distinct_speed1.js
@@ -0,0 +1,26 @@
+
+t = db.distinct_speed1;
+
+t.drop();
+for ( var i=0; i<10000; i++ ){
+ t.save( { x : i % 10 } );
+}
+
+assert.eq( 10 , t.distinct("x").length , "A1" );
+
+function fast(){
+ t.find().explain().millis;
+}
+
+function slow(){
+ t.distinct("x");
+}
+
+for ( i=0; i<3; i++ ){
+ print( "it: " + Date.timeFunc( fast ) );
+ print( "di: " + Date.timeFunc( slow ) );
+}
+
+
+t.ensureIndex( { x : 1 } );
+t.distinct( "x" , { x : 5 } )
diff --git a/jstests/core/drop.js b/jstests/core/drop.js
new file mode 100644
index 00000000000..154c35d1db3
--- /dev/null
+++ b/jstests/core/drop.js
@@ -0,0 +1,25 @@
+var coll = db.jstests_drop;
+
+coll.drop();
+
+res = coll.runCommand("drop");
+assert( !res.ok, tojson( res ) );
+
+
+assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "A");
+coll.save({});
+assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "B");
+coll.ensureIndex({a : 1});
+assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "C");
+assert.commandWorked(db.runCommand({drop : coll.getName()}));
+assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "D");
+
+coll.ensureIndex({a : 1});
+assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "E");
+assert.commandWorked(db.runCommand({deleteIndexes : coll.getName(), index : "*"}),
+ "delete indexes A");
+assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "G");
+
+// make sure we can still use it
+coll.save({});
+assert.eq(1, coll.find().hint("_id_").toArray().length, "H");
diff --git a/jstests/core/drop2.js b/jstests/core/drop2.js
new file mode 100644
index 00000000000..5eef20adc61
--- /dev/null
+++ b/jstests/core/drop2.js
@@ -0,0 +1,52 @@
+var coll = db.jstests_drop2;
+coll.drop();
+
+function debug( x ) {
+ printjson( x );
+}
+
+coll.save( {} );
+
+function getOpId( drop ) {
+ var inProg = db.currentOp().inprog;
+ debug( inProg );
+ for ( var id in inProg ) {
+ var op = inProg[ id ];
+ if ( drop ) {
+ if ( op.query && op.query.drop && op.query.drop == coll.getName() ) {
+ return op.opid;
+ }
+ } else {
+ if ( op.query && op.query.query && op.query.query.$where && op.ns == (coll + "") ) {
+ return op.opid;
+ }
+ }
+ }
+ return null;
+}
+
+var shell1 = startParallelShell( "print(\"Count thread started\");"
+ + "db.getMongo().getCollection(\""
+ + (coll + "") + "\")"
+ + ".count( { $where: function() {"
+ + "while( 1 ) { sleep( 1 ); } } } );"
+ + "print(\"Count thread terminating\");" );
+countOpId = null;
+assert.soon( function() { countOpId = getOpId( false ); return countOpId; } );
+
+var shell2 = startParallelShell( "print(\"Drop thread started\");"
+ + "print(\"drop result: \" + "
+ + "db.getMongo().getCollection(\""
+ + (coll + "") + "\")"
+ + ".drop() );"
+ + "print(\"Drop thread terminating\")" );
+dropOpId = null;
+assert.soon( function() { dropOpId = getOpId( true ); return dropOpId; } );
+
+db.killOp( dropOpId );
+db.killOp( countOpId );
+
+shell1();
+shell2();
+
+coll.drop(); // prior to the SERVER-1818 fix, this drop would fail
diff --git a/jstests/core/drop3.js b/jstests/core/drop3.js
new file mode 100644
index 00000000000..d97b40989b8
--- /dev/null
+++ b/jstests/core/drop3.js
@@ -0,0 +1,25 @@
+t = db.jstests_drop3;
+sub = t.sub;
+
+t.drop();
+sub.drop();
+
+
+for (var i = 0; i < 10; i++){
+ t.insert({});
+ sub.insert({});
+}
+
+var cursor = t.find().batchSize(2);
+var subcursor = sub.find().batchSize(2);
+
+cursor.next();
+subcursor.next();
+assert.eq( cursor.objsLeftInBatch(), 1 );
+assert.eq( subcursor.objsLeftInBatch(), 1 );
+
+t.drop(); // should invalidate cursor, but not subcursor
+
+assert.throws( function(){ cursor.itcount() } ); // throws "cursor doesn't exist on server" error on getMore
+assert.eq( subcursor.itcount(), 9 ); // one already seen
+
diff --git a/jstests/core/drop_index.js b/jstests/core/drop_index.js
new file mode 100644
index 00000000000..8e2278d00c5
--- /dev/null
+++ b/jstests/core/drop_index.js
@@ -0,0 +1,20 @@
+
+t = db.dropIndex;
+t.drop();
+
+t.insert( { _id : 1 , a : 2 , b : 3 } );
+assert.eq( 1 , t.getIndexes().length , "A1" );
+
+t.ensureIndex( { a : 1 } );
+t.ensureIndex( { b : 1 } );
+assert.eq( 3 , t.getIndexes().length , "A2" );
+
+x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } );
+assert.eq( 2 , t.getIndexes().length , "B1 " + tojson(x) );
+
+x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } )
+assert.eq( 1 , t.getIndexes().length , "B2" );
+
+// ensure you can recreate indexes, even if you don't use the dropIndex method
+t.ensureIndex({a:1});
+assert.eq(2 , t.getIndexes().length);
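+
+// (Hedged note) _genIndexName derives the default index name from the key pattern
+// ( { a : 1 } -> "a_1" ), which is how the dropIndexes command above addressed it by name.
+assert.eq( "a_1" , t._genIndexName( { a : 1 } ) );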
diff --git a/jstests/core/dropdb.js b/jstests/core/dropdb.js
new file mode 100644
index 00000000000..0c080ffb286
--- /dev/null
+++ b/jstests/core/dropdb.js
@@ -0,0 +1,25 @@
+// Test that a db does not exist after it is dropped.
+// Disabled in the small oplog suite because the slave may create a master db
+// with the same name as the dropped db when requesting a clone.
+
+m = db.getMongo();
+baseName = "jstests_dropdb";
+ddb = db.getSisterDB( baseName );
+
+print("initial dbs: " + tojson(m.getDBNames()));
+
+function check(shouldExist) {
+ var dbs = m.getDBNames();
+ assert.eq(Array.contains(dbs, baseName), shouldExist,
+ "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist."
+ + " dbs: " + tojson(dbs) + "\n" + tojson( m.getDBs() ) );
+}
+
+ddb.c.save( {} );
+check(true);
+
+ddb.dropDatabase();
+check(false);
+
+ddb.dropDatabase();
+check(false);
diff --git a/jstests/core/dropdb_race.js b/jstests/core/dropdb_race.js
new file mode 100644
index 00000000000..61fa0887ef5
--- /dev/null
+++ b/jstests/core/dropdb_race.js
@@ -0,0 +1,41 @@
+// test dropping a db with simultaneous commits
+
+m = db.getMongo();
+baseName = "jstests_dur_droprace";
+d = db.getSisterDB(baseName);
+t = d.foo;
+
+assert(d.adminCommand({ setParameter: 1, syncdelay: 5 }).ok);
+
+var s = 0;
+
+var start = new Date();
+
+for (var pass = 0; pass < 100; pass++) {
+ if (pass % 2 == 0) {
+ // sometimes wait for create db first, to vary the timing of things
+ var options = ( pass % 4 == 0 )? { writeConcern: { j: true }} : undefined;
+ t.insert({}, options);
+ }
+ t.insert({ x: 1 });
+ t.insert({ x: 3 });
+ t.ensureIndex({ x: 1 });
+ sleep(s);
+ if (pass % 37 == 0)
+ d.adminCommand("closeAllDatabases");
+ else if (pass % 13 == 0)
+ t.drop();
+ else if (pass % 17 == 0)
+ t.dropIndexes();
+ else
+ d.dropDatabase();
+ if (pass % 7 == 0)
+ d.runCommand({getLastError:1,j:1});
+ d.getLastError();
+ s = (s + 1) % 25;
+ //print(pass);
+ if ((new Date()) - start > 60000) {
+ print("stopping early");
+ break;
+ }
+}
diff --git a/jstests/core/elemMatchProjection.js b/jstests/core/elemMatchProjection.js
new file mode 100644
index 00000000000..73088fab699
--- /dev/null
+++ b/jstests/core/elemMatchProjection.js
@@ -0,0 +1,265 @@
+// Tests for $elemMatch projections and $ positional operator projection.
+t = db.SERVER828Test;
+t.drop();
+
+date1 = new Date();
+
+// Insert various styles of arrays
+for ( i = 0; i < 100; i++ ) {
+ t.insert({ group: 1, x: [ 1, 2, 3, 4, 5 ] });
+ t.insert({ group: 2, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ] });
+ t.insert({ group: 3, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
+ y: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
+ t.insert({ group: 3, x: [ { a: 1, b: 3 }, { a: -6, c: 3 } ] });
+ t.insert({ group: 4, x: [ { a: 1, b: 4 }, { a: -6, c: 3 } ] });
+ t.insert({ group: 5, x: [ new Date(), 5, 10, 'string', new ObjectId(), 123.456 ] });
+ t.insert({ group: 6, x: [ { a: 'string', b: date1 },
+ { a: new ObjectId(), b: 1.2345 },
+ { a: 'string2', b: date1 } ] });
+ t.insert({ group: 7, x: [ { y: [ 1, 2, 3, 4 ] } ] });
+ t.insert({ group: 8, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
+ t.insert({ group: 9, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] },
+ { z: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
+ t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
+ y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
+ t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
+ y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
+ t.insert({ group: 11, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
+ covered: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
+ t.insert({ group: 12, x: { y : [ { a: 1, b: 1 }, { a: 1, b: 2} ] } } );
+ t.insert({ group: 13, x: [ { a: 1, b: 1 }, {a: 1, b: 2 } ] } );
+ t.insert({ group: 13, x: [ { a: 1, b: 2 }, {a: 1, b: 1 } ] } );
+}
+t.ensureIndex({group:1, 'y.d':1}); // for regular index test (not sure if this is really adding anything useful)
+t.ensureIndex({group:1, covered:1}); // for covered index test
+
+//
+// SERVER-828: Positional operator ($) projection tests
+//
+assert.eq( 1,
+ t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).toArray()[0].x.length,
+ "single object match (array length match)" );
+
+assert.eq( 2,
+ t.find( { group:3, 'x.a':1 }, { 'x.$':1 } ).toArray()[0].x[0].b,
+ "single object match first" );
+
+assert.eq( undefined,
+ t.find( { group:3, 'x.a':2 }, { _id:0, 'x.$':1 } ).toArray()[0]._id,
+ "single object match with filtered _id" );
+
+assert.eq( 1,
+ t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { _id:1 } ).toArray()[0].x.length,
+ "sorted single object match with filtered _id (array length match)" );
+
+assert.eq( 1,
+ t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':1 } ).toArray()[0].x.length,
+ "single object match with elemMatch" );
+
+assert.eq( 1,
+ t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':1} } ).toArray()[0].x.length,
+ "single object match with elemMatch and positive slice" );
+
+assert.eq( 1,
+ t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':-1} } ).toArray()[0].x.length,
+ "single object match with elemMatch and negative slice" );
+
+assert.eq( 1,
+ t.find( { 'group':12, 'x.y.a':1 }, { 'x.y.$': 1 } ).toArray()[0].x.y.length,
+ "single object match with two level dot notation" );
+
+assert.eq( 1,
+ t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { x:1 } ).toArray()[0].x.length,
+ "sorted object match (array length match)" );
+
+assert.eq( { aa:1, dd:5 },
+ t.find( { group:3, 'y.dd':5 }, { 'y.$':1 } ).toArray()[0].y[0],
+ "single object match (value match)" );
+
+assert.throws( function() {
+ t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).toArray();
+ }, [], "throw on invalid projection (field mismatch)" );
+
+assert.throws( function() {
+ t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).sort( { x:1 } ).toArray()
+ }, [], "throw on invalid sorted projection (field mismatch)" );
+
+assert.throws( function() {
+ t.find( { group:3, 'x.a':2 }, { 'x.$':1, group:0 } ).sort( { x:1 } ).toArray();
+ }, [], "throw on invalid projection combination (include and exclude)" );
+
+assert.throws( function() {
+ t.find( { group:3, 'x.a':1, 'y.aa':1 }, { 'x.$':1, 'y.$':1 } ).toArray();
+ }, [], "throw on multiple projections" );
+
+assert.throws( function() {
+ t.find( { group:3}, { 'g.$':1 } ).toArray()
+ }, [], "throw on invalid projection (non-array field)" );
+
+assert.eq( { aa:1, dd:5 },
+ t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).toArray()[0].covered[0],
+ "single object match (covered index)" );
+
+assert.eq( { aa:1, dd:5 },
+ t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).sort( { covered:1 } ).toArray()[0].covered[0],
+ "single object match (sorted covered index)" );
+
+assert.eq( 1,
+ t.find( { group:10, 'y.d': 4 }, { 'y.$':1 } ).toArray()[0].y.length,
+ "single object match (regular index" );
+
+if (false) {
+
+ assert.eq( 2, // SERVER-1013: allow multiple positional operators
+ t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].y[0].bb,
+ "multi match, multi proj 1" );
+
+    assert.eq( 5, // SERVER-1013: allow multiple positional operators
+ t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].x[0].d,
+ "multi match, multi proj 2" );
+
+ assert.eq( 2, // SERVER-1243: allow multiple results from same matcher
+ t.find( { group:2, x: { $elemMatchAll: { a:1 } } }, { 'x.$':1 } ).toArray()[0].x.length,
+ "multi element match, single proj" );
+
+    assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator
+ t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1 } ).toArray()[0].y[0].bb,
+ "multi match, single proj 1" );
+
+ assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator
+ t.find( { group:3, 'y.cc':3, 'x.b':2 }, { 'x.$':1 } ).toArray()[0].x[0].b,
+ "multi match, single proj 2" );
+
+}
+
+//
+// SERVER-2238: $elemMatch projections
+//
+assert.eq( -6,
+ t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x[0].a,
+ "single object match" );
+
+assert.eq( 1,
+ t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x.length,
+ "filters non-matching array elements" );
+
+assert.eq( 1,
+ t.find( { group:4 }, { x: { $elemMatch: { a:-6, c:3 } } } ).toArray()[0].x.length,
+ "filters non-matching array elements with multiple elemMatch criteria" );
+
+assert.eq( 1,
+ t.find( { group: 13 }, { 'x' : {'$elemMatch' : { a: {$gt: 0, $lt: 2} } } } ).toArray()[0].x.length,
+ "filters non-matching array elements with multiple criteria for a single element in the array" );
+
+assert.eq( 3,
+ t.find( { group:4 }, { x: { $elemMatch: { a:{ $lt:1 } } } } ).toArray()[0].x[0].c,
+ "object operator match" );
+
+assert.eq( [ 4 ],
+ t.find( { group:1 }, { x: { $elemMatch: { $in:[100, 4, -123] } } } ).toArray()[0].x,
+ "$in number match" );
+
+assert.eq( [ {a : 1, b : 2} ],
+ t.find( { group:2 }, { x: { $elemMatch: { a: { $in:[1] } } } } ).toArray()[0].x,
+ "$in number match" );
+
+assert.eq( [1],
+ t.find( { group:1 }, { x: { $elemMatch: { $nin:[4, 5, 6] } } } ).toArray()[0].x,
+ "$nin number match" );
+
+// but this may become a user assertion, since a single element of an array can't match more than one value
+assert.eq( [ 1 ],
+           t.find( { group:1 }, { x: { $elemMatch: { $all:[1] } } } ).toArray()[0].x,
+           "$all number match" );
+
+assert.eq( [ { a: 'string', b: date1 } ],
+ t.find( { group:6 }, { x: { $elemMatch: { a:'string' } } } ).toArray()[0].x,
+ "mixed object match on string eq" );
+
+assert.eq( [ { a: 'string2', b: date1 } ],
+ t.find( { group:6 }, { x: { $elemMatch: { a:/ring2/ } } } ).toArray()[0].x,
+ "mixed object match on regexp" );
+
+assert.eq( [ { a: 'string', b: date1 } ],
+ t.find( { group:6 }, { x: { $elemMatch: { a: { $type: 2 } } } } ).toArray()[0].x,
+ "mixed object match on type" );
+
+assert.eq( [ { a : 2, c : 3} ],
+ t.find( { group:2 }, { x: { $elemMatch: { a: { $ne: 1 } } } } ).toArray()[0].x,
+ "mixed object match on ne" );
+
+assert.eq( [ {a : 1, d : 5} ],
+ t.find( { group:3 }, { x: { $elemMatch: { d: { $exists: true } } } } ).toArray()[0].x,
+ "mixed object match on exists" );
+
+assert.eq( [ {a : 2, c : 3} ],
+ t.find( { group:3 }, { x: { $elemMatch: { a: { $mod : [2, 0 ] } } } } ).toArray()[0].x,
+ "mixed object match on mod" );
+
+assert.eq( {"x" : [ { "a" : 1, "b" : 2 } ], "y" : [ { "c" : 3, "d" : 4 } ] },
+ t.find( { group:10 }, { _id : 0,
+ x: { $elemMatch: { a: 1 } },
+ y: { $elemMatch: { c: 3 } } } ).toArray()[0],
+ "multiple $elemMatch on unique fields 1" );
+
+if (false) {
+
+ assert.eq( 2 , // SERVER-1243: handle multiple $elemMatch results
+ t.find( { group:4 }, { x: { $elemMatchAll: { a:{ $lte:2 } } } } ).toArray()[0].x.length,
+ "multi object match" );
+
+ assert.eq( 3 , // SERVER-1243: handle multiple $elemMatch results
+ t.find( { group:1 }, { x: { $elemMatchAll: { $in:[1, 2, 3] } } } ).toArray()[0].x.length,
+ "$in number match" );
+
+ assert.eq( 1 , // SERVER-1243: handle multiple $elemMatch results
+ t.find( { group:5 }, { x: { $elemMatchAll: { $ne: 5 } } } ).toArray()[0].x.length,
+ "single mixed type match 1" );
+
+ assert.eq( 1 , // SERVER-831: handle nested arrays
+ t.find( { group:9 }, { 'x.y': { $elemMatch: { a: 1 } } } ).toArray()[0].x.length,
+ "single dotted match" );
+
+}
+
+//
+// Batch/getMore tests
+//
+// test positional operator across multiple batches
+a = t.find( { group:3, 'x.b':2 }, { 'x.$':1 } ).batchSize(1)
+while ( a.hasNext() ) {
+ assert.eq( 2, a.next().x[0].b, "positional getMore test");
+}
+
+// test $elemMatch operator across multiple batches
+a = t.find( { group:3 }, { x:{$elemMatch:{a:1}} } ).batchSize(1)
+while ( a.hasNext() ) {
+    assert.eq( 1, a.next().x[0].a, "elemMatch getMore test");
+}
+
+// Verify the positional update operator matches the same element as the positional find. This
+// is to ensure consistent behavior with updates until SERVER-1013 is resolved, at which point
+// the following tests should be updated.
+
+t.update({ group: 10, 'x.a': 3, 'y.c':1 }, { $set:{'x.$':100} }, false, true );
+// updated the wrong element, so the following assertions should be true
+assert.eq( 100,
+ t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0],
+ "wrong single element match after update" );
+
+assert.eq( 100,
+ t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0],
+ "wrong single element match after update" );
+
+t.remove({ group: 10 });
+t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
+ y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
+
+t.update({ group: 10, 'y.c':1, 'x.a': 3 }, { $set:{'x.$':100} }, false, true );
+// updated the correct element
+assert.eq( 100,
+ t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0],
+ "right single element match after update" );
+assert.eq( 100,
+ t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0],
+ "right single element match after update" );
diff --git a/jstests/core/error2.js b/jstests/core/error2.js
new file mode 100644
index 00000000000..8c27d6250e1
--- /dev/null
+++ b/jstests/core/error2.js
@@ -0,0 +1,21 @@
+// Test that the client gets a stack trace on a failed invoke
+
+f = db.jstests_error2;
+
+f.drop();
+
+f.save( {a:1} );
+
+assert.throws(
+ function(){
+ c = f.find({$where : function(){ return a() }});
+ c.next();
+ }
+);
+
+assert.throws(
+ function(){
+ db.eval( function() { return a(); } );
+ }
+);
+
diff --git a/jstests/core/error5.js b/jstests/core/error5.js
new file mode 100644
index 00000000000..5884d20d8c1
--- /dev/null
+++ b/jstests/core/error5.js
@@ -0,0 +1,8 @@
+
+t = db.error5
+t.drop();
+
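+// save() requires a document; a bare number is expected to be rejected with a thrown error.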
+assert.throws( function(){ t.save( 4 ); printjson( t.findOne() ) } , null , "A" );
+t.save( { a : 1 } )
+assert.eq( 1 , t.count() , "B" );
+
diff --git a/jstests/core/eval0.js b/jstests/core/eval0.js
new file mode 100644
index 00000000000..4375cace839
--- /dev/null
+++ b/jstests/core/eval0.js
@@ -0,0 +1,8 @@
+
+assert.eq( 17 , db.eval( function(){ return 11 + 6; } ) , "A" );
+assert.eq( 17 , db.eval( function( x ){ return 10 + x; } , 7 ) , "B" );
+
+// check that functions in system.js work
+db.system.js.insert({_id: "add", value: function(x,y){ return x + y;}});
+assert.eq( 20 , db.eval( "this.add(15, 5);" ) , "C" );
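+
+// (Hedged sketch, assuming db.loadServerScripts() is available in this shell) the same
+// stored function can be pulled into the local shell scope and invoked directly:
+db.loadServerScripts();
+assert.eq( 20 , add( 15 , 5 ) , "D" );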
+
diff --git a/jstests/core/eval1.js b/jstests/core/eval1.js
new file mode 100644
index 00000000000..4a5ca75f09b
--- /dev/null
+++ b/jstests/core/eval1.js
@@ -0,0 +1,17 @@
+
+t = db.eval1;
+t.drop();
+
+t.save( { _id : 1 , name : "eliot" } );
+t.save( { _id : 2 , name : "sara" } );
+
+f = function(id){
+ return db["eval1"].findOne( { _id : id } ).name;
+}
+
+
+assert.eq( "eliot" , f( 1 ) , "A" );
+assert.eq( "sara" , f( 2 ) , "B" );
+assert.eq( "eliot" , db.eval( f , 1 ) , "C" );
+assert.eq( "sara" , db.eval( f , 2 ) , "D" );
+
diff --git a/jstests/core/eval2.js b/jstests/core/eval2.js
new file mode 100644
index 00000000000..6e39bb4a7bd
--- /dev/null
+++ b/jstests/core/eval2.js
@@ -0,0 +1,28 @@
+
+t = db.eval2;
+t.drop();
+t.save({a:1});
+t.save({a:1});
+
+var f = db.group(
+ {
+ ns: t.getName(),
+ key: { a:true},
+ cond: { a:1 },
+ reduce: function(obj,prev) { prev.csum++; } ,
+ initial: { csum: 0}
+ }
+);
+
+assert(f[0].a == 1 && f[0].csum == 2 , "on db" );
+
+var f = t.group(
+ {
+ key: { a:true},
+ cond: { a:1 },
+ reduce: function(obj,prev) { prev.csum++; } ,
+ initial: { csum: 0}
+ }
+);
+
+assert(f[0].a == 1 && f[0].csum == 2 , "on coll" );
diff --git a/jstests/core/eval3.js b/jstests/core/eval3.js
new file mode 100644
index 00000000000..404d4d863b7
--- /dev/null
+++ b/jstests/core/eval3.js
@@ -0,0 +1,21 @@
+
+t = db.eval3;
+t.drop();
+
+t.save( { _id : 1 , name : "eliot" } );
+assert.eq( 1 , t.count() , "A" );
+
+function z( a , b ){
+ db.eval3.save( { _id : a , name : b } );
+ return b;
+}
+
+z( 2 , "sara" );
+assert.eq( 2 , t.count() , "B" );
+
+assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
+
+assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" );
+assert.eq( 3 , t.count() , "D" );
+
+assert.eq( "eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
diff --git a/jstests/core/eval4.js b/jstests/core/eval4.js
new file mode 100644
index 00000000000..31d6ef0c2a8
--- /dev/null
+++ b/jstests/core/eval4.js
@@ -0,0 +1,23 @@
+
+t = db.eval4;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 3 , t.count() , "A" );
+
+function f( x ){
+ db.eval4.remove( { a : x } );
+}
+
+f( 2 );
+assert.eq( 2 , t.count() , "B" );
+
+db.eval( f , 2 );
+assert.eq( 2 , t.count() , "C" );
+
+db.eval( f , 3 );
+assert.eq( 1 , t.count() , "D" );
+
diff --git a/jstests/core/eval5.js b/jstests/core/eval5.js
new file mode 100644
index 00000000000..a9223a555a6
--- /dev/null
+++ b/jstests/core/eval5.js
@@ -0,0 +1,23 @@
+
+t = db.eval5;
+t.drop();
+
+t.save( { a : 1 , b : 2 , c : 3 } );
+
+assert.eq( 3 ,
+ db.eval(
+ function(z){
+ return db.eval5.find().toArray()[0].c;
+ }
+ ) ,
+ "something weird A"
+ );
+
+assert.isnull(
+ db.eval(
+ function(z){
+ return db.eval5.find( {} , { a : 1 } ).toArray()[0].c;
+ }
+ ),
+ "field spec didn't work"
+ );
diff --git a/jstests/core/eval6.js b/jstests/core/eval6.js
new file mode 100644
index 00000000000..5fe096974c6
--- /dev/null
+++ b/jstests/core/eval6.js
@@ -0,0 +1,15 @@
+
+t = db.eval6;
+t.drop();
+
+t.save( { a : 1 } );
+
+db.eval(
+ function(){
+ o = db.eval6.findOne();
+ o.b = 2;
+ db.eval6.save( o );
+ }
+);
+
+assert.eq( 2 , t.findOne().b );
diff --git a/jstests/core/eval7.js b/jstests/core/eval7.js
new file mode 100644
index 00000000000..45e06af276c
--- /dev/null
+++ b/jstests/core/eval7.js
@@ -0,0 +1,3 @@
+
+assert.eq( 6 , db.eval( "5 + 1" ) , "A" )
+assert.throws( function(z){ db.eval( "5 + function x; + 1" )} );
diff --git a/jstests/core/eval8.js b/jstests/core/eval8.js
new file mode 100644
index 00000000000..072a890e80a
--- /dev/null
+++ b/jstests/core/eval8.js
@@ -0,0 +1,19 @@
+
+t = db.eval8;
+t.drop();
+
+x = { a : 1 , b : 2 };
+t.save( x );
+x = t.findOne();
+
+assert( x.a && x.b , "A" );
+delete x.b;
+
+assert( x.a && ! x.b , "B" )
+x.b = 3;
+assert( x.a && x.b , "C" );
+assert.eq( 3 , x.b , "D" );
+
+t.save( x );
+y = t.findOne();
+assert.eq( tojson( x ) , tojson( y ) , "E" );
diff --git a/jstests/core/eval9.js b/jstests/core/eval9.js
new file mode 100644
index 00000000000..9c6642901e4
--- /dev/null
+++ b/jstests/core/eval9.js
@@ -0,0 +1,22 @@
+
+a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ]
+
+for ( var i=0; i<a.length; i++ ){
+ var ret = db.eval( "function( a , i ){ return a[i]; }" , a , i );
+ assert.eq( typeof( a[i] ) , typeof( ret ) , "type test" );
+ assert.eq( a[i] , ret , "val test: " + typeof( a[i] ) );
+}
+
+db.eval9.drop();
+db.eval9.save( { a : 17 } );
+
+assert.eq( 1 , db.eval( "return db.eval9.find().toArray()" ).length , "A" );
+assert.eq( 17 , db.eval( "return db.eval9.find().toArray()" )[0].a , "B" );
+
+// just to make sure these things don't crash (but may throw an exception)
+try {
+ db.eval( "return db.eval9.find()" );
+ db.eval( "return db.eval9" );
+ db.eval( "return db" );
+ db.eval( "return print" );
+} catch (ex) { }
\ No newline at end of file
diff --git a/jstests/core/eval_nolock.js b/jstests/core/eval_nolock.js
new file mode 100644
index 00000000000..2ab96a302a5
--- /dev/null
+++ b/jstests/core/eval_nolock.js
@@ -0,0 +1,16 @@
+
+t = db.eval_nolock
+t.drop();
+
+for ( i=0; i<10; i++ )
+ t.insert( { _id : i } );
+
+res = db.runCommand( { eval :
+ function(){
+ db.eval_nolock.insert( { _id : 123 } );
+ return db.eval_nolock.count();
+ }
+ , nolock : true } );
+
+assert.eq( 11 , res.retval , "A" )
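+// (Hedged note) nolock:true asks the server not to hold the global write lock around the
+// eval; the insert inside the function takes its own locks, so the count sees it along
+// with the 10 documents inserted above (hence 11).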
+
diff --git a/jstests/core/evala.js b/jstests/core/evala.js
new file mode 100644
index 00000000000..ed72582fbb6
--- /dev/null
+++ b/jstests/core/evala.js
@@ -0,0 +1,9 @@
+
+t = db.evala;
+t.drop()
+
+t.save( { x : 5 } )
+
+assert.eq( 5 , db.eval( "function(){ return db.evala.findOne().x; }" ) , "A" );
+assert.eq( 5 , db.eval( "/* abc */function(){ return db.evala.findOne().x; }" ) , "B" );
+
diff --git a/jstests/core/evalb.js b/jstests/core/evalb.js
new file mode 100644
index 00000000000..0caae39498b
--- /dev/null
+++ b/jstests/core/evalb.js
@@ -0,0 +1,40 @@
+// Check the return value of a db.eval function running a database query, and ensure the function's
+// contents are logged in the profile log.
+
+// Use a reserved database name to avoid a conflict in the parallel test suite.
+var stddb = db;
+var db = db.getSisterDB( 'evalb' );
+
+function profileCursor() {
+ return db.system.profile.find( { user:username + "@" + db.getName() } );
+}
+
+function lastOp() {
+ return profileCursor().sort( { $natural:-1 } ).next();
+}
+
+try {
+
+ username = 'jstests_evalb_user';
+ db.createUser({user: username, pwd: 'password', roles: jsTest.basicUserRoles});
+ db.auth( username, 'password' );
+
+ t = db.evalb;
+ t.drop();
+
+ t.save( { x:3 } );
+
+ assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'A' );
+
+ db.setProfilingLevel( 2 );
+
+ assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'B' );
+
+ o = lastOp();
+ assert( tojson( o ).indexOf( 'findOne().x' ) > 0, 'C : ' + tojson( o ) );
+}
+finally {
+
+ db.setProfilingLevel(0);
+ db = stddb;
+}
diff --git a/jstests/core/evalc.js b/jstests/core/evalc.js
new file mode 100644
index 00000000000..0320ecd5133
--- /dev/null
+++ b/jstests/core/evalc.js
@@ -0,0 +1,25 @@
+t = db.jstests_evalc;
+t.drop();
+
+t2 = db.evalc_done
+t2.drop()
+
+for( i = 0; i < 10; ++i ) {
+ t.save( {i:i} );
+}
+
+// SERVER-1610
+
+assert.eq( 0 , t2.count() , "X1" )
+
+s = startParallelShell( "print( 'starting forked:' + Date() ); for ( i=0; i<50000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); " )
+
+print( "starting eval: " + Date() )
+while ( true ) {
+ db.eval( "db.jstests_evalc.count( {i:10} );" );
+ if ( t2.count() > 0 )
+ break;
+}
+print( "end eval: " + Date() )
+
+s();
diff --git a/jstests/core/evald.js b/jstests/core/evald.js
new file mode 100644
index 00000000000..7bb0eb825b1
--- /dev/null
+++ b/jstests/core/evald.js
@@ -0,0 +1,97 @@
+t = db.jstests_evald;
+t.drop();
+
+function debug( x ) {
+// printjson( x );
+}
+
+for( i = 0; i < 10; ++i ) {
+ t.save( {i:i} );
+}
+
+function op( ev, where ) {
+ p = db.currentOp().inprog;
+ debug( p );
+ for ( var i in p ) {
+ var o = p[ i ];
+ if ( where ) {
+ if ( o.active && o.query && o.query.query && o.query.query.$where && o.ns == "test.jstests_evald" ) {
+ return o.opid;
+ }
+ } else {
+ if ( o.active && o.query && o.query.$eval && o.query.$eval == ev ) {
+ return o.opid;
+ }
+ }
+ }
+ return -1;
+}
+
+function doIt( ev, wait, where ) {
+
+ if ( where ) {
+ s = startParallelShell( ev );
+ } else {
+ s = startParallelShell( "db.eval( '" + ev + "' )" );
+ }
+
+ o = null;
+ assert.soon( function() { o = op( ev, where ); return o != -1 } );
+
+ if ( wait ) {
+ sleep( 2000 );
+ }
+
+ debug( "going to kill" );
+
+ db.killOp( o );
+
+ debug( "sent kill" );
+
+ s();
+
+}
+
+// nested scope with nested invoke()
+doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", true, true);
+doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", false, true);
+
+// simple tight loop tests with callback
+doIt("while(1) { sleep(1); }", false);
+doIt("while(1) { sleep(1); }", true);
+
+// simple tight loop tests without callback
+doIt("while(1) {;}", false);
+doIt("while(1) {;}", true);
+
+// the for loops are currently required, as a spawned op masks the parent op - see SERVER-1931
+doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", true);
+doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", false);
+doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", true);
+doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", false);
+
+// try/catch with tight-loop kill tests. Catch testing is important
+// due to v8::TerminateExecution internals.
+// native callback with nested invoke(), drop JS exceptions
+doIt("while(1) { " +
+ " for(var i = 0; i < 10000; ++i) {;} " +
+ " try { " +
+ " db.jstests_evald.count({i:10}); " +
+ " } catch (e) {} " +
+ "}", true );
+
+// native callback, drop JS exceptions
+doIt("while(1) { " +
+ " try { " +
+ " while(1) { " +
+ " sleep(1); " +
+ " } " +
+ " } catch (e) {} " +
+ "}", true );
+
+// no native callback and drop JS exceptions
+doIt("while(1) { " +
+ " try { " +
+ " while(1) {;} " +
+ " } catch (e) {} " +
+ "}", true );
diff --git a/jstests/core/evale.js b/jstests/core/evale.js
new file mode 100644
index 00000000000..af5a303f167
--- /dev/null
+++ b/jstests/core/evale.js
@@ -0,0 +1,5 @@
+t = db.jstests_evale;
+t.drop();
+
+db.eval( function() { return db.jstests_evale.count( { $where:function() { return true; } } ) } );
+db.eval( "db.jstests_evale.count( { $where:function() { return true; } } )" ); \ No newline at end of file
diff --git a/jstests/core/evalf.js b/jstests/core/evalf.js
new file mode 100644
index 00000000000..01b7907ba93
--- /dev/null
+++ b/jstests/core/evalf.js
@@ -0,0 +1,27 @@
+// test that killing a parent op interrupts the child op
+
+t = db.jstests_evalf;
+t.drop();
+
+//if ( typeof _threadInject == "undefined" ) { // don't run in v8 mode - SERVER-1900
+
+// the code in eval must be under 512 chars because otherwise it's not displayed in curOp()
+try {
+db.eval( function() {
+ opid = null;
+ while( opid == null ) {
+ ops = db.currentOp().inprog;
+ for( i in ops ) {
+ o = ops[ i ];
+ if ( o.active && o.query && o.query.$eval ) { opid = o.opid; }
+ }}
+ db.jstests_evalf.save( {"opid":opid} );
+ db.jstests_evalf.count( { $where:function() { var id = db.jstests_evalf.findOne().opid; db.killOp( id ); while( 1 ) { ; } } } );
+ } );
+} catch (ex) {
+    // An exception is thrown in V8 when the job gets killed; this does not seem like bad behavior.
+}
+
+// make sure server and JS still work
+db.eval( function() { db.jstests_evalf.count(); });
+//}
diff --git a/jstests/core/exists.js b/jstests/core/exists.js
new file mode 100644
index 00000000000..3f1e904e52f
--- /dev/null
+++ b/jstests/core/exists.js
@@ -0,0 +1,49 @@
+t = db.jstests_exists;
+t.drop();
+
+t.save( {} );
+t.save( {a:1} );
+t.save( {a:{b:1}} );
+t.save( {a:{b:{c:1}}} );
+t.save( {a:{b:{c:{d:null}}}} );
+
+function dotest( n ){
+
+ assert.eq( 5, t.count() , n );
+ assert.eq( 1, t.count( {a:null} ) , n );
+ assert.eq( 2, t.count( {'a.b':null} ) , n );
+ assert.eq( 3, t.count( {'a.b.c':null} ) , n );
+ assert.eq( 5, t.count( {'a.b.c.d':null} ) , n );
+
+ assert.eq( 5, t.count() , n );
+ assert.eq( 4, t.count( {a:{$ne:null}} ) , n );
+ assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n );
+ assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n );
+ assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n );
+
+ assert.eq( 4, t.count( {a: {$exists:true}} ) , n );
+ assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n );
+ assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n );
+ assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n );
+
+ assert.eq( 1, t.count( {a: {$exists:false}} ) , n );
+ assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n );
+ assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n );
+ assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n );
+}
+
+dotest( "before index" )
+t.ensureIndex( { "a" : 1 } )
+t.ensureIndex( { "a.b" : 1 } )
+t.ensureIndex( { "a.b.c" : 1 } )
+t.ensureIndex( { "a.b.c.d" : 1 } )
+dotest( "after index" )
+assert.eq( 1, t.find( {a: {$exists:false}} ).hint( {a:1} ).itcount() );
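+// (Hedged addition) $type:10 (null) matches only the explicitly stored null leaf, unlike
+// the {field:null} predicates above, which also match documents missing the field.
+assert.eq( 1, t.count( {'a.b.c.d': {$type: 10}} ) );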
+
+t.drop();
+
+t.save( {r:[{s:1}]} );
+assert( t.findOne( {'r.s':{$exists:true}} ) );
+assert( !t.findOne( {'r.s':{$exists:false}} ) );
+assert( !t.findOne( {'r.t':{$exists:true}} ) );
+assert( t.findOne( {'r.t':{$exists:false}} ) );
diff --git a/jstests/core/exists2.js b/jstests/core/exists2.js
new file mode 100644
index 00000000000..e925c168f50
--- /dev/null
+++ b/jstests/core/exists2.js
@@ -0,0 +1,16 @@
+
+t = db.exists2;
+t.drop();
+
+t.save( { a : 1 , b : 1 } )
+t.save( { a : 1 , b : 1 , c : 1 } )
+
+assert.eq( 2 , t.find().itcount() , "A1" );
+assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" );
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" );
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "A4" );
+
+t.ensureIndex( { a : 1 , b : 1 , c : 1 } )
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" );
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "B2" );
+
diff --git a/jstests/core/exists3.js b/jstests/core/exists3.js
new file mode 100644
index 00000000000..53a69d6c3bb
--- /dev/null
+++ b/jstests/core/exists3.js
@@ -0,0 +1,21 @@
+// Check exists with non empty document, based on SERVER-2470 example.
+
+t = db.jstests_exists3;
+t.drop();
+
+t.insert({a: 1, b: 2});
+
+assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
+assert.eq( 1, t.count({c: {$exists: false}}) );
+assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
+assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
+
+// now we have an index on the sort key
+t.ensureIndex({c: -1})
+
+assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
+assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
+// still ok without the $exists
+assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
+// and ok with a convoluted $not $exists
+assert.eq( 1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount() );
diff --git a/jstests/core/exists4.js b/jstests/core/exists4.js
new file mode 100644
index 00000000000..fb801ed62e9
--- /dev/null
+++ b/jstests/core/exists4.js
@@ -0,0 +1,20 @@
+// Check various exists cases, based on SERVER-1735 example.
+
+t = db.jstests_exists4;
+t.drop();
+
+t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1});
+t.insert({ date: new Date("08/27/2010"), tot_visit: 100});
+t.insert({ date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77});
+t.insert({ date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23});
+t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "and...@spacca.org", tot_visit: 11});
+t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@gmail.com", tot_visit: 5});
+t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@progloedizioni.com", tot_visit: 7});
+
+assert.eq( 6, t.find({date: new Date("08/27/2010")}).count() );
+assert.eq( 5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count() );
+assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count() );
+assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: null}).count() );
+assert.eq( 3, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}}).count() );
+assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: false}}).count() );
+assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null}).count() );
diff --git a/jstests/core/exists5.js b/jstests/core/exists5.js
new file mode 100644
index 00000000000..a90a94f908f
--- /dev/null
+++ b/jstests/core/exists5.js
@@ -0,0 +1,33 @@
+// Test some $not/$exists cases.
+
+t = db.jstests_exists5;
+t.drop();
+
+t.save( {a:1} );
+assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
+assert.eq( 1, t.count( {'c.d':{$not:{$exists:true}}} ) );
+assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
+assert.eq( 0, t.count( {'c.d':{$not:{$exists:false}}} ) );
+
+t.drop();
+t.save( {a:{b:1}} );
+assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
+assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
+
+t.drop();
+t.save( {a:[1]} );
+assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
+assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
+
+t.drop();
+t.save( {a:[{b:1}]} );
+assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
+assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
diff --git a/jstests/core/exists6.js b/jstests/core/exists6.js
new file mode 100644
index 00000000000..2fa4ba85d49
--- /dev/null
+++ b/jstests/core/exists6.js
@@ -0,0 +1,79 @@
+// SERVER-393 Test indexed matching with $exists.
+
+t = db.jstests_exists6;
+t.drop();
+
+t.ensureIndex( {b:1} );
+t.save( {} );
+t.save( {b:1} );
+t.save( {b:null} );
+
+//---------------------------------
+
+function checkIndexUse( query, usesIndex, index, bounds ) {
+ var x = t.find( query ).explain()
+ if ( usesIndex ) {
+ assert.eq( x.cursor.indexOf(index), 0 , tojson(x) );
+ if ( ! x.indexBounds ) x.indexBounds = {}
+ assert.eq( bounds, x.indexBounds.b , tojson(x) );
+ }
+ else {
+ assert.eq( 'BasicCursor', x.cursor, tojson(x) );
+ }
+}
+
+function checkExists( query, usesIndex, bounds ) {
+ checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds );
+ // Whether we use an index or not, we will always scan all docs.
+ assert.eq( 3, t.find( query ).explain().nscanned );
+ // 2 docs will match.
+ assert.eq( 2, t.find( query ).itcount() );
+}
+
+function checkMissing( query, usesIndex, bounds ) {
+ checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds );
+ // Nscanned changes based on index usage.
+ if ( usesIndex ) assert.eq( 2, t.find( query ).explain().nscanned );
+ else assert.eq( 3, t.find( query ).explain().nscanned );
+ // 1 doc is missing 'b'.
+ assert.eq( 1, t.find( query ).itcount() );
+}
+
+function checkExistsCompound( query, usesIndex, bounds ) {
+ checkIndexUse( query, usesIndex, 'BtreeCursor', bounds );
+    // All 3 matching docs are scanned whether or not the index is used.
+    assert.eq( 3, t.find( query ).explain().nscanned );
+ // 2 docs have a:1 and b:exists.
+ assert.eq( 2, t.find( query ).itcount() );
+}
+
+function checkMissingCompound( query, usesIndex, bounds ) {
+ checkIndexUse( query, usesIndex, 'BtreeCursor', bounds );
+ // two possible indexes to use
+ // 1 doc should match
+ assert.eq( 1, t.find( query ).itcount() );
+}
+
+//---------------------------------
+
+var allValues = [ [ { $minElement:1 }, { $maxElement:1 } ] ];
+var nullNull = [ [ null, null ] ];
+
+// Basic cases
+checkExists( {b:{$exists:true}}, true, allValues );
+// We change this to not -> not -> exists:true and get allValues for the bounds,
+// but we use a BasicCursor?
+checkExists( {b:{$not:{$exists:false}}}, false, allValues );
+checkMissing( {b:{$exists:false}}, true, nullNull );
+checkMissing( {b:{$not:{$exists:true}}}, true, nullNull );
+
+// Now check existence of second compound field.
+t.ensureIndex( {a:1,b:1} );
+t.save( {a:1} );
+t.save( {a:1,b:1} );
+t.save( {a:1,b:null} );
+
+checkExistsCompound( {a:1,b:{$exists:true}}, true, allValues );
+checkExistsCompound( {a:1,b:{$not:{$exists:false}}}, true, allValues );
+checkMissingCompound( {a:1,b:{$exists:false}}, true, nullNull );
+checkMissingCompound( {a:1,b:{$not:{$exists:true}}}, true, nullNull );
diff --git a/jstests/core/exists7.js b/jstests/core/exists7.js
new file mode 100644
index 00000000000..91fd589f30d
--- /dev/null
+++ b/jstests/core/exists7.js
@@ -0,0 +1,21 @@
+
+// Test that non-boolean value types are allowed with the $exists spec. SERVER-2322
+
+t = db.jstests_exists7;
+t.drop();
+
+function testIntegerExistsSpec() {
+ t.remove({});
+ t.save( {} );
+ t.save( {a:1} );
+ t.save( {a:2} );
+ t.save( {a:3, b:3} );
+ t.save( {a:4, b:4} );
+
+ assert.eq( 2, t.count( {b:{$exists:1}} ) );
+ assert.eq( 3, t.count( {b:{$exists:0}} ) );
+}
+
+testIntegerExistsSpec();
+t.ensureIndex( {b:1} );
+testIntegerExistsSpec();
diff --git a/jstests/core/exists8.js b/jstests/core/exists8.js
new file mode 100644
index 00000000000..ca62ebeb9ab
--- /dev/null
+++ b/jstests/core/exists8.js
@@ -0,0 +1,76 @@
+// Test $exists with array element field names SERVER-2897
+
+t = db.jstests_exists8;
+t.drop();
+
+t.save( {a:[1]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
+
+t.remove({});
+t.save( {a:[1,2]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
+assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
+
+t.remove({});
+t.save( {a:[{}]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
+
+t.remove({});
+t.save( {a:[{},{}]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
+assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
+
+t.remove({});
+t.save( {a:[{'b':2},{'a':1}]} );
+assert.eq( 1, t.count( {'a.a':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.1.a':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.0.a':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[1]]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.0.0':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.0.0':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.0.0.0':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.0.0.0':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[[1]]]} );
+assert.eq( 1, t.count( {'a.0.0.0':{$exists:true}} ) );
+
+t.remove({});
+t.save( {a:[[{b:1}]]} );
+assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 1, t.count( {'a.0.b':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.0.b':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[],[{b:1}]]} );
+assert.eq( 0, t.count( {'a.0.b':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.0.b':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[],[{b:1}]]} );
+assert.eq( 1, t.count( {'a.1.b':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.1.b':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[],[{b:1}]]} );
+assert.eq( 1, t.count( {'a.1.0.b':{$exists:true}} ) );
+assert.eq( 0, t.count( {'a.1.0.b':{$exists:false}} ) );
+
+t.remove({});
+t.save( {a:[[],[{b:1}]]} );
+assert.eq( 0, t.count( {'a.1.1.b':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.1.1.b':{$exists:false}} ) );
diff --git a/jstests/core/exists9.js b/jstests/core/exists9.js
new file mode 100644
index 00000000000..66378d1b424
--- /dev/null
+++ b/jstests/core/exists9.js
@@ -0,0 +1,41 @@
+// SERVER-393 Test exists with various empty array and empty object cases.
+
+t = db.jstests_exists9;
+t.drop();
+
+// Check existence of missing nested field.
+t.save( {a:{}} );
+assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
+assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
+
+// With index.
+t.ensureIndex( {'a.b':1} );
+assert.eq( 1, t.find( {'a.b':{$exists:false}} ).hint( {'a.b':1} ).itcount() );
+assert.eq( 0, t.find( {'a.b':{$exists:true}} ).hint( {'a.b':1} ).itcount() );
+
+t.drop();
+
+// Check that an empty array 'exists'.
+t.save( {} );
+t.save( {a:[]} );
+assert.eq( 1, t.count( {a:{$exists:true}} ) );
+assert.eq( 1, t.count( {a:{$exists:false}} ) );
+
+// With index.
+t.ensureIndex( {a:1} );
+assert.eq( 1, t.find( {a:{$exists:true}} ).hint( {a:1} ).itcount() );
+assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).itcount() );
+assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).explain().nscanned );
+
+t.drop();
+
+// Check that an indexed field within an empty array does not exist.
+t.save( {a:{'0':1}} );
+t.save( {a:[]} );
+assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
+assert.eq( 1, t.count( {'a.0':{$exists:false}} ) );
+
+// With index.
+t.ensureIndex( {'a.0':1} );
+assert.eq( 1, t.find( {'a.0':{$exists:true}} ).hint( {'a.0':1} ).itcount() );
+assert.eq( 1, t.find( {'a.0':{$exists:false}} ).hint( {'a.0':1} ).itcount() );
diff --git a/jstests/core/existsa.js b/jstests/core/existsa.js
new file mode 100644
index 00000000000..9ef7e9f374c
--- /dev/null
+++ b/jstests/core/existsa.js
@@ -0,0 +1,114 @@
+// Sparse indexes are disallowed for $exists:false queries. SERVER-3918
+
+t = db.jstests_existsa;
+t.drop();
+
+t.save( {} );
+t.save( { a:1 } );
+t.save( { a:{ x:1 }, b:1 } );
+
+/** Configure testing of an index { <indexKeyField>:1 }. */
+function setIndex( _indexKeyField ) {
+ indexKeyField = _indexKeyField;
+ indexKeySpec = {};
+ indexKeySpec[ indexKeyField ] = 1;
+ t.ensureIndex( indexKeySpec, { sparse:true } );
+ indexCursorName = 'BtreeCursor ' + indexKeyField + '_1';
+}
+setIndex( 'a' );
+
+/** Validate the prefix of 'str'. */
+function assertPrefix( prefix, str ) {
+ assert.eq( prefix, str.substring( 0, prefix.length ) );
+}
+
+/** @return count when hinting the index to use. */
+function hintedCount( query ) {
+ assertPrefix( indexCursorName, t.find( query ).hint( indexKeySpec ).explain().cursor );
+ return t.find( query ).hint( indexKeySpec ).itcount();
+}
+
+/** The query field does not exist and the sparse index is not used without a hint. */
+function assertMissing( query, expectedMissing, expectedIndexedMissing ) {
+ expectedMissing = expectedMissing || 1;
+ expectedIndexedMissing = expectedIndexedMissing || 0;
+ assert.eq( expectedMissing, t.count( query ) );
+ assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
+ // We also shouldn't get a different count depending on whether
+ // an index is used or not.
+ assert.eq( expectedIndexedMissing, hintedCount( query ) );
+}
+
+/** The query field exists and the sparse index is used without a hint. */
+function assertExists( query, expectedExists ) {
+ expectedExists = expectedExists || 2;
+ assert.eq( expectedExists, t.count( query ) );
+ assert.eq( 0, t.find( query ).explain().cursor.indexOf('BtreeCursor') );
+ // An $exists:true predicate generates no index filters. Add another predicate on the index key
+ // to trigger use of the index.
+ andClause = {}
+ andClause[ indexKeyField ] = { $ne:null };
+ Object.extend( query, { $and:[ andClause ] } );
+ assert.eq( expectedExists, t.count( query ) );
+ assertPrefix( indexCursorName, t.find( query ).explain().cursor );
+ assert.eq( expectedExists, hintedCount( query ) );
+}
+
+/** The query field exists and the sparse index is not used without a hint. */
+function assertExistsUnindexed( query, expectedExists ) {
+ expectedExists = expectedExists || 2;
+ assert.eq( expectedExists, t.count( query ) );
+ assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
+ // Even with another predicate on the index key, the sparse index is disallowed.
+ andClause = {}
+ andClause[ indexKeyField ] = { $ne:null };
+ Object.extend( query, { $and:[ andClause ] } );
+ assert.eq( expectedExists, t.count( query ) );
+ assert.eq( 'BasicCursor', t.find( query ).explain().cursor );
+ assert.eq( expectedExists, hintedCount( query ) );
+}
+
+// $exists:false queries match the proper number of documents and disallow the sparse index.
+assertMissing( { a:{ $exists:false } } );
+assertMissing( { a:{ $not:{ $exists:true } } } );
+assertMissing( { $and:[ { a:{ $exists:false } } ] } );
+assertMissing( { $or:[ { a:{ $exists:false } } ] } );
+assertMissing( { $nor:[ { a:{ $exists:true } } ] } );
+assertMissing( { 'a.x':{ $exists:false } }, 2, 1 );
+
+// Currently a sparse index is disallowed even if the $exists:false query is on a different field.
+assertMissing( { b:{ $exists:false } }, 2, 1 );
+assertMissing( { b:{ $exists:false }, a:{ $ne:6 } }, 2, 1 );
+assertMissing( { b:{ $not:{ $exists:true } } }, 2, 1 );
+
+// Top level $exists:true queries match the proper number of documents
+// and use the sparse index on { a : 1 }.
+assertExists( { a:{ $exists:true } } );
+
+// Nested $exists queries match the proper number of documents and disallow the sparse index.
+assertExistsUnindexed( { $nor:[ { a:{ $exists:false } } ] } );
+assertExistsUnindexed( { $nor:[ { 'a.x':{ $exists:false } } ] }, 1 );
+assertExistsUnindexed( { a:{ $not:{ $exists:false } } } );
+
+// Nested $exists queries disallow the sparse index in some cases where it is not strictly
+// necessary to do so. (Descriptive tests.)
+assertExistsUnindexed( { $nor:[ { b:{ $exists:false } } ] }, 1 ); // Unindexed field.
+assertExists( { $or:[ { a:{ $exists:true } } ] } ); // $exists:true not $exists:false.
+
+// Behavior is similar with $elemMatch.
+t.drop();
+t.save( { a:[ {} ] } );
+t.save( { a:[ { b:1 } ] } );
+t.save( { a:[ { b:1 } ] } );
+setIndex( 'a.b' );
+
+assertMissing( { a:{ $elemMatch:{ b:{ $exists:false } } } } );
+// A $elemMatch predicate is treated as nested, and the index should be used for $exists:true.
+assertExists( { a:{ $elemMatch:{ b:{ $exists:true } } } } );
+
+// A non-sparse index is not disallowed for $exists:false queries.
+t.drop();
+t.save( {} );
+t.ensureIndex( { a:1 } );
+assert.eq( 1, t.find( { a:{ $exists:false } } ).itcount() );
+assert.eq( 'BtreeCursor a_1', t.find( { a:{ $exists:false } } ).explain().cursor );
diff --git a/jstests/core/existsb.js b/jstests/core/existsb.js
new file mode 100644
index 00000000000..a212be145c0
--- /dev/null
+++ b/jstests/core/existsb.js
@@ -0,0 +1,76 @@
+// Tests for $exists against documents that store a null value
+//
+// A document with a missing value for an indexed field
+// is indexed *as if* it had the value 'null' explicitly.
+// Therefore:
+// { b : 1 }
+// { a : null, b : 1 }
+// look identical based on a standard index on { a : 1 }.
+//
+// -- HOWEVER!! --
+// A sparse index on { a : 1 } would include { a : null, b : 1 },
+// but would not include { b : 1 }. In this case, the two documents
+// are treated differently.
+//
+// Also, super special edge case around sparse, compound indexes
+// from Mathias:
+// If we have a sparse index on { a : 1, b : 1 }
+// And we insert docs {}, { a : 1 },
+// { b : 1 }, and { a : 1, b : 1 }
+// everything but {} will have an index entry.
+// Let's make sure we handle this properly!
+
+t = db.jstests_existsb;
+t.drop();
+
+t.save( {} );
+t.save( { a: 1 } );
+t.save( { b: 1 } );
+t.save( { a: 1, b: null } );
+t.save( { a: 1, b: 1 } );
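+// Note that a field explicitly set to null still 'exists', so { a: 1, b: null }
+// counts toward b:{ $exists: true } below.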
+
+/** run a series of checks, just on the number of docs found */
+function checkExistsNull() {
+ // Basic cases
+ assert.eq( 3, t.count({ a:{ $exists: true }}) );
+ assert.eq( 2, t.count({ a:{ $exists: false }}) );
+ assert.eq( 3, t.count({ b:{ $exists: true }}) );
+ assert.eq( 2, t.count({ b:{ $exists: false }}) );
+ // With negations
+ assert.eq( 3, t.count({ a:{ $not:{ $exists: false }}}) );
+ assert.eq( 2, t.count({ a:{ $not:{ $exists: true }}}) );
+ assert.eq( 3, t.count({ b:{ $not:{ $exists: false }}}) );
+ assert.eq( 2, t.count({ b:{ $not:{ $exists: true }}}) );
+ // Both fields
+ assert.eq( 2, t.count({ a:1, b: { $exists: true }}) );
+ assert.eq( 1, t.count({ a:1, b: { $exists: false }}) );
+ assert.eq( 1, t.count({ a:{ $exists: true }, b:1}) );
+ assert.eq( 1, t.count({ a:{ $exists: false }, b:1}) );
+ // Both fields, both $exists
+ assert.eq( 2, t.count({ a:{ $exists: true }, b:{ $exists: true }}) );
+ assert.eq( 1, t.count({ a:{ $exists: true }, b:{ $exists: false }}) );
+ assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: true }}) );
+ assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: false }}) );
+}
+
+// with no index, make sure we get correct results
+checkExistsNull();
+
+// try with a standard index
+t.ensureIndex({ a : 1 });
+checkExistsNull();
+
+// try with a sparse index
+t.dropIndexes();
+t.ensureIndex({ a : 1 }, { sparse:true });
+checkExistsNull();
+
+// try with a compound index
+t.dropIndexes();
+t.ensureIndex({ a : 1, b : 1 });
+checkExistsNull();
+
+// try with sparse compound index
+t.dropIndexes();
+t.ensureIndex({ a : 1, b : 1 }, { sparse:true });
+checkExistsNull();
diff --git a/jstests/core/explain1.js b/jstests/core/explain1.js
new file mode 100644
index 00000000000..4c92b102e38
--- /dev/null
+++ b/jstests/core/explain1.js
@@ -0,0 +1,48 @@
+
+t = db.explain1;
+t.drop();
+
+for ( var i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 50 } };
+
+assert.eq( 49 , t.find( q ).count() , "A" );
+assert.eq( 49 , t.find( q ).itcount() , "B" );
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 49 , t.find( q ).count() , "D" );
+assert.eq( 49 , t.find( q ).itcount() , "E" );
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" );
+
+assert.eq( 49 , t.find(q).explain().n , "G" );
+assert.eq( 20 , t.find(q).limit(20).explain().n , "H" );
+assert.eq( 20 , t.find(q).limit(-20).explain().n , "I" );
+assert.eq( 49 , t.find(q).batchSize(20).explain().n , "J" );
+
+// verbose explain output with stats
+// display index bounds
+
+var explainGt = t.find({x: {$gt: 5}}).explain(true);
+var boundsVerboseGt = explainGt.stats.children[0].boundsVerbose;
+
+print('explain stats for $gt = ' + tojson(explainGt.stats));
+
+var explainGte = t.find({x: {$gte: 5}}).explain(true);
+var boundsVerboseGte = explainGte.stats.children[0].boundsVerbose;
+
+print('explain stats for $gte = ' + tojson(explainGte.stats));
+
+print('index bounds for $gt = ' + tojson(explainGt.indexBounds));
+print('index bounds for $gte = ' + tojson(explainGte.indexBounds));
+
+print('verbose bounds for $gt = ' + tojson(boundsVerboseGt));
+print('verbose bounds for $gte = ' + tojson(boundsVerboseGte));
+
+// Since the verbose bounds are opaque, all we try to confirm is that the
+// verbose bounds for $gt differ from those generated for $gte.
+assert.neq(boundsVerboseGt, boundsVerboseGte,
+ 'verbose bounds for $gt and $gte should not be the same');
diff --git a/jstests/core/explain2.js b/jstests/core/explain2.js
new file mode 100644
index 00000000000..b70ffdc0b1e
--- /dev/null
+++ b/jstests/core/explain2.js
@@ -0,0 +1,27 @@
+
+t = db.explain2
+t.drop();
+
+t.ensureIndex( { a : 1 , b : 1 } );
+
+for ( i=1; i<10; i++ ){
+ t.insert( { _id : i , a : i , b : i , c : i } );
+}
+
+function go( q , c , b , o ){
+ var e = t.find( q ).hint( {a:1,b:1} ).explain();
+ assert.eq( c , e.n , "count " + tojson( q ) )
+ assert.eq( b , e.nscanned , "nscanned " + tojson( q ) )
+ assert.eq( o , e.nscannedObjects , "nscannedObjects " + tojson( q ) )
+}
+
+q = { a : { $gt : 3 } }
+go( q , 6 , 6 , 6 );
+
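+// b is part of the index, so only the one matching document needs to be fetched.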
+q.b = 5
+go( q , 1 , 6 , 1 );
+
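+// c is not in the index, so every scanned index entry requires fetching the full document.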
+delete q.b
+q.c = 5
+go( q , 1 , 6 , 6 );
+
diff --git a/jstests/core/explain3.js b/jstests/core/explain3.js
new file mode 100644
index 00000000000..c205e57252c
--- /dev/null
+++ b/jstests/core/explain3.js
@@ -0,0 +1,23 @@
+/** SERVER-2451 Kill cursor while explain is yielding */
+
+t = db.jstests_explain3;
+t.drop();
+
+t.ensureIndex( {i:1} );
+for( var i = 0; i < 10000; ++i ) {
+ t.save( {i:i,j:0} );
+}
+
+s = startParallelShell( "sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );" );
+
+try {
+ t.find( {i:{$gt:-1},j:1} ).hint( {i:1} ).explain()
+} catch (e) {
+ print( "got exception" );
+ printjson( e );
+}
+
+s();
+
+// Sanity check to make sure mongod didn't seg fault.
+assert.eq( 10000, t.count() );
diff --git a/jstests/core/explain4.js b/jstests/core/explain4.js
new file mode 100644
index 00000000000..d6d3d818a72
--- /dev/null
+++ b/jstests/core/explain4.js
@@ -0,0 +1,68 @@
+// Basic validation of explain output fields.
+
+t = db.jstests_explain4;
+t.drop();
+
+function checkField( explain, name, value ) {
+ assert( explain.hasOwnProperty( name ) );
+ if ( value != null ) {
+ assert.eq( value, explain[ name ], name );
+ // Check that the value is of the expected type. SERVER-5288
+ assert.eq( typeof( value ), typeof( explain[ name ] ), 'type ' + name );
+ }
+}
+
+function checkNonCursorPlanFields( explain, matches, n ) {
+ checkField( explain, "n", n );
+ checkField( explain, "nscannedObjects", matches );
+ checkField( explain, "nscanned", matches );
+}
+
+function checkPlanFields( explain, matches, n ) {
+ checkField( explain, "cursor", "BasicCursor" );
+ // index related fields do not appear in non-indexed plan
+ assert(!("indexBounds" in explain));
+ checkNonCursorPlanFields( explain, matches, n );
+}
+
+function checkFields( matches, sort, limit ) {
+ cursor = t.find();
+ if ( sort ) {
+ print("sort is {a:1}");
+ cursor.sort({a:1});
+ }
+ if ( limit ) {
+ print("limit = " + limit);
+ cursor.limit( limit );
+ }
+ explain = cursor.explain( true );
+ printjson( explain );
+ checkPlanFields( explain, matches, matches > 0 ? 1 : 0 );
+ checkField( explain, "scanAndOrder", sort );
+ checkField( explain, "millis" );
+ checkField( explain, "nYields" );
+ checkField( explain, "nChunkSkips", 0 );
+ checkField( explain, "isMultiKey", false );
+ checkField( explain, "indexOnly", false );
+ checkField( explain, "server" );
+ checkField( explain, "allPlans" );
+ explain.allPlans.forEach( function( x ) { checkPlanFields( x, matches, matches ); } );
+}
+
+checkFields( 0, false );
+
+// If there's nothing in the collection, there's no point in verifying that a sort
+// is done.
+// checkFields( 0, true );
+
+t.save( {} );
+checkFields( 1, false );
+checkFields( 1, true );
+
+t.save( {} );
+checkFields( 1, false, 1 );
+
+// Check basic fields with multiple clauses.
+t.save( { _id:0 } );
+explain = t.find( { $or:[ { _id:0 }, { _id:1 } ] } ).explain( true );
+checkNonCursorPlanFields( explain, 1, 1 );
diff --git a/jstests/core/explain5.js b/jstests/core/explain5.js
new file mode 100644
index 00000000000..a90f0726317
--- /dev/null
+++ b/jstests/core/explain5.js
@@ -0,0 +1,38 @@
+// Check that the explain result count does proper deduping.
+
+t = db.jstests_explain5;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+t.save( {a:[1,2,3],b:[4,5,6]} );
+for( i = 0; i < 10; ++i ) {
+ t.save( {} );
+}
+
+// Check with a single in order plan.
+
+explain = t.find( {a:{$gt:0}} ).explain( true );
+assert.eq( 1, explain.n );
+assert.eq( 1, explain.allPlans[ 0 ].n );
+
+// Check with a single out of order plan.
+
+explain = t.find( {a:{$gt:0}} ).sort( {z:1} ).hint( {a:1} ).explain( true );
+assert.eq( 1, explain.n );
+assert.eq( 1, explain.allPlans[ 0 ].n );
+
+// Check with multiple plans.
+
+explain = t.find( {a:{$gt:0},b:{$gt:0}} ).explain( true );
+assert.eq( 1, explain.n );
+assert.eq( 1, explain.allPlans[ 0 ].n );
+assert.eq( 1, explain.allPlans[ 1 ].n );
+
+explain = t.find( {$or:[{a:{$gt:0},b:{$gt:0}},{a:{$gt:-1},b:{$gt:-1}}]} ).explain( true );
+assert.eq( 1, explain.n );
+// Check 'n' for every alternative query plan.
+for (var i = 0; i < explain.allPlans.length; ++i) {
+ assert.eq( 1, explain.allPlans[i].n );
+}
diff --git a/jstests/core/explain6.js b/jstests/core/explain6.js
new file mode 100644
index 00000000000..47d8d2fd731
--- /dev/null
+++ b/jstests/core/explain6.js
@@ -0,0 +1,25 @@
+// Test explain result count when a skip parameter is used.
+
+t = db.jstests_explain6;
+t.drop();
+
+t.save( {} );
+explain = t.find().skip( 1 ).explain( true );
+assert.eq( 0, explain.n );
+// With only one plan, the skip information is known for the plan. This is an arbitrary
+// implementation detail, but it changes the way n is calculated.
+assert.eq( 0, explain.allPlans[ 0 ].n );
+
+t.ensureIndex( {a:1} );
+explain = t.find( {a:null,b:null} ).skip( 1 ).explain( true );
+assert.eq( 0, explain.n );
+
+printjson( explain );
+assert.eq( 0, explain.allPlans[ 0 ].n );
+
+t.dropIndexes();
+explain = t.find().skip( 1 ).sort({a:1}).explain( true );
+// Skip is applied for an in memory sort.
+assert.eq( 0, explain.n );
+printjson(explain);
+assert.eq( 0, explain.allPlans[ 0 ].n );
diff --git a/jstests/core/explain7.js b/jstests/core/explain7.js
new file mode 100644
index 00000000000..df277aaf211
--- /dev/null
+++ b/jstests/core/explain7.js
@@ -0,0 +1,181 @@
+// Test cases for explain()'s nscannedObjects. SERVER-4161
+
+t = db.jstests_explain7;
+t.drop();
+
+t.save( { a:1 } );
+t.ensureIndex( { a:1 } );
+
+function assertExplain( expected, explain, checkAllPlans ) {
+ for( field in expected ) {
+ assert.eq( expected[ field ], explain[ field ], field );
+ }
+ if ( checkAllPlans && explain.allPlans && explain.allPlans.length == 1 ) {
+ for( field in expected ) {
+ assert.eq( expected[ field ], explain.allPlans[ 0 ][ field ], field );
+ }
+ }
+ return explain;
+}
+
+function assertHintedExplain( expected, cursor ) {
+ return assertExplain( expected, cursor.hint( { a:1 } ).explain( true ), true );
+}
+
+function assertUnhintedExplain( expected, cursor, checkAllPlans ) {
+ return assertExplain( expected, cursor.explain( true ), checkAllPlans );
+}
+
+// Standard query.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
+ t.find( { a:1 } ) );
+
+// Covered index query.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 /* no object loaded */ },
+ t.find( { a:1 }, { _id:0, a:1 } ) );
+
+// Covered index query, but matching requires loading document.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
+ t.find( { a:1, b:null }, { _id:0, a:1 } ) );
+
+// $returnKey query.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 },
+ t.find( { a:1 } )._addSpecial( "$returnKey", true ) );
+
+// $returnKey query but matching requires loading document.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
+ t.find( { a:1, b:null } )._addSpecial( "$returnKey", true ) );
+
+// Skip a result.
+assertHintedExplain( { n:0, nscanned:1, nscannedObjects:1 },
+ t.find( { a:1 } ).skip( 1 ) );
+
+// Cursor sorted covered index query.
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0, scanAndOrder:false },
+ t.find( { a:1 }, { _id:0, a:1 } ).sort( { a:1 } ) );
+
+t.dropIndex( { a:1 } );
+t.ensureIndex( { a:1, b:1 } );
+
+// In memory sort covered index query.
+assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1, scanAndOrder:true },
+ t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } )
+ .hint( { a:1, b:1 } ) );
+
+// In memory sort $returnKey query.
+assertUnhintedExplain( { n:1, nscanned:1, scanAndOrder:true },
+ t.find( { a:{ $gt:0 } } )._addSpecial( "$returnKey", true ).sort( { b:1 } )
+ .hint( { a:1, b:1 } ) );
+
+// In memory sort with skip.
+assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 /* The record is still loaded. */ },
+ t.find( { a:{ $gt:0 } } ).sort( { b:1 } ).skip( 1 ).hint( { a:1, b:1 } ),
+ false );
+
+// With a multikey index.
+t.drop();
+t.ensureIndex( { a:1 } );
+t.save( { a:[ 1, 2 ] } );
+
+assertHintedExplain( { n:1, scanAndOrder:false },
+ t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ) );
+assertHintedExplain( { n:1, scanAndOrder:true },
+ t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) );
+
+// Dedup matches from multiple query plans.
+t.drop();
+t.ensureIndex( { a:1, b:1 } );
+t.ensureIndex( { b:1, a:1 } );
+t.save( { a:1, b:1 } );
+
+// Document matched by three query plans.
+assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
+ t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ) );
+
+// Document matched by three query plans, with sorting.
+assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 },
+ t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).sort( { c:1 } ) );
+
+// Document matched by three query plans, with a skip.
+assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 },
+ t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).skip( 1 ) );
+
+// Hybrid ordered and unordered plans.
+
+t.drop();
+t.ensureIndex( { a:1, b:1 } );
+t.ensureIndex( { b:1 } );
+for( i = 0; i < 30; ++i ) {
+ t.save( { a:i, b:i } );
+}
+
+// Ordered plan chosen.
+assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:30,
+ scanAndOrder:false },
+ t.find( { b:{ $gte:0 } } ).sort( { a:1 } ) );
+
+// SERVER-12769: When an index is used to provide a sort, our covering
+// analysis isn't good. This could execute as a covered query, but currently
+// does not.
+/*
+// Ordered plan chosen with a covered index.
+//assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:0,
+ //scanAndOrder:false },
+ //t.find( { b:{ $gte:0 } }, { _id:0, b:1 } ).sort( { a:1 } ) );
+*/
+
+// Ordered plan chosen, with a skip. Skipped documents still count toward nscannedObjects
+// for a single plan.
+assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:29, nscanned:30, nscannedObjects:30,
+ scanAndOrder:false },
+ t.find( { b:{ $gte:0 } } ).sort( { a:1 } ).skip( 1 ) );
+
+// Unordered plan chosen.
+assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1,
+ //nscannedObjects:1, nscannedObjectsAllPlans:2,
+ scanAndOrder:true },
+ t.find( { b:1 } ).sort( { a:1 } ) );
+
+// Unordered plan chosen and projected.
+assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1,
+ scanAndOrder:true },
+ t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) );
+
+// Unordered plan chosen, with a skip.
+// Note that all plans are equally unproductive here, so we can't test which one is picked reliably.
+assertUnhintedExplain( { n:0 },
+ t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ).skip( 1 ) );
+
+// Unordered plan chosen, $returnKey specified.
+assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, scanAndOrder:true },
+ t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } )
+ ._addSpecial( "$returnKey", true ) );
+
+// Unordered plan chosen, $returnKey specified, matching requires loading document.
+assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1,
+ scanAndOrder:true },
+ t.find( { b:1, c:null }, { _id:0, b:1 } ).sort( { a:1 } )
+ ._addSpecial( "$returnKey", true ) );
+
+t.ensureIndex( { a:1, b:1, c:1 } );
+
+// Documents matched by four query plans.
+assertUnhintedExplain( { n:30, nscanned:30, nscannedObjects:30,
+ //nscannedObjectsAllPlans:90 // Not 120 because deduping occurs before
+ // loading results.
+ },
+ t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).sort( { b:1 } ) );
+
+for( i = 30; i < 150; ++i ) {
+ t.save( { a:i, b:i } );
+}
+
+explain = assertUnhintedExplain( { n:150},
+ t.find( { $or:[ { a:{ $gte:-1, $lte:200 },
+ b:{ $gte:0, $lte:201 } },
+ { a:{ $gte:0, $lte:201 },
+ b:{ $gte:-1, $lte:200 } } ] },
+ { _id:0, a:1, b:1 } ).hint( { a:1, b:1 } ) );
+printjson(explain);
+// Check nscannedObjects for each clause.
+assert.eq( 0, explain.clauses[ 0 ].nscannedObjects );
diff --git a/jstests/core/explain8.js b/jstests/core/explain8.js
new file mode 100644
index 00000000000..fde6adbd8f4
--- /dev/null
+++ b/jstests/core/explain8.js
@@ -0,0 +1,24 @@
+// Test calculation of the 'millis' field in explain output.
+
+t = db.jstests_explain8;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+for( i = 1000; i < 4000; i += 1000 ) {
+ t.save( { a:i } );
+}
+
+// Run a query with one $or clause per a-value, each of which sleeps for 'a' milliseconds.
+function slow() {
+ sleep( this.a );
+ return true;
+}
+clauses = [];
+for( i = 1000; i < 4000; i += 1000 ) {
+ clauses.push( { a:i, $where:slow } );
+}
+explain = t.find( { $or:clauses } ).explain( true );
+//printjson( explain );
+
+// Verify the duration of the whole query, and of each clause.
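+// Total sleep time is 1000 + 2000 + 3000 ms; allow 500ms of slack per clause.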
+assert.gt( explain.millis, 1000 - 500 + 2000 - 500 + 3000 - 500 );
diff --git a/jstests/core/explain9.js b/jstests/core/explain9.js
new file mode 100644
index 00000000000..80cab856aa7
--- /dev/null
+++ b/jstests/core/explain9.js
@@ -0,0 +1,24 @@
+// Test that limit is applied by explain when there are both in order and out of order candidate
+// plans. SERVER-4150
+
+t = db.jstests_explain9;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+
+for( i = 0; i < 10; ++i ) {
+ t.save( { a:i, b:0 } );
+}
+
+explain = t.find( { a:{ $gte:0 }, b:0 } ).sort( { a:1 } ).limit( 5 ).explain( true );
+// Five results are expected, matching the limit spec.
+assert.eq( 5, explain.n );
+explain.allPlans.forEach( function( x ) {
+ // Five results are expected for the in order plan.
+ if ( x.cursor == "BtreeCursor a_1" ) {
+ assert.eq( 5, x.n );
+ }
+ else {
+ assert.gte( 5, x.n );
+ }
+ } );
diff --git a/jstests/core/explain_batch_size.js b/jstests/core/explain_batch_size.js
new file mode 100644
index 00000000000..65bc1df40d7
--- /dev/null
+++ b/jstests/core/explain_batch_size.js
@@ -0,0 +1,19 @@
+// minimal test to check handling of batch size when explain info is requested
+// expected behavior is to return explain.n = total number of documents matching the query
+// batch size is also tested in the smoke test jstests/core/explain1.js, but that test
+// also covers the use of an indexed collection and includes a couple of test cases
+// using limit()
+
+t = db.explain_batch_size;
+t.drop();
+
+n = 3
+for (i=0; i<n; i++) {
+ t.save( { x : i } );
+}
+
+q = {};
+
+assert.eq( n , t.find( q ).count() , "A" );
+assert.eq( n , t.find( q ).itcount() , "B" );
+assert.eq( n , t.find( q ).batchSize(1).explain().n , "C" );
diff --git a/jstests/core/explaina.js b/jstests/core/explaina.js
new file mode 100644
index 00000000000..18c237b84c0
--- /dev/null
+++ b/jstests/core/explaina.js
@@ -0,0 +1,45 @@
+// Check explain results when an in order plan is selected among mixed in order and out of order
+// plans.
+
+t = db.jstests_explaina;
+t.drop();
+
+t.ensureIndex( { a:1 } );
+t.ensureIndex( { b:1 } );
+
+for( i = 0; i < 1000; ++i ) {
+ t.save( { a:i, b:i%3 } );
+}
+
+// Query with an initial set of documents.
+explain1 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } ).explain( true );
+
+for( i = 1000; i < 2000; ++i ) {
+ t.save( { a:i, b:i%3 } );
+}
+
+// Query with some additional documents.
+explain2 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } ).explain( true );
+
+function plan( explain, cursor ) {
+ for( i in explain.allPlans ) {
+ e = explain.allPlans[ i ];
+ if ( e.cursor == cursor ) {
+ return e;
+ }
+ }
+ assert( false );
+}
+
+// Check query totals.
+assert.eq( 333, explain1.n );
+assert.eq( 666, explain2.n );
+
+printjson(explain1);
+printjson(explain2);
+
+// Check totals for the selected in order a:1 plan.
+assert.eq( 333, plan( explain1, "BtreeCursor a_1" ).n );
+assert.eq( 1000, plan( explain1, "BtreeCursor a_1" ).nscanned );
+assert.eq( 666, plan( explain2, "BtreeCursor a_1" ).n );
+assert.eq( 2000, plan( explain2, "BtreeCursor a_1" ).nscanned );
diff --git a/jstests/core/explainb.js b/jstests/core/explainb.js
new file mode 100644
index 00000000000..ab49a38ca72
--- /dev/null
+++ b/jstests/core/explainb.js
@@ -0,0 +1,46 @@
+// nscanned and nscannedObjects report results for the winning plan; nscannedAllPlans and
+// nscannedObjectsAllPlans report results for all plans. SERVER-6268
+//
+// This file tests the output of .explain.
+
+t = db.jstests_explainb;
+t.drop();
+
+t.ensureIndex( { a:1, b:1 } );
+t.ensureIndex( { b:1, a:1 } );
+
+t.save( { a:0, b:1 } );
+t.save( { a:1, b:0 } );
+
+explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).explain( true );
+
+// We don't check explain.cursor because all plans perform the same.
+assert.eq( 2, explain.n );
+// nscanned and nscannedObjects are reported.
+assert.eq( 2, explain.nscanned );
+assert.eq( 2, explain.nscannedObjects );
+
+// A limit of 2.
+explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).limit( -2 ).explain( true );
+assert.eq( 2, explain.n );
+
+// A $or query.
+explain = t.find( { $or:[ { a:{ $gte:0 }, b:{ $gte:1 } },
+ { a:{ $gte:1 }, b:{ $gte:0 } } ] } ).explain( true );
+// One result from the first $or clause
+assert.eq( 1, explain.clauses[ 0 ].n );
+// But 2 total.
+assert.eq( 2, explain.n );
+
+// The top-level totals are computed by summing the values for each clause.
+printjson(explain);
+assert.eq( 2, explain.n );
+
+// A non $or case where nscanned != number of results
+t.remove({});
+
+t.save( { a:'0', b:'1' } );
+t.save( { a:'1', b:'0' } );
+explain = t.find( { a:/0/, b:/1/ } ).explain( true );
+assert.eq( 1, explain.n );
+assert.eq( 2, explain.nscanned );
diff --git a/jstests/core/extent.js b/jstests/core/extent.js
new file mode 100644
index 00000000000..47ae868606a
--- /dev/null
+++ b/jstests/core/extent.js
@@ -0,0 +1,11 @@
+t = db.reclaimExtentsTest;
+t.drop();
+
+for ( var i=0; i<50; i++ ) { // enough iterations to break a 32-bit build if extents were not reclaimed.
+ db.createCollection('reclaimExtentsTest', { size : 100000000 });
+ t.insert({x:1});
+ assert( t.count() == 1 );
+ t.drop();
+}
+t.drop();
+
diff --git a/jstests/core/extent2.js b/jstests/core/extent2.js
new file mode 100644
index 00000000000..75bf0d0b1b8
--- /dev/null
+++ b/jstests/core/extent2.js
@@ -0,0 +1,34 @@
+
+
+mydb = db.getSisterDB( "test_extent2" );
+mydb.dropDatabase();
+
+t = mydb.foo;
+
+function insert(){
+ t.insert( { _id : 1 , x : 1 } )
+ t.insert( { _id : 2 , x : 1 } )
+ t.insert( { _id : 3 , x : 1 } )
+ t.ensureIndex( { x : 1 } );
+}
+
+insert();
+t.drop();
+
+start = mydb.stats();
+
+for ( i=0; i<100; i++ ) {
+ insert();
+ t.drop();
+}
+
+end = mydb.stats();
+
+printjson( start );
+printjson( end )
+assert.eq( start.extentFreeList.num, end.extentFreeList.num );
+
+// 3: 1 data, 1 _id idx, 1 x idx
+// used to be 4, but we no longer waste an extent for the freelist
+assert.eq( 3, start.extentFreeList.num );
+assert.eq( 3, end.extentFreeList.num );
diff --git a/jstests/core/filemd5.js b/jstests/core/filemd5.js
new file mode 100644
index 00000000000..41d03a1bb30
--- /dev/null
+++ b/jstests/core/filemd5.js
@@ -0,0 +1,11 @@
+
+db.fs.chunks.drop();
+db.fs.chunks.insert({files_id:1,n:0,data:new BinData(0,"test")})
+
+x = db.runCommand({"filemd5":1,"root":"fs"});
+assert( ! x.ok , tojson(x) )
+
+db.fs.chunks.ensureIndex({files_id:1,n:1})
+x = db.runCommand({"filemd5":1,"root":"fs"});
+assert( x.ok , tojson(x) )
+
diff --git a/jstests/core/find1.js b/jstests/core/find1.js
new file mode 100644
index 00000000000..ed79c3dd2c7
--- /dev/null
+++ b/jstests/core/find1.js
@@ -0,0 +1,54 @@
+t = db.find1;
+t.drop();
+
+lookAtDocumentMetrics = false;
+
+// QUERY MIGRATION
+// New system is still not connected to server status
+if ( db.serverStatus().metrics ) {
+ // var ss = db.serverStatus();
+ // lookAtDocumentMetrics = ss.metrics.document != null && ss.metrics.queryExecutor.scanned != null;
+}
+
+print( "lookAtDocumentMetrics: " + lookAtDocumentMetrics );
+
+if ( lookAtDocumentMetrics ) {
+ // ignore mongos
+ nscannedStart = db.serverStatus().metrics.queryExecutor.scanned
+}
+
+
+t.save( { a : 1 , b : "hi" } );
+t.save( { a : 2 , b : "hi" } );
+
+/* very basic test of $snapshot: just check that we get some result */
+// we are assuming here that $snapshot uses the _id index; if that ever changes, this test would need updating
+assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 1" );
+var q = t.findOne();
+q.c = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
+t.save(q); // will move a:1 object to after a:2 in the file
+assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 2" );
+
+assert( t.findOne( { a : 1 } ).b != null , "A" );
+assert( t.findOne( { a : 1 } , { a : 1 } ).b == null , "B");
+
+assert( t.find( { a : 1 } )[0].b != null , "C" );
+assert( t.find( { a : 1 } , { a : 1 } )[0].b == null , "D" );
+assert( t.find( { a : 1 } , { a : 1 } ).sort( { a : 1 } )[0].b == null , "D" );
+
+id = t.findOne()._id;
+
+assert( t.findOne( id ) , "E" );
+assert( t.findOne( id ).a , "F" );
+assert( t.findOne( id ).b , "G" );
+
+assert( t.findOne( id , { a : 1 } ).a , "H" );
+assert( ! t.findOne( id , { a : 1 } ).b , "I" );
+
+assert(t.validate().valid,"not valid");
+
+if ( lookAtDocumentMetrics ) {
+ // ignore mongos
+ nscannedEnd = db.serverStatus().metrics.queryExecutor.scanned
+ assert.lte( nscannedStart + 16, nscannedEnd );
+}
diff --git a/jstests/core/find2.js b/jstests/core/find2.js
new file mode 100644
index 00000000000..f72203419bc
--- /dev/null
+++ b/jstests/core/find2.js
@@ -0,0 +1,16 @@
+// Test object id sorting.
+
+function testObjectIdFind( db ) {
+ r = db.ed_db_find2_oif;
+ r.drop();
+
+ for( i = 0; i < 3; ++i )
+ r.save( {} );
+
+ f = r.find().sort( { _id: 1 } );
+ assert.eq( 3, f.count() );
+ assert( f[ 0 ]._id < f[ 1 ]._id );
+ assert( f[ 1 ]._id < f[ 2 ]._id );
+}
+
+testObjectIdFind( db );
diff --git a/jstests/core/find3.js b/jstests/core/find3.js
new file mode 100644
index 00000000000..a5e4b7a4d66
--- /dev/null
+++ b/jstests/core/find3.js
@@ -0,0 +1,10 @@
+t = db.find3;
+t.drop();
+
+for ( i=1; i<=50; i++)
+ t.save( { a : i } );
+
+assert.eq( 50 , t.find().toArray().length );
+assert.eq( 20 , t.find().limit(20).toArray().length );
+
+assert(t.validate().valid);
diff --git a/jstests/core/find4.js b/jstests/core/find4.js
new file mode 100644
index 00000000000..17639d3a684
--- /dev/null
+++ b/jstests/core/find4.js
@@ -0,0 +1,26 @@
+
+t = db.find4;
+t.drop();
+
+t.save( { a : 1123 , b : 54332 } );
+
+o = t.find( {} , {} )[0];
+assert.eq( 1123 , o.a , "A" );
+assert.eq( 54332 , o.b , "B" );
+assert( o._id.str , "C" );
+
+o = t.find( {} , { a : 1 } )[0];
+assert.eq( 1123 , o.a , "D" );
+assert( o._id.str , "E" );
+assert( ! o.b , "F" );
+
+o = t.find( {} , { b : 1 } )[0];
+assert.eq( 54332 , o.b , "G" );
+assert( o._id.str , "H" );
+assert( ! o.a , "I" );
+
+t.drop();
+t.save( { a : 1 , b : 1 } );
+t.save( { a : 2 , b : 2 } );
+assert.eq( "1-1,2-2" , t.find().map( function(z){ return z.a + "-" + z.b } ).toString() );
+assert.eq( "1-undefined,2-undefined" , t.find( {} , { a : 1 }).map( function(z){ return z.a + "-" + z.b } ).toString() );
diff --git a/jstests/core/find5.js b/jstests/core/find5.js
new file mode 100644
index 00000000000..b4a2c0f8865
--- /dev/null
+++ b/jstests/core/find5.js
@@ -0,0 +1,51 @@
+
+t = db.find5;
+t.drop();
+
+t.save({a: 1});
+t.save({b: 5});
+
+assert.eq( 2 , t.find({}, {b:1}).count(), "A");
+
+function getIds( f ){
+ return t.find( {} , f ).map( function(z){ return z._id; } );
+}
+
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( {} ) ) , "B1 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { a : 1 } ) ) , "B2 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { b : 1 } ) ) , "B3 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { c : 1 } ) ) , "B4 " );
+
+x = t.find( {} , { a : 1 } )[0];
+assert.eq( 1 , x.a , "C1" );
+assert.isnull( x.b , "C2" );
+
+x = t.find( {} , { a : 1 } )[1];
+assert.isnull( x.a , "C3" );
+assert.isnull( x.b , "C4" );
+
+x = t.find( {} , { b : 1 } )[0];
+assert.isnull( x.a , "C5" );
+assert.isnull( x.b , "C6" );
+
+x = t.find( {} , { b : 1 } )[1];
+assert.isnull( x.a , "C7" );
+assert.eq( 5 , x.b , "C8" );
+
+t.drop();
+
+
+t.save( { a : 1 , b : { c : 2 , d : 3 , e : 4 } } );
+assert.eq( 2 , t.find( {} , { "b.c" : 1 } ).toArray()[0].b.c , "D" );
+
+o = t.find( {} , { "b.c" : 1 , "b.d" : 1 } ).toArray()[0];
+assert( o.b.c , "E 1" );
+assert( o.b.d , "E 2" );
+assert( !o.b.e , "E 3" );
+
+assert( ! t.find( {} , { "b.c" : 1 } ).toArray()[0].b.d , "F" );
+
+t.drop();
+t.save( { a : { b : { c : 1 } } } )
+assert.eq( 1 , t.find( {} , { "a.b.c" : 1 } )[0].a.b.c , "G" );
+
diff --git a/jstests/core/find6.js b/jstests/core/find6.js
new file mode 100644
index 00000000000..c4efd3b88d3
--- /dev/null
+++ b/jstests/core/find6.js
@@ -0,0 +1,41 @@
+
+t = db.find6;
+t.drop();
+
+t.save( { a : 1 } )
+t.save( { a : 1 , b : 1 } )
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { b : null } ).count() , "B" );
+assert.eq( 1 , t.find( "function() { return this.b == null; }" ).itcount() , "C" );
+assert.eq( 1 , t.find( "function() { return this.b == null; }" ).count() , "D" );
+
+/* test some stuff with dot array notation */
+q = db.find6a;
+q.drop();
+q.insert( { "a" : [ { "0" : 1 } ] } );
+q.insert( { "a" : [ { "0" : 2 } ] } );
+q.insert( { "a" : [ 1 ] } );
+q.insert( { "a" : [ 9, 1 ] } );
+
+function f() {
+
+ assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da1");
+ assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da2");
+
+ assert.eq( 1, q.find( { 'a.0' : { $gt : 8 } } ).count(), "da3");
+ assert.eq( 0, q.find( { 'a.0' : { $lt : 0 } } ).count(), "da4");
+
+}
+
+for( var pass = 0; pass <= 1 ; pass++ ) {
+ f();
+ q.ensureIndex({a:1});
+}
+
+t = db.multidim;
+t.drop();
+t.insert({"a" : [ [ ], 1, [ 3, 4 ] ] });
+assert.eq(1, t.find({"a.2":[3,4]}).count(), "md1");
+assert.eq(1, t.find({"a.2.1":4}).count(), "md2");
+assert.eq(0, t.find({"a.2.1":3}).count(), "md3");
diff --git a/jstests/core/find7.js b/jstests/core/find7.js
new file mode 100644
index 00000000000..ca4c7d449bf
--- /dev/null
+++ b/jstests/core/find7.js
@@ -0,0 +1,8 @@
+t = db.find7;
+t.drop();
+
+x = { "_id" : { "d" : 3649, "w" : "signed" }, "u" : { "3649" : 5 } };
+t.insert(x );
+assert.eq( x , t.findOne() , "A1" );
+assert.eq( x , t.findOne( { _id : x._id } ) , "A2" );
+
diff --git a/jstests/core/find8.js b/jstests/core/find8.js
new file mode 100644
index 00000000000..60f66a500e3
--- /dev/null
+++ b/jstests/core/find8.js
@@ -0,0 +1,27 @@
+// SERVER-1932 Test unindexed matching of a range that is only valid in a multikey context.
+
+t = db.jstests_find8;
+t.drop();
+
+t.save( {a:[1,10]} );
+assert.eq( 1, t.count( { a: { $gt:2,$lt:5} } ) );
+
+// Check that we can do a query with 'invalid' range.
+assert.eq( 1, t.count( { a: { $gt:5,$lt:2} } ) );
+
+t.save( {a:[-1,12]} );
+
+// Check that we can do a query with 'invalid' range and sort.
+assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {a:1} ).itcount() );
+assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {$natural:-1} ).itcount() );
+
+// SERVER-2864
+if( 0 ) {
+t.find( { a: { $gt:5,$lt:2} } ).itcount();
+// Check that we can record a plan for an 'invalid' range.
+assert( t.find( { a: { $gt:5,$lt:2} } ).explain( true ).oldPlan );
+}
+
+t.ensureIndex( {b:1} );
+// Check that if we do a table scan of an 'invalid' range in an or clause we don't check subsequent clauses.
+assert.eq( "BasicCursor", t.find( { $or:[{ a: { $gt:5,$lt:2} }, {b:1}] } ).explain().cursor );
diff --git a/jstests/core/find9.js b/jstests/core/find9.js
new file mode 100644
index 00000000000..85adf93cc98
--- /dev/null
+++ b/jstests/core/find9.js
@@ -0,0 +1,28 @@
+// Test that the MaxBytesToReturnToClientAtOnce limit is enforced.
+
+t = db.jstests_find9;
+t.drop();
+
+big = new Array( 500000 ).toString();
+for( i = 0; i < 20; ++i ) {
+ t.save( { a:i, b:big } );
+}
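+// Each document is roughly 0.5MB, so a full batch of 20 exceeds the size limit.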
+
+// Check size limit with a simple query.
+assert.eq( 20, t.find( {}, { a:1 } ).objsLeftInBatch() ); // The projection shrinks each document, so all 20 fit in one batch.
+assert.gt( 20, t.find().objsLeftInBatch() );
+
+// Check size limit on a query with an explicit batch size.
+assert.eq( 20, t.find( {}, { a:1 } ).batchSize( 30 ).objsLeftInBatch() );
+assert.gt( 20, t.find().batchSize( 30 ).objsLeftInBatch() );
+
+for( i = 0; i < 20; ++i ) {
+ t.save( { a:i, b:big } );
+}
+
+// Check size limit with get more.
+c = t.find().batchSize( 30 );
+while( c.hasNext() ) {
+ assert.gt( 20, c.objsLeftInBatch() );
+ c.next();
+}
diff --git a/jstests/core/find_and_modify.js b/jstests/core/find_and_modify.js
new file mode 100644
index 00000000000..a80859ab60c
--- /dev/null
+++ b/jstests/core/find_and_modify.js
@@ -0,0 +1,38 @@
+t = db.find_and_modify;
+t.drop();
+
+// fill db
+for(var i=1; i<=10; i++) {
+ t.insert({priority:i, inprogress:false, value:0});
+}
+
+// returns old
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}});
+assert.eq(out.value, 0);
+assert.eq(out.inprogress, false);
+t.update({_id: out._id}, {$set: {inprogress: false}});
+
+// returns new
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}, 'new': true});
+assert.eq(out.value, 2);
+assert.eq(out.inprogress, true);
+t.update({_id: out._id}, {$set: {inprogress: false}});
+
+// update highest priority
+out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+assert.eq(out.priority, 10);
+// update next highest priority
+out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+assert.eq(out.priority, 9);
+
+// remove lowest priority
+out = t.findAndModify({sort:{priority:1}, remove:true});
+assert.eq(out.priority, 1);
+
+// remove next lowest priority
+out = t.findAndModify({sort:{priority:1}, remove:1});
+assert.eq(out.priority, 2);
+
+// return null (was {} before 1.5.4) if no matches (drivers may handle this differently)
+out = t.findAndModify({query:{no_such_field:1}, remove:1});
+assert.eq(out, null);
diff --git a/jstests/core/find_and_modify2.js b/jstests/core/find_and_modify2.js
new file mode 100644
index 00000000000..2c8ab5b3bb6
--- /dev/null
+++ b/jstests/core/find_and_modify2.js
@@ -0,0 +1,16 @@
+t = db.find_and_modify2;
+t.drop();
+
+t.insert({_id:1, i:0, j:0});
+
+out = t.findAndModify({update: {$inc: {i:1}}, 'new': true, fields: {i:1}});
+assert.eq(out, {_id:1, i:1});
+
+out = t.findAndModify({update: {$inc: {i:1}}, fields: {i:0}});
+assert.eq(out, {_id:1, j:0});
+
+out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}});
+assert.eq(out, {j:0});
+
+out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}, 'new': true});
+assert.eq(out, {j:0});
diff --git a/jstests/core/find_and_modify3.js b/jstests/core/find_and_modify3.js
new file mode 100644
index 00000000000..4214dfbcd0a
--- /dev/null
+++ b/jstests/core/find_and_modify3.js
@@ -0,0 +1,21 @@
+t = db.find_and_modify3;
+t.drop();
+
+t.insert({_id:0, other:0, comments:[{i:0, j:0}, {i:1, j:1}]});
+t.insert({_id:1, other:1, comments:[{i:0, j:0}, {i:1, j:1}]}); // this is the only one that gets modded
+t.insert({_id:2, other:2, comments:[{i:0, j:0}, {i:1, j:1}]});
+
+orig0 = t.findOne({_id:0})
+orig2 = t.findOne({_id:2})
+
+out = t.findAndModify({query: {_id:1, 'comments.i':0}, update: {$set: {'comments.$.j':2}}, 'new': true, sort:{other:1}});
+assert.eq(out.comments[0], {i:0, j:2});
+assert.eq(out.comments[1], {i:1, j:1});
+assert.eq(t.findOne({_id:0}), orig0);
+assert.eq(t.findOne({_id:2}), orig2);
+
+out = t.findAndModify({query: {other:1, 'comments.i':1}, update: {$set: {'comments.$.j':3}}, 'new': true, sort:{other:1}});
+assert.eq(out.comments[0], {i:0, j:2});
+assert.eq(out.comments[1], {i:1, j:3});
+assert.eq(t.findOne({_id:0}), orig0);
+assert.eq(t.findOne({_id:2}), orig2);
diff --git a/jstests/core/find_and_modify4.js b/jstests/core/find_and_modify4.js
new file mode 100644
index 00000000000..04abc2f1ce7
--- /dev/null
+++ b/jstests/core/find_and_modify4.js
@@ -0,0 +1,55 @@
+t = db.find_and_modify4;
+t.drop();
+
+// findAndModify with upsert is the standard way to build an auto-increment counter
+function getNextVal(counterName){
+ var ret = t.findAndModify({
+ query: {_id: counterName},
+ update: {$inc: {val: 1}},
+ upsert: true,
+ 'new': true,
+ });
+ return ret;
+}
+
+assert.eq(getNextVal("a"), {_id:"a", val:1});
+assert.eq(getNextVal("a"), {_id:"a", val:2});
+assert.eq(getNextVal("a"), {_id:"a", val:3});
+assert.eq(getNextVal("z"), {_id:"z", val:1});
+assert.eq(getNextVal("z"), {_id:"z", val:2});
+assert.eq(getNextVal("a"), {_id:"a", val:4});
+
+t.drop();
+
+function helper(upsert){
+ return t.findAndModify({
+ query: {_id: "asdf"},
+ update: {$inc: {val: 1}},
+ upsert: upsert,
+ 'new': false // the default
+ });
+}
+
+// upsert:false so nothing there before and after
+assert.eq(helper(false), null);
+assert.eq(t.count(), 0);
+
+// upsert:true so nothing there before; something there after
+assert.eq(helper(true), null);
+assert.eq(t.count(), 1);
+assert.eq(helper(true), {_id: 'asdf', val: 1});
+assert.eq(helper(false), {_id: 'asdf', val: 2}); // upsert only matters when obj doesn't exist
+assert.eq(helper(true), {_id: 'asdf', val: 3});
+
+
+// _id created if not specified
+var out = t.findAndModify({
+ query: {a:1},
+ update: {$set: {b: 2}},
+ upsert: true,
+ 'new': true
+ });
+assert.neq(out._id, undefined);
+assert.eq(out.a, 1);
+assert.eq(out.b, 2);
+
diff --git a/jstests/core/find_and_modify_server6226.js b/jstests/core/find_and_modify_server6226.js
new file mode 100644
index 00000000000..a44cb59cb05
--- /dev/null
+++ b/jstests/core/find_and_modify_server6226.js
@@ -0,0 +1,7 @@
+
+t = db.find_and_modify_server6226;
+t.drop();
+
+ret = t.findAndModify( { query : { _id : 1 } , update : { "$inc" : { i : 1 } } , upsert : true } )
+assert.isnull( ret )
+
diff --git a/jstests/core/find_and_modify_server6254.js b/jstests/core/find_and_modify_server6254.js
new file mode 100644
index 00000000000..7c7c340b7ed
--- /dev/null
+++ b/jstests/core/find_and_modify_server6254.js
@@ -0,0 +1,10 @@
+
+t = db.find_and_modify_server6254;
+t.drop();
+
+t.insert( { x : 1 } )
+ret = t.findAndModify( { query : { x : 1 } , update : { $set : { x : 2 } } , new : true } )
+assert.eq( 2 , ret.x , tojson( ret ) )
+
+assert.eq( 1 , t.count() )
+
diff --git a/jstests/core/find_and_modify_server6582.js b/jstests/core/find_and_modify_server6582.js
new file mode 100644
index 00000000000..88ce5a7c4f4
--- /dev/null
+++ b/jstests/core/find_and_modify_server6582.js
@@ -0,0 +1,18 @@
+
+t = db.find_and_modify_server6582;
+
+t.drop();
+x = t.runCommand( "findAndModify" , {query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true})
+le = x.lastErrorObject
+assert.eq( le.updatedExisting, false )
+assert.eq( le.n, 1 )
+assert.eq( le.upserted, x.value._id )
+
+t.drop();
+t.insert( { f : 1 } )
+x = t.runCommand( "findAndModify" , {query:{f:1}, remove : true } )
+le = x.lastErrorObject
+assert.eq( le.n, 1 )
+
+
+
diff --git a/jstests/core/find_and_modify_server6588.js b/jstests/core/find_and_modify_server6588.js
new file mode 100644
index 00000000000..a21855f5c38
--- /dev/null
+++ b/jstests/core/find_and_modify_server6588.js
@@ -0,0 +1,22 @@
+
+t = db.find_and_modify_server6588;
+
+initial = { _id : 1 , a : [ { b : 1 } ] , z : 1 }
+up = { "$set" : { "a.$.b" : 2 } }
+q = { _id : 1 , "a.b" : 1 }
+correct = { _id : 1 , a : [ { b : 2 } ] , z : 1 }
+
+t.drop();
+t.insert( initial )
+t.update( q , up )
+assert.eq( correct , t.findOne() )
+
+t.drop()
+t.insert( initial )
+x = t.findAndModify( { query : q , update : up } )
+assert.eq( correct , t.findOne() )
+
+t.drop()
+t.insert( initial )
+x = t.findAndModify( { query : { z : 1 , "a.b" : 1 } , update : up } )
+assert.eq( correct , t.findOne() )
diff --git a/jstests/core/find_and_modify_server6659.js b/jstests/core/find_and_modify_server6659.js
new file mode 100644
index 00000000000..7a0419cc72b
--- /dev/null
+++ b/jstests/core/find_and_modify_server6659.js
@@ -0,0 +1,7 @@
+
+t = db.find_and_modify_server6659;
+t.drop();
+
+x = t.findAndModify({query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true})
+assert.eq( 2, x.f );
+assert.eq( 2, t.findOne().f );
diff --git a/jstests/core/find_and_modify_server6909.js b/jstests/core/find_and_modify_server6909.js
new file mode 100644
index 00000000000..2f688459698
--- /dev/null
+++ b/jstests/core/find_and_modify_server6909.js
@@ -0,0 +1,21 @@
+c = db.find_and_modify_server6909;
+
+
+c.drop();
+
+c.insert( { _id : 5 , a:{ b:1 } } );
+ret = c.findAndModify( { query:{ 'a.b':1 },
+ update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
+ new:true } );
+assert.eq( 5, ret._id );
+assert.eq( 2, ret.a.b );
+
+
+c.drop();
+
+c.insert( { _id : null , a:{ b:1 } } );
+ret = c.findAndModify( { query:{ 'a.b':1 },
+ update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
+ new:true } );
+assert.eq( 2, ret.a.b );
+
diff --git a/jstests/core/find_and_modify_server6993.js b/jstests/core/find_and_modify_server6993.js
new file mode 100644
index 00000000000..b8a31915372
--- /dev/null
+++ b/jstests/core/find_and_modify_server6993.js
@@ -0,0 +1,9 @@
+
+c = db.find_and_modify_server6993;
+c.drop();
+
+c.insert( { a:[ 1, 2 ] } );
+
+c.findAndModify( { query:{ a:1 }, update:{ $set:{ 'a.$':5 } } } );
+
+assert.eq( 5, c.findOne().a[ 0 ] );
diff --git a/jstests/core/find_and_modify_server7660.js b/jstests/core/find_and_modify_server7660.js
new file mode 100644
index 00000000000..cae50d17dcc
--- /dev/null
+++ b/jstests/core/find_and_modify_server7660.js
@@ -0,0 +1,18 @@
+
+t = db.find_and_modify_server7660;
+t.drop();
+
+a = t.findAndModify({
+ query : { foo : 'bar' },
+ update : { $set : { bob : 'john' } },
+ sort: { foo : 1},
+ upsert: true,
+ new : true
+});
+
+b = t.findOne();
+assert.eq( a, b );
+assert.eq( "bar", a.foo );
+assert.eq( "john", a.bob )
+
+
diff --git a/jstests/core/find_and_modify_where.js b/jstests/core/find_and_modify_where.js
new file mode 100644
index 00000000000..2092dc5566f
--- /dev/null
+++ b/jstests/core/find_and_modify_where.js
@@ -0,0 +1,10 @@
+
+t = db.find_and_modify_where;
+t.drop();
+
+t.insert( { _id : 1 , x : 1 } );
+
+res = t.findAndModify( { query : { $where : "return this.x == 1" } , update : { $set : { y : 1 } } } )
+
+assert.eq( 1 , t.findOne().y )
+
diff --git a/jstests/core/find_dedup.js b/jstests/core/find_dedup.js
new file mode 100644
index 00000000000..401384ceb7a
--- /dev/null
+++ b/jstests/core/find_dedup.js
@@ -0,0 +1,35 @@
+// Test that duplicate query results are not returned.
+
+var t = db.jstests_find_dedup;
+
+function checkDedup(query, idArray) {
+ resultsArr = t.find(query).toArray();
+ assert.eq(resultsArr.length, idArray.length, "same number of results");
+
+ for (var i = 0; i < idArray.length; i++) {
+ assert(("_id" in resultsArr[i]), "result doc missing _id");
+ assert.eq(idArray[i], resultsArr[i]._id, "_id mismatch for doc " + i);
+ }
+}
+
+// Deduping $or
+t.drop();
+t.ensureIndex({a: 1, b: 1});
+t.save({_id: 1, a: 1, b: 1});
+t.save({_id: 2, a: 1, b: 1});
+t.save({_id: 3, a: 2, b: 2});
+t.save({_id: 4, a: 3, b: 3});
+t.save({_id: 5, a: 3, b: 3});
+checkDedup({$or: [{a:{$gte:0,$lte:2},b:{$gte:0,$lte:2}},
+ {a:{$gte:1,$lte:3},b:{$gte:1,$lte:3}},
+ {a:{$gte:1,$lte:4},b:{$gte:1,$lte:4}}]},
+ [1, 2, 3, 4, 5]);
+
+// Deduping multikey
+t.drop();
+t.save({_id: 1, a: [1, 2, 3], b: [4, 5, 6]});
+t.save({_id: 2, a: [1, 2, 3], b: [4, 5, 6]});
+assert.eq( 2, t.count() );
+checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
+t.ensureIndex( { a : 1 } );
+checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
diff --git a/jstests/core/find_size.js b/jstests/core/find_size.js
new file mode 100644
index 00000000000..d5a93d59cd2
--- /dev/null
+++ b/jstests/core/find_size.js
@@ -0,0 +1,26 @@
+// Basic test for $size.
+
+var t = db.jstests_find_size;
+t.drop();
+
+t.save({arr: []});
+t.save({arr: []});
+t.save({arr: [1]});
+t.save({arr: [1, 2, 3, 4]});
+
+// ints and longs
+assert.eq(2, t.count({arr: {$size: 0}}));
+assert.eq(2, t.count({arr: {$size: NumberLong(0)}}));
+assert.eq(0, t.count({arr: {$size: -1}}));
+assert.eq(0, t.count({arr: {$size: NumberLong(-10000)}}));
+assert.eq(1, t.count({arr: {$size: NumberInt(4)}}));
+
+// Descriptive test: string is equivalent to {$size: 0}
+assert.eq(2, t.count({arr: {$size: "str"}}));
+
+// doubles return nothing
+assert.eq(0, t.count({arr: {$size: 3.2}}));
+assert.eq(0, t.count({arr: {$size: 0.1}}));
+
+// SERVER-11952
+assert.eq(0, t.count({arr: {$size: NumberLong(-9223372036854775808)}}));
diff --git a/jstests/core/finda.js b/jstests/core/finda.js
new file mode 100644
index 00000000000..cf717d5b929
--- /dev/null
+++ b/jstests/core/finda.js
@@ -0,0 +1,106 @@
+// Tests where the QueryOptimizerCursor enters takeover mode during a query rather than a get more.
+
+t = db.jstests_finda;
+t.drop();
+
+numDocs = 200;
+
+function clearQueryPlanCache() {
+ t.ensureIndex( { c:1 } );
+ t.dropIndex( { c:1 } );
+}
+
+function assertAllFound( matches ) {
+// printjson( matches );
+ found = new Array( numDocs );
+ for( i = 0; i < numDocs; ++i ) {
+ found[ i ] = false;
+ }
+ for( i in matches ) {
+ m = matches[ i ];
+ found[ m._id ] = true;
+ }
+ for( i = 0; i < numDocs; ++i ) {
+ assert( found[ i ], i );
+ }
+}
+
+function makeCursor( query, projection, sort, batchSize, returnKey ) {
+ print("\n*** query:");
+ printjson(query);
+ print("proj:");
+ printjson(projection);
+ cursor = t.find( query, projection );
+ if ( sort ) {
+ cursor.sort( sort );
+ print("sort:");
+ printjson(sort);
+ }
+ if ( batchSize ) {
+ cursor.batchSize( batchSize );
+ print("bs: " + batchSize);
+ }
+ if ( returnKey ) {
+ cursor._addSpecial( "$returnKey", true );
+ }
+ return cursor;
+}
+
+function checkCursorWithBatchSizeProjection( query, projection, sort, batchSize,
+ expectedLeftInBatch ) {
+ clearQueryPlanCache();
+ cursor = makeCursor( query, projection, sort, batchSize );
+    // Check the expected number of objects remaining in the first batch.
+ assert.eq( expectedLeftInBatch, cursor.objsLeftInBatch() );
+ assertAllFound( cursor.toArray() );
+}
+
+function checkCursorWithBatchSize( query, sort, batchSize, expectedLeftInBatch ) {
+ checkCursorWithBatchSizeProjection( query, {}, sort, batchSize, expectedLeftInBatch );
+ checkCursorWithBatchSizeProjection( query, { a:1, _id:1 }, sort, batchSize,
+ expectedLeftInBatch );
+ // In the cases tested, when expectedLeftInBatch is high enough takeover will occur during
+ // the query operation rather than getMore and the last few matches should properly return keys
+ // from the a,_id index.
+ clearQueryPlanCache();
+ if ( expectedLeftInBatch > 110 ) {
+ cursor = makeCursor( query, {}, sort, batchSize, true );
+ lastNonAIndexResult = -1;
+ for( i = 0; i < expectedLeftInBatch; ++i ) {
+ next = cursor.next();
+ // Identify the query plan used by checking the fields of a returnKey query.
+ if ( !friendlyEqual( [ 'a', '_id' ], Object.keySet( next ) ) ) {
+ lastNonAIndexResult = i;
+ }
+ }
+ // The last results should come from the a,_id index.
+ assert.lt( lastNonAIndexResult, expectedLeftInBatch - 5 );
+ }
+}
+
+function queryWithPlanTypes( withDups ) {
+ t.drop();
+ for( i = 1; i < numDocs; ++i ) {
+ t.save( { _id:i, a:i, b:0 } );
+ }
+ if ( withDups ) {
+ t.save( { _id:0, a:[ 0, numDocs ], b:0 } ); // Add a dup on a:1 index.
+ }
+ else {
+ t.save( { _id:0, a:0, b:0 } );
+ }
+ t.ensureIndex( { a:1, _id:1 } ); // Include _id for a covered index projection.
+
+ // All plans in order.
+ checkCursorWithBatchSize( { a:{ $gte:0 } }, null, 150, 150 );
+
+ // All plans out of order.
+ checkCursorWithBatchSize( { a:{ $gte:0 } }, { c:1 }, null, 101 );
+
+ // Some plans in order, some out of order.
+ checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, 150, 150 );
+ checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, null, 101 );
+}
+
+queryWithPlanTypes( false );
+queryWithPlanTypes( true );
diff --git a/jstests/core/fm1.js b/jstests/core/fm1.js
new file mode 100644
index 00000000000..bc60a3d8911
--- /dev/null
+++ b/jstests/core/fm1.js
@@ -0,0 +1,12 @@
+
+t = db.fm1;
+t.drop();
+
+t.insert({foo:{bar:1}})
+t.find({},{foo:1}).toArray();
+t.find({},{'foo.bar':1}).toArray();
+t.find({},{'baz':1}).toArray();
+t.find({},{'baz.qux':1}).toArray();
+t.find({},{'foo.qux':1}).toArray();
+
+
diff --git a/jstests/core/fm2.js b/jstests/core/fm2.js
new file mode 100644
index 00000000000..00ccdf4afee
--- /dev/null
+++ b/jstests/core/fm2.js
@@ -0,0 +1,9 @@
+
+t = db.fm2
+t.drop();
+
+t.insert( { "one" : { "two" : {"three":"four"} } } );
+
+x = t.find({},{"one.two":1})[0]
+assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" );
+
diff --git a/jstests/core/fm3.js b/jstests/core/fm3.js
new file mode 100644
index 00000000000..8ccde6d5ab3
--- /dev/null
+++ b/jstests/core/fm3.js
@@ -0,0 +1,37 @@
+t = db.fm3
+t.drop();
+
+t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} );
+
+
+res = t.findOne({}, {a:1});
+assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a");
+assert.eq(res.b, undefined, "one b");
+
+res = t.findOne({}, {a:0});
+assert.eq(res.a, undefined, "two a");
+assert.eq(res.b, 1, "two b");
+
+res = t.findOne({}, {'a.d':1});
+assert.eq(res.a, [{}, {d:2}], "three a");
+assert.eq(res.b, undefined, "three b");
+
+res = t.findOne({}, {'a.d':0});
+assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a");
+assert.eq(res.b, 1, "four b");
+
+res = t.findOne({}, {'a.c':1});
+assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a");
+assert.eq(res.b, undefined, "five b");
+
+res = t.findOne({}, {'a.c':0});
+assert.eq(res.a, [{}, {d:2}, 'z'], "six a");
+assert.eq(res.b, 1, "six b");
+
+res = t.findOne({}, {'a.c.e':1});
+assert.eq(res.a, [{c:{e:1}}, {}], "seven a");
+assert.eq(res.b, undefined, "seven b");
+
+res = t.findOne({}, {'a.c.e':0});
+assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a");
+assert.eq(res.b, 1, "eight b");
diff --git a/jstests/core/fm4.js b/jstests/core/fm4.js
new file mode 100644
index 00000000000..1ce947ad5e7
--- /dev/null
+++ b/jstests/core/fm4.js
@@ -0,0 +1,16 @@
+t = db.fm4
+t.drop();
+
+t.insert({_id:1, a:1, b:1});
+
+assert.eq( t.findOne({}, {_id:1}), {_id:1}, 1)
+assert.eq( t.findOne({}, {_id:0}), {a:1, b:1}, 2)
+
+assert.eq( t.findOne({}, {_id:1, a:1}), {_id:1, a:1}, 3)
+assert.eq( t.findOne({}, {_id:0, a:1}), {a:1}, 4)
+
+assert.eq( t.findOne({}, {_id:0, a:0}), {b:1}, 5)
+assert.eq( t.findOne({}, { a:0}), {_id:1, b:1}, 6)
+
+// not sure if we want to support this since it is the same as above
+//assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5)
diff --git a/jstests/core/fsync.js b/jstests/core/fsync.js
new file mode 100644
index 00000000000..9238c992466
--- /dev/null
+++ b/jstests/core/fsync.js
@@ -0,0 +1,21 @@
+// test the lock/unlock snapshotting feature a bit
+
+x=db.runCommand({fsync:1,lock:1}); // not on admin db
+assert(!x.ok,"D");
+
+x=db.fsyncLock(); // uses admin automatically
+
+assert(x.ok,"C");
+
+y = db.currentOp();
+assert(y.fsyncLock,"B");
+
+z = db.fsyncUnlock();
+assert( db.currentOp().fsyncLock == null, "A2" );
+
+// make sure the db is unlocked
+db.jstests_fsync.insert({x:1});
+
+assert( db.currentOp().fsyncLock == null, "A" );
+
+assert( !db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail." )
diff --git a/jstests/core/fts1.js b/jstests/core/fts1.js
new file mode 100644
index 00000000000..6bd138d6c25
--- /dev/null
+++ b/jstests/core/fts1.js
@@ -0,0 +1,29 @@
+load( "jstests/libs/fts.js" );
+
+t = db.text1;
+t.drop();
+
+// this test requires usePowerOf2Sizes to be off
+db.createCollection( t.getName(), {"usePowerOf2Sizes" : false } );
+assert.eq(0, t.stats().userFlags);
+
+assert.eq( [] , queryIDS( t , "az" ) , "A0" );
+
+t.save( { _id : 1 , x : "az b c" } );
+t.save( { _id : 2 , x : "az b" } );
+t.save( { _id : 3 , x : "b c" } );
+t.save( { _id : 4 , x : "b c d" } );
+
+assert.eq(t.stats().userFlags, 0,
+ "A new collection should not have power-of-2 storage allocation strategy");
+t.ensureIndex( { x : "text" } );
+assert.eq(t.stats().userFlags, 1,
+ "Creating a text index on a collection should change the allocation strategy " +
+ "to power-of-2.");
+
+assert.eq( [1,2,3,4] , queryIDS( t , "c az" ) , "A1" );
+assert.eq( [4] , queryIDS( t , "d" ) , "A2" );
+
+idx = db.system.indexes.findOne( { ns: t.getFullName(), "weights.x" : 1 } )
+assert( idx.v >= 1, tojson( idx ) )
+assert( idx.textIndexVersion >= 1, tojson( idx ) )
diff --git a/jstests/core/fts2.js b/jstests/core/fts2.js
new file mode 100644
index 00000000000..e0e7469fa5e
--- /dev/null
+++ b/jstests/core/fts2.js
@@ -0,0 +1,24 @@
+
+load( "jstests/libs/fts.js" );
+
+t = db.text2;
+t.drop();
+
+t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
+t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
+
+t.ensureIndex( { x : "text" } , { weights : { x : 10 , y : 1 } } );
+
+assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
+assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+
+assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
+assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
+
+assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
+assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+
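+// lastCommadResult (spelled as in jstests/libs/fts.js) is a global that
+// queryIDS() fills with the raw result of the last text command.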
+printjson(lastCommadResult);
+assert.eq( 2 , lastCommadResult.stats.nscannedObjects , "B3" );
+assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
+
diff --git a/jstests/core/fts3.js b/jstests/core/fts3.js
new file mode 100644
index 00000000000..f5f72c4df0a
--- /dev/null
+++ b/jstests/core/fts3.js
@@ -0,0 +1,24 @@
+
+load( "jstests/libs/fts.js" );
+
+t = db.text3;
+t.drop();
+
+t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
+t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
+
+t.ensureIndex( { x : "text" , z : 1 } , { weights : { x : 10 , y : 1 } } );
+
+assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
+assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+
+assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
+assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
+
+assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
+assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+
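+// Unlike fts2.js, z is part of the index here, so the filter is answered from
+// index keys alone and no documents need to be fetched.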
+assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
+assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/core/fts4.js b/jstests/core/fts4.js
new file mode 100644
index 00000000000..8598457b033
--- /dev/null
+++ b/jstests/core/fts4.js
@@ -0,0 +1,22 @@
+
+load( "jstests/libs/fts.js" );
+
+t = db.text4;
+t.drop();
+
+t.save( { _id : 1 , x : [ "az" , "b" , "x" ] , y : [ "c" , "d" , "m" ] , z : 1 } );
+t.save( { _id : 2 , x : [ "c" , "d" , "y" ] , y : [ "az" , "b" , "n" ] , z : 2 } );
+
+t.ensureIndex( { y : "text" , z : 1 } , { weights : { x : 10 } } );
+
+assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
+assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+
+assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
+assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
+
+assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
+assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+
+assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
+assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/core/fts5.js b/jstests/core/fts5.js
new file mode 100644
index 00000000000..a3097b47a4a
--- /dev/null
+++ b/jstests/core/fts5.js
@@ -0,0 +1,22 @@
+
+load( "jstests/libs/fts.js" );
+
+t = db.text5;
+t.drop();
+
+t.save( { _id: 1 , x: [ { a: "az" } , { a: "b" } , { a: "x" } ] , y: [ "c" , "d" , "m" ] , z: 1 } );
+t.save( { _id: 2 , x: [ { a: "c" } , { a: "d" } , { a: "y" } ] , y: [ "az" , "b" , "n" ] , z: 2 } );
+
+t.ensureIndex( { y: "text" , z: 1 } , { weights: { "x.a": 10 } } );
+
+assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
+assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+
+assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
+assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
+
+assert.eq( [1] , queryIDS( t , "az" , { z: 1 } ) , "B1" );
+assert.eq( [1] , queryIDS( t , "d" , { z: 1 } ) , "B2" );
+
+assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" );
+assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" );
diff --git a/jstests/core/fts_blog.js b/jstests/core/fts_blog.js
new file mode 100644
index 00000000000..38cbb826eff
--- /dev/null
+++ b/jstests/core/fts_blog.js
@@ -0,0 +1,20 @@
+t = db.text_blog;
+t.drop();
+
+t.save( { _id : 1 , title : "my blog post" , text : "this is a new blog i am writing. yay" } );
+t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am writing. yay" } );
+t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
+
+// default weight is 1
+// specify weights if you want a field to be more meaningful
+t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
+
+res = t.runCommand( "text" , { search : "blog" } )
+assert.eq( 3, res.results.length );
+assert.eq( 1, res.results[0].obj._id );
+
+res = t.runCommand( "text" , { search : "write" } )
+assert.eq( 3, res.results.length );
+assert.eq( res.results[0].score, res.results[1].score );
+assert.eq( res.results[0].score, res.results[2].score );
+
diff --git a/jstests/core/fts_blogwild.js b/jstests/core/fts_blogwild.js
new file mode 100644
index 00000000000..ecad0ce0b19
--- /dev/null
+++ b/jstests/core/fts_blogwild.js
@@ -0,0 +1,35 @@
+t = db.text_blogwild;
+t.drop();
+
+t.save( { _id: 1 , title: "my blog post" , text: "this is a new blog i am writing. yay eliot" } );
+t.save( { _id: 2 , title: "my 2nd post" , text: "this is a new blog i am writing. yay" } );
+t.save( { _id: 3 , title: "knives are Fun for writing eliot" , text: "this is a new blog i am writing. yay" } );
+
+// default weight is 1
+// specify weights if you want a field to be more meaningful
+t.ensureIndex( { dummy: "text" } , { weights: "$**" } );
+
+res = t.runCommand( "text" , { search: "blog" } );
+assert.eq( 3 , res.stats.n , "A1" );
+
+res = t.runCommand( "text" , { search: "write" } );
+assert.eq( 3 , res.stats.n , "B1" );
+
+// mixing
+t.dropIndex( "dummy_text" );
+assert.eq( 1 , t.getIndexKeys().length , "C1" );
+t.ensureIndex( { dummy: "text" } , { weights: { "$**": 1 , title: 2 } } );
+
+
+res = t.runCommand( "text" , { search: "write" } );
+assert.eq( 3 , res.stats.n , "C2" );
+assert.eq( 3 , res.results[0].obj._id , "C3" );
+
+res = t.runCommand( "text" , { search: "blog" } );
+assert.eq( 3 , res.stats.n , "D1" );
+assert.eq( 1 , res.results[0].obj._id , "D2" );
+
+res = t.runCommand( "text" , { search: "eliot" } );
+assert.eq( 2 , res.stats.n , "E1" );
+assert.eq( 3 , res.results[0].obj._id , "E2" );
+
diff --git a/jstests/core/fts_enabled.js b/jstests/core/fts_enabled.js
new file mode 100644
index 00000000000..8617caff59f
--- /dev/null
+++ b/jstests/core/fts_enabled.js
@@ -0,0 +1,5 @@
+// Test that the (now deprecated) textSearchEnabled server parameter works correctly.
+
+// Value true is accepted, value false is rejected.
+assert.commandWorked(db.adminCommand({setParameter: 1, textSearchEnabled: true}));
+assert.commandFailed(db.adminCommand({setParameter: 1, textSearchEnabled: false}));
diff --git a/jstests/core/fts_explain.js b/jstests/core/fts_explain.js
new file mode 100644
index 00000000000..0d9c1fd7a9d
--- /dev/null
+++ b/jstests/core/fts_explain.js
@@ -0,0 +1,18 @@
+// Test $text explain. SERVER-12037.
+
+var coll = db.fts_explain;
+
+coll.drop();
+coll.ensureIndex({content: "text"}, {default_language: "none"});
+assert.gleSuccess(db);
+
+coll.insert({content: "some data"});
+assert.gleSuccess(db);
+
+var explain = coll.find({$text:{$search: "\"a\" -b -\"c\""}}).explain(true);
+assert.eq(explain.cursor, "TextCursor");
+assert.eq(explain.stats.type, "TEXT");
+assert.eq(explain.stats.parsedTextQuery.terms, ["a"]);
+assert.eq(explain.stats.parsedTextQuery.negatedTerms, ["b"]);
+assert.eq(explain.stats.parsedTextQuery.phrases, ["a"]);
+assert.eq(explain.stats.parsedTextQuery.negatedPhrases, ["c"]);
diff --git a/jstests/core/fts_index.js b/jstests/core/fts_index.js
new file mode 100644
index 00000000000..ab1971816bd
--- /dev/null
+++ b/jstests/core/fts_index.js
@@ -0,0 +1,112 @@
+// Test that:
+// 1. Text indexes properly validate the index spec used to create them.
+// 2. Text indexes properly enforce a schema on the language_override field.
+// 3. Collections may have at most one text index.
+// 4. Text indexes properly handle large keys.
+
+var coll = db.fts_index;
+var indexName = "textIndex";
+coll.drop();
+coll.getDB().createCollection(coll.getName());
+
+//
+// 1. Text indexes properly validate the index spec used to create them.
+//
+
+// Spec passes text-specific index validation.
+assert.writeOK(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanish"}));
+assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
+coll.dropIndexes();
+
+// Spec fails text-specific index validation ("spanglish" unrecognized).
+assert.writeError(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanglish"}));
+assert.eq(0, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
+coll.dropIndexes();
+
+// Spec passes general index validation.
+assert.writeOK(coll.ensureIndex({"$**": "text"}, {name: indexName}));
+assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
+coll.dropIndexes();
+
+// Spec fails general index validation ("a.$**" invalid field name for key).
+assert.writeError(coll.ensureIndex({"a.$**": "text"}, {name: indexName}));
+assert.eq(0, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName}));
+coll.dropIndexes();
+
+//
+// 2. Text indexes properly enforce a schema on the language_override field.
+//
+
+// Can create a text index on a collection where no documents have invalid language_override.
+coll.insert({a: ""});
+coll.insert({a: "", language: "spanish"});
+assert.writeOK(coll.ensureIndex({a: "text"}));
+coll.drop();
+
+// Can't create a text index on a collection containing document with an invalid language_override.
+coll.insert({a: "", language: "spanglish"});
+assert.writeError(coll.ensureIndex({a: "text"}));
+coll.drop();
+
+// Can insert documents with valid language_override into text-indexed collection.
+assert.writeOK(coll.ensureIndex({a: "text"}));
+coll.insert({a: ""});
+assert.writeOK( coll.insert({a: "", language: "spanish"}));
+coll.drop();
+
+// Can't insert documents with invalid language_override into text-indexed collection.
+assert.writeOK(coll.ensureIndex({a: "text"}));
+assert.writeError( coll.insert({a: "", language: "spanglish"}));
+coll.drop();
+
+//
+// 3. Collections may have at most one text index.
+//
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}));
+assert.eq(2, coll.getIndexes().length);
+
+// ensureIndex() becomes a no-op on an equivalent index spec.
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}));
+assert.eq(2, coll.getIndexes().length);
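+// ({_fts: "text", _ftsx: 1} plus explicit weights is the stored form of a text
+// index, so the next spec is equivalent to {a: 1, b: "text", c: 1}.)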
+assert.writeOK(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {b: 1}}));
+assert.eq(2, coll.getIndexes().length);
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "english"}));
+assert.eq(2, coll.getIndexes().length);
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 2}));
+assert.eq(2, coll.getIndexes().length);
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "language"}));
+assert.eq(2, coll.getIndexes().length);
+
+// ensureIndex() fails if a second text index would be built.
+assert.writeError(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {d: 1}}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "none"}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 1}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "idioma"}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {weights: {d: 1}}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text", d: 1}));
+assert.writeError(coll.ensureIndex({a: 1, d: "text", c: 1}));
+assert.writeError(coll.ensureIndex({b: "text"}));
+assert.writeError(coll.ensureIndex({b: "text", c: 1}));
+assert.writeError(coll.ensureIndex({a: 1, b: "text"}));
+
+coll.dropIndexes();
+
+//
+// 4. Text indexes properly handle large keys.
+//
+
+assert.writeOK(coll.ensureIndex({a: "text"}));
+
+var longstring = "";
+var longstring2 = "";
+for(var i = 0; i < 1024 * 1024; ++i) {
+ longstring = longstring + "a";
+ longstring2 = longstring2 + "b";
+}
+coll.insert({a: longstring});
+coll.insert({a: longstring2});
+assert.eq(1, coll.find({$text: {$search: longstring}}).itcount(), "long string not found in index");
+
+coll.drop();
diff --git a/jstests/core/fts_mix.js b/jstests/core/fts_mix.js
new file mode 100644
index 00000000000..56da123cdc3
--- /dev/null
+++ b/jstests/core/fts_mix.js
@@ -0,0 +1,160 @@
+
+load( "jstests/libs/fts.js" );
+
+// test collection
+tc = db.text_mix;
+tc.drop();
+
+// creation of collection documents
+// content generated using wikipedia random article
+tc.save( { _id: 1, title: "Olivia Shakespear",text: "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr." } );
+tc.save( { _id: 2, title: "Mahim Bora", text: "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty." } );
+tc.save( { _id: 3, title: "A break away!", text: "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s." } );
+tc.save( { _id: 4, title: "Linn-Kristin Riegelhuth Koren", text: "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse." } );
+tc.save( { _id: 5, title: "Morten Jensen", text: "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller." } );
+tc.save( { _id: 6, title: "Janet Laurence", text: "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize." } );
+tc.save( { _id: 7, title: "Glen-Coats Baronets", text: "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet." } );
+tc.save( { _id: 8, title: "Grapeleaf Skeletonizer", text: "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin." } );
+tc.save( { _id: 9, title: "Physics World", text: "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter." } );
+tc.save( { _id: 10, title: "Mallacoota, Victoria", text: "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia." } );
+
+// begin tests
+
+// -------------------------------------------- INDEXING & WEIGHTING -------------------------------
+
+// start with basic index, one item with default weight
+tc.ensureIndex( { "title": "text" } );
+
+// test the single result case..
+res = tc.runCommand( "text", { search: "Victoria" } );
+assert.eq( 1, res.results.length );
+assert.eq( 10, res.results[0].obj._id );
+
+tc.dropIndexes();
+
+// now let's see about multiple fields, with specific weighting
+tc.ensureIndex( { "title": "text", "text": "text" }, { weights: { "title": 10 } } );
+assert.eq( [9,7,8], queryIDS( tc, "members physics" ) );
+
+tc.dropIndexes();
+
+// test all-1 weighting with "$**"
+tc.ensureIndex( { "$**": "text" } );
+assert.eq( [2,8,7], queryIDS( tc, "family tea estate" ) );
+
+tc.dropIndexes();
+
+// non-1 weight on "$**" + other weight specified for some field
+tc.ensureIndex( { "$**": "text" }, { weights: { "$**": 10, "text": 2 } } );
+assert.eq( [7,5], queryIDS( tc, "Olympic Games gold medal" ) );
+
+tc.dropIndexes();
+
+// -------------------------------------------- SEARCHING ------------------------------------------
+
+// go back to "$**": 1, "title": 10.. and test more specific search functionality!
+tc.ensureIndex( { "$**": "text" }, { weights: { "title": 10 } } );
+
+// -------------------------------------------- STEMMING -------------------------------------------
+
+// tests stemming for basic plural case
+res = tc.runCommand( "text", { search: "member" } );
+res2 = tc.runCommand( "text", { search: "members" } );
+assert.eq( getIDS( res ), getIDS( res2 ) );
+
+// search for something with a potential possessive-'s stemming bug.
+res = tc.runCommand( "text", { search: "magazine's" } );
+res2 = tc.runCommand( "text", { search: "magazine" } );
+assert.eq( getIDS( res ), getIDS( res2 ) );
+
+// -------------------------------------------- LANGUAGE -------------------------------------------
+
+res = tc.runCommand( "text", { search: "member", language: "spanglish" } );
+assert.commandFailed( res );
+res = tc.runCommand( "text", { search: "member", language: "english" } );
+assert.commandWorked( res );
+
+// -------------------------------------------- LIMIT RESULTS --------------------------------------
+
+// ensure limit limits results
+assert.eq( [2], queryIDS( tc, "rural river dam", null , { limit : 1 } ) );
+
+// ensure top results are the same regardless of limit
+// (use a search whose full result set is larger than the limit)
+res = tc.runCommand( "text", { search: "united kingdom british princes", limit: 1 } );
+res2 = tc.runCommand( "text", { search: "united kingdom british princes" } );
+assert.eq( 1, res.results.length );
+assert.eq( 4, res2.results.length );
+assert.eq( res.results[0].obj._id, res2.results[0].obj._id );
+
+// -------------------------------------------- PROJECTION -----------------------------------------
+
+// test projection.. show just title and id
+res = tc.runCommand( "text", { search: "Morten Jensen", project: { title: 1 } } );
+assert.eq( 1, res.results.length );
+assert.eq( 5, res.results[0].obj._id );
+assert.eq( null, res.results[0].obj.text );
+assert.neq( null, res.results[0].obj.title );
+assert.neq( null, res.results[0].obj._id );
+
+// test negative projection, ie. show everything but text
+res = tc.runCommand( "text", { search: "handball", project: { text: 0 } } );
+assert.eq( 1, res.results.length );
+assert.eq( 4, res.results[0].obj._id );
+assert.eq( null, res.results[0].obj.text );
+assert.neq( null, res.results[0].obj.title );
+assert.neq( null, res.results[0].obj._id );
+
+// test projection only title, no id
+res = tc.runCommand( "text", { search: "Mahim Bora", project: { _id: 0, title: 1 } } );
+assert.eq( 1, res.results.length );
+assert.eq( "Mahim Bora", res.results[0].obj.title );
+assert.eq( null, res.results[0].obj.text );
+assert.neq( null, res.results[0].obj.title );
+assert.eq( null, res.results[0].obj._id );
+
+// -------------------------------------------- NEGATION -------------------------------------------
+
+// test negation
+assert.eq( [8], queryIDS( tc, "United -Kingdom" ) );
+assert.eq( -1, tc.findOne( { _id : 8 } ).text.search(/Kingdom/i) );
+
+// test negation edge cases... hyphens, double dash, etc.
+assert.eq( [4], queryIDS( tc, "Linn-Kristin" ) );
+
+// -------------------------------------------- PHRASE MATCHING ------------------------------------
+
+// test exact phrase matching
+assert.eq( [7], queryIDS( tc, "\"Summer Olympics\"" ) );
+assert.neq( -1, tc.findOne( { _id: 7 } ).text.indexOf("Summer Olympics") );
+
+// phrase match combined with other features: negation, additional terms, etc.
+assert.eq( [10], queryIDS( tc, "\"wild flowers\" Sydney" ) );
+
+assert.eq( [3], queryIDS( tc, "\"industry\" -Melbourne -Physics" ) );
+
+// -------------------------------------------- EDGE CASES -----------------------------------------
+
+// test empty string
+res = tc.runCommand( "text", { search: "" } );
+assert.eq( 0, res.ok )
+
+// test string with a space in it
+res = tc.runCommand( "text", { search: " " } );
+assert.eq( 0, res.results.length );
+
+// -------------------------------------------- FILTERING ------------------------------------------
+
+assert.eq( [2], queryIDS( tc, "Mahim" ) );
+assert.eq( [2], queryIDS( tc, "Mahim", { _id: 2 } ) );
+assert.eq( [], queryIDS( tc, "Mahim", { _id: 1 } ) );
+assert.eq( [], queryIDS( tc, "Mahim", { _id: { $gte: 4 } } ) );
+assert.eq( [2], queryIDS( tc, "Mahim", { _id: { $lte: 4 } } ) );
+
+// using regex conditional filtering
+assert.eq( [9], queryIDS( tc, "members", { title: { $regex: /Phy.*/i } } ) );
+
+// -------------------------------------------------------------------------------------------------
+
+assert( tc.validate().valid );
diff --git a/jstests/core/fts_partition1.js b/jstests/core/fts_partition1.js
new file mode 100644
index 00000000000..f1b4c437c3c
--- /dev/null
+++ b/jstests/core/fts_partition1.js
@@ -0,0 +1,25 @@
+load( "jstests/libs/fts.js" )
+
+t = db.text_partition1;
+t.drop();
+
+t.insert( { _id : 1 , x : 1 , y : "foo" } );
+t.insert( { _id : 2 , x : 1 , y : "bar" } );
+t.insert( { _id : 3 , x : 2 , y : "foo" } );
+t.insert( { _id : 4 , x : 2 , y : "bar" } );
+
+t.ensureIndex( { x : 1, y : "text" } );
+
+res = t.runCommand( "text", { search : "foo" } );
+assert.eq( 0, res.ok, tojson(res) );
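+// (the index has a non-text prefix on x, so the text command fails unless the
+// filter pins x down with an equality predicate)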
+
+assert.eq( [ 1 ], queryIDS( t, "foo" , { x : 1 } ) );
+
+res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } } );
+assert( res.results[0].score > 0, tojson( res ) )
+
+// repeat search with "language" specified, SERVER-8999
+res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } , language : "english" } );
+assert( res.results[0].score > 0, tojson( res ) )
diff --git a/jstests/core/fts_partition_no_multikey.js b/jstests/core/fts_partition_no_multikey.js
new file mode 100644
index 00000000000..29b9c371612
--- /dev/null
+++ b/jstests/core/fts_partition_no_multikey.js
@@ -0,0 +1,15 @@
+
+t = db.fts_partition_no_multikey;
+t.drop();
+
+t.ensureIndex( { x : 1, y : "text" } )
+
+assert.writeOK( t.insert( { x : 5 , y : "this is fun" } ));
+
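+// Arrays in the non-text prefix would make the index multikey, which a
+// compound text index does not allow, so the following inserts must fail.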
+assert.writeError( t.insert( { x : [] , y : "this is fun" } ));
+
+assert.writeError( t.insert( { x : [1] , y : "this is fun" } ));
+
+assert.writeError( t.insert( { x : [1,2] , y : "this is fun" } ));
diff --git a/jstests/core/fts_phrase.js b/jstests/core/fts_phrase.js
new file mode 100644
index 00000000000..0b58bef817e
--- /dev/null
+++ b/jstests/core/fts_phrase.js
@@ -0,0 +1,20 @@
+
+t = db.text_phrase;
+t.drop()
+
+t.save( { _id : 1 , title : "my blog post" , text : "i am writing a blog. yay" } );
+t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am typing. yay" } );
+t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
+
+t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
+
+res = t.runCommand( "text" , { search : "blog write" } );
+assert.eq( 3, res.results.length );
+assert.eq( 1, res.results[0].obj._id );
+assert( res.results[0].score > (res.results[1].score*2), tojson(res) );
+
+res = t.runCommand( "text" , { search : "write blog" } );
+assert.eq( 3, res.results.length );
+assert.eq( 1, res.results[0].obj._id );
+assert( res.results[0].score > (res.results[1].score*2), tojson(res) );
+
diff --git a/jstests/core/fts_proj.js b/jstests/core/fts_proj.js
new file mode 100644
index 00000000000..1ecc6688d1b
--- /dev/null
+++ b/jstests/core/fts_proj.js
@@ -0,0 +1,17 @@
+t = db.text_proj;
+t.drop();
+
+t.save( { _id : 1 , x : "a", y: "b", z : "c"});
+t.save( { _id : 2 , x : "d", y: "e", z : "f"});
+t.save( { _id : 3 , x : "a", y: "g", z : "h"});
+
+t.ensureIndex( { x : "text"} , { default_language : "none" } );
+
+res = t.runCommand("text", {search : "a"});
+assert.eq( 2, res.results.length );
+assert( res.results[0].obj.y, tojson(res) );
+
+res = t.runCommand("text", {search : "a", project: {x: 1}});
+assert.eq( 2, res.results.length );
+assert( !res.results[0].obj.y, tojson(res) );
+
diff --git a/jstests/core/fts_projection.js b/jstests/core/fts_projection.js
new file mode 100644
index 00000000000..9bdb9dbca8a
--- /dev/null
+++ b/jstests/core/fts_projection.js
@@ -0,0 +1,99 @@
+// Test $text with $textScore projection.
+
+var t = db.getSiblingDB("test").getCollection("fts_projection");
+t.drop();
+
+db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
+
+t.insert({_id: 0, a: "textual content"});
+t.insert({_id: 1, a: "additional content", b: -1});
+t.insert({_id: 2, a: "irrelevant content"});
+t.ensureIndex({a:"text"});
+
+// Project the text score.
+var results = t.find({$text: {$search: "textual content -irrelevant"}}, {_idCopy:0, score:{$meta: "textScore"}}).toArray();
+// printjson(results);
+// Scores should exist.
+assert.eq(results.length, 2);
+assert(results[0].score);
+assert(results[1].score);
+
+// Save each document's score, indexed by _id.
+var scores = [0, 0, 0];
+scores[results[0]._id] = results[0].score;
+scores[results[1]._id] = results[1].score;
+
+//
+// Edge/error cases:
+//
+
+// Project text score into 2 fields.
+results = t.find({$text: {$search: "textual content -irrelevant"}}, {otherScore: {$meta: "textScore"}, score:{$meta: "textScore"}}).toArray();
+assert.eq(2, results.length);
+for (var i = 0; i < results.length; ++i) {
+ assert.close(scores[results[i]._id], results[i].score);
+ assert.close(scores[results[i]._id], results[i].otherScore);
+}
+
+// printjson(results);
+
+// Project text score into "x.$" shouldn't crash
+assert.throws(function() { t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}}).toArray(); });
+
+// TODO: We can't project 'x.y':1 and 'x':1 (yet).
+
+// Clobber an existing field and behave nicely.
+results = t.find({$text: {$search: "textual content -irrelevant"}},
+ {b: {$meta: "textScore"}}).toArray();
+assert.eq(2, results.length);
+for (var i = 0; i < results.length; ++i) {
+ assert.close(scores[results[i]._id], results[i].b,
+ i + ': existing field in ' + tojson(results[i], '', true) +
+ ' is not clobbered with score');
+}
+
+assert.neq(-1, results[0].b);
+
+// Don't crash if we have no text score.
+var results = t.find({a: /text/}, {score: {$meta: "textScore"}}).toArray();
+// printjson(results);
+
+// No textScore projection with nested fields.
+assert.throws(function() { t.find({$text: {$search: "blah"}}, {'x.y':{$meta: "textScore"}}).toArray(); });
+
+// SERVER-12173
+// When the $text operator is inside an $or, it should be evaluated first.
+results = t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {_id: 1}]},
+ {_idCopy:0, score:{$meta: "textScore"}}).toArray();
+printjson(results);
+assert.eq(2, results.length);
+for (var i = 0; i < results.length; ++i) {
+ assert.close(scores[results[i]._id], results[i].score,
+ i + ': TEXT under OR invalid score: ' + tojson(results[i], '', true));
+}
+
+// SERVER-12592
+// When the $text operator is inside an $or, all non-$text children must be indexed.
+// Otherwise, we should produce a readable error.
+var errorMessage = '';
+assert.throws( function() {
+ try {
+ t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {b: 1}]}).itcount();
+ }
+ catch (e) {
+ errorMessage = e;
+ throw e;
+ }
+}, null, 'Expected error from failed TEXT under OR planning');
+assert.neq(-1, errorMessage.indexOf('TEXT'),
+ 'message from failed text planning does not mention TEXT: ' + errorMessage);
+assert.neq(-1, errorMessage.indexOf('OR'),
+ 'message from failed text planning does not mention OR: ' + errorMessage);
+
+// Scores should exist.
+assert.eq(results.length, 2);
+assert(results[0].score,
+ "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
+assert(results[1].score,
+ "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
+
diff --git a/jstests/core/fts_querylang.js b/jstests/core/fts_querylang.js
new file mode 100644
index 00000000000..2a139f5b766
--- /dev/null
+++ b/jstests/core/fts_querylang.js
@@ -0,0 +1,94 @@
+// Test $text query operator.
+
+var t = db.getSiblingDB("test").getCollection("fts_querylang");
+var cursor;
+var results;
+
+db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
+
+t.drop();
+
+t.insert({_id: 0, unindexedField: 0, a: "textual content"});
+t.insert({_id: 1, unindexedField: 1, a: "additional content"});
+t.insert({_id: 2, unindexedField: 2, a: "irrelevant content"});
+t.ensureIndex({a: "text"});
+
+// Test text query with no results.
+assert.eq(false, t.find({$text: {$search: "words"}}).hasNext());
+
+// Test basic text query.
+results = t.find({$text: {$search: "textual content -irrelevant"}}).toArray();
+assert.eq(results.length, 2);
+assert.neq(results[0]._id, 2);
+assert.neq(results[1]._id, 2);
+
+// Test sort with basic text query.
+results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray();
+assert.eq(results.length, 2);
+assert.eq(results[0]._id, 0);
+assert.eq(results[1]._id, 1);
+
+// Test skip with basic text query.
+results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).skip(1).toArray();
+assert.eq(results.length, 1);
+assert.eq(results[0]._id, 1);
+
+// Test limit with basic text query.
+results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).limit(1).toArray();
+assert.eq(results.length, 1);
+assert.eq(results[0]._id, 0);
+
+// TODO Test basic text query with sort, once sort is enabled in the new query framework.
+
+// TODO Test basic text query with projection, once projection is enabled in the new query
+// framework.
+
+// Test $and of basic text query with indexed expression.
+results = t.find({$text: {$search: "content -irrelevant"},
+ _id: 1}).toArray();
+assert.eq(results.length, 1);
+assert.eq(results[0]._id, 1);
+
+// Test $and of basic text query with unindexed expression.
+results = t.find({$text: {$search: "content -irrelevant"},
+ unindexedField: 1}).toArray();
+assert.eq(results.length, 1);
+assert.eq(results[0]._id, 1);
+
+// TODO Test invalid inputs for $text, $search, $language.
+
+// Test $language.
+cursor = t.find({$text: {$search: "contents", $language: "none"}});
+assert.eq(false, cursor.hasNext());
+
+cursor = t.find({$text: {$search: "contents", $language: "EN"}});
+assert.eq(true, cursor.hasNext());
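+// (language names appear to be matched case-insensitively: "EN" works like "english")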
+
+cursor = t.find({$text: {$search: "contents", $language: "spanglish"}});
+assert.throws(function() { cursor.next() });
+
+// TODO Test $and of basic text query with geo expression.
+
+// Test update with $text.
+t.update({$text: {$search: "textual content -irrelevant"}}, {$set: {b: 1}}, {multi: true});
+assert.eq(2, t.find({b: 1}).itcount(),
+ 'incorrect number of documents updated');
+
+// TODO Test remove with $text, once it is enabled with the new query framework.
+
+// TODO Test count with $text, once it is enabled with the new query framework.
+
+// TODO Test findAndModify with $text, once it is enabled with the new query framework.
+
+// TODO Test aggregate with $text, once it is enabled with the new query framework.
+
+// TODO Test that old query framework rejects $text queries.
+
+// TODO Test that $text fails without a text index.
+
+// TODO Test that $text accepts a hint of the text index.
+
+// TODO Test that $text fails if a different index is hinted.
+
+// TODO Test $text with {$natural:1} sort, {$natural:1} hint.
diff --git a/jstests/core/fts_score_sort.js b/jstests/core/fts_score_sort.js
new file mode 100644
index 00000000000..59fb852a774
--- /dev/null
+++ b/jstests/core/fts_score_sort.js
@@ -0,0 +1,28 @@
+// Test sorting with text score metadata.
+
+var t = db.getSiblingDB("test").getCollection("fts_score_sort");
+t.drop();
+
+db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
+
+t.insert({_id: 0, a: "textual content"});
+t.insert({_id: 1, a: "additional content"});
+t.insert({_id: 2, a: "irrelevant content"});
+t.ensureIndex({a:"text"});
+
+// Sort by the text score.
+var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({score: {$meta: "textScore"}}).toArray();
+// printjson(results);
+assert.eq(results.length, 2);
+assert.eq(results[0]._id, 0);
+assert.eq(results[1]._id, 1);
+assert(results[0].score > results[1].score);
+
+// Sort by {_id descending, score} and verify the order is right.
+var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({_id: -1, score: {$meta: "textScore"}}).toArray();
+printjson(results);
+assert.eq(results.length, 2);
+assert.eq(results[0]._id, 1);
+assert.eq(results[1]._id, 0);
+// Note the reversal from above.
+assert(results[0].score < results[1].score);
diff --git a/jstests/core/fts_spanish.js b/jstests/core/fts_spanish.js
new file mode 100644
index 00000000000..b322c369f3f
--- /dev/null
+++ b/jstests/core/fts_spanish.js
@@ -0,0 +1,32 @@
+
+load( "jstests/libs/fts.js" );
+
+t = db.text_spanish;
+t.drop();
+
+t.save( { _id: 1, title: "mi blog", text: "Este es un blog de prueba" } );
+t.save( { _id: 2, title: "mi segundo post", text: "Este es un blog de prueba" } );
+t.save( { _id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed" } );
+t.save( { _id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english" } );
+
+// default weight is 1
+// specify weights if you want a field to be more meaningful
+t.ensureIndex( { "title": "text", text: "text" }, { weights: { title: 10 },
+ default_language: "es" } );
+
+res = t.runCommand( "text", { search: "blog" } );
+assert.eq( 4, res.results.length );
+
+assert.eq( [4], queryIDS( t, "stem" ) );
+assert.eq( [3], queryIDS( t, "stemmed" ) );
+assert.eq( [4], queryIDS( t, "stemmed", null, { language : "en" } ) );
+
+assert.eq( [1,2], queryIDS( t, "prueba" ) );
+
+assert.writeError( t.save( { _id: 5, language: "spanglish", title: "", text: "" } ));
+
+t.dropIndexes();
+res = t.ensureIndex( { "title": "text", text: "text" }, { default_language: "spanglish" } );
+assert.neq(null, res);
diff --git a/jstests/core/geo1.js b/jstests/core/geo1.js
new file mode 100644
index 00000000000..5e28713d581
--- /dev/null
+++ b/jstests/core/geo1.js
@@ -0,0 +1,38 @@
+
+t = db.geo1
+t.drop();
+
+idx = { loc : "2d" , zip : 1 }
+
+t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } )
+t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } )
+assert.writeOK( t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } ));
+
+// test "2d" has to be first
+assert.eq( 1 , t.getIndexKeys().length , "S1" );
+t.ensureIndex( { zip : 1 , loc : "2d" } );
+assert.eq( 1 , t.getIndexKeys().length , "S2" );
+
+t.ensureIndex( idx );
+assert.eq( 2 , t.getIndexKeys().length , "S3" );
+
+assert.eq( 3 , t.count() , "B1" );
+assert.writeError( t.insert( { loc : [ 200 , 200 ] } ));
+assert.eq( 3 , t.count() , "B3" );
+
+// test normal access
+
+wb = t.findOne( { zip : "06525" } )
+assert( wb , "C1" );
+
+assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" )
+assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" )
+// assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" )
+
+// test config options
+
+t.drop();
+
+t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } );
+assert.writeOK( t.insert( { loc : [ 200 , 200 ] } ));
diff --git a/jstests/core/geo10.js b/jstests/core/geo10.js
new file mode 100644
index 00000000000..b122da99f2a
--- /dev/null
+++ b/jstests/core/geo10.js
@@ -0,0 +1,15 @@
+// Test for SERVER-2746
+
+coll = db.geo10
+coll.drop();
+
+assert.writeOK( db.geo10.ensureIndex( { c : '2d', t : 1 }, { min : 0, max : Math.pow( 2, 40 ) } ));
+assert( db.system.indexes.count({ ns : "test.geo10" }) == 2, "A3" )
+
+printjson( db.system.indexes.find().toArray() )
+
+assert.writeOK( db.geo10.insert( { c : [ 1, 1 ], t : 1 } ));
+assert.writeOK( db.geo10.insert( { c : [ 3600, 3600 ], t : 1 } ));
+assert.writeOK( db.geo10.insert( { c : [ 0.001, 0.001 ], t : 1 } ));
+
+printjson( db.geo10.find({ c : { $within : { $box : [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]] } }, t : 1 }).toArray() )
diff --git a/jstests/core/geo2.js b/jstests/core/geo2.js
new file mode 100644
index 00000000000..f9632ebd16d
--- /dev/null
+++ b/jstests/core/geo2.js
@@ -0,0 +1,41 @@
+
+t = db.geo2
+t.drop();
+
+n = 1
+for ( var x=-100; x<100; x+=2 ){
+ for ( var y=-100; y<100; y+=2 ){
+ t.insert( { _id : n++ , loc : [ x , y ] } )
+ }
+}
+
+t.ensureIndex( { loc : "2d" } )
+
+fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
+
+function a( cur ){
+ var total = 0;
+ var outof = 0;
+ while ( cur.hasNext() ){
+ var o = cur.next();
+ total += Geo.distance( [ 50 , 50 ] , o.loc );
+ outof++;
+ }
+ return total/outof;
+}
+
+assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" )
+assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" );
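+// ([50,50] is itself a grid point: the 3 nearest distances are 0, 2, 2, avg 4/3)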
+assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" );
+
+printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() )
+
+
+assert.lt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(50) ) , "C1" )
+assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 , 3 ] } } ).limit(50) ) , "C2" )
+assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] , $maxDistance : 3 } } ).limit(50) ) , "C3" )
+
+// SERVER-8974 - test if $geoNear operator works with 2d index as well
+var geoNear_cursor = t.find( { loc : { $geoNear : [50, 50] } } );
+assert.eq( geoNear_cursor.count(), 100 )
diff --git a/jstests/core/geo3.js b/jstests/core/geo3.js
new file mode 100644
index 00000000000..47637783f5b
--- /dev/null
+++ b/jstests/core/geo3.js
@@ -0,0 +1,78 @@
+
+t = db.geo3
+t.drop();
+
+n = 1
+for ( var x=-100; x<100; x+=2 ){
+ for ( var y=-100; y<100; y+=2 ){
+ t.insert( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } )
+ }
+}
+
+
+t.ensureIndex( { loc : "2d" } )
+
+fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
+
+// test filter
+
+filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
+assert.eq( 10 , filtered1.results.length , "B1" );
+filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } )
+//printjson( filtered1.stats );
+
+function avgA( q , len ){
+ if ( ! len )
+ len = 10;
+ var realq = { loc : { $near : [ 50 , 50 ] } };
+ if ( q )
+ Object.extend( realq , q );
+ var as =
+ t.find( realq ).limit(len).map(
+ function(z){
+ return z.a;
+ }
+ );
+ assert.eq( len , as.length , "length in avgA" );
+ return Array.avg( as );
+}
+
+function testFiltering( msg ){
+ assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " );
+ assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " );
+ assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " );
+}
+
+testFiltering( "just loc" );
+
+t.dropIndex( { loc : "2d" } )
+assert.eq( 1 , t.getIndexKeys().length , "setup 3a" )
+t.ensureIndex( { loc : "2d" , a : 1 } )
+assert.eq( 2 , t.getIndexKeys().length , "setup 3b" )
+
+filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
+assert.eq( 10 , filtered2.results.length , "B3" );
+filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } )
+
+assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" )
+assert.eq( filtered1.stats.nscanned , filtered2.stats.nscanned , "C3" )
+assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" )
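+// (with a in the index, filtering happens on index keys, so fewer full objects are loaded)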
+
+testFiltering( "loc and a" );
+
+t.dropIndex( { loc : "2d" , a : 1 } )
+assert.eq( 1 , t.getIndexKeys().length , "setup 4a" )
+t.ensureIndex( { loc : "2d" , b : 1 } )
+assert.eq( 2 , t.getIndexKeys().length , "setup 4b" )
+
+testFiltering( "loc and b" );
+
+
+q = { loc : { $near : [ 50 , 50 ] } }
+assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" )
+assert.eq( 100 , t.find( q ).limit(100).count() , "D2" )
+
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" )
+assert.eq( 20 , t.find( q ).limit(20).size() , "D4" )
+
diff --git a/jstests/core/geo4.js b/jstests/core/geo4.js
new file mode 100644
index 00000000000..c1be468bb52
--- /dev/null
+++ b/jstests/core/geo4.js
@@ -0,0 +1,11 @@
+var t = db.geo4;
+t.drop();
+
+t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } );
+
+var err = t.ensureIndex( { loc : "2d" }, { bits : 33 } );
+assert.writeError(err);
+assert( err.getWriteError().errmsg.indexOf("bits in geo index must be between 1 and 32") >= 0,
+ tojson( err ));
+
+assert.writeOK(t.ensureIndex( { loc : "2d" }, { bits : 32 } ));
diff --git a/jstests/core/geo5.js b/jstests/core/geo5.js
new file mode 100644
index 00000000000..67b00f85b44
--- /dev/null
+++ b/jstests/core/geo5.js
@@ -0,0 +1,18 @@
+t = db.geo5;
+t.drop();
+
+t.insert( { p : [ 0,0 ] } )
+t.ensureIndex( { p : "2d" } )
+
+res = t.runCommand( "geoNear" , { near : [1,1] } );
+assert.eq( 1 , res.results.length , "A1" );
+
+t.insert( { p : [ 1,1 ] } )
+t.insert( { p : [ -1,-1 ] } )
+res = t.runCommand( "geoNear" , { near : [50,50] } );
+assert.eq( 3 , res.results.length , "A2" );
+
+t.insert( { p : [ -1,-1 ] } )
+res = t.runCommand( "geoNear" , { near : [50,50] } );
+assert.eq( 4 , res.results.length , "A3" );
+
diff --git a/jstests/core/geo6.js b/jstests/core/geo6.js
new file mode 100644
index 00000000000..185795c57ba
--- /dev/null
+++ b/jstests/core/geo6.js
@@ -0,0 +1,24 @@
+
+t = db.geo6;
+t.drop();
+
+t.ensureIndex( { loc : "2d" } );
+
+assert.eq( 0 , t.find().itcount() , "pre0" );
+assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" )
+
+t.insert( { _id : 1 , loc : [ 1 , 1 ] } )
+t.insert( { _id : 2 , loc : [ 1 , 2 ] } )
+t.insert( { _id : 3 } )
+
+assert.eq( 3 , t.find().itcount() , "A1" )
+assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" )
+assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" )
+
+t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).forEach(printjson);
+assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" )
+assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B1" )
+
+
+t.insert( { _id : 4 , loc : [] } )
+assert.eq( 4 , t.find().itcount() , "C1" )
diff --git a/jstests/core/geo7.js b/jstests/core/geo7.js
new file mode 100644
index 00000000000..c220da54249
--- /dev/null
+++ b/jstests/core/geo7.js
@@ -0,0 +1,20 @@
+
+t = db.geo7;
+t.drop();
+
+t.insert({_id:1,y:[1,1]})
+t.insert({_id:2,y:[1,1],z:3})
+t.insert({_id:3,y:[1,1],z:4})
+t.insert({_id:4,y:[1,1],z:5})
+
+t.ensureIndex({y:"2d",z:1})
+
+assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" );
+
+t.dropIndex({y:"2d",z:1})
+
+t.ensureIndex({y:"2d"})
+assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" );
+
+t.insert( { _id : 5 , y : 5 } );
+assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" );
diff --git a/jstests/core/geo8.js b/jstests/core/geo8.js
new file mode 100644
index 00000000000..301f3bcc0d1
--- /dev/null
+++ b/jstests/core/geo8.js
@@ -0,0 +1,13 @@
+
+t = db.geo8
+t.drop()
+
+t.insert( { loc : [ 5 , 5 ] } )
+t.insert( { loc : [ 5 , 6 ] } )
+t.insert( { loc : [ 5 , 7 ] } )
+t.insert( { loc : [ 4 , 5 ] } )
+t.insert( { loc : [ 100 , 100 ] } )
+
+t.ensureIndex( { loc : "2d" } )
+
+t.runCommand( "geoWalk" );
diff --git a/jstests/core/geo9.js b/jstests/core/geo9.js
new file mode 100644
index 00000000000..8b6510f03b5
--- /dev/null
+++ b/jstests/core/geo9.js
@@ -0,0 +1,29 @@
+
+t = db.geo9
+t.drop();
+
+t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } )
+t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } )
+t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } )
+
+t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } )
+t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } )
+t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } )
+
+t.ensureIndex( { a : "2d" } )
+t.ensureIndex( { b : "2d" } )
+
+function check( field ){
+ var q = {}
+ q[field] = { $near : [ 11 , 11 ] }
+ arr = t.find( q ).limit(3).map(
+ function(z){
+ return Geo.distance( [ 11 , 11 ] , z[field] );
+ }
+ );
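+    // nearest three to [11,11] are [11,11], [10,10], [12,12]: 0 + sqrt(2) + sqrt(2)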
+ assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field );
+}
+
+check( "a" )
+check( "b" )
diff --git a/jstests/core/geo_2d_explain.js b/jstests/core/geo_2d_explain.js
new file mode 100644
index 00000000000..8195642aabc
--- /dev/null
+++ b/jstests/core/geo_2d_explain.js
@@ -0,0 +1,29 @@
+var t = db.geo_2d_explain;
+
+t.drop();
+
+var n = 1000;
+
+// insert n documents with integer _id, a can be 1-5, loc is close to [40, 40]
+t.drop()
+t.ensureIndex({loc: "2d", _id: 1})
+
+var x = 40;
+var y = 40;
+for (var i = 0; i < n; i++) {
+ // random number in range [1, 5]
+ var a = Math.floor(Math.random() * 5) + 1;
+ var dist = 4.0;
+ var dx = (Math.random() - 0.5) * dist;
+ var dy = (Math.random() - 0.5) * dist;
+ var loc = [x + dx, y + dy];
+ t.save({_id: i, a: a, loc: loc});
+}
+
+var explain = t.find({loc: {$near: [40, 40]}, _id: {$lt: 50}}).explain();
+
+print('explain = ' + tojson(explain));
+
+assert.eq({}, explain.indexBounds);
+assert.eq(explain.n, explain.nscannedObjects);
+assert.lte(explain.n, explain.nscanned);
diff --git a/jstests/core/geo_2d_with_geojson_point.js b/jstests/core/geo_2d_with_geojson_point.js
new file mode 100644
index 00000000000..b5afc8b77b8
--- /dev/null
+++ b/jstests/core/geo_2d_with_geojson_point.js
@@ -0,0 +1,20 @@
+/*
+ * Use of GeoJSON points should be prohibited with a 2d index, SERVER-10636.
+ */
+
+var t = db.geo_2d_with_geojson_point;
+t.drop();
+t.ensureIndex({loc: '2d'});
+
+var geoJSONPoint = {
+ type: 'Point',
+ coordinates: [0, 0]
+};
+
+print(assert.throws(
+ function() {
+ t.findOne({
+ loc: {$near: {$geometry: geoJSONPoint}}});
+ },
+ [],
+ 'querying 2d index with GeoJSON point.'));
diff --git a/jstests/core/geo_allowedcomparisons.js b/jstests/core/geo_allowedcomparisons.js
new file mode 100644
index 00000000000..61eb3f43e52
--- /dev/null
+++ b/jstests/core/geo_allowedcomparisons.js
@@ -0,0 +1,95 @@
+// A test for what geometries can interact with what other geometries.
+t = db.geo_allowedcomparisons;
+
+// Any GeoJSON object can intersect with any GeoJSON object.
+geojsonPoint = { "type" : "Point", "coordinates": [ 0, 0 ] };
+oldPoint = [0,0];
+
+// GeoJSON polygons can contain any geojson object and OLD points.
+geojsonPoly = { "type" : "Polygon",
+ "coordinates" : [ [ [-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]]};
+
+// This can be contained by GJ polygons, intersected by anything GJ and old points.
+geojsonLine = { "type" : "LineString", "coordinates": [ [ 0, 0], [1, 1]]}
+
+// $centerSphere can contain old or new points.
+oldCenterSphere = [[0, 0], Math.PI / 180];
+// $box can contain old points.
+oldBox = [[-5,-5], [5,5]];
+// $polygon can contain old points.
+oldPolygon = [[-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]
+// $center can contain old points.
+oldCenter = [[0, 0], 1];
+
+t.drop();
+t.ensureIndex({geo: "2d"});
+// 2d doesn't know what to do with this
+assert.writeError(t.insert({geo: geojsonPoint}));
+// Old points are OK.
+assert.writeOK(t.insert({geo: oldPoint}));
+// Lines not OK in 2d
+assert.writeError(t.insert({geo: geojsonLine}));
+// Shapes are not OK to insert in 2d
+assert.writeError(t.insert({geo: geojsonPoly}));
+assert.writeError(t.insert({geo: oldCenterSphere}));
+assert.writeError(t.insert({geo: oldCenter}));
+// If we try to insert a polygon, it thinks it's an array of points. Let's not
+// do that. Ditto for the box.
+
+// Verify that even if we can't index them, we can use them in a matcher.
+t.insert({gj: geojsonLine})
+t.insert({gj: geojsonPoly})
+geojsonPoint2 = { "type" : "Point", "coordinates": [ 0, 0.001 ] };
+t.insert({gjp: geojsonPoint2})
+
+// We convert between old and new style points.
+assert.eq(1, t.find({gjp: {$geoWithin: {$box: oldBox}}}).itcount());
+assert.eq(1, t.find({gjp: {$geoWithin: {$polygon: oldPolygon}}}).itcount());
+assert.eq(1, t.find({gjp: {$geoWithin: {$center: oldCenter}}}).itcount());
+assert.eq(1, t.find({gjp: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount())
+
+function runTests() {
+ // Each find the box, the polygon, and the old point.
+ assert.eq(1, t.find({geo: {$geoWithin: {$box: oldBox}}}).itcount())
+ assert.eq(1, t.find({geo: {$geoWithin: {$polygon: oldPolygon}}}).itcount())
+ // Each find the old point.
+ assert.eq(1, t.find({geo: {$geoWithin: {$center: oldCenter}}}).itcount())
+ assert.eq(1, t.find({geo: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount())
+ // Using geojson with 2d-style geoWithin syntax should choke.
+ assert.throws(function() { return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}})
+ .itcount();})
+ // Using old polygon w/new syntax should choke too.
+ assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}})
+ .itcount();})
+ assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldBox}}})
+ .itcount();})
+ assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenter}}})
+ .itcount();})
+ assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}})
+ .itcount();})
+ // Even if we only have a 2d index, the 2d suitability function should
+ // allow the matcher to deal with this. If we have a 2dsphere index we use it.
+ assert.eq(1, t.find({geo: {$geoWithin: {$geometry: geojsonPoly}}}).itcount())
+ assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoly}}}).itcount())
+ assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: oldPoint}}}).itcount())
+ assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoint}}}).itcount())
+}
+
+// We have a 2d index right now. Let's see what it does.
+runTests();
+
+// No index now.
+t.dropIndex({geo: "2d"})
+runTests();
+
+// 2dsphere index now.
+assert.writeOK( t.ensureIndex({geo: "2dsphere"}) );
+// 2dsphere does not support arrays of points.
+assert.writeError(t.insert({geo: [geojsonPoint2, geojsonPoint]}));
+runTests();
+
+// Old stuff is not GeoJSON (or old-style point). All should fail.
+assert.writeError(t.insert({geo: oldBox}));
+assert.writeError(t.insert({geo: oldPolygon}));
+assert.writeError(t.insert({geo: oldCenter}));
+assert.writeError(t.insert({geo: oldCenterSphere}));
diff --git a/jstests/core/geo_array0.js b/jstests/core/geo_array0.js
new file mode 100644
index 00000000000..39c7b10c083
--- /dev/null
+++ b/jstests/core/geo_array0.js
@@ -0,0 +1,26 @@
+// Make sure the very basics of geo arrays are sane by creating a few multi location docs
+t = db.geoarray
+
+function test(index) {
+ t.drop();
+ t.insert( { zip : "10001", loc : { home : [ 10, 10 ], work : [ 50, 50 ] } } )
+ t.insert( { zip : "10002", loc : { home : [ 20, 20 ], work : [ 50, 50 ] } } )
+ var res = t.insert( { zip : "10003", loc : { home : [ 30, 30 ], work : [ 50, 50 ] } } );
+ assert.writeOK( res );
+
+ if (index) {
+ assert.writeOK(t.ensureIndex( { loc : "2d", zip : 1 } ));
+ assert.eq( 2, t.getIndexKeys().length )
+ }
+
+ res = t.insert( { zip : "10004", loc : { home : [ 40, 40 ], work : [ 50, 50 ] } } );
+ assert.writeOK( res );
+
+ // test normal access
+ printjson( t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
+ assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
+ assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
+}
+
+//test(false); // this was removed as part of SERVER-6400
+test(true)
diff --git a/jstests/core/geo_array1.js b/jstests/core/geo_array1.js
new file mode 100644
index 00000000000..ca61050c888
--- /dev/null
+++ b/jstests/core/geo_array1.js
@@ -0,0 +1,38 @@
+// Make sure many locations in one doc works, in the form of an array
+
+t = db.geoarray1
+function test(index) {
+ t.drop();
+
+ var locObj = []
+ // Add locations everywhere
+ for ( var i = 0; i < 10; i++ ) {
+ for ( var j = 0; j < 10; j++ ) {
+ if ( j % 2 == 0 )
+ locObj.push( [ i, j ] )
+ else
+ locObj.push( { x : i, y : j } )
+ }
+ }
+
+ // Add docs with all these locations
+ for( var i = 0; i < 300; i++ ){
+ t.insert( { loc : locObj } )
+ }
+
+ if (index) {
+ t.ensureIndex( { loc : "2d" } )
+ }
+
+ // Pull them back
+ for ( var i = 0; i < 10; i++ ) {
+ for ( var j = 0; j < 10; j++ ) {
+ assert.eq(300, t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5 ],
+ [i + 0.5,j + 0.5]]}}})
+ .count())
+ }
+ }
+}
+
+test(true)
+test(false)
diff --git a/jstests/core/geo_array2.js b/jstests/core/geo_array2.js
new file mode 100644
index 00000000000..acfc6a15abf
--- /dev/null
+++ b/jstests/core/geo_array2.js
@@ -0,0 +1,161 @@
+// Check the semantics of near calls with multiple locations
+
+t = db.geoarray2
+t.drop();
+
+var numObjs = 10;
+var numLocs = 100;
+
+// Test the semantics of near / nearSphere / etc. queries with multiple keys per object
+
+for( var i = -1; i < 2; i++ ){
+ for(var j = -1; j < 2; j++ ){
+
+ locObj = []
+
+ if( i != 0 || j != 0 )
+ locObj.push( { x : i * 50 + Random.rand(),
+ y : j * 50 + Random.rand() } )
+ locObj.push( { x : Random.rand(),
+ y : Random.rand() } )
+ locObj.push( { x : Random.rand(),
+ y : Random.rand() } )
+
+ t.insert({ name : "" + i + "" + j , loc : locObj , type : "A" })
+ t.insert({ name : "" + i + "" + j , loc : locObj , type : "B" })
+ }
+}
+
+assert.writeOK(t.ensureIndex({ loc : "2d" , type : 1 }));
+
+print( "Starting testing phase... ")
+
+for( var pass = 0; pass < 2; pass++ ){
+
+var type = t == 0 ? "A" : "B"
+
+for( var i = -1; i < 2; i++ ){
+ for(var j = -1; j < 2; j++ ){
+
+ var center = [ i * 50 , j * 50 ]
+ var count = i == 0 && j == 0 ? 9 : 1
+ var objCount = 1
+
+ // Do near check
+
+ var nearResults = db.runCommand( { geoNear : "geoarray2" ,
+ near : center ,
+ num : count,
+ query : { type : type } } ).results
+ //printjson( nearResults )
+
+ var objsFound = {}
+ var lastResult = 0;
+ for( var k = 0; k < nearResults.length; k++ ){
+
+ // All distances should be small, for the # of results
+ assert.gt( 1.5 , nearResults[k].dis )
+ // Distances should be increasing
+ assert.lte( lastResult, nearResults[k].dis )
+ // Objs should be of the right type
+ assert.eq( type, nearResults[k].obj.type )
+
+ lastResult = nearResults[k].dis
+
+ var objKey = "" + nearResults[k].obj._id
+
+ if( objKey in objsFound ) objsFound[ objKey ]++
+ else objsFound[ objKey ] = 1
+
+ }
+
+ // Make sure we found the right objects each time
+ // Note: multiple objects could be found at different distances.
+ for( var q in objsFound ){
+ assert.eq( objCount , objsFound[q] )
+ }
+
+
+ // Do nearSphere check
+
+ // Earth Radius
+ var eRad = 6371
+
+ nearResults = db.geoarray2.find( { loc : { $nearSphere : center , $maxDistance : 500 /* km */ / eRad }, type : type } ).toArray()
+
+ assert.eq( nearResults.length , count )
+
+ objsFound = {}
+ lastResult = 0;
+ for( var k = 0; k < nearResults.length; k++ ){
+ var objKey = "" + nearResults[k]._id
+ if( objKey in objsFound ) objsFound[ objKey ]++
+ else objsFound[ objKey ] = 1
+
+ }
+
+ // Make sure we found the right objects each time
+ for( var q in objsFound ){
+ assert.eq( objCount , objsFound[q] )
+ }
+
+
+
+ // Within results do not return duplicate documents
+
+ var count = i == 0 && j == 0 ? 9 : 1
+ var objCount = i == 0 && j == 0 ? 1 : 1
+
+ // Do within check
+ objsFound = {}
+
+ var box = [ [center[0] - 1, center[1] - 1] , [center[0] + 1, center[1] + 1] ]
+
+ //printjson( box )
+
+ var withinResults = db.geoarray2.find({ loc : { $within : { $box : box } } , type : type }).toArray()
+
+ assert.eq( withinResults.length , count )
+
+ for( var k = 0; k < withinResults.length; k++ ){
+ var objKey = "" + withinResults[k]._id
+ if( objKey in objsFound ) objsFound[ objKey ]++
+ else objsFound[ objKey ] = 1
+ }
+
+ //printjson( objsFound )
+
+ // Make sure we found the right objects each time
+ for( var q in objsFound ){
+ assert.eq( objCount , objsFound[q] )
+ }
+
+
+ // Do within check (circle)
+ objsFound = {}
+
+ withinResults = db.geoarray2.find({ loc : { $within : { $center : [ center, 1.5 ] } } , type : type }).toArray()
+
+ assert.eq( withinResults.length , count )
+
+ for( var k = 0; k < withinResults.length; k++ ){
+ var objKey = "" + withinResults[k]._id
+ if( objKey in objsFound ) objsFound[ objKey ]++
+ else objsFound[ objKey ] = 1
+ }
+
+ // Make sure we found the right objects each time
+ for( var q in objsFound ){
+ assert.eq( objCount , objsFound[q] )
+ }
+
+
+
+ }
+}
+
+}
diff --git a/jstests/core/geo_borders.js b/jstests/core/geo_borders.js
new file mode 100644
index 00000000000..20781409b1e
--- /dev/null
+++ b/jstests/core/geo_borders.js
@@ -0,0 +1,162 @@
+t = db.borders
+t.drop()
+
+epsilon = 0.0001;
+
+// For these tests, it is *required* that the step lands exactly on max
+min = -1
+max = 1
+step = 1
+numItems = 0;
+
+for ( var x = min; x <= max; x += step ) {
+ for ( var y = min; y <= max; y += step ) {
+ t.insert( { loc : { x : x, y : y } } )
+ numItems++;
+ }
+}
+
+overallMin = -1
+overallMax = 1
+
+// Create a point index slightly smaller than the points we have
+var res = t.ensureIndex({ loc: "2d" },
+ { max: overallMax - epsilon / 2,
+ min: overallMin + epsilon / 2 });
+assert.neq(null, res);
+
+// Create a point index only slightly bigger than the points we have
+res = t.ensureIndex( { loc : "2d" }, { max : overallMax + epsilon, min : overallMin - epsilon } );
+assert.writeOK(res);
+
+// ************
+// Box Tests
+// ************
+
+// If the box is bigger than the index bounds, just clip at the borders
+assert.eq( numItems, t.find(
+ { loc : { $within : { $box : [
+ [ overallMin - 2 * epsilon, overallMin - 2 * epsilon ],
+ [ overallMax + 2 * epsilon, overallMax + 2 * epsilon ] ] } } } ).count() );
+
+// Check that this also works when only a single dimension is out of bounds
+assert.eq( numItems - 5, t.find(
+ { loc : { $within : { $box : [
+ [ overallMin - 2 * epsilon, overallMin - 0.5 * epsilon ],
+ [ overallMax - epsilon, overallMax - epsilon ] ] } } } ).count() );
+
+// Make sure we can get at least close to the bounds of the index
+assert.eq( numItems, t.find(
+ { loc : { $within : { $box : [
+ [ overallMin - epsilon / 2, overallMin - epsilon / 2 ],
+ [ overallMax + epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
+
+// Check that swapping min/max has good behavior
+assert.eq( numItems, t.find(
+ { loc : { $within : { $box : [
+ [ overallMax + epsilon / 2, overallMax + epsilon / 2 ],
+ [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() );
+
+assert.eq( numItems, t.find(
+ { loc : { $within : { $box : [
+ [ overallMax + epsilon / 2, overallMin - epsilon / 2 ],
+ [ overallMin - epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
+
+// **************
+// Circle tests
+// **************
+
+center = ( overallMax + overallMin ) / 2
+center = [ center, center ]
+radius = overallMax
+
+offCenter = [ center[0] + radius, center[1] + radius ]
+onBounds = [ offCenter[0] + epsilon, offCenter[1] + epsilon ]
+offBounds = [ onBounds[0] + epsilon, onBounds[1] + epsilon ]
+onBoundsNeg = [ -onBounds[0], -onBounds[1] ]
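+// offCenter sits on the corner of the data; onBounds is epsilon beyond that,
+// i.e. exactly on the index boundary; offBounds is past the index boundary.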
+
+// Make sure we can get all points when radius is exactly at full bounds
+assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + epsilon ] } } } ).count() );
+
+// Make sure we can get points when radius is over full bounds
+assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + 2 * epsilon ] } } } ).count() );
+
+// Make sure we can get points when radius is over full bounds, off-centered
+assert.lt( 0, t.find( { loc : { $within : { $center : [ offCenter, radius + 2 * epsilon ] } } } ).count() );
+
+// Make sure we get correct corner point when center is in bounds
+// (x bounds wrap, so could get other corner)
+cornerPt = t.findOne( { loc : { $within : { $center : [ offCenter, step / 2 ] } } } );
+assert.eq( cornerPt.loc.y, overallMax )
+
+// Make sure we get correct corner point when center is on bounds
+// NOTE: Only valid points on MIN bounds
+cornerPt = t
+ .findOne( { loc : { $within : { $center : [ onBoundsNeg, Math.sqrt( 2 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
+assert.eq( cornerPt.loc.y, overallMin )
+
+// Make sure we can't get corner point when center is over bounds
+assert.throws( function() {
+    return t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
+} );
+
+// Make sure we can't get corner point when center is on max bounds
+assert.throws( function() {
+    return t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
+} );
+
+// ***********
+// Near tests
+// ***********
+
+// Make sure we can get all nearby points to a point in range
+assert.eq( overallMax, t.find( { loc : { $near : offCenter } } ).next().loc.y );
+
+// Make sure we can get all nearby points to a point on the boundary
+assert.eq( overallMin, t.find( { loc : { $near : onBoundsNeg } } ).next().loc.y );
+
+// Make sure we can't get all nearby points to a point over the boundary
+assert.throws( function() {
+    return t.findOne( { loc : { $near : offBounds } } );
+} );
+// Make sure we can't get all nearby points to a point on the max boundary
+// (onBounds: per the note above, only points on MIN bounds are valid)
+assert.throws( function() {
+    return t.findOne( { loc : { $near : onBounds } } );
+} );
+
+// Make sure we can get all nearby points within one step (4 points in top
+// corner)
+assert.eq( 4, t.find( { loc : { $near : offCenter, $maxDistance : step * 1.9 } } ).count() );
+
+// **************
+// Command Tests
+// **************
+// Make sure we can get all nearby points to a point in range
+assert.eq( overallMax, db.runCommand( { geoNear : "borders", near : offCenter } ).results[0].obj.loc.y );
+
+// Make sure we can get all nearby points to a point on the boundary
+assert.eq( overallMin, db.runCommand( { geoNear : "borders", near : onBoundsNeg } ).results[0].obj.loc.y );
+
+// Make sure we can't get all nearby points to a point over the boundary
+assert.commandFailed( db.runCommand( { geoNear : "borders", near : offBounds } ));
+
+// The geoNear command, unlike $near, does accept a point on the max boundary
+assert.commandWorked( db.runCommand( { geoNear : "borders", near : onBounds } ));
+
+// Make sure we can get all nearby points within one step (4 points in top
+// corner)
+assert.eq( 4, db.runCommand( { geoNear : "borders", near : offCenter, maxDistance : step * 1.5 } ).results.length );
diff --git a/jstests/core/geo_box1.js b/jstests/core/geo_box1.js
new file mode 100644
index 00000000000..5ef335158e1
--- /dev/null
+++ b/jstests/core/geo_box1.js
@@ -0,0 +1,43 @@
+
+t = db.geo_box1;
+t.drop();
+
+num = 0;
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ }
+}
+
+t.ensureIndex( { loc : "2d" } );
+
+searches = [
+ [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
+ [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
+];
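+// Boxes are inclusive of integer grid points on their border, so
+// [[x1,y1],[x2,y2]] should hold (1 + x2 - x1) * (1 + y2 - y1) points;
+// e.g. the first search, [[1,2],[4,5]], should match 4 * 4 = 16 points.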
+
+
+for ( i=0; i<searches.length; i++ ){
+ b = searches[i];
+ //printjson( b );
+
+ q = { loc : { $within : { $box : b } } }
+ numWanted = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
+ assert.eq( numWanted , t.find(q).itcount() , "itcount: " + tojson( q ) );
+ printjson( t.find(q).explain() )
+}
+
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" )
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" )
+
+
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" )
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" )
+
+assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" )
+
diff --git a/jstests/core/geo_box1_noindex.js b/jstests/core/geo_box1_noindex.js
new file mode 100644
index 00000000000..8c1ae87e08e
--- /dev/null
+++ b/jstests/core/geo_box1_noindex.js
@@ -0,0 +1,32 @@
+// SERVER-7343: allow $within without a geo index.
+t = db.geo_box1_noindex;
+t.drop();
+
+num = 0;
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ }
+}
+
+searches = [
+ [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
+ [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
+];
+
+for ( i=0; i<searches.length; i++ ){
+ b = searches[i];
+ q = { loc : { $within : { $box : b } } }
+ numWanted = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
+ assert.eq( numWanted , t.find(q).itcount() , "itcount: " + tojson( q ) );
+ printjson( t.find(q).explain() )
+}
+
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" )
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" )
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" )
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" )
+assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" )
diff --git a/jstests/core/geo_box2.js b/jstests/core/geo_box2.js
new file mode 100644
index 00000000000..4905a6cda8d
--- /dev/null
+++ b/jstests/core/geo_box2.js
@@ -0,0 +1,52 @@
+
+t = db.geo_box2;
+
+t.drop()
+
+for (i=1; i<10; i++) {
+ for(j=1; j<10; j++) {
+ t.insert({loc : [i,j]});
+ }
+}
+
+t.ensureIndex({"loc" : "2d"} )
+assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "A1" );
+
+t.dropIndex( { "loc" : "2d" } )
+
+t.ensureIndex({"loc" : "2d"} , {"min" : 0, "max" : 10})
+assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "B1" );
+
+// 'indexBounds.loc' in explain output should be filled in with at least
+// one bounding box.
+// Actual values is dependent on implementation of 2d execution stage.
+var explain = t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).explain(true);
+print( 'explain = ' + tojson(explain) );
+assert.neq( undefined, explain.indexBounds.loc, "C1" );
+assert.gt( explain.indexBounds.loc.length, 0, "C2" );
+
+// Check covering.
+var covering = explain.indexBounds.loc[0];
+for (var i = 1; i < explain.indexBounds.loc.length; ++i) {
+ var currentBox = explain.indexBounds.loc[i];
+ // min X
+ covering[0][0] = Math.min(covering[0][0], currentBox[0][0]);
+ // min Y
+ covering[0][1] = Math.min(covering[0][1], currentBox[0][1]);
+ // max X
+ covering[1][0] = Math.max(covering[1][0], currentBox[1][0]);
+ // max Y
+ covering[1][1] = Math.max(covering[1][1], currentBox[1][1]);
+}
+print('covering computed from index bounds = ' +
+ '(' + covering[0][0] + ',' + covering[0][1] + ') -->> ' +
+ '(' + covering[1][0] + ',' + covering[1][1] + ')');
+// Compare covering against $box coordinates.
+// min X
+assert.lte(covering[0][0], 4);
+// min Y
+assert.lte(covering[0][1], 4);
+// max X
+assert.gte(covering[1][0], 6);
+// max Y
+assert.gte(covering[1][1], 6);
diff --git a/jstests/core/geo_box3.js b/jstests/core/geo_box3.js
new file mode 100644
index 00000000000..8941f637518
--- /dev/null
+++ b/jstests/core/geo_box3.js
@@ -0,0 +1,36 @@
+// How to construct a test to stress the flaw in SERVER-994:
+// construct an index, think up a bounding box inside the index that
+// doesn't include the center of the index, and put a point inside the
+// bounding box.
+
+// This is the bug reported in SERVER-994.
+t=db.geo_box3;
+t.drop();
+t.insert({ point : { x : -15000000, y : 10000000 } });
+t.ensureIndex( { point : "2d" } , { min : -21000000 , max : 21000000 } );
+var c=t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]} } });
+assert.eq(1, c.count(), "A1");
+
+// Same thing, modulo 1000000.
+t=db.geo_box3;
+t.drop();
+t.insert({ point : { x : -15, y : 10 } });
+t.ensureIndex( { point : "2d" } , { min : -21 , max : 21 } );
+var c=t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]} } });
+assert.eq(1, c.count(), "B1");
+
+// Two more examples, one where the index is centered at the origin,
+// one not.
+t=db.geo_box3;
+t.drop();
+t.insert({ point : { x : 1.0 , y : 1.0 } });
+t.ensureIndex( { point : "2d" } , { min : -2 , max : 2 } );
+var c=t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]} } });
+assert.eq(1, c.count(), "C1");
+
+t=db.geo_box3;
+t.drop();
+t.insert({ point : { x : 3.9 , y : 3.9 } });
+t.ensureIndex( { point : "2d" } , { min : 0 , max : 4 } );
+var c=t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]} } });
+assert.eq(1, c.count(), "D1");
diff --git a/jstests/core/geo_center_sphere1.js b/jstests/core/geo_center_sphere1.js
new file mode 100644
index 00000000000..4e4a658f058
--- /dev/null
+++ b/jstests/core/geo_center_sphere1.js
@@ -0,0 +1,98 @@
+t = db.geo_center_sphere1;
+
+function test(index) {
+ t.drop();
+ skip = 8 // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4)
+
+ searches = [
+ // x , y rad
+ [ [ 5 , 0 ] , 0.05 ] , // ~200 miles
+ [ [ 135 , 0 ] , 0.05 ] ,
+
+ [ [ 5 , 70 ] , 0.05 ] ,
+ [ [ 135 , 70 ] , 0.05 ] ,
+ [ [ 5 , 85 ] , 0.05 ] ,
+
+ [ [ 20 , 0 ] , 0.25 ] , // ~1000 miles
+ [ [ 20 , -45 ] , 0.25 ] ,
+ [ [ -20 , 60 ] , 0.25 ] ,
+ [ [ -20 , -70 ] , 0.25 ] ,
+ ];
+ correct = searches.map( function(z){ return []; } );
+
+ num = 0;
+
+ var bulk = t.initializeUnorderedBulkOp();
+ for ( x=-179; x<=179; x += skip ){
+ for ( y=-89; y<=89; y += skip ){
+ o = { _id : num++ , loc : [ x , y ] }
+ bulk.insert( o );
+ for ( i=0; i<searches.length; i++ ){
+ if ( Geo.sphereDistance( [ x , y ] , searches[i][0] ) <= searches[i][1])
+ correct[i].push( o );
+ }
+ }
+ gc(); // needed with low skip values
+ }
+ assert.writeOK(bulk.execute());
+
+ if (index) {
+ t.ensureIndex( { loc : index } );
+ }
+
+ for ( i=0; i<searches.length; i++ ){
+ print('------------');
+ print( tojson( searches[i] ) + "\t" + correct[i].length )
+ q = { loc : { $within : { $centerSphere : searches[i] } } }
+
+ //correct[i].forEach( printjson )
+ //printjson( q );
+ //t.find( q ).forEach( printjson )
+
+ //printjson(t.find( q ).explain())
+
+ //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
+ //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
+
+ var numExpected = correct[i].length
+ var x = correct[i].map( function(z){ return z._id; } )
+ var y = t.find(q).map( function(z){ return z._id; } )
+
+ missing = [];
+ epsilon = 0.001; // allow tenth of a percent error due to conversions
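+ // Reconciliation: a point missing from the query results whose distance is
+ // within epsilon (relative) of the search radius is a borderline rounding
+ // case, so drop it from the expected count; borderline extras are added
+ // back symmetrically below.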
+ for (var j=0; j<x.length; j++){
+ if (!Array.contains(y, x[j])){
+ missing.push(x[j]);
+ var obj = t.findOne({_id: x[j]});
+ var dist = Geo.sphereDistance(searches[i][0], obj.loc);
+ print("missing: " + tojson(obj) + " " + dist)
+ if ((Math.abs(dist - searches[i][1]) / dist) < epsilon)
+ numExpected -= 1;
+ }
+ }
+ for (var j=0; j<y.length; j++){
+ if (!Array.contains(x, y[j])){
+ missing.push(y[j]);
+ var obj = t.findOne({_id: y[j]});
+ var dist = Geo.sphereDistance(searches[i][0], obj.loc);
+ print("extra: " + tojson(obj) + " " + dist)
+ if ((Math.abs(dist - searches[i][1]) / dist) < epsilon)
+ numExpected += 1;
+ }
+ }
+
+
+ assert.eq( numExpected , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
+ assert.eq( numExpected , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+ if (index == "2d") {
+ var explain = t.find( q ).explain();
+ print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
+ assert.gt( numExpected * 2 , explain.nscanned ,
+ "nscanned : " + tojson( searches[i] ) )
+ }
+ }
+}
+
+test("2d")
+test("2dsphere")
+test(false)
diff --git a/jstests/core/geo_center_sphere2.js b/jstests/core/geo_center_sphere2.js
new file mode 100644
index 00000000000..8cb77a44c09
--- /dev/null
+++ b/jstests/core/geo_center_sphere2.js
@@ -0,0 +1,160 @@
+//
+// Tests the error handling of spherical queries
+// along with multi-location documents.
+// The two must be exercised together, since the error handling
+// has to cope with documents that carry multiple locations.
+//
+
+function deg2rad(arg) { return arg * Math.PI / 180.0; }
+function rad2deg(arg) { return arg * 180.0 / Math.PI; }
+
+function computexscandist(y, maxDistDegrees) {
+ return maxDistDegrees / Math.min(Math.cos(deg2rad(Math.min(89.0, y + maxDistDegrees))),
+ Math.cos(deg2rad(Math.max(-89.0, y - maxDistDegrees))));
+}
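+// Illustrative note: one degree of longitude spans cos(latitude) times its
+// equatorial length, so the x scan window is widened by the worst-case cosine
+// inside the search band. E.g. at y = 60 with maxDistDegrees = 1 the divisor
+// is cos(61 deg) ~= 0.48, slightly more than doubling xscandist.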
+
+function pointIsOK(startPoint, radius) {
+ yscandist = rad2deg(radius) + 0.01;
+ xscandist = computexscandist(startPoint[1], yscandist);
+ return (startPoint[0] + xscandist < 180)
+ && (startPoint[0] - xscandist > -180)
+ && (startPoint[1] + yscandist < 90)
+ && (startPoint[1] - yscandist > -90);
+}
+
+var numTests = 30;
+
+for ( var test = 0; test < numTests; test++ ) {
+ Random.srand( 1337 + test );
+
+ var radius = 5000 * Random.rand(); // km
+ radius = radius / 6371; // radians;
+ var numDocs = Math.floor( 400 * Random.rand() );
+ // TODO: Wrapping uses the error value to figure out what would overlap...
+ var bits = Math.floor( 5 + Random.rand() * 28 );
+ var maxPointsPerDoc = 50;
+
+ var t = db.sphere;
+
+ var randomPoint = function() {
+ return [ Random.rand() * 360 - 180, Random.rand() * 180 - 90 ];
+ };
+
+ // Get a start point that doesn't require wrapping
+ // TODO: Are we a bit too aggressive with wrapping issues?
+ var startPoint;
+ do {
+ t.drop();
+ startPoint = randomPoint();
+ t.ensureIndex( { loc : "2d" }, { bits : bits } );
+ } while (!pointIsOK(startPoint, radius))
+
+ var pointsIn = 0;
+ var pointsOut = 0;
+ var docsIn = 0;
+ var docsOut = 0;
+ var totalPoints = 0;
+
+ var bulk = t.initializeUnorderedBulkOp();
+ for ( var i = 0; i < numDocs; i++ ) {
+
+ var numPoints = Math.floor( Random.rand() * maxPointsPerDoc + 1 );
+ var docIn = false;
+ var multiPoint = [];
+
+ totalPoints += numPoints;
+
+ for ( var p = 0; p < numPoints; p++ ) {
+ var point = randomPoint();
+ multiPoint.push( point );
+
+ if ( Geo.sphereDistance( startPoint, point ) <= radius ) {
+ pointsIn++;
+ docIn = true;
+ } else {
+ pointsOut++;
+ }
+ }
+
+ bulk.insert( { loc : multiPoint } );
+
+ if ( docIn )
+ docsIn++;
+ else
+ docsOut++;
+
+ }
+
+ printjson( { test: test, radius : radius, bits : bits, numDocs : numDocs,
+ pointsIn : pointsIn, docsIn : docsIn, pointsOut : pointsOut,
+ docsOut : docsOut } );
+
+ assert.writeOK(bulk.execute());
+ assert.eq( docsIn + docsOut, numDocs );
+ assert.eq( pointsIn + pointsOut, totalPoints );
+
+ // $centerSphere
+ assert.eq( docsIn, t.find({ loc: { $within:
+ { $centerSphere: [ startPoint, radius ]}}} ).count() );
+
+ // $nearSphere
+ var cursor = t.find({ loc: { $nearSphere: startPoint, $maxDistance: radius }});
+ var results = cursor.limit( 2 * pointsIn ).toArray();
+
+ assert.eq( docsIn, results.length );
+
+ var distance = 0;
+ for ( var i = 0; i < results.length; i++ ) {
+ var minNewDistance = radius + 1;
+ for( var j = 0; j < results[i].loc.length; j++ ){
+ var newDistance = Geo.sphereDistance( startPoint, results[i].loc[j] );
+ if( newDistance < minNewDistance && newDistance >= distance ) {
+ minNewDistance = newDistance;
+ }
+ }
+
+ //print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is "
+ // + minNewDistance + " vs " + radius )
+
+ assert.lte( minNewDistance, radius );
+ assert.gte( minNewDistance, distance );
+ distance = minNewDistance;
+ }
+
+ // geoNear
+ results = db.runCommand({ geoNear: "sphere", near: startPoint, maxDistance: radius,
+ num : 2 * pointsIn, spherical : true } ).results;
+
+ /*
+ printjson( results );
+
+ for ( var j = 0; j < results[0].obj.loc.length; j++ ) {
+ var newDistance = Geo.sphereDistance( startPoint, results[0].obj.loc[j] )
+ if( newDistance <= radius ) print( results[0].obj.loc[j] + " : " + newDistance )
+ }
+ */
+
+ assert.eq( docsIn, results.length );
+
+ var distance = 0;
+ for ( var i = 0; i < results.length; i++ ) {
+ var retDistance = results[i].dis
+
+ // print( "Dist from : " + results[i].loc + " to " + startPoint + " is "
+ // + retDistance + " vs " + radius )
+
+ var distInObj = false;
+ for ( var j = 0; j < results[i].obj.loc.length && distInObj == false; j++ ) {
+ var newDistance = Geo.sphereDistance( startPoint, results[i].obj.loc[j] );
+ distInObj = ( newDistance >= retDistance - 0.0001 &&
+ newDistance <= retDistance + 0.0001 );
+ }
+
+ assert( distInObj );
+ assert.lte( retDistance, radius );
+ assert.gte( retDistance, distance );
+ distance = retDistance;
+ }
+}
+
diff --git a/jstests/core/geo_circle1.js b/jstests/core/geo_circle1.js
new file mode 100644
index 00000000000..852b60d186b
--- /dev/null
+++ b/jstests/core/geo_circle1.js
@@ -0,0 +1,43 @@
+
+t = db.geo_circle1;
+t.drop();
+
+searches = [
+ [ [ 5 , 5 ] , 3 ] ,
+ [ [ 5 , 5 ] , 1 ] ,
+ [ [ 5 , 5 ] , 5 ] ,
+ [ [ 0 , 5 ] , 5 ] ,
+];
+correct = searches.map( function(z){ return []; } );
+
+num = 0;
+
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ for ( i=0; i<searches.length; i++ )
+ if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
+ correct[i].push( o );
+ }
+}
+
+t.ensureIndex( { loc : "2d" } );
+
+for ( i=0; i<searches.length; i++ ){
+ //print( tojson( searches[i] ) + "\t" + correct[i].length )
+ q = { loc : { $within : { $center : searches[i] } } }
+
+ //correct[i].forEach( printjson )
+ //printjson( q );
+ //t.find( q ).forEach( printjson )
+
+ //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
+ //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
+
+ assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
+ assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+ var explain = t.find( q ).explain();
+ print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
+ assert.gt( correct[i].length * 2 , explain.nscanned , "nscanned : " + tojson( searches[i] ) );
+}
diff --git a/jstests/core/geo_circle1_noindex.js b/jstests/core/geo_circle1_noindex.js
new file mode 100644
index 00000000000..1569174a679
--- /dev/null
+++ b/jstests/core/geo_circle1_noindex.js
@@ -0,0 +1,29 @@
+// SERVER-7343: allow $within without a geo index.
+t = db.geo_circle1_noindex;
+t.drop();
+
+searches = [
+ [ [ 5 , 5 ] , 3 ] ,
+ [ [ 5 , 5 ] , 1 ] ,
+ [ [ 5 , 5 ] , 5 ] ,
+ [ [ 0 , 5 ] , 5 ] ,
+];
+correct = searches.map( function(z){ return []; } );
+
+num = 0;
+
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ for ( i=0; i<searches.length; i++ )
+ if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
+ correct[i].push( o );
+ }
+}
+
+for ( i=0; i<searches.length; i++ ){
+ q = { loc : { $within : { $center : searches[i] } } }
+ assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
+ assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+}
diff --git a/jstests/core/geo_circle2.js b/jstests/core/geo_circle2.js
new file mode 100644
index 00000000000..ef76884609c
--- /dev/null
+++ b/jstests/core/geo_circle2.js
@@ -0,0 +1,26 @@
+
+t = db.geo_circle2;
+t.drop();
+
+t.ensureIndex({loc : "2d", categories:1}, {"name":"placesIdx", "min": -100, "max": 100});
+
+t.insert({ "uid" : 368900 , "loc" : { "x" : -36 , "y" : -8} ,"categories" : [ "sports" , "hotel" , "restaurant"]});
+t.insert({ "uid" : 555344 , "loc" : { "x" : 13 , "y" : 29} ,"categories" : [ "sports" , "hotel"]});
+t.insert({ "uid" : 855878 , "loc" : { "x" : 38 , "y" : 30} ,"categories" : [ "sports" , "hotel"]});
+t.insert({ "uid" : 917347 , "loc" : { "x" : 15 , "y" : 46} ,"categories" : [ "hotel"]});
+t.insert({ "uid" : 647874 , "loc" : { "x" : 25 , "y" : 23} ,"categories" : [ "hotel" , "restaurant"]});
+t.insert({ "uid" : 518482 , "loc" : { "x" : 4 , "y" : 25} ,"categories" : [ ]});
+t.insert({ "uid" : 193466 , "loc" : { "x" : -39 , "y" : 22} ,"categories" : [ "sports" , "hotel"]});
+t.insert({ "uid" : 622442 , "loc" : { "x" : -24 , "y" : -46} ,"categories" : [ "hotel"]});
+t.insert({ "uid" : 297426 , "loc" : { "x" : 33 , "y" : -49} ,"categories" : [ "hotel"]});
+t.insert({ "uid" : 528464 , "loc" : { "x" : -43 , "y" : 48} ,"categories" : [ "restaurant"]});
+t.insert({ "uid" : 90579 , "loc" : { "x" : -4 , "y" : -23} ,"categories" : [ "restaurant"]});
+t.insert({ "uid" : 368895 , "loc" : { "x" : -8 , "y" : 14} ,"categories" : [ "sports" ]});
+t.insert({ "uid" : 355844 , "loc" : { "x" : 34 , "y" : -4} ,"categories" : [ "sports" , "hotel"]});
+
+
+assert.eq( 10 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}} } ).itcount() , "A" );
+assert.eq( 6 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}}, "categories" : "sports" } ).itcount() , "B" );
+
+// When not a $near or $within query, geo index should not be used. Fails if geo index is used.
+assert.eq( 1 , t.find({ "loc" : { "x" : -36, "y" : -8}, "categories" : "sports" }).itcount(), "C" )
diff --git a/jstests/core/geo_circle2a.js b/jstests/core/geo_circle2a.js
new file mode 100644
index 00000000000..67a6ba17243
--- /dev/null
+++ b/jstests/core/geo_circle2a.js
@@ -0,0 +1,37 @@
+// From SERVER-2381
+// Tests to make sure that nested multi-key indexing works for geo indexes and is not used for direct position
+// lookups
+
+var coll = db.geo_circle2a;
+coll.drop();
+coll.insert({ p : [1112,3473], t : [{ k : 'a', v : 'b' }, { k : 'c', v : 'd' }] })
+coll.ensureIndex({ p : '2d', 't.k' : 1 }, { min : 0, max : 10000 })
+
+// Succeeds, since on direct lookup should not use the index
+assert(1 == coll.find({p:[1112,3473],'t.k':'a'}).count(), "A")
+// Succeeds and uses the geo index
+assert(1 == coll.find({p:{$within:{$box:[[1111,3472],[1113,3475]]}}, 't.k' : 'a' }).count(), "B")
+
+
+coll.drop()
+coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' }, { k : 'key2', v : 123 } ] })
+coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] })
+
+coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 })
+
+// Succeeds, since should now lookup multi-keys correctly
+assert(2 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } } }).count(), "C")
+// Succeeds, and should not use geoindex
+assert(2 == coll.find({ point : [1, 10] }).count(), "D")
+assert(2 == coll.find({ point : [1, 10], "tags.v" : "value" }).count(), "E")
+assert(1 == coll.find({ point : [1, 10], "tags.v" : 123 }).count(), "F")
+
+
+coll.drop()
+coll.insert({ point:[ 1, 10 ], tags : [ { k : { 'hello' : 'world'}, v : 'value' }, { k : 'key2', v : 123 } ] })
+coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] })
+
+coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 })
+
+// Succeeds, should be able to look up the complex element
+assert(1 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } }, 'tags.k' : { 'hello' : 'world' } }).count(), "G") \ No newline at end of file
diff --git a/jstests/core/geo_circle3.js b/jstests/core/geo_circle3.js
new file mode 100644
index 00000000000..2882b47378e
--- /dev/null
+++ b/jstests/core/geo_circle3.js
@@ -0,0 +1,28 @@
+// SERVER-848 and SERVER-1191.
+db.places.drop()
+
+n = 0;
+db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52.0001 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 51.9999 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52.0001 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52.0001 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 51.9999 } })
+db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 51.9999 } })
+db.places.ensureIndex( { loc : "2d" } )
+radius=0.0001
+center=[5,52]
+//print(db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).count())
+// We expect 5 answers in the find(): the center point and its four axis neighbors.
+db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).forEach(printjson);
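+// Sanity assert mirroring geo_circle4.js, which checks the same data:
+assert.eq(5, db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).count());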
+
+
+// the result:
+// { "_id" : ObjectId("4bb1f2f088df513435bcb4e1"), "loc" : { "x" : 5, "y" : 52 } }
+// { "_id" : ObjectId("4bb1f54383459c40223a8ae7"), "loc" : { "x" : 5, "y" : 51.9999 } }
+// { "_id" : ObjectId("4bb1f54583459c40223a8aeb"), "loc" : { "x" : 5.0001, "y" : 51.9999 } }
+// { "_id" : ObjectId("4bb1f2e588df513435bcb4e0"), "loc" : { "x" : 4.9999, "y" : 52 } }
+// { "_id" : ObjectId("4bb1f30888df513435bcb4e2"), "loc" : { "x" : 5.0001, "y" : 52 } }
+// { "_id" : ObjectId("4bb1f54383459c40223a8ae8"), "loc" : { "x" : 4.9999, "y" : 52.0001 } }
diff --git a/jstests/core/geo_circle4.js b/jstests/core/geo_circle4.js
new file mode 100644
index 00000000000..dc3c564389e
--- /dev/null
+++ b/jstests/core/geo_circle4.js
@@ -0,0 +1,31 @@
+// Reported as server-848.
+function test(index) {
+ db.server848.drop();
+
+ radius=0.0001;
+ center=[5,52];
+
+ db.server848.save({ "_id": 1, "loc" : { "x" : 4.9999, "y" : 52 } });
+ db.server848.save({ "_id": 2, "loc" : { "x" : 5, "y" : 52 } });
+ db.server848.save({ "_id": 3, "loc" : { "x" : 5.0001, "y" : 52 } });
+ db.server848.save({ "_id": 4, "loc" : { "x" : 5, "y" : 52.0001 } });
+ db.server848.save({ "_id": 5, "loc" : { "x" : 5, "y" : 51.9999 } });
+ db.server848.save({ "_id": 6, "loc" : { "x" : 4.9999, "y" : 52.0001 } });
+ db.server848.save({ "_id": 7, "loc" : { "x" : 5.0001, "y" : 52.0001 } });
+ db.server848.save({ "_id": 8, "loc" : { "x" : 4.9999, "y" : 51.9999 } });
+ db.server848.save({ "_id": 9, "loc" : { "x" : 5.0001, "y" : 51.9999 } });
+ if (index) {
+ db.server848.ensureIndex( { loc : "2d" } );
+ }
+ r=db.server848.find({"loc" : {"$within" : {"$center" : [center, radius]}}}, {_id:1});
+ assert.eq(5, r.count(), "A1");
+ // FIXME: surely code like this belongs in utils.js.
+ a=r.toArray();
+ x=[];
+ for (k in a) { x.push(a[k]["_id"]) }
+ x.sort()
+ assert.eq([1,2,3,4,5], x, "B1");
+}
+
+test(false)
+test(true)
diff --git a/jstests/core/geo_circle5.js b/jstests/core/geo_circle5.js
new file mode 100644
index 00000000000..fea9c56fd02
--- /dev/null
+++ b/jstests/core/geo_circle5.js
@@ -0,0 +1,28 @@
+// reported as server-1238.
+
+db.server1238.drop();
+db.server1238.remove({})
+db.server1238.save({ loc: [ 5000000, 900000 ], id: 1})
+db.server1238.save({ loc: [ 5000000, 900000 ], id: 2})
+db.server1238.ensureIndex( { loc : "2d" } , { min : -21000000 , max : 21000000 } )
+db.server1238.save({ loc: [ 5000000, 900000 ], id: 3})
+db.server1238.save({ loc: [ 5000000, 900000 ], id: 4})
+
+c1=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000000, 900000], 1.0]}}}).count()
+
+c2=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).count()
+
+
+assert.eq(4, c1, "A1");
+assert.eq(c1, c2, "B1");
+//print(db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).toArray());
+// [
+// {
+// "_id" : ObjectId("4c173306f5d9d34a46cb7b11"),
+// "loc" : [
+// 5000000,
+// 900000
+// ],
+// "id" : 4
+// }
+// ]
diff --git a/jstests/core/geo_distinct.js b/jstests/core/geo_distinct.js
new file mode 100644
index 00000000000..db6806fe747
--- /dev/null
+++ b/jstests/core/geo_distinct.js
@@ -0,0 +1,14 @@
+// Test distinct with geo queries SERVER-2135
+
+t = db.commits
+t.drop()
+
+t.save( { _id : ObjectId( "4ce63ec2f360622431000013" ), loc : [ 55.59664, 13.00156 ], author : "FredrikL" } )
+
+assert.commandWorked( db.runCommand( { distinct : 'commits', key : 'loc' } ) );
+
+t.ensureIndex( { loc : '2d' } )
+
+printjson( t.getIndexes() )
+
+assert.commandWorked( db.runCommand( { distinct : 'commits', key : 'loc' } ) );
diff --git a/jstests/core/geo_exactfetch.js b/jstests/core/geo_exactfetch.js
new file mode 100644
index 00000000000..c0a0e714eae
--- /dev/null
+++ b/jstests/core/geo_exactfetch.js
@@ -0,0 +1,17 @@
+// SERVER-7322
+t = db.geo_exactfetch
+t.drop();
+
+function test(indexname) {
+ assert.eq(1, t.find({lon_lat: [-71.34895, 42.46037]}).itcount(), indexname);
+ t.ensureIndex({lon_lat: indexname, population: -1})
+ assert.eq(2, t.find({lon_lat: {$nearSphere: [-71.34895, 42.46037]}}).itcount(), indexname);
+ assert.eq(1, t.find({lon_lat: [-71.34895, 42.46037]}).itcount(), indexname);
+ t.dropIndex({lon_lat: indexname, population: -1})
+}
+
+t.insert({ city: "B", lon_lat: [-71.34895, 42.46037], population: 1000})
+t.insert({ city: "A", lon_lat: [1.48736, 42.55327], population: 100})
+
+test("2d")
+test("2dsphere")
diff --git a/jstests/core/geo_fiddly_box.js b/jstests/core/geo_fiddly_box.js
new file mode 100644
index 00000000000..95f33a32987
--- /dev/null
+++ b/jstests/core/geo_fiddly_box.js
@@ -0,0 +1,46 @@
+// Reproduces simple test for SERVER-2832
+
+// The setup to reproduce it is to create a set of points where the
+// "expand" portion of the geo lookup widens the 2d range in only one
+// direction (so points are required on either side of the expanding range).
+
+t = db.geo_fiddly_box
+
+t.drop();
+t.ensureIndex({ loc : "2d" })
+
+t.insert({ "loc" : [3, 1] })
+t.insert({ "loc" : [3, 0.5] })
+t.insert({ "loc" : [3, 0.25] })
+t.insert({ "loc" : [3, -0.01] })
+t.insert({ "loc" : [3, -0.25] })
+t.insert({ "loc" : [3, -0.5] })
+t.insert({ "loc" : [3, -1] })
+
+// OK!
+print( t.count() )
+assert.eq( 7, t.count({ "loc" : { "$within" : { "$box" : [ [2, -2], [46, 2] ] } } }), "Not all locations found!" );
+
+
+// Test normal lookup of a small square of points as a sanity check.
+
+epsilon = 0.0001;
+min = -1
+max = 1
+step = 1
+numItems = 0;
+
+t.drop()
+t.ensureIndex({ loc : "2d" }, { max : max + epsilon / 2, min : min - epsilon / 2 })
+
+for(var x = min; x <= max; x += step){
+ for(var y = min; y <= max; y += step){
+ t.insert({ "loc" : { x : x, y : y } })
+ numItems++;
+ }
+}
+
+assert.eq( numItems, t.count({ loc : { $within : { $box : [[min - epsilon / 3,
+ min - epsilon / 3],
+ [max + epsilon / 3,
+ max + epsilon / 3]] } } }), "Not all locations found!");
diff --git a/jstests/core/geo_fiddly_box2.js b/jstests/core/geo_fiddly_box2.js
new file mode 100644
index 00000000000..0588abfa1de
--- /dev/null
+++ b/jstests/core/geo_fiddly_box2.js
@@ -0,0 +1,32 @@
+// Reproduces simple test for SERVER-2115
+
+// The setup to reproduce it is to create a set of points and very large index
+// bounds, so that exact lookups on the points are required for correct results.
+
+t = db.geo_fiddly_box2
+t.drop()
+
+t.insert( { "letter" : "S", "position" : [ -3, 0 ] } )
+t.insert( { "letter" : "C", "position" : [ -2, 0 ] } )
+t.insert( { "letter" : "R", "position" : [ -1, 0 ] } )
+t.insert( { "letter" : "A", "position" : [ 0, 0 ] } )
+t.insert( { "letter" : "B", "position" : [ 1, 0 ] } )
+t.insert( { "letter" : "B", "position" : [ 2, 0 ] } )
+t.insert( { "letter" : "L", "position" : [ 3, 0 ] } )
+t.insert( { "letter" : "E", "position" : [ 4, 0 ] } )
+
+t.ensureIndex( { position : "2d" } )
+result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
+assert.eq( 4, result.count() )
+
+t.dropIndex( { position : "2d" } )
+t.ensureIndex( { position : "2d" }, { min : -10000000, max : 10000000 } )
+
+result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
+assert.eq( 4, result.count() )
+
+t.dropIndex( { position : "2d" } )
+t.ensureIndex( { position : "2d" }, { min : -1000000000, max : 1000000000 } )
+
+result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } )
+assert.eq( 4, result.count() )
diff --git a/jstests/core/geo_group.js b/jstests/core/geo_group.js
new file mode 100644
index 00000000000..32e06890568
--- /dev/null
+++ b/jstests/core/geo_group.js
@@ -0,0 +1,37 @@
+t = db.geo_group;
+t.drop();
+
+n = 1;
+var bulk = t.initializeUnorderedBulkOp();
+for ( var x=-100; x<100; x+=2 ){
+ for ( var y=-100; y<100; y+=2 ){
+ bulk.insert( { _id : n++ , loc : [ x , y ] } );
+ }
+}
+assert.writeOK(bulk.execute());
+
+t.ensureIndex( { loc : "2d" } );
+
+// Test basic count with $near
+assert.eq(t.find().count(), 10000);
+assert.eq(t.find( { loc : { $within : {$center : [[56,8], 10]}}}).count(), 81);
+assert.eq(t.find( { loc : { $near : [56, 8, 10] } } ).count(), 81);
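+// Note: the third element of the legacy $near array acts as a max distance,
+// which is why this count matches the $center query with radius 10 above.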
+
+// Test basic group that effectively does a count
+assert.eq(
+ t.group( {
+ reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1} },
+ initial : { sums:{count:0} } }
+ ),
+ [ { "sums" : { "count" : 10000 } } ]
+);
+
+// Test basic group + $near that does a count
+assert.eq(
+ t.group( {
+ reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1} },
+ initial : { sums:{count:0} },
+ cond : { loc : { $near : [56, 8, 10] } } }
+ ),
+ [ { "sums" : { "count" : 81 } } ]
+);
diff --git a/jstests/core/geo_haystack1.js b/jstests/core/geo_haystack1.js
new file mode 100644
index 00000000000..f4035ecbcf2
--- /dev/null
+++ b/jstests/core/geo_haystack1.js
@@ -0,0 +1,59 @@
+
+t = db.geo_haystack1
+t.drop()
+
+function distance( a , b ){
+ var x = a[0] - b[0];
+ var y = a[1] - b[1];
+ return Math.sqrt( ( x * x ) + ( y * y ) );
+}
+
+function distanceTotal( a , arr , f ){
+ var total = 0;
+ for ( var i=0; i<arr.length; i++ ){
+ total += distance( a , arr[i][f] );
+ }
+ return total;
+}
+
+queries = [
+ { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
+]
+
+answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } )
+
+
+n = 0;
+for ( x=0; x<20; x++ ){
+ for ( y=0; y<20; y++ ){
+ t.insert( { _id : n , loc : [ x , y ] , z : n % 5 } );
+
+ for ( i=0; i<queries.length; i++ ){
+ var d = distance( queries[i].near , [ x , y ] )
+ if ( d > queries[i].maxDistance )
+ continue;
+ if ( queries[i].search.z != n % 5 )
+ continue;
+ answers[i].results.push( { _id : n , loc : [ x , y ]} )
+ answers[i].totalDistance += d;
+ }
+
+ n++;
+ }
+}
+
+t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
+
+for ( i=0; i<queries.length; i++ ){
+ print( "---------" );
+ printjson( queries[i] );
+ res = t.runCommand( "geoSearch" , queries[i] )
+ print( "\t" + tojson( res.stats ) );
+ print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
+ assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" )
+ assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" )
+ //printjson( res );
+ //printjson( answers[i].length );
+}
diff --git a/jstests/core/geo_haystack2.js b/jstests/core/geo_haystack2.js
new file mode 100644
index 00000000000..2e0eb5710fb
--- /dev/null
+++ b/jstests/core/geo_haystack2.js
@@ -0,0 +1,60 @@
+
+t = db.geo_haystack2
+t.drop()
+
+function distance( a , b ){
+ var x = a[0] - b[0];
+ var y = a[1] - b[1];
+ return Math.sqrt( ( x * x ) + ( y * y ) );
+}
+
+function distanceTotal( a , arr , f ){
+ var total = 0;
+ for ( var i=0; i<arr.length; i++ ){
+ total += distance( a , arr[i][f] );
+ }
+ return total;
+}
+
+queries = [
+ { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
+]
+
+answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } )
+
+
+n = 0;
+for ( x=0; x<20; x++ ){
+ for ( y=0; y<20; y++ ){
+ t.insert( { _id : n , loc : [ x , y ] , z : [ n % 10 , ( n + 5 ) % 10 ] } );
+
+ for ( i=0; i<queries.length; i++ ){
+ var d = distance( queries[i].near , [ x , y ] )
+ if ( d > queries[i].maxDistance )
+ continue;
+ if ( queries[i].search.z != n % 10 &&
+ queries[i].search.z != ( n + 5 ) % 10 )
+ continue;
+ answers[i].results.push( { _id : n , loc : [ x , y ] } )
+ answers[i].totalDistance += d;
+ }
+
+ n++;
+ }
+}
+
+t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
+
+for ( i=0; i<queries.length; i++ ){
+ print( "---------" );
+ printjson( queries[i] );
+ res = t.runCommand( "geoSearch" , queries[i] )
+ print( "\t" + tojson( res.stats ) );
+ print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
+ assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" )
+ assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" )
+ //printjson( res );
+ //printjson( answers[i].length );
+}
diff --git a/jstests/core/geo_haystack3.js b/jstests/core/geo_haystack3.js
new file mode 100644
index 00000000000..f5a2ab7becb
--- /dev/null
+++ b/jstests/core/geo_haystack3.js
@@ -0,0 +1,28 @@
+t = db.geo_haystack3
+t.drop()
+
+t.insert({ pos : { long : 34, lat : 33 }})
+t.insert({ pos : { long : 34.2, lat : 33.3 }, type : ["bar", "restaurant" ]})
+t.insert({ pos : { long : 34.2, lat : 37.3 }, type : ["bar", "chicken" ]})
+t.insert({ pos : { long : 59.1, lat : 87.2 }, type : ["baz", "office" ]})
+t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 })
+
+// This only matches the first insert. What do we want? First 3 or just the first?
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : {}, limit : 30 })
+assert.eq(1, res.stats.n, "Right # of matches");
+assert.eq(34, res.results[0].pos.long, "expected longitude");
+assert.eq(33, res.results[0].pos.lat, "expected latitude");
+
+// This matches the middle 2 of the 4 elements above.
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
+ limit : 2 })
+assert.eq(2, res.stats.n, "Right # of matches");
+assert.eq("bar", res.results[0].type[0], "expected value for type");
+assert.eq("bar", res.results[1].type[0], "expected value for type");
+assert.neq(res.results[0].type[1], res.results[1].type[1], "should get 2 diff results");
+
+// This is a test for the limit being reached/only 1 returned.
+res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
+ limit : 1 })
+assert.eq(1, res.stats.n, "Right # of matches");
+assert.eq("bar", res.results[0].type[0], "expected value for type");
diff --git a/jstests/core/geo_invalid_polygon.js b/jstests/core/geo_invalid_polygon.js
new file mode 100644
index 00000000000..9ccf619dae5
--- /dev/null
+++ b/jstests/core/geo_invalid_polygon.js
@@ -0,0 +1,29 @@
+// With invalid geometry, error message should include _id
+// SERVER-8992
+t = db.geo_invalid_polygon;
+t.drop();
+
+// Self-intersecting polygon, which triggers
+// "Exterior shell of polygon is invalid".
+var geometry = {
+ type: "Polygon",
+ coordinates: [
+ [
+ [ 0, 0 ],
+ [ 0, 1 ],
+ [ 1, 1 ],
+ [-2,-1 ],
+ [ 0, 0 ]
+ ]
+ ]
+};
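+// The edge from [1, 1] to [-2, -1] crosses the edge from [0, 0] to [0, 1]
+// at (0, 1/3), which is what makes the exterior shell self-intersecting.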
+
+t.insert({_id: 42, geometry: geometry});
+var err = assert.writeError(t.createIndex({geometry: '2dsphere'}));
+
+// Document's _id should be in error message.
+assert(
+ -1 != err.getWriteError().errmsg.indexOf('42'),
+ "Error message didn't contain document _id.\nMessage: \"" + err.errmsg
+ + '"\n'
+);
diff --git a/jstests/core/geo_mapreduce.js b/jstests/core/geo_mapreduce.js
new file mode 100644
index 00000000000..a6ecf763ae1
--- /dev/null
+++ b/jstests/core/geo_mapreduce.js
@@ -0,0 +1,56 @@
+// Test script from SERVER-1742
+
+// MongoDB test script for mapreduce with geo query
+
+// setup test collection
+db.apples.drop()
+db.apples.insert( { "geo" : { "lat" : 32.68331909, "long" : 69.41610718 }, "apples" : 5 } );
+db.apples.insert( { "geo" : { "lat" : 35.01860809, "long" : 70.92027283 }, "apples" : 2 } );
+db.apples.insert( { "geo" : { "lat" : 31.11639023, "long" : 64.19970703 }, "apples" : 11 } );
+db.apples.insert( { "geo" : { "lat" : 32.64500046, "long" : 69.36251068 }, "apples" : 4 } );
+db.apples.insert( { "geo" : { "lat" : 33.23638916, "long" : 69.81360626 }, "apples" : 9 } );
+db.apples.ensureIndex( { "geo" : "2d" } );
+
+center = [ 32.68, 69.41 ];
+radius = 10 / 111; // 10km; 1 arcdegree ~= 111km
+geo_query = { geo : { '$within' : { '$center' : [ center, radius ] } } };
+
+// geo query on collection works fine
+res = db.apples.find( geo_query );
+assert.eq( 2, res.count() );
+
+// map function
+m = function() {
+ emit( null, { "apples" : this.apples } );
+};
+
+// reduce function
+r = function(key, values) {
+ var total = 0;
+ for ( var i = 0; i < values.length; i++ ) {
+ total += values[i].apples;
+ }
+ return { "apples" : total };
+};
+
+// mapreduce without geo query works fine
+res = db.apples.mapReduce( m, r, { out : { inline : 1 } } );
+
+printjson( res )
+total = res.results[0];
+assert.eq( 31, total.value.apples );
+
+// mapreduce with regular query works fine too
+res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : { apples : { '$lt' : 9 } } } );
+total = res.results[0];
+assert.eq( 11, total.value.apples );
+
+// mapreduce with geo query gives error on mongodb version 1.6.2
+// uncaught exception: map reduce failed: {
+// "assertion" : "manual matcher config not allowed",
+// "assertionCode" : 13285,
+// "errmsg" : "db assertion failure",
+// "ok" : 0 }
+res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : geo_query } );
+total = res.results[0];
+assert.eq( 9, total.value.apples );
diff --git a/jstests/core/geo_mapreduce2.js b/jstests/core/geo_mapreduce2.js
new file mode 100644
index 00000000000..9c393457c7b
--- /dev/null
+++ b/jstests/core/geo_mapreduce2.js
@@ -0,0 +1,36 @@
+// Geo mapreduce 2 from SERVER-3478
+
+var coll = db.geoMR2
+coll.drop()
+
+for( var i = 0; i < 300; i++ )
+ coll.insert({ i : i, location : [ 10, 20 ] })
+
+coll.ensureIndex({ location : "2d" })
+
+// map function
+m = function() {
+ emit( null, { count : this.i } )
+}
+
+// reduce function
+r = function( key, values ) {
+
+ var total = 0
+ for ( var i = 0; i < values.length; i++ ) {
+ total += values[i].count
+ }
+
+ return { count : total }
+};
+
+try{ coll.mapReduce( m, r,
+ { out : coll.getName() + "_mr",
+ sort : { _id : 1 },
+ query : { 'location' : { $within : { $centerSphere : [[ 10, 20 ], 0.01 ] } } } })
+
+}
+catch( e ){
+ // This is expected, since we can't do an in-memory sort for mapReduce
+ printjson( e )
+}
diff --git a/jstests/core/geo_max.js b/jstests/core/geo_max.js
new file mode 100644
index 00000000000..03932004b75
--- /dev/null
+++ b/jstests/core/geo_max.js
@@ -0,0 +1,49 @@
+// Test where points are on _max (180)
+// Using GeoNearRandom because this test needs a lot of points in the index.
+// If there aren't enough points the test passes even if the code is broken.
+load("jstests/libs/geo_near_random.js");
+
+var test = new GeoNearRandomTest("geo_near_max")
+
+test.insertPts(/*numPts*/1000, /*indexBounds*/{min:-180, max:180}, /*scale*/0.9);
+
+test.t.insert({loc: [ 180, 0]})
+test.t.insert({loc: [-180, 0]})
+test.t.insert({loc: [ 179.999, 0]})
+test.t.insert({loc: [-179.999, 0]})
+
+assertXIsNegative = function(obj) { assert.lt(obj.loc[0], 0); }
+assertXIsPositive = function(obj) { assert.gt(obj.loc[0], 0); }
+
+assert.eq(test.t.count({loc:{$within: {$center:[[ 180, 0], 1]}}}), 2)
+assert.eq(test.t.count({loc:{$within: {$center:[[-180, 0], 1]}}}), 2)
+test.t.find({loc:{$within: {$center:[[ 180, 0], 1]}}}).forEach(assertXIsPositive)
+test.t.find({loc:{$within: {$center:[[-180, 0], 1]}}}).forEach(assertXIsNegative)
+
+var oneDegree = Math.PI / 180; // in radians
+
+// errors out due to SERVER-1760
+if (0) {
+assert.eq(test.t.count({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}), 2)
+assert.eq(test.t.count({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}), 2)
+test.t.find({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}).forEach(assertXIsPositive)
+test.t.find({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}).forEach(assertXIsNegative)
+}
+
+assert.eq(test.t.count({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}), 2)
+assert.eq(test.t.count({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}), 2)
+test.t.find({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}).forEach(assertXIsPositive)
+test.t.find({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}).forEach(assertXIsNegative)
+
+assert.eq(test.t.count({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}), 2)
+assert.eq(test.t.count({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}), 2)
+test.t.find({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsPositive)
+test.t.find({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsNegative)
+
+assert.eq(test.t.find({loc:{$near:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}])
+assert.eq(test.t.find({loc:{$near:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}])
+
+// These will need to change when SERVER-1760 is fixed
+assert.eq(test.t.find({loc:{$nearSphere:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}])
+assert.eq(test.t.find({loc:{$nearSphere:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}])
+
diff --git a/jstests/core/geo_mindistance.js b/jstests/core/geo_mindistance.js
new file mode 100644
index 00000000000..b429eacb708
--- /dev/null
+++ b/jstests/core/geo_mindistance.js
@@ -0,0 +1,214 @@
+/* Test $minDistance option for $near and $nearSphere queries, and geoNear
+ * command. SERVER-9395.
+*/
+var t = db.geo_mindistance;
+t.drop();
+
+//
+// Useful constants and functions.
+//
+
+var km = 1000,
+ earthRadiusMeters = 6378.1 * km;
+
+function metersToRadians(m) { return m / earthRadiusMeters; }
+
+/* Count documents within some radius of (0, 0), in kilometers.
+ * With this function we can use the existing $maxDistance option to test
+ * the newer $minDistance option's behavior.
+ */
+function n_docs_within(radius_km) {
+ // geoNear's distances are in meters for geoJSON points.
+ var cmdResult = db.runCommand({
+ geoNear: t.getName(),
+ near: {type: 'Point', coordinates: [0, 0]},
+ spherical: true,
+ maxDistance: radius_km * km,
+ num: 1000
+ });
+
+ return cmdResult.results.length;
+}
+
+//
+// Setup.
+//
+
+/* Make 121 points from long, lat = (0, 0) (in Gulf of Guinea) to (10, 10)
+ * (inland Nigeria).
+ */
+for (var x = 0; x <= 10; x += 1) {
+ for (var y = 0; y <= 10; y += 1) {
+ t.insert({loc: [x, y]});
+ }
+}
+
+/* $minDistance is supported for 2dsphere index only, not 2d or geoHaystack. */
+t.ensureIndex({loc: "2dsphere"});
+
+var n_docs = t.count(),
+ geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
+ legacyPoint = [0, 0];
+
+//
+// Test $near with GeoJSON point (required for $near with 2dsphere index).
+// min/maxDistance are in meters.
+//
+
+var n_min1400_count = t.find({loc: {
+ $near: {$geometry: geoJSONPoint, $minDistance: 1400 * km
+}}}).count();
+
+assert.eq(
+ n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400))
+ + " points $near (0, 0) with $minDistance 1400 km, got "
+ + n_min1400_count
+);
+
+var n_bw500_and_1000_count = t.find({loc: {
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: 500 * km,
+ $maxDistance: 1000 * km
+}}}).count();
+
+assert.eq(
+ n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500))
+ + " points $near (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
+ + n_bw500_and_1000_count
+);
+
+//
+// $nearSphere with 2dsphere index can take a legacy or GeoJSON point.
+// First test $nearSphere with legacy point.
+// min/maxDistance are in radians.
+//
+
+n_min1400_count = t.find({loc: {
+ $nearSphere: legacyPoint, $minDistance: metersToRadians(1400 * km)
+}}).count();
+
+assert.eq(
+ n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400))
+ + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
+ + n_min1400_count
+);
+
+n_bw500_and_1000_count = t.find({loc: {
+ $nearSphere: legacyPoint,
+ $minDistance: metersToRadians(500 * km),
+ $maxDistance: metersToRadians(1000 * km)
+}}).count();
+
+assert.eq(
+ n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500))
+ + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
+ + n_bw500_and_1000_count
+);
+
+//
+// Test $nearSphere with GeoJSON point.
+// min/maxDistance are in meters.
+//
+
+n_min1400_count = t.find({loc: {
+ $nearSphere: geoJSONPoint, $minDistance: 1400 * km
+}}).count();
+
+assert.eq(
+ n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400))
+ + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
+ + n_min1400_count
+);
+
+n_bw500_and_1000_count = t.find({loc: {
+ $nearSphere: geoJSONPoint,
+ $minDistance: 500 * km,
+ $maxDistance: 1000 * km
+}}).count();
+
+assert.eq(
+ n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500))
+ + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
+ + n_bw500_and_1000_count
+);
+
+
+//
+// Test geoNear command with GeoJSON point.
+// Distances are in meters.
+//
+
+var cmdResult = db.runCommand({
+ geoNear: t.getName(),
+ near: {type: 'Point', coordinates: [0, 0]},
+ minDistance: 1400 * km,
+ spherical: true // spherical required for 2dsphere index
+});
+assert.eq(
+ n_docs - n_docs_within(1400),
+ cmdResult.results.length,
+ "Expected " + (n_docs - n_docs_within(1400))
+ + " points geoNear (0, 0) with $minDistance 1400 km, got "
+ + cmdResult.results.length
+);
+
+cmdResult = db.runCommand({
+ geoNear: t.getName(),
+ near: {type: 'Point', coordinates: [0, 0]},
+ minDistance: 500 * km,
+ maxDistance: 1000 * km,
+ spherical: true
+});
+assert.eq(
+ n_docs_within(1000) - n_docs_within(500),
+ cmdResult.results.length,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500))
+ + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
+ + cmdResult.results.length
+);
+
+//
+// Test geoNear command with legacy point.
+// Distances are in radians.
+//
+
+cmdResult = db.runCommand({
+ geoNear: t.getName(),
+ near: legacyPoint,
+ minDistance: metersToRadians(1400 * km),
+ spherical: true // spherical required for 2dsphere index
+});
+assert.eq(
+ n_docs - n_docs_within(1400),
+ cmdResult.results.length,
+ "Expected " + (n_docs - n_docs_within(1400))
+ + " points geoNear (0, 0) with $minDistance 1400 km, got "
+ + cmdResult.results.length
+);
+
+cmdResult = db.runCommand({
+ geoNear: t.getName(),
+ near: legacyPoint,
+ minDistance: metersToRadians(500 * km),
+ maxDistance: metersToRadians(1000 * km),
+ spherical: true
+});
+assert.eq(
+ n_docs_within(1000) - n_docs_within(500),
+ cmdResult.results.length,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500))
+ + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
+ + cmdResult.results.length
+);
diff --git a/jstests/core/geo_mindistance_boundaries.js b/jstests/core/geo_mindistance_boundaries.js
new file mode 100644
index 00000000000..80e933827b6
--- /dev/null
+++ b/jstests/core/geo_mindistance_boundaries.js
@@ -0,0 +1,124 @@
+/* Test boundary conditions for $minDistance option for $near and $nearSphere
+ * queries. SERVER-9395.
+ */
+var t = db.geo_mindistance_boundaries;
+t.drop();
+t.insert({loc: [1, 0]}); // 1 degree of longitude from origin.
+
+/* $minDistance is supported for 2dsphere index only, not 2d or geoHaystack. */
+t.ensureIndex({loc: "2dsphere"});
+
+//
+// Useful constants.
+//
+
+var km = 1000,
+ earthRadiusMeters = 6378.1 * km,
+ geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
+ // One degree of longitude at the equator, about 111 km.
+ degreeInMeters = 2 * Math.PI * earthRadiusMeters / 360,
+ metersEpsilon = Number.MIN_VALUE;
+
+/* Grow epsilon's exponent until epsilon exceeds the margin of error for the
+ * representation of degreeInMeters. The server uses 64-bit math, too, so we'll
+ * find the smallest epsilon the server can detect.
+ */
+while (degreeInMeters + metersEpsilon == degreeInMeters) { metersEpsilon *= 2; }
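+// Sanity check: by construction of the loop, metersEpsilon is now just large
+// enough to be detectable at this magnitude, and half of it is lost to rounding.
+assert(degreeInMeters + metersEpsilon > degreeInMeters);
+assert.eq(degreeInMeters + metersEpsilon / 2, degreeInMeters);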
+
+//
+// Test boundary conditions for $near and GeoJSON, in meters.
+//
+
+// $minDistance must appear inside the $near document, not alongside it.
+assert.throws(function() { t.find({loc:{$near:{$geometry: geoJSONPoint},
+ $minDistance:0.1}}).itcount();});
+
+assert.eq(
+ 1, t.find({loc: {
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters
+ }}}).itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin"
+);
+
+assert.eq(
+ 1, t.find({loc: {
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters - metersEpsilon
+ }}}).itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
+);
+
+assert.eq(
+ 0, t.find({loc: {
+ $near: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters + metersEpsilon
+ }}}).itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
+);
+
+//
+// Test boundary conditions for $nearSphere and GeoJSON, in meters.
+//
+
+assert.eq(
+ 1, t.find({loc: {
+ $nearSphere: {$geometry: geoJSONPoint,
+ $minDistance: degreeInMeters
+ }}}).itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin"
+);
+
+assert.eq(
+ 1, t.find({loc: {
+ $nearSphere: geoJSONPoint,
+ $minDistance: degreeInMeters - metersEpsilon
+ }}).itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
+);
+
+assert.eq(
+ 0, t.find({loc: {
+ $nearSphere: geoJSONPoint,
+ $minDistance: degreeInMeters + metersEpsilon
+ }}).itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
+);
+
+//
+// Test boundary conditions for $nearSphere and a legacy point, in radians.
+//
+// With a legacy point, $minDistance requires $nearSphere; $near is not
+// supported.
+//
+
+var legacyPoint = [0, 0],
+ degreeInRadians = 2 * Math.PI / 360,
+ radiansEpsilon = Number.MIN_VALUE;
+
+while (1 + radiansEpsilon == 1) { radiansEpsilon *= 2; }
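+// As above: radiansEpsilon is detectable at magnitude 1, and half of it is not.
+assert(1 + radiansEpsilon > 1);
+assert.eq(1 + radiansEpsilon / 2, 1);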
+
+assert.eq(
+ 1, t.find({loc: {
+ $nearSphere: legacyPoint,
+ $minDistance: degreeInRadians
+ }}).itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin"
+);
+
+assert.eq(
+ 1, t.find({loc: {
+ $nearSphere: legacyPoint,
+ $minDistance: degreeInRadians - radiansEpsilon
+ }}).itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
+);
+
+assert.eq(
+ 0, t.find({loc: {
+ $nearSphere: legacyPoint,
+ $minDistance: degreeInRadians + radiansEpsilon
+ }}).itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
+);
diff --git a/jstests/core/geo_multikey0.js b/jstests/core/geo_multikey0.js
new file mode 100644
index 00000000000..7d0ea57e329
--- /dev/null
+++ b/jstests/core/geo_multikey0.js
@@ -0,0 +1,26 @@
+// Multikey geo values tests - SERVER-3793.
+
+t = db.jstests_geo_multikey0;
+t.drop();
+
+// Check that conflicting constraints are satisfied by different elements of the loc array.
+t.save( {loc:[{x:20,y:30},{x:30,y:40}]} );
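+// The element {x:20,y:30} matches the equality and satisfies the $lt bound,
+// while {x:30,y:40} satisfies the $gt bound; no single element satisfies both.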
+assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
+
+// Check that conflicting constraints are satisfied by different array elements with a 2d index on loc.
+if ( 0 ) { // SERVER-3793
+t.ensureIndex( {loc:'2d'} );
+assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
+}
+
+t.drop();
+
+// Check that conflicting constraints are satisfied by different elements of the array x.
+t.save( {loc:[20,30],x:[1,2]} );
+assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
+
+// Check that conflicting constraints are satisfied by different elements of x with a 2d index on loc,x.
+if ( 0 ) { // SERVER-3793
+t.ensureIndex( {loc:'2d',x:1} );
+assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
+}
diff --git a/jstests/core/geo_multikey1.js b/jstests/core/geo_multikey1.js
new file mode 100644
index 00000000000..ce55609db25
--- /dev/null
+++ b/jstests/core/geo_multikey1.js
@@ -0,0 +1,19 @@
+// Multikey geo index tests with parallel arrays.
+
+t = db.jstests_geo_multikey1;
+t.drop();
+
+locArr = [];
+arr = [];
+for( i = 0; i < 10; ++i ) {
+ locArr.push( [i,i+1] );
+ arr.push( i );
+}
+t.save( {loc:locArr,a:arr,b:arr,c:arr} );
+
+// Parallel arrays are allowed for geo indexes.
+assert.writeOK(t.ensureIndex( {loc:'2d',a:1,b:1,c:1} ));
+
+// Parallel arrays are not allowed for normal indexes.
+assert.writeError(t.ensureIndex( {loc:1,a:1,b:1,c:1} ));
+
diff --git a/jstests/core/geo_multinest0.js b/jstests/core/geo_multinest0.js
new file mode 100644
index 00000000000..c15eb7d60d7
--- /dev/null
+++ b/jstests/core/geo_multinest0.js
@@ -0,0 +1,60 @@
+// Make sure nesting of location arrays also works.
+
+t = db.geonest
+t.drop();
+
+t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } )
+t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } )
+var res = t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } );
+assert.writeOK( res );
+
+assert.writeOK(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
+assert.eq( 2, t.getIndexKeys().length )
+
+res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } );
+assert.writeOK( res );
+
+// test normal access
+
+printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
+
+assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
+
+assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
+
+// Try a complex nesting
+
+t = db.geonest
+t.drop();
+
+t.insert( { zip : "10001", data : [ { loc : [ [ 10, 10 ], { lat : 50, long : 50 } ], type : "home" } ] } )
+t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } )
+res = t.insert({ zip: "10003", data: [{ loc: [{ x: 30, y: 30 }, [ 50, 50 ]], type: "home" }]});
+assert( !res.hasWriteErrors() );
+
+assert.writeOK(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
+assert.eq( 2, t.getIndexKeys().length )
+
+res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
+ { loc : [ 50, 50 ], type : "work" } ] } );
+
+assert.writeOK( res );
+
+// test normal access
+printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() )
+
+assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
+
+assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
+
diff --git a/jstests/core/geo_multinest1.js b/jstests/core/geo_multinest1.js
new file mode 100644
index 00000000000..478e2ab83e4
--- /dev/null
+++ b/jstests/core/geo_multinest1.js
@@ -0,0 +1,36 @@
+// Test distance queries with interleaved distances
+
+t = db.multinest
+t.drop();
+
+t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
+ { loc : [ 29, 29 ], type : "work" } ] } )
+t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
+ { loc : [ 39, 39 ], type : "work" } ] } )
+var res = t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
+ { loc : [ 49, 49 ], type : "work" } ] } );
+assert.writeOK( res );
+
+assert.writeOK(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
+assert.eq( 2, t.getIndexKeys().length )
+
+res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
+ { loc : [ 59, 59 ], type : "work" } ] } );
+assert.writeOK( res );
+
+// test normal access
+
+var result = t.find({ "data.loc" : { $near : [0, 0] } }).toArray();
+
+printjson( result )
+
+assert.eq( 4, result.length )
+
+var order = [ 1, 2, 3, 4 ]
+
+for( var i = 0; i < result.length; i++ ){
+ assert.eq( "1000" + order[i], result[i].zip )
+}
+
diff --git a/jstests/core/geo_near_random1.js b/jstests/core/geo_near_random1.js
new file mode 100644
index 00000000000..50539f3ea5d
--- /dev/null
+++ b/jstests/core/geo_near_random1.js
@@ -0,0 +1,12 @@
+// this tests all points using $near
+load("jstests/libs/geo_near_random.js");
+
+var test = new GeoNearRandomTest("geo_near_random1");
+
+test.insertPts(50);
+
+test.testPt([0,0]);
+test.testPt(test.mkPt());
+test.testPt(test.mkPt());
+test.testPt(test.mkPt());
+test.testPt(test.mkPt());
diff --git a/jstests/core/geo_near_random2.js b/jstests/core/geo_near_random2.js
new file mode 100644
index 00000000000..1673abb88e7
--- /dev/null
+++ b/jstests/core/geo_near_random2.js
@@ -0,0 +1,21 @@
+// this tests 1% of all points using $near and $nearSphere
+load("jstests/libs/geo_near_random.js");
+
+var test = new GeoNearRandomTest("geo_near_random2");
+
+test.insertPts(5000);
+
+opts = {sphere:0, nToTest:test.nPts*0.01};
+test.testPt([0,0], opts);
+test.testPt(test.mkPt(), opts);
+test.testPt(test.mkPt(), opts);
+test.testPt(test.mkPt(), opts);
+test.testPt(test.mkPt(), opts);
+
+opts.sphere = 1
+test.testPt([0,0], opts);
+test.testPt(test.mkPt(0.8), opts);
+test.testPt(test.mkPt(0.8), opts);
+test.testPt(test.mkPt(0.8), opts);
+test.testPt(test.mkPt(0.8), opts);
+
diff --git a/jstests/core/geo_nearwithin.js b/jstests/core/geo_nearwithin.js
new file mode 100644
index 00000000000..6f38f5dd7d9
--- /dev/null
+++ b/jstests/core/geo_nearwithin.js
@@ -0,0 +1,27 @@
+// Test geoNear + $within.
+t = db.geo_nearwithin
+t.drop();
+
+points = 10
+for (var x = -points; x < points; x += 1) {
+ for (var y = -points; y < points; y += 1) {
+ t.insert({geo: [x, y]})
+ }
+}
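+// 400 points on the integer grid [-10, 9] x [-10, 9].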
+
+t.ensureIndex({ geo : "2d" })
+
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 1]}}}})
+assert.eq(resNear.results.length, 5)
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 0]}}}})
+assert.eq(resNear.results.length, 1)
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 0.5]}}}})
+assert.eq(resNear.results.length, 1)
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 1.5]}}}})
+assert.eq(resNear.results.length, 9)
+
+// We want everything at distance >1 but <1.5 from the origin.
+// These are the four points (+-1, +-1), at distance sqrt(2) ~= 1.414.
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {$and: [{geo: {$within: {$center: [[0, 0], 1.5]}}},
+ {geo: {$not: {$within: {$center: [[0,0], 1]}}}}]}})
+assert.eq(resNear.results.length, 4)
diff --git a/jstests/core/geo_or.js b/jstests/core/geo_or.js
new file mode 100644
index 00000000000..fd9b7234a21
--- /dev/null
+++ b/jstests/core/geo_or.js
@@ -0,0 +1,62 @@
+// multiple geo clauses with $or
+
+t = db.geoor;
+
+t.drop();
+
+var p = [-71.34895, 42.46037];
+var q = [1.48736, 42.55327];
+
+t.save({loc: p});
+t.save({loc: q});
+
+var indexname = "2dsphere";
+
+t.ensureIndex({loc: indexname})
+
+assert.eq(1, t.find({loc: p}).itcount(), indexname);
+
+// $or supports at most one $near clause
+assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(),
+ 'geo query not supported by $or. index type: ' + indexname);
+assert.throws(function() {
+ assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}},
+ {loc: {$nearSphere: q}}]}).itcount(),
+ 'geo query not supported by $or. index type: ' + indexname);
+}, null, '$or with multiple $near clauses');
+
+// the following tests should match the points in the collection
+
+assert.eq(2, t.find({$or: [
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}},
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}}
+ ]}).itcount(),
+ 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
+assert.eq(2, t.find({$or: [
+ {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}},
+ {loc: {$geoIntersects: {$geometry: {type: 'LineString',
+ coordinates: [[0,0], [1,1]]}}}}
+ ]}).itcount(),
+ 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' + indexname);
+assert.eq(2, t.find({$or: [
+ {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}},
+ {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}}
+ ]}).itcount(),
+ 'multiple $geoIntersects Point clauses not supported by $or. index type: ' + indexname);
+assert.eq(2, t.find({$or: [
+ {loc: {$geoIntersects: {$geometry: {type: 'Polygon',
+ coordinates: [[[0, 0], p, q, [0, 0]]]}}}},
+ {loc: {$geoIntersects: {$geometry:
+ {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}}}}
+ ]}).itcount(),
+ 'multiple $geoIntersects Polygon clauses not supported by $or. index type: ' + indexname);
+
+t.dropIndexes();
+
+var indexname = "2d";
+
+t.ensureIndex({loc: indexname})
+
+assert.eq(2, t.find({$or: [{loc: {$geoWithin: {$centerSphere: [p, 10]}}},
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}}]}).itcount(),
+ 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
diff --git a/jstests/core/geo_poly_edge.js b/jstests/core/geo_poly_edge.js
new file mode 100644
index 00000000000..31a0849e67d
--- /dev/null
+++ b/jstests/core/geo_poly_edge.js
@@ -0,0 +1,22 @@
+//
+// Tests polygon edge cases
+//
+
+var coll = db.getCollection( 'jstests_geo_poly_edge' )
+coll.drop();
+
+coll.ensureIndex({ loc : "2d" })
+
+coll.insert({ loc : [10, 10] })
+coll.insert({ loc : [10, -10] })
+
+assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, -10 ]] } } }).itcount(), 2 )
+
+assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, 10 ]] } } }).itcount(), 1 )
+
+
+coll.insert({ loc : [179, 0] })
+coll.insert({ loc : [0, 179] })
+
+assert.eq( coll.find({ loc : { $within : { $polygon : [[0, 0], [1000, 0], [1000, 1000], [0, 1000]] } } }).itcount(), 3 )
+
diff --git a/jstests/core/geo_poly_line.js b/jstests/core/geo_poly_line.js
new file mode 100644
index 00000000000..aca77b6ab0a
--- /dev/null
+++ b/jstests/core/geo_poly_line.js
@@ -0,0 +1,17 @@
+// Test that weird polygons work SERVER-3725
+
+t = db.geo_polygon5;
+t.drop();
+
+t.insert({loc:[0,0]})
+t.insert({loc:[1,0]})
+t.insert({loc:[2,0]})
+t.insert({loc:[3,0]})
+t.insert({loc:[4,0]})
+
+t.ensureIndex( { loc : "2d" } );
+
+printjson( t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).toArray() )
+
+assert.eq( 5, t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).itcount() )
+
diff --git a/jstests/core/geo_polygon1.js b/jstests/core/geo_polygon1.js
new file mode 100644
index 00000000000..11f17910306
--- /dev/null
+++ b/jstests/core/geo_polygon1.js
@@ -0,0 +1,73 @@
+//
+// Tests for N-dimensional polygon querying
+//
+
+t = db.geo_polygon1;
+t.drop();
+
+num = 0;
+for ( x=1; x < 9; x++ ){
+ for ( y= 1; y < 9; y++ ){
+ o = { _id : num++ , loc : [ x , y ] };
+ t.save( o );
+ }
+}
+
+t.ensureIndex( { loc : "2d" } );
+
+triangle = [[0,0], [1,1], [0,2]];
+
+// Look at only a small slice of the data within a triangle
+assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
+
+boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
+
+assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
+
+// Make sure we can add object-based polygons
+assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() )
+
+// Look in a box much bigger than the one we have data in
+boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
+assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
+
+t.drop();
+
+pacman = [
+ [0,2], [0,4], [2,6], [4,6], // Head
+ [6,4], [4,3], [6,2], // Mouth
+ [4,0], [2,0] // Bottom
+ ];
+
+t.save({loc: [1,3] }); // Add a point that's in
+assert.writeOK(t.ensureIndex( { loc : "2d" } ));
+
+assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
+
+t.save({ loc : [5, 3] })   // Add a point that's outside, right in the mouth opening
+t.save({ loc : [3, 7] }) // Add a point above the center of the head
+t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
+
+assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
+
+// Make sure bad query polygons are rejected.
+assert.throws(function() {
+    t.find( { loc : { $within : { $polygon : [1, 2] } } } ).toArray();
+});
+assert.throws(function() {
+    t.find( { loc : { $within : { $polygon : [[1, 2]] } } } ).toArray();
+});
+assert.throws(function() {
+    t.find( { loc : { $within : { $polygon : [[1, 2], [2, 3]] } } } ).toArray();
+});
+
diff --git a/jstests/core/geo_polygon1_noindex.js b/jstests/core/geo_polygon1_noindex.js
new file mode 100644
index 00000000000..4fc7135c2f9
--- /dev/null
+++ b/jstests/core/geo_polygon1_noindex.js
@@ -0,0 +1,46 @@
+// SERVER-7343: allow $within without a geo index.
+
+t = db.geo_polygon1_noindex;
+t.drop();
+
+num = 0;
+for ( x=1; x < 9; x++ ){
+ for ( y= 1; y < 9; y++ ){
+ o = { _id : num++ , loc : [ x , y ] };
+ t.save( o );
+ }
+}
+
+triangle = [[0,0], [1,1], [0,2]];
+
+// Look at only a small slice of the data within a triangle
+assert.eq( 1 , t.find({ loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
+
+boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
+
+assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
+
+// Make sure we can add object-based polygons
+assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() )
+
+// Look in a box much bigger than the one we have data in
+boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
+assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
+
+t.drop();
+
+pacman = [
+ [0,2], [0,4], [2,6], [4,6], // Head
+ [6,4], [4,3], [6,2], // Mouth
+ [4,0], [2,0] // Bottom
+ ];
+
+assert.writeOK(t.save({loc: [1,3] })); // Add a point that's in
+
+assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
+
+t.save({ loc : [5, 3] })   // Add a point that's outside, right in the mouth opening
+t.save({ loc : [3, 7] }) // Add a point above the center of the head
+t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
+
+assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
diff --git a/jstests/core/geo_polygon2.js b/jstests/core/geo_polygon2.js
new file mode 100644
index 00000000000..c626064f153
--- /dev/null
+++ b/jstests/core/geo_polygon2.js
@@ -0,0 +1,263 @@
+//
+// More tests for N-dimensional polygon querying
+//
+
+// Create a polygon of some shape (no holes) using turtle graphics.
+// It will look like a very contorted octopus (quad-pus?) shape; some
+// edges will probably touch.
+
+var numTests = 4;
+
+for ( var test = 0; test < numTests; test++ ) {
+
+ Random.srand( 1337 + test );
+
+ var numTurtles = 4;
+ var gridSize = [ 20, 20 ];
+ var turtleSteps = 500;
+ var bounds = [ Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001 ];
+ var rotation = Math.PI * Random.rand();
+ var bits = Math.floor( Random.rand() * 32 );
+
+ printjson( { test : test, rotation : rotation, bits : bits });
+
+    var rotatePoint = function( x, y ) {
+
+        if( y == undefined ){
+            y = x[1];
+            x = x[0];
+        }
+
+        var xp = x * Math.cos( rotation ) - y * Math.sin( rotation );
+        var yp = y * Math.cos( rotation ) + x * Math.sin( rotation );
+
+        return [xp, yp];
+    };
+
+ var grid = [];
+ for ( var i = 0; i < gridSize[0]; i++ ) {
+ grid.push( new Array( gridSize[1] ) );
+ }
+
+ grid.toString = function() {
+
+ var gridStr = "";
+ for ( var j = grid[0].length - 1; j >= -1; j-- ) {
+ for ( var i = 0; i < grid.length; i++ ) {
+ if ( i == 0 )
+ gridStr += ( j == -1 ? " " : ( j % 10) ) + ": ";
+ if ( j != -1 )
+ gridStr += "[" + ( grid[i][j] != undefined ? grid[i][j] : " " ) + "]";
+ else
+ gridStr += " " + ( i % 10 ) + " ";
+ }
+ gridStr += "\n";
+ }
+
+ return gridStr;
+ };
+
+ var turtles = [];
+ for ( var i = 0; i < numTurtles; i++ ) {
+
+ var up = ( i % 2 == 0 ) ? i - 1 : 0;
+ var left = ( i % 2 == 1 ) ? ( i - 1 ) - 1 : 0;
+
+ turtles[i] = [
+ [ Math.floor( gridSize[0] / 2 ), Math.floor( gridSize[1] / 2 ) ],
+ [ Math.floor( gridSize[0] / 2 ) + left, Math.floor( gridSize[1] / 2 ) + up ] ];
+
+ grid[turtles[i][1][0]][turtles[i][1][1]] = i;
+
+ }
+
+ grid[Math.floor( gridSize[0] / 2 )][Math.floor( gridSize[1] / 2 )] = "S";
+
+ // print( grid.toString() )
+
+ var pickDirections = function() {
+
+ var up = Math.floor( Random.rand() * 3 );
+ if ( up == 2 )
+ up = -1;
+
+ if ( up == 0 ) {
+ var left = Math.floor( Random.rand() * 3 );
+ if ( left == 2 )
+ left = -1;
+ } else
+ left = 0;
+
+ if ( Random.rand() < 0.5 ) {
+ var swap = left;
+ left = up;
+ up = swap;
+ }
+
+ return [ left, up ];
+ };
+
+ for ( var s = 0; s < turtleSteps; s++ ) {
+
+ for ( var t = 0; t < numTurtles; t++ ) {
+
+ var dirs = pickDirections();
+ var up = dirs[0];
+ var left = dirs[1];
+
+ var lastTurtle = turtles[t][turtles[t].length - 1];
+ var nextTurtle = [ lastTurtle[0] + left, lastTurtle[1] + up ];
+
+ if ( nextTurtle[0] >= gridSize[0] ||
+ nextTurtle[1] >= gridSize[1] ||
+ nextTurtle[0] < 0 ||
+ nextTurtle[1] < 0 )
+ continue;
+
+ if ( grid[nextTurtle[0]][nextTurtle[1]] == undefined ) {
+ turtles[t].push( nextTurtle );
+ grid[nextTurtle[0]][nextTurtle[1]] = t;
+ }
+
+ }
+ }
+
+ turtlePaths = [];
+ for ( var t = 0; t < numTurtles; t++ ) {
+
+ turtlePath = [];
+
+        var nextSeg = function(currTurtle, prevTurtle) {
+
+            var pathX = currTurtle[0];
+            var pathY;
+
+            if ( currTurtle[1] < prevTurtle[1] ) {
+                pathX = currTurtle[0] + 1;
+                pathY = prevTurtle[1];
+            } else if ( currTurtle[1] > prevTurtle[1] ) {
+                pathX = currTurtle[0];
+                pathY = currTurtle[1];
+            } else if ( currTurtle[0] < prevTurtle[0] ) {
+                pathX = prevTurtle[0];
+                pathY = currTurtle[1];
+            } else if ( currTurtle[0] > prevTurtle[0] ) {
+                pathX = currTurtle[0];
+                pathY = currTurtle[1] + 1;
+            }
+
+            // print( " Prev : " + prevTurtle + " Curr : " + currTurtle + " path : " + [pathX, pathY] );
+
+            return [ pathX, pathY ];
+        };
+
+ for ( var s = 1; s < turtles[t].length; s++ ) {
+
+ currTurtle = turtles[t][s];
+ prevTurtle = turtles[t][s - 1];
+
+ turtlePath.push( nextSeg( currTurtle, prevTurtle ) );
+
+ }
+
+ for ( var s = turtles[t].length - 2; s >= 0; s-- ) {
+
+ currTurtle = turtles[t][s];
+ prevTurtle = turtles[t][s + 1];
+
+ turtlePath.push( nextSeg( currTurtle, prevTurtle ) );
+
+ }
+
+ // printjson( turtlePath )
+
+ // End of the line is not inside our polygon.
+ var lastTurtle = turtles[t][turtles[t].length - 1];
+ grid[lastTurtle[0]][lastTurtle[1]] = undefined;
+
+ fixedTurtlePath = [];
+ for ( var s = 1; s < turtlePath.length; s++ ) {
+
+ if ( turtlePath[s - 1][0] == turtlePath[s][0] &&
+ turtlePath[s - 1][1] == turtlePath[s][1] ) {
+ continue;
+ }
+
+ var up = turtlePath[s][1] - turtlePath[s - 1][1];
+ var right = turtlePath[s][0] - turtlePath[s - 1][0];
+ var addPoint = ( up != 0 && right != 0 );
+
+ if ( addPoint && up != right ) {
+ fixedTurtlePath.push( [ turtlePath[s][0], turtlePath[s - 1][1] ] );
+ } else if ( addPoint ) {
+ fixedTurtlePath.push( [ turtlePath[s - 1][0], turtlePath[s][1] ] );
+ }
+
+ fixedTurtlePath.push( turtlePath[s] );
+ }
+
+ // printjson( fixedTurtlePath )
+
+ turtlePaths.push( fixedTurtlePath );
+ }
+
+ // Uncomment to print polygon shape
+ // print( grid.toString() )
+
+ var polygon = [];
+ for ( var t = 0; t < turtlePaths.length; t++ ) {
+ for ( var s = 0; s < turtlePaths[t].length; s++ ) {
+ polygon.push( rotatePoint( turtlePaths[t][s] ) );
+ }
+ }
+
+ // Uncomment to print out polygon
+ // printjson( polygon )
+
+ t = db.polytest2;
+ t.drop();
+
+ // Test single and multi-location documents
+ var pointsIn = 0;
+ var pointsOut = 0;
+ var allPointsIn = [];
+ var allPointsOut = [];
+
+ for ( var j = grid[0].length - 1; j >= 0; j-- ) {
+ for ( var i = 0; i < grid.length; i++ ) {
+ var point = rotatePoint( [ i + 0.5, j + 0.5 ] );
+
+ t.insert( { loc : point } );
+ if ( grid[i][j] != undefined ){
+ allPointsIn.push( point );
+ pointsIn++;
+ }
+ else{
+ allPointsOut.push( point );
+ pointsOut++;
+ }
+ }
+ }
+
+ var res = t.ensureIndex({ loc: "2d" }, { bits: 1 + bits, max: bounds[1], min: bounds[0] });
+ assert.writeOK( res );
+
+ t.insert( { loc : allPointsIn } );
+ t.insert( { loc : allPointsOut } );
+ allPoints = allPointsIn.concat( allPointsOut );
+ t.insert( { loc : allPoints } );
+
+ print( "Points : " );
+ printjson( { pointsIn : pointsIn, pointsOut : pointsOut } );
+ //print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() )
+
+ assert.eq( gridSize[0] * gridSize[1] + 3, t.find().count() );
+ assert.eq( 2 + pointsIn, t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() );
+}
diff --git a/jstests/core/geo_polygon3.js b/jstests/core/geo_polygon3.js
new file mode 100644
index 00000000000..b144bfbc589
--- /dev/null
+++ b/jstests/core/geo_polygon3.js
@@ -0,0 +1,54 @@
+//
+// Tests for polygon querying with varying levels of accuracy
+//
+
+var numTests = 31;
+
+for( var n = 0; n < numTests; n++ ){
+
+ t = db.geo_polygon3;
+ t.drop();
+
+ num = 0;
+ for ( x=1; x < 9; x++ ){
+ for ( y= 1; y < 9; y++ ){
+ o = { _id : num++ , loc : [ x , y ] };
+ t.save( o );
+ }
+ }
+
+ t.ensureIndex( { loc : "2d" }, { bits : 2 + n } );
+
+ triangle = [[0,0], [1,1], [0,2]];
+
+ // Look at only a small slice of the data within a triangle
+ assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).itcount() , "Triangle Test" );
+
+
+ boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
+
+ assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Bounding Box Test" );
+
+ // Look in a box much bigger than the one we have data in
+ boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
+ assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Big Bounding Box Test" );
+
+ t.drop();
+
+ pacman = [
+ [0,2], [0,4], [2,6], [4,6], // Head
+ [6,4], [4,3], [6,2], // Mouth
+ [4,0], [2,0] // Bottom
+ ];
+
+ t.save({loc: [1,3] }); // Add a point that's in
+ t.ensureIndex( { loc : "2d" }, { bits : 2 + t } );
+
+ assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman single point" );
+
+    t.save({ loc : [5, 3] })   // Add a point that's outside, right in the mouth opening
+ t.save({ loc : [3, 7] }) // Add a point above the center of the head
+ t.save({ loc : [3,-1] }) // Add a point below the center of the bottom
+
+ assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman double point" );
+}
diff --git a/jstests/core/geo_queryoptimizer.js b/jstests/core/geo_queryoptimizer.js
new file mode 100644
index 00000000000..7a438bce8fb
--- /dev/null
+++ b/jstests/core/geo_queryoptimizer.js
@@ -0,0 +1,27 @@
+
+t = db.geo_qo1;
+t.drop()
+
+t.ensureIndex({loc:"2d"})
+
+t.insert({'issue':0})
+t.insert({'issue':1})
+t.insert({'issue':2})
+t.insert({'issue':2, 'loc':[30.12,-118]})
+t.insert({'issue':1, 'loc':[30.12,-118]})
+t.insert({'issue':0, 'loc':[30.12,-118]})
+
+assert.eq( 6 , t.find().itcount() , "A1" )
+
+assert.eq( 2 , t.find({'issue':0}).itcount() , "A2" )
+
+assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "A3" )
+
+assert.eq( 2 , t.find({'issue':0}).itcount() , "B1" )
+
+assert.eq( 6 , t.find().itcount() , "B2" )
+
+assert.eq( 2 , t.find({'issue':0}).itcount() , "B3" )
+
+assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "B4" )
+
diff --git a/jstests/core/geo_regex0.js b/jstests/core/geo_regex0.js
new file mode 100644
index 00000000000..79042b9074e
--- /dev/null
+++ b/jstests/core/geo_regex0.js
@@ -0,0 +1,18 @@
+// From SERVER-2247
+// Tests to make sure regex works with geo indices
+
+t = db.regex0
+t.drop()
+
+t.ensureIndex( { point : '2d', words : 1 } )
+t.insert( { point : [ 1, 1 ], words : [ 'foo', 'bar' ] } )
+
+regex = { words : /^f/ }
+geo = { point : { $near : [ 1, 1 ] } }
+both = { point : { $near : [ 1, 1 ] }, words : /^f/ }
+
+assert.eq(1, t.find( regex ).count() )
+assert.eq(1, t.find( geo ).count() )
+assert.eq(1, t.find( both ).count() )
+
+
diff --git a/jstests/core/geo_s2cursorlimitskip.js b/jstests/core/geo_s2cursorlimitskip.js
new file mode 100644
index 00000000000..2417d41f24c
--- /dev/null
+++ b/jstests/core/geo_s2cursorlimitskip.js
@@ -0,0 +1,68 @@
+// Test various cursor behaviors
+var t = db.geo_s2getmmm
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+
+Random.setRandomSeed();
+var random = Random.rand;
+
+/*
+ * Test that getmore works within a 2dsphere index. We insert a bunch of
+ * points, get a cursor, and fetch some of the points. Then we insert a
+ * bunch more points and fetch the rest. If the final fetches succeed,
+ * getmore is working.
+ */
+function sign() { return random() > 0.5 ? 1 : -1; }
+function insertRandomPoints(num, minDist, maxDist){
+ for(var i = 0; i < num; i++){
+ var lat = sign() * (minDist + random() * (maxDist - minDist));
+ var lng = sign() * (minDist + random() * (maxDist - minDist));
+ var point = { geo: { type: "Point", coordinates: [lng, lat] } };
+ assert.writeOK(t.insert(point));
+ }
+}
+
+var initialPointCount = 200
+var smallBit = 10
+var secondPointCount = 100
+
+// Insert points between 0.01 and 1.0 away.
+insertRandomPoints(initialPointCount, 0.01, 1.0);
+
+var cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).batchSize(4);
+assert.eq(cursor.count(), initialPointCount);
+
+for(var j = 0; j < smallBit; j++){
+ assert(cursor.hasNext());
+ cursor.next();
+}
+// We've looked at only smallBit points; (initialPointCount - smallBit) remain.
+assert(cursor.hasNext())
+
+// Insert points outside of the shell we've tested thus far
+insertRandomPoints(secondPointCount, 2.01, 3.0);
+assert.eq(cursor.count(), initialPointCount + secondPointCount)
+
+for(var k = 0; k < initialPointCount + secondPointCount - smallBit; k++){
+ assert(cursor.hasNext())
+ var tmpPoint = cursor.next();
+}
+// Shouldn't be any more points to look at now.
+assert(!cursor.hasNext())
+
+var someLimit = 23;
+// Make sure limit does something.
+cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit)
+// Count doesn't work here -- ignores limit/skip, so we use itcount.
+assert.eq(cursor.itcount(), someLimit)
+// Make sure skip works by skipping some stuff ourselves.
+var someSkip = 3;
+cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit + someSkip)
+for (var i = 0; i < someSkip; ++i) { cursor.next(); }
+var cursor2 = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).skip(someSkip).limit(someLimit)
+while (cursor.hasNext()) {
+ assert(cursor2.hasNext());
+ assert.eq(cursor.next(), cursor2.next());
+}
diff --git a/jstests/core/geo_s2dedupnear.js b/jstests/core/geo_s2dedupnear.js
new file mode 100644
index 00000000000..ac31e082891
--- /dev/null
+++ b/jstests/core/geo_s2dedupnear.js
@@ -0,0 +1,11 @@
+// Make sure that we don't return several of the same result due to faulty
+// assumptions about the btree cursor. That is, don't return duplicate results.
+t = db.geo_s2dedupnear
+t.drop()
+
+t.ensureIndex( { geo : "2dsphere" } )
+var x = { "type" : "Polygon",
+ "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]}
+t.insert({geo: x})
+res = t.find({geo: {$geoNear: {"type" : "Point", "coordinates" : [31, 41]}}})
+assert.eq(res.itcount(), 1)
diff --git a/jstests/core/geo_s2descindex.js b/jstests/core/geo_s2descindex.js
new file mode 100644
index 00000000000..39d153a6e55
--- /dev/null
+++ b/jstests/core/geo_s2descindex.js
@@ -0,0 +1,64 @@
+//
+// Tests 2dsphere with descending fields, ensures correct lookup
+//
+
+var coll = db.getCollection("twodspheredesc");
+
+var descriptors = [["field1", -1], ["field2", -1], ["coordinates", "2dsphere"]]
+var docA = {field1 : "a", field2 : 1, coordinates : [-118.2400013, 34.073893]}
+var docB = {field1 : "b", field2 : 1, coordinates : [-118.2400012, 34.073894]}
+
+// Try both regular and near index cursors
+var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
+ 0.44915760491198753]}}};
+var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0, 0]}}};
+
+//
+// The idea here is we try "2dsphere" indexes in combination with descending
+// other fields in various
+// positions and ensure that we return correct results.
+//
+
+for ( var t = 0; t < descriptors.length; t++) {
+
+ var descriptor = {};
+ for ( var i = 0; i < descriptors.length; i++) {
+ descriptor[descriptors[i][0]] = descriptors[i][1];
+ }
+
+ jsTest.log("Trying 2dsphere index with descriptor " + tojson(descriptor));
+
+ coll.drop();
+ coll.ensureIndex(descriptor);
+
+ coll.insert(docA);
+ coll.insert(docB);
+
+ assert.eq(1, coll.count(Object.merge(query, {field1 : "a"})));
+ assert.eq(1, coll.count(Object.merge(query, {field1 : "b"})));
+ assert.eq(2, coll.count(Object.merge(query, {field2 : 1})));
+ assert.eq(0, coll.count(Object.merge(query, {field2 : 0})));
+
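+    // Left-rotate the descriptor list so that on the next pass the
+    // "2dsphere" field occupies a different position in the index spec.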
+ var firstEls = descriptors.splice(1);
+ descriptors = firstEls.concat(descriptors);
+}
+
+//
+// Data taken from previously-hanging result
+//
+
+jsTest.log("Trying case found in wild...");
+
+coll.drop();
+coll.ensureIndex({coordinates : "2dsphere", field : -1});
+coll.insert({coordinates : [-118.240013, 34.073893]});
+var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
+ 0.44915760491198753]}},
+ field : 1};
+
+assert.eq(null, coll.findOne(query));
+coll.remove({})
+coll.insert({coordinates : [-118.240013, 34.073893], field : 1});
+assert.neq(null, coll.findOne(query));
+
+jsTest.log("Success!");
diff --git a/jstests/core/geo_s2disjoint_holes.js b/jstests/core/geo_s2disjoint_holes.js
new file mode 100644
index 00000000000..26d94d9343a
--- /dev/null
+++ b/jstests/core/geo_s2disjoint_holes.js
@@ -0,0 +1,81 @@
+//
+// We should prohibit polygons with holes not bounded by their exterior shells.
+//
+// From spec:
+//
+// "For Polygons with multiple rings, the first must be the exterior ring and
+// any others must be interior rings or holes."
+// http://geojson.org/geojson-spec.html#polygon
+//
+
+var t = db.geo_s2disjoint_holes,
+ coordinates = [
+ // One square.
+ [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]],
+ // Another disjoint square.
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]
+ ],
+ poly = {
+ type: 'Polygon',
+ coordinates: coordinates
+ },
+ multiPoly = {
+ type: 'MultiPolygon',
+ // Multi-polygon's coordinates are wrapped in one more array.
+ coordinates: [coordinates]
+ };
+
+t.drop();
+
+jsTest.log("We're going to print some error messages, don't be alarmed.");
+
+//
+// Can't query with a polygon or multi-polygon that has a non-contained hole.
+//
+print(assert.throws(
+ function() {
+ t.findOne({geo: {$geoWithin: {$geometry: poly}}});
+ },
+ [],
+ "parsing a polygon with non-overlapping holes."));
+
+print(assert.throws(
+ function() {
+ t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}});
+ },
+ [],
+ "parsing a multi-polygon with non-overlapping holes."));
+
+//
+// Can't insert a bad polygon or a bad multi-polygon with a 2dsphere index.
+//
+t.createIndex({p: '2dsphere'});
+assert.writeError(t.insert({p: poly}));
+assert.writeError(t.insert({p: multiPoly}));
+
+//
+// Can't create a 2dsphere index when the collection contains a bad polygon or
+// bad multi-polygon.
+//
+t.drop();
+t.insert({p: poly});
+res = t.createIndex({p: '2dsphere'});
+assert(!res.ok, tojson(res));
+assert.eq(1, t.getIndexes().length);
+
+t.drop();
+t.insert({p: multiPoly});
+res = t.createIndex({p: '2dsphere'});
+assert(!res.ok, tojson(res));
+assert.eq(1, t.getIndexes().length);
+
+//
+// But with no index we can insert bad polygons and bad multi-polygons.
+//
+t.drop();
+assert.writeOK(t.insert({p: poly}));
+assert.writeOK(t.insert({p: multiPoly}));
+
+t.drop();
+
+jsTest.log("Success.")
diff --git a/jstests/core/geo_s2dupe_points.js b/jstests/core/geo_s2dupe_points.js
new file mode 100644
index 00000000000..8dd6e804c78
--- /dev/null
+++ b/jstests/core/geo_s2dupe_points.js
@@ -0,0 +1,71 @@
+// See: SERVER-9240, SERVER-9401.
+// s2 rejects shapes with duplicate adjacent points as invalid, but they are
+// valid in GeoJSON. We store the duplicates, but internally remove them
+// before indexing or querying.
+t = db.geo_s2dupe_points
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+function testDuplicates(shapeName, shapeWithDupes, shapeWithoutDupes) {
+ // insert a doc with dupes
+ assert.writeOK(t.insert(shapeWithDupes));
+
+ // duplicates are preserved when the document is fetched by _id
+ assert.eq(shapeWithDupes, t.findOne({_id: shapeName}));
+ assert.neq(shapeWithoutDupes, t.findOne({_id: shapeName}).geo);
+
+    // can query the inserted doc with $geoIntersects using both the duplicated and de-duplicated shapes
+ assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithDupes.geo } } } ).itcount(), 1);
+ assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithoutDupes } } } ).itcount(), 1);
+
+ // direct document equality in queries is preserved
+ assert.eq(t.find({ geo: shapeWithoutDupes} ).itcount(), 0);
+ assert.eq(t.find({ geo: shapeWithDupes.geo } ).itcount(), 1);
+}
+
+// LineString
+var lineWithDupes = { _id: "line", geo: { type: "LineString",
+ coordinates: [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6] ]
+ }
+};
+var lineWithoutDupes = { type: "LineString", coordinates: [ [40,5], [41,6] ] };
+
+// Polygon
+var polygonWithDupes = { _id: "poly", geo: { type: "Polygon",
+ coordinates: [
+ [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
+ [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], [-2.0, -2.0] ]
+ ] }
+};
+var polygonWithoutDupes = { type: "Polygon",
+ coordinates: [
+ [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
+ [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0] ]
+ ]
+};
+
+// MultiPolygon
+var multiPolygonWithDupes = { _id: "multi", geo: { type: "MultiPolygon", coordinates: [
+ [
+ [ [102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
+ ],
+ [
+ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
+ [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.8, 0.8], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
+ ]
+ ]
+} };
+var multiPolygonWithoutDupes = { type: "MultiPolygon", coordinates: [
+ [
+ [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
+ ],
+ [
+ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
+ [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
+ ]
+ ]
+};
+
+testDuplicates("line", lineWithDupes, lineWithoutDupes);
+testDuplicates("poly", polygonWithDupes, polygonWithoutDupes);
+testDuplicates("multi", multiPolygonWithDupes, multiPolygonWithoutDupes);
diff --git a/jstests/core/geo_s2edgecases.js b/jstests/core/geo_s2edgecases.js
new file mode 100755
index 00000000000..bf46baba744
--- /dev/null
+++ b/jstests/core/geo_s2edgecases.js
@@ -0,0 +1,40 @@
+t = db.geo_s2edgecases
+t.drop()
+
+roundworldpoint = { "type" : "Point", "coordinates": [ 180, 0 ] }
+
+// On the equator, straddling the antimeridian
+roundworld = { "type" : "Polygon",
+ "coordinates" : [ [ [179,1], [-179,1], [-179,-1], [179,-1], [179,1]]]}
+t.insert({geo : roundworld})
+
+roundworld2 = { "type" : "Polygon",
+ "coordinates" : [ [ [179,1], [179,-1], [-179,-1], [-179,1], [179,1]]]}
+t.insert({geo : roundworld2})
+
+// North pole
+santapoint = { "type" : "Point", "coordinates": [ 180, 90 ] }
+santa = { "type" : "Polygon",
+ "coordinates" : [ [ [179,89], [179,90], [-179,90], [-179,89], [179,89]]]}
+t.insert({geo : santa})
+santa2 = { "type" : "Polygon",
+ "coordinates" : [ [ [179,89], [-179,89], [-179,90], [179,90], [179,89]]]}
+t.insert({geo : santa2})
+
+// South pole
+penguinpoint = { "type" : "Point", "coordinates": [ 0, -90 ] }
+penguin1 = { "type" : "Polygon",
+ "coordinates" : [ [ [0,-89], [0,-90], [179,-90], [179,-89], [0,-89]]]}
+t.insert({geo : penguin1})
+penguin2 = { "type" : "Polygon",
+ "coordinates" : [ [ [0,-89], [179,-89], [179,-90], [0,-90], [0,-89]]]}
+t.insert({geo : penguin2})
+
+t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : roundworldpoint} } });
+assert.eq(res.count(), 2);
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : santapoint} } });
+assert.eq(res.count(), 2);
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : penguinpoint} } });
+assert.eq(res.count(), 2);
diff --git a/jstests/core/geo_s2exact.js b/jstests/core/geo_s2exact.js
new file mode 100644
index 00000000000..a7cf9627765
--- /dev/null
+++ b/jstests/core/geo_s2exact.js
@@ -0,0 +1,21 @@
+// Queries on exact geometry should return the exact geometry.
+t = db.geo_s2exact
+t.drop()
+
+function test(geometry) {
+ t.insert({geo: geometry})
+ assert.eq(1, t.find({geo: geometry}).itcount(), geometry)
+ t.ensureIndex({geo: "2dsphere"})
+ assert.eq(1, t.find({geo: geometry}).itcount(), geometry)
+ t.dropIndex({geo: "2dsphere"})
+}
+
+pointA = { "type" : "Point", "coordinates": [ 40, 5 ] }
+test(pointA)
+
+someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]}
+test(someline)
+
+somepoly = { "type" : "Polygon",
+ "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
+test(somepoly)
diff --git a/jstests/core/geo_s2holesameasshell.js b/jstests/core/geo_s2holesameasshell.js
new file mode 100644
index 00000000000..89ba5ef571b
--- /dev/null
+++ b/jstests/core/geo_s2holesameasshell.js
@@ -0,0 +1,44 @@
+// If polygons have holes, the holes cannot be equal to the entire geometry.
+var t = db.geo_s2holessameasshell
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+
+var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
+var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
+var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
+
+// Various "edge" cases. None of them should be returned by the non-polygon
+// polygon below.
+t.insert({geo : centerPoint});
+t.insert({geo : edgePoint});
+t.insert({geo : cornerPoint});
+
+// This generates an empty covering.
+var polygonWithFullHole = { "type" : "Polygon", "coordinates": [
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
+ ]
+};
+
+// An insert that generates no index keys should error.
+assert.writeError(t.insert({geo: polygonWithFullHole}));
+
+// With no covering to search over, the query should error.
+assert.throws(function() {
+    return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count();
+});
+
+// Similar polygon to the one above, but is covered by two holes instead of
+// one.
+var polygonWithTwoHolesCoveringWholeArea = {"type" : "Polygon", "coordinates": [
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
+ [[0,0], [0,0.5], [1, 0.5], [1, 0], [0, 0]],
+ [[0,0.5], [0,1], [1, 1], [1, 0.5], [0, 0.5]]
+ ]
+};
+
+// An insert that generates no index keys should error.
+assert.writeError(t.insert({geo: polygonWithTwoHolesCoveringWholeArea}));
+
+// With no covering to search over, the query should error.
+assert.throws(function() {
+    return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count();
+});
diff --git a/jstests/core/geo_s2index.js b/jstests/core/geo_s2index.js
new file mode 100755
index 00000000000..974e4578dce
--- /dev/null
+++ b/jstests/core/geo_s2index.js
@@ -0,0 +1,114 @@
+t = db.geo_s2index
+t.drop()
+
+// We internally drop adjacent duplicate points in lines.
+someline = { "type" : "LineString", "coordinates": [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6]]}
+t.insert( {geo : someline , nonGeo: "someline"})
+t.ensureIndex({geo: "2dsphere"})
+foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40,5]}}}}).next();
+assert.eq(foo.geo, someline);
+t.dropIndex({geo: "2dsphere"})
+
+pointA = { "type" : "Point", "coordinates": [ 40, 5 ] }
+t.insert( {geo : pointA , nonGeo: "pointA"})
+
+pointD = { "type" : "Point", "coordinates": [ 41.001, 6.001 ] }
+t.insert( {geo : pointD , nonGeo: "pointD"})
+
+pointB = { "type" : "Point", "coordinates": [ 41, 6 ] }
+t.insert( {geo : pointB , nonGeo: "pointB"})
+
+pointC = { "type" : "Point", "coordinates": [ 41, 6 ] }
+t.insert( {geo : pointC} )
+
+// Add a point within the polygon but not on its border, and not on the
+// path of the polyline.
+pointE = { "type" : "Point", "coordinates": [ 40.6, 5.4 ] }
+t.insert( {geo : pointE} )
+
+// Insert a document with no geo field; the index build below must not error.
+t.insert({nonGeo: "noGeoField!"})
+
+somepoly = { "type" : "Polygon",
+ "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
+t.insert( {geo : somepoly, nonGeo: "somepoly" })
+
+var res = t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } );
+// One document has no geo data; the index build must not error.
+assert.writeOK(res);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } });
+assert.eq(res.itcount(), 3);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } });
+assert.eq(res.itcount(), 4);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } });
+assert.eq(res.itcount(), 1);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } })
+assert.eq(res.itcount(), 5);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 6);
+
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 6);
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1)
+assert.eq(res.itcount(), 1);
+
+res = t.find({ "nonGeo": "pointA",
+ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
+
+// Don't crash mongod if we give it bad input.
+t.drop()
+t.ensureIndex({loc: "2dsphere", x:1})
+t.save({loc: [0,0]})
+assert.throws(function() { return t.count({loc: {$foo:[0,0]}}) })
+assert.throws(function() { return t.find({ "nonGeo": "pointA",
+ "geo" : { "$geoIntersects" : { "$geometry" : somepoly},
+ "$near": {"$geometry" : somepoly }}}).count()})
+
+// If we specify a datum, it has to be valid (WGS84).
+t.drop()
+t.ensureIndex({loc: "2dsphere"})
+res = t.insert({ loc: { type: 'Point',
+ coordinates: [40, 5],
+ crs: { type: 'name', properties: { name: 'EPSG:2000' }}}});
+assert.writeError(res);
+assert.eq(0, t.find().itcount())
+res = t.insert({ loc: { type: 'Point', coordinates: [40, 5] }});
+assert.writeOK(res);
+res = t.insert({ loc: { type: 'Point',
+ coordinates: [40, 5],
+ crs: { type: 'name', properties: {name :'EPSG:4326' }}}});
+assert.writeOK(res);
+res = t.insert({ loc: { type:'Point',
+ coordinates: [40, 5],
+ crs: { type: 'name',
+ properties: { name: 'urn:ogc:def:crs:OGC:1.3:CRS84'}}}});
+assert.writeOK(res);
+
+// We can pass level parameters and we verify that they're valid.
+// 0 <= coarsestIndexedLevel <= finestIndexedLevel <= 30.
+t.drop();
+t.save({loc: [0,0]})
+res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 17, coarsestIndexedLevel: 5 });
+assert.writeOK(res);
+
+t.drop();
+t.save({loc: [0,0]})
+res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 31, coarsestIndexedLevel: 5 });
+assert.writeError(res);
+
+t.drop();
+t.save({loc: [0,0]})
+res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: 0 });
+assert.writeOK(res);
+
+t.drop();
+t.save({loc: [0,0]})
+res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: -1 });
+assert.writeError(res);
diff --git a/jstests/core/geo_s2indexoldformat.js b/jstests/core/geo_s2indexoldformat.js
new file mode 100755
index 00000000000..e2cc1f353ee
--- /dev/null
+++ b/jstests/core/geo_s2indexoldformat.js
@@ -0,0 +1,28 @@
+// Make sure that the 2dsphere index can deal with non-GeoJSON points.
+// 2dsphere does not accept legacy shapes, only legacy points.
+t = db.geo_s2indexoldformat
+t.drop()
+
+t.insert( {geo : [40, 5], nonGeo: ["pointA"]})
+t.insert( {geo : [41.001, 6.001], nonGeo: ["pointD"]})
+t.insert( {geo : [41, 6], nonGeo: ["pointB"]})
+t.insert( {geo : [41, 6]} )
+t.insert( {geo : {x:40.6, y:5.4}} )
+
+t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } )
+
+res = t.find({ "geo" : { "$geoIntersects" : { "$geometry": {x:40, y:5}}}})
+assert.eq(res.count(), 1);
+
+res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [41,6]}}})
+assert.eq(res.count(), 2);
+
+// We don't support legacy polygons in 2dsphere.
+assert.writeError(t.insert( {geo : [[40,5],[40,6],[41,6],[41,5]], nonGeo: ["somepoly"] }));
+assert.writeError(t.insert( {geo : {a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}}));
+
+// Test "Can't canonicalize query: BadValue bad geo query" error.
+assert.throws(function() {
+ t.findOne({ "geo" : { "$geoIntersects" : {"$geometry": [[40,5],[40,6],[41,6],[41,5]]}}});
+});
+
diff --git a/jstests/core/geo_s2indexversion1.js b/jstests/core/geo_s2indexversion1.js
new file mode 100644
index 00000000000..8524faeddbd
--- /dev/null
+++ b/jstests/core/geo_s2indexversion1.js
@@ -0,0 +1,150 @@
+// Tests 2dsphere index option "2dsphereIndexVersion". Verifies that GeoJSON objects that are new
+// in version 2 are not allowed in version 1.
+
+var coll = db.getCollection("geo_s2indexversion1");
+coll.drop();
+
+//
+// Index build should fail for invalid values of "2dsphereIndexVersion".
+//
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": -1});
+assert.gleError(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 0});
+assert.gleError(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 3});
+assert.gleError(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": Infinity});
+assert.gleError(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": "foo"});
+assert.gleError(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": {a: 1}});
+assert.gleError(db);
+coll.drop();
+
+//
+// Index build should succeed for valid values of "2dsphereIndexVersion".
+//
+
+coll.ensureIndex({geo: "2dsphere"});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(1)});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(1)});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(2)});
+assert.gleSuccess(db);
+coll.drop();
+
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(2)});
+assert.gleSuccess(db);
+coll.drop();
+
+//
+// {2dsphereIndexVersion: 2} should be the default for new indexes.
+//
+
+coll.ensureIndex({geo: "2dsphere"});
+assert.gleSuccess(db);
+var specObj = coll.getDB().system.indexes.findOne({ns: coll.getFullName(), name: "geo_2dsphere"});
+assert.eq(2, specObj["2dsphereIndexVersion"]);
+coll.drop();
+
+//
+// Test compatibility of various GeoJSON objects with both 2dsphere index versions.
+//
+
+var pointDoc = {geo: {type: "Point", coordinates: [40, 5]}};
+var lineStringDoc = {geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}};
+var polygonDoc = {geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}};
+var multiPointDoc = {geo: {type: "MultiPoint",
+ coordinates: [[-73.9580, 40.8003], [-73.9498, 40.7968],
+ [-73.9737, 40.7648], [-73.9814, 40.7681]]}};
+var multiLineStringDoc = {geo: {type: "MultiLineString",
+ coordinates: [[[-73.96943, 40.78519], [-73.96082, 40.78095]],
+ [[-73.96415, 40.79229], [-73.95544, 40.78854]],
+ [[-73.97162, 40.78205], [-73.96374, 40.77715]],
+ [[-73.97880, 40.77247], [-73.97036, 40.76811]]]}};
+var multiPolygonDoc = {geo: {type: "MultiPolygon",
+ coordinates: [[[[-73.958, 40.8003], [-73.9498, 40.7968],
+ [-73.9737, 40.7648], [-73.9814, 40.7681],
+ [-73.958, 40.8003]]],
+ [[[-73.958, 40.8003], [-73.9498, 40.7968],
+ [-73.9737, 40.7648], [-73.958, 40.8003]]]]}};
+var geometryCollectionDoc = {geo: {type: "GeometryCollection",
+ geometries: [{type: "MultiPoint",
+ coordinates: [[-73.9580, 40.8003],
+ [-73.9498, 40.7968],
+ [-73.9737, 40.7648],
+ [-73.9814, 40.7681]]},
+ {type: "MultiLineString",
+ coordinates: [[[-73.96943, 40.78519],
+ [-73.96082, 40.78095]],
+ [[-73.96415, 40.79229],
+ [-73.95544, 40.78854]],
+ [[-73.97162, 40.78205],
+ [-73.96374, 40.77715]],
+ [[-73.97880, 40.77247],
+ [-73.97036, 40.76811]]]}]}};
+
+// {2dsphereIndexVersion: 2} indexes allow all supported GeoJSON objects.
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2});
+assert.gleSuccess(db);
+coll.insert(pointDoc);
+assert.gleSuccess(db);
+coll.insert(lineStringDoc);
+assert.gleSuccess(db);
+coll.insert(polygonDoc);
+assert.gleSuccess(db);
+coll.insert(multiPointDoc);
+assert.gleSuccess(db);
+coll.insert(multiLineStringDoc);
+assert.gleSuccess(db);
+coll.insert(multiPolygonDoc);
+assert.gleSuccess(db);
+coll.insert(geometryCollectionDoc);
+assert.gleSuccess(db);
+coll.drop();
+
+// {2dsphereIndexVersion: 1} indexes allow only Point, LineString, and Polygon.
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1});
+assert.gleSuccess(db);
+coll.insert(pointDoc);
+assert.gleSuccess(db);
+coll.insert(lineStringDoc);
+assert.gleSuccess(db);
+coll.insert(polygonDoc);
+assert.gleSuccess(db);
+coll.insert(multiPointDoc);
+assert.gleError(db);
+coll.insert(multiLineStringDoc);
+assert.gleError(db);
+coll.insert(multiPolygonDoc);
+assert.gleError(db);
+coll.insert(geometryCollectionDoc);
+assert.gleError(db);
+coll.drop();
diff --git a/jstests/core/geo_s2intersection.js b/jstests/core/geo_s2intersection.js
new file mode 100644
index 00000000000..42abacca98d
--- /dev/null
+++ b/jstests/core/geo_s2intersection.js
@@ -0,0 +1,141 @@
+var t = db.geo_s2intersectinglines
+t.drop()
+t.ensureIndex( { geo : "2dsphere" } );
+
+/* All the tests in this file confirm intersection behaviour against these
+ * three geo objects.
+ */
+var canonLine = {
+ name: 'canonLine',
+ geo: {
+ type: "LineString",
+ coordinates: [[0.0, 0.0], [1.0, 0.0]]
+ }
+};
+
+var canonPoint = {
+ name: 'canonPoint',
+ geo: {
+ type: "Point",
+ coordinates: [10.0, 10.0]
+ }
+};
+
+var canonPoly = {
+ name: 'canonPoly',
+ geo: {
+ type: "Polygon",
+ coordinates: [
+ [[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]]
+ ]
+ }
+};
+
+t.insert(canonLine);
+t.insert(canonPoint);
+t.insert(canonPoly);
+
+
+//Case 1: Basic sanity intersection.
+var testLine = {type: "LineString",
+ coordinates: [[0.5, 0.5], [0.5, -0.5]]};
+
+var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonLine');
+
+
+//Case 2: Basic polygon intersection.
+// We expect the canonLine to intersect with this polygon.
+var testPoly = {type: "Polygon",
+ coordinates: [
+ [[0.4, -0.1],[0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]]
+ ]}
+
+result = t.find({geo: {$geoIntersects: {$geometry: testPoly}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonLine');
+
+
+//Case 3: Intersection at a line's vertex.
+// When a query line passes through a vertex of an indexed line, we expect
+// this to count as a geoIntersection.
+testLine = {type: "LineString",
+ coordinates: [[0.0, 0.5], [0.0, -0.5]]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonLine');
+
+// Case 4: Sanity no intersection.
+// This line just misses the canonLine in the negative direction. This
+// should not count as a geoIntersection.
+testLine = {type: "LineString",
+ coordinates: [[-0.1, 0.5], [-0.1, -0.5]]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 0);
+
+
+// Case 5: Overlapping line - only partially overlaps.
+// Undefined behaviour: does intersect
+testLine = {type: "LineString",
+ coordinates: [[-0.5, 0.0], [0.5, 0.0]]};
+
+var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonLine');
+
+
+// Case 6: Contained line - this line is fully contained by the canonLine.
+// Undefined behaviour: doesn't intersect.
+testLine = {type: "LineString",
+ coordinates: [[0.1, 0.0], [0.9, 0.0]]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 0);
+
+// Case 7: Identical line in the identical position.
+// Undefined behaviour: does intersect.
+testLine = {type: "LineString",
+ coordinates: [[0.0, 0.0], [1.0, 0.0]]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonLine');
+
+// Case 8: Point intersection - we search with a line that intersects
+// with the canonPoint.
+testLine = {type: "LineString",
+ coordinates: [[10.0, 11.0], [10.0, 9.0]]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonPoint');
+
+// Case 9: Point-point intersection.
+// As above, but with a point identical to the canonPoint. We expect an
+// intersection here.
+testPoint = {type: "Point",
+ coordinates: [10.0, 10.0]}
+
+result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonPoint');
+
+
+//Case 10: Sanity point non-intersection.
+var testPoint = {type: "Point",
+ coordinates: [12.0, 12.0]}
+
+result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
+assert.eq(result.count(), 0);
+
+// Case 11: Point polygon intersection
+// verify that a point inside a polygon $geoIntersects.
+testPoint = {type: "Point",
+ coordinates: [50.5, 50.5]}
+
+result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
+assert.eq(result.count(), 1);
+assert.eq(result[0]['name'], 'canonPoly');
diff --git a/jstests/core/geo_s2largewithin.js b/jstests/core/geo_s2largewithin.js
new file mode 100644
index 00000000000..2327f1fb02d
--- /dev/null
+++ b/jstests/core/geo_s2largewithin.js
@@ -0,0 +1,45 @@
+// If our $within is enormous, create a coarse covering for the search so it
+// doesn't take forever.
+t = db.geo_s2largewithin
+t.drop()
+t.ensureIndex( { geo : "2dsphere" } );
+
+testPoint = {
+ name: "origin",
+ geo: {
+ type: "Point",
+ coordinates: [0.0, 0.0]
+ }
+};
+
+testHorizLine = {
+ name: "horiz",
+ geo: {
+ type: "LineString",
+ coordinates: [[-2.0, 10.0], [2.0, 10.0]]
+ }
+};
+
+testVertLine = {
+ name: "vert",
+ geo: {
+ type: "LineString",
+ coordinates: [[10.0, -2.0], [10.0, 2.0]]
+ }
+};
+
+t.insert(testPoint);
+t.insert(testHorizLine);
+t.insert(testVertLine);
+
+//Test a poly that runs horizontally along the equator.
+
+longPoly = {type: "Polygon",
+ coordinates: [
+ [[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]]
+ ]};
+
+result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
+assert.eq("origin", result[0].name)
diff --git a/jstests/core/geo_s2meridian.js b/jstests/core/geo_s2meridian.js
new file mode 100644
index 00000000000..6bc7dc735f2
--- /dev/null
+++ b/jstests/core/geo_s2meridian.js
@@ -0,0 +1,108 @@
+t = db.geo_s2meridian;
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+
+/*
+ * Test 1: check that intersection works on the meridian. We insert a line
+ * that crosses the meridian, and then run a geoIntersect with a line
+ * that runs along the meridian.
+ */
+
+meridianCrossingLine = {
+ geo: {
+ type: "LineString",
+ coordinates: [
+ [-178.0, 10.0],
+ [178.0, 10.0]]
+ }
+};
+
+assert.writeOK(t.insert(meridianCrossingLine));
+
+lineAlongMeridian = {
+ type: "LineString",
+ coordinates: [
+ [180.0, 11.0],
+ [180.0, 9.0]
+ ]
+}
+
+result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}});
+assert.eq(result.itcount(), 1);
+
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+/*
+ * Test 2: check that $within works across the meridian. We insert points
+ * on the meridian, and immediately on either side, and confirm that a poly
+ * covering all of them returns them all.
+ */
+pointOnNegativeSideOfMeridian = {
+ geo: {
+ type: "Point",
+ coordinates: [-179.0, 1.0]
+ }
+};
+pointOnMeridian = {
+ geo: {
+ type: "Point",
+ coordinates: [180.0, 1.0]
+ }
+};
+pointOnPositiveSideOfMeridian = {
+ geo: {
+ type: "Point",
+ coordinates: [179.0, 1.0]
+ }
+};
+
+t.insert(pointOnMeridian);
+t.insert(pointOnNegativeSideOfMeridian);
+t.insert(pointOnPositiveSideOfMeridian);
+
+meridianCrossingPoly = {
+ type: "Polygon",
+ coordinates: [
+ [[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]]
+ ]
+};
+
+result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}});
+assert.eq(result.itcount(), 3);
+
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+/*
 * Test 3: Check that near works around the meridian. Insert two points, the
 * closer one across the meridian, and confirm that both come back and that
 * the order is correct.
+ */
+pointOnNegativeSideOfMerid = {
+ name: "closer",
+ geo: {
+ type: "Point",
+ coordinates: [-179.0, 0.0]
+ }
+};
+
+pointOnPositiveSideOfMerid = {
+ name: "farther",
+ geo: {
+ type: "Point",
+ coordinates: [176.0, 0.0]
+ }
+};
+
+t.insert(pointOnNegativeSideOfMerid);
+t.insert(pointOnPositiveSideOfMerid);
+
+pointOnPositiveSideOfMeridian = {
+ type: "Point",
+ coordinates: [179.0, 0.0]
+};
+
+result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
+assert.eq(result.itcount(), 2);
+result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}});
+assert.eq(result[0].name, "closer");
+assert.eq(result[1].name, "farther");
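+
+// As a cross-check (a sketch; it assumes the geoNear command accepts a GeoJSON
+// near point with spherical: true, as used elsewhere in these tests), the
+// reported distances should come back sorted ascending:
+var gnRes = db.runCommand({geoNear: t.getName(), near: pointOnPositiveSideOfMeridian,
+                           spherical: true});
+assert.lt(gnRes.results[0].dis, gnRes.results[1].dis);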
diff --git a/jstests/core/geo_s2multi.js b/jstests/core/geo_s2multi.js
new file mode 100644
index 00000000000..56e3ef77068
--- /dev/null
+++ b/jstests/core/geo_s2multi.js
@@ -0,0 +1,46 @@
+t = db.geo_s2multi
+t.drop()
+
+t.ensureIndex({geo: "2dsphere"})
+
+// Let's try the examples in the GeoJSON spec.
+multiPointA = { "type": "MultiPoint", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] }
+assert.writeOK(t.insert({geo: multiPointA}));
+
+multiLineStringA = { "type": "MultiLineString", "coordinates": [ [ [100.0, 0.0], [101.0, 1.0] ],
+ [ [102.0, 2.0], [103.0, 3.0] ]]}
+assert.writeOK(t.insert({geo: multiLineStringA}));
+
+multiPolygonA = { "type": "MultiPolygon", "coordinates": [
+ [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
+ [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
+ [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]]}
+assert.writeOK(t.insert({geo: multiPolygonA}))
+
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
+ {"type": "Point", "coordinates": [100,0]}}}}).itcount());
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
+ {"type": "Point", "coordinates": [101.0,1.0]}}}}).itcount());
+
+// Inside the hole in multiPolygonA
+assert.eq(0, t.find({geo: {$geoIntersects: {$geometry:
+ {"type": "Point", "coordinates": [100.21,0.21]}}}}).itcount());
+
+// One point inside the hole, one out.
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
+ {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21]]}}}}).itcount());
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
+ {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21],[101,1]]}}}}).itcount());
+// Polygon contains itself and the multipoint.
+assert.eq(2, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
+
+partialPolygonA = { "type": "Polygon", "coordinates":
+ [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ] };
+assert.writeOK(t.insert({geo: partialPolygonA}));
+// Polygon contains itself, the partial poly, and the multipoint
+assert.eq(3, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
+
+assert.eq(1, t.find({geo: {$geoWithin: {$geometry: partialPolygonA}}}).itcount());
+
+// Itself, the multi poly, the multipoint...
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: partialPolygonA}}}).itcount());
diff --git a/jstests/core/geo_s2near.js b/jstests/core/geo_s2near.js
new file mode 100644
index 00000000000..136e821b4b8
--- /dev/null
+++ b/jstests/core/geo_s2near.js
@@ -0,0 +1,84 @@
+// Test 2dsphere near search, called via find and geoNear.
+t = db.geo_s2near
+t.drop();
+
+// Make sure that geoNear gives us back loc
+goldenPoint = {type: "Point", coordinates: [ 31.0, 41.0]}
+t.insert({geo: goldenPoint})
+t.ensureIndex({ geo : "2dsphere" })
+resNear = db.runCommand({geoNear : t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true})
+assert.eq(resNear.results[0].loc, goldenPoint)
+
+// FYI:
+// One degree of long @ 0 is 111km or so.
+// One degree of lat @ 0 is 110km or so.
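+// So the 1/1000-degree grid spacing used below puts neighbouring points
+// roughly 110m apart.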
+lat = 0
+lng = 0
+points = 10
+for (var x = -points; x < points; x += 1) {
+ for (var y = -points; y < points; y += 1) {
+ t.insert({geo : { "type" : "Point", "coordinates" : [lng + x/1000.0, lat + y/1000.0]}})
+ }
+}
+
+origin = { "type" : "Point", "coordinates": [ lng, lat ] }
+
+t.ensureIndex({ geo : "2dsphere" })
+
+// Near only works when the query is a point.
+someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]}
+somepoly = { "type" : "Polygon",
+ "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
+assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : someline } } }).count()})
+assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : somepoly } } }).count()})
+assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: someline, spherical:true }).results.length})
+assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: somepoly, spherical:true }).results.length})
+
+// Do some basic near searches.
+res = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 2000} } }).limit(10)
+resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, maxDistance: Math.PI, spherical: true})
+assert.eq(res.itcount(), resNear.results.length, 10)
+
+res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10)
+resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, spherical: true})
+assert.eq(res.itcount(), resNear.results.length, 10)
+
+// Find all the points!
+res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000)
+resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true})
+assert.eq(resNear.results.length, res.itcount(), (2 * points) * (2 * points))
+
+// longitude goes -180 to 180
+// latitude goes -90 to 90
+// Let's put in some perverse (polar) data and make sure we get it back.
+// Points go long, lat.
+t.insert({geo: { "type" : "Point", "coordinates" : [-180, -90]}})
+t.insert({geo: { "type" : "Point", "coordinates" : [180, -90]}})
+t.insert({geo: { "type" : "Point", "coordinates" : [180, 90]}})
+t.insert({geo: { "type" : "Point", "coordinates" : [-180, 90]}})
+res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000)
+resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true})
+assert.eq(res.itcount(), resNear.results.length, (2 * points) * (2 * points) + 4)
+
+function testRadAndDegreesOK(distance) {
+ // Distance for old style points is radians.
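+    // (meters / earth radius in meters = radians; e.g. 10000m / 6378100m ~= 0.00157 rad.)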
+ resRadians = t.find({geo: {$nearSphere: [0,0], $maxDistance: (distance / (6378.1 * 1000))}})
+ // Distance for new style points is meters.
+ resMeters = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: distance} } })
+ // And we should get the same # of results no matter what.
+ assert.eq(resRadians.itcount(), resMeters.itcount())
+
+ // Also, geoNear should behave the same way.
+ resGNMeters = db.runCommand({geoNear : t.getName(), near: origin, maxDistance: distance, spherical: true})
+ resGNRadians = db.runCommand({geoNear : t.getName(), near: [0,0], maxDistance: (distance / (6378.1 * 1000)), spherical: true})
+ assert.eq(resGNRadians.results.length, resGNMeters.results.length)
+    for (var i = 0; i < resGNRadians.results.length; ++i) {
+ // Radius of earth * radians = distance in meters.
+ assert.close(resGNRadians.results[i].dis * 6378.1 * 1000, resGNMeters.results[i].dis)
+ }
+}
+
+testRadAndDegreesOK(1);
+testRadAndDegreesOK(10)
+testRadAndDegreesOK(50)
+testRadAndDegreesOK(10000)
diff --git a/jstests/core/geo_s2nearComplex.js b/jstests/core/geo_s2nearComplex.js
new file mode 100644
index 00000000000..835dfe88481
--- /dev/null
+++ b/jstests/core/geo_s2nearComplex.js
@@ -0,0 +1,268 @@
+var t = db.geo_s2nearcomplex
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+/* Short names for math operations */
+Random.setRandomSeed();
+var random = Random.rand;
+var PI = Math.PI;
+var asin = Math.asin;
+var sin = Math.sin;
+var cos = Math.cos;
+var atan2 = Math.atan2
+
+
+var originGeo = {type: "Point", coordinates: [20.0, 20.0]};
+// Center point for all tests.
+var origin = {
+ name: "origin",
+ geo: originGeo
+}
+
+
+/*
+ * Convenience function for checking that coordinates match. `threshold` lets
+ * you specify how close the coordinates must be to count as equal.
+ */
+function coordinateEqual(first, second, threshold){
+ threshold = threshold || 0.001
+ first = first['geo']['coordinates']
+ second = second['geo']['coordinates']
+ if(Math.abs(first[0] - second[0]) <= threshold){
+ if(Math.abs(first[1] - second[1]) <= threshold){
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * Creates `count` random and uniformly distributed points centered around `origin`.
+ * No points will be closer to origin than `minDist`, and no points will be
+ * further than `maxDist`. Points will be inserted into the global `t`
+ * collection, and will be returned.
+ * Based on this algorithm: http://williams.best.vwh.net/avform.htm#LL
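+ * Note: lat/lng are fed to the trig functions below in degrees rather than
+ * radians, so the field is not exactly uniform; the tests only rely on point
+ * counts and on a consistent ordering, which this still provides.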
+ */
+function uniformPoints(origin, count, minDist, maxDist){
+ var i;
+ var lng = origin['geo']['coordinates'][0];
+ var lat = origin['geo']['coordinates'][1];
+ var distances = [];
+ var points = [];
+ for(i=0; i < count; i++){
+ distances.push((random() * (maxDist - minDist)) + minDist);
+ }
+ distances.sort();
+ while(points.length < count){
+ var angle = random() * 2 * PI;
+ var distance = distances[points.length];
+ var pointLat = asin((sin(lat) * cos(distance)) + (cos(lat) * sin(distance) * cos(angle)));
+ var pointDLng = atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat));
+        var pointLng = ((lng - pointDLng + PI) % (2 * PI)) - PI;
+
+ // Latitude must be [-90, 90]
+ var newLat = lat + pointLat;
+ if (newLat > 90) newLat -= 180;
+ if (newLat < -90) newLat += 180;
+
+ // Longitude must be [-180, 180]
+ var newLng = lng + pointLng;
+ if (newLng > 180) newLng -= 360;
+ if (newLng < -180) newLng += 360;
+
+ var newPoint = {
+ geo: {
+ type: "Point",
+ //coordinates: [lng + pointLng, lat + pointLat]
+ coordinates: [newLng, newLat]
+ }
+ };
+
+ points.push(newPoint);
+ }
+ for(i=0; i < points.length; i++){
+ t.insert(points[i]);
+ }
+ return points;
+}
+
+/*
+ * Creates a random uniform field as above, except for `numberOfHoles` gaps,
+ * each with `sizeOfHoles` points removed around a randomly chosen point.
+ */
+function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles){
+ var points = uniformPoints(origin, count, minDist, maxDist);
+ var i;
+ for(i=0; i<numberOfHoles; i++){
+ var randomPoint = points[Math.floor(random() * points.length)];
+ removeNearest(randomPoint, sizeOfHoles);
+ }
+}
+
+/*
+ * Creates a random uniform field as above, except for `numberOfClusters` clusters,
+ * each consisting of N points where `minClusterSize` <= N <= `maxClusterSize`.
+ * The optional `distRatio` parameter scales each cluster's radius range down
+ * relative to the full field (cluster distances run from `minDist / distRatio`
+ * to `maxDist / distRatio`). Defaults to 10.
+ */
+function uniformPointsWithClusters(origin, count, minDist, maxDist, numberOfClusters, minClusterSize, maxClusterSize, distRatio){
+ distRatio = distRatio || 10
+ var points = uniformPoints(origin, count, minDist, maxDist);
+ for(j=0; j<numberOfClusters; j++){
+ var randomPoint = points[Math.floor(random() * points.length)];
+ var clusterSize = (random() * (maxClusterSize - minClusterSize)) + minClusterSize;
+ uniformPoints(randomPoint, clusterSize, minDist / distRatio, maxDist / distRatio);
+ }
+}
+/*
+ * Function used to create gaps in an existing point field. Will remove the `number` nearest
+ * geo objects to the specified `point`.
+ */
+function removeNearest(point, number){
+ var pointsToRemove = t.find({geo: {$geoNear: {$geometry: point['geo']}}}).limit(number);
+ var idsToRemove = [];
+ while(pointsToRemove.hasNext()){
+ point = pointsToRemove.next();
+ idsToRemove.push(point['_id']);
+ }
+
+ t.remove({_id: {$in: idsToRemove}});
+}
+/*
+ * Validates the ordering of the nearest results is the same no matter how many
+ * geo objects are requested. This could fail if two points have the same dist
+ * from origin, because they may not be well-ordered. If we see strange failures,
+ * we should consider that.
+ */
+function validateOrdering(query){
+ var near10 = t.find(query).limit(10);
+ var near20 = t.find(query).limit(20);
+ var near30 = t.find(query).limit(30);
+ var near40 = t.find(query).limit(40);
+
+ for(i=0;i<10;i++){
+ assert(coordinateEqual(near10[i], near20[i]));
+ assert(coordinateEqual(near10[i], near30[i]));
+ assert(coordinateEqual(near10[i], near40[i]));
+ }
+
+ for(i=0;i<20;i++){
+ assert(coordinateEqual(near20[i], near30[i]));
+ assert(coordinateEqual(near20[i], near40[i]));
+ }
+
+ for(i=0;i<30;i++){
+ assert(coordinateEqual(near30[i], near40[i]));
+ }
+}
+
+var query = {geo: {$geoNear: {$geometry: originGeo}}};
+
+// Test a uniform distribution of 1000 points.
+uniformPoints(origin, 1000, 0.5, 1.5);
+
+validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
+
+print("Millis for uniform:")
+print(t.find(query).explain().millis)
+print("Total points:");
+print(t.find(query).itcount());
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+// Test a uniform distribution with 5 gaps each with 10 points missing.
+uniformPointsWithGaps(origin, 1000, 1, 10.0, 5, 10);
+
+validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
+
+print("Millis for uniform with gaps:")
+print(t.find(query).explain().millis)
+print("Total points:");
+print(t.find(query).itcount());
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+// Test a uniform distribution with 5 clusters each with between 10 and 100 points.
+uniformPointsWithClusters(origin, 1000, 1, 10.0, 5, 10, 100);
+
+validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
+
+print("Millis for uniform with clusters:");
+print(t.find(query).explain().millis);
+print("Total points:");
+print(t.find(query).itcount());
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+// Test a uniform near search with origin around the pole.
+
+// Center point near pole.
+originGeo = {type: "Point", coordinates: [0.0, 89.0]};
+origin = {
+ name: "origin",
+ geo: originGeo
+}
+uniformPoints(origin, 50, 0.5, 1.5);
+
+validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
+
+print("Millis for uniform near pole:")
+print(t.find({geo: {$geoNear: {$geometry: originGeo}}}).explain().millis)
+assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+// Center point near the meridian
+originGeo = {type: "Point", coordinates: [179.0, 0.0]};
+origin = {
+ name: "origin",
+ geo: originGeo
+}
+uniformPoints(origin, 50, 0.5, 1.5);
+
+validateOrdering({geo: {$geoNear: {$geometry: originGeo}}})
+
+print("Millis for uniform on meridian:")
+print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis)
+assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+// Center point near the negative meridian
+originGeo = {type: "Point", coordinates: [-179.0, 0.0]};
+origin = {
+ name: "origin",
+ geo: originGeo
+}
+uniformPoints(origin, 50, 0.5, 1.5);
+
+validateOrdering({geo: {$near: {$geometry: originGeo}}})
+
+print("Millis for uniform on negative meridian:");
+print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
+assert.eq(t.find({geo: {$near: {$geometry: originGeo}}}).itcount(), 50);
+
+// Near search with points that are really far away.
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+originGeo = {type: "Point", coordinates: [0.0, 0.0]};
+origin = {
+ name: "origin",
+ geo: originGeo
+}
+
+uniformPoints(origin, 10, 89, 90);
+
+cur = t.find({geo: {$near: {$geometry: originGeo}}})
+
+assert.eq(cur.itcount(), 10);
+cur = t.find({geo: {$near: {$geometry: originGeo}}})
+
+print("Near search on very distant points:");
+print(t.find({geo: {$near: {$geometry: originGeo}}}).explain().millis);
+pt = cur.next();
+assert(pt)
diff --git a/jstests/core/geo_s2near_equator_opposite.js b/jstests/core/geo_s2near_equator_opposite.js
new file mode 100644
index 00000000000..8ee5d486d5e
--- /dev/null
+++ b/jstests/core/geo_s2near_equator_opposite.js
@@ -0,0 +1,31 @@
+// Tests geo near with 2 points diametrically opposite to each other
+// on the equator
+// First reported in SERVER-11830 as a regression in 2.5
+
+var t = db.geos2nearequatoropposite;
+
+t.drop();
+
+t.insert({loc: {type: 'Point', coordinates: [0, 0]}});
+t.insert({loc: {type: 'Point', coordinates: [-1, 0]}});
+
+t.ensureIndex({loc: '2dsphere'});
+
+// upper bound for half of earth's circumference in meters
+var dist = 40075000 / 2 + 1;
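+// (40075000m / 2 = 20037500m; the extra meter keeps truly antipodal points
+// within $maxDistance despite rounding.)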
+
+var nearSphereCount = t.find({loc: {$nearSphere:
+ {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
+var nearCount = t.find({loc: {$near:
+ {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
+var geoNearResult = db.runCommand({geoNear: t.getName(), near:
+ {type: 'Point', coordinates: [180, 0]}, spherical: true});
+
+print('nearSphere count = ' + nearSphereCount);
+print('near count = ' + nearCount);
+print('geoNearResults = ' + tojson(geoNearResult));
+
+assert.eq(2, nearSphereCount, 'unexpected document count for nearSphere');
+assert.eq(2, nearCount, 'unexpected document count for near');
+assert.eq(2, geoNearResult.results.length, 'unexpected document count in geoNear results');
+assert.gt(dist, geoNearResult.stats.maxDistance, 'unexpected maximum distance in geoNear results');
diff --git a/jstests/core/geo_s2nearcorrect.js b/jstests/core/geo_s2nearcorrect.js
new file mode 100644
index 00000000000..cdb5404a08d
--- /dev/null
+++ b/jstests/core/geo_s2nearcorrect.js
@@ -0,0 +1,12 @@
+// SERVER-9484
+// A geometry may have several covers, one of which is in a search ring and the other of which is
+// not. If we see the cover that's not in the search ring, we can't mark the object as 'seen' for
+// this ring.
+t = db.geo_s2nearcorrect
+t.drop()
+
+longline = { "type" : "LineString", "coordinates": [ [0,0], [179, 89]]};
+t.insert({geo: longline});
+t.ensureIndex({geo: "2dsphere"});
+origin = { "type" : "Point", "coordinates": [ 45, 45] }
+assert.eq(1, t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 20000000} } }).count());
diff --git a/jstests/core/geo_s2nearwithin.js b/jstests/core/geo_s2nearwithin.js
new file mode 100644
index 00000000000..5df27581e5f
--- /dev/null
+++ b/jstests/core/geo_s2nearwithin.js
@@ -0,0 +1,41 @@
+// Test geoNear + $within.
+t = db.geo_s2nearwithin
+t.drop();
+
+points = 10
+for (var x = -points; x < points; x += 1) {
+ for (var y = -points; y < points; y += 1) {
+ t.insert({geo: [x, y]})
+ }
+}
+
+origin = { "type" : "Point", "coordinates": [ 0, 0] }
+
+t.ensureIndex({ geo : "2dsphere" })
+// Near requires an index it can use: without spherical: true the command
+// looks for a 2d index, and only a 2dsphere index exists here, so it fails.
+assert.commandFailed( db.runCommand({ geoNear: t.getName(), near: [0, 0],
+ query: { geo: { $within: { $center: [[0, 0], 1] }}}}));
+
+// Spherical is specified so this does work. Old style points are unusual in
+// that you can use them with both $center and $centerSphere; points are the
+// only type we will do this conversion for.
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
+ query: {geo: {$within: {$center: [[0, 0], 1]}}}})
+assert.eq(resNear.results.length, 5)
+
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
+ query: {geo: {$within: {$centerSphere: [[0, 0], Math.PI/180.0]}}}})
+assert.eq(resNear.results.length, 5)
+
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
+ query: {geo: {$within: {$centerSphere: [[0, 0], 0]}}}})
+assert.eq(resNear.results.length, 1)
+
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
+ query: {geo: {$within: {$centerSphere: [[1, 0], 0.5 * Math.PI/180.0]}}}})
+assert.eq(resNear.results.length, 1)
+
+resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
+ query: {geo: {$within: {$center: [[1, 0], 1.5]}}}})
+assert.eq(resNear.results.length, 9)
diff --git a/jstests/core/geo_s2nongeoarray.js b/jstests/core/geo_s2nongeoarray.js
new file mode 100644
index 00000000000..02f307fbab1
--- /dev/null
+++ b/jstests/core/geo_s2nongeoarray.js
@@ -0,0 +1,26 @@
+// Explode arrays when indexing non-geo fields in 2dsphere, and make sure that
+// we find them with queries.
+t = db.geo_s2nongeoarray
+
+oldPoint = [40,5]
+
+var data = {geo: oldPoint, nonGeo: [123,456], otherNonGeo: [{b:[1,2]},{b:[3,4]}]};
+
+t.drop();
+assert.writeOK(t.insert(data));
+assert.writeOK(t.ensureIndex({otherNonGeo: 1}));
+assert.eq(1, t.find({otherNonGeo: {b:[1,2]}}).itcount());
+assert.eq(0, t.find({otherNonGeo: 1}).itcount());
+assert.eq(1, t.find({'otherNonGeo.b': 1}).itcount());
+
+t.drop();
+t.insert(data);
+t.ensureIndex({geo: "2d", nonGeo: 1, otherNonGeo: 1})
+assert.eq(t.find({nonGeo: 123, geo: {$nearSphere: oldPoint}}).itcount(), 1);
+assert.eq(t.find({'otherNonGeo.b': 1, geo: {$nearSphere: oldPoint}}).itcount(), 1);
+
+t.drop()
+t.insert(data);
+t.ensureIndex({geo: "2dsphere", nonGeo: 1, otherNonGeo: 1})
+assert.eq(t.find({nonGeo: 123, geo: {$nearSphere: oldPoint}}).itcount(), 1);
+assert.eq(t.find({'otherNonGeo.b': 1, geo: {$nearSphere: oldPoint}}).itcount(), 1);
diff --git a/jstests/core/geo_s2nonstring.js b/jstests/core/geo_s2nonstring.js
new file mode 100755
index 00000000000..11fc8f4f4c4
--- /dev/null
+++ b/jstests/core/geo_s2nonstring.js
@@ -0,0 +1,22 @@
+// Added to make sure that S2 indexing's string AND non-string keys work.
+t = db.geo_s2nonstring
+t.drop()
+
+t.ensureIndex( { geo:'2dsphere', x:1 } );
+
+t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
+t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:5 } );
+
+t.drop()
+t.ensureIndex( { geo:'2dsphere', x:1 } );
+
+t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
+t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] } } );
+
+// Expect 1 match, where x is 'a'
+assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
+ $maxDistance: 20 } }, x:'a' } ) );
+
+// Expect 1 match, where x matches null (missing matches null).
+assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
+ $maxDistance: 20 } }, x:null } ) );
diff --git a/jstests/core/geo_s2nopoints.js b/jstests/core/geo_s2nopoints.js
new file mode 100644
index 00000000000..c897f39f815
--- /dev/null
+++ b/jstests/core/geo_s2nopoints.js
@@ -0,0 +1,7 @@
+// See SERVER-7794.
+t = db.geo_s2nopoints
+t.drop()
+
+t.ensureIndex({loc: "2dsphere", x:1})
+assert.eq(0, t.count({loc: {$near: {$geometry: {type: 'Point', coordinates:[0,0]},
+ $maxDistance: 10}}}))
diff --git a/jstests/core/geo_s2oddshapes.js b/jstests/core/geo_s2oddshapes.js
new file mode 100644
index 00000000000..24a318d5b98
--- /dev/null
+++ b/jstests/core/geo_s2oddshapes.js
@@ -0,0 +1,138 @@
+// Verify that odd polygons (huge or "narrow") behave as we expect.
+// Note that since 2dsphere is spherical, polygons that seem narrow are actually
+// rather wide if their latitude (or longitude) range is large.
+var t = db.geo_s2oddshapes
+t.drop()
+t.ensureIndex( { geo : "2dsphere" } );
+
+var testPoint = {
+ name: "origin",
+ geo: {
+ type: "Point",
+ coordinates: [0.0, 0.0]
+ }
+};
+
+var testHorizLine = {
+ name: "horiz",
+ geo: {
+ type: "LineString",
+ coordinates: [[-2.0, 10.0], [2.0, 10.0]]
+ }
+};
+
+var testVertLine = {
+ name: "vert",
+ geo: {
+ type: "LineString",
+ coordinates: [[10.0, -2.0], [10.0, 2.0]]
+ }
+};
+
+t.insert(testPoint);
+t.insert(testHorizLine);
+t.insert(testVertLine);
+
+//Test a poly that runs vertically all the way along the meridian.
+
+var tallPoly = {type: "Polygon",
+ coordinates: [
+ [[1.0, 89.0], [-1.0, 89.0], [-1.0, -89.0], [1.0, -89.0], [1.0, 89.0]]
+ ]};
+//We expect that the testPoint (at the origin) will be within this poly.
+var result = t.find({geo: {$within: {$geometry: tallPoly}}});
+assert.eq(result.itcount(), 1);
+var result = t.find({geo: {$within: {$geometry: tallPoly}}});
+assert.eq(result[0].name, 'origin');
+
+//We expect that the testPoint and the testHorizLine should geoIntersect
+//with this poly.
+result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
+assert.eq(result.itcount(), 2);
+result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
+
+//Test a poly that runs horizontally along the equator.
+
+var longPoly = {type: "Polygon",
+ coordinates: [
+ [[89.0, 1.0], [-89.0, 1.0], [-89.0, -1.0], [89.0, -1.0], [89.0, 1.0]]
+ ]};
+
+// Thanks to spherical geometry, this poly contains most of the hemisphere.
+result = t.find({geo: {$within: {$geometry: longPoly}}});
+assert.eq(result.itcount(), 3);
+result = t.find({geo: {$geoIntersects: {$geometry: longPoly}}});
+assert.eq(result.itcount(), 3);
+
+//Test a poly that is the size of half the earth.
+
+t.drop()
+t.ensureIndex( { geo : "2dsphere" } );
+
+var insidePoint = {
+ name: "inside",
+ geo: {
+ type: "Point",
+ name: "inside",
+ coordinates: [100.0, 0.0]
+ }
+};
+
+var outsidePoint = {
+    name: "outside",
+    geo: {
+        type: "Point",
+        name: "outside",
+        coordinates: [-100.0, 0.0]
+    }
+};
+
+t.insert(insidePoint);
+t.insert(outsidePoint);
+
+var largePoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -90.0], [0.0, 90.0], [180.0, 0], [0.0, -90.0]]
+ ]};
+
+result = t.find({geo: {$within: {$geometry: largePoly}}});
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$within: {$geometry: largePoly}}});
+var point = result[0]
+assert.eq(point.name, 'inside');
+
+//Test a poly that is very small: a couple of kilometers across.
+
+t.drop()
+t.ensureIndex( { geo : "2dsphere" } );
+
+insidePoint = {
+ name: "inside",
+ geo: {
+ type: "Point",
+ name: "inside",
+ coordinates: [0.01, 0.0]
+ }};
+
+outsidePoint = {
+    name: "outside",
+    geo: {
+        type: "Point",
+        name: "outside",
+        coordinates: [0.2, 0.0]
+    }};
+
+t.insert(insidePoint);
+t.insert(outsidePoint);
+
+smallPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -0.01], [0.015, -0.01], [0.015, 0.01], [0.0, 0.01], [0.0, -0.01]]
+ ]};
+
+result = t.find({geo: {$within: {$geometry: smallPoly}}});
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$within: {$geometry: smallPoly}}});
+point = result[0]
+assert.eq(point.name, 'inside');
+
diff --git a/jstests/core/geo_s2ordering.js b/jstests/core/geo_s2ordering.js
new file mode 100644
index 00000000000..13847b08745
--- /dev/null
+++ b/jstests/core/geo_s2ordering.js
@@ -0,0 +1,52 @@
+// This tests that 2dsphere indices can be ordered arbitrarily, and that the ordering
+// actually matters for lookup speed. That is, if we're looking for a non-geo key of which
+// there are not many, the index order (nongeo, geo) should be faster than (geo, nongeo)
+// for 2dsphere.
+t = db.geo_s2ordering
+t.drop();
+
+needle = "hari"
+
+// We insert lots of points in a region and look for a non-geo key which is rare.
+function makepoints(needle) {
+ lat = 0
+ lng = 0
+ points = 50.0
+ var bulk = t.initializeUnorderedBulkOp();
+ for (var x = -points; x < points; x += 1) {
+ for (var y = -points; y < points; y += 1) {
+ tag = x.toString() + "," + y.toString();
+ bulk.insert({ nongeo: tag,
+ geo: {
+ type: "Point",
+ coordinates: [lng + x/points, lat + y/points]}});
+ }
+ }
+ bulk.insert({ nongeo: needle, geo: { type: "Point", coordinates: [0,0] }});
+ assert.writeOK(bulk.execute());
+}
+
+function runTest(index) {
+ t.ensureIndex(index)
+ // If both tests take longer than this, then we will error. This is intentional
+ // since the tests shouldn't take that long.
+ mintime = 100000.0;
+ resultcount = 0;
+ iterations = 10;
+ for (var x = 0; x < iterations; ++x) {
+ res = t.find({nongeo: needle, geo: {$within: {$centerSphere: [[0,0], Math.PI/180.0]}}})
+ if (res.explain().millis < mintime) {
+ mintime = res.explain().millis
+ resultcount = res.itcount()
+ }
+ }
+ t.dropIndex(index)
+ return {time: mintime, results: resultcount}
+}
+
+makepoints(needle)
+// Indexing non-geo first should be quicker.
+fast = runTest({nongeo: 1, geo: "2dsphere"})
+slow = runTest({geo: "2dsphere", nongeo: 1})
+assert.eq(fast.results, slow.results)
+assert(fast.time < slow.time)
diff --git a/jstests/core/geo_s2overlappingpolys.js b/jstests/core/geo_s2overlappingpolys.js
new file mode 100644
index 00000000000..0d96222206c
--- /dev/null
+++ b/jstests/core/geo_s2overlappingpolys.js
@@ -0,0 +1,213 @@
+var t = db.geo_s2overlappingpolys
+t.drop()
+
+t.ensureIndex( { geo : "2dsphere" } );
+
+var minError = 0.8e-13;
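+// A perturbation assumed to sit below s2's vertex tolerance; the cases below
+// nudge coordinates by amounts derived from it to probe that tolerance.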
+
+var canonPoly = {type: "Polygon",
+ coordinates: [
+ [[-1.0, -1.0], [1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, -1.0]]
+ ]};
+t.insert({geo: canonPoly});
+
+// Test 1: If a poly completely encloses the canonPoly, we expect the canonPoly
+// to be returned for both $within and $geoIntersect
+
+var outerPoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
+ ]};
+var result = t.find({geo: {$within: {$geometry: outerPoly}}});
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$geoIntersects: {$geometry: outerPoly}}});
+assert.eq(result.itcount(), 1);
+
+
+// Test 2: If a poly covers half of the canonPoly, we expect it to
+// geoIntersect, but not to be within.
+
+var partialPoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -2.0], [2.0, -2.0], [2.0, 0.0], [-2.0, 0.0], [-2.0, -2.0]]
+ ]};
+
+//Should not be within
+result = t.find({geo: {$within: {$geometry: partialPoly}}});
+assert.eq(result.itcount(), 0);
+
+//This should however count as a geoIntersect
+result = t.find({geo: {$geoIntersects: {$geometry: partialPoly}}});
+assert.eq(result.itcount(), 1);
+
+
+// Test 3: Polygons that intersect at a point or an edge have undefined
+// behaviour in s2. The s2 library we're using appears to have
+// the following behaviour.
+
+// Case (a): Polygons that intersect at one point (not a vertex).
+// behaviour: geoIntersects.
+
+var sharedPointPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [0.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: sharedPointPoly}}});
+assert.eq(result.itcount(), 1);
+
+// Case (b): Polygons that intersect at one point (a vertex).
+// behaviour: not geoIntersect
+
+var sharedVertexPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [1.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: sharedVertexPoly}}});
+assert.eq(result.itcount(), 0);
+
+// Case (c): Polygons that intersect at one point that is very close to a
+// vertex should have the same behaviour as Case (b).
+
+var almostSharedVertexPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [1.0 - minError, -1.0], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: almostSharedVertexPoly}}});
+assert.eq(result.itcount(), 0);
+
+
+// Case (d): Polygons that intersect at one point that is not quite as close
+// to a vertex should behave as though it were not a vertex, and should
+// geoIntersect.
+
+var notCloseEnoughSharedVertexPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [1.0 - (10 * minError), -1.0], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedVertexPoly}}});
+assert.eq(result.itcount(), 1);
+
+// Case (e): Polygons that come very close to having a point intersection
+// on a non-vertex coordinate should intersect.
+
+var almostSharedPointPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [0.0, (-1.0 - minError)], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: almostSharedPointPoly}}});
+assert.eq(result.itcount(), 1);
+
+
+// Case (f): If we increase the error a little, it should no longer act
+// as though it's intersecting.
+// NOTE: I think this error bound seems odd. Going to 0.000152297 will break this test.
+// I've confirmed there is an error bound, but it's a lot larger than we experienced above.
+var errorBound = 0.000152298
+var notCloseEnoughSharedPointPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -2.0], [0.0, -1.0 - errorBound], [1.0, -2.0], [0.0, -2.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedPointPoly}}});
+assert.eq(result.itcount(), 0);
+
+/* Test 4: Importantly, polygons with shared edges have undefined intersection
+ * under s2. These tests therefore serve more to make sure nothing changes than
+ * to confirm an expected behaviour.
+ */
+
+// Case 1: A polygon that shares an edge with another polygon, where the searching
+// polygon's edge is fully covered by the canon polygon's edge.
+// Result: No intersection.
+var fullyCoveredEdgePoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -0.5], [-1.0, -0.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -0.5]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: fullyCoveredEdgePoly}}});
+assert.eq(result.itcount(), 0);
+
+// Case 2: A polygon that shares an edge with another polygon, where the searching
+// polygon's edge fully covers the canon polygon's edge.
+// Result: Intersection.
+var coveringEdgePoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 1.5], [-2.0, 1.5], [-2.0, -1.5]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: coveringEdgePoly}}});
+assert.eq(result.itcount(), 1);
+
+// Case 2a: same as Case 2, except pulled slightly away from the polygon.
+// Result: Intersection.
+// NOTE: Scales of errors?
+var closebyCoveringEdgePoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -1.5], [-1.0 - (minError / 1000), -1.5], [-1.0 - (minError / 1000), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: closebyCoveringEdgePoly}}});
+assert.eq(result.itcount(), 1);
+
+// Case 2b: same as Case 2a, except pulled slightly further from the polygon, so that it's not intersecting.
+// Result: No Intersection.
+// NOTE: Scales of errors?
+var notCloseEnoughCoveringEdgePoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -1.5], [-1.0 - (minError / 100), -1.5], [-1.0 - (minError / 100), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughCoveringEdgePoly}}});
+assert.eq(result.itcount(), 0);
+
+// Case 3: A polygon that shares an edge with another polygon, where the searching
+// polygon's edge partially covers the canon polygon's edge.
+// Result: No intersection.
+var partiallyCoveringEdgePoly = {type: "Polygon",
+ coordinates: [
+ [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -1.5]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: partiallyCoveringEdgePoly}}});
+assert.eq(result.itcount(), 0);
+
+
+//Polygons that intersect at three non-collinear points should geoIntersect
+var sharedPointsPoly = {type: "Polygon",
+ coordinates: [
+ [[0.0, -3.0], [0.0, -1.0], [2.0, -2.0], [1.0, 0.0], [2.0, 2.0], [0.0, 1.0], [0.0, 3.0], [3.0, 3.0], [3.0, -3.0], [0.0, -3.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: sharedPointsPoly}}});
+assert.eq(result.itcount(), 1);
+
+//If a polygon contains a hole, and another polygon is within that hole, it should not be within or intersect.
+
+var bigHolePoly = {type: "Polygon",
+ coordinates: [
+ [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
+ [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
+ ]};
+result = t.find({geo: {$within: {$geometry: bigHolePoly}}});
+assert.eq(result.itcount(), 0);
+result = t.find({geo: {$geoIntersects: {$geometry: bigHolePoly}}});
+assert.eq(result.itcount(), 0);
+
+// If a polygon has a hole, and another polygon is contained partially by that hole, it should be an intersection
+// but not a within.
+
+var internalOverlapPoly = {type: "Polygon",
+ coordinates: [
+ [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
+ [[-2.0, 0.0], [2.0, 0.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, 0.0]]
+ ]};
+
+result = t.find({geo: {$geoIntersects: {$geometry: internalOverlapPoly}}});
+assert.eq(result.itcount(), 1);
+result = t.find({geo: {$within: {$geometry: internalOverlapPoly}}});
+assert.eq(result.itcount(), 0);
diff --git a/jstests/core/geo_s2polywithholes.js b/jstests/core/geo_s2polywithholes.js
new file mode 100755
index 00000000000..f396f6b9a4f
--- /dev/null
+++ b/jstests/core/geo_s2polywithholes.js
@@ -0,0 +1,46 @@
+var t = db.geo_s2weirdpolys;
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+
+var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
+var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
+var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
+
+t.insert({geo : centerPoint});
+t.insert({geo : edgePoint});
+t.insert({geo : cornerPoint});
+
+var polygonWithNoHole = {"type" : "Polygon", "coordinates": [
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
+ ]
+};
+
+// Test 1: Sanity check. Expect all three points.
+var sanityResult = t.find({geo: {$within: {$geometry: polygonWithNoHole}}});
+assert.eq(sanityResult.itcount(), 3);
+
+// Test 2: Polygon with a hole that isn't contained by the poly shell.
+var polygonWithProtrudingHole = {"type" : "Polygon", "coordinates": [
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
+ [[0.4,0.9], [0.4,1.1], [0.5, 1.1], [0.5, 0.9], [0.4, 0.9]]
+ ]
+};
+
+// Bad shell, should error.
+assert.writeError(t.insert({geo: polygonWithProtrudingHole}));
+
+// Can't search with bogus poly.
+assert.throws(function() {
+ return t.find({geo: {$within: {$geometry: polygonWithProtrudingHole}}}).itcount()
+})
+
+// Test 3: This test will confirm that a polygon with overlapping holes throws
+// an error.
+var polyWithOverlappingHoles = {"type" : "Polygon", "coordinates": [
+ [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
+ [[0.2,0.6], [0.2,0.9], [0.6, 0.9], [0.6, 0.6], [0.2, 0.6]],
+ [[0.5,0.4], [0.5,0.7], [0.8, 0.7], [0.8, 0.4], [0.5, 0.4]]
+ ]
+};
+
+assert.writeError(t.insert({geo: polyWithOverlappingHoles}));
diff --git a/jstests/core/geo_s2selfintersectingpoly.js b/jstests/core/geo_s2selfintersectingpoly.js
new file mode 100644
index 00000000000..f34ea3a5ff1
--- /dev/null
+++ b/jstests/core/geo_s2selfintersectingpoly.js
@@ -0,0 +1,11 @@
+var t = db.geo_s2selfintersectingpoly;
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+
+var intersectingPolygon = {"type": "Polygon", "coordinates": [
+ [[0.0, 0.0], [0.0, 4.0], [-3.0, 2.0], [1.0, 2.0], [0.0, 0.0]]
+]};
+/*
+ * Self-intersecting polygons should be rejected with a write error on insert.
+ */
+assert.writeError(t.insert({geo : intersectingPolygon}));
diff --git a/jstests/core/geo_s2sparse.js b/jstests/core/geo_s2sparse.js
new file mode 100644
index 00000000000..3fbc01188a3
--- /dev/null
+++ b/jstests/core/geo_s2sparse.js
@@ -0,0 +1,113 @@
+// Test behavior of 2dsphere and sparse. See SERVER-9639.
+// All V2 2dsphere indices are sparse in the geo fields.
+
+var coll = db.geo_s2sparse;
+
+var point = { type: "Point", coordinates: [5, 5] }
+
+var indexSpec = { geo: "2dsphere", nonGeo: 1 };
+
+var indexName = 'test.geo_s2sparse.$geo_2dsphere_nonGeo_1';
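+// validate() reports per-index key counts keyed by the full index namespace,
+// i.e. "<db>.<collection>.$<index name>".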
+
+//
+// V2 indices are "geo sparse" always.
+//
+
+// Clean up.
+coll.drop();
+coll.ensureIndex(indexSpec);
+
+// Insert N documents with the geo field.
+var N = 1000;
+for (var i = 0; i < N; i++) {
+ coll.insert({ geo: point, nonGeo: "point_"+i });
+}
+
+// Expect N keys.
+assert.eq(N, coll.validate().keysPerIndex[indexName]);
+
+// Insert N documents without the geo field.
+for (var i = 0; i < N; i++) {
+ coll.insert({ wrongGeo: point, nonGeo: i});
+}
+
+// Still expect N keys as we didn't insert any geo stuff.
+assert.eq(N, coll.validate().keysPerIndex[indexName]);
+
+// Insert N documents with just the geo field.
+for (var i = 0; i < N; i++) {
+ coll.insert({ geo: point});
+}
+
+// Expect 2N keys.
+assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
+
+// Add some "not geo" stuff.
+for (var i = 0; i < N; i++) {
+ coll.insert({ geo: null});
+ coll.insert({ geo: []});
+ coll.insert({ geo: undefined});
+ coll.insert({ geo: {}});
+}
+
+// Still expect 2N keys.
+assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
+
+//
+// V1 indices are never sparse
+//
+
+coll.drop();
+coll.ensureIndex(indexSpec, {"2dsphereIndexVersion": 1});
+
+// Insert N documents with the geo field.
+for (var i = 0; i < N; i++) {
+ coll.insert({ geo: point, nonGeo: "point_"+i });
+}
+
+// Expect N keys.
+assert.eq(N, coll.validate().keysPerIndex[indexName]);
+
+// Insert N documents without the geo field.
+for (var i = 0; i < N; i++) {
+ coll.insert({ wrongGeo: point, nonGeo: i});
+}
+
+// Expect 2N keys, since a V1 index is not sparse and indexes these docs too.
+assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
+
+//
+// V2 indices with several 2dsphere-indexed fields omit a document only if all
+// of those fields are missing.
+//
+
+// Clean up.
+coll.drop();
+coll.ensureIndex({geo: "2dsphere", otherGeo: "2dsphere"});
+
+indexName = 'test.geo_s2sparse.$geo_2dsphere_otherGeo_2dsphere';
+
+// Insert N documents with the first geo field.
+var N = 1000;
+for (var i = 0; i < N; i++) {
+ coll.insert({ geo: point});
+}
+
+// Expect N keys.
+assert.eq(N, coll.validate().keysPerIndex[indexName]);
+
+// Insert N documents with the second geo field.
+var N = 1000;
+for (var i = 0; i < N; i++) {
+ coll.insert({ otherGeo: point});
+}
+
+// They get inserted too.
+assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
+
+// Insert N documents with neither geo field.
+for (var i = 0; i < N; i++) {
+ coll.insert({ nonGeo: i});
+}
+
+// Still expect 2N keys, as the docs with neither geo field were omitted from the index.
+assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
diff --git a/jstests/core/geo_s2twofields.js b/jstests/core/geo_s2twofields.js
new file mode 100644
index 00000000000..2292e8936e8
--- /dev/null
+++ b/jstests/core/geo_s2twofields.js
@@ -0,0 +1,65 @@
+// Verify that we can index multiple geo fields with 2dsphere, and that
+// performance is what we expect it to be with indexing both fields.
+var t = db.geo_s2twofields
+t.drop()
+
+Random.setRandomSeed();
+var random = Random.rand;
+var PI = Math.PI;
+
+function randomCoord(center, minDistDeg, maxDistDeg) {
+ var dx = random() * (maxDistDeg - minDistDeg) + minDistDeg;
+ var dy = random() * (maxDistDeg - minDistDeg) + minDistDeg;
+ return [center[0] + dx, center[1] + dy];
+}
+
+var nyc = {type: "Point", coordinates: [-74.0064, 40.7142]};
+var miami = {type: "Point", coordinates: [-80.1303, 25.7903]};
+var maxPoints = 10000;
+var degrees = 5;
+
+for (var i = 0; i < maxPoints; ++i) {
+ var fromCoord = randomCoord(nyc.coordinates, 0, degrees);
+ var toCoord = randomCoord(miami.coordinates, 0, degrees);
+ var res = t.insert({ from: { type: "Point", coordinates: fromCoord },
+ to: { type: "Point", coordinates: toCoord}});
+ assert.writeOK(res);
+}
+
+function semiRigorousTime(func) {
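+    // Best of three runs (one call here plus two more below), to damp timing noise.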
+ var lowestTime = func();
+ var iter = 2;
+ for (var i = 0; i < iter; ++i) {
+ var run = func();
+ if (run < lowestTime) { lowestTime = run; }
+ }
+ return lowestTime;
+}
+
+function timeWithoutAndWithAnIndex(index, query) {
+ t.dropIndex(index);
+ var withoutTime = semiRigorousTime(function() { return t.find(query).explain().millis; });
+ t.ensureIndex(index);
+ var withTime = semiRigorousTime(function() { return t.find(query).explain().millis; });
+ t.dropIndex(index);
+ return [withoutTime, withTime];
+}
+
+var maxQueryRad = 0.5 * PI / 180.0;
+// When we're not looking at ALL the data, anything indexed should beat not-indexed.
+var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere", from: "2dsphere"},
+ {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
+// assert(smallQuery[0] > smallQuery[1]);
+
+// Let's just index one field.
+var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere"},
+ {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
+// assert(smallQuery[0] > smallQuery[1]);
+
+// And the other one.
+var smallQuery = timeWithoutAndWithAnIndex({from: "2dsphere"},
+ {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
+// assert(smallQuery[0] > smallQuery[1]);
diff --git a/jstests/core/geo_s2validindex.js b/jstests/core/geo_s2validindex.js
new file mode 100644
index 00000000000..bc8a569e559
--- /dev/null
+++ b/jstests/core/geo_s2validindex.js
@@ -0,0 +1,26 @@
+//
+// Tests valid cases for creation of 2dsphere index
+//
+
+var coll = db.getCollection("twodspherevalid");
+
+// Valid index
+coll.drop();
+assert.writeOK(coll.ensureIndex({geo : "2dsphere", other : 1}));
+
+// Valid index
+coll.drop();
+assert.writeOK(coll.ensureIndex({geo : "2dsphere", other : 1, geo2 : "2dsphere"}));
+
+// Invalid index, using hash with 2dsphere
+coll.drop();
+assert.writeError(coll.ensureIndex({geo : "2dsphere", other : "hash"}));
+
+// Invalid index, using 2d with 2dsphere
+coll.drop();
+assert.writeError(coll.ensureIndex({geo : "2dsphere", other : "2d"}));
+
+jsTest.log("Success!");
+
+// Ensure the empty collection is gone, so that small_oplog passes.
+coll.drop();
diff --git a/jstests/core/geo_s2within.js b/jstests/core/geo_s2within.js
new file mode 100644
index 00000000000..87fd32a7676
--- /dev/null
+++ b/jstests/core/geo_s2within.js
@@ -0,0 +1,36 @@
+// Test some cases that might be iffy with $within, mostly related to polygon w/holes.
+t = db.geo_s2within
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+
+somepoly = { "type" : "Polygon",
+ "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]}
+
+t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.2]]}})
+// This is only partially contained within the polygon.
+t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}})
+
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
+
+t.drop()
+t.ensureIndex({geo: "2dsphere"})
+somepoly = { "type" : "Polygon",
+ "coordinates" : [ [ [40,5], [40,8], [43,8], [43,5], [40,5]],
+ [ [41,6], [42,6], [42,7], [41,7], [41,6]]]}
+
+t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }})
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
+// In the hole. Shouldn't find it.
+t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }})
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
+// Also in the hole.
+t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}})
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
+// Half-hole, half-not. Shouldn't be $within.
+t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}})
+res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } })
+assert.eq(res.itcount(), 1);
diff --git a/jstests/core/geo_small_large.js b/jstests/core/geo_small_large.js
new file mode 100644
index 00000000000..e927e8d5402
--- /dev/null
+++ b/jstests/core/geo_small_large.js
@@ -0,0 +1,158 @@
+// SERVER-2386, general geo-indexing using very large and very small bounds
+
+load( "jstests/libs/geo_near_random.js" );
+
+// Do some random tests (for near queries) with very large and small ranges
+
+var test = new GeoNearRandomTest( "geo_small_large" );
+
+bounds = { min : -Math.pow( 2, 34 ), max : Math.pow( 2, 34 ) };
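+// Far outside the default 2d index range of [-180, 180].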
+
+test.insertPts( 50, bounds );
+
+printjson( db["geo_small_large"].find().limit( 10 ).toArray() );
+
+test.testPt( [ 0, 0 ] );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+
+test = new GeoNearRandomTest( "geo_small_large" );
+
+bounds = { min : -Math.pow( 2, -34 ), max : Math.pow( 2, -34 ) };
+
+test.insertPts( 50, bounds );
+
+printjson( db["geo_small_large"].find().limit( 10 ).toArray() );
+
+test.testPt( [ 0, 0 ] );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt( test.mkPt( undefined, bounds ) );
+
+
+// Check that our box and circle queries also work
+var scales = [
+ Math.pow( 2, 40 ),
+ Math.pow( 2, -40 ),
+ Math.pow(2, 2),
+ Math.pow(3, -15),
+ Math.pow(3, 15)
+];
+
+for ( var i = 0; i < scales.length; i++ ) {
+
+ var scale = scales[i];
+
+ var eps = Math.pow( 2, -7 ) * scale;
+ var radius = 5 * scale;
+ var max = 10 * scale;
+ var min = -max;
+ var range = max - min;
+ var bits = 2 + Math.random() * 30;
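+    // 2d index precision; valid values are 1-32 bits (the server default is 26).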
+
+ var t = db["geo_small_large"];
+ t.drop();
+ t.ensureIndex( { p : "2d" }, { min : min, max : max, bits : bits });
+
+ var outPoints = 0;
+ var inPoints = 0;
+
+ printjson({ eps : eps, radius : radius, max : max, min : min, range : range, bits : bits });
+
+ // Put one point slightly outside and one slightly inside our search radius
+ for ( var j = 0; j < 2; j++ ) {
+ var currRad = ( j % 2 == 0 ? radius + eps : radius - eps );
+ var res = t.insert( { p : { x : currRad, y : 0 } } );
+ print( res.toString() );
+ }
+
+ printjson( t.find().toArray() );
+
+ assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1,
+ "Incorrect center points found!" );
+ assert.eq( t.count( { p : { $within : { $box : [ [ -radius, -radius ], [ radius, radius ] ] } } } ), 1,
+ "Incorrect box points found!" );
+
+ var shouldFind = [];
+ var randoms = [];
+
+ for ( var j = 0; j < 2; j++ ) {
+
+ var randX = Math.random(); // randoms[j].randX
+ var randY = Math.random(); // randoms[j].randY
+
+ randoms.push({ randX : randX, randY : randY });
+
+ var x = randX * ( range - eps ) + eps + min;
+ var y = randY * ( range - eps ) + eps + min;
+
+ t.insert( { p : [ x, y ] } );
+
+ if ( x * x + y * y > radius * radius ){
+ // print( "out point ");
+ // printjson({ x : x, y : y })
+ outPoints++;
+ }
+ else{
+ // print( "in point ");
+ // printjson({ x : x, y : y })
+ inPoints++;
+ shouldFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) });
+ }
+ }
+
+ /*
+ function printDiff( didFind, shouldFind ){
+
+ for( var i = 0; i < shouldFind.length; i++ ){
+ var beenFound = false;
+ for( var j = 0; j < didFind.length && !beenFound ; j++ ){
+ beenFound = shouldFind[i].x == didFind[j].x &&
+ shouldFind[i].y == didFind[j].y
+ }
+
+ if( !beenFound ){
+ print( "Could not find: " )
+ shouldFind[i].inRadius = ( radius - shouldFind[i].radius >= 0 )
+ printjson( shouldFind[i] )
+ }
+ }
+ }
+
+ print( "Finding random pts... ")
+ var found = t.find( { p : { $within : { $center : [[0, 0], radius ] } } } ).toArray()
+ var didFind = []
+ for( var f = 0; f < found.length; f++ ){
+ //printjson( found[f] )
+ var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0]
+ var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1]
+ didFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) })
+ }
+
+ print( "Did not find but should: ")
+ printDiff( didFind, shouldFind )
+ print( "Found but should not have: ")
+ printDiff( shouldFind, didFind )
+ */
+
+ assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1 + inPoints,
+ "Incorrect random center points found!\n" + tojson( randoms ) );
+
+ print("Found " + inPoints + " points in and " + outPoints + " points out.");
+
+ var found = t.find( { p : { $near : [0, 0], $maxDistance : radius } } ).toArray();
+ var dist = 0;
+ for( var f = 0; f < found.length; f++ ){
+ var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0];
+ var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1];
+ print( "Dist: x : " + x + " y : " + y + " dist : " +
+ Math.sqrt( x * x + y * y) + " radius : " + radius );
+ }
+
+ assert.eq( t.count( { p : { $near : [0, 0], $maxDistance : radius } } ), 1 + inPoints,
+ "Incorrect random center points found near!\n" + tojson( randoms ) );
+}
+
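+// Rough sketch of the precision at play above (assumes the standard 2d geohash
+// model, whose default is 26 bits): a 2d index over [min, max] quantizes
+// points into grid cells of side (max - min) / 2^bits.
+print("finest default 2d cell over [-180, 180]: " + (360 / Math.pow(2, 26)));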
diff --git a/jstests/core/geo_sort1.js b/jstests/core/geo_sort1.js
new file mode 100644
index 00000000000..67de80e65c7
--- /dev/null
+++ b/jstests/core/geo_sort1.js
@@ -0,0 +1,22 @@
+
+t = db.geo_sort1
+t.drop();
+
+for ( x=0; x<10; x++ ){
+ for ( y=0; y<10; y++ ){
+ t.insert( { loc : [ x , y ] , foo : x * x * y } );
+ }
+}
+
+t.ensureIndex( { loc : "2d" , foo : 1 } )
+
+q = t.find( { loc : { $near : [ 5 , 5 ] } , foo : { $gt : 20 } } )
+m = function(z){ return z.foo; }
+
+a = q.clone().map( m );
+b = q.clone().sort( { foo : 1 } ).map( m );
+
+assert.neq( a , b , "A" );
+a.sort();
+b.sort();
+assert.eq( a , b , "B" );
diff --git a/jstests/core/geo_uniqueDocs.js b/jstests/core/geo_uniqueDocs.js
new file mode 100644
index 00000000000..61f1a40522d
--- /dev/null
+++ b/jstests/core/geo_uniqueDocs.js
@@ -0,0 +1,40 @@
+// Test uniqueDocs option for $within and geoNear queries SERVER-3139
+// SERVER-12120 uniqueDocs is deprecated. Server always returns unique documents.
+
+collName = 'geo_uniqueDocs_test'
+t = db.geo_uniqueDocs_test
+t.drop()
+
+t.save( { locs : [ [0,2], [3,4]] } )
+t.save( { locs : [ [6,8], [10,10] ] } )
+
+t.ensureIndex( { locs : '2d' } )
+
+// geoNear tests
+// uniqueDocs option is ignored.
+assert.eq(2, db.runCommand({geoNear:collName, near:[0,0]}).results.length)
+assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:false}).results.length)
+assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:true}).results.length)
+results = db.runCommand({geoNear:collName, near:[0,0], num:2}).results
+assert.eq(2, results.length)
+assert.close(2, results[0].dis)
+assert.close(10, results[1].dis)
+results = db.runCommand({geoNear:collName, near:[0,0], num:2, uniqueDocs:true}).results
+assert.eq(2, results.length)
+assert.close(2, results[0].dis)
+assert.close(10, results[1].dis)
+
+// $within tests
+
+assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]]}}}).itcount())
+assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : true}}}).itcount())
+assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : false}}}).itcount())
+
+assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : true}}}).itcount())
+assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : false}}}).itcount())
+
+assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : true}}}).itcount())
+assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : false}}}).itcount())
+
+assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : true}}}).itcount())
+assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : false}}}).itcount())
diff --git a/jstests/core/geo_uniqueDocs2.js b/jstests/core/geo_uniqueDocs2.js
new file mode 100644
index 00000000000..f9b95113f78
--- /dev/null
+++ b/jstests/core/geo_uniqueDocs2.js
@@ -0,0 +1,80 @@
+// Additional checks for geo uniqueDocs and includeLocs SERVER-3139.
+// SERVER-12120 uniqueDocs is deprecated.
+// Server always returns results with implied uniqueDocs=true
+
+collName = 'jstests_geo_uniqueDocs2';
+t = db[collName];
+t.drop();
+
+t.save( {loc:[[20,30],[40,50]]} );
+t.ensureIndex( {loc:'2d'} );
+
+// Check exact matches of different locations.
+assert.eq( 1, t.count( { loc : [20,30] } ) );
+assert.eq( 1, t.count( { loc : [40,50] } ) );
+
+// Check behavior for $near, where $uniqueDocs mode is unavailable.
+assert.eq( [t.findOne()], t.find( { loc: { $near: [50,50] } } ).toArray() );
+
+// Check correct number of matches for $within / $uniqueDocs.
+// uniqueDocs ignored - does not affect results.
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40] } } } ) );
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : true } } } ) );
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ) );
+
+// For $within / $uniqueDocs, limit applies to docs.
+assert.eq( 1, t.find( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ).limit(1).itcount() );
+
+// Now check a circle only containing one of the locs.
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10] } } } ) );
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : true } } } ) );
+assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : false } } } ) );
+
+// Check number and character of results with geoNear / uniqueDocs / includeLocs.
+notUniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : false } );
+uniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : false } );
+notUniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } );
+uniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : true } );
+
+// Check that only unique docs are returned.
+assert.eq( 1, notUniqueNotInclude.results.length );
+assert.eq( 1, uniqueNotInclude.results.length );
+assert.eq( 1, notUniqueInclude.results.length );
+assert.eq( 1, uniqueInclude.results.length );
+
+// Check that locs are included.
+assert( !notUniqueNotInclude.results[0].loc );
+assert( !uniqueNotInclude.results[0].loc );
+assert( notUniqueInclude.results[0].loc );
+assert( uniqueInclude.results[0].loc );
+
+// For geoNear / uniqueDocs, 'num' limit seems to apply to locs.
+assert.eq( 1, db.runCommand( { geoNear : collName , near : [50,50], num : 1, uniqueDocs : false, includeLocs : false } ).results.length );
+
+// Check locs returned in includeLocs mode.
+t.remove({});
+objLocs = [{x:20,y:30,z:['loc1','loca']},{x:40,y:50,z:['loc2','locb']}];
+t.save( {loc:objLocs} );
+results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
+assert.contains( results[0].loc, objLocs );
+
+// Check locs returned in includeLocs mode, where locs are arrays.
+t.remove({});
+arrLocs = [[20,30],[40,50]];
+t.save( {loc:arrLocs} );
+results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
+// The loc arrays are returned as-is, so expect the originals.
+expectedLocs = arrLocs;
+
+assert.contains( results[0].loc, expectedLocs );
+
+// Test a large number of locations in the array.
+t.drop();
+arr = [];
+for( i = 0; i < 10000; ++i ) {
+ arr.push( [10,10] );
+}
+arr.push( [100,100] );
+t.save( {loc:arr} );
+t.ensureIndex( {loc:'2d'} );
+assert.eq( 1, t.count( { loc : { $within : { $center : [[99, 99], 5] } } } ) );
diff --git a/jstests/core/geo_update.js b/jstests/core/geo_update.js
new file mode 100644
index 00000000000..dd4b28c8374
--- /dev/null
+++ b/jstests/core/geo_update.js
@@ -0,0 +1,37 @@
+// Tests geo queries w/ update & upsert
+// from SERVER-3428
+
+var coll = db.testGeoUpdate
+coll.drop()
+
+coll.ensureIndex({ loc : "2d" })
+
+// Test normal update
+print( "Updating..." )
+
+coll.insert({ loc : [1.0, 2.0] })
+
+coll.update({ loc : { $near : [1.0, 2.0] } },
+ { x : true, loc : [1.0, 2.0] })
+
+// Test upsert
+print( "Upserting..." )
+
+coll.update({ loc : { $within : { $center : [[10, 20], 1] } } },
+ { x : true },
+ true)
+
+coll.update({ loc : { $near : [10.0, 20.0], $maxDistance : 1 } },
+ { x : true },
+ true)
+
+
+coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
+ { $set : { loc : [100, 100] }, $push : { people : "chris" } },
+ true)
+
+coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
+ { $set : { loc : [100, 100] }, $push : { people : "john" } },
+ true)
+
+assert.eq( 4, coll.find().itcount() )
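+
+// Follow-up sanity check: the two $near upserts against [100, 100] should
+// have produced a single doc carrying both pushed names.
+var near100 = coll.findOne({ loc : [100, 100] })
+assert.eq( [ "chris", "john" ], near100.people )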
diff --git a/jstests/core/geo_update1.js b/jstests/core/geo_update1.js
new file mode 100644
index 00000000000..6352ef0aa19
--- /dev/null
+++ b/jstests/core/geo_update1.js
@@ -0,0 +1,36 @@
+
+t = db.geo_update1
+t.drop()
+
+for(var x = 0; x < 10; x++ ) {
+ for(var y = 0; y < 10; y++ ) {
+ t.insert({"loc": [x, y] , x : x , y : y , z : 1 });
+ }
+}
+
+t.ensureIndex( { loc : "2d" } )
+
+function p(){
+ print( "--------------" );
+ for ( var y=0; y<10; y++ ){
+ var c = t.find( { y : y } ).sort( { x : 1 } )
+ var s = "";
+ while ( c.hasNext() )
+ s += c.next().z + " ";
+ print( s )
+ }
+ print( "--------------" );
+}
+
+p()
+
+var res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, false, true);
+assert.writeOK( res );
+p()
+
+assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true));
+p()
+
+res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, false, true);
+assert.writeOK( res );
+p()
diff --git a/jstests/core/geo_update2.js b/jstests/core/geo_update2.js
new file mode 100644
index 00000000000..6a42619ac98
--- /dev/null
+++ b/jstests/core/geo_update2.js
@@ -0,0 +1,39 @@
+
+t = db.geo_update2
+t.drop()
+
+for(var x = 0; x < 10; x++ ) {
+ for(var y = 0; y < 10; y++ ) {
+ t.insert({"loc": [x, y] , x : x , y : y });
+ }
+}
+
+t.ensureIndex( { loc : "2d" } )
+
+function p(){
+ print( "--------------" );
+ for ( var y=0; y<10; y++ ){
+ var c = t.find( { y : y } ).sort( { x : 1 } )
+ var s = "";
+ while ( c.hasNext() )
+ s += c.next().z + " ";
+ print( s )
+ }
+ print( "--------------" );
+}
+
+p()
+
+
+assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}},
+ {'$inc' : { 'z' : 1}}, false, true));
+p()
+
+assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true));
+p()
+
+
+assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}},
+ {'$inc' : { 'z' : 1}}, false, true));
+p()
+
diff --git a/jstests/core/geo_update_btree.js b/jstests/core/geo_update_btree.js
new file mode 100644
index 00000000000..225a6635903
--- /dev/null
+++ b/jstests/core/geo_update_btree.js
@@ -0,0 +1,28 @@
+// Tests whether the geospatial search is stable under btree updates
+
+var coll = db.getCollection( "jstests_geo_update_btree" )
+coll.drop()
+
+coll.ensureIndex( { loc : '2d' } )
+
+var big = new Array( 3000 ).toString()
+
+if (testingReplication) {
+ coll.setWriteConcern({ w: 2 });
+}
+
+var parallelInsert = startParallelShell(
+ "for ( var i = 0; i < 1000; i++ ) {" +
+ " var doc = { loc: [ Random.rand() * 180, Random.rand() * 180 ], v: '' }" +
+ " db.jstests_geo_update_btree.insert(doc);" +
+ "}");
+
+for ( i = 0; i < 1000; i++ ) {
+ coll.update(
+ { loc : { $within : { $center : [ [ Random.rand() * 180, Random.rand() * 180 ], Random.rand() * 50 ] } } },
+ { $set : { v : big } }, false, true )
+
+ if( i % 10 == 0 ) print( i );
+}
+
+parallelInsert();
diff --git a/jstests/core/geo_update_btree2.js b/jstests/core/geo_update_btree2.js
new file mode 100644
index 00000000000..d99970c73e0
--- /dev/null
+++ b/jstests/core/geo_update_btree2.js
@@ -0,0 +1,71 @@
+// Tests whether the geospatial search is stable under btree updates
+//
+// Tests the implementation of the 2d search, not the behavior we promise. MongoDB currently
+// promises no isolation, so there is no guarantee that we get the results we expect in this file.
+
+// The old query system, if it saw a 2d query, would never consider a collscan.
+//
+// The new query system can answer the queries in this file with a collscan and ranks
+// the collscan against the indexed result.
+//
+// In order to expose the specific NON GUARANTEED isolation behavior this file tests
+// we disable table scans to ensure that the new query system only looks at the 2d
+// scan.
+assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
+
+var status = function( msg ){
+ print( "\n\n###\n" + msg + "\n###\n\n" )
+}
+
+var coll = db.getCollection( "jstests_geo_update_btree2" )
+coll.drop()
+
+coll.ensureIndex( { loc : '2d' } )
+
+status( "Inserting points..." )
+
+var numPoints = 10
+for ( i = 0; i < numPoints; i++ ) {
+ coll.insert( { _id : i, loc : [ Random.rand() * 180, Random.rand() * 180 ], i : i % 2 } );
+}
+
+status( "Starting long query..." )
+
+var query = coll.find({ loc : { $within : { $box : [[-180, -180], [180, 180]] } } }).batchSize( 2 )
+var firstValues = [ query.next()._id, query.next()._id ]
+printjson( firstValues )
+
+status( "Removing points not returned by query..." )
+
+var allQuery = coll.find()
+var removeIds = []
+while( allQuery.hasNext() ){
+ var id = allQuery.next()._id
+ if( firstValues.indexOf( id ) < 0 ){
+ removeIds.push( id )
+ }
+}
+
+var updateIds = []
+for( var i = 0, max = removeIds.length / 2; i < max; i++ ) updateIds.push( removeIds.pop() )
+
+printjson( removeIds )
+coll.remove({ _id : { $in : removeIds } })
+
+status( "Updating points returned by query..." )
+printjson(updateIds);
+
+var big = new Array( 3000 ).toString()
+for( var i = 0; i < updateIds.length; i++ )
+ coll.update({ _id : updateIds[i] }, { $set : { data : big } })
+
+status( "Counting final points..." )
+
+// It's not defined whether or not we return documents that are modified during a query. We
+// shouldn't crash, but it's not defined how many results we get back. This test is modifying every
+// doc not returned by the query, and since we currently handle the invalidation by removing them,
+// we won't return them. But we shouldn't crash.
+// assert.eq( ( numPoints - 2 ) / 2, query.itcount() )
+query.itcount();
+
+assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false} ) );
diff --git a/jstests/core/geo_update_dedup.js b/jstests/core/geo_update_dedup.js
new file mode 100644
index 00000000000..def93a839e4
--- /dev/null
+++ b/jstests/core/geo_update_dedup.js
@@ -0,0 +1,60 @@
+// Test that updates with geo queries which match
+// the same document multiple times only apply
+// the update once
+
+var t = db.jstests_geo_update_dedup;
+
+// 2d index with $near
+t.drop();
+t.ensureIndex({locs: "2d"});
+t.save({locs: [[49.999,49.999], [50.0,50.0], [50.001,50.001]]});
+
+var q = {locs: {$near: [50.0, 50.0]}};
+assert.eq(1, t.find(q).itcount(), 'duplicates returned from query');
+
+var res = t.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true);
+assert.eq(1, res.nMatched);
+assert.eq(1, t.findOne().touchCount);
+
+t.drop();
+t.ensureIndex({locs: "2d"});
+t.save({locs: [{x:49.999,y:49.999}, {x:50.0,y:50.0}, {x:50.001,y:50.001}]});
+res = t.update({locs: {$near: {x:50.0, y:50.0}}}, {$inc: {touchCount: 1}});
+assert.eq(1, res.nMatched);
+assert.eq(1, t.findOne().touchCount);
+
+// 2d index with $within
+t.drop();
+t.ensureIndex({loc: "2d"});
+t.save({loc: [[0, 0], [1, 1]]});
+
+res = t.update({loc: {$within: {$center: [[0, 0], 2]}}}, {$inc: {touchCount: 1}}, false, true);
+assert.eq(1, res.nMatched);
+assert.eq(1, t.findOne().touchCount);
+
+// 2dsphere index with $geoNear
+t.drop();
+t.ensureIndex({geo: "2dsphere"});
+var x = { "type" : "Polygon",
+ "coordinates" : [[[49.999,49.999], [50.0,50.0], [50.001,50.001], [49.999,49.999]]]}
+t.save({geo: x})
+
+res = t.update({geo: {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
+ {$inc: {touchCount: 1}}, false, true);
+assert.eq(1, res.nMatched);
+assert.eq(1, t.findOne().touchCount);
+
+t.drop();
+var locdata = [
+ {geo: {type: "Point", coordinates: [49.999,49.999]}},
+ {geo: {type: "Point", coordinates: [50.000,50.000]}},
+ {geo: {type: "Point", coordinates: [50.001,50.001]}}
+];
+t.save({locdata: locdata, count: 0})
+t.ensureIndex({"locdata.geo": "2dsphere"});
+
+res = t.update({"locdata.geo": {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
+ {$inc: {touchCount: 1}}, false, true);
+assert.eq(1, res.nMatched);
+assert.eq(1, t.findOne().touchCount);
+
diff --git a/jstests/core/geo_withinquery.js b/jstests/core/geo_withinquery.js
new file mode 100644
index 00000000000..11701d34c62
--- /dev/null
+++ b/jstests/core/geo_withinquery.js
@@ -0,0 +1,15 @@
+// SERVER-7343: allow $within without a geo index.
+t = db.geo_withinquery;
+t.drop();
+
+num = 0;
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ }
+}
+
+assert.eq(21 * 21 - 1, t.find({ $and: [ {loc: {$ne:[0,0]}},
+                                        {loc: {$within: {$box: [[0,0], [100,100]]}}},
+                                      ]}).itcount(), "expected every point except [0,0]")
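+
+// Extra check: the point of SERVER-7343 is that no geo index exists here, so
+// the $within above must have been answered without one.
+assert.eq(1, t.getIndexes().length, "expected only the _id index");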
diff --git a/jstests/core/geoa.js b/jstests/core/geoa.js
new file mode 100644
index 00000000000..3081f6c5c2e
--- /dev/null
+++ b/jstests/core/geoa.js
@@ -0,0 +1,12 @@
+
+t = db.geoa
+t.drop();
+
+t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } )
+t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } )
+t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } )
+
+t.ensureIndex( { "a.loc" : "2d" } );
+
+cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } );
+assert.eq( 2 , cur.next()._id , "A1" );
diff --git a/jstests/core/geob.js b/jstests/core/geob.js
new file mode 100644
index 00000000000..0dcc2658ba2
--- /dev/null
+++ b/jstests/core/geob.js
@@ -0,0 +1,35 @@
+var t = db.geob;
+t.drop();
+
+var a = {p: [0, 0]};
+var b = {p: [1, 0]};
+var c = {p: [3, 4]};
+var d = {p: [0, 6]};
+
+t.save(a);
+t.save(b);
+t.save(c);
+t.save(d);
+t.ensureIndex({p: "2d"});
+
+var res = t.runCommand("geoNear", {near: [0,0]});
+assert.close(3, res.stats.avgDistance, "A");
+
+assert.close(0, res.results[0].dis, "B1");
+assert.eq(a._id, res.results[0].obj._id, "B2");
+
+assert.close(1, res.results[1].dis, "C1");
+assert.eq(b._id, res.results[1].obj._id, "C2");
+
+assert.close(5, res.results[2].dis, "D1");
+assert.eq(c._id, res.results[2].obj._id, "D2");
+
+assert.close(6, res.results[3].dis, "E1");
+assert.eq(d._id, res.results[3].obj._id, "E2");
+
+res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2});
+assert.close(6, res.stats.avgDistance, "F");
+assert.close(0, res.results[0].dis, "G");
+assert.close(2, res.results[1].dis, "H");
+assert.close(10, res.results[2].dis, "I");
+assert.close(12, res.results[3].dis, "J");
diff --git a/jstests/core/geoc.js b/jstests/core/geoc.js
new file mode 100644
index 00000000000..8b0178095e8
--- /dev/null
+++ b/jstests/core/geoc.js
@@ -0,0 +1,24 @@
+
+t = db.geoc;
+t.drop()
+
+N = 1000;
+
+for (var i=0; i<N; i++) t.insert({loc:[100+Math.random(), 100+Math.random()], z:0})
+for (var i=0; i<N; i++) t.insert({loc:[0+Math.random(), 0+Math.random()], z:1})
+for (var i=0; i<N; i++) t.insert({loc:[-100+Math.random(), -100+Math.random()], z:2})
+
+t.ensureIndex({loc:'2d'})
+
+function test( z , l ){
+ assert.lt( 0 , t.find({loc:{$near:[100,100]}, z:z}).limit(l).itcount() , "z: " + z + " l: " + l );
+}
+
+test( 1 , 1 );
+test( 1 , 2 );
+test( 2 , 2 );
+test( 2 , 10 );
+test( 2 , 1000 );
+test( 2 , 100000 );
+test( 2 , 10000000 );
+
diff --git a/jstests/core/geod.js b/jstests/core/geod.js
new file mode 100644
index 00000000000..6e458454a71
--- /dev/null
+++ b/jstests/core/geod.js
@@ -0,0 +1,14 @@
+var t=db.geod;
+t.drop()
+t.save( { loc: [0,0] } )
+t.save( { loc: [0.5,0] } )
+t.ensureIndex({loc:"2d"})
+// do a few geoNears with different maxDistances. The first iteration
+// should match no points in the dataset.
+dists = [.49, .51, 1.0]
+for (idx in dists){
+ b=db.runCommand({geoNear:"geod", near:[1,0], num:2, maxDistance:dists[idx]});
+ assert.eq(b.errmsg, undefined, "A"+idx);
+ l=b.results.length
+ assert.eq(l, idx, "B"+idx)
+}
diff --git a/jstests/core/geoe.js b/jstests/core/geoe.js
new file mode 100644
index 00000000000..22feb83ab1e
--- /dev/null
+++ b/jstests/core/geoe.js
@@ -0,0 +1,32 @@
+// Was reported as SERVER-1283.
+// The problem seems to be that sometimes the index btrees are such that
+// the first search for a matching point in the geo code could run to
+// the end of the btree and not reverse direction (leaving the rest of
+// the search always looking at some random non-matching point).
+
+t=db.geo_box;
+t.drop();
+
+t.insert({"_id": 1, "geo" : [ 33, -11.1 ] });
+t.insert({"_id": 2, "geo" : [ -122, 33.3 ] });
+t.insert({"_id": 3, "geo" : [ -122, 33.4 ] });
+t.insert({"_id": 4, "geo" : [ -122.28, 37.67 ] });
+t.insert({"_id": 5, "geo" : [ -122.29, 37.68 ] });
+t.insert({"_id": 6, "geo" : [ -122.29, 37.67 ] });
+t.insert({"_id": 7, "geo" : [ -122.29, 37.67 ] });
+t.insert({"_id": 8, "geo" : [ -122.29, 37.68 ] });
+t.insert({"_id": 9, "geo" : [ -122.29, 37.68 ] });
+t.insert({"_id": 10, "geo" : [ -122.3, 37.67 ] });
+t.insert({"_id": 11, "geo" : [ -122.31, 37.67 ] });
+t.insert({"_id": 12, "geo" : [ -122.3, 37.66 ] });
+t.insert({"_id": 13, "geo" : [ -122.2435, 37.637072 ] });
+t.insert({"_id": 14, "geo" : [ -122.289505, 37.695774 ] });
+
+
+t.ensureIndex({ geo : "2d" });
+
+c=t.find({geo: {"$within": {"$box": [[-125.078461,36.494473], [-120.320648,38.905199]]} } });
+assert.eq(11, c.count(), "A1");
+
+c=t.find({geo: {"$within": {"$box": [[-124.078461,36.494473], [-120.320648,38.905199]]} } });
+assert.eq(11, c.count(), "B1");
diff --git a/jstests/core/geof.js b/jstests/core/geof.js
new file mode 100644
index 00000000000..786ead6a94a
--- /dev/null
+++ b/jstests/core/geof.js
@@ -0,0 +1,19 @@
+t = db.geof
+t.drop();
+
+// corners (dist ~0.98)
+t.insert({loc: [ 0.7, 0.7]})
+t.insert({loc: [ 0.7, -0.7]})
+t.insert({loc: [-0.7, 0.7]})
+t.insert({loc: [-0.7, -0.7]})
+
+// on x axis (dist == 0.9)
+t.insert({loc: [-0.9, 0]})
+t.insert({loc: [-0.9, 0]})
+
+t.ensureIndex( { loc : "2d" } )
+
+t.find({loc: {$near: [0,0]}}).limit(2).forEach( function(o){
+ //printjson(o);
+ assert.lt(Geo.distance([0,0], o.loc), 0.95);
+});
diff --git a/jstests/core/geonear_cmd_input_validation.js b/jstests/core/geonear_cmd_input_validation.js
new file mode 100644
index 00000000000..2a44391183b
--- /dev/null
+++ b/jstests/core/geonear_cmd_input_validation.js
@@ -0,0 +1,119 @@
+//
+// Test input validation for geoNear command.
+//
+var t = db.geonear_cmd_input_validation;
+t.drop();
+t.ensureIndex({loc: "2dsphere"});
+
+// The test matrix. Some combinations are not supported:
+//     2d index and minDistance,
+//     2d index and GeoJSON,
+//     2dsphere index and spherical=false.
+var indexTypes = ['2d', '2dsphere'],
+ pointTypes = [
+ {type: 'Point', coordinates: [0, 0]},
+ [0, 0]],
+ sphericalOptions = [true, false],
+ optionNames = ['minDistance', 'maxDistance'],
+ badNumbers = [-1, undefined, 'foo'];
+
+indexTypes.forEach(function(indexType) {
+ t.drop();
+ t.createIndex({'loc': indexType});
+
+ pointTypes.forEach(function(pointType) {
+ sphericalOptions.forEach(function(spherical) {
+ optionNames.forEach(function(optionName) {
+ var isLegacy = Array.isArray(pointType),
+ pointDescription = (isLegacy ? "legacy coordinates" : "GeoJSON point");
+
+ function makeCommand(distance) {
+ var command = {
+ geoNear: t.getName(),
+ near: pointType,
+ spherical: spherical
+ };
+ command[optionName] = distance;
+ return command;
+ }
+
+ // Unsupported combinations should return errors.
+ if (
+ (indexType == '2d' && optionName == 'minDistance') ||
+ (indexType == '2d' && !isLegacy) ||
+ (indexType == '2dsphere' && !spherical)
+ ) {
+ assert.commandFailed(
+ db.runCommand(makeCommand(1)),
+ "geoNear with spherical=" + spherical + " and " + indexType
+ + " index and " + pointDescription
+ + " should've failed."
+ );
+
+ // Stop processing this combination in the test matrix.
+ return;
+ }
+
+ // This is a supported combination. No error.
+ assert.commandWorked(db.runCommand({
+ geoNear: t.getName(),
+ near: pointType,
+ spherical: spherical
+ }));
+
+ // No error with min/maxDistance 1.
+ db.runCommand(makeCommand(1));
+
+ var outOfRangeDistances = [];
+ if (indexType == '2d') {
+ // maxDistance unlimited; no error.
+ db.runCommand(makeCommand(1e10));
+ }
+
+ // Try several bad values for min/maxDistance.
+ badNumbers.concat(outOfRangeDistances).forEach(function(badDistance) {
+
+ var msg = (
+ "geoNear with spherical=" + spherical + " and "
+ + pointDescription + " and " + indexType
+ + " index should've failed with "
+ + optionName + " " + badDistance);
+
+ assert.commandFailed(
+ db.runCommand(makeCommand(badDistance)),
+ msg);
+ });
+
+ // Bad values for limit / num.
+ ['num', 'limit'].forEach(function(limitOptionName) {
+ [-1, 'foo'].forEach(function(badLimit) {
+
+ var msg = (
+ "geoNear with spherical=" + spherical + " and "
+ + pointDescription + " and " + indexType
+ + " index should've failed with '"
+ + limitOptionName + "' " + badLimit);
+
+ var command = makeCommand(1);
+ command[limitOptionName] = badLimit;
+ assert.commandFailed(db.runCommand(command), msg);
+ });
+ });
+
+ // Bad values for distanceMultiplier.
+ badNumbers.forEach(function(badNumber) {
+
+ var msg = (
+ "geoNear with spherical=" + spherical + " and "
+ + pointDescription + " and " + indexType
+ + " index should've failed with distanceMultiplier "
+ + badNumber);
+
+ var command = makeCommand(1);
+ command['distanceMultiplier'] = badNumber;
+ assert.commandFailed(db.runCommand(command), msg);
+ });
+ });
+ });
+ });
+});
diff --git a/jstests/core/geonear_validate.js b/jstests/core/geonear_validate.js
new file mode 100644
index 00000000000..49d4c1ade15
--- /dev/null
+++ b/jstests/core/geonear_validate.js
@@ -0,0 +1,8 @@
+// Test to make sure that geoNear validates numWanted
+t = db.geonear_validate
+t.drop();
+t.ensureIndex({ geo : "2dsphere" })
+origin = { "type" : "Point", "coordinates": [ 0, 0] }
+t.insert({geo: origin})
+res = db.runCommand({geoNear: t.getName(), near: [0,0], spherical: true, num: -1});
+assert.eq(0, res.ok);
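+// Companion check (assumed behavior): a positive num should validate cleanly.
+res = db.runCommand({geoNear: t.getName(), near: [0,0], spherical: true, num: 1});
+assert.eq(1, res.ok);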
diff --git a/jstests/core/getlog1.js b/jstests/core/getlog1.js
new file mode 100644
index 00000000000..75fbeabddf2
--- /dev/null
+++ b/jstests/core/getlog1.js
@@ -0,0 +1,24 @@
+// to run:
+// ./mongo jstests/<this-file>
+
+contains = function(arr,obj) {
+ var i = arr.length;
+ while (i--) {
+ if (arr[i] === obj) {
+ return true;
+ }
+ }
+ return false;
+}
+
+var resp = db.adminCommand({getLog:"*"})
+assert( resp.ok == 1, "error executing getLog command" );
+assert( resp.names, "no names field" );
+assert( resp.names.length > 0, "names array is empty" );
+assert( contains(resp.names,"global") , "missing global category" );
+assert( !contains(resp.names,"butty") , "found unexpected butty category" );
+
+resp = db.adminCommand({getLog:"global"})
+assert( resp.ok == 1, "error executing getLog command" );
+assert( resp.log, "no log field" );
+assert( resp.log.length > 0 , "no log lines" );
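+
+// Extra check (assumed behavior): asking for a log name that does not exist
+// should fail rather than return an empty log.
+resp = db.adminCommand({getLog:"butty"})
+assert( resp.ok == 0, "expected unknown log name to fail" );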
diff --git a/jstests/core/getlog2.js b/jstests/core/getlog2.js
new file mode 100644
index 00000000000..846f0548309
--- /dev/null
+++ b/jstests/core/getlog2.js
@@ -0,0 +1,46 @@
+// tests getLog as well as slow query logging
+
+glcol = db.getLogTest2;
+glcol.drop()
+
+contains = function(arr, func) {
+ var i = arr.length;
+ while (i--) {
+ if (func(arr[i])) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// test doesn't work when talking to mongos
+if(db.isMaster().msg != "isdbgrid") {
+ // run a slow query
+ glcol.save({ "SENTINEL": 1 });
+ glcol.findOne({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } });
+
+ // run a slow update
+ glcol.update({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }, { "x": "x" });
+
+ var resp = db.adminCommand({getLog:"global"});
+ assert( resp.ok == 1, "error executing getLog command" );
+ assert( resp.log, "no log field" );
+ assert( resp.log.length > 0 , "no log lines" );
+
+ // ensure that slow query is logged in detail
+ assert( contains(resp.log, function(v) {
+ print(v);
+ return v.indexOf(" query ") != -1 && v.indexOf("query:") != -1 &&
+ v.indexOf("nscanned:") != -1 &&
+ v.indexOf("nscannedObjects:") != -1 &&
+ v.indexOf("SENTINEL") != -1;
+ }) );
+
+ // same, but for update
+ assert( contains(resp.log, function(v) {
+ return v.indexOf(" update ") != -1 && v.indexOf("query") != -1 &&
+ v.indexOf("nscanned:") != -1 &&
+ v.indexOf("nscannedObjects:") != -1 &&
+ v.indexOf("SENTINEL") != -1;
+ }) );
+}
diff --git a/jstests/core/group1.js b/jstests/core/group1.js
new file mode 100644
index 00000000000..c4147c0d89a
--- /dev/null
+++ b/jstests/core/group1.js
@@ -0,0 +1,64 @@
+t = db.group1;
+t.drop();
+
+t.save( { n : 1 , a : 1 } );
+t.save( { n : 2 , a : 1 } );
+t.save( { n : 3 , a : 2 } );
+t.save( { n : 4 , a : 2 } );
+t.save( { n : 5 , a : 2 } );
+
+var p = { key : { a : true } ,
+ reduce : function(obj,prev) { prev.count++; },
+ initial: { count: 0 }
+ };
+
+res = t.group( p );
+
+assert( res.length == 2 , "A" );
+assert( res[0].a == 1 , "B" );
+assert( res[0].count == 2 , "C" );
+assert( res[1].a == 2 , "D" );
+assert( res[1].count == 3 , "E" );
+
+assert.eq( res , t.groupcmd( p ) , "ZZ" );
+
+ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } );
+assert.eq( 1 , ret.length , "ZZ 2" );
+assert.eq( 5 , ret[0].count , "ZZ 3" );
+
+ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n } , initial : { sum : 0 } } );
+assert.eq( 1 , ret.length , "ZZ 4" );
+assert.eq( 15 , ret[0].sum , "ZZ 5" );
+
+t.drop();
+
+t.save( { "a" : 2 } );
+t.save( { "b" : 5 } );
+t.save( { "a" : 1 } );
+t.save( { "a" : 2 } );
+
+c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } };
+
+assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" );
+
+
+t.drop();
+
+t.save( { name : { first : "a" , last : "A" } } );
+t.save( { name : { first : "b" , last : "B" } } );
+t.save( { name : { first : "a" , last : "A" } } );
+
+
+p = { key : { 'name.first' : true } ,
+ reduce : function(obj,prev) { prev.count++; },
+ initial: { count: 0 }
+ };
+
+res = t.group( p );
+assert.eq( 2 , res.length , "Z1" );
+assert.eq( "a" , res[0]['name.first'] , "Z2" )
+assert.eq( "b" , res[1]['name.first'] , "Z3" )
+assert.eq( 2 , res[0].count , "Z4" )
+assert.eq( 1 , res[1].count , "Z5" )
+
+
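+// Aside, not in the original test: the same 'name.first' grouping expressed
+// with the aggregation framework (assumes this shell's aggregate() returns a
+// cursor).
+agg = t.aggregate([ { $group : { _id : "$name.first" , count : { $sum : 1 } } } ]).toArray();
+assert.eq( 2 , agg.length , "AGG1" );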
diff --git a/jstests/core/group2.js b/jstests/core/group2.js
new file mode 100644
index 00000000000..a8e6653470a
--- /dev/null
+++ b/jstests/core/group2.js
@@ -0,0 +1,38 @@
+t = db.group2;
+t.drop();
+
+t.save({a: 2});
+t.save({b: 5});
+t.save({a: 1});
+
+cmd = { key: {a: 1},
+ initial: {count: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ }
+ };
+
+result = t.group(cmd);
+
+assert.eq(3, result.length, "A");
+assert.eq(null, result[1].a, "C");
+assert("a" in result[1], "D");
+assert.eq(1, result[2].a, "E");
+
+assert.eq(1, result[0].count, "F");
+assert.eq(1, result[1].count, "G");
+assert.eq(1, result[2].count, "H");
+
+
+delete cmd.key
+cmd["$keyf"] = function(x){ return { a : x.a }; };
+result2 = t.group( cmd );
+
+assert.eq( result , result2, "check result2" );
+
+
+delete cmd.$keyf
+cmd["keyf"] = function(x){ return { a : x.a }; };
+result3 = t.group( cmd );
+
+assert.eq( result , result3, "check result3" );
diff --git a/jstests/core/group3.js b/jstests/core/group3.js
new file mode 100644
index 00000000000..d113b9d570f
--- /dev/null
+++ b/jstests/core/group3.js
@@ -0,0 +1,43 @@
+t = db.group3;
+t.drop();
+
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
+t.save({a: 4});
+
+
+cmd = { initial: {count: 0, sum: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ prev.sum += obj.a;
+ },
+ finalize: function(obj) {
+ if (obj.count){
+ obj.avg = obj.sum / obj.count;
+ }else{
+ obj.avg = 0;
+ }
+ },
+ };
+
+result1 = t.group(cmd);
+
+assert.eq(1, result1.length, "test1");
+assert.eq(10, result1[0].sum, "test1");
+assert.eq(4, result1[0].count, "test1");
+assert.eq(2.5, result1[0].avg, "test1");
+
+
+cmd['finalize'] = function(obj) {
+ if (obj.count){
+ return obj.sum / obj.count;
+ }else{
+ return 0;
+ }
+};
+
+result2 = t.group(cmd);
+
+assert.eq(1, result2.length, "test2");
+assert.eq(2.5, result2[0], "test2");
diff --git a/jstests/core/group4.js b/jstests/core/group4.js
new file mode 100644
index 00000000000..e75c0d1ae2c
--- /dev/null
+++ b/jstests/core/group4.js
@@ -0,0 +1,45 @@
+
+t = db.group4
+t.drop();
+
+function test( c , n ){
+ c.forEach(
+ function(z){
+ assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) );
+ }
+ );
+}
+
+t.insert({name:'bob',foo:1})
+t.insert({name:'bob',foo:2})
+t.insert({name:'alice',foo:1})
+t.insert({name:'alice',foo:3})
+t.insert({name:'fred',foo:3})
+t.insert({name:'fred',foo:4})
+
+x = t.group(
+ {
+ key: {foo:1},
+ initial: {count:0,values:[]},
+ reduce: function (obj, prev){
+ prev.count++
+ prev.values.push(obj.name)
+ }
+ }
+);
+test( x , "A" );
+
+x = t.group(
+ {
+ key: {foo:1},
+ initial: {count:0},
+ reduce: function (obj, prev){
+ if (!prev.values) {prev.values = [];}
+ prev.count++;
+ prev.values.push(obj.name);
+ }
+ }
+);
+test( x , "B" );
+
diff --git a/jstests/core/group5.js b/jstests/core/group5.js
new file mode 100644
index 00000000000..3534fe5f030
--- /dev/null
+++ b/jstests/core/group5.js
@@ -0,0 +1,38 @@
+
+t = db.group5;
+t.drop();
+
+// each group "groupN" has N+5 users
+for ( var group=0; group<10; group++ ){
+ for ( var i=0; i<5+group; i++ ){
+ t.save( { group : "group" + group , user : i } )
+ }
+}
+
+function c( group ){
+ return t.group(
+ {
+ key : { group : 1 } ,
+ q : { group : "group" + group } ,
+ initial : { users : {} },
+ reduce : function(obj,prev){
+ prev.users[obj.user] = true; // add this user to the hash
+ },
+ finalize : function(x){
+ var count = 0;
+ for (var key in x.users){
+ count++;
+ }
+
+ //replace the users object with its count
+ //(could instead add a separate count field and keep users)
+ x.users = count;
+ return x;
+ }
+ })[0]; // returns array
+}
+
+assert.eq( "group0" , c(0).group , "g0" );
+assert.eq( 5 , c(0).users , "g0 a" );
+assert.eq( "group5" , c(5).group , "g5" );
+assert.eq( 10 , c(5).users , "g5 a" );
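+
+// One more data point for the formula above: "group2" should have 2+5 users.
+assert.eq( 7 , c(2).users , "g2 a" );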
diff --git a/jstests/core/group6.js b/jstests/core/group6.js
new file mode 100644
index 00000000000..b77a37a5d11
--- /dev/null
+++ b/jstests/core/group6.js
@@ -0,0 +1,32 @@
+t = db.jstests_group6;
+t.drop();
+
+for( i = 1; i <= 10; ++i ) {
+ t.save( {i:new NumberLong( i ),y:1} );
+}
+
+assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+
+t.drop();
+for( i = 1; i <= 10; ++i ) {
+ if ( i % 2 == 0 ) {
+ t.save( {i:new NumberLong( i ),y:1} );
+ } else {
+ t.save( {i:i,y:1} );
+ }
+}
+
+assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+
+t.drop();
+for( i = 1; i <= 10; ++i ) {
+ if ( i % 2 == 1 ) {
+ t.save( {i:new NumberLong( i ),y:1} );
+ } else {
+ t.save( {i:i,y:1} );
+ }
+}
+
+assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+
+assert.eq.automsg( "NumberLong(10)", "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count" ); \ No newline at end of file
diff --git a/jstests/core/group7.js b/jstests/core/group7.js
new file mode 100644
index 00000000000..1413000079c
--- /dev/null
+++ b/jstests/core/group7.js
@@ -0,0 +1,47 @@
+// Test yielding group command SERVER-1395
+
+t = db.jstests_group7;
+t.drop();
+
+function checkForYield( docs, updates ) {
+ t.drop();
+ a = 0;
+ for( var i = 0; i < docs; ++i ) {
+ t.save( {a:a} );
+ }
+
+ // Iteratively update all a values atomically.
+ p = startParallelShell(
+ 'for( a = 0; a < ' + updates + '; ++a ) {' +
+ 'db.jstests_group7.update({ $atomic: true }, { $set: { a: a }}, false, true);' +
+ '}' );
+
+ for( var i = 0; i < updates; ++i ) {
+ print("running group " + i + " of " + updates);
+ ret = t.group({key:{a:1},reduce:function(){},initial:{}});
+ // Check if group sees more than one a value, indicating that it yielded.
+ if ( ret.length > 1 ) {
+ p();
+ return true;
+ }
+ printjson( ret );
+ }
+
+ p();
+ return false;
+}
+
+var yielded = false;
+var docs = 1500;
+var updates = 50;
+for( var j = 1; j <= 6; ++j ) {
+ print("Iteration " + j + " docs = " + docs + " updates = " + updates);
+ if ( checkForYield( docs, updates ) ) {
+ yielded = true;
+ break;
+ }
+ // Increase docs and updates to encourage yielding.
+ docs *= 2;
+ updates *= 2;
+}
+assert( yielded );
diff --git a/jstests/core/group_empty.js b/jstests/core/group_empty.js
new file mode 100644
index 00000000000..62a734ed0f8
--- /dev/null
+++ b/jstests/core/group_empty.js
@@ -0,0 +1,8 @@
+
+t = db.group_empty;
+t.drop();
+
+res1 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
+t.ensureIndex( { x : 1 } );
+res2 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
+assert.eq( res1, res2 );
diff --git a/jstests/core/grow_hash_table.js b/jstests/core/grow_hash_table.js
new file mode 100644
index 00000000000..1f96820d61f
--- /dev/null
+++ b/jstests/core/grow_hash_table.js
@@ -0,0 +1,42 @@
+// This test creates a large projection, which causes a set of field names to
+// be stored in a StringMap (based on UnorderedFastKeyTable). The hash table
+// starts with 20 slots, but must be grown repeatedly to hold the complete set
+// of fields. This test verifies that we can grow the hash table repeatedly
+// with no failures.
+//
+// Related to SERVER-9824.
+
+var testDB = db.getSiblingDB('grow_hash_table');
+
+var doTest = function(count) {
+ print('Testing with count of ' + count);
+ testDB.dropDatabase();
+ var id = { data: 1 };
+ var doc = { _id: id };
+ var projection = { };
+
+ // Create a document and a projection with fields r1, r2, r3 ...
+ for (var i = 1; i <= count; ++i) {
+ var r = 'r' + i;
+ doc[r] = i;
+ projection[r] = 1;
+ }
+
+ // Store the document
+ assert.writeOK(testDB.collection.insert(doc));
+
+ // Try to read the document using a large projection
+ try {
+ var findCount = testDB.collection.find({ _id: id }, projection).itcount();
+ assert(findCount == 1,
+ 'Failed to find single stored document, find().itcount() == ' + findCount);
+ }
+ catch (e) {
+ testDB.dropDatabase();
+ doassert('Test FAILED! Caught exception ' + tojsononeline(e));
+ }
+ testDB.dropDatabase();
+ jsTest.log('Test PASSED');
+}
+
+doTest(10000);
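+
+// Back-of-envelope for the comment up top (assumes the table doubles when it
+// grows): going from ~20 slots to 10000 keys takes about log2(10000/20) grows.
+print("approximate grow operations assumed: " +
+      Math.ceil(Math.log(10000 / 20) / Math.log(2)));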
diff --git a/jstests/core/hashindex1.js b/jstests/core/hashindex1.js
new file mode 100644
index 00000000000..34bd6dc0725
--- /dev/null
+++ b/jstests/core/hashindex1.js
@@ -0,0 +1,94 @@
+var t = db.hashindex1;
+t.drop()
+
+//test that compound (multi-field) hashed indexes don't get created (maybe change later)
+var badspec = {a : "hashed" , b : 1};
+t.ensureIndex( badspec );
+assert.eq( t.getIndexes().length , 1 , "only _id index should be created");
+
+//test that a unique hashed index doesn't get created (maybe change later)
+var goodspec = {a : "hashed"};
+t.ensureIndex( goodspec , {"unique" : true});
+assert.eq( t.getIndexes().length , 1 , "unique index got created.");
+
+//now test that non-unique index does get created
+t.ensureIndex(goodspec);
+assert.eq( t.getIndexes().length , 2 , "hashed index didn't get created");
+
+//test basic inserts
+for(i=0; i < 10; i++ ){
+ t.insert( {a:i } );
+}
+assert.eq( t.find().count() , 10 , "basic insert didn't work");
+assert.eq( t.find().hint(goodspec).toArray().length , 10 , "basic insert didn't work");
+assert.eq( t.find({a : 3}).hint({_id : 1}).toArray()[0]._id ,
+ t.find({a : 3}).hint(goodspec).toArray()[0]._id ,
+ "hashindex lookup didn't work" );
+
+
+//make sure things with the same hash are not both returned
+t.insert( {a: 3.1} );
+assert.eq( t.find().count() , 11 , "additional insert didn't work");
+assert.eq( t.find({a : 3.1}).hint(goodspec).toArray().length , 1);
+assert.eq( t.find({a : 3}).hint(goodspec).toArray().length , 1);
+//test right obj is found
+assert.eq( t.find({a : 3.1}).hint(goodspec).toArray()[0].a , 3.1);
+
+//test that hashed cursor is used when it should be
+var cursorname = "BtreeCursor a_hashed";
+assert.eq( t.find({a : 1}).explain().cursor ,
+ cursorname ,
+ "not using hashed cursor");
+
+// SERVER-12222
+//printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() )
+//assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor ,
+// cursorname ,
+// "not using hashed cursor");
+assert.neq( t.find({c : 1}).explain().cursor ,
+ cursorname ,
+ "using irrelevant hashed cursor");
+
+printjson( t.find({a : {$in : [1,2]}}).explain() )
+// Hash index used with a $in set membership predicate.
+assert.eq( t.find({a : {$in : [1,2]}}).explain()["cursor"],
+ "BtreeCursor a_hashed",
+ "not using hashed cursor");
+
+// Hash index used with a singleton $and predicate conjunction.
+assert.eq( t.find({$and : [{a : 1}]}).explain()["cursor"],
+ "BtreeCursor a_hashed",
+ "not using hashed cursor");
+
+// Hash index used with a non singleton $and predicate conjunction.
+assert.eq( t.find({$and : [{a : {$in : [1,2]}},{a : {$gt : 1}}]}).explain()["cursor"],
+ "BtreeCursor a_hashed",
+ "not using hashed cursor");
+
+//test creation of index based on hash of _id index
+var goodspec2 = {'_id' : "hashed"};
+t.ensureIndex( goodspec2 );
+assert.eq( t.getIndexes().length , 3 , "_id index didn't get created");
+
+var newid = t.findOne()["_id"];
+assert.eq( t.find( {_id : newid} ).hint( {_id : 1} ).toArray()[0]._id ,
+ t.find( {_id : newid} ).hint( goodspec2 ).toArray()[0]._id,
+ "using hashed index and different index returns different docs");
+
+
+//test creation of sparse hashed index
+var sparseindex = {b : "hashed"};
+t.ensureIndex( sparseindex , {"sparse" : true});
+assert.eq( t.getIndexes().length , 4 , "sparse index didn't get created");
+
+//test that the sparse index holds fewer entries than a non-sparse one after inserts
+for(i=0; i < 10; i++ ){
+ t.insert( {b : i} );
+}
+var totalb = t.find().hint(sparseindex).toArray().length;
+assert.eq( totalb , 10 , "sparse index has wrong total");
+
+var total = t.find().hint({"_id" : 1}).toArray().length;
+var totala = t.find().hint(goodspec).toArray().length;
+assert.eq(total , totala , "non-sparse index has wrong total");
+assert.lt(totalb , totala , "sparse index should have smaller total");
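+
+//extra check: hashing squashes numeric types (see hashtest1.js), so a
+//NumberLong lookup should hit the same hash bucket as the double 3
+assert.eq( t.find({a : NumberLong(3)}).hint(goodspec).toArray().length , 1 ,
+           "hashed lookup by NumberLong didn't find the double 3");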
diff --git a/jstests/core/hashtest1.js b/jstests/core/hashtest1.js
new file mode 100644
index 00000000000..981a0c36877
--- /dev/null
+++ b/jstests/core/hashtest1.js
@@ -0,0 +1,78 @@
+//hashtest1.js
+//Simple tests to check hashing of various types
+//make sure that different numeric types hash to the same thing, plus other sanity checks
+
+var hash = function( v , seed ){
+ if (seed)
+ return db.runCommand({"_hashBSONElement" : v , "seed" : seed})["out"];
+ else
+ return db.runCommand({"_hashBSONElement" : v})["out"];
+};
+
+var oidHash = hash( ObjectId() );
+var oidHash2 = hash( ObjectId() );
+var oidHash3 = hash( ObjectId() );
+assert(! friendlyEqual( oidHash, oidHash2) , "ObjectIDs should hash to different things");
+assert(! friendlyEqual( oidHash, oidHash3) , "ObjectIDs should hash to different things");
+assert(! friendlyEqual( oidHash2, oidHash3) , "ObjectIDs should hash to different things");
+
+var intHash = hash( NumberInt(3) );
+var doubHash = hash( 3 );
+var doubHash2 = hash( 3.0 );
+var longHash = hash( NumberLong(3) );
+var fracHash = hash( NumberInt(3.5) );
+assert.eq( intHash , doubHash );
+assert.eq( intHash , doubHash2 );
+assert.eq( intHash , longHash );
+assert.eq( intHash , fracHash );
+
+var trueHash = hash( true );
+var falseHash = hash( false );
+assert(! friendlyEqual( trueHash, falseHash) , "true and false should hash to different things");
+
+var nullHash = hash( null );
+assert(! friendlyEqual( falseHash , nullHash ) , "false and null should hash to different things");
+
+var dateHash = hash( new Date() );
+sleep(1);
+var isodateHash = hash( ISODate() );
+assert(! friendlyEqual( dateHash, isodateHash) , "different dates should hash to different things");
+
+var stringHash = hash( "3" );
+assert(! friendlyEqual( intHash , stringHash ), "3 and \"3\" should hash to different things");
+
+var regExpHash = hash( RegExp("3") );
+assert(! friendlyEqual( stringHash , regExpHash) , "\"3\" and RegExp(3) should hash to different things");
+
+var intHash4 = hash( 4 );
+assert(! friendlyEqual( intHash , intHash4 ), "3 and 4 should hash to different things");
+
+var intHashSeeded = hash( 4 , 3 );
+assert(! friendlyEqual(intHash4 , intHashSeeded ), "different seeds should make different hashes");
+
+var minkeyHash = hash( MinKey );
+var maxkeyHash = hash( MaxKey );
+assert(! friendlyEqual(minkeyHash , maxkeyHash ), "minkey and maxkey should hash to different things");
+
+var arrayHash = hash( [0,1.0,NumberLong(2)] );
+var arrayHash2 = hash( [0,NumberInt(1),2] );
+assert.eq( arrayHash , arrayHash2 , "didn't squash numeric types in array");
+
+var objectHash = hash( {"0":0, "1" : NumberInt(1), "2" : 2} );
+assert(! friendlyEqual(objectHash , arrayHash2) , "arrays and sub-objects should hash to different things");
+
+var c = hash( {a : {}, b : 1} );
+var d = hash( {a : {b : 1}} );
+assert(! friendlyEqual( c , d ) , "hashing doesn't group sub-docs and fields correctly");
+
+var e = hash( {a : 3 , b : [NumberLong(3), {c : NumberInt(3)}]} );
+var f = hash( {a : NumberLong(3) , b : [NumberInt(3), {c : 3.0}]} );
+assert.eq( e , f , "recursive number squashing doesn't work");
+
+var nanHash = hash( 0/0 );
+var zeroHash = hash( 0 );
+assert.eq( nanHash , zeroHash , "NaN and Zero should hash to the same thing");
+
+
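+//extra check (assumption: the raw BSON bytes are hashed, so field order matters)
+var abHash = hash( {a : 1, b : 2} );
+var baHash = hash( {b : 2, a : 1} );
+assert(! friendlyEqual( abHash , baHash ), "field order should affect the hash");
+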
+//should also test that CodeWScope hashes correctly
+//but waiting for SERVER-3391 (CodeWScope support in shell)
\ No newline at end of file
diff --git a/jstests/core/hint1.js b/jstests/core/hint1.js
new file mode 100644
index 00000000000..b5a580f2b93
--- /dev/null
+++ b/jstests/core/hint1.js
@@ -0,0 +1,16 @@
+
+p = db.jstests_hint1;
+p.drop();
+
+p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } );
+p.ensureIndex( { ts: 1 } );
+
+e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: "alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain();
+assert.eq(e.indexBounds.ts[0][0].getTime(), new Date(1234119308272).getTime(), "A");
+
+//printjson(e);
+
+assert.eq( true, e.indexBounds.ts[0][1], "B"); // the lower bound, just below the minimum date in BSON order, is the boolean true
+
+assert.eq(1, p.find({ live: true, ts: { $lt: new Date(1234119308272) }, cls: "entry", verticals: "alleyinsider" }).sort({ ts: -1 }).hint({ ts: 1 }).count());
+
diff --git a/jstests/core/hostinfo.js b/jstests/core/hostinfo.js
new file mode 100644
index 00000000000..16c3810b2c4
--- /dev/null
+++ b/jstests/core/hostinfo.js
@@ -0,0 +1,33 @@
+// SERVER-4615: Ensure hostInfo() command returns expected results on each platform
+
+var hostinfo = db.hostInfo();
+assert.commandWorked( hostinfo );
+
+// helpers: a field must be present and be neither null nor the empty string.
+// (assert.neq( x, "" || null ) only compares against null, since "" || null
+// evaluates to null, so spell the checks out.)
+function assertHasField( val, msg ) {
+ assert( val !== undefined && val !== null && val !== "", msg );
+}
+function assertHasNonZeroField( val, msg ) {
+ assertHasField( val, msg );
+ assert.neq( val, 0, msg );
+}
+
+// test for os-specific fields
+if (hostinfo.os.type == "Windows") {
+ assertHasField( hostinfo.os.name, "Missing Windows os name" );
+ assertHasField( hostinfo.os.version, "Missing Windows version" );
+
+} else if (hostinfo.os.type == "Linux") {
+ assertHasField( hostinfo.os.name, "Missing Linux os/distro name" );
+ assertHasField( hostinfo.os.version, "Missing Linux version" );
+
+} else if (hostinfo.os.type == "Darwin") {
+ assertHasField( hostinfo.os.name, "Missing Darwin os name" );
+ assertHasField( hostinfo.os.version, "Missing Darwin version" );
+
+} else if (hostinfo.os.type == "BSD") {
+ assertHasField( hostinfo.os.name, "Missing FreeBSD os name" );
+ assertHasField( hostinfo.os.version, "Missing FreeBSD version" );
+}
+
+// comment out this block for systems which have not implemented hostinfo.
+if (hostinfo.os.type != "") {
+ assertHasField( hostinfo.system.hostname, "Missing Hostname" );
+ assertHasField( hostinfo.system.currentTime, "Missing Current Time" );
+ assertHasNonZeroField( hostinfo.system.cpuAddrSize, "Missing CPU Address Size" );
+ assertHasField( hostinfo.system.memSizeMB, "Missing Memory Size" );
+ assertHasNonZeroField( hostinfo.system.numCores, "Missing Number of Cores" );
+ assertHasField( hostinfo.system.cpuArch, "Missing CPU Architecture" );
+ assertHasField( hostinfo.system.numaEnabled, "Missing NUMA flag" );
+}
diff --git a/jstests/core/id1.js b/jstests/core/id1.js
new file mode 100644
index 00000000000..9236340e4ec
--- /dev/null
+++ b/jstests/core/id1.js
@@ -0,0 +1,16 @@
+
+t = db.id1
+t.drop();
+
+t.save( { _id : { a : 1 , b : 2 } , x : "a" } );
+t.save( { _id : { a : 1 , b : 2 } , x : "b" } );
+t.save( { _id : { a : 3 , b : 2 } , x : "c" } );
+t.save( { _id : { a : 4 , b : 2 } , x : "d" } );
+t.save( { _id : { a : 4 , b : 2 } , x : "e" } );
+t.save( { _id : { a : 2 , b : 2 } , x : "f" } );
+
+assert.eq( 4 , t.find().count() , "A" );
+assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x );
+assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x );
+assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x );
+assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x );
diff --git a/jstests/core/idhack.js b/jstests/core/idhack.js
new file mode 100644
index 00000000000..21409645489
--- /dev/null
+++ b/jstests/core/idhack.js
@@ -0,0 +1,43 @@
+
+t = db.idhack
+t.drop()
+
+
+t.insert( { _id : { x : 1 } , z : 1 } )
+t.insert( { _id : { x : 2 } , z : 2 } )
+t.insert( { _id : { x : 3 } , z : 3 } )
+t.insert( { _id : 1 , z : 4 } )
+t.insert( { _id : 2 , z : 5 } )
+t.insert( { _id : 3 , z : 6 } )
+
+assert.eq( 2 , t.findOne( { _id : { x : 2 } } ).z , "A1" )
+assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).count() , "A2" )
+assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).itcount() , "A3" )
+
+t.update( { _id : { x : 2 } } , { $set : { z : 7 } } )
+assert.eq( 7 , t.findOne( { _id : { x : 2 } } ).z , "B1" )
+
+t.update( { _id : { $gte : 2 } } , { $set : { z : 8 } } , false , true )
+assert.eq( 4 , t.findOne( { _id : 1 } ).z , "C1" )
+assert.eq( 8 , t.findOne( { _id : 2 } ).z , "C2" )
+assert.eq( 8 , t.findOne( { _id : 3 } ).z , "C3" )
+
+// explain output should show that the ID hack was applied.
+var query = { _id : { x : 2 } };
+var explain = t.find( query ).explain( true );
+print( "explain for " + tojson( query , "" , true ) + " = " + tojson( explain ) );
+assert.eq( 1 , explain.n , "D1" );
+assert.eq( 1 , explain.nscanned , "D2" );
+assert.neq( undefined , explain.cursor , "D3" );
+assert.neq( "" , explain.cursor , "D4" );
+assert.neq( undefined , explain.indexBounds , "D5" );
+assert.neq( {} , explain.indexBounds , "D6" );
+
+// ID hack cannot be used with hint().
+var query = { _id : { x : 2 } };
+var explain = t.find( query ).explain();
+t.ensureIndex( { _id : 1 , a : 1 } );
+var hintExplain = t.find( query ).hint( { _id : 1 , a : 1 } ).explain();
+print( "explain for hinted query = " + tojson( hintExplain ) );
+assert.neq( explain.cursor, hintExplain.cursor, "E1" );
+
diff --git a/jstests/core/in.js b/jstests/core/in.js
new file mode 100644
index 00000000000..da1313692e1
--- /dev/null
+++ b/jstests/core/in.js
@@ -0,0 +1,24 @@
+
+t = db.in1;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+
+// $in must take an array as argument: SERVER-7445
+assert.throws( function() { return t.find( { a : { $in : { x : 1 } } } ).itcount(); } );
+assert.throws( function() { return t.find( { a : { $in : 1 } } ).itcount(); } );
+
+assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" );
+assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" );
+assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" );
+assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" );
+assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" );
+
+assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" );
+
+assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "H" );
diff --git a/jstests/core/in2.js b/jstests/core/in2.js
new file mode 100644
index 00000000000..66b90daa25a
--- /dev/null
+++ b/jstests/core/in2.js
@@ -0,0 +1,33 @@
+
+t = db.in2;
+
+function go( name , index ){
+
+ t.drop();
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 1 , b : 2 } );
+ t.save( { a : 1 , b : 3 } );
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 2 , b : 2 } );
+ t.save( { a : 3 , b : 3 } );
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 2 , b : 1 } );
+ t.save( { a : 3 , b : 1 } );
+
+ if ( index )
+ t.ensureIndex( index );
+
+ assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" );
+
+ assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" );
+}
+
+go( "no index" );
+go( "index on a" , { a : 1 } );
+go( "index on b" , { b : 1 } );
+go( "index on a&b" , { a : 1 , b : 1 } );
+
+
diff --git a/jstests/core/in3.js b/jstests/core/in3.js
new file mode 100644
index 00000000000..b0a8bb7b81f
--- /dev/null
+++ b/jstests/core/in3.js
@@ -0,0 +1,11 @@
+t = db.jstests_in3;
+
+t.drop();
+t.ensureIndex( {i:1} );
+assert.eq( {i:[[3,3]]}, t.find( {i:{$in:[3]}} ).explain().indexBounds , "A1" );
+assert.eq( {i:[[3,3],[6,6]]}, t.find( {i:{$in:[3,6]}} ).explain().indexBounds , "A2" );
+
+for ( var i=0; i<20; i++ )
+ t.insert( { i : i } );
+
+assert.eq( 3 , t.find( {i:{$in:[3,6]}} ).explain().nscanned , "B1" )
diff --git a/jstests/core/in4.js b/jstests/core/in4.js
new file mode 100644
index 00000000000..3e3dca29528
--- /dev/null
+++ b/jstests/core/in4.js
@@ -0,0 +1,42 @@
+t = db.jstests_in4;
+
+function checkRanges( a, b ) {
+ assert.eq( a, b );
+}
+
+t.drop();
+t.ensureIndex( {a:1,b:1} );
+checkRanges( {a:[[2,2]],b:[[3,3]]}, t.find( {a:2,b:3} ).explain().indexBounds );
+checkRanges( {a:[[2,2],[3,3]],b:[[4,4]]}, t.find( {a:{$in:[2,3]},b:4} ).explain().indexBounds );
+checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds );
+checkRanges( {a:[[2,2],[3,3]],b:[[4,4],[5,5]]}, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().indexBounds );
+
+checkRanges( {a:[[2,2],[3,3]],b:[[4,10]]}, t.find( {a:{$in:[2,3]},b:{$gt:4,$lt:10}} ).explain().indexBounds );
+
+t.save( {a:1,b:1} );
+t.save( {a:2,b:4.5} );
+t.save( {a:2,b:4} );
+assert.eq( 2, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).hint( {a:1,b:1} ).explain().nscanned );
+assert.eq( 2, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).a );
+assert.eq( 4, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).b );
+
+t.drop();
+t.ensureIndex( {a:1,b:1,c:1} );
+checkRanges( {a:[[2,2]],b:[[3,3],[4,4]],c:[[5,5]]}, t.find( {a:2,b:{$in:[3,4]},c:5} ).explain().indexBounds );
+
+t.save( {a:2,b:3,c:5} );
+t.save( {a:2,b:3,c:4} );
+assert.eq( 1, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned );
+t.remove({});
+t.save( {a:2,b:4,c:5} );
+t.save( {a:2,b:4,c:4} );
+assert.eq( 2, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned );
+
+t.drop();
+t.ensureIndex( {a:1,b:-1} );
+ib = t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds;
+checkRanges( {a:[[2,2]],b:[[4,4],[3,3]]}, ib );
+assert( ib.b[ 0 ][ 0 ] > ib.b[ 1 ][ 0 ] );
+ib = t.find( {a:2,b:{$in:[3,4]}} ).sort( {a:-1,b:1} ).explain().indexBounds;
+checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, ib );
+assert( ib.b[ 0 ][ 0 ] < ib.b[ 1 ][ 0 ] );
diff --git a/jstests/core/in5.js b/jstests/core/in5.js
new file mode 100644
index 00000000000..435c8864004
--- /dev/null
+++ b/jstests/core/in5.js
@@ -0,0 +1,56 @@
+
+t = db.in5
+
+function go( fn ){
+ t.drop();
+ o = {};
+ o[fn] = { a : 1 , b : 2 };
+ t.insert( o );
+
+ x = {};
+ x[fn] = { a : 1 , b : 2 };
+ assert.eq( 1 , t.find( x ).itcount() , "A1 - " + fn );
+
+
+ y = {};
+ y[fn] = { $in : [ { a : 1 , b : 2 } ] }
+ assert.eq( 1 , t.find( y ).itcount() , "A2 - " + fn );
+
+
+ z = {};
+ z[fn+".a"] = 1;
+ z[fn+".b"] = { $in : [ 2 ] }
+ assert.eq( 1 , t.find( z ).itcount() , "A3 - " + fn ); // SERVER-1366
+
+
+ i = {}
+ i[fn] = 1
+ t.ensureIndex( i )
+
+ assert.eq( 1 , t.find( x ).itcount() , "B1 - " + fn );
+ assert.eq( 1 , t.find( y ).itcount() , "B2 - " + fn );
+ assert.eq( 1 , t.find( z ).itcount() , "B3 - " + fn ); // SERVER-1366
+
+ t.dropIndex( i )
+
+ assert.eq( 1 , t.getIndexes().length , "T2" );
+
+ i = {}
+ i[fn + ".a" ] = 1;
+ t.ensureIndex( i )
+ assert.eq( 2 , t.getIndexes().length , "T3" );
+
+ assert.eq( 1 , t.find( x ).itcount() , "C1 - " + fn );
+ assert.eq( 1 , t.find( y ).itcount() , "C2 - " + fn );
+ assert.eq( 1 , t.find( z ).itcount() , "C3 - " + fn ); // SERVER-1366
+
+ t.dropIndex( i )
+
+
+}
+
+go( "x" );
+go( "_id" )
+
+
+
diff --git a/jstests/core/in6.js b/jstests/core/in6.js
new file mode 100644
index 00000000000..f114d93442a
--- /dev/null
+++ b/jstests/core/in6.js
@@ -0,0 +1,13 @@
+t = db.jstests_in6;
+t.drop();
+
+t.save( {} );
+
+function doTest() {
+ assert.eq.automsg( "1", "t.count( {i:null} )" );
+ assert.eq.automsg( "1", "t.count( {i:{$in:[null]}} )" );
+}
+
+doTest();
+t.ensureIndex( {i:1} );
+doTest();
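What in6.js relies on: {i:null} and {i:{$in:[null]}} both match documents where
i is explicitly null as well as documents where i is missing entirely, which is
why the single empty document above counts for either predicate. A small
sketch, with a hypothetical collection name, of how to tell the two cases
apart:

    // null matches both explicit null and a missing field
    db.example.insert( { i : null } );
    db.example.insert( {} );
    assert.eq( 2, db.example.count( { i : null } ) );                // explicit null and missing
    assert.eq( 1, db.example.count( { i : { $type : 10 } } ) );      // only the explicit null (BSON type 10)
    assert.eq( 1, db.example.count( { i : { $exists : false } } ) ); // only the missing field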
diff --git a/jstests/core/in8.js b/jstests/core/in8.js
new file mode 100644
index 00000000000..5e7e587629f
--- /dev/null
+++ b/jstests/core/in8.js
@@ -0,0 +1,23 @@
+// SERVER-2829 Test arrays matching themselves within a $in expression.
+
+t = db.jstests_in8;
+t.drop();
+
+t.save( {key: [1]} );
+t.save( {key: ['1']} );
+t.save( {key: [[2]]} );
+
+function doTest() {
+ assert.eq( 1, t.count( {key:[1]} ) );
+ assert.eq( 1, t.count( {key:{$in:[[1]]}} ) );
+ assert.eq( 1, t.count( {key:{$in:[[1]],$ne:[2]}} ) );
+ assert.eq( 1, t.count( {key:{$in:[['1']],$type:2}} ) );
+ assert.eq( 1, t.count( {key:['1']} ) );
+ assert.eq( 1, t.count( {key:{$in:[['1']]}} ) );
+ assert.eq( 1, t.count( {key:[2]} ) );
+ assert.eq( 1, t.count( {key:{$in:[[2]]}} ) );
+}
+
+doTest();
+t.ensureIndex( {key:1} );
+doTest();
diff --git a/jstests/core/in9.js b/jstests/core/in9.js
new file mode 100644
index 00000000000..cbe28e2e2df
--- /dev/null
+++ b/jstests/core/in9.js
@@ -0,0 +1,35 @@
+// SERVER-2343 Test $in empty array matching.
+
+t = db.jstests_in9;
+t.drop();
+
+function someData() {
+ t.remove({});
+ t.save( {key: []} );
+}
+
+function moreData() {
+ someData();
+ t.save( {key: [1]} );
+ t.save( {key: ['1']} );
+ t.save( {key: null} );
+ t.save( {} );
+}
+
+function check() {
+ assert.eq( 1, t.count( {key:[]} ) );
+ assert.eq( 1, t.count( {key:{$in:[[]]}} ) );
+}
+
+function doTest() {
+ someData();
+ check();
+ moreData();
+ check();
+}
+
+doTest();
+
+// SERVER-1943 not fixed yet
+t.ensureIndex( {key:1} );
+doTest();
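For context: a literal empty array as a $in element, as in {key:{$in:[[]]}},
matches documents whose key value is an empty array. That is distinct from an
empty $in candidate list, which matches nothing. A brief sketch with a
hypothetical collection name:

    db.example.insert( { key : [] } );
    assert.eq( 1, db.example.count( { key : { $in : [ [] ] } } ) ); // [] as a candidate value
    assert.eq( 0, db.example.count( { key : { $in : [] } } ) );     // empty candidate list matches nothing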
diff --git a/jstests/core/ina.js b/jstests/core/ina.js
new file mode 100644
index 00000000000..cf614ab994d
--- /dev/null
+++ b/jstests/core/ina.js
@@ -0,0 +1,15 @@
+// Uassert when $elemMatch is attempted within $in SERVER-3545
+
+t = db.jstests_ina;
+t.drop();
+t.save( {} );
+
+assert.throws( function() { t.find( {a:{$in:[{$elemMatch:{b:1}}]}} ).itcount(); } );
+assert.throws( function() { t.find( {a:{$not:{$in:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
+
+assert.throws( function() { t.find( {a:{$nin:[{$elemMatch:{b:1}}]}} ).itcount(); } );
+assert.throws( function() { t.find( {a:{$not:{$nin:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
+
+// NOTE: above we don't check cases like {b:2,$elemMatch:{b:3,4}} -- generally
+// we assume that the first key is $elemMatch if any key is, since validating
+// every key is expensive in some cases. \ No newline at end of file
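Since $elemMatch is rejected inside $in and $nin, the usual rewrite when
several element-level alternatives are needed is an $or of separate $elemMatch
predicates. A minimal sketch, collection name hypothetical:

    // instead of the illegal { a : { $in : [ { $elemMatch : { b : 1 } } ] } }
    db.example.find( { $or : [ { a : { $elemMatch : { b : 1 } } },
                               { a : { $elemMatch : { b : 2 } } } ] } );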
diff --git a/jstests/core/inb.js b/jstests/core/inb.js
new file mode 100644
index 00000000000..34ec843d36c
--- /dev/null
+++ b/jstests/core/inb.js
@@ -0,0 +1,19 @@
+// Test $in regular expressions with overlapping index bounds. SERVER-4677
+
+t = db.jstests_inb;
+t.drop();
+
+function checkBoundsAndResults( query ) {
+ assert.eq( [ 'a', 'b' ], t.find( query ).explain().indexBounds.x[0] );
+ assert.eq( 4, t.count( query ) );
+ assert.eq( 4, t.find( query ).itcount() );
+}
+
+t.ensureIndex( {x:1} );
+t.save( {x:'aa'} );
+t.save( {x:'ab'} );
+t.save( {x:'ac'} );
+t.save( {x:'ad'} );
+
+checkBoundsAndResults( {x:{$in:[/^a/,/^ab/]}} );
+checkBoundsAndResults( {x:{$in:[/^ab/,/^a/]}} );
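What the bounds check above relies on: a left-anchored regex such as /^a/ is
answered from the index with the half-open key range [ "a", "b" ), and
overlapping prefix ranges contributed by several $in regexes must be unioned
into that single bound regardless of the order they appear in. A sketch of
inspecting the bound, assuming an index on { x : 1 }:

    printjson( db.example.find( { x : /^a/ } ).explain().indexBounds );
    // expected to include: { x : [ [ "a", "b" ] ] }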
diff --git a/jstests/core/inc-SERVER-7446.js b/jstests/core/inc-SERVER-7446.js
new file mode 100644
index 00000000000..c8066a8e491
--- /dev/null
+++ b/jstests/core/inc-SERVER-7446.js
@@ -0,0 +1,39 @@
+var c = db.incSERVER7446
+
+// A 32 bit overflow spills to 64 bits
+c.drop();
+c.save( { a: NumberInt( "2147483647" ) } );
+var updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
+assert.eq(1, updateResult.nMatched, "Object not modified");
+var res = c.findOne();
+assert.eq(NumberLong, res.a.constructor,
+ "NumberInt incremented beyond std::numeric_limits<in32_t>::max() not NumberLong");
+assert.eq(NumberLong("2147483648"), res.a,
+ "NumberInt incremented beyond std::numeric_limits<in32_t>::max() has wrong value");
+
+// A 32 bit underflow spills to 64 bits
+c.drop();
+c.save( { a: NumberInt( "-2147483648" ) } );
+updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
+assert.eq(1, updateResult.nMatched, "Object not modified");
+res = c.findOne();
+assert.eq(NumberLong, res.a.constructor,
+ "NumberInt decremented beyond std::numeric_limits<in32_t>::min() not NumberLong");
+assert.eq(NumberLong("-2147483649"), res.a,
+ "NumberInt decremented beyond std::numeric_limits<in32_t>::min() has wrong value");
+
+// A 64 bit overflow is an error
+c.drop();
+c.save( { a: NumberLong( "9223372036854775807" ) } );
+updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
+assert.eq(0, updateResult.nMatched,
+ "Did not fail to increment a NumberLong past std::numeric_limits<int64_t>::max()");
+
+// A 64 bit underflow is an error
+c.drop();
+c.save( { a: NumberLong( "-9223372036854775808" ) } );
+updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
+assert.eq(0, updateResult.nMatched,
+ "Did not fail to decrement a NumberLong past std::numeric_limits<int64_t>::min()");
+
+c.drop()
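The practical upshot of the four cases above: a 32 bit $inc result is silently
promoted to NumberLong, while a 64 bit overflow fails the update and leaves the
document untouched. A sketch, collection name hypothetical, of the
counter-update style this behavior supports:

    // store counters as NumberLong so increments stay in one representation;
    // an overflowing $inc would instead fail and report nMatched == 0
    db.counters.insert( { _id : "pageviews", n : NumberLong(0) } );
    var res = db.counters.update( { _id : "pageviews" }, { $inc : { n : NumberLong(1) } } );
    assert.eq( 1, res.nMatched );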
diff --git a/jstests/core/inc1.js b/jstests/core/inc1.js
new file mode 100644
index 00000000000..027f307a476
--- /dev/null
+++ b/jstests/core/inc1.js
@@ -0,0 +1,32 @@
+
+t = db.inc1;
+t.drop();
+
+function test( num , name ){
+ assert.eq( 1 , t.count() , name + " count" );
+ assert.eq( num , t.findOne().x , name + " value" );
+}
+
+t.save( { _id : 1 , x : 1 } );
+test( 1 , "A" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 2 , "B" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 3 , "C" );
+
+t.update( { _id : 2 } , { $inc : { x : 1 } } );
+test( 3 , "D" );
+
+t.update( { _id : 1 } , { $inc : { x : 2 } } );
+test( 5 , "E" );
+
+t.update( { _id : 1 } , { $inc : { x : -1 } } );
+test( 4 , "F" );
+
+t.ensureIndex( { x : 1 } );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 5 , "G" );
+
diff --git a/jstests/core/inc2.js b/jstests/core/inc2.js
new file mode 100644
index 00000000000..75a8e65a384
--- /dev/null
+++ b/jstests/core/inc2.js
@@ -0,0 +1,22 @@
+
+t = db.inc2
+t.drop();
+
+t.save( { _id : 1 , x : 1 } );
+t.save( { _id : 2 , x : 2 } );
+t.save( { _id : 3 , x : 3 } );
+
+function order(){
+ return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } );
+}
+
+assert.eq( "1,2,3" , order() , "A" );
+
+t.update( { _id : 1 } , { $inc : { x : 4 } } );
+assert.eq( "2,3,1" , order() , "B" );
+
+t.ensureIndex( { x : 1 } );
+assert.eq( "2,3,1" , order() , "C" );
+
+t.update( { _id : 3 } , { $inc : { x : 4 } } );
+assert.eq( "2,1,3" , order() , "D" );
diff --git a/jstests/core/inc3.js b/jstests/core/inc3.js
new file mode 100644
index 00000000000..baeeb198cf4
--- /dev/null
+++ b/jstests/core/inc3.js
@@ -0,0 +1,16 @@
+
+t = db.inc3;
+
+t.drop();
+t.save( { _id : 1 , z : 1 , a : 1 } );
+t.update( {} , { $inc : { z : 1 , a : 1 } } );
+t.update( {} , { $inc : { a : 1 , z : 1 } } );
+assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" )
+
+
+t.drop();
+t.save( { _id : 1 , a : 1 , z : 1 } );
+t.update( {} , { $inc : { z : 1 , a : 1 } } );
+t.update( {} , { $inc : { a : 1 , z : 1 } } );
+assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" )
+
diff --git a/jstests/core/index1.js b/jstests/core/index1.js
new file mode 100644
index 00000000000..64bbfa8732b
--- /dev/null
+++ b/jstests/core/index1.js
@@ -0,0 +1,24 @@
+
+t = db.embeddedIndexTest;
+
+t.remove( {} );
+
+o = { name : "foo" , z : { a : 17 , b : 4} };
+t.save( o );
+
+assert( t.findOne().z.a == 17 );
+assert( t.findOne( { z : { a : 17 } } ) == null);
+
+t.ensureIndex( { "z.a" : 1 } );
+
+assert( t.findOne().z.a == 17 );
+assert( t.findOne( { z : { a : 17 } } ) == null);
+
+o = { name : "bar" , z : { a : 18 } };
+t.save( o );
+
+assert.eq.automsg( "2", "t.find().length()" );
+assert.eq.automsg( "2", "t.find().sort( { 'z.a' : 1 } ).length()" );
+assert.eq.automsg( "2", "t.find().sort( { 'z.a' : -1 } ).length()" );
+
+assert(t.validate().valid);
diff --git a/jstests/core/index10.js b/jstests/core/index10.js
new file mode 100644
index 00000000000..d86402e41af
--- /dev/null
+++ b/jstests/core/index10.js
@@ -0,0 +1,32 @@
+// unique index, drop dups
+
+t = db.jstests_index10;
+t.drop();
+
+t.save( {i:1} );
+t.save( {i:2} );
+t.save( {i:1} );
+t.save( {i:3} );
+t.save( {i:1} );
+
+t.ensureIndex( {i:1} );
+assert.eq( 5, t.count() );
+t.dropIndexes();
+var err = t.ensureIndex( {i:1}, true );
+assert.writeError(err)
+assert.eq( 11000, err.getWriteError().code );
+
+assert( 1 == db.system.indexes.count( {ns:"test.jstests_index10" } ), "only id index" );
+// t.dropIndexes();
+
+ts = t.totalIndexSize();
+t.ensureIndex( {i:1}, [ true, true ] );
+ts2 = t.totalIndexSize();
+
+assert.eq( ts * 2, ts2, "totalIndexSize fail" );
+
+assert.eq( 3, t.count() );
+assert.eq( 1, t.count( {i:1} ) );
+
+t.ensureIndex( {j:1}, [ true, true ] );
+assert.eq( 1, t.count() );
diff --git a/jstests/core/index13.js b/jstests/core/index13.js
new file mode 100644
index 00000000000..7e317d90d94
--- /dev/null
+++ b/jstests/core/index13.js
@@ -0,0 +1,147 @@
+// Top level match fields within an $elemMatch clause may constrain multiple subfields from a
+// compound multikey index. SERVER-3104
+//
+// Given a multikey index { 'a.b':1, 'a.c':1 } and query { 'a.b':3, 'a.c':3 } only the index field
+// 'a.b' is constrained to the range [3, 3], while the index field 'a.c' is just constrained
+// to be within minkey and maxkey. This implementation ensures that the document
+// { a:[ { b:3 }, { c:3 } ] }, which generates index keys { 'a.b':3, 'a.c':null } and
+// { 'a.b':null, 'a.c':3 } will be retrieved for the query. (See SERVER-958 for more
+// information.)
+//
+// If the query is instead { a:{ $elemMatch:{ b:3, c:3 } } } then the document
+// { a:[ { b:3 }, { c:3 } ] } does not match. Until SERVER-3104 was implemented, the index
+// constraints would be [3,3] on the 'a.b' field and [minkey,maxkey] on the 'a.c' field, the same as
+// for the non $elemMatch query in the previous paragraph. With the SERVER-3104 implementation,
+// constraints on two fields within a $elemMatch parent can both be applied to an index, so the
+// index constraints become [3,3] on the 'a.b' field _and_ [3,3] on
+// the 'a.c' field.
+
+t = db.jstests_index13;
+t.drop();
+
+function assertConsistentResults( query ) {
+ assert.eq( t.find( query ).hint( { $natural:1 } ).sort( { _id:1 } ).toArray(),
+ t.find( query ).hint( index ).sort( { _id:1 } ).toArray() );
+}
+
+function assertResults( query ) {
+ explain = t.find( query ).hint( index ).explain();
+ // printjson( explain ); // debug
+ assertConsistentResults( query );
+}
+
+// Cases with single dotted index field names.
+index = { 'a.b':1, 'a.c':1 };
+t.ensureIndex( index );
+t.save( { a:[ { b:1 }, { c:1 } ] } );
+t.save( { a:[ { b:1, c:1 } ] } );
+assert.eq( 2, t.count() );
+// Without $elemMatch.
+assertResults( { 'a.b':1, 'a.c':1 } );
+// With $elemMatch.
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } } } );
+
+// Without shared $elemMatch.
+assertResults( { 'a.b':1, a:{ $elemMatch:{ c:1 } } } );
+// Two different $elemMatch expressions.
+assertResults( { $and:[ { a:{ $elemMatch:{ b:1 } } },
+ { a:{ $elemMatch:{ c:1 } } } ] } );
+
+
+// Cases relating to parse order and inclusion of intersected ranges.
+assertResults( { 'a.b':1, a:{ $elemMatch:{ b:{ $gt:0 }, c:1 } } } );
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':1 } );
+assertResults( { 'a.c':1, a:{ $elemMatch:{ b:1, c:1 } } } );
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':{ $gt:0 } } );
+
+// Cases with $elemMatch on multiple fields.
+t.remove({});
+index = { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 };
+t.ensureIndex( index );
+t.insert( { a:[ { b:1 }, { c:1 } ], d: { e:1, f:1 } } );
+t.insert( { a:[ { b:1, c:1 } ], d: { e:1, f:1 } } );
+t.insert( { a:{ b:1, c:1 }, d:[ { e:1, f:1 } ] } );
+t.insert( { a:{ b:1, c:1 }, d:[ { e:1 }, { f:1 } ] } );
+
+assert.eq( 4, t.count() );
+
+// Without $elemMatch.
+assertResults( { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 } );
+// With $elemMatch.
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd': { $elemMatch:{ e:1, f:1 } } } );
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd.e': 1, 'd.f' : 1 } );
+assertResults( { 'a.b': 1, 'a.c' : 1, 'd': { $elemMatch:{ e:1, f:1 } } } );
+
+
+// Cases with nested $elemMatch.
+t.remove({})
+index = { 'a.b.c':1, 'a.b.d' :1 };
+t.ensureIndex( index );
+t.insert( { a:[ { b: [ { c : 1, d : 1 } ] } ] } ) ;
+t.insert( { a:[ { b: [ { c : 1 } , { d : 1 } ] } ] } ) ;
+assert.eq( 2, t.count() );
+// Without $elemMatch.
+assertResults( { 'a.b.c':1, 'a.b.d':1 } );
+// With $elemMatch.
+assertResults( { "a" : { $elemMatch : { "b" : { $elemMatch : { c : 1, d : 1 } } } } } );
+
+// Cases with double dotted index field names.
+t.drop();
+index = { 'a.b.x':1, 'a.b.y':1 };
+t.ensureIndex( index );
+t.save( { a:{ b:{ x:1, y:1 } } } );
+t.save( { a:[ { b:{ x:1 } }, { b:{ y:1 } } ] } );
+t.save( { a:[ { b:[ { x:1 }, { y:1 } ] } ] } );
+t.save( { a:[ { b:[ { x:1, y:1 } ] } ] } );
+assert.eq( 4, t.count() );
+
+// No $elemMatch.
+assertResults( { 'a.b.x':1, 'a.b.y':1 } );
+// $elemMatch with dotted children.
+assertResults( { a:{ $elemMatch:{ 'b.x':1, 'b.y':1 } } } );
+// $elemMatch with undotted children.
+assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+
+// Cases where a field is indexed along with its children.
+t.dropIndexes();
+index = { 'a':1, 'a.b.x':1, 'a.b.y':1 };
+t.ensureIndex( index );
+
+// With $ne.
+assertResults( { a:{ $ne:4 }, 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+
+// No constraint on a prior parent field.
+assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+
+// Cases with double dotted index field names branching to different fields at each dot.
+t.drop();
+index = { 'a.b.c':1, 'a.e.f':1, 'a.b.d':1, 'a.e.g':1 }
+t.ensureIndex( index );
+t.save( { a:{ b:{ c:1, d:1 }, e:{ f:1, g:1 } } } );
+t.save( { a:[ { b:{ c:1 }, e:{ f:1 } }, { b:{ d:1 }, e:{ g:1 } } ] } );
+t.save( { a:[ { b:{ c:1 } }, { e:{ f:1 } }, { b:{ d:1 } }, { e:{ g:1 } } ] } );
+t.save( { a:[ { b:[ { c:1 }, { d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
+t.save( { a:[ { b:[ { c:[ 1 ] }, { d:[ 1 ] } ] }, { e:[ { f:[ 1 ] }, { g:[ 1 ] } ] } ] } );
+t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
+t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1, g:1 } ] } ] } );
+assert.eq( 7, t.count() );
+
+// Constraint on a prior cousin field.
+assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } },
+ 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
+
+// Different constraint on a prior cousin field.
+assertResults( { 'a.b':{ $elemMatch:{ d:1 } },
+ 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
+
+
+// Cases with double dotted index field names branching to different fields at each dot, and the
+// same field name strings after the second dot.
+t.drop();
+index = { 'a.b.c':1, 'a.e.c':1, 'a.b.d':1, 'a.e.d':1 }
+t.ensureIndex( index );
+t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { c:1, d:1 } ] } ] } );
+assert.eq( 1, t.count() );
+
+// Constraint on a prior cousin field with the same field names.
+assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, 'a.e':{ $elemMatch:{ c:1, d:1 } } } );
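To make the header comment concrete, a distilled sketch (collection name
hypothetical) of the two behaviors it contrasts: without $elemMatch the matched
subfields may come from different array elements, while with $elemMatch they
must come from the same element.

    db.example.drop();
    db.example.ensureIndex( { 'a.b' : 1, 'a.c' : 1 } );
    db.example.save( { a : [ { b : 3 }, { c : 3 } ] } ); // keys { 'a.b':3, 'a.c':null } and { 'a.b':null, 'a.c':3 }
    assert.eq( 1, db.example.count( { 'a.b' : 3, 'a.c' : 3 } ) );                  // fields from different elements
    assert.eq( 0, db.example.count( { a : { $elemMatch : { b : 3, c : 3 } } } ) ); // must be one element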
diff --git a/jstests/core/index2.js b/jstests/core/index2.js
new file mode 100644
index 00000000000..b54abcaa792
--- /dev/null
+++ b/jstests/core/index2.js
@@ -0,0 +1,40 @@
+/* test indexing where the key is an embedded object.
+ */
+
+t = db.embeddedIndexTest2;
+
+t.drop();
+assert( t.findOne() == null );
+
+o = { name : "foo" , z : { a : 17 } };
+p = { name : "foo" , z : { a : 17 } };
+q = { name : "barrr" , z : { a : 18 } };
+r = { name : "barrr" , z : { k : "zzz", L:[1,2] } };
+
+t.save( o );
+
+assert( t.findOne().z.a == 17 );
+
+t.save( p );
+t.save( q );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+t.save( r );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+t.ensureIndex( { z : 1 } );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+assert( t.find().sort( { z : 1 } ).length() == 4 );
+assert( t.find().sort( { z : -1 } ).length() == 4 );
+
+assert(t.validate().valid);
diff --git a/jstests/core/index3.js b/jstests/core/index3.js
new file mode 100644
index 00000000000..80139460cb4
--- /dev/null
+++ b/jstests/core/index3.js
@@ -0,0 +1,16 @@
+
+
+t = db.index3;
+t.drop();
+
+assert( t.getIndexes().length == 0 );
+
+t.ensureIndex( { name : 1 } );
+
+t.save( { name : "a" } );
+
+t.ensureIndex( { name : 1 } );
+
+assert( t.getIndexes().length == 2 );
+
+assert(t.validate().valid);
diff --git a/jstests/core/index4.js b/jstests/core/index4.js
new file mode 100644
index 00000000000..9dd731c83ee
--- /dev/null
+++ b/jstests/core/index4.js
@@ -0,0 +1,33 @@
+// index4.js
+
+
+t = db.index4;
+t.drop();
+
+t.save( { name : "alleyinsider" ,
+ instances : [
+ { pool : "prod1" } ,
+ { pool : "dev1" }
+ ]
+ } );
+
+t.save( { name : "clusterstock" ,
+ instances : [
+ { pool : "dev1" }
+ ]
+ } );
+
+
+// this should fail, not allowed -- we confirm that.
+t.ensureIndex( { instances : { pool : 1 } } );
+assert.eq( 0, db.system.indexes.find( {ns:"test.index4",name:{$ne:"_id_"}} ).count(), "no indexes should be here yet");
+
+t.ensureIndex( { "instances.pool" : 1 } );
+
+sleep( 10 );
+
+a = t.find( { instances : { pool : "prod1" } } );
+assert( a.length() == 1, "len1" );
+assert( a[0].name == "alleyinsider", "alley" );
+
+assert(t.validate().valid, "valid" );
diff --git a/jstests/core/index5.js b/jstests/core/index5.js
new file mode 100644
index 00000000000..841ac12ed45
--- /dev/null
+++ b/jstests/core/index5.js
@@ -0,0 +1,24 @@
+// index5.js - test reverse direction index
+
+function validate() {
+ assert.eq( 2, t.find().count() );
+ f = t.find().sort( { a: 1 } );
+ assert.eq( 2, t.count() );
+ assert.eq( 1, f[ 0 ].a );
+ assert.eq( 2, f[ 1 ].a );
+ r = t.find().sort( { a: -1 } );
+ assert.eq( 2, r.count() );
+ assert.eq( 2, r[ 0 ].a );
+ assert.eq( 1, r[ 1 ].a );
+}
+
+t = db.index5;
+t.drop();
+
+t.save( { a: 1 } );
+t.save( { a: 2 } );
+
+validate();
+
+t.ensureIndex( { a: -1 } );
+validate();
diff --git a/jstests/core/index6.js b/jstests/core/index6.js
new file mode 100644
index 00000000000..8dbd8f74fcf
--- /dev/null
+++ b/jstests/core/index6.js
@@ -0,0 +1,8 @@
+// index6.js Test indexes on array subelements.
+
+r = db.ed.db.index6;
+r.drop();
+
+r.save( { comments : [ { name : "eliot", foo : 1 } ] } );
+r.ensureIndex( { "comments.name": 1 } );
+assert( r.findOne( { "comments.name": "eliot" } ) );
diff --git a/jstests/core/index7.js b/jstests/core/index7.js
new file mode 100644
index 00000000000..9e3a6c66d11
--- /dev/null
+++ b/jstests/core/index7.js
@@ -0,0 +1,67 @@
+// index7.js Test that we use an index when and only when we expect to.
+
+function index( q ) {
+ assert( q.explain().cursor.match( /^BtreeCursor/ ) , "index assert" );
+}
+
+function noIndex( q ) {
+ assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" );
+}
+
+function start( k, q, rev) {
+ var exp = q.explain().indexBounds;
+ var s = {a:exp.a[rev?1:0][0],b:exp.b[0][0]};
+ assert.eq( k.a, s.a );
+ assert.eq( k.b, s.b );
+}
+function end( k, q, rev) {
+ var exp = q.explain().indexBounds
+ var e = {a:exp.a[rev?1:0][1],b:exp.b[0][1]};
+ assert.eq( k.a, e.a );
+ assert.eq( k.b, e.b );
+}
+function both( k, q ) {
+ start( k, q );
+ end( k, q );
+}
+
+f = db.ed_db_index7;
+f.drop();
+
+f.save( { a : 5 } )
+f.ensureIndex( { a: 1 } );
+index( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { a: 1 } ) );
+noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) );
+f.drop();
+
+f.ensureIndex( { a: 1, b: 1 } );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c );
+
+start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
+start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
+end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
+end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
+
+start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "{", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+start( { a: "az", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "a{", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { a: 1, b: 1 } ) );
+
+both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).hint( { a: 1, b: 1 } ) );
+both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+f.drop();
+f.ensureIndex( { b: 1, a: 1 } );
+both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { b: 1, a: 1 } ) );
diff --git a/jstests/core/index8.js b/jstests/core/index8.js
new file mode 100644
index 00000000000..719ad2dd2cb
--- /dev/null
+++ b/jstests/core/index8.js
@@ -0,0 +1,62 @@
+// Test key uniqueness
+
+t = db.jstests_index8;
+t.drop();
+
+t.ensureIndex( { a: 1 } );
+t.ensureIndex( { b: 1 }, true );
+t.ensureIndex( { c: 1 }, [ false, "cIndex" ] );
+
+checkIndexes = function( num ) {
+// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() );
+ indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } ).toArray();
+ var start = 0;
+ if ( indexes[0].name == "_id_" )
+ start = 1;
+ assert( !indexes[ start ].unique , "A" + num );
+ assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) );
+ assert( !indexes[ start + 2 ].unique , "C" + num );
+ assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num );
+}
+
+checkIndexes( 1 );
+
+t.reIndex();
+checkIndexes( 2 );
+
+t.save( { a: 2, b: 1 } );
+t.save( { a: 2 } );
+assert.eq( 2, t.find().count() );
+
+t.save( { b: 4 } );
+t.save( { b: 4 } );
+assert.eq( 3, t.find().count() );
+assert.eq( 3, t.find().hint( {c:1} ).toArray().length );
+assert.eq( 3, t.find().hint( {b:1} ).toArray().length );
+assert.eq( 3, t.find().hint( {a:1} ).toArray().length );
+
+t.drop();
+t.ensureIndex( { a: 1, b: -1 }, true );
+t.save( { a: 2, b: 3 } );
+t.save( { a: 2, b: 3 } );
+t.save( { a: 2, b: 4 } );
+t.save( { a: 1, b: 3 } );
+assert.eq( 3, t.find().count() );
+
+t.drop();
+t.ensureIndex( { a: 1 }, true );
+t.save( { a: [ 2, 3 ] } );
+t.save( { a: 2 } );
+assert.eq( 1, t.find().count() );
+
+t.drop();
+t.ensureIndex( { a: 1 }, true );
+t.save( { a: 2 } );
+t.save( { a: [ 1, 2, 3 ] } );
+t.save( { a: [ 3, 2, 1 ] } );
+assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length );
+assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length );
+
+assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" );
+assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" );
+
diff --git a/jstests/core/index9.js b/jstests/core/index9.js
new file mode 100644
index 00000000000..04b900949ec
--- /dev/null
+++ b/jstests/core/index9.js
@@ -0,0 +1,25 @@
+t = db.jstests_index9;
+
+t.drop();
+db.createCollection( "jstests_index9" );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index with default collection" );
+t.drop();
+db.createCollection( "jstests_index9", {autoIndexId: true} );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index if autoIndexId: true" );
+
+t.drop();
+db.createCollection( "jstests_index9", {autoIndexId:false} );
+assert.eq( 0, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 0 index if autoIndexId: false" );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+
+t.drop();
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+
+t.drop();
+t.save( {a:1} );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
diff --git a/jstests/core/indexOtherNamespace.js b/jstests/core/indexOtherNamespace.js
new file mode 100644
index 00000000000..7df55188606
--- /dev/null
+++ b/jstests/core/indexOtherNamespace.js
@@ -0,0 +1,27 @@
+// SERVER-8814: Test that only the system.indexes namespace can be used to build indexes.
+
+var otherDB = db.getSiblingDB("indexOtherNS");
+otherDB.dropDatabase();
+
+otherDB.foo.insert({a:1})
+assert.eq(1, otherDB.system.indexes.count());
+assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor);
+
+if (db.getMongo().writeMode() == 'commands') {
+ assert.throws(function() {
+ otherDB.randomNS.system.indexes.insert({ ns: "indexOtherNS.foo",
+ key: { a: 1}, name: "a_1" });
+ });
+}
+else {
+ assert.writeError(otherDB.randomNS.system.indexes.insert({ ns: "indexOtherNS.foo",
+ key: { a: 1 }, name: "a_1"}));
+}
+
+
+
+// Assert that the index didn't actually get built.
+assert.eq(1, otherDB.system.indexes.count());
+assert.eq(null, otherDB.system.namespaces.findOne({name : "indexOtherNS.foo.$a_1"}));
+assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor);
+otherDB.dropDatabase();
diff --git a/jstests/core/indexStatsCommand.js b/jstests/core/indexStatsCommand.js
new file mode 100644
index 00000000000..9c055e37e26
--- /dev/null
+++ b/jstests/core/indexStatsCommand.js
@@ -0,0 +1,88 @@
+db.jstests_commands.drop();
+db.createCollection("jstests_commands");
+
+t = db.jstests_commands;
+
+for (var i = 0; i < 3000; ++i) {
+ t.insert({i: i, d: i % 13});
+}
+
+function testWithIndexVersion(version) {
+ var indexName = 'test_d_' + version;
+ t.ensureIndex({d: 1}, {v: version, name: indexName});
+
+ var result = t.indexStats({index: indexName});
+ if (result["bad cmd"]) {
+ print("storageDetails command not available: skipping");
+ return;
+ }
+
+ assert.commandWorked(result);
+
+ assert(result.index === indexName);
+ assert(result.isIdIndex === false);
+ assert(isObject(result.keyPattern));
+ assert.neq(result.keyPattern, null);
+ assert(isString(result.storageNs));
+ assert(isNumber(result.bucketBodyBytes));
+ assert.eq(result.depth, 1);
+ assert(isObject(result.overall));
+ assert.neq(result.overall, null);
+
+ function checkStats(data) {
+ assert(data.count instanceof NumberLong);
+ assert(isNumber(data.mean));
+ assert(isNumber(data.stddev));
+ assert(isNumber(data.min));
+ assert(isNumber(data.max));
+ }
+
+ function checkAreaStats(data) {
+ assert(isNumber(data.numBuckets));
+
+ assert(isObject(data.keyCount));
+ assert.neq(data.keyCount, null);
+ checkStats(data.keyCount);
+
+ assert(isObject(data.usedKeyCount));
+ assert.neq(data.usedKeyCount, null);
+ checkStats(data.usedKeyCount);
+
+ assert(isObject(data.bsonRatio));
+ assert.neq(data.bsonRatio, null);
+ checkStats(data.bsonRatio);
+
+ assert(isObject(data.keyNodeRatio));
+ assert.neq(data.keyNodeRatio, null);
+ checkStats(data.keyNodeRatio);
+
+ assert(isObject(data.fillRatio));
+ assert.neq(data.fillRatio, null);
+ checkStats(data.fillRatio);
+ }
+
+ assert(isObject(result.overall));
+ checkAreaStats(result.overall);
+
+ assert(result.perLevel instanceof Array);
+ for (var i = 0; i < result.perLevel.length; ++i) {
+ assert(isObject(result.perLevel[i]));
+ checkAreaStats(result.perLevel[i]);
+ }
+
+ result = t.indexStats();
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/index name is required/));
+
+ result = t.indexStats({index: "nonexistent"})
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/index does not exist/));
+
+ result = t.indexStats({index: "_id_", expandNodes: ['string']})
+ assert.commandFailed(result);
+ assert(result.errmsg.match(/expandNodes.*numbers/));
+
+ t.dropIndex(indexName);
+}
+
+[0, 1].map(testWithIndexVersion);
diff --git a/jstests/core/index_arr1.js b/jstests/core/index_arr1.js
new file mode 100644
index 00000000000..d35cb80a83f
--- /dev/null
+++ b/jstests/core/index_arr1.js
@@ -0,0 +1,23 @@
+
+t = db.index_arr1
+t.drop()
+
+t.insert( { _id : 1 , a : 5 , b : [ { x : 1 } ] } )
+t.insert( { _id : 2 , a : 5 , b : [] } )
+t.insert( { _id : 3 , a : 5 } )
+
+assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A1" )
+
+t.ensureIndex( { a : 1 , "b.x" : 1 } )
+
+//t.find().sort( { a : 1 } )._addSpecial( "$returnKey" , 1 ).forEach( printjson )
+//t.find( { a : 5 } ).forEach( printjson )
+
+assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A2" ); // SERVER-1082
+
+
+assert.eq( 2 , t.getIndexes().length , "B1" )
+t.insert( { _id : 4 , a : 5 , b : [] } )
+t.ensureIndex( { a : 1 , "b.a" : 1 , "b.c" : 1 } )
+assert.eq( 3 , t.getIndexes().length , "B2" )
+
diff --git a/jstests/core/index_arr2.js b/jstests/core/index_arr2.js
new file mode 100644
index 00000000000..101655f2ce9
--- /dev/null
+++ b/jstests/core/index_arr2.js
@@ -0,0 +1,51 @@
+NUM = 20;
+M = 5;
+
+t = db.jstests_arr2;
+
+function test( withIndex ){
+ t.drop();
+
+ // insert a bunch of items to force queries to use the index.
+ newObject = {
+ _id : 1,
+ a : [
+ { b : { c : 1 } }
+ ]
+ }
+
+ now = (new Date()).getTime() / 1000;
+ for (created = now - NUM; created <= now; created++ ) {
+ newObject['created'] = created;
+ t.insert(newObject);
+ newObject['_id'] ++;
+ }
+
+ // change the last M items.
+ query = {
+ 'created' : { '$gte' : now - M }
+ }
+
+ Z = t.find( query ).count();
+
+ if ( withIndex ){
+ //t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } )
+ //t.ensureIndex( { created : -1 } )
+ t.ensureIndex( { 'a.b.c' : 1 } , { name : "x" } )
+ }
+
+ var res = t.update(query, { '$set' : { "a.0.b.c" : 0 } } , false , true );
+ assert.eq( Z, res.nMatched, "num updated withIndex:" + withIndex );
+
+ // now see how many were actually updated.
+ query['a.b.c'] = 0;
+
+ count = t.count(query);
+
+ assert.eq( Z , count , "count after withIndex:" + withIndex );
+}
+
+test( false )
+test( true );
+
+
diff --git a/jstests/core/index_big1.js b/jstests/core/index_big1.js
new file mode 100644
index 00000000000..6fbffa4415e
--- /dev/null
+++ b/jstests/core/index_big1.js
@@ -0,0 +1,38 @@
+// check where "key to big" happens
+
+t = db.index_big1;
+
+N = 3200;
+t.drop();
+
+var s = "";
+
+t.ensureIndex( { a : 1 , x : 1 } )
+
+var bulk = t.initializeUnorderedBulkOp();
+for ( i=0; i<N; i++ ) {
+ bulk.insert( { a : i + .5 , x : s } );
+ s += "x";
+}
+assert.writeError(bulk.execute());
+
+assert.eq( 2 , t.getIndexes().length );
+
+flip = -1;
+
+for ( i=0; i<N; i++ ) {
+ var c = t.find( { a : i + .5 } ).count();
+ if ( c == 1 ) {
+ assert.eq( -1 , flip , "flipping : " + i );
+ }
+ else {
+ if ( flip == -1 ) {
+ flip = i;
+ }
+ }
+}
+
+//print(flip);
+//print(flip/1024);
+
+assert.eq( /*v0 index : 797*/1002, flip , "flip changed" );
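The flip point is where the generated compound key { a : <number>, x : <string> }
crosses the index key size limit. Assuming the 1024 byte limit for this index
version (an assumption; the test does not assert the limit itself), the
arithmetic is roughly:

    // 1002 string bytes flip the key over the limit, so the numeric field,
    // type tags, and string length/terminator account for the remaining ~22 bytes
    var limit = 1024, flip = 1002;
    print( "per-key overhead: about " + ( limit - flip ) + " bytes" );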
diff --git a/jstests/core/index_bigkeys.js b/jstests/core/index_bigkeys.js
new file mode 100755
index 00000000000..b0ea66d65f8
--- /dev/null
+++ b/jstests/core/index_bigkeys.js
@@ -0,0 +1,59 @@
+
+t = db.bigkeysidxtest;
+
+var keys = []
+
+var str = "aaaabbbbccccddddeeeeffffgggghhhh";
+
+while ( str.length < 20000 ) {
+ keys.push( str );
+ str = str + str;
+}
+
+function doInsert( order ) {
+ if (order == 1) {
+ for (var i = 0; i < 10; i++) {
+ t.insert({ _id: i, k: keys[i] });
+ }
+ }
+ else {
+ for (var i = 9; i >= 0; i--) {
+ t.insert({ _id: i, k: keys[i] });
+ }
+ }
+}
+
+var expect = null;
+
+function check() {
+ assert(t.validate().valid);
+ assert.eq( 5, t.count() );
+
+ var c = t.find({ k: /^a/ }).count();
+ assert.eq( 5, c );
+}
+
+function runTest( order ) {
+ t.drop();
+ t.ensureIndex({ k: 1 });
+ doInsert( order );
+ check(); // check incremental addition
+
+ t.reIndex();
+ check(); // check bottom up
+
+ t.drop();
+ doInsert( order );
+ assert.eq( 1, t.getIndexes().length );
+ t.ensureIndex({ k: 1 });
+ assert.eq( 1, t.getIndexes().length );
+
+ t.drop();
+ doInsert( order );
+ assert.eq( 1, t.getIndexes().length );
+ t.ensureIndex({ k: 1 }, { background: true });
+ assert.eq( 1, t.getIndexes().length );
+}
+
+runTest( 1 );
+runTest( 2 );
diff --git a/jstests/core/index_bigkeys_update.js b/jstests/core/index_bigkeys_update.js
new file mode 100644
index 00000000000..6bdaf033542
--- /dev/null
+++ b/jstests/core/index_bigkeys_update.js
@@ -0,0 +1,18 @@
+
+bigString = "";
+while ( bigString.length < 16000 )
+ bigString += ".";
+
+t = db.index_bigkeys_update;
+t.drop();
+
+t.insert( { _id : 0, x : "asd" } );
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 1, t.count() );
+
+assert.writeError(t.update( {} , { $set : { x : bigString } } ));
+
+assert.eq( 1, t.count() );
+assert.eq( "asd", t.findOne().x ); // make sure doc is the old version
+assert.eq( "asd", t.findOne( { _id : 0 } ).x ); // make sure doc is the old version
diff --git a/jstests/core/index_bounds_number_edge_cases.js b/jstests/core/index_bounds_number_edge_cases.js
new file mode 100644
index 00000000000..0ab482028ed
--- /dev/null
+++ b/jstests/core/index_bounds_number_edge_cases.js
@@ -0,0 +1,50 @@
+// End-to-end tests on index bounds for numerical values.
+// These should handle numerical extremes
+// such as Number.MAX_VALUE and Infinity.
+
+t = db.indexboundsnumberedgecases;
+
+t.drop();
+
+t.ensureIndex({a: 1});
+
+t.save({a: -Infinity});
+t.save({a: -Number.MAX_VALUE});
+t.save({a: 1});
+t.save({a: Number.MAX_VALUE});
+t.save({a: Infinity});
+
+// index bounds generated by query planner are
+// validated in unit tests
+
+// lte
+
+assert.eq(1, t.find({a: {$lte: -Infinity}}).itcount());
+assert.eq(2, t.find({a: {$lte: -Number.MAX_VALUE}}).itcount());
+assert.eq(3, t.find({a: {$lte: 1}}).itcount());
+assert.eq(4, t.find({a: {$lte: Number.MAX_VALUE}}).itcount());
+assert.eq(5, t.find({a: {$lte: Infinity}}).itcount());
+
+// lt
+
+assert.eq(0, t.find({a: {$lt: -Infinity}}).itcount());
+assert.eq(1, t.find({a: {$lt: -Number.MAX_VALUE}}).itcount());
+assert.eq(2, t.find({a: {$lt: 1}}).itcount());
+assert.eq(3, t.find({a: {$lt: Number.MAX_VALUE}}).itcount());
+assert.eq(4, t.find({a: {$lt: Infinity}}).itcount());
+
+// gt
+
+assert.eq(0, t.find({a: {$gt: Infinity}}).itcount());
+assert.eq(1, t.find({a: {$gt: Number.MAX_VALUE}}).itcount());
+assert.eq(2, t.find({a: {$gt: 1}}).itcount());
+assert.eq(3, t.find({a: {$gt: -Number.MAX_VALUE}}).itcount());
+assert.eq(4, t.find({a: {$gt: -Infinity}}).itcount());
+
+// gte
+
+assert.eq(1, t.find({a: {$gte: Infinity}}).itcount());
+assert.eq(2, t.find({a: {$gte: Number.MAX_VALUE}}).itcount());
+assert.eq(3, t.find({a: {$gte: 1}}).itcount());
+assert.eq(4, t.find({a: {$gte: -Number.MAX_VALUE}}).itcount());
+assert.eq(5, t.find({a: {$gte: -Infinity}}).itcount());
diff --git a/jstests/core/index_check1.js b/jstests/core/index_check1.js
new file mode 100644
index 00000000000..7113dff0877
--- /dev/null
+++ b/jstests/core/index_check1.js
@@ -0,0 +1,31 @@
+
+db.somecollection.drop();
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 1);
+
+db.somecollection.save({a:1});
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 2);
+
+db.somecollection.ensureIndex({a:1});
+
+var z = db.system.namespaces.find({name:/somecollection/}).length();
+assert( z >= 1 , 3 );
+
+if( z == 1 )
+ print("warning: z==1, should only happen with alternate storage engines");
+
+db.somecollection.drop();
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 4);
+
+db.somecollection.save({a:1});
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 5);
+
+db.somecollection.ensureIndex({a:1});
+
+var x = db.system.namespaces.find({name:/somecollection/}).length();
+assert( x == 2 || x == z, 6);
+
+assert(db.somecollection.validate().valid, 7);
diff --git a/jstests/core/index_check2.js b/jstests/core/index_check2.js
new file mode 100644
index 00000000000..eed3b8e42b7
--- /dev/null
+++ b/jstests/core/index_check2.js
@@ -0,0 +1,41 @@
+
+t = db.index_check2;
+t.drop();
+
+for ( var i=0; i<1000; i++ ){
+ var a = [];
+ for ( var j=1; j<5; j++ ){
+ a.push( "tag" + ( i * j % 50 ));
+ }
+ t.save( { num : i , tags : a } );
+}
+
+q1 = { tags : "tag6" };
+q2 = { tags : "tag12" };
+q3 = { tags : { $all : [ "tag6" , "tag12" ] } }
+
+assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
+assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
+assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+
+t.ensureIndex( { tags : 1 } );
+
+assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
+assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
+assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+
+assert.eq( "BtreeCursor tags_1" , t.find( q1 ).explain().cursor , "e1" );
+assert.eq( "BtreeCursor tags_1" , t.find( q2 ).explain().cursor , "e2" );
+assert.eq( "BtreeCursor tags_1" , t.find( q3 ).explain().cursor , "e3" );
+
+scanned1 = t.find(q1).explain().nscanned;
+scanned2 = t.find(q2).explain().nscanned;
+scanned3 = t.find(q3).explain().nscanned;
+
+//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 );
+
+// $all should only need to iterate over one of the tags
+assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" );
+
+exp3 = t.find( q3 ).explain();
+assert.eq( exp3.indexBounds.tags[0][0], exp3.indexBounds.tags[0][1], "$all range not a single key" );
diff --git a/jstests/core/index_check3.js b/jstests/core/index_check3.js
new file mode 100644
index 00000000000..55515aff3f5
--- /dev/null
+++ b/jstests/core/index_check3.js
@@ -0,0 +1,63 @@
+
+
+t = db.index_check3;
+t.drop();
+
+
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+t.save( { a : "z" } );
+
+assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" );
+assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" );
+assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" );
+
+t.drop();
+
+for ( var i=0; i<100; i++ ){
+ var o = { i : i };
+ if ( i % 2 == 0 )
+ o.foo = i;
+ t.save( o );
+}
+
+t.ensureIndex( { foo : 1 } );
+
+//printjson( t.find( { foo : { $lt : 50 } } ).explain() );
+assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" );
+//printjson( t.find( { foo : { $gt : 50 } } ).explain() );
+assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" );
+
+
+t.drop();
+t.save( {i:'a'} );
+for( var i=0; i < 10; ++i ) {
+ t.save( {} );
+}
+
+t.ensureIndex( { i : 1 } );
+
+//printjson( t.find( { i : { $lte : 'a' } } ).explain() );
+assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" );
+//printjson( t.find( { i : { $gte : 'a' } } ).explain() );
+// bug SERVER-99
+assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).count() , "gte a" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" );
+
+t.save( { i : "b" } );
+
+assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" );
diff --git a/jstests/core/index_check5.js b/jstests/core/index_check5.js
new file mode 100644
index 00000000000..eabb929749f
--- /dev/null
+++ b/jstests/core/index_check5.js
@@ -0,0 +1,17 @@
+
+t = db.index_check5
+t.drop();
+
+t.save( { "name" : "Player1" ,
+ "scores" : [{"level" : 1 , "score" : 100},
+ {"level" : 2 , "score" : 50}],
+ "total" : 150 } );
+t.save( { "name" : "Player2" ,
+ "total" : 90 ,
+ "scores" : [ {"level" : 1 , "score" : 90},
+ {"level" : 2 , "score" : 0} ]
+ } );
+
+assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" );
+t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } );
+assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" );
diff --git a/jstests/core/index_check6.js b/jstests/core/index_check6.js
new file mode 100644
index 00000000000..be395fb3d2e
--- /dev/null
+++ b/jstests/core/index_check6.js
@@ -0,0 +1,82 @@
+
+t = db.index_check6;
+t.drop();
+
+t.ensureIndex( { age : 1 , rating : 1 } );
+
+for ( var age=10; age<50; age++ ){
+ for ( var rating=0; rating<10; rating++ ){
+ t.save( { age : age , rating : rating } );
+ }
+}
+
+assert.eq( 10 , t.find( { age : 30 } ).explain().nscanned , "A" );
+assert.eq( 20 , t.find( { age : { $gte : 29 , $lte : 30 } } ).explain().nscanned , "B" );
+assert.eq( 18 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,9] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C1" );
+assert.eq( 23 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C2" );
+assert.eq( 28 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [1,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C3" );
+
+assert.eq( 4 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : 5 } ).hint( {age:1,rating:1} ).explain().nscanned , "C" ); // SERVER-371
+assert.eq( 6 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } } ).hint( {age:1,rating:1} ).explain().nscanned , "D" ); // SERVER-371
+
+assert.eq.automsg( "2", "t.find( { age:30, rating:{ $gte:4, $lte:5} } ).explain().nscanned" );
+
+t.drop();
+
+for ( var a=1; a<10; a++ ){
+ for ( var b=0; b<10; b++ ){
+ for ( var c=0; c<10; c++ ) {
+ t.save( { a:a, b:b, c:c } );
+ }
+ }
+}
+
+function doQuery( count, query, sort, index ) {
+ var nscanned = t.find( query ).hint( index ).sort( sort ).explain().nscanned;
+ assert(Math.abs(count - nscanned) <= 2);
+}
+
+function doTest( sort, index ) {
+ doQuery( 1, { a:5, b:5, c:5 }, sort, index );
+ doQuery( 2, { a:5, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery( 1, { a:5, b:5, c:{$gte:5.5,$lte:6} }, sort, index );
+ doQuery( 1, { a:5, b:5, c:{$gte:5,$lte:5.5} }, sort, index );
+ doQuery( 3, { a:5, b:5, c:{$gte:5,$lte:7} }, sort, index );
+ doQuery( 4, { a:5, b:{$gte:5,$lte:6}, c:5 }, sort, index );
+ if ( sort.b > 0 ) {
+ doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
+ doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
+ } else {
+ doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
+ doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
+ }
+ doQuery( 7, { a:5, b:{$gte:5,$lte:7}, c:5 }, sort, index );
+ doQuery( 4, { a:{$gte:5,$lte:6}, b:5, c:5 }, sort, index );
+ if ( sort.a > 0 ) {
+ doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
+ doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
+ doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ } else {
+ doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
+ doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
+ doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ }
+ doQuery( 7, { a:{$gte:5,$lte:7}, b:5, c:5 }, sort, index );
+ doQuery( 6, { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery( 6, { a:5, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery( 10, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:5 }, sort, index );
+ doQuery( 14, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
+}
+
+for ( var a = -1; a <= 1; a += 2 ) {
+ for( var b = -1; b <= 1; b += 2 ) {
+ for( var c = -1; c <= 1; c += 2 ) {
+ t.dropIndexes();
+ var spec = {a:a,b:b,c:c};
+ t.ensureIndex( spec );
+ doTest( spec, spec );
+ doTest( {a:-a,b:-b,c:-c}, spec );
+ }
+ }
+}
+
diff --git a/jstests/core/index_check7.js b/jstests/core/index_check7.js
new file mode 100644
index 00000000000..1d0aaebba35
--- /dev/null
+++ b/jstests/core/index_check7.js
@@ -0,0 +1,15 @@
+
+t = db.index_check7
+t.drop()
+
+for ( var i=0; i<100; i++ )
+ t.save( { x : i } )
+
+t.ensureIndex( { x : 1 } )
+assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" )
+
+t.ensureIndex( { x : -1 } )
+assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "B" )
+
+assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" );
+
diff --git a/jstests/core/index_check8.js b/jstests/core/index_check8.js
new file mode 100644
index 00000000000..1964ecbe7fc
--- /dev/null
+++ b/jstests/core/index_check8.js
@@ -0,0 +1,21 @@
+
+t = db.index_check8
+t.drop();
+
+t.insert( { a : 1 , b : 1 , c : 1 , d : 1 , e : 1 } )
+t.ensureIndex( { a : 1 , b : 1 , c : 1 } )
+t.ensureIndex({ a: 1, b: 1, d: 1, e: 1 })
+
+// this block could be added to many tests in theory...
+if ((new Date()) % 10 == 0) {
+ var coll = t.toString().substring(db.toString().length + 1);
+ print("compacting " + coll + " before continuing testing");
+ // don't check return code - false for mongos
+ print("ok: " + db.runCommand({ compact: coll, dev: true }));
+}
+
+x = t.find( { a : 1 , b : 1 , d : 1 } ).sort( { e : 1 } ).explain()
+assert( ! x.scanAndOrder , "A : " + tojson( x ) )
+
+x = t.find( { a : 1 , b : 1 , c : 1 , d : 1 } ).sort( { e : 1 } ).explain()
+//assert( ! x.scanAndOrder , "B : " + tojson( x ) )
diff --git a/jstests/core/index_diag.js b/jstests/core/index_diag.js
new file mode 100644
index 00000000000..21840682e7f
--- /dev/null
+++ b/jstests/core/index_diag.js
@@ -0,0 +1,50 @@
+
+t = db.index_diag
+t.drop();
+
+t.ensureIndex( { x : 1 } );
+
+all = []
+ids = []
+xs = []
+
+function r( a ){
+ var n = []
+ for ( var x=a.length-1; x>=0; x-- )
+ n.push( a[x] );
+ return n;
+}
+
+for ( i=1; i<4; i++ ){
+ o = { _id : i , x : -i }
+ t.insert( o );
+ all.push( o );
+ ids.push( { _id : i } );
+ xs.push( { x : -i } );
+}
+
+assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" );
+assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" );
+
+assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" );
+assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" );
+
+assert.eq( ids , t.find().sort( { _id : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B1" )
+assert.eq( r( ids ) , t.find().sort( { _id : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B2" )
+assert.eq( xs , t.find().sort( { x : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B3" )
+assert.eq( r( xs ) , t.find().sort( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" )
+
+assert.eq( r( xs ) , t.find().hint( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B5" )
+
+// SERVER-4981
+t.ensureIndex( { _id : 1 , x : 1 } );
+assert.eq( all ,
+ t.find().hint( { _id : 1 , x : 1 } )._addSpecial( "$returnKey" , true ).toArray()
+ )
+assert.eq( r( all ) ,
+ t.find().hint( { _id : 1 , x : 1 } ).sort( { x : 1 } )
+ ._addSpecial( "$returnKey" , true ).toArray()
+ )
+
+assert.eq( [ {} , {} , {} ],
+ t.find().hint( { $natural : 1 } )._addSpecial( "$returnKey" , true ).toArray() )
diff --git a/jstests/core/index_elemmatch1.js b/jstests/core/index_elemmatch1.js
new file mode 100644
index 00000000000..263eb252364
--- /dev/null
+++ b/jstests/core/index_elemmatch1.js
@@ -0,0 +1,43 @@
+
+t = db.index_elemmatch1
+t.drop()
+
+x = 0
+y = 0
+var bulk = t.initializeUnorderedBulkOp();
+for ( a=0; a<100; a++ ){
+ for ( b=0; b<100; b++ ){
+ bulk.insert( { a : a , b : b % 10 , arr : [ { x : x++ % 10 , y : y++ % 10 } ] } );
+ }
+}
+assert.writeOK(bulk.execute());
+
+t.ensureIndex( { a : 1 , b : 1 } )
+t.ensureIndex( { "arr.x" : 1 , a : 1 } )
+
+assert.eq( 100 , t.find( { a : 55 } ).itcount() , "A1" );
+assert.eq( 10 , t.find( { a : 55 , b : 7 } ).itcount() , "A2" );
+
+q = { a : 55 , b : { $in : [ 1 , 5 , 8 ] } }
+assert.eq( 30 , t.find( q ).itcount() , "A3" )
+
+q.arr = { $elemMatch : { x : 5 , y : 5 } }
+assert.eq( 10 , t.find( q ).itcount() , "A4" )
+
+function nscannedForCursor( explain, cursor ) {
+ plans = explain.allPlans;
+ for( i in plans ) {
+ if ( plans[ i ].cursor == cursor ) {
+ return plans[ i ].nscanned;
+ }
+ }
+ return -1;
+}
+
+assert.eq( t.find(q).itcount(),
+ nscannedForCursor( t.find(q).explain(true), 'BtreeCursor arr.x_1_a_1' ), "A5" );
+
+printjson(t.find(q).explain());
+print("Num results:");
+assert.eq(10, t.find(q).itcount());
+printjson(t.find(q).itcount());
diff --git a/jstests/core/index_filter_commands.js b/jstests/core/index_filter_commands.js
new file mode 100644
index 00000000000..cec2437fff0
--- /dev/null
+++ b/jstests/core/index_filter_commands.js
@@ -0,0 +1,167 @@
+/**
+ * Index Filter commands
+ *
+ * Commands:
+ * - planCacheListFilters
+ * Displays index filters for all query shapes in a collection.
+ *
+ * - planCacheClearFilters
+ * Clears index filter for a single query shape or,
+ * if the query shape is omitted, all filters for the collection.
+ *
+ * - planCacheSetFilter
+ * Sets index filter for a query shape. Overrides existing filter.
+ *
+ * Not a lot of data access in this test suite. Index filter commands
+ * manage a non-persistent mapping in the server from
+ * query shape to a list of index specs.
+ *
+ * The only time we might need to execute a query is to check the plan
+ * cache state. We would do this with the planCacheListPlans command
+ * on the same query shape the index filters apply to.
+ *
+ */
+
+var t = db.jstests_index_filter_commands;
+
+t.drop();
+
+t.save({a: 1});
+
+// Add 2 indexes.
+// 1st index is more efficient.
+// 2nd and 3rd indexes will be used to test index filters.
+var indexA1 = {a: 1};
+var indexA1B1 = {a: 1, b: 1};
+var indexA1C1 = {a: 1, c: 1};
+t.ensureIndex(indexA1);
+t.ensureIndex(indexA1B1);
+t.ensureIndex(indexA1C1);
+
+var queryA1 = {a: 1};
+var projectionA1 = {_id: 0, a: 1};
+var sortA1 = {a: -1};
+
+//
+// Tests for planCacheListFilters, planCacheClearFilters, planCacheSetFilter
+//
+
+// Utility function to list index filters.
+function getFilters() {
+ var res = t.runCommand('planCacheListFilters');
+ print('planCacheListFilters() = ' + tojson(res));
+ assert.commandWorked(res, 'planCacheListFilters failed');
+ assert(res.hasOwnProperty('filters'), 'filters missing from planCacheListFilters result');
+ return res.filters;
+
+}
+
+// Check if key is in plan cache.
+function planCacheContains(shape) {
+ var res = t.runCommand('planCacheListPlans', shape);
+ return res.ok;
+}
+
+// Utility function to list plans for a query.
+function getPlans(shape) {
+ var res = t.runCommand('planCacheListPlans', shape);
+    assert.commandWorked(res, 'planCacheListPlans(' + tojson(shape, '', true) + ') failed');
+ assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
+ tojson(shape, '', true) + ') result');
+ return res.plans;
+}
+
+// It is an error to retrieve index filters on a non-existent collection.
+var missingCollection = db.jstests_index_filter_commands_missing;
+missingCollection.drop();
+assert.commandFailed(missingCollection.runCommand('planCacheListFilters'));
+
+// Retrieve index filters from an empty test collection.
+var filters = getFilters();
+assert.eq(0, filters.length, 'unexpected number of index filters in planCacheListFilters result');
+
+// Check details of winning plan in plan cache before setting index filter.
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+var shape = {query: queryA1, sort: sortA1, projection: projectionA1};
+var planBeforeSetFilter = getPlans(shape)[0];
+print('Winning plan (before setting index filters) = ' + tojson(planBeforeSetFilter));
+// Check filterSet field in plan details
+assert.eq(false, planBeforeSetFilter.filterSet, 'missing or invalid filterSet field in plan details');
+
+// Add index filters for simple query.
+assert.commandWorked(t.runCommand('planCacheSetFilter',
+ {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
+filters = getFilters();
+assert.eq(1, filters.length, 'no change in query settings after successfully setting index filters');
+assert.eq(queryA1, filters[0].query, 'unexpected query in filters');
+assert.eq(sortA1, filters[0].sort, 'unexpected sort in filters');
+assert.eq(projectionA1, filters[0].projection, 'unexpected projection in filters');
+assert.eq(2, filters[0].indexes.length, 'unexpected number of indexes in filters');
+assert.eq(indexA1B1, filters[0].indexes[0], 'unexpected first index');
+assert.eq(indexA1C1, filters[0].indexes[1], 'unexpected second index');
+
+// Plans for query shape should be removed after setting index filter.
+assert(!planCacheContains(shape), 'plan cache for query shape not flushed after updating filter');
+
+// Check details of winning plan in plan cache after setting filter and re-executing query.
+assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
+planAfterSetFilter = getPlans(shape)[0];
+print('Winning plan (after setting index filter) = ' + tojson(planAfterSetFilter));
+// Check filterSet field in plan details
+assert.eq(true, planAfterSetFilter.filterSet, 'missing or invalid filterSet field in plan details');
+
+// Execute query with cursor.hint(). Check that user-provided hint is overridden.
+// Applying the index filters will remove the user requested index from the list
+// of indexes provided to the planner.
+// If the planner still tries to use the user hint, we will get a 'bad hint' error.
+t.find(queryA1, projectionA1).sort(sortA1).hint(indexA1).itcount();
+
+// Clear filters
+assert.commandWorked(t.runCommand('planCacheClearFilters'));
+filters = getFilters();
+assert.eq(0, filters.length, 'filters not cleared after successful planCacheClearFilters command');
+
+// Plans should be removed after clearing filters
+assert(!planCacheContains(shape), 'plan cache for query shape not flushed after clearing filters');
+
+print('Plan details before setting filter = ' + tojson(planBeforeSetFilter.details, '', true));
+print('Plan details after setting filter = ' + tojson(planAfterSetFilter.details, '', true));
+
+//
+// explain.filterSet
+// cursor.explain() should indicate if index filter has been applied.
+// The following 3 runners should always provide a value for 'filterSet':
+// - SingleSolutionRunner
+// - MultiPlanRunner
+// - CachedPlanRunner
+//
+
+// No filter set.
+
+t.getPlanCache().clear();
+// SingleSolutionRunner
+assert.eq(false, t.find({z: 1}).explain().filterSet,
+ 'missing or invalid filterSet field in SingleSolutionRunner explain');
+// MultiPlanRunner
+assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
+ 'missing or invalid filterSet field in MultiPlanRunner explain');
+// CachedPlanRunner
+assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
+ 'missing or invalid filterSet field in CachedPlanRunner explain');
+
+// Add index filter.
+assert.commandWorked(t.runCommand('planCacheSetFilter',
+ {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
+// Index filter with non-existent index key pattern to force use of single solution runner.
+assert.commandWorked(t.runCommand('planCacheSetFilter', {query: {z: 1}, indexes: [{z: 1}]}));
+
+t.getPlanCache().clear();
+// SingleSolutionRunner
+assert.eq(true, t.find({z: 1}).explain().filterSet,
+ 'missing or invalid filterSet field in SingleSolutionRunner explain');
+// MultiPlanRunner
+assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
+ 'missing or invalid filterSet field in MultiPlanRunner explain');
+// CachedPlanRunner
+assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet,
+ 'missing or invalid filterSet field in CachedPlanRunner explain');
diff --git a/jstests/core/index_many.js b/jstests/core/index_many.js
new file mode 100644
index 00000000000..f14f3c3e0fc
--- /dev/null
+++ b/jstests/core/index_many.js
@@ -0,0 +1,51 @@
+/* test using lots of indexes on one collection */
+
+t = db.many;
+
+function f() {
+
+ t.drop();
+ db.many2.drop();
+
+ t.save({ x: 9, y : 99 });
+ t.save({ x: 19, y : 99 });
+
+ x = 2;
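+ // Each pass builds a one-field key pattern on a numeric field name,
+ // e.g. { "2" : 1 }, { "3" : 1 }, ..., until the per-collection index limit is hit.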
+ var lastErr = null;
+ while (x < 70) {
+ patt = {};
+ patt[x] = 1;
+ if (x == 20)
+ patt = { x: 1 };
+ if (x == 64)
+ patt = { y: 1 };
+ lastErr = t.ensureIndex(patt);
+ x++;
+ }
+
+ assert.writeError(lastErr, "should have got an error 'too many indexes'");
+
+ // 64 is the limit currently
+ lim = t.getIndexes().length;
+ if (lim != 64) {
+ print("# of indexes should be 64 but is : " + lim);
+ return;
+ }
+ assert(lim == 64, "not 64 indexes");
+
+ assert(t.find({ x: 9 }).length() == 1, "b");
+ assert(t.find({ x: 9 }).explain().cursor.match(/Btree/), "not using index?");
+
+ assert(t.find({ y: 99 }).length() == 2, "y idx");
+ assert(t.find({ y: 99 }).explain().cursor.match(/Btree/), "not using y index?");
+
+ /* check that renamecollection remaps all the indexes right */
+ assert(t.renameCollection("many2").ok, "rename failed");
+ assert(t.find({ x: 9 }).length() == 0, "many2a");
+ assert(db.many2.find({ x: 9 }).length() == 1, "many2b");
+ assert(t.find({ y: 99 }).length() == 0, "many2c");
+ assert(db.many2.find({ y: 99 }).length() == 2, "many2d");
+
+}
+
+f();
diff --git a/jstests/core/index_many2.js b/jstests/core/index_many2.js
new file mode 100644
index 00000000000..f113b8b87ed
--- /dev/null
+++ b/jstests/core/index_many2.js
@@ -0,0 +1,31 @@
+
+t = db.index_many2;
+t.drop()
+
+t.save( { x : 1 } )
+
+assert.eq( 1 , t.getIndexKeys().length , "A1" )
+
+function make( n ){
+ var x = {}
+ x["x"+n] = 1;
+ return x;
+}
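+// e.g. make(3) yields the key pattern { x3 : 1 }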
+
+for ( i=1; i<1000; i++ ){
+ t.ensureIndex( make(i) );
+}
+
+assert.eq( 64 , t.getIndexKeys().length , "A2" )
+
+
+num = t.getIndexKeys().length
+
+t.dropIndex( make(num-1) )
+assert.eq( num - 1 , t.getIndexKeys().length , "B0" )
+
+t.ensureIndex( { z : 1 } )
+assert.eq( num , t.getIndexKeys().length , "B1" )
+
+t.dropIndex( "*" );
+assert.eq( 1 , t.getIndexKeys().length , "C1" )
diff --git a/jstests/core/index_sparse1.js b/jstests/core/index_sparse1.js
new file mode 100644
index 00000000000..fbcc20a9217
--- /dev/null
+++ b/jstests/core/index_sparse1.js
@@ -0,0 +1,45 @@
+
+t = db.index_sparse1;
+t.drop();
+
+t.insert( { _id : 1 , x : 1 } )
+t.insert( { _id : 2 , x : 2 } )
+t.insert( { _id : 3 , x : 2 } )
+t.insert( { _id : 4 } )
+t.insert( { _id : 5 } )
+
+assert.eq( 5 , t.count() , "A1" )
+assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "A2" )
+
+t.ensureIndex( { x : 1 } )
+assert.eq( 2 , t.getIndexes().length , "B1" )
+assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "B2" )
+t.dropIndex( { x : 1 } )
+assert.eq( 1 , t.getIndexes().length , "B3" )
+
+t.ensureIndex( { x : 1 } , { sparse : 1 } )
+assert.eq( 2 , t.getIndexes().length , "C1" )
+assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "C2" )
+t.dropIndex( { x : 1 } )
+assert.eq( 1 , t.getIndexes().length , "C3" )
+
+// -- sparse & unique
+
+t.remove( { _id : 2 } )
+
+// test that we can't create a unique index without sparse: the two docs missing x would both index null
+assert.writeError( t.ensureIndex( { x : 1 } , { unique : 1 } ));
+assert.eq( 1 , t.getIndexes().length , "D2" )
+
+
+t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } )
+assert.eq( 2 , t.getIndexes().length , "E1" )
+t.dropIndex( { x : 1 } )
+assert.eq( 1 , t.getIndexes().length , "E3" )
+
+
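+// x:2 now duplicates the value on {_id:3}, so even a sparse unique index cannot be built.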
+t.insert( { _id : 2 , x : 2 } )
+t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } )
+assert.eq( 1 , t.getIndexes().length , "F1" )
+
+
diff --git a/jstests/core/index_sparse2.js b/jstests/core/index_sparse2.js
new file mode 100644
index 00000000000..56a59db3711
--- /dev/null
+++ b/jstests/core/index_sparse2.js
@@ -0,0 +1,23 @@
+t = db.index_sparse2;
+t.drop();
+
+t.insert( { _id : 1 , x : 1 , y : 1 } )
+t.insert( { _id : 2 , x : 2 } )
+t.insert( { _id : 3 } )
+
+t.ensureIndex( { x : 1 , y : 1 } )
+assert.eq( 2 , t.getIndexes().length , "A1" )
+assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "A2 count()" )
+assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "A2 itcount()" )
+t.dropIndex( { x : 1 , y : 1 } )
+assert.eq( 1 , t.getIndexes().length , "A3" )
+
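+// {_id:3} has neither x nor y, so a sparse index omits it; count() and itcount()
+// below must nevertheless report all three documents.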
+t.ensureIndex( { x : 1 , y : 1 } , { sparse : 1 } )
+assert.eq( 2 , t.getIndexes().length , "B1" )
+assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "B2 count()" )
+assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "B2 itcount()" )
+t.dropIndex( { x : 1 , y : 1 } )
+assert.eq( 1 , t.getIndexes().length , "B3" )
+
+
+
diff --git a/jstests/core/indexa.js b/jstests/core/indexa.js
new file mode 100644
index 00000000000..7602183adb2
--- /dev/null
+++ b/jstests/core/indexa.js
@@ -0,0 +1,22 @@
+// unique index constraint test for updates
+// case where object doesn't grow tested here
+
+t = db.indexa;
+t.drop();
+
+t.ensureIndex( { x:1 }, true );
+
+t.insert( { 'x':'A' } );
+t.insert( { 'x':'B' } );
+t.insert( { 'x':'A' } );
+
+assert.eq( 2 , t.count() , "indexa 1" );
+
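+// this update should fail: another document already has x:'A' and the index is unique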
+t.update( {x:'B'}, { x:'A' } );
+
+a = t.find().toArray();
+u = Array.unique( a.map( function(z){ return z.x } ) );
+assert.eq( 2 , t.count() , "indexa 2" );
+
+assert( a.length == u.length , "unique index update is broken" );
+
diff --git a/jstests/core/indexapi.js b/jstests/core/indexapi.js
new file mode 100644
index 00000000000..3e0b70ff15f
--- /dev/null
+++ b/jstests/core/indexapi.js
@@ -0,0 +1,48 @@
+
+t = db.indexapi;
+t.drop();
+
+key = { x : 1 };
+
+c = { ns : t._fullName , key : key , name : t._genIndexName( key ) };
+assert.eq( c , t._indexSpec( { x : 1 } ) , "A" );
+
+c.name = "bob";
+assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" );
+
+c.name = t._genIndexName( key );
+assert.eq( c , t._indexSpec( { x : 1 } ) , "C" );
+
+c.unique = true;
+assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" );
+assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" );
+assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" );
+
+c.dropDups = true;
+assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" );
+assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "H" );
+
+t.ensureIndex( { x : 1 } , { unique : true } );
+idx = t.getIndexes();
+assert.eq( 2 , idx.length , "M1" );
+assert.eq( key , idx[1].key , "M2" );
+assert( idx[1].unique , "M3" );
+
+t.drop();
+t.ensureIndex( { x : 1 } , { unique : 1 } );
+idx = t.getIndexes();
+assert.eq( 2 , idx.length , "M1" );
+assert.eq( key , idx[1].key , "M2" );
+assert( idx[1].unique , "M3" );
+//printjson( idx );
+
+// Test that attempting to create index in an invalid namespace fails.
+if (db.getMongo().writeMode() == 'commands') {
+ assert.throws(function() {
+ db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } );
+ });
+}
+else {
+ assert.writeError(db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } ));
+}
+
diff --git a/jstests/core/indexb.js b/jstests/core/indexb.js
new file mode 100644
index 00000000000..d7d2e8c9f05
--- /dev/null
+++ b/jstests/core/indexb.js
@@ -0,0 +1,29 @@
+// unique index test for a case where the object grows
+// and must move
+
+// see indexa.js for the test case for an update with dup id check
+// when it doesn't move
+
+
+t = db.indexb;
+t.drop();
+t.ensureIndex({a:1},true);
+
+t.insert({a:1});
+
+x = { a : 2 };
+t.save(x);
+
+{
+
+ assert( t.count() == 2, "count wrong B");
+
+ x.a = 1;
+ x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
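+ // enlarging the doc forces the save below to move it on disk, exercising the dup check on move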
+ t.save(x); // should fail, not unique.
+
+ assert( t.count() == 2,"count wrong" );
+ assert( t.find({a:1}).count() == 1,"bfail1" );
+ assert( t.find({a:2}).count() == 1,"bfail2" );
+
+}
diff --git a/jstests/core/indexc.js b/jstests/core/indexc.js
new file mode 100644
index 00000000000..b099e2d2823
--- /dev/null
+++ b/jstests/core/indexc.js
@@ -0,0 +1,20 @@
+
+t = db.indexc;
+t.drop();
+
+for ( var i=1; i<100; i++ ){
+ var d = new Date( ( new Date() ).getTime() + i );
+ t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } );
+ if ( i == 51 )
+ mid = d;
+}
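+// 'mid' is the ts of the 51st doc, so exactly 50 docs (i = 1..50) sort strictly below it.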
+
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" );
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" );
+
+t.ensureIndex( { ts : 1 , cats : 1 } );
+t.ensureIndex( { cats : 1 } );
+
+// multi-key bug was firing here (related to getsetdup()):
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" );
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" );
diff --git a/jstests/core/indexd.js b/jstests/core/indexd.js
new file mode 100644
index 00000000000..33246ad9812
--- /dev/null
+++ b/jstests/core/indexd.js
@@ -0,0 +1,10 @@
+
+t = db.indexd;
+t.drop();
+
+t.save( { a : 1 } );
+t.ensureIndex( { a : 1 } );
+assert.throws( function(){ db.indexd.$_id_.drop(); } );
+assert( t.drop() );
+
+//db.indexd.$_id_.remove({});
diff --git a/jstests/core/indexe.js b/jstests/core/indexe.js
new file mode 100644
index 00000000000..e84322c6510
--- /dev/null
+++ b/jstests/core/indexe.js
@@ -0,0 +1,22 @@
+
+t = db.indexe;
+t.drop();
+
+var num = 1000;
+
+for ( i=0; i<num; i++){
+ t.insert( { a : "b" } );
+}
+
+assert.eq( num , t.find().count() ,"A1" );
+assert.eq( num , t.find( { a : "b" } ).count() , "B1" );
+assert.eq( num , t.find( { a : "b" } ).itcount() , "C1" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( num , t.find().count() ,"A2" );
+assert.eq( num , t.find().sort( { a : 1 } ).count() , "A2a" );
+assert.eq( num , t.find( { a : "b" } ).count() , "B2" );
+assert.eq( num , t.find( { a : "b" } ).itcount() , "C3" );
+
+t.drop();
diff --git a/jstests/core/indexes_on_indexes.js b/jstests/core/indexes_on_indexes.js
new file mode 100644
index 00000000000..807c1e25bfd
--- /dev/null
+++ b/jstests/core/indexes_on_indexes.js
@@ -0,0 +1,19 @@
+// ensure an index cannot be created on system.indexes
+t = db.getSiblingDB("indexes_on_indexes");
+printjson(t.system.indexes.getIndexes());
+assert.eq(t.system.indexes.getIndexes().length, 0);
+print("trying via ensureIndex");
+assert.throws(function() { t.system.indexes.ensureIndex({_id:1}); });
+printjson(t.system.indexes.getIndexes());
+assert.eq(t.system.indexes.getIndexes().length, 0);
+print("trying via createIndex");
+assert.throws(function() { t.system.indexes.createIndex({_id:1}); });
+printjson(t.system.indexes.getIndexes());
+assert.eq(t.system.indexes.getIndexes().length, 0);
+print("trying via direct insertion");
+assert.throws(function() {
+ t.system.indexes.insert({ v:1,
+ key:{_id:1},
+ ns: "indexes_on_indexes.system.indexes",
+ name:"wontwork"});
+});
+printjson(t.system.indexes.getIndexes());
+assert.eq(t.system.indexes.getIndexes().length, 0);
diff --git a/jstests/core/indexf.js b/jstests/core/indexf.js
new file mode 100644
index 00000000000..d65e7b1c898
--- /dev/null
+++ b/jstests/core/indexf.js
@@ -0,0 +1,13 @@
+
+t = db.indexf;
+t.drop();
+
+t.ensureIndex( { x : 1 } );
+
+t.save( { x : 2 } );
+t.save( { y : 3 } );
+t.save( { x : 4 } );
+
+assert.eq( 2 , t.findOne( { x : 2 } ).x , "A1" );
+assert.eq( 3 , t.findOne( { x : null } ).y , "A2" );
+assert.eq( 4 , t.findOne( { x : 4 } ).x , "A3" );
diff --git a/jstests/core/indexg.js b/jstests/core/indexg.js
new file mode 100644
index 00000000000..a0709fd6568
--- /dev/null
+++ b/jstests/core/indexg.js
@@ -0,0 +1,13 @@
+
+f = db.jstests_indexg;
+f.drop();
+f.save( { list: [1, 2] } );
+f.save( { list: [1, 3] } );
+
+doit = function() {
+ assert.eq( 1, f.count( { list: { $in: [1], $ne: 3 } } ) );
+ assert.eq( 1, f.count( { list: { $in: [1], $not:{$in: [3] } } } ) );
+}
+doit();
+f.ensureIndex( { list: 1 } );
+doit(); \ No newline at end of file
diff --git a/jstests/core/indexh.js b/jstests/core/indexh.js
new file mode 100644
index 00000000000..ac2a93ec62b
--- /dev/null
+++ b/jstests/core/indexh.js
@@ -0,0 +1,41 @@
+// This should get skipped when testing replication
+
+t = db.jstests_indexh;
+
+function debug( t ) {
+ print( t );
+}
+
+function extraDebug() {
+// printjson( db.stats() );
+// db.printCollectionStats();
+}
+
+// index extent freeing
+t.drop();
+t.save( {} );
+var s1 = db.stats().dataSize;
+debug( "s1: " + s1 );
+extraDebug();
+t.ensureIndex( {a:1} );
+var s2 = db.stats().dataSize;
+debug( "s2: " + s2 );
+assert.automsg( "s1 < s2" );
+t.dropIndex( {a:1} );
+var s3 = db.stats().dataSize;
+debug( "s3: " + s3 );
+extraDebug();
+assert.eq.automsg( "s1", "s3" );
+
+// index node freeing
+t.drop();
+t.ensureIndex( {a:1} );
+for( i = 'a'; i.length < 500; i += 'a' ) {
+ t.save( {a:i} );
+}
+var s4 = db.stats().indexSize;
+debug( "s4: " + s4 );
+t.remove( {} );
+var s5 = db.stats().indexSize;
+debug( "s5: " + s5 );
+assert.automsg( "s5 < s4" ); \ No newline at end of file
diff --git a/jstests/core/indexi.js b/jstests/core/indexi.js
new file mode 100644
index 00000000000..bfd9d13e15c
--- /dev/null
+++ b/jstests/core/indexi.js
@@ -0,0 +1,34 @@
+// Test that client cannot access index namespaces SERVER-4276.
+
+t = db.jstests_indexi;
+t.drop();
+
+idx = db.jstests_indexi.$_id_;
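+// '$_id_' addresses the _id index's namespace (db.jstests_indexi.$_id_) directly.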
+
+var expectWriteError = function(func) {
+ if (db.getMongo().writeMode() == 'commands') {
+ assert.throws(func);
+ }
+ else {
+ assert.writeError(func());
+ }
+};
+
+// Test that accessing the index namespace fails.
+function checkFailingOperations() {
+ assert.throws(function() { idx.find().itcount(); });
+ expectWriteError(function() { return idx.insert({ x: 1 }); });
+ expectWriteError(function() { return idx.update({ x: 1 }, { x: 2 }); });
+ expectWriteError(function() { return idx.remove({ x: 1 }); });
+ assert.commandFailed( idx.runCommand( 'compact' ) );
+ assert.writeError(idx.ensureIndex({ x: 1 }));
+}
+
+// Check with base collection not present.
+// TODO: SERVER-4276
+//checkFailingOperations();
+t.save({});
+
+// Check with base collection present.
+checkFailingOperations();
+
diff --git a/jstests/core/indexj.js b/jstests/core/indexj.js
new file mode 100644
index 00000000000..6d8ac85c972
--- /dev/null
+++ b/jstests/core/indexj.js
@@ -0,0 +1,44 @@
+// SERVER-726: scanning the empty open interval (4, 5) should not examine the neighboring keys, so nscanned stays 0.
+
+t = db.jstests_indexj;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "A" );
+
+t.drop();
+t.ensureIndex( {a:1} );
+t.save( {a:4} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "B" );
+
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "C" );
+
+t.save( {a:4} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "D" );
+
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "E" );
+
+t.drop();
+t.ensureIndex( {a:1,b:1} );
+t.save( { a:1,b:1 } );
+t.save( { a:1,b:2 } );
+t.save( { a:2,b:1 } );
+t.save( { a:2,b:2 } );
+
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+t.save( {a:1,b:1} );
+t.save( {a:1,b:1} );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+assert.eq( 1, t.find( { a:{$in:[1,1.9]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned );
+assert.eq( 1, t.find( { a:{$in:[1.1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+t.save( { a:1,b:1.5} );
+assert.eq( 3, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).hint( {a:1,b:1} ).explain().nscanned, "F" );
diff --git a/jstests/core/indexl.js b/jstests/core/indexl.js
new file mode 100644
index 00000000000..666586db7a7
--- /dev/null
+++ b/jstests/core/indexl.js
@@ -0,0 +1,27 @@
+// Check nonoverlapping $in/$all with multikeys SERVER-2165
+
+t = db.jstests_indexl;
+
+function test(t) {
+ t.save( {a:[1,2]} );
+ assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
+ assert.eq( 1, t.count( {a:{$all:[2],$in:[1]}} ) );
+ assert.eq( 1, t.count( {a:{$in:[2],$all:[1]}} ) );
+ assert.eq( 1, t.count( {a:{$in:[1],$all:[2]}} ) );
+ assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
+ t.save({a:[3,4]})
+ t.save({a:[2,3]})
+ t.save({a:[1,2,3,4]})
+ assert.eq( 2, t.count( {a:{$in:[2],$all:[1]}} ) );
+ assert.eq( 1, t.count( {a:{$in:[3],$all:[1,2]}} ) );
+ assert.eq( 1, t.count( {a:{$in:[1],$all:[3]}} ) );
+ assert.eq( 2, t.count( {a:{$in:[2,3],$all:[1]}} ) );
+ assert.eq( 1, t.count( {a:{$in:[4],$all:[2,3]}} ) );
+ assert.eq( 3, t.count( {a:{$in:[1,3],$all:[2]}} ) );
+}
+
+t.drop();
+test(t);
+t.drop();
+t.ensureIndex( {a:1} );
+test(t); \ No newline at end of file
diff --git a/jstests/core/indexm.js b/jstests/core/indexm.js
new file mode 100644
index 00000000000..6b31ea628cd
--- /dev/null
+++ b/jstests/core/indexm.js
@@ -0,0 +1,38 @@
+// Check proper range combinations with or clauses overlapping non or portion of query SERVER-2302
+
+t = db.jstests_indexm;
+t.drop();
+
+t.save( { a : [ { x : 1 } , { x : 2 } , { x : 3 } , { x : 4 } ] } )
+
+function test(){
+ assert.eq( 1, t.count(
+ {
+ a : { x : 1 } ,
+ "$or" : [ { a : { x : 2 } } , { a : { x : 3 } } ]
+ }
+ ) );
+}
+
+// Baseline: the query matches the document even without an index.
+test();
+
+// Now create an index.
+t.ensureIndex({"a":1});
+test();
+// SERVER-3105
+//assert( !t.find(
+// {
+// a : { x : 1 } ,
+// "$or" : [ { a : { x : 2 } } , { a : { x : 3 } } ]
+// }
+// ).explain().clauses );
+
+// Now create a different index.
+t.dropIndexes();
+t.ensureIndex({"a.x":1});
+test();
+
+// Drop the indexes.
+t.dropIndexes();
+test(); \ No newline at end of file
diff --git a/jstests/core/indexn.js b/jstests/core/indexn.js
new file mode 100644
index 00000000000..9abb001eed9
--- /dev/null
+++ b/jstests/core/indexn.js
@@ -0,0 +1,49 @@
+// Test "impossible match" queries, or queries that will always have
+// an empty result set.
+
+t = db.jstests_indexn;
+t.drop();
+
+function checkImpossibleMatch( explain ) {
+ printjson(explain);
+ assert.eq( 0, explain.n );
+}
+
+t.save( {a:1,b:[1,2]} );
+
+t.ensureIndex( {a:1} );
+t.ensureIndex( {b:1} );
+
+// {a:1} is a single key index, so no matches are possible for this query
+assert.eq( 0, t.count( {a:{$gt:5,$lt:0}} ) );
+checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0}} ).explain() );
+
+assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:2} ) );
+checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0},b:2} ).explain() );
+
+assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:{$gt:0,$lt:5}} ) );
+checkImpossibleMatch( t.find( {a:{$gt:5,$lt:0},b:{$gt:0,$lt:5}} ).explain() );
+
+// One clause of an $or is an "impossible match"
+printjson( t.find( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ).explain() )
+assert.eq( 1, t.count( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ) );
+checkImpossibleMatch( t.find( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ).explain().clauses[ 0 ] );
+
+// One clause of an $or is an "impossible match"; original order of the $or
+// does not matter.
+printjson( t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ).explain() )
+assert.eq( 1, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ) );
+checkImpossibleMatch( t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ).explain().clauses[ 0 ] );
+
+t.save( {a:2} );
+
+// Descriptive test: query system sees this query as an $or where
+// one clause of the $or is an $and. The $and bounds get intersected
+// forming a clause with empty index bounds. The union of the $or bounds
+// produces the two point intervals [1, 1] and [2, 2].
+assert.eq( 2, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}},{a:2}]} ) );
+explain = t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}},{a:2}]} ).explain();
+printjson( explain )
+assert.eq( 2, explain.clauses.length );
+checkImpossibleMatch( explain.clauses[ 0 ] );
+assert.eq( [[1, 1], [2,2]], explain.clauses[ 1 ].indexBounds.a );
diff --git a/jstests/core/indexo.js b/jstests/core/indexo.js
new file mode 100644
index 00000000000..c9c9424b4ef
--- /dev/null
+++ b/jstests/core/indexo.js
@@ -0,0 +1,15 @@
+// Tests that an index cannot be created with dropDups=true on
+// a capped collection.
+
+var coll = db.jstests_indexo;
+coll.drop();
+
+// Can create a dropDups index on non-capped collection.
+assert.writeOK(coll.ensureIndex({x: 1}, {dropDups: true}));
+coll.drop();
+
+// Cannot create a dropDups index on a capped collection.
+db.createCollection("jstests_indexy", {capped: true, size: 1024});
+coll = db.jstests_indexy;
+assert.writeError(coll.ensureIndex({x: 1}, {dropDups: true}));
+coll.drop();
diff --git a/jstests/core/indexp.js b/jstests/core/indexp.js
new file mode 100644
index 00000000000..cd72eeeebf5
--- /dev/null
+++ b/jstests/core/indexp.js
@@ -0,0 +1,23 @@
+// Tests that SERVER-11374 is fixed: specifically, that indexes cannot
+// be created on fields that begin with '$' but are not part of DBRefs
+// and that indexes cannot be created on field paths that contain empty
+// fields.
+
+var coll = db.jstests_indexp;
+
+// Empty field checks.
+assert.writeError(coll.ensureIndex({ 'a..b': 1 }));
+assert.writeError(coll.ensureIndex({ '.a': 1 }));
+assert.writeError(coll.ensureIndex({ 'a.': 1 }));
+assert.writeError(coll.ensureIndex({ '.': 1 }));
+assert.writeError(coll.ensureIndex({ '': 1 }));
+assert.writeOK(coll.ensureIndex({ 'a.b': 1 }));
+
+// '$'-prefixed field checks.
+assert.writeError(coll.ensureIndex({ '$a': 1 }));
+assert.writeError(coll.ensureIndex({ 'a.$b': 1 }));
+assert.writeError(coll.ensureIndex({ '$db': 1 }));
+assert.writeOK(coll.ensureIndex({ 'a$ap': 1 })); // $ in middle is ok
+assert.writeOK(coll.ensureIndex({ 'a.$id': 1 })); // $id/$db/$ref are exceptions (DBRef fields)
+
+coll.dropIndexes();
diff --git a/jstests/core/indexq.js b/jstests/core/indexq.js
new file mode 100644
index 00000000000..38cd27b8798
--- /dev/null
+++ b/jstests/core/indexq.js
@@ -0,0 +1,20 @@
+// Test multikey range preference for a fully included range SERVER-958.
+
+t = db.jstests_indexq;
+t.drop();
+
+t.ensureIndex( {a:1} );
+// Single key index
+assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] );
+assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a );
+
+t.save( {a:[1,3]} );
+// Now with multi key index.
+
+// SERVER-12281: We should know that >4 is worse than >5
+// assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] );
+
+printjson(t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain())
+
+// SERVER-12281: We should know that in[1,2] is better than in[1,2,3].
+// assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a );
diff --git a/jstests/core/indexr.js b/jstests/core/indexr.js
new file mode 100644
index 00000000000..c3eecd045c8
--- /dev/null
+++ b/jstests/core/indexr.js
@@ -0,0 +1,44 @@
+// Check multikey index cases with parallel nested fields SERVER-958.
+
+t = db.jstests_indexr;
+t.drop();
+
+// Check without indexes.
+t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
+assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+
+// Check with single key indexes.
+t.remove({});
+t.ensureIndex( {'a.b':1,'a.c':1} );
+t.ensureIndex( {a:1,'a.c':1} );
+assert.eq( 0, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 0, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
+assert.eq( 4, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
+
+t.save( { a: { b: 3, c: 3 } } );
+assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 1, t.count( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
+assert.eq( 4, t.find( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] );
+
+// Check with multikey indexes.
+t.remove({});
+t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
+
+assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
+assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] );
+assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] );
+
+// Check reverse direction.
+assert.eq( 1, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).itcount() );
+assert.eq( 1, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).itcount() );
+
+assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).explain().indexBounds['a.c'] );
+assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).explain().indexBounds['a.c'] );
+
+// Check second field is constrained if first is not.
+assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {'a.b':1,'a.c':1} ).itcount() );
+assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {a:1,'a.c':1} ).itcount() );
diff --git a/jstests/core/indexs.js b/jstests/core/indexs.js
new file mode 100644
index 00000000000..609f912affe
--- /dev/null
+++ b/jstests/core/indexs.js
@@ -0,0 +1,21 @@
+// Test an index key generation issue where parent and nested fields appear in the same index and an array contains a subobject. SERVER-3005.
+
+t = db.jstests_indexs;
+
+t.drop();
+t.ensureIndex( {a:1} );
+t.save( { a: [ { b: 3 } ] } );
+assert.eq( 1, t.count( { a:{ b:3 } } ) );
+
+t.drop();
+t.ensureIndex( {a:1,'a.b':1} );
+t.save( { a: { b: 3 } } );
+assert.eq( 1, t.count( { a:{ b:3 } } ) );
+ib = t.find( { a:{ b:3 } } ).explain().indexBounds;
+
+t.drop();
+t.ensureIndex( {a:1,'a.b':1} );
+t.save( { a: [ { b: 3 } ] } );
+assert.eq( ib, t.find( { a:{ b:3 } } ).explain().indexBounds );
+assert.eq( 1, t.find( { a:{ b:3 } } ).explain().nscanned );
+assert.eq( 1, t.count( { a:{ b:3 } } ) );
diff --git a/jstests/core/indext.js b/jstests/core/indext.js
new file mode 100644
index 00000000000..e418dc2e959
--- /dev/null
+++ b/jstests/core/indext.js
@@ -0,0 +1,21 @@
+// Sparse indexes with arrays SERVER-3216
+
+t = db.jstests_indext;
+t.drop();
+
+t.ensureIndex( {'a.b':1}, {sparse:true} );
+t.save( {a:[]} );
+t.save( {a:1} );
+assert.eq( 0, t.find().hint( {'a.b':1} ).itcount() );
+assert.eq( 0, t.find().hint( {'a.b':1} ).explain().nscanned );
+
+t.ensureIndex( {'a.b':1,'a.c':1}, {sparse:true} );
+t.save( {a:[]} );
+t.save( {a:1} );
+assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
+assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned );
+
+t.save( {a:[{b:1}]} );
+t.save( {a:1} );
+assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
+assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned );
diff --git a/jstests/core/indexu.js b/jstests/core/indexu.js
new file mode 100644
index 00000000000..9031d827bf4
--- /dev/null
+++ b/jstests/core/indexu.js
@@ -0,0 +1,108 @@
+// Test index key generation with duplicate values addressed by array index and
+// object field. SERVER-2902
+
+t = db.jstests_indexu;
+t.drop();
+
+var dupDoc = {a:[{'0':1}]}; // There are two 'a.0' fields in this doc.
+var dupDoc2 = {a:[{'1':1},'c']};
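+// 'a.1' is likewise duplicated in dupDoc2: element 1 ('c') and field '1' of element 0.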
+var noDupDoc = {a:[{'1':1}]};
+
+// Test that we can't index dupDoc.
+assert.writeOK( t.save( dupDoc ));
+assert.writeError(t.ensureIndex( {'a.0':1} ));
+
+t.remove({});
+assert.writeOK(t.ensureIndex( {'a.0':1} ));
+assert.writeError( t.save( dupDoc ));
+
+// Test that we can't index dupDoc2.
+t.drop();
+assert.writeOK(t.save( dupDoc2 ));
+assert.writeError(t.ensureIndex( {'a.1':1} ));
+
+t.remove({});
+assert.writeOK(t.ensureIndex( {'a.1':1} ));
+assert.writeError(t.save( dupDoc2 ));
+
+// Test that we can index dupDoc with a different index.
+t.drop();
+t.ensureIndex( {'a.b':1} );
+assert.writeOK(t.save( dupDoc ));
+
+// Test number field starting with hyphen.
+t.drop();
+t.ensureIndex( {'a.-1':1} );
+assert.writeOK(t.save( {a:[{'-1':1}]} ));
+
+// Test number field starting with zero.
+t.drop();
+t.ensureIndex( {'a.00':1} );
+assert.writeOK( t.save( {a:[{'00':1}]} ));
+
+// Test multiple array indexes
+t.drop();
+t.ensureIndex( {'a.0':1,'a.1':1} );
+assert.writeOK( t.save( {a:[{'1':1}]} ));
+assert.writeError( t.save( {a:[{'1':1},4]} ));
+
+// Test that we can index noDupDoc.
+t.drop();
+t.save( noDupDoc );
+assert.writeOK(t.ensureIndex( {'a.0':1} ));
+assert.writeOK(t.ensureIndex( {'a.1':1} ));
+
+t.drop();
+t.ensureIndex( {'a.0':1} );
+t.ensureIndex( {'a.1':1} );
+assert.writeOK(t.save( noDupDoc ));
+
+// Test that we can query noDupDoc.
+assert.eq( 1, t.find( {'a.1':1} ).hint( {'a.1':1} ).itcount() );
+assert.eq( 1, t.find( {'a.1':1} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {'a.0':1} ).itcount() );
+assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {$natural:1} ).itcount() );
+
+// Check multiple nested array fields.
+t.drop();
+t.save( {a:[[1]]} );
+assert.writeOK(t.ensureIndex( {'a.0.0':1} ));
+assert.eq( 1, t.find( {'a.0.0':1} ).hint( {$natural:1} ).itcount() );
+assert.eq( 1, t.find( {'a.0.0':1} ).hint( {'a.0.0':1} ).itcount() );
+
+// Check where there is a duplicate for a partially addressed field but not for a fully addressed field.
+t.drop();
+t.save( {a:[[1],{'0':1}]} );
+assert.writeError(t.ensureIndex( {'a.0.0':1} ));
+
+// Check where there is a duplicate for a fully addressed field.
+t.drop();
+assert.writeOK( t.save( {a:[[1],{'0':[1]}]} ));
+assert.writeError(t.ensureIndex( {'a.0.0':1} ));
+
+// Two ways of addressing parse to an array.
+t.drop();
+t.save( {a:[{'0':1}]} );
+assert.writeError(t.ensureIndex( {'a.0.0':1} ));
+
+// Test several key depths - with same arrays being found.
+t.drop();
+t.save( {a:[{'0':[{'0':1}]}]} );
+assert.writeError(t.ensureIndex( {'a.0.0.0.0.0.0':1} ));
+assert.writeError(t.ensureIndex( {'a.0.0.0.0.0':1} ));
+assert.writeError(t.ensureIndex( {'a.0.0.0.0':1} ));
+assert.writeError(t.ensureIndex( {'a.0.0.0':1} ));
+assert.writeError(t.ensureIndex( {'a.0.0':1} ));
+assert.writeError(t.ensureIndex( {'a.0':1} ));
+assert.writeOK(t.ensureIndex( {'a':1} ));
+
+// Two prefixes extract docs, but one terminates extraction before array.
+t.drop();
+t.save( {a:[{'0':{'c':[]}}]} );
+assert.writeError(t.ensureIndex( {'a.0.c':1} ));
+
+t.drop();
+t.save( {a:[[{'b':1}]]} );
+assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
+t.ensureIndex( {'a.0.b':1} );
+assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
diff --git a/jstests/core/indexv.js b/jstests/core/indexv.js
new file mode 100644
index 00000000000..334ec432d74
--- /dev/null
+++ b/jstests/core/indexv.js
@@ -0,0 +1,18 @@
+// Check null key generation.
+
+t = db.jstests_indexv;
+t.drop();
+
+t.ensureIndex( {'a.b':1} );
+
+t.save( {a:[{},{b:1}]} );
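+// The empty subdocument yields a null key for 'a.b', so the null query matches once
+// while scanning a single index key.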
+var e = t.find( {'a.b':null} ).explain();
+assert.eq( 1, e.n );
+assert.eq( 1, e.nscanned );
+
+t.drop();
+t.ensureIndex( {'a.b.c':1} );
+t.save( {a:[{b:[]},{b:{c:1}}]} );
+var e = t.find( {'a.b.c':null} ).explain();
+assert.eq( 0, e.n );
+assert.eq( 1, e.nscanned );
diff --git a/jstests/core/indexw.js b/jstests/core/indexw.js
new file mode 100644
index 00000000000..bd7c75b8b08
--- /dev/null
+++ b/jstests/core/indexw.js
@@ -0,0 +1,15 @@
+// Check that v0 keys are generated for v0 indexes SERVER-3375
+
+t = db.jstests_indexw;
+t.drop();
+
+t.save( {a:[]} );
+assert.eq( 1, t.count( {a:[]} ) );
+t.ensureIndex( {a:1} );
+assert.eq( 1, t.count( {a:[]} ) );
+t.dropIndexes();
+
+// The count result is incorrect - just checking here that v0 key generation is used.
+t.ensureIndex( {a:1}, {v:0} );
+// QUERY_MIGRATION: WE GET THIS RIGHT...BY CHANCE?
+// assert.eq( 0, t.count( {a:[]} ) );
diff --git a/jstests/core/insert1.js b/jstests/core/insert1.js
new file mode 100644
index 00000000000..e4ce6a4e10e
--- /dev/null
+++ b/jstests/core/insert1.js
@@ -0,0 +1,34 @@
+t = db.insert1;
+t.drop();
+
+var o = {a:1};
+t.insert(o);
+var doc = t.findOne();
+assert.eq(1, doc.a);
+assert(doc._id != null, tojson(doc));
+
+t.drop();
+o = {a:2, _id:new ObjectId()};
+var id = o._id;
+t.insert(o);
+doc = t.findOne();
+assert.eq(2, doc.a);
+assert.eq(id, doc._id);
+
+t.drop();
+o = {a:3, _id:"asdf"};
+id = o._id;
+t.insert(o);
+doc = t.findOne();
+assert.eq(3, doc.a);
+assert.eq(id, doc._id);
+
+t.drop();
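+// null, unlike undefined (see insert_id_undefined.js), is an allowed _id value.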
+o = {a:4, _id:null};
+t.insert(o);
+doc = t.findOne();
+assert.eq(4, doc.a);
+assert.eq(null, doc._id, tojson(doc));
+
+var stats = db.runCommand({ collstats: "insert1" });
+assert(stats.paddingFactor == 1.0);
diff --git a/jstests/core/insert2.js b/jstests/core/insert2.js
new file mode 100644
index 00000000000..8ce4b25c25f
--- /dev/null
+++ b/jstests/core/insert2.js
@@ -0,0 +1,13 @@
+// Create a new connection object so it won't affect the global connection when we modify
+// its settings.
+var conn = new Mongo(db.getMongo().host);
+conn._skipValidation = true;
+conn.forceWriteMode(db.getMongo().writeMode());
+
+t = conn.getDB(db.getName()).insert2;
+t.drop();
+
+assert.isnull( t.findOne() , "A" )
+assert.writeError(t.insert( { z : 1 , $inc : { x : 1 } } , 0, true ));
+assert.isnull( t.findOne() , "B" )
+
diff --git a/jstests/core/insert_id_undefined.js b/jstests/core/insert_id_undefined.js
new file mode 100644
index 00000000000..77d7d983549
--- /dev/null
+++ b/jstests/core/insert_id_undefined.js
@@ -0,0 +1,5 @@
+// ensure a document with _id undefined cannot be saved
+t = db.insert_id_undefined;
+t.drop();
+t.insert({_id:undefined});
+assert.eq(t.count(), 0);
diff --git a/jstests/core/insert_illegal_doc.js b/jstests/core/insert_illegal_doc.js
new file mode 100644
index 00000000000..2b4d326e9ce
--- /dev/null
+++ b/jstests/core/insert_illegal_doc.js
@@ -0,0 +1,22 @@
+// SERVER-12185: Do not allow insertion or update of docs which will fail the
+// "parallel indexing of arrays" test
+var coll = db.insert_illegal_doc;
+coll.drop();
+coll.ensureIndex({a: 1, b: 1});
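+// A compound index cannot cover two array fields in one document ("parallel arrays").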
+
+// test upsert
+coll.update({}, {_id: 1, a: [1, 2, 3], b: [4, 5, 6]}, true);
+assert.gleErrorCode(db, 10088);
+assert.eq(0, coll.find().itcount(), "should not be a doc");
+
+// test insert
+coll.insert({_id: 1, a: [1, 2, 3], b: [4, 5, 6]});
+assert.gleErrorCode(db, 10088);
+assert.eq(0, coll.find().itcount(), "should not be a doc");
+
+// test update
+coll.insert({_id: 1});
+assert.gleSuccess(db, "insert failed");
+coll.update({_id: 1}, {$set : { a : [1, 2, 3], b: [4, 5, 6]}});
+assert.gleErrorCode(db, 10088);
+assert.eq(undefined, coll.findOne().a, "update should have failed");
diff --git a/jstests/core/insert_long_index_key.js b/jstests/core/insert_long_index_key.js
new file mode 100644
index 00000000000..6379c36fb4a
--- /dev/null
+++ b/jstests/core/insert_long_index_key.js
@@ -0,0 +1,10 @@
+t = db.insert_long_index_key;
+t.drop();
+
+var s = new Array(2000).toString();
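+// a string of 1,999 commas, longer than the maximum index key size, so the
+// second insert below is rejected and only one document remains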
+t.ensureIndex( { x : 1 } );
+
+t.insert({ x: 1 });
+t.insert({ x: s });
+
+assert.eq( 1, t.count() );
diff --git a/jstests/core/ismaster.js b/jstests/core/ismaster.js
new file mode 100644
index 00000000000..0c385b02d7c
--- /dev/null
+++ b/jstests/core/ismaster.js
@@ -0,0 +1,28 @@
+var res = db.isMaster();
+// check that the fields that should be there are there and have proper values
+assert( res.maxBsonObjectSize &&
+ isNumber(res.maxBsonObjectSize) &&
+ res.maxBsonObjectSize > 0, "maxBsonObjectSize possibly missing:" + tojson(res));
+assert( res.maxMessageSizeBytes &&
+ isNumber(res.maxMessageSizeBytes) &&
+ res.maxBsonObjectSize > 0, "maxMessageSizeBytes possibly missing:" + tojson(res));
+assert( res.maxWriteBatchSize &&
+ isNumber(res.maxWriteBatchSize) &&
+ res.maxWriteBatchSize > 0, "maxWriteBatchSize possibly missing:" + tojson(res));
+assert(res.ismaster, "ismaster missing or false:" + tojson(res));
+assert(res.localTime, "localTime possibly missing:" + tojson(res));
+var unwantedFields = ["setName", "setVersion", "secondary", "hosts", "passives", "arbiters",
+ "primary", "aribterOnly", "passive", "slaveDelay", "hidden", "tags",
+ "buildIndexes", "me"];
+// check that the fields that shouldn't be there are not there
+var badFields = [];
+for (field in res) {
+ if (!res.hasOwnProperty(field)){
+ continue;
+ }
+ if (Array.contains(unwantedFields, field)) {
+ badFields.push(field);
+ }
+}
+assert(badFields.length === 0, "\nthe result:\n" + tojson(res)
+ + "\ncontained fields it shouldn't have: " + badFields);