From 5595b945603b0712c537787e31e6da661c424fee Mon Sep 17 00:00:00 2001 From: Randolph Tan Date: Tue, 14 Jan 2014 14:09:42 -0500 Subject: SERVER-12127 migrate js tests to jscore suite when not related to writes Moved test jstest/[a-i].js -> jstests/core/ and made changes to comply with write command api --- jstests/all.js | 47 --- jstests/all2.js | 86 ------ jstests/all3.js | 28 -- jstests/all4.js | 30 -- jstests/all5.js | 28 -- jstests/and.js | 85 ------ jstests/and2.js | 27 -- jstests/and3.js | 67 ----- jstests/andor.js | 99 ------- jstests/apitest_db.js | 77 ----- jstests/apitest_dbcollection.js | 115 -------- jstests/apply_ops1.js | 66 ----- jstests/apply_ops2.js | 71 ----- jstests/array1.js | 14 - jstests/array3.js | 8 - jstests/array4.js | 30 -- jstests/array_match1.js | 31 -- jstests/array_match2.js | 20 -- jstests/array_match3.js | 13 - jstests/array_match4.js | 30 -- jstests/arrayfind1.js | 40 --- jstests/arrayfind2.js | 29 -- jstests/arrayfind3.js | 16 -- jstests/arrayfind4.js | 22 -- jstests/arrayfind5.js | 23 -- jstests/arrayfind6.js | 26 -- jstests/arrayfind7.js | 52 ---- jstests/arrayfind8.js | 175 ------------ jstests/arrayfind9.js | 34 --- jstests/arrayfinda.js | 21 -- jstests/auth1.js | 54 ---- jstests/auth2.js | 9 - jstests/auth_copydb.js | 19 -- jstests/autoid.js | 11 - jstests/bad_index_plugin.js | 11 - jstests/basic1.js | 21 -- jstests/basic2.js | 16 -- jstests/basic3.js | 45 --- jstests/basic4.js | 12 - jstests/basic5.js | 6 - jstests/basic6.js | 8 - jstests/basic7.js | 11 - jstests/basic8.js | 11 - jstests/basic9.js | 19 -- jstests/basica.js | 33 --- jstests/basicb.js | 7 - jstests/basicc.js | 21 -- jstests/batch_size.js | 45 --- jstests/bench_test1.js | 37 --- jstests/bench_test2.js | 49 ---- jstests/bench_test3.js | 27 -- jstests/big_object1.js | 54 ---- jstests/binData.js | 14 - jstests/block_check_supported.js | 118 -------- jstests/bulk_insert.js | 22 -- jstests/capped.js | 11 - jstests/capped1.js | 11 - jstests/capped2.js | 62 ---- 
jstests/capped3.js | 45 --- jstests/capped5.js | 40 --- jstests/capped6.js | 109 ------- jstests/capped7.js | 89 ------ jstests/capped8.js | 108 ------- jstests/capped9.js | 28 -- jstests/capped_empty.js | 24 -- jstests/capped_max.js | 29 -- jstests/capped_server2639.js | 27 -- jstests/capped_server7543.js | 11 - jstests/cappeda.js | 33 --- jstests/check_shard_index.js | 141 --------- jstests/collmod.js | 82 ------ jstests/compact.js | 76 ----- jstests/compact2.js | 52 ---- jstests/compactPreservePadding.js | 26 -- jstests/connection_status.js | 27 -- jstests/connection_string_validation.js | 106 ------- jstests/constructors.js | 314 --------------------- jstests/copydb.js | 20 -- jstests/core/all.js | 47 +++ jstests/core/all2.js | 86 ++++++ jstests/core/all3.js | 28 ++ jstests/core/all4.js | 28 ++ jstests/core/all5.js | 28 ++ jstests/core/and.js | 85 ++++++ jstests/core/and2.js | 27 ++ jstests/core/and3.js | 67 +++++ jstests/core/andor.js | 99 +++++++ jstests/core/apitest_db.js | 77 +++++ jstests/core/apitest_dbcollection.js | 115 ++++++++ jstests/core/apply_ops1.js | 66 +++++ jstests/core/apply_ops2.js | 71 +++++ jstests/core/array1.js | 14 + jstests/core/array3.js | 8 + jstests/core/array4.js | 30 ++ jstests/core/array_match1.js | 31 ++ jstests/core/array_match2.js | 20 ++ jstests/core/array_match3.js | 13 + jstests/core/array_match4.js | 30 ++ jstests/core/arrayfind1.js | 40 +++ jstests/core/arrayfind2.js | 29 ++ jstests/core/arrayfind3.js | 16 ++ jstests/core/arrayfind4.js | 22 ++ jstests/core/arrayfind5.js | 23 ++ jstests/core/arrayfind6.js | 26 ++ jstests/core/arrayfind7.js | 52 ++++ jstests/core/arrayfind8.js | 175 ++++++++++++ jstests/core/arrayfind9.js | 34 +++ jstests/core/arrayfinda.js | 21 ++ jstests/core/auth1.js | 54 ++++ jstests/core/auth2.js | 9 + jstests/core/auth_copydb.js | 19 ++ jstests/core/autoid.js | 11 + jstests/core/bad_index_plugin.js | 11 + jstests/core/basic1.js | 21 ++ jstests/core/basic2.js | 16 ++ jstests/core/basic3.js | 45 +++ 
jstests/core/basic4.js | 12 + jstests/core/basic5.js | 6 + jstests/core/basic6.js | 8 + jstests/core/basic7.js | 11 + jstests/core/basic8.js | 11 + jstests/core/basic9.js | 19 ++ jstests/core/basica.js | 33 +++ jstests/core/basicb.js | 6 + jstests/core/basicc.js | 21 ++ jstests/core/batch_size.js | 45 +++ jstests/core/bench_test1.js | 37 +++ jstests/core/bench_test2.js | 48 ++++ jstests/core/bench_test3.js | 27 ++ jstests/core/big_object1.js | 55 ++++ jstests/core/binData.js | 14 + jstests/core/block_check_supported.js | 118 ++++++++ jstests/core/bulk_insert.js | 22 ++ jstests/core/capped.js | 11 + jstests/core/capped1.js | 11 + jstests/core/capped2.js | 62 ++++ jstests/core/capped3.js | 45 +++ jstests/core/capped5.js | 40 +++ jstests/core/capped6.js | 109 +++++++ jstests/core/capped7.js | 89 ++++++ jstests/core/capped8.js | 108 +++++++ jstests/core/capped9.js | 27 ++ jstests/core/capped_empty.js | 24 ++ jstests/core/capped_max.js | 29 ++ jstests/core/capped_server2639.js | 27 ++ jstests/core/capped_server7543.js | 11 + jstests/core/cappeda.js | 32 +++ jstests/core/check_shard_index.js | 141 +++++++++ jstests/core/collmod.js | 82 ++++++ jstests/core/compact.js | 76 +++++ jstests/core/compact2.js | 52 ++++ jstests/core/compactPreservePadding.js | 26 ++ jstests/core/connection_status.js | 27 ++ jstests/core/connection_string_validation.js | 106 +++++++ jstests/core/constructors.js | 313 ++++++++++++++++++++ jstests/core/copydb.js | 20 ++ jstests/core/count.js | 25 ++ jstests/core/count10.js | 59 ++++ jstests/core/count2.js | 28 ++ jstests/core/count3.js | 26 ++ jstests/core/count4.js | 17 ++ jstests/core/count5.js | 30 ++ jstests/core/count6.js | 61 ++++ jstests/core/count7.js | 25 ++ jstests/core/count9.js | 28 ++ jstests/core/count_hint.js | 20 ++ jstests/core/counta.js | 14 + jstests/core/countb.js | 11 + jstests/core/countc.js | 124 ++++++++ jstests/core/coveredIndex1.js | 64 +++++ jstests/core/coveredIndex2.js | 18 ++ jstests/core/coveredIndex3.js | 54 ++++ 
jstests/core/coveredIndex4.js | 40 +++ jstests/core/coveredIndex5.js | 70 +++++ jstests/core/covered_index_compound_1.js | 45 +++ jstests/core/covered_index_geo_1.js | 18 ++ jstests/core/covered_index_geo_2.js | 22 ++ jstests/core/covered_index_negative_1.js | 61 ++++ jstests/core/covered_index_simple_1.js | 55 ++++ jstests/core/covered_index_simple_2.js | 43 +++ jstests/core/covered_index_simple_3.js | 57 ++++ jstests/core/covered_index_simple_id.js | 42 +++ jstests/core/covered_index_sort_1.js | 34 +++ jstests/core/covered_index_sort_2.js | 17 ++ jstests/core/covered_index_sort_3.js | 16 ++ jstests/core/create_indexes.js | 83 ++++++ jstests/core/currentop.js | 79 ++++++ jstests/core/cursor1.js | 20 ++ jstests/core/cursor2.js | 24 ++ jstests/core/cursor3.js | 35 +++ jstests/core/cursor4.js | 47 +++ jstests/core/cursor5.js | 36 +++ jstests/core/cursor6.js | 100 +++++++ jstests/core/cursor7.js | 42 +++ jstests/core/cursora.js | 49 ++++ jstests/core/cursorb.js | 17 ++ jstests/core/datasize.js | 35 +++ jstests/core/datasize2.js | 27 ++ jstests/core/datasize3.js | 34 +++ jstests/core/date1.js | 17 ++ jstests/core/date2.js | 13 + jstests/core/date3.js | 31 ++ jstests/core/db.js | 11 + jstests/core/dbadmin.js | 105 +++++++ jstests/core/dbcase.js | 27 ++ jstests/core/dbcase2.js | 9 + jstests/core/dbhash.js | 58 ++++ jstests/core/dbhash2.js | 22 ++ jstests/core/dbref1.js | 10 + jstests/core/dbref2.js | 20 ++ jstests/core/dbref3.js | 45 +++ jstests/core/delx.js | 30 ++ jstests/core/depth_limit.js | 56 ++++ jstests/core/distinct1.js | 40 +++ jstests/core/distinct2.js | 13 + jstests/core/distinct3.js | 35 +++ jstests/core/distinct_array1.js | 91 ++++++ jstests/core/distinct_index1.js | 72 +++++ jstests/core/distinct_index2.js | 41 +++ jstests/core/distinct_speed1.js | 26 ++ jstests/core/drop.js | 25 ++ jstests/core/drop2.js | 52 ++++ jstests/core/drop3.js | 25 ++ jstests/core/drop_index.js | 20 ++ jstests/core/dropdb.js | 25 ++ jstests/core/dropdb_race.js | 41 +++ 
jstests/core/elemMatchProjection.js | 265 +++++++++++++++++ jstests/core/error2.js | 21 ++ jstests/core/error5.js | 8 + jstests/core/eval0.js | 8 + jstests/core/eval1.js | 17 ++ jstests/core/eval2.js | 28 ++ jstests/core/eval3.js | 21 ++ jstests/core/eval4.js | 23 ++ jstests/core/eval5.js | 23 ++ jstests/core/eval6.js | 15 + jstests/core/eval7.js | 3 + jstests/core/eval8.js | 19 ++ jstests/core/eval9.js | 22 ++ jstests/core/eval_nolock.js | 16 ++ jstests/core/evala.js | 9 + jstests/core/evalb.js | 40 +++ jstests/core/evalc.js | 25 ++ jstests/core/evald.js | 97 +++++++ jstests/core/evale.js | 5 + jstests/core/evalf.js | 27 ++ jstests/core/exists.js | 49 ++++ jstests/core/exists2.js | 16 ++ jstests/core/exists3.js | 21 ++ jstests/core/exists4.js | 20 ++ jstests/core/exists5.js | 33 +++ jstests/core/exists6.js | 79 ++++++ jstests/core/exists7.js | 21 ++ jstests/core/exists8.js | 76 +++++ jstests/core/exists9.js | 41 +++ jstests/core/existsa.js | 114 ++++++++ jstests/core/existsb.js | 76 +++++ jstests/core/explain1.js | 48 ++++ jstests/core/explain2.js | 27 ++ jstests/core/explain3.js | 23 ++ jstests/core/explain4.js | 68 +++++ jstests/core/explain5.js | 38 +++ jstests/core/explain6.js | 25 ++ jstests/core/explain7.js | 181 ++++++++++++ jstests/core/explain8.js | 24 ++ jstests/core/explain9.js | 24 ++ jstests/core/explain_batch_size.js | 19 ++ jstests/core/explaina.js | 45 +++ jstests/core/explainb.js | 46 +++ jstests/core/extent.js | 11 + jstests/core/extent2.js | 34 +++ jstests/core/filemd5.js | 11 + jstests/core/find1.js | 54 ++++ jstests/core/find2.js | 16 ++ jstests/core/find3.js | 10 + jstests/core/find4.js | 26 ++ jstests/core/find5.js | 51 ++++ jstests/core/find6.js | 41 +++ jstests/core/find7.js | 8 + jstests/core/find8.js | 27 ++ jstests/core/find9.js | 28 ++ jstests/core/find_and_modify.js | 38 +++ jstests/core/find_and_modify2.js | 16 ++ jstests/core/find_and_modify3.js | 21 ++ jstests/core/find_and_modify4.js | 55 ++++ 
jstests/core/find_and_modify_server6226.js | 7 + jstests/core/find_and_modify_server6254.js | 10 + jstests/core/find_and_modify_server6582.js | 18 ++ jstests/core/find_and_modify_server6588.js | 22 ++ jstests/core/find_and_modify_server6659.js | 7 + jstests/core/find_and_modify_server6909.js | 21 ++ jstests/core/find_and_modify_server6993.js | 9 + jstests/core/find_and_modify_server7660.js | 18 ++ jstests/core/find_and_modify_where.js | 10 + jstests/core/find_dedup.js | 35 +++ jstests/core/find_size.js | 26 ++ jstests/core/finda.js | 106 +++++++ jstests/core/fm1.js | 12 + jstests/core/fm2.js | 9 + jstests/core/fm3.js | 37 +++ jstests/core/fm4.js | 16 ++ jstests/core/fsync.js | 21 ++ jstests/core/fts1.js | 29 ++ jstests/core/fts2.js | 24 ++ jstests/core/fts3.js | 22 ++ jstests/core/fts4.js | 22 ++ jstests/core/fts5.js | 22 ++ jstests/core/fts_blog.js | 26 ++ jstests/core/fts_blogwild.js | 40 +++ jstests/core/fts_enabled.js | 5 + jstests/core/fts_explain.js | 18 ++ jstests/core/fts_index.js | 110 ++++++++ jstests/core/fts_mix.js | 159 +++++++++++ jstests/core/fts_partition1.js | 23 ++ jstests/core/fts_partition_no_multikey.js | 13 + jstests/core/fts_phrase.js | 25 ++ jstests/core/fts_proj.js | 20 ++ jstests/core/fts_projection.js | 99 +++++++ jstests/core/fts_querylang.js | 93 ++++++ jstests/core/fts_score_sort.js | 28 ++ jstests/core/fts_spanish.js | 30 ++ jstests/core/geo1.js | 37 +++ jstests/core/geo10.js | 15 + jstests/core/geo2.js | 40 +++ jstests/core/geo3.js | 77 +++++ jstests/core/geo4.js | 11 + jstests/core/geo5.js | 18 ++ jstests/core/geo6.js | 24 ++ jstests/core/geo7.js | 20 ++ jstests/core/geo8.js | 13 + jstests/core/geo9.js | 28 ++ jstests/core/geo_2d_explain.js | 29 ++ jstests/core/geo_2d_with_geojson_point.js | 20 ++ jstests/core/geo_allowedcomparisons.js | 95 +++++++ jstests/core/geo_array0.js | 26 ++ jstests/core/geo_array1.js | 38 +++ jstests/core/geo_array2.js | 161 +++++++++++ jstests/core/geo_borders.js | 162 +++++++++++ jstests/core/geo_box1.js 
| 43 +++ jstests/core/geo_box1_noindex.js | 32 +++ jstests/core/geo_box2.js | 52 ++++ jstests/core/geo_box3.js | 36 +++ jstests/core/geo_center_sphere1.js | 98 +++++++ jstests/core/geo_center_sphere2.js | 160 +++++++++++ jstests/core/geo_circle1.js | 43 +++ jstests/core/geo_circle1_noindex.js | 29 ++ jstests/core/geo_circle2.js | 26 ++ jstests/core/geo_circle2a.js | 37 +++ jstests/core/geo_circle3.js | 28 ++ jstests/core/geo_circle4.js | 31 ++ jstests/core/geo_circle5.js | 28 ++ jstests/core/geo_distinct.js | 14 + jstests/core/geo_exactfetch.js | 17 ++ jstests/core/geo_fiddly_box.js | 46 +++ jstests/core/geo_fiddly_box2.js | 32 +++ jstests/core/geo_group.js | 37 +++ jstests/core/geo_haystack1.js | 59 ++++ jstests/core/geo_haystack2.js | 60 ++++ jstests/core/geo_haystack3.js | 28 ++ jstests/core/geo_invalid_polygon.js | 29 ++ jstests/core/geo_mapreduce.js | 56 ++++ jstests/core/geo_mapreduce2.js | 36 +++ jstests/core/geo_max.js | 49 ++++ jstests/core/geo_mindistance.js | 214 ++++++++++++++ jstests/core/geo_mindistance_boundaries.js | 124 ++++++++ jstests/core/geo_multikey0.js | 26 ++ jstests/core/geo_multikey1.js | 19 ++ jstests/core/geo_multinest0.js | 60 ++++ jstests/core/geo_multinest1.js | 36 +++ jstests/core/geo_near_random1.js | 12 + jstests/core/geo_near_random2.js | 21 ++ jstests/core/geo_nearwithin.js | 27 ++ jstests/core/geo_or.js | 62 ++++ jstests/core/geo_poly_edge.js | 22 ++ jstests/core/geo_poly_line.js | 17 ++ jstests/core/geo_polygon1.js | 73 +++++ jstests/core/geo_polygon1_noindex.js | 46 +++ jstests/core/geo_polygon2.js | 263 +++++++++++++++++ jstests/core/geo_polygon3.js | 54 ++++ jstests/core/geo_queryoptimizer.js | 27 ++ jstests/core/geo_regex0.js | 18 ++ jstests/core/geo_s2cursorlimitskip.js | 68 +++++ jstests/core/geo_s2dedupnear.js | 11 + jstests/core/geo_s2descindex.js | 64 +++++ jstests/core/geo_s2disjoint_holes.js | 81 ++++++ jstests/core/geo_s2dupe_points.js | 71 +++++ jstests/core/geo_s2edgecases.js | 40 +++ jstests/core/geo_s2exact.js | 
21 ++ jstests/core/geo_s2holesameasshell.js | 44 +++ jstests/core/geo_s2index.js | 114 ++++++++ jstests/core/geo_s2indexoldformat.js | 28 ++ jstests/core/geo_s2indexversion1.js | 150 ++++++++++ jstests/core/geo_s2intersection.js | 141 +++++++++ jstests/core/geo_s2largewithin.js | 45 +++ jstests/core/geo_s2meridian.js | 108 +++++++ jstests/core/geo_s2multi.js | 46 +++ jstests/core/geo_s2near.js | 84 ++++++ jstests/core/geo_s2nearComplex.js | 268 ++++++++++++++++++ jstests/core/geo_s2near_equator_opposite.js | 31 ++ jstests/core/geo_s2nearcorrect.js | 12 + jstests/core/geo_s2nearwithin.js | 41 +++ jstests/core/geo_s2nongeoarray.js | 26 ++ jstests/core/geo_s2nonstring.js | 22 ++ jstests/core/geo_s2nopoints.js | 7 + jstests/core/geo_s2oddshapes.js | 138 +++++++++ jstests/core/geo_s2ordering.js | 52 ++++ jstests/core/geo_s2overlappingpolys.js | 213 ++++++++++++++ jstests/core/geo_s2polywithholes.js | 46 +++ jstests/core/geo_s2selfintersectingpoly.js | 11 + jstests/core/geo_s2sparse.js | 113 ++++++++ jstests/core/geo_s2twofields.js | 65 +++++ jstests/core/geo_s2validindex.js | 26 ++ jstests/core/geo_s2within.js | 36 +++ jstests/core/geo_small_large.js | 158 +++++++++++ jstests/core/geo_sort1.js | 22 ++ jstests/core/geo_uniqueDocs.js | 40 +++ jstests/core/geo_uniqueDocs2.js | 80 ++++++ jstests/core/geo_update.js | 37 +++ jstests/core/geo_update1.js | 36 +++ jstests/core/geo_update2.js | 39 +++ jstests/core/geo_update_btree.js | 28 ++ jstests/core/geo_update_btree2.js | 71 +++++ jstests/core/geo_update_dedup.js | 60 ++++ jstests/core/geo_withinquery.js | 15 + jstests/core/geoa.js | 12 + jstests/core/geob.js | 35 +++ jstests/core/geoc.js | 24 ++ jstests/core/geod.js | 14 + jstests/core/geoe.js | 32 +++ jstests/core/geof.js | 19 ++ jstests/core/geonear_cmd_input_validation.js | 119 ++++++++ jstests/core/geonear_validate.js | 8 + jstests/core/getlog1.js | 24 ++ jstests/core/getlog2.js | 46 +++ jstests/core/group1.js | 64 +++++ jstests/core/group2.js | 38 +++ 
jstests/core/group3.js | 43 +++ jstests/core/group4.js | 45 +++ jstests/core/group5.js | 38 +++ jstests/core/group6.js | 32 +++ jstests/core/group7.js | 47 +++ jstests/core/group_empty.js | 8 + jstests/core/grow_hash_table.js | 42 +++ jstests/core/hashindex1.js | 94 ++++++ jstests/core/hashtest1.js | 78 +++++ jstests/core/hint1.js | 16 ++ jstests/core/hostinfo.js | 33 +++ jstests/core/id1.js | 16 ++ jstests/core/idhack.js | 43 +++ jstests/core/in.js | 24 ++ jstests/core/in2.js | 33 +++ jstests/core/in3.js | 11 + jstests/core/in4.js | 42 +++ jstests/core/in5.js | 56 ++++ jstests/core/in6.js | 13 + jstests/core/in8.js | 23 ++ jstests/core/in9.js | 35 +++ jstests/core/ina.js | 15 + jstests/core/inb.js | 19 ++ jstests/core/inc-SERVER-7446.js | 39 +++ jstests/core/inc1.js | 32 +++ jstests/core/inc2.js | 22 ++ jstests/core/inc3.js | 16 ++ jstests/core/index1.js | 24 ++ jstests/core/index10.js | 32 +++ jstests/core/index13.js | 147 ++++++++++ jstests/core/index2.js | 40 +++ jstests/core/index3.js | 16 ++ jstests/core/index4.js | 33 +++ jstests/core/index5.js | 24 ++ jstests/core/index6.js | 8 + jstests/core/index7.js | 67 +++++ jstests/core/index8.js | 62 ++++ jstests/core/index9.js | 25 ++ jstests/core/indexOtherNamespace.js | 27 ++ jstests/core/indexStatsCommand.js | 88 ++++++ jstests/core/index_arr1.js | 23 ++ jstests/core/index_arr2.js | 51 ++++ jstests/core/index_big1.js | 38 +++ jstests/core/index_bigkeys.js | 59 ++++ jstests/core/index_bigkeys_update.js | 18 ++ jstests/core/index_bounds_number_edge_cases.js | 50 ++++ jstests/core/index_check1.js | 31 ++ jstests/core/index_check2.js | 41 +++ jstests/core/index_check3.js | 63 +++++ jstests/core/index_check5.js | 17 ++ jstests/core/index_check6.js | 82 ++++++ jstests/core/index_check7.js | 15 + jstests/core/index_check8.js | 21 ++ jstests/core/index_diag.js | 50 ++++ jstests/core/index_elemmatch1.js | 43 +++ jstests/core/index_filter_commands.js | 167 +++++++++++ jstests/core/index_many.js | 51 ++++ 
jstests/core/index_many2.js | 31 ++ jstests/core/index_sparse1.js | 45 +++ jstests/core/index_sparse2.js | 23 ++ jstests/core/indexa.js | 22 ++ jstests/core/indexapi.js | 48 ++++ jstests/core/indexb.js | 29 ++ jstests/core/indexc.js | 20 ++ jstests/core/indexd.js | 10 + jstests/core/indexe.js | 22 ++ jstests/core/indexes_on_indexes.js | 19 ++ jstests/core/indexf.js | 13 + jstests/core/indexg.js | 13 + jstests/core/indexh.js | 41 +++ jstests/core/indexi.js | 34 +++ jstests/core/indexj.js | 44 +++ jstests/core/indexl.js | 27 ++ jstests/core/indexm.js | 38 +++ jstests/core/indexn.js | 49 ++++ jstests/core/indexo.js | 15 + jstests/core/indexp.js | 23 ++ jstests/core/indexq.js | 20 ++ jstests/core/indexr.js | 44 +++ jstests/core/indexs.js | 21 ++ jstests/core/indext.js | 21 ++ jstests/core/indexu.js | 108 +++++++ jstests/core/indexv.js | 18 ++ jstests/core/indexw.js | 15 + jstests/core/insert1.js | 34 +++ jstests/core/insert2.js | 13 + jstests/core/insert_id_undefined.js | 5 + jstests/core/insert_illegal_doc.js | 22 ++ jstests/core/insert_long_index_key.js | 10 + jstests/core/ismaster.js | 28 ++ jstests/count.js | 25 -- jstests/count10.js | 61 ---- jstests/count2.js | 28 -- jstests/count3.js | 26 -- jstests/count4.js | 17 -- jstests/count5.js | 30 -- jstests/count6.js | 61 ---- jstests/count7.js | 25 -- jstests/count9.js | 28 -- jstests/count_hint.js | 20 -- jstests/counta.js | 14 - jstests/countb.js | 11 - jstests/countc.js | 124 -------- jstests/coveredIndex1.js | 64 ----- jstests/coveredIndex2.js | 18 -- jstests/coveredIndex3.js | 54 ---- jstests/coveredIndex4.js | 40 --- jstests/coveredIndex5.js | 70 ----- jstests/covered_index_compound_1.js | 45 --- jstests/covered_index_geo_1.js | 18 -- jstests/covered_index_geo_2.js | 22 -- jstests/covered_index_negative_1.js | 61 ---- jstests/covered_index_simple_1.js | 55 ---- jstests/covered_index_simple_2.js | 43 --- jstests/covered_index_simple_3.js | 57 ---- jstests/covered_index_simple_id.js | 42 --- 
jstests/covered_index_sort_1.js | 34 --- jstests/covered_index_sort_2.js | 17 -- jstests/covered_index_sort_3.js | 16 -- jstests/create_indexes.js | 48 ---- jstests/currentop.js | 80 ------ jstests/cursor1.js | 20 -- jstests/cursor2.js | 24 -- jstests/cursor3.js | 35 --- jstests/cursor4.js | 47 --- jstests/cursor5.js | 36 --- jstests/cursor6.js | 100 ------- jstests/cursor7.js | 42 --- jstests/cursora.js | 47 --- jstests/cursorb.js | 17 -- jstests/datasize.js | 35 --- jstests/datasize2.js | 27 -- jstests/datasize3.js | 34 --- jstests/date1.js | 17 -- jstests/date2.js | 13 - jstests/date3.js | 31 -- jstests/db.js | 11 - jstests/dbadmin.js | 105 ------- jstests/dbcase.js | 29 -- jstests/dbcase2.js | 9 - jstests/dbhash.js | 58 ---- jstests/dbhash2.js | 22 -- jstests/dbref1.js | 10 - jstests/dbref2.js | 20 -- jstests/dbref3.js | 45 --- jstests/delx.js | 32 --- jstests/depth_limit.js | 56 ---- jstests/distinct1.js | 40 --- jstests/distinct2.js | 13 - jstests/distinct3.js | 33 --- jstests/distinct_array1.js | 91 ------ jstests/distinct_index1.js | 72 ----- jstests/distinct_index2.js | 41 --- jstests/distinct_speed1.js | 26 -- jstests/drop.js | 25 -- jstests/drop2.js | 53 ---- jstests/drop3.js | 29 -- jstests/drop_index.js | 20 -- jstests/dropdb.js | 26 -- jstests/dropdb_race.js | 44 --- jstests/elemMatchProjection.js | 265 ----------------- jstests/error2.js | 21 -- jstests/error5.js | 8 - jstests/eval0.js | 8 - jstests/eval1.js | 17 -- jstests/eval2.js | 28 -- jstests/eval3.js | 21 -- jstests/eval4.js | 23 -- jstests/eval5.js | 23 -- jstests/eval6.js | 15 - jstests/eval7.js | 3 - jstests/eval8.js | 19 -- jstests/eval9.js | 22 -- jstests/eval_nolock.js | 16 -- jstests/evala.js | 9 - jstests/evalb.js | 40 --- jstests/evalc.js | 25 -- jstests/evald.js | 98 ------- jstests/evale.js | 5 - jstests/evalf.js | 27 -- jstests/exists.js | 49 ---- jstests/exists2.js | 16 -- jstests/exists3.js | 21 -- jstests/exists4.js | 20 -- jstests/exists5.js | 33 --- jstests/exists6.js | 79 
------ jstests/exists7.js | 21 -- jstests/exists8.js | 76 ----- jstests/exists9.js | 41 --- jstests/existsa.js | 114 -------- jstests/existsb.js | 76 ----- jstests/explain1.js | 48 ---- jstests/explain2.js | 27 -- jstests/explain3.js | 24 -- jstests/explain4.js | 68 ----- jstests/explain5.js | 38 --- jstests/explain6.js | 25 -- jstests/explain7.js | 181 ------------ jstests/explain8.js | 24 -- jstests/explain9.js | 24 -- jstests/explain_batch_size.js | 19 -- jstests/explaina.js | 45 --- jstests/explainb.js | 46 --- jstests/extent.js | 11 - jstests/extent2.js | 34 --- jstests/filemd5.js | 11 - jstests/find1.js | 54 ---- jstests/find2.js | 16 -- jstests/find3.js | 10 - jstests/find4.js | 26 -- jstests/find5.js | 51 ---- jstests/find6.js | 41 --- jstests/find7.js | 8 - jstests/find8.js | 27 -- jstests/find9.js | 28 -- jstests/find_and_modify.js | 38 --- jstests/find_and_modify2.js | 16 -- jstests/find_and_modify3.js | 21 -- jstests/find_and_modify4.js | 55 ---- jstests/find_and_modify_server6226.js | 7 - jstests/find_and_modify_server6254.js | 10 - jstests/find_and_modify_server6582.js | 18 -- jstests/find_and_modify_server6588.js | 22 -- jstests/find_and_modify_server6659.js | 7 - jstests/find_and_modify_server6909.js | 21 -- jstests/find_and_modify_server6993.js | 9 - jstests/find_and_modify_server7660.js | 18 -- jstests/find_and_modify_where.js | 10 - jstests/find_dedup.js | 35 --- jstests/find_size.js | 26 -- jstests/finda.js | 106 ------- jstests/fm1.js | 12 - jstests/fm2.js | 9 - jstests/fm3.js | 37 --- jstests/fm4.js | 16 -- jstests/fsync.js | 22 -- jstests/fts1.js | 29 -- jstests/fts2.js | 24 -- jstests/fts3.js | 22 -- jstests/fts4.js | 22 -- jstests/fts5.js | 22 -- jstests/fts_blog.js | 26 -- jstests/fts_blogwild.js | 40 --- jstests/fts_enabled.js | 5 - jstests/fts_explain.js | 18 -- jstests/fts_index.js | 138 --------- jstests/fts_mix.js | 159 ----------- jstests/fts_partition1.js | 23 -- jstests/fts_partition_no_multikey.js | 17 -- jstests/fts_phrase.js | 
25 -- jstests/fts_proj.js | 20 -- jstests/fts_projection.js | 99 ------- jstests/fts_querylang.js | 93 ------ jstests/fts_score_sort.js | 28 -- jstests/fts_spanish.js | 31 -- jstests/geo1.js | 41 --- jstests/geo10.js | 21 -- jstests/geo2.js | 40 --- jstests/geo3.js | 77 ----- jstests/geo4.js | 10 - jstests/geo5.js | 18 -- jstests/geo6.js | 24 -- jstests/geo7.js | 20 -- jstests/geo8.js | 13 - jstests/geo9.js | 28 -- jstests/geo_2d_explain.js | 29 -- jstests/geo_2d_with_geojson_point.js | 20 -- jstests/geo_allowedcomparisons.js | 107 ------- jstests/geo_array0.js | 27 -- jstests/geo_array1.js | 38 --- jstests/geo_array2.js | 163 ----------- jstests/geo_borders.js | 168 ----------- jstests/geo_box1.js | 43 --- jstests/geo_box1_noindex.js | 32 --- jstests/geo_box2.js | 52 ---- jstests/geo_box3.js | 36 --- jstests/geo_center_sphere1.js | 96 ------- jstests/geo_center_sphere2.js | 164 ----------- jstests/geo_circle1.js | 43 --- jstests/geo_circle1_noindex.js | 29 -- jstests/geo_circle2.js | 26 -- jstests/geo_circle2a.js | 37 --- jstests/geo_circle3.js | 28 -- jstests/geo_circle4.js | 31 -- jstests/geo_circle5.js | 28 -- jstests/geo_distinct.js | 16 -- jstests/geo_exactfetch.js | 17 -- jstests/geo_fiddly_box.js | 46 --- jstests/geo_fiddly_box2.js | 32 --- jstests/geo_group.js | 35 --- jstests/geo_haystack1.js | 59 ---- jstests/geo_haystack2.js | 60 ---- jstests/geo_haystack3.js | 28 -- jstests/geo_invalid_polygon.js | 33 --- jstests/geo_mapreduce.js | 56 ---- jstests/geo_mapreduce2.js | 36 --- jstests/geo_max.js | 49 ---- jstests/geo_mindistance.js | 214 -------------- jstests/geo_mindistance_boundaries.js | 124 -------- jstests/geo_multikey0.js | 26 -- jstests/geo_multikey1.js | 20 -- jstests/geo_multinest0.js | 63 ----- jstests/geo_multinest1.js | 37 --- jstests/geo_near_random1.js | 12 - jstests/geo_near_random2.js | 21 -- jstests/geo_nearwithin.js | 27 -- jstests/geo_oob_sphere.js | 42 --- jstests/geo_or.js | 62 ---- jstests/geo_poly_edge.js | 22 -- 
jstests/geo_poly_line.js | 17 -- jstests/geo_polygon1.js | 74 ----- jstests/geo_polygon1_noindex.js | 47 --- jstests/geo_polygon2.js | 266 ----------------- jstests/geo_polygon3.js | 54 ---- jstests/geo_queryoptimizer.js | 27 -- jstests/geo_regex0.js | 18 -- jstests/geo_s2cursorlimitskip.js | 69 ----- jstests/geo_s2dedupnear.js | 11 - jstests/geo_s2descindex.js | 64 ----- jstests/geo_s2disjoint_holes.js | 94 ------ jstests/geo_s2dupe_points.js | 72 ----- jstests/geo_s2edgecases.js | 40 --- jstests/geo_s2exact.js | 21 -- jstests/geo_s2holesameasshell.js | 46 --- jstests/geo_s2index.js | 107 ------- jstests/geo_s2indexoldformat.js | 28 -- jstests/geo_s2indexversion1.js | 150 ---------- jstests/geo_s2intersection.js | 141 --------- jstests/geo_s2largewithin.js | 45 --- jstests/geo_s2meridian.js | 109 ------- jstests/geo_s2multi.js | 50 ---- jstests/geo_s2near.js | 84 ------ jstests/geo_s2nearComplex.js | 269 ------------------ jstests/geo_s2near_equator_opposite.js | 31 -- jstests/geo_s2nearcorrect.js | 12 - jstests/geo_s2nearwithin.js | 42 --- jstests/geo_s2nongeoarray.js | 28 -- jstests/geo_s2nonstring.js | 22 -- jstests/geo_s2nopoints.js | 7 - jstests/geo_s2oddshapes.js | 138 --------- jstests/geo_s2ordering.js | 47 --- jstests/geo_s2overlappingpolys.js | 213 -------------- jstests/geo_s2polywithholes.js | 48 ---- jstests/geo_s2selfintersectingpoly.js | 12 - jstests/geo_s2sparse.js | 113 -------- jstests/geo_s2twofields.js | 64 ----- jstests/geo_s2validindex.js | 26 -- jstests/geo_s2within.js | 36 --- jstests/geo_small_large.js | 151 ---------- jstests/geo_sort1.js | 22 -- jstests/geo_uniqueDocs.js | 40 --- jstests/geo_uniqueDocs2.js | 80 ------ jstests/geo_update.js | 37 --- jstests/geo_update1.js | 38 --- jstests/geo_update2.js | 40 --- jstests/geo_update_btree.js | 25 -- jstests/geo_update_btree2.js | 71 ----- jstests/geo_update_dedup.js | 60 ---- jstests/geo_withinquery.js | 15 - jstests/geoa.js | 12 - jstests/geob.js | 35 --- jstests/geoc.js | 24 -- 
jstests/geod.js | 14 - jstests/geoe.js | 32 --- jstests/geof.js | 19 -- jstests/geonear_cmd_input_validation.js | 119 -------- jstests/geonear_validate.js | 8 - jstests/getlog1.js | 24 -- jstests/getlog2.js | 47 --- jstests/group1.js | 64 ----- jstests/group2.js | 38 --- jstests/group3.js | 43 --- jstests/group4.js | 45 --- jstests/group5.js | 38 --- jstests/group6.js | 32 --- jstests/group7.js | 45 --- jstests/group_empty.js | 8 - jstests/grow_hash_table.js | 45 --- jstests/hashindex1.js | 94 ------ jstests/hashtest1.js | 78 ----- jstests/hint1.js | 16 -- jstests/hostinfo.js | 33 --- jstests/id1.js | 16 -- jstests/idhack.js | 43 --- jstests/in.js | 24 -- jstests/in2.js | 33 --- jstests/in3.js | 11 - jstests/in4.js | 42 --- jstests/in5.js | 56 ---- jstests/in6.js | 13 - jstests/in8.js | 23 -- jstests/in9.js | 35 --- jstests/ina.js | 15 - jstests/inb.js | 19 -- jstests/inc-SERVER-7446.js | 43 --- jstests/inc1.js | 32 --- jstests/inc2.js | 22 -- jstests/inc3.js | 16 -- jstests/index1.js | 24 -- jstests/index10.js | 32 --- jstests/index13.js | 147 ---------- jstests/index2.js | 40 --- jstests/index3.js | 16 -- jstests/index4.js | 33 --- jstests/index5.js | 24 -- jstests/index6.js | 8 - jstests/index7.js | 67 ----- jstests/index8.js | 62 ---- jstests/index9.js | 25 -- jstests/indexOtherNamespace.js | 21 -- jstests/indexStatsCommand.js | 88 ------ jstests/index_arr1.js | 23 -- jstests/index_arr2.js | 51 ---- jstests/index_big1.js | 36 --- jstests/index_bigkeys.js | 59 ---- jstests/index_bigkeys_update.js | 20 -- jstests/index_bounds_number_edge_cases.js | 50 ---- jstests/index_check1.js | 31 -- jstests/index_check2.js | 41 --- jstests/index_check3.js | 63 ----- jstests/index_check5.js | 17 -- jstests/index_check6.js | 82 ------ jstests/index_check7.js | 15 - jstests/index_check8.js | 21 -- jstests/index_diag.js | 50 ---- jstests/index_elemmatch1.js | 41 --- jstests/index_filter_commands.js | 167 ----------- jstests/index_many.js | 51 ---- jstests/index_many2.js | 31 -- 
jstests/index_sparse1.js | 46 --- jstests/index_sparse2.js | 23 -- jstests/indexa.js | 22 -- jstests/indexapi.js | 40 --- jstests/indexb.js | 29 -- jstests/indexbindata.js | 0 jstests/indexc.js | 20 -- jstests/indexd.js | 10 - jstests/indexe.js | 22 -- jstests/indexes_on_indexes.js | 19 -- jstests/indexf.js | 13 - jstests/indexg.js | 13 - jstests/indexh.js | 41 --- jstests/indexi.js | 43 --- jstests/indexj.js | 44 --- jstests/indexl.js | 27 -- jstests/indexm.js | 38 --- jstests/indexn.js | 49 ---- jstests/indexo.js | 17 -- jstests/indexp.js | 54 ---- jstests/indexq.js | 20 -- jstests/indexr.js | 44 --- jstests/indexs.js | 21 -- jstests/indext.js | 21 -- jstests/indexu.js | 137 --------- jstests/indexv.js | 18 -- jstests/indexw.js | 15 - jstests/insert1.js | 44 --- jstests/insert2.js | 8 - jstests/insert_id_undefined.js | 6 - jstests/insert_illegal_doc.js | 22 -- jstests/insert_long_index_key.js | 10 - jstests/ismaster.js | 28 -- jstests/slowNightly/sharding_jscore_passthrough.js | 3 + src/mongo/shell/assert.js | 6 +- src/mongo/shell/collection.js | 2 +- 901 files changed, 19257 insertions(+), 19438 deletions(-) delete mode 100644 jstests/all.js delete mode 100644 jstests/all2.js delete mode 100644 jstests/all3.js delete mode 100644 jstests/all4.js delete mode 100644 jstests/all5.js delete mode 100644 jstests/and.js delete mode 100644 jstests/and2.js delete mode 100644 jstests/and3.js delete mode 100644 jstests/andor.js delete mode 100644 jstests/apitest_db.js delete mode 100644 jstests/apitest_dbcollection.js delete mode 100644 jstests/apply_ops1.js delete mode 100644 jstests/apply_ops2.js delete mode 100644 jstests/array1.js delete mode 100644 jstests/array3.js delete mode 100644 jstests/array4.js delete mode 100644 jstests/array_match1.js delete mode 100644 jstests/array_match2.js delete mode 100644 jstests/array_match3.js delete mode 100644 jstests/array_match4.js delete mode 100644 jstests/arrayfind1.js delete mode 100644 jstests/arrayfind2.js delete mode 
100644 jstests/arrayfind3.js delete mode 100644 jstests/arrayfind4.js delete mode 100644 jstests/arrayfind5.js delete mode 100644 jstests/arrayfind6.js delete mode 100644 jstests/arrayfind7.js delete mode 100644 jstests/arrayfind8.js delete mode 100644 jstests/arrayfind9.js delete mode 100644 jstests/arrayfinda.js delete mode 100644 jstests/auth1.js delete mode 100644 jstests/auth2.js delete mode 100644 jstests/auth_copydb.js delete mode 100644 jstests/autoid.js delete mode 100644 jstests/bad_index_plugin.js delete mode 100644 jstests/basic1.js delete mode 100644 jstests/basic2.js delete mode 100644 jstests/basic3.js delete mode 100644 jstests/basic4.js delete mode 100644 jstests/basic5.js delete mode 100644 jstests/basic6.js delete mode 100644 jstests/basic7.js delete mode 100644 jstests/basic8.js delete mode 100644 jstests/basic9.js delete mode 100644 jstests/basica.js delete mode 100644 jstests/basicb.js delete mode 100644 jstests/basicc.js delete mode 100644 jstests/batch_size.js delete mode 100644 jstests/bench_test1.js delete mode 100644 jstests/bench_test2.js delete mode 100644 jstests/bench_test3.js delete mode 100644 jstests/big_object1.js delete mode 100644 jstests/binData.js delete mode 100644 jstests/block_check_supported.js delete mode 100644 jstests/bulk_insert.js delete mode 100644 jstests/capped.js delete mode 100644 jstests/capped1.js delete mode 100644 jstests/capped2.js delete mode 100644 jstests/capped3.js delete mode 100644 jstests/capped5.js delete mode 100644 jstests/capped6.js delete mode 100644 jstests/capped7.js delete mode 100644 jstests/capped8.js delete mode 100644 jstests/capped9.js delete mode 100644 jstests/capped_empty.js delete mode 100644 jstests/capped_max.js delete mode 100644 jstests/capped_server2639.js delete mode 100644 jstests/capped_server7543.js delete mode 100644 jstests/cappeda.js delete mode 100644 jstests/check_shard_index.js delete mode 100644 jstests/collmod.js delete mode 100644 jstests/compact.js delete mode 
100644 jstests/compact2.js delete mode 100644 jstests/compactPreservePadding.js delete mode 100644 jstests/connection_status.js delete mode 100644 jstests/connection_string_validation.js delete mode 100644 jstests/constructors.js delete mode 100644 jstests/copydb.js create mode 100644 jstests/core/all.js create mode 100644 jstests/core/all2.js create mode 100644 jstests/core/all3.js create mode 100644 jstests/core/all4.js create mode 100644 jstests/core/all5.js create mode 100644 jstests/core/and.js create mode 100644 jstests/core/and2.js create mode 100644 jstests/core/and3.js create mode 100644 jstests/core/andor.js create mode 100644 jstests/core/apitest_db.js create mode 100644 jstests/core/apitest_dbcollection.js create mode 100644 jstests/core/apply_ops1.js create mode 100644 jstests/core/apply_ops2.js create mode 100644 jstests/core/array1.js create mode 100644 jstests/core/array3.js create mode 100644 jstests/core/array4.js create mode 100644 jstests/core/array_match1.js create mode 100644 jstests/core/array_match2.js create mode 100644 jstests/core/array_match3.js create mode 100644 jstests/core/array_match4.js create mode 100644 jstests/core/arrayfind1.js create mode 100644 jstests/core/arrayfind2.js create mode 100644 jstests/core/arrayfind3.js create mode 100644 jstests/core/arrayfind4.js create mode 100644 jstests/core/arrayfind5.js create mode 100644 jstests/core/arrayfind6.js create mode 100644 jstests/core/arrayfind7.js create mode 100644 jstests/core/arrayfind8.js create mode 100644 jstests/core/arrayfind9.js create mode 100644 jstests/core/arrayfinda.js create mode 100644 jstests/core/auth1.js create mode 100644 jstests/core/auth2.js create mode 100644 jstests/core/auth_copydb.js create mode 100644 jstests/core/autoid.js create mode 100644 jstests/core/bad_index_plugin.js create mode 100644 jstests/core/basic1.js create mode 100644 jstests/core/basic2.js create mode 100644 jstests/core/basic3.js create mode 100644 jstests/core/basic4.js create 
mode 100644 jstests/core/basic5.js create mode 100644 jstests/core/basic6.js create mode 100644 jstests/core/basic7.js create mode 100644 jstests/core/basic8.js create mode 100644 jstests/core/basic9.js create mode 100644 jstests/core/basica.js create mode 100644 jstests/core/basicb.js create mode 100644 jstests/core/basicc.js create mode 100644 jstests/core/batch_size.js create mode 100644 jstests/core/bench_test1.js create mode 100644 jstests/core/bench_test2.js create mode 100644 jstests/core/bench_test3.js create mode 100644 jstests/core/big_object1.js create mode 100644 jstests/core/binData.js create mode 100644 jstests/core/block_check_supported.js create mode 100644 jstests/core/bulk_insert.js create mode 100644 jstests/core/capped.js create mode 100644 jstests/core/capped1.js create mode 100644 jstests/core/capped2.js create mode 100644 jstests/core/capped3.js create mode 100644 jstests/core/capped5.js create mode 100644 jstests/core/capped6.js create mode 100644 jstests/core/capped7.js create mode 100644 jstests/core/capped8.js create mode 100644 jstests/core/capped9.js create mode 100644 jstests/core/capped_empty.js create mode 100644 jstests/core/capped_max.js create mode 100644 jstests/core/capped_server2639.js create mode 100644 jstests/core/capped_server7543.js create mode 100644 jstests/core/cappeda.js create mode 100644 jstests/core/check_shard_index.js create mode 100644 jstests/core/collmod.js create mode 100644 jstests/core/compact.js create mode 100644 jstests/core/compact2.js create mode 100644 jstests/core/compactPreservePadding.js create mode 100644 jstests/core/connection_status.js create mode 100644 jstests/core/connection_string_validation.js create mode 100644 jstests/core/constructors.js create mode 100644 jstests/core/copydb.js create mode 100644 jstests/core/count.js create mode 100644 jstests/core/count10.js create mode 100644 jstests/core/count2.js create mode 100644 jstests/core/count3.js create mode 100644 jstests/core/count4.js 
create mode 100644 jstests/core/count5.js create mode 100644 jstests/core/count6.js create mode 100644 jstests/core/count7.js create mode 100644 jstests/core/count9.js create mode 100644 jstests/core/count_hint.js create mode 100644 jstests/core/counta.js create mode 100644 jstests/core/countb.js create mode 100644 jstests/core/countc.js create mode 100644 jstests/core/coveredIndex1.js create mode 100644 jstests/core/coveredIndex2.js create mode 100644 jstests/core/coveredIndex3.js create mode 100644 jstests/core/coveredIndex4.js create mode 100644 jstests/core/coveredIndex5.js create mode 100644 jstests/core/covered_index_compound_1.js create mode 100644 jstests/core/covered_index_geo_1.js create mode 100644 jstests/core/covered_index_geo_2.js create mode 100644 jstests/core/covered_index_negative_1.js create mode 100644 jstests/core/covered_index_simple_1.js create mode 100644 jstests/core/covered_index_simple_2.js create mode 100644 jstests/core/covered_index_simple_3.js create mode 100644 jstests/core/covered_index_simple_id.js create mode 100644 jstests/core/covered_index_sort_1.js create mode 100644 jstests/core/covered_index_sort_2.js create mode 100644 jstests/core/covered_index_sort_3.js create mode 100644 jstests/core/create_indexes.js create mode 100644 jstests/core/currentop.js create mode 100644 jstests/core/cursor1.js create mode 100644 jstests/core/cursor2.js create mode 100644 jstests/core/cursor3.js create mode 100644 jstests/core/cursor4.js create mode 100644 jstests/core/cursor5.js create mode 100644 jstests/core/cursor6.js create mode 100644 jstests/core/cursor7.js create mode 100644 jstests/core/cursora.js create mode 100644 jstests/core/cursorb.js create mode 100644 jstests/core/datasize.js create mode 100644 jstests/core/datasize2.js create mode 100644 jstests/core/datasize3.js create mode 100644 jstests/core/date1.js create mode 100644 jstests/core/date2.js create mode 100644 jstests/core/date3.js create mode 100644 jstests/core/db.js create 
mode 100644 jstests/core/dbadmin.js create mode 100644 jstests/core/dbcase.js create mode 100644 jstests/core/dbcase2.js create mode 100644 jstests/core/dbhash.js create mode 100644 jstests/core/dbhash2.js create mode 100644 jstests/core/dbref1.js create mode 100644 jstests/core/dbref2.js create mode 100644 jstests/core/dbref3.js create mode 100644 jstests/core/delx.js create mode 100644 jstests/core/depth_limit.js create mode 100644 jstests/core/distinct1.js create mode 100644 jstests/core/distinct2.js create mode 100644 jstests/core/distinct3.js create mode 100644 jstests/core/distinct_array1.js create mode 100644 jstests/core/distinct_index1.js create mode 100644 jstests/core/distinct_index2.js create mode 100644 jstests/core/distinct_speed1.js create mode 100644 jstests/core/drop.js create mode 100644 jstests/core/drop2.js create mode 100644 jstests/core/drop3.js create mode 100644 jstests/core/drop_index.js create mode 100644 jstests/core/dropdb.js create mode 100644 jstests/core/dropdb_race.js create mode 100644 jstests/core/elemMatchProjection.js create mode 100644 jstests/core/error2.js create mode 100644 jstests/core/error5.js create mode 100644 jstests/core/eval0.js create mode 100644 jstests/core/eval1.js create mode 100644 jstests/core/eval2.js create mode 100644 jstests/core/eval3.js create mode 100644 jstests/core/eval4.js create mode 100644 jstests/core/eval5.js create mode 100644 jstests/core/eval6.js create mode 100644 jstests/core/eval7.js create mode 100644 jstests/core/eval8.js create mode 100644 jstests/core/eval9.js create mode 100644 jstests/core/eval_nolock.js create mode 100644 jstests/core/evala.js create mode 100644 jstests/core/evalb.js create mode 100644 jstests/core/evalc.js create mode 100644 jstests/core/evald.js create mode 100644 jstests/core/evale.js create mode 100644 jstests/core/evalf.js create mode 100644 jstests/core/exists.js create mode 100644 jstests/core/exists2.js create mode 100644 jstests/core/exists3.js create mode 
100644 jstests/core/exists4.js create mode 100644 jstests/core/exists5.js create mode 100644 jstests/core/exists6.js create mode 100644 jstests/core/exists7.js create mode 100644 jstests/core/exists8.js create mode 100644 jstests/core/exists9.js create mode 100644 jstests/core/existsa.js create mode 100644 jstests/core/existsb.js create mode 100644 jstests/core/explain1.js create mode 100644 jstests/core/explain2.js create mode 100644 jstests/core/explain3.js create mode 100644 jstests/core/explain4.js create mode 100644 jstests/core/explain5.js create mode 100644 jstests/core/explain6.js create mode 100644 jstests/core/explain7.js create mode 100644 jstests/core/explain8.js create mode 100644 jstests/core/explain9.js create mode 100644 jstests/core/explain_batch_size.js create mode 100644 jstests/core/explaina.js create mode 100644 jstests/core/explainb.js create mode 100644 jstests/core/extent.js create mode 100644 jstests/core/extent2.js create mode 100644 jstests/core/filemd5.js create mode 100644 jstests/core/find1.js create mode 100644 jstests/core/find2.js create mode 100644 jstests/core/find3.js create mode 100644 jstests/core/find4.js create mode 100644 jstests/core/find5.js create mode 100644 jstests/core/find6.js create mode 100644 jstests/core/find7.js create mode 100644 jstests/core/find8.js create mode 100644 jstests/core/find9.js create mode 100644 jstests/core/find_and_modify.js create mode 100644 jstests/core/find_and_modify2.js create mode 100644 jstests/core/find_and_modify3.js create mode 100644 jstests/core/find_and_modify4.js create mode 100644 jstests/core/find_and_modify_server6226.js create mode 100644 jstests/core/find_and_modify_server6254.js create mode 100644 jstests/core/find_and_modify_server6582.js create mode 100644 jstests/core/find_and_modify_server6588.js create mode 100644 jstests/core/find_and_modify_server6659.js create mode 100644 jstests/core/find_and_modify_server6909.js create mode 100644 
jstests/core/find_and_modify_server6993.js create mode 100644 jstests/core/find_and_modify_server7660.js create mode 100644 jstests/core/find_and_modify_where.js create mode 100644 jstests/core/find_dedup.js create mode 100644 jstests/core/find_size.js create mode 100644 jstests/core/finda.js create mode 100644 jstests/core/fm1.js create mode 100644 jstests/core/fm2.js create mode 100644 jstests/core/fm3.js create mode 100644 jstests/core/fm4.js create mode 100644 jstests/core/fsync.js create mode 100644 jstests/core/fts1.js create mode 100644 jstests/core/fts2.js create mode 100644 jstests/core/fts3.js create mode 100644 jstests/core/fts4.js create mode 100644 jstests/core/fts5.js create mode 100644 jstests/core/fts_blog.js create mode 100644 jstests/core/fts_blogwild.js create mode 100644 jstests/core/fts_enabled.js create mode 100644 jstests/core/fts_explain.js create mode 100644 jstests/core/fts_index.js create mode 100644 jstests/core/fts_mix.js create mode 100644 jstests/core/fts_partition1.js create mode 100644 jstests/core/fts_partition_no_multikey.js create mode 100644 jstests/core/fts_phrase.js create mode 100644 jstests/core/fts_proj.js create mode 100644 jstests/core/fts_projection.js create mode 100644 jstests/core/fts_querylang.js create mode 100644 jstests/core/fts_score_sort.js create mode 100644 jstests/core/fts_spanish.js create mode 100644 jstests/core/geo1.js create mode 100644 jstests/core/geo10.js create mode 100644 jstests/core/geo2.js create mode 100644 jstests/core/geo3.js create mode 100644 jstests/core/geo4.js create mode 100644 jstests/core/geo5.js create mode 100644 jstests/core/geo6.js create mode 100644 jstests/core/geo7.js create mode 100644 jstests/core/geo8.js create mode 100644 jstests/core/geo9.js create mode 100644 jstests/core/geo_2d_explain.js create mode 100644 jstests/core/geo_2d_with_geojson_point.js create mode 100644 jstests/core/geo_allowedcomparisons.js create mode 100644 jstests/core/geo_array0.js create mode 100644 
jstests/core/geo_array1.js create mode 100644 jstests/core/geo_array2.js create mode 100644 jstests/core/geo_borders.js create mode 100644 jstests/core/geo_box1.js create mode 100644 jstests/core/geo_box1_noindex.js create mode 100644 jstests/core/geo_box2.js create mode 100644 jstests/core/geo_box3.js create mode 100644 jstests/core/geo_center_sphere1.js create mode 100644 jstests/core/geo_center_sphere2.js create mode 100644 jstests/core/geo_circle1.js create mode 100644 jstests/core/geo_circle1_noindex.js create mode 100644 jstests/core/geo_circle2.js create mode 100644 jstests/core/geo_circle2a.js create mode 100644 jstests/core/geo_circle3.js create mode 100644 jstests/core/geo_circle4.js create mode 100644 jstests/core/geo_circle5.js create mode 100644 jstests/core/geo_distinct.js create mode 100644 jstests/core/geo_exactfetch.js create mode 100644 jstests/core/geo_fiddly_box.js create mode 100644 jstests/core/geo_fiddly_box2.js create mode 100644 jstests/core/geo_group.js create mode 100644 jstests/core/geo_haystack1.js create mode 100644 jstests/core/geo_haystack2.js create mode 100644 jstests/core/geo_haystack3.js create mode 100644 jstests/core/geo_invalid_polygon.js create mode 100644 jstests/core/geo_mapreduce.js create mode 100644 jstests/core/geo_mapreduce2.js create mode 100644 jstests/core/geo_max.js create mode 100644 jstests/core/geo_mindistance.js create mode 100644 jstests/core/geo_mindistance_boundaries.js create mode 100644 jstests/core/geo_multikey0.js create mode 100644 jstests/core/geo_multikey1.js create mode 100644 jstests/core/geo_multinest0.js create mode 100644 jstests/core/geo_multinest1.js create mode 100644 jstests/core/geo_near_random1.js create mode 100644 jstests/core/geo_near_random2.js create mode 100644 jstests/core/geo_nearwithin.js create mode 100644 jstests/core/geo_or.js create mode 100644 jstests/core/geo_poly_edge.js create mode 100644 jstests/core/geo_poly_line.js create mode 100644 jstests/core/geo_polygon1.js create 
mode 100644 jstests/core/geo_polygon1_noindex.js create mode 100644 jstests/core/geo_polygon2.js create mode 100644 jstests/core/geo_polygon3.js create mode 100644 jstests/core/geo_queryoptimizer.js create mode 100644 jstests/core/geo_regex0.js create mode 100644 jstests/core/geo_s2cursorlimitskip.js create mode 100644 jstests/core/geo_s2dedupnear.js create mode 100644 jstests/core/geo_s2descindex.js create mode 100644 jstests/core/geo_s2disjoint_holes.js create mode 100644 jstests/core/geo_s2dupe_points.js create mode 100755 jstests/core/geo_s2edgecases.js create mode 100644 jstests/core/geo_s2exact.js create mode 100644 jstests/core/geo_s2holesameasshell.js create mode 100755 jstests/core/geo_s2index.js create mode 100755 jstests/core/geo_s2indexoldformat.js create mode 100644 jstests/core/geo_s2indexversion1.js create mode 100644 jstests/core/geo_s2intersection.js create mode 100644 jstests/core/geo_s2largewithin.js create mode 100644 jstests/core/geo_s2meridian.js create mode 100644 jstests/core/geo_s2multi.js create mode 100644 jstests/core/geo_s2near.js create mode 100644 jstests/core/geo_s2nearComplex.js create mode 100644 jstests/core/geo_s2near_equator_opposite.js create mode 100644 jstests/core/geo_s2nearcorrect.js create mode 100644 jstests/core/geo_s2nearwithin.js create mode 100644 jstests/core/geo_s2nongeoarray.js create mode 100755 jstests/core/geo_s2nonstring.js create mode 100644 jstests/core/geo_s2nopoints.js create mode 100644 jstests/core/geo_s2oddshapes.js create mode 100644 jstests/core/geo_s2ordering.js create mode 100644 jstests/core/geo_s2overlappingpolys.js create mode 100755 jstests/core/geo_s2polywithholes.js create mode 100644 jstests/core/geo_s2selfintersectingpoly.js create mode 100644 jstests/core/geo_s2sparse.js create mode 100644 jstests/core/geo_s2twofields.js create mode 100644 jstests/core/geo_s2validindex.js create mode 100644 jstests/core/geo_s2within.js create mode 100644 jstests/core/geo_small_large.js create mode 100644 
jstests/core/geo_sort1.js create mode 100644 jstests/core/geo_uniqueDocs.js create mode 100644 jstests/core/geo_uniqueDocs2.js create mode 100644 jstests/core/geo_update.js create mode 100644 jstests/core/geo_update1.js create mode 100644 jstests/core/geo_update2.js create mode 100644 jstests/core/geo_update_btree.js create mode 100644 jstests/core/geo_update_btree2.js create mode 100644 jstests/core/geo_update_dedup.js create mode 100644 jstests/core/geo_withinquery.js create mode 100644 jstests/core/geoa.js create mode 100644 jstests/core/geob.js create mode 100644 jstests/core/geoc.js create mode 100644 jstests/core/geod.js create mode 100644 jstests/core/geoe.js create mode 100644 jstests/core/geof.js create mode 100644 jstests/core/geonear_cmd_input_validation.js create mode 100644 jstests/core/geonear_validate.js create mode 100644 jstests/core/getlog1.js create mode 100644 jstests/core/getlog2.js create mode 100644 jstests/core/group1.js create mode 100644 jstests/core/group2.js create mode 100644 jstests/core/group3.js create mode 100644 jstests/core/group4.js create mode 100644 jstests/core/group5.js create mode 100644 jstests/core/group6.js create mode 100644 jstests/core/group7.js create mode 100644 jstests/core/group_empty.js create mode 100644 jstests/core/grow_hash_table.js create mode 100644 jstests/core/hashindex1.js create mode 100644 jstests/core/hashtest1.js create mode 100644 jstests/core/hint1.js create mode 100644 jstests/core/hostinfo.js create mode 100644 jstests/core/id1.js create mode 100644 jstests/core/idhack.js create mode 100644 jstests/core/in.js create mode 100644 jstests/core/in2.js create mode 100644 jstests/core/in3.js create mode 100644 jstests/core/in4.js create mode 100644 jstests/core/in5.js create mode 100644 jstests/core/in6.js create mode 100644 jstests/core/in8.js create mode 100644 jstests/core/in9.js create mode 100644 jstests/core/ina.js create mode 100644 jstests/core/inb.js create mode 100644 
jstests/core/inc-SERVER-7446.js create mode 100644 jstests/core/inc1.js create mode 100644 jstests/core/inc2.js create mode 100644 jstests/core/inc3.js create mode 100644 jstests/core/index1.js create mode 100644 jstests/core/index10.js create mode 100644 jstests/core/index13.js create mode 100644 jstests/core/index2.js create mode 100644 jstests/core/index3.js create mode 100644 jstests/core/index4.js create mode 100644 jstests/core/index5.js create mode 100644 jstests/core/index6.js create mode 100644 jstests/core/index7.js create mode 100644 jstests/core/index8.js create mode 100644 jstests/core/index9.js create mode 100644 jstests/core/indexOtherNamespace.js create mode 100644 jstests/core/indexStatsCommand.js create mode 100644 jstests/core/index_arr1.js create mode 100644 jstests/core/index_arr2.js create mode 100644 jstests/core/index_big1.js create mode 100755 jstests/core/index_bigkeys.js create mode 100644 jstests/core/index_bigkeys_update.js create mode 100644 jstests/core/index_bounds_number_edge_cases.js create mode 100644 jstests/core/index_check1.js create mode 100644 jstests/core/index_check2.js create mode 100644 jstests/core/index_check3.js create mode 100644 jstests/core/index_check5.js create mode 100644 jstests/core/index_check6.js create mode 100644 jstests/core/index_check7.js create mode 100644 jstests/core/index_check8.js create mode 100644 jstests/core/index_diag.js create mode 100644 jstests/core/index_elemmatch1.js create mode 100644 jstests/core/index_filter_commands.js create mode 100644 jstests/core/index_many.js create mode 100644 jstests/core/index_many2.js create mode 100644 jstests/core/index_sparse1.js create mode 100644 jstests/core/index_sparse2.js create mode 100644 jstests/core/indexa.js create mode 100644 jstests/core/indexapi.js create mode 100644 jstests/core/indexb.js create mode 100644 jstests/core/indexc.js create mode 100644 jstests/core/indexd.js create mode 100644 jstests/core/indexe.js create mode 100644 
jstests/core/indexes_on_indexes.js create mode 100644 jstests/core/indexf.js create mode 100644 jstests/core/indexg.js create mode 100644 jstests/core/indexh.js create mode 100644 jstests/core/indexi.js create mode 100644 jstests/core/indexj.js create mode 100644 jstests/core/indexl.js create mode 100644 jstests/core/indexm.js create mode 100644 jstests/core/indexn.js create mode 100644 jstests/core/indexo.js create mode 100644 jstests/core/indexp.js create mode 100644 jstests/core/indexq.js create mode 100644 jstests/core/indexr.js create mode 100644 jstests/core/indexs.js create mode 100644 jstests/core/indext.js create mode 100644 jstests/core/indexu.js create mode 100644 jstests/core/indexv.js create mode 100644 jstests/core/indexw.js create mode 100644 jstests/core/insert1.js create mode 100644 jstests/core/insert2.js create mode 100644 jstests/core/insert_id_undefined.js create mode 100644 jstests/core/insert_illegal_doc.js create mode 100644 jstests/core/insert_long_index_key.js create mode 100644 jstests/core/ismaster.js delete mode 100644 jstests/count.js delete mode 100644 jstests/count10.js delete mode 100644 jstests/count2.js delete mode 100644 jstests/count3.js delete mode 100644 jstests/count4.js delete mode 100644 jstests/count5.js delete mode 100644 jstests/count6.js delete mode 100644 jstests/count7.js delete mode 100644 jstests/count9.js delete mode 100644 jstests/count_hint.js delete mode 100644 jstests/counta.js delete mode 100644 jstests/countb.js delete mode 100644 jstests/countc.js delete mode 100644 jstests/coveredIndex1.js delete mode 100644 jstests/coveredIndex2.js delete mode 100644 jstests/coveredIndex3.js delete mode 100644 jstests/coveredIndex4.js delete mode 100644 jstests/coveredIndex5.js delete mode 100644 jstests/covered_index_compound_1.js delete mode 100644 jstests/covered_index_geo_1.js delete mode 100644 jstests/covered_index_geo_2.js delete mode 100644 jstests/covered_index_negative_1.js delete mode 100644 
jstests/covered_index_simple_1.js delete mode 100644 jstests/covered_index_simple_2.js delete mode 100644 jstests/covered_index_simple_3.js delete mode 100644 jstests/covered_index_simple_id.js delete mode 100644 jstests/covered_index_sort_1.js delete mode 100644 jstests/covered_index_sort_2.js delete mode 100644 jstests/covered_index_sort_3.js delete mode 100644 jstests/create_indexes.js delete mode 100644 jstests/currentop.js delete mode 100644 jstests/cursor1.js delete mode 100644 jstests/cursor2.js delete mode 100644 jstests/cursor3.js delete mode 100644 jstests/cursor4.js delete mode 100644 jstests/cursor5.js delete mode 100644 jstests/cursor6.js delete mode 100644 jstests/cursor7.js delete mode 100644 jstests/cursora.js delete mode 100644 jstests/cursorb.js delete mode 100644 jstests/datasize.js delete mode 100644 jstests/datasize2.js delete mode 100644 jstests/datasize3.js delete mode 100644 jstests/date1.js delete mode 100644 jstests/date2.js delete mode 100644 jstests/date3.js delete mode 100644 jstests/db.js delete mode 100644 jstests/dbadmin.js delete mode 100644 jstests/dbcase.js delete mode 100644 jstests/dbcase2.js delete mode 100644 jstests/dbhash.js delete mode 100644 jstests/dbhash2.js delete mode 100644 jstests/dbref1.js delete mode 100644 jstests/dbref2.js delete mode 100644 jstests/dbref3.js delete mode 100644 jstests/delx.js delete mode 100644 jstests/depth_limit.js delete mode 100644 jstests/distinct1.js delete mode 100644 jstests/distinct2.js delete mode 100644 jstests/distinct3.js delete mode 100644 jstests/distinct_array1.js delete mode 100644 jstests/distinct_index1.js delete mode 100644 jstests/distinct_index2.js delete mode 100644 jstests/distinct_speed1.js delete mode 100644 jstests/drop.js delete mode 100644 jstests/drop2.js delete mode 100644 jstests/drop3.js delete mode 100644 jstests/drop_index.js delete mode 100644 jstests/dropdb.js delete mode 100644 jstests/dropdb_race.js delete mode 100644 jstests/elemMatchProjection.js delete 
mode 100644 jstests/error2.js delete mode 100644 jstests/error5.js delete mode 100644 jstests/eval0.js delete mode 100644 jstests/eval1.js delete mode 100644 jstests/eval2.js delete mode 100644 jstests/eval3.js delete mode 100644 jstests/eval4.js delete mode 100644 jstests/eval5.js delete mode 100644 jstests/eval6.js delete mode 100644 jstests/eval7.js delete mode 100644 jstests/eval8.js delete mode 100644 jstests/eval9.js delete mode 100644 jstests/eval_nolock.js delete mode 100644 jstests/evala.js delete mode 100644 jstests/evalb.js delete mode 100644 jstests/evalc.js delete mode 100644 jstests/evald.js delete mode 100644 jstests/evale.js delete mode 100644 jstests/evalf.js delete mode 100644 jstests/exists.js delete mode 100644 jstests/exists2.js delete mode 100644 jstests/exists3.js delete mode 100644 jstests/exists4.js delete mode 100644 jstests/exists5.js delete mode 100644 jstests/exists6.js delete mode 100644 jstests/exists7.js delete mode 100644 jstests/exists8.js delete mode 100644 jstests/exists9.js delete mode 100644 jstests/existsa.js delete mode 100644 jstests/existsb.js delete mode 100644 jstests/explain1.js delete mode 100644 jstests/explain2.js delete mode 100644 jstests/explain3.js delete mode 100644 jstests/explain4.js delete mode 100644 jstests/explain5.js delete mode 100644 jstests/explain6.js delete mode 100644 jstests/explain7.js delete mode 100644 jstests/explain8.js delete mode 100644 jstests/explain9.js delete mode 100644 jstests/explain_batch_size.js delete mode 100644 jstests/explaina.js delete mode 100644 jstests/explainb.js delete mode 100644 jstests/extent.js delete mode 100644 jstests/extent2.js delete mode 100644 jstests/filemd5.js delete mode 100644 jstests/find1.js delete mode 100644 jstests/find2.js delete mode 100644 jstests/find3.js delete mode 100644 jstests/find4.js delete mode 100644 jstests/find5.js delete mode 100644 jstests/find6.js delete mode 100644 jstests/find7.js delete mode 100644 jstests/find8.js delete mode 100644 
jstests/find9.js delete mode 100644 jstests/find_and_modify.js delete mode 100644 jstests/find_and_modify2.js delete mode 100644 jstests/find_and_modify3.js delete mode 100644 jstests/find_and_modify4.js delete mode 100644 jstests/find_and_modify_server6226.js delete mode 100644 jstests/find_and_modify_server6254.js delete mode 100644 jstests/find_and_modify_server6582.js delete mode 100644 jstests/find_and_modify_server6588.js delete mode 100644 jstests/find_and_modify_server6659.js delete mode 100644 jstests/find_and_modify_server6909.js delete mode 100644 jstests/find_and_modify_server6993.js delete mode 100644 jstests/find_and_modify_server7660.js delete mode 100644 jstests/find_and_modify_where.js delete mode 100644 jstests/find_dedup.js delete mode 100644 jstests/find_size.js delete mode 100644 jstests/finda.js delete mode 100644 jstests/fm1.js delete mode 100644 jstests/fm2.js delete mode 100644 jstests/fm3.js delete mode 100644 jstests/fm4.js delete mode 100644 jstests/fsync.js delete mode 100644 jstests/fts1.js delete mode 100644 jstests/fts2.js delete mode 100644 jstests/fts3.js delete mode 100644 jstests/fts4.js delete mode 100644 jstests/fts5.js delete mode 100644 jstests/fts_blog.js delete mode 100644 jstests/fts_blogwild.js delete mode 100644 jstests/fts_enabled.js delete mode 100644 jstests/fts_explain.js delete mode 100644 jstests/fts_index.js delete mode 100644 jstests/fts_mix.js delete mode 100644 jstests/fts_partition1.js delete mode 100644 jstests/fts_partition_no_multikey.js delete mode 100644 jstests/fts_phrase.js delete mode 100644 jstests/fts_proj.js delete mode 100644 jstests/fts_projection.js delete mode 100644 jstests/fts_querylang.js delete mode 100644 jstests/fts_score_sort.js delete mode 100644 jstests/fts_spanish.js delete mode 100644 jstests/geo1.js delete mode 100644 jstests/geo10.js delete mode 100644 jstests/geo2.js delete mode 100644 jstests/geo3.js delete mode 100644 jstests/geo4.js delete mode 100644 jstests/geo5.js delete mode 
100644 jstests/geo6.js delete mode 100644 jstests/geo7.js delete mode 100644 jstests/geo8.js delete mode 100644 jstests/geo9.js delete mode 100644 jstests/geo_2d_explain.js delete mode 100644 jstests/geo_2d_with_geojson_point.js delete mode 100644 jstests/geo_allowedcomparisons.js delete mode 100644 jstests/geo_array0.js delete mode 100644 jstests/geo_array1.js delete mode 100644 jstests/geo_array2.js delete mode 100644 jstests/geo_borders.js delete mode 100644 jstests/geo_box1.js delete mode 100644 jstests/geo_box1_noindex.js delete mode 100644 jstests/geo_box2.js delete mode 100644 jstests/geo_box3.js delete mode 100644 jstests/geo_center_sphere1.js delete mode 100644 jstests/geo_center_sphere2.js delete mode 100644 jstests/geo_circle1.js delete mode 100644 jstests/geo_circle1_noindex.js delete mode 100644 jstests/geo_circle2.js delete mode 100644 jstests/geo_circle2a.js delete mode 100644 jstests/geo_circle3.js delete mode 100644 jstests/geo_circle4.js delete mode 100644 jstests/geo_circle5.js delete mode 100644 jstests/geo_distinct.js delete mode 100644 jstests/geo_exactfetch.js delete mode 100644 jstests/geo_fiddly_box.js delete mode 100644 jstests/geo_fiddly_box2.js delete mode 100644 jstests/geo_group.js delete mode 100644 jstests/geo_haystack1.js delete mode 100644 jstests/geo_haystack2.js delete mode 100644 jstests/geo_haystack3.js delete mode 100644 jstests/geo_invalid_polygon.js delete mode 100644 jstests/geo_mapreduce.js delete mode 100644 jstests/geo_mapreduce2.js delete mode 100644 jstests/geo_max.js delete mode 100644 jstests/geo_mindistance.js delete mode 100644 jstests/geo_mindistance_boundaries.js delete mode 100644 jstests/geo_multikey0.js delete mode 100644 jstests/geo_multikey1.js delete mode 100644 jstests/geo_multinest0.js delete mode 100644 jstests/geo_multinest1.js delete mode 100644 jstests/geo_near_random1.js delete mode 100644 jstests/geo_near_random2.js delete mode 100644 jstests/geo_nearwithin.js delete mode 100644 
jstests/geo_oob_sphere.js delete mode 100644 jstests/geo_or.js delete mode 100644 jstests/geo_poly_edge.js delete mode 100644 jstests/geo_poly_line.js delete mode 100644 jstests/geo_polygon1.js delete mode 100644 jstests/geo_polygon1_noindex.js delete mode 100644 jstests/geo_polygon2.js delete mode 100644 jstests/geo_polygon3.js delete mode 100644 jstests/geo_queryoptimizer.js delete mode 100644 jstests/geo_regex0.js delete mode 100644 jstests/geo_s2cursorlimitskip.js delete mode 100644 jstests/geo_s2dedupnear.js delete mode 100644 jstests/geo_s2descindex.js delete mode 100644 jstests/geo_s2disjoint_holes.js delete mode 100644 jstests/geo_s2dupe_points.js delete mode 100755 jstests/geo_s2edgecases.js delete mode 100644 jstests/geo_s2exact.js delete mode 100644 jstests/geo_s2holesameasshell.js delete mode 100755 jstests/geo_s2index.js delete mode 100755 jstests/geo_s2indexoldformat.js delete mode 100644 jstests/geo_s2indexversion1.js delete mode 100644 jstests/geo_s2intersection.js delete mode 100644 jstests/geo_s2largewithin.js delete mode 100644 jstests/geo_s2meridian.js delete mode 100644 jstests/geo_s2multi.js delete mode 100644 jstests/geo_s2near.js delete mode 100644 jstests/geo_s2nearComplex.js delete mode 100644 jstests/geo_s2near_equator_opposite.js delete mode 100644 jstests/geo_s2nearcorrect.js delete mode 100644 jstests/geo_s2nearwithin.js delete mode 100644 jstests/geo_s2nongeoarray.js delete mode 100755 jstests/geo_s2nonstring.js delete mode 100644 jstests/geo_s2nopoints.js delete mode 100644 jstests/geo_s2oddshapes.js delete mode 100644 jstests/geo_s2ordering.js delete mode 100644 jstests/geo_s2overlappingpolys.js delete mode 100755 jstests/geo_s2polywithholes.js delete mode 100644 jstests/geo_s2selfintersectingpoly.js delete mode 100644 jstests/geo_s2sparse.js delete mode 100644 jstests/geo_s2twofields.js delete mode 100644 jstests/geo_s2validindex.js delete mode 100644 jstests/geo_s2within.js delete mode 100644 jstests/geo_small_large.js delete mode 
100644 jstests/geo_sort1.js delete mode 100644 jstests/geo_uniqueDocs.js delete mode 100644 jstests/geo_uniqueDocs2.js delete mode 100644 jstests/geo_update.js delete mode 100644 jstests/geo_update1.js delete mode 100644 jstests/geo_update2.js delete mode 100644 jstests/geo_update_btree.js delete mode 100644 jstests/geo_update_btree2.js delete mode 100644 jstests/geo_update_dedup.js delete mode 100644 jstests/geo_withinquery.js delete mode 100644 jstests/geoa.js delete mode 100644 jstests/geob.js delete mode 100644 jstests/geoc.js delete mode 100644 jstests/geod.js delete mode 100644 jstests/geoe.js delete mode 100644 jstests/geof.js delete mode 100644 jstests/geonear_cmd_input_validation.js delete mode 100644 jstests/geonear_validate.js delete mode 100644 jstests/getlog1.js delete mode 100644 jstests/getlog2.js delete mode 100644 jstests/group1.js delete mode 100644 jstests/group2.js delete mode 100644 jstests/group3.js delete mode 100644 jstests/group4.js delete mode 100644 jstests/group5.js delete mode 100644 jstests/group6.js delete mode 100644 jstests/group7.js delete mode 100644 jstests/group_empty.js delete mode 100644 jstests/grow_hash_table.js delete mode 100644 jstests/hashindex1.js delete mode 100644 jstests/hashtest1.js delete mode 100644 jstests/hint1.js delete mode 100644 jstests/hostinfo.js delete mode 100644 jstests/id1.js delete mode 100644 jstests/idhack.js delete mode 100644 jstests/in.js delete mode 100644 jstests/in2.js delete mode 100644 jstests/in3.js delete mode 100644 jstests/in4.js delete mode 100644 jstests/in5.js delete mode 100644 jstests/in6.js delete mode 100644 jstests/in8.js delete mode 100644 jstests/in9.js delete mode 100644 jstests/ina.js delete mode 100644 jstests/inb.js delete mode 100644 jstests/inc-SERVER-7446.js delete mode 100644 jstests/inc1.js delete mode 100644 jstests/inc2.js delete mode 100644 jstests/inc3.js delete mode 100644 jstests/index1.js delete mode 100644 jstests/index10.js delete mode 100644 
jstests/index13.js delete mode 100644 jstests/index2.js delete mode 100644 jstests/index3.js delete mode 100644 jstests/index4.js delete mode 100644 jstests/index5.js delete mode 100644 jstests/index6.js delete mode 100644 jstests/index7.js delete mode 100644 jstests/index8.js delete mode 100644 jstests/index9.js delete mode 100644 jstests/indexOtherNamespace.js delete mode 100644 jstests/indexStatsCommand.js delete mode 100644 jstests/index_arr1.js delete mode 100644 jstests/index_arr2.js delete mode 100644 jstests/index_big1.js delete mode 100755 jstests/index_bigkeys.js delete mode 100644 jstests/index_bigkeys_update.js delete mode 100644 jstests/index_bounds_number_edge_cases.js delete mode 100644 jstests/index_check1.js delete mode 100644 jstests/index_check2.js delete mode 100644 jstests/index_check3.js delete mode 100644 jstests/index_check5.js delete mode 100644 jstests/index_check6.js delete mode 100644 jstests/index_check7.js delete mode 100644 jstests/index_check8.js delete mode 100644 jstests/index_diag.js delete mode 100644 jstests/index_elemmatch1.js delete mode 100644 jstests/index_filter_commands.js delete mode 100644 jstests/index_many.js delete mode 100644 jstests/index_many2.js delete mode 100644 jstests/index_sparse1.js delete mode 100644 jstests/index_sparse2.js delete mode 100644 jstests/indexa.js delete mode 100644 jstests/indexapi.js delete mode 100644 jstests/indexb.js delete mode 100755 jstests/indexbindata.js delete mode 100644 jstests/indexc.js delete mode 100644 jstests/indexd.js delete mode 100644 jstests/indexe.js delete mode 100644 jstests/indexes_on_indexes.js delete mode 100644 jstests/indexf.js delete mode 100644 jstests/indexg.js delete mode 100644 jstests/indexh.js delete mode 100644 jstests/indexi.js delete mode 100644 jstests/indexj.js delete mode 100644 jstests/indexl.js delete mode 100644 jstests/indexm.js delete mode 100644 jstests/indexn.js delete mode 100644 jstests/indexo.js delete mode 100644 jstests/indexp.js delete 
mode 100644 jstests/indexq.js delete mode 100644 jstests/indexr.js delete mode 100644 jstests/indexs.js delete mode 100644 jstests/indext.js delete mode 100644 jstests/indexu.js delete mode 100644 jstests/indexv.js delete mode 100644 jstests/indexw.js delete mode 100644 jstests/insert1.js delete mode 100644 jstests/insert2.js delete mode 100644 jstests/insert_id_undefined.js delete mode 100644 jstests/insert_illegal_doc.js delete mode 100644 jstests/insert_long_index_key.js delete mode 100644 jstests/ismaster.js diff --git a/jstests/all.js b/jstests/all.js deleted file mode 100644 index 50087882ad1..00000000000 --- a/jstests/all.js +++ /dev/null @@ -1,47 +0,0 @@ -t = db.jstests_all; -t.drop(); - -doTest = function() { - - t.save( { a:[ 1,2,3 ] } ); - t.save( { a:[ 1,2,4 ] } ); - t.save( { a:[ 1,8,5 ] } ); - t.save( { a:[ 1,8,6 ] } ); - t.save( { a:[ 1,9,7 ] } ); - t.save( { a : [] } ); - t.save( {} ); - - assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() ); - assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() ); - assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() ); - assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() ); - assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() ); - assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() ); - assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() ); - - t.save( { a: [ 2, 2 ] } ); - assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() ); - - t.save( { a: [ [ 2 ] ] } ); - assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() ); - - t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } ); - assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() ); - assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() ); - - t.save( { a: { b: [ 20, 30 ] } } ); - assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() ); - assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() ); - - - assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" ); - assert.eq( 0 , t.find( { a : { $all : 
[19] } } ).count() , "E2" ); - assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" ); - - -} - -doTest(); -t.drop(); -t.ensureIndex( {a:1} ); -doTest(); diff --git a/jstests/all2.js b/jstests/all2.js deleted file mode 100644 index 64372ca5e97..00000000000 --- a/jstests/all2.js +++ /dev/null @@ -1,86 +0,0 @@ - -t = db.all2; -t.drop(); - -t.save( { a : [ { x : 1 } , { x : 2 } ] } ) -t.save( { a : [ { x : 2 } , { x : 3 } ] } ) -t.save( { a : [ { x : 3 } , { x : 4 } ] } ) - -state = "no index"; - -function check( n , q , e ){ - assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state ); - assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount" + state ); -} - -check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" ); -check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" ); - -check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" ); -check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" ); -check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" ); - -check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" ); -check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" ); -check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" ); - -t.ensureIndex( { "a.x" : 1 } ); -state = "index"; - -check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" ); -check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" ); - -check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" ); -check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" ); -check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" ); - -check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" ); -check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" ); -check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" ); - -// --- more - -t.drop(); - -t.save( { a : [ 1 , 2 ] } ) -t.save( { a : [ 2 , 3 ] } ) -t.save( { a : [ 3 , 4 ] } ) - -state = "more no index"; - -check( 1 , { "a" : { $in : [ 1 ] } } , "A" ); -check( 2 , { "a" : { $in : [ 2 ] } } , "B" ); - -check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" ); -check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" ); -check( 3 , { "a" : { $in : [ 1 , 3 ] } 
} , "E" ); - -check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" ); -check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" ); -check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" ); - -t.ensureIndex( { "a" : 1 } ); -state = "more index"; - -check( 1 , { "a" : { $in : [ 1 ] } } , "A" ); -check( 2 , { "a" : { $in : [ 2 ] } } , "B" ); - -check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" ); -check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" ); -check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" ); - -check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" ); -check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" ); -check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" ); - - -// more 2 - -state = "more 2" - -t.drop(); -t.save( { name : [ "harry","jack","tom" ] } ) -check( 0 , { name : { $all : ["harry","john"] } } , "A" ); -t.ensureIndex( { name : 1 } ); -check( 0 , { name : { $all : ["harry","john"] } } , "B" ); - diff --git a/jstests/all3.js b/jstests/all3.js deleted file mode 100644 index b7a05321bbf..00000000000 --- a/jstests/all3.js +++ /dev/null @@ -1,28 +0,0 @@ -// Check that $all matching null is consistent with $in - SERVER-3820 - -t = db.jstests_all3; -t.drop(); - -t.save({}); - -assert.eq( 1, t.count( {foo:{$in:[null]}} ) ); -assert.eq( 1, t.count( {foo:{$all:[null]}} ) ); -assert.eq( 0, t.count( {foo:{$not:{$all:[null]}}} ) ); -assert.eq( 0, t.count( {foo:{$not:{$in:[null]}}} ) ); - -t.remove({}); -t.save({foo:1}); -assert.eq( 0, t.count( {foo:{$in:[null]}} ) ); -assert.eq( 0, t.count( {foo:{$all:[null]}} ) ); -assert.eq( 1, t.count( {foo:{$not:{$in:[null]}}} ) ); -assert.eq( 1, t.count( {foo:{$not:{$all:[null]}}} ) ); - -t.remove({}); -t.save( {foo:[0,1]} ); -assert.eq( 1, t.count( {foo:{$in:[[0,1]]}} ) ); -assert.eq( 1, t.count( {foo:{$all:[[0,1]]}} ) ); - -t.remove({}); -t.save( {foo:[]} ); -assert.eq( 1, t.count( {foo:{$in:[[]]}} ) ); -assert.eq( 1, t.count( {foo:{$all:[[]]}} ) ); diff --git a/jstests/all4.js b/jstests/all4.js deleted file mode 100644 index 18acbf4f46a..00000000000 --- 
a/jstests/all4.js +++ /dev/null @@ -1,30 +0,0 @@ -// Test $all/$elemMatch with missing field - SERVER-4492 - -t = db.jstests_all4; -t.drop(); - -function checkQuery( query, val ) { - assert.eq( val, t.count(query) ); - assert( !db.getLastError() ); - assert.eq( val, t.find(query).itcount() ); - assert( !db.getLastError() ); -} - -checkQuery( {a:{$all:[]}}, 0 ); -checkQuery( {a:{$all:[1]}}, 0 ); -checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); - -t.save({}); -checkQuery( {a:{$all:[]}}, 0 ); -checkQuery( {a:{$all:[1]}}, 0 ); -checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); - -t.save({a:1}); -checkQuery( {a:{$all:[]}}, 0 ); -checkQuery( {a:{$all:[1]}}, 1 ); -checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); - -t.save({a:[{b:1}]}); -checkQuery( {a:{$all:[]}}, 0 ); -checkQuery( {a:{$all:[1]}}, 1 ); -checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 1 ); diff --git a/jstests/all5.js b/jstests/all5.js deleted file mode 100644 index a5d9e312292..00000000000 --- a/jstests/all5.js +++ /dev/null @@ -1,28 +0,0 @@ -// Test $all/$elemMatch/null matching - SERVER-4517 - -t = db.jstests_all5; -t.drop(); - -function checkMatch( doc ) { - t.drop(); - t.save( doc ); - assert.eq( 1, t.count( {a:{$elemMatch:{b:null}}} ) ); - assert.eq( 1, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) ); -} - -function checkNoMatch( doc ) { - t.drop(); - t.save( doc ); - assert.eq( 0, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) ); -} - -checkNoMatch( {} ); -checkNoMatch( {a:1} ); - -checkNoMatch( {a:[]} ); -checkNoMatch( {a:[1]} ); - -checkMatch( {a:[{}]} ); -checkMatch( {a:[{c:1}]} ); -checkMatch( {a:[{b:null}]} ); -checkNoMatch( {a:[{b:1}]}, 0 ); diff --git a/jstests/and.js b/jstests/and.js deleted file mode 100644 index 4d8c2cd7d49..00000000000 --- a/jstests/and.js +++ /dev/null @@ -1,85 +0,0 @@ -// Some tests for $and SERVER-1089 - -t = db.jstests_and; -t.drop(); - -t.save( {a:[1,2]} ); -t.save( {a:'foo'} ); - -function check() { - // $and must be an array - assert.throws( function() { t.find( 
{$and:4} ).toArray() } ); - // $and array must not be empty - assert.throws( function() { t.find( {$and:[]} ).toArray() } ); - // $and elements must be objects - assert.throws( function() { t.find( {$and:[4]} ).toArray() } ); - - // Check equality matching - assert.eq( 1, t.count( {$and:[{a:1}]} ) ); - assert.eq( 1, t.count( {$and:[{a:1},{a:2}]} ) ); - assert.eq( 0, t.count( {$and:[{a:1},{a:3}]} ) ); - assert.eq( 0, t.count( {$and:[{a:1},{a:2},{a:3}]} ) ); - assert.eq( 1, t.count( {$and:[{a:'foo'}]} ) ); - assert.eq( 0, t.count( {$and:[{a:'foo'},{a:'g'}]} ) ); - - // Check $and with other fields - assert.eq( 1, t.count( {a:2,$and:[{a:1}]} ) ); - assert.eq( 0, t.count( {a:0,$and:[{a:1}]} ) ); - assert.eq( 0, t.count( {a:2,$and:[{a:0}]} ) ); - assert.eq( 1, t.count( {a:1,$and:[{a:1}]} ) ); - - // Check recursive $and - assert.eq( 1, t.count( {a:2,$and:[{$and:[{a:1}]}]} ) ); - assert.eq( 0, t.count( {a:0,$and:[{$and:[{a:1}]}]} ) ); - assert.eq( 0, t.count( {a:2,$and:[{$and:[{a:0}]}]} ) ); - assert.eq( 1, t.count( {a:1,$and:[{$and:[{a:1}]}]} ) ); - - assert.eq( 1, t.count( {$and:[{a:2},{$and:[{a:1}]}]} ) ); - assert.eq( 0, t.count( {$and:[{a:0},{$and:[{a:1}]}]} ) ); - assert.eq( 0, t.count( {$and:[{a:2},{$and:[{a:0}]}]} ) ); - assert.eq( 1, t.count( {$and:[{a:1},{$and:[{a:1}]}]} ) ); - - // Some of these cases were more important with an alternative $and syntax - // that was rejected, but they're still valid checks. 
- - // Check simple regex - assert.eq( 1, t.count( {$and:[{a:/foo/}]} ) ); - // Check multiple regexes - assert.eq( 1, t.count( {$and:[{a:/foo/},{a:/^f/},{a:/o/}]} ) ); - assert.eq( 0, t.count( {$and:[{a:/foo/},{a:/^g/}]} ) ); - assert.eq( 1, t.count( {$and:[{a:/^f/},{a:'foo'}]} ) ); - // Check regex flags - assert.eq( 0, t.count( {$and:[{a:/^F/},{a:'foo'}]} ) ); - assert.eq( 1, t.count( {$and:[{a:/^F/i},{a:'foo'}]} ) ); - - - - // Check operator - assert.eq( 1, t.count( {$and:[{a:{$gt:0}}]} ) ); - - // Check where - assert.eq( 1, t.count( {a:'foo',$where:'this.a=="foo"'} ) ); - assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) ); - assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) ); - - // Nested where ok - assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}]}) ); - assert.eq( 1, t.count({$and:[{a:'foo'},{$where:'this.a=="foo"'}]}) ); - assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}],$where:'this.a=="foo"'}) ); -} - -check(); -t.ensureIndex( {a:1} ); -check(); -var e = t.find( {$and:[{a:1}]} ).explain(); -assert.eq( 'BtreeCursor a_1', e.cursor ); -assert.eq( [[1,1]], e.indexBounds.a ); - -function checkBounds( query ) { - var e = t.find( query ).explain(true); - printjson(e); - assert.eq( 1, e.n ); -} - -checkBounds( {a:1,$and:[{a:2}]} ); -checkBounds( {$and:[{a:1},{a:2}]} ); diff --git a/jstests/and2.js b/jstests/and2.js deleted file mode 100644 index 0bd13eb7a1d..00000000000 --- a/jstests/and2.js +++ /dev/null @@ -1,27 +0,0 @@ -// Test dollar sign operator with $and SERVER-1089 - -t = db.jstests_and2; - -t.drop(); -t.save( {a:[1,2]} ); -t.update( {a:1}, {$set:{'a.$':5}} ); -assert.eq( [5,2], t.findOne().a ); - -t.drop(); -t.save( {a:[1,2]} ); -t.update( {$and:[{a:1}]}, {$set:{'a.$':5}} ); -assert.eq( [5,2], t.findOne().a ); - -// Make sure dollar sign operator with $and is consistent with no $and case -t.drop(); -t.save( {a:[1,2],b:[3,4]} ); -t.update( {a:1,b:4}, {$set:{'a.$':5}} ); -// Probably not what we want here, 
just trying to make sure $and is consistent -assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] ); - -// Make sure dollar sign operator with $and is consistent with no $and case -t.drop(); -t.save( {a:[1,2],b:[3,4]} ); -t.update( {a:1,$and:[{b:4}]}, {$set:{'a.$':5}} ); -// Probably not what we want here, just trying to make sure $and is consistent -assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] ); diff --git a/jstests/and3.js b/jstests/and3.js deleted file mode 100644 index 036c63c02f0..00000000000 --- a/jstests/and3.js +++ /dev/null @@ -1,67 +0,0 @@ -// Check key match with sub matchers - part of SERVER-3192 - -t = db.jstests_and3; -t.drop(); - -t.save( {a:1} ); -t.save( {a:'foo'} ); - -t.ensureIndex( {a:1} ); - -function checkScanMatch( query, nscannedObjects, n ) { - var e = t.find( query ).hint( {a:1} ).explain(); - assert.eq( nscannedObjects, e.nscannedObjects ); - assert.eq( n, e.n ); -} - -checkScanMatch( {a:/o/}, 1, 1 ); -checkScanMatch( {a:/a/}, 0, 0 ); -checkScanMatch( {a:{$not:/o/}}, 2, 1 ); -checkScanMatch( {a:{$not:/a/}}, 2, 2 ); - -checkScanMatch( {$and:[{a:/o/}]}, 1, 1 ); -checkScanMatch( {$and:[{a:/a/}]}, 0, 0 ); -checkScanMatch( {$and:[{a:{$not:/o/}}]}, 2, 1 ); -checkScanMatch( {$and:[{a:{$not:/a/}}]}, 2, 2 ); -checkScanMatch( {$and:[{a:/o/},{a:{$not:/o/}}]}, 1, 0 ); -checkScanMatch( {$and:[{a:/o/},{a:{$not:/a/}}]}, 1, 1 ); -checkScanMatch( {$or:[{a:/o/}]}, 1, 1 ); -checkScanMatch( {$or:[{a:/a/}]}, 0, 0 ); -checkScanMatch( {$nor:[{a:/o/}]}, 2, 1 ); -checkScanMatch( {$nor:[{a:/a/}]}, 2, 2 ); - -checkScanMatch( {$and:[{$and:[{a:/o/}]}]}, 1, 1 ); -checkScanMatch( {$and:[{$and:[{a:/a/}]}]}, 0, 0 ); -checkScanMatch( {$and:[{$and:[{a:{$not:/o/}}]}]}, 2, 1 ); -checkScanMatch( {$and:[{$and:[{a:{$not:/a/}}]}]}, 2, 2 ); -checkScanMatch( {$and:[{$or:[{a:/o/}]}]}, 1, 1 ); -checkScanMatch( {$and:[{$or:[{a:/a/}]}]}, 0, 0 ); -checkScanMatch( {$or:[{a:{$not:/o/}}]}, 2, 1 ); -checkScanMatch( {$and:[{$or:[{a:{$not:/o/}}]}]}, 2, 1 
); -checkScanMatch( {$and:[{$or:[{a:{$not:/a/}}]}]}, 2, 2 ); -checkScanMatch( {$and:[{$nor:[{a:/o/}]}]}, 2, 1 ); -checkScanMatch( {$and:[{$nor:[{a:/a/}]}]}, 2, 2 ); - -checkScanMatch( {$where:'this.a==1'}, 2, 1 ); -checkScanMatch( {$and:[{$where:'this.a==1'}]}, 2, 1 ); - -checkScanMatch( {a:1,$where:'this.a==1'}, 1, 1 ); -checkScanMatch( {a:1,$and:[{$where:'this.a==1'}]}, 1, 1 ); -checkScanMatch( {$and:[{a:1},{$where:'this.a==1'}]}, 1, 1 ); -checkScanMatch( {$and:[{a:1,$where:'this.a==1'}]}, 1, 1 ); -checkScanMatch( {a:1,$and:[{a:1},{a:1,$where:'this.a==1'}]}, 1, 1 ); - -function checkImpossibleMatch( query ) { - var e = t.find( query ).explain(); - assert.eq( 0, e.n ); - // The explain output should include the indexBounds field. - // The presence of the indexBounds field indicates that the - // query can make use of an index. - assert('indexBounds' in e, 'index bounds are missing'); -} - -// With a single key index, all bounds are utilized. -assert.eq( [[1,1]], t.find( {$and:[{a:1}]} ).explain().indexBounds.a ); -assert.eq( [[1,1]], t.find( {a:1,$and:[{a:1}]} ).explain().indexBounds.a ); -checkImpossibleMatch( {a:1,$and:[{a:2}]} ); -checkImpossibleMatch( {$and:[{a:1},{a:2}]} ); diff --git a/jstests/andor.js b/jstests/andor.js deleted file mode 100644 index f433ade8228..00000000000 --- a/jstests/andor.js +++ /dev/null @@ -1,99 +0,0 @@ -// SERVER-1089 Test and/or nesting - -t = db.jstests_andor; -t.drop(); - -// not ok -function ok( q ) { - assert.eq( 1, t.find( q ).itcount() ); -} - -t.save( {a:1} ); - -test = function() { - - ok( {a:1} ); - - ok( {$and:[{a:1}]} ); - ok( {$or:[{a:1}]} ); - - ok( {$and:[{$and:[{a:1}]}]} ); - ok( {$or:[{$or:[{a:1}]}]} ); - - ok( {$and:[{$or:[{a:1}]}]} ); - ok( {$or:[{$and:[{a:1}]}]} ); - - ok( {$and:[{$and:[{$or:[{a:1}]}]}]} ); - ok( {$and:[{$or:[{$and:[{a:1}]}]}]} ); - ok( {$or:[{$and:[{$and:[{a:1}]}]}]} ); - - ok( {$or:[{$and:[{$or:[{a:1}]}]}]} ); - - // now test $nor - - ok( {$and:[{a:1}]} ); - ok( {$nor:[{a:2}]} ); - - ok( 
{$and:[{$and:[{a:1}]}]} ); - ok( {$nor:[{$nor:[{a:1}]}]} ); - - ok( {$and:[{$nor:[{a:2}]}]} ); - ok( {$nor:[{$and:[{a:2}]}]} ); - - ok( {$and:[{$and:[{$nor:[{a:2}]}]}]} ); - ok( {$and:[{$nor:[{$and:[{a:2}]}]}]} ); - ok( {$nor:[{$and:[{$and:[{a:2}]}]}]} ); - - ok( {$nor:[{$and:[{$nor:[{a:1}]}]}]} ); - -} - -test(); -t.ensureIndex( {a:1} ); -test(); - -// Test an inequality base match. - -test = function() { - - ok( {a:{$ne:2}} ); - - ok( {$and:[{a:{$ne:2}}]} ); - ok( {$or:[{a:{$ne:2}}]} ); - - ok( {$and:[{$and:[{a:{$ne:2}}]}]} ); - ok( {$or:[{$or:[{a:{$ne:2}}]}]} ); - - ok( {$and:[{$or:[{a:{$ne:2}}]}]} ); - ok( {$or:[{$and:[{a:{$ne:2}}]}]} ); - - ok( {$and:[{$and:[{$or:[{a:{$ne:2}}]}]}]} ); - ok( {$and:[{$or:[{$and:[{a:{$ne:2}}]}]}]} ); - ok( {$or:[{$and:[{$and:[{a:{$ne:2}}]}]}]} ); - - ok( {$or:[{$and:[{$or:[{a:{$ne:2}}]}]}]} ); - - // now test $nor - - ok( {$and:[{a:{$ne:2}}]} ); - ok( {$nor:[{a:{$ne:1}}]} ); - - ok( {$and:[{$and:[{a:{$ne:2}}]}]} ); - ok( {$nor:[{$nor:[{a:{$ne:2}}]}]} ); - - ok( {$and:[{$nor:[{a:{$ne:1}}]}]} ); - ok( {$nor:[{$and:[{a:{$ne:1}}]}]} ); - - ok( {$and:[{$and:[{$nor:[{a:{$ne:1}}]}]}]} ); - ok( {$and:[{$nor:[{$and:[{a:{$ne:1}}]}]}]} ); - ok( {$nor:[{$and:[{$and:[{a:{$ne:1}}]}]}]} ); - - ok( {$nor:[{$and:[{$nor:[{a:{$ne:2}}]}]}]} ); - -} - -t.drop(); -t.save( {a:1} ); -test(); -t.ensureIndex( {a:1} ); -test(); diff --git a/jstests/apitest_db.js b/jstests/apitest_db.js deleted file mode 100644 index c734d67bba7..00000000000 --- a/jstests/apitest_db.js +++ /dev/null @@ -1,77 +0,0 @@ -/** - * Tests for the db object enhancement - */ - -assert( "test" == db, "wrong database currently not test" ); - -dd = function( x ){ - //print( x ); -} - -dd( "a" ); - - -dd( "b" ); - -/* - * be sure the public collection API is complete - */ -assert(db.createCollection , "createCollection" ); -assert(db.getProfilingLevel , "getProfilingLevel" ); -assert(db.setProfilingLevel , "setProfilingLevel" ); -assert(db.dbEval , "dbEval" ); -assert(db.group , "group" 
); - -dd( "c" ); - -/* - * test createCollection - */ - -db.getCollection( "test" ).drop(); -db.getCollection( "system.namespaces" ).find().forEach( function(x) { assert(x.name != "test.test"); }); - -dd( "d" ); - -db.createCollection("test"); -var found = false; -db.getCollection( "system.namespaces" ).find().forEach( function(x) { if (x.name == "test.test") found = true; }); -assert(found, "found test.test in system.namespaces"); - -dd( "e" ); - -/* - * profile level - */ - -db.setProfilingLevel(0); -assert(db.getProfilingLevel() == 0, "prof level 0"); - -db.setProfilingLevel(1); -assert(db.getProfilingLevel() == 1, "p1"); - -db.setProfilingLevel(2); -assert(db.getProfilingLevel() == 2, "p2"); - -db.setProfilingLevel(0); -assert(db.getProfilingLevel() == 0, "prof level 0"); - -dd( "f" ); -asserted = false; -try { - db.setProfilingLevel(10); - assert(false); -} -catch (e) { - asserted = true; - assert(e.dbSetProfilingException); -} -assert( asserted, "should have asserted" ); - -dd( "g" ); - - - -assert.eq( "foo" , db.getSisterDB( "foo" ).getName() ) -assert.eq( "foo" , db.getSiblingDB( "foo" ).getName() ) - diff --git a/jstests/apitest_dbcollection.js b/jstests/apitest_dbcollection.js deleted file mode 100644 index 0983b065477..00000000000 --- a/jstests/apitest_dbcollection.js +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Tests for the db collection - */ - - - -/* - * test drop - */ -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).find().length() == 0,1); - -db.getCollection( "test_db" ).save({a:1}); -assert(db.getCollection( "test_db" ).find().length() == 1,2); - -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).find().length() == 0,3); - -/* - * test count - */ - -assert(db.getCollection( "test_db" ).count() == 0,4); -db.getCollection( "test_db" ).save({a:1}); -assert(db.getCollection( "test_db" ).count() == 1,5); -for (i = 0; i < 100; i++) { - db.getCollection( "test_db" ).save({a:1}); -} 
-assert(db.getCollection( "test_db" ).count() == 101,6); -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).count() == 0,7); - -/* - * test clean (not sure... just be sure it doen't blow up, I guess - */ - - db.getCollection( "test_db" ).clean(); - - /* - * test validate - */ - -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).count() == 0,8); - -for (i = 0; i < 100; i++) { - db.getCollection( "test_db" ).save({a:1}); -} - -var v = db.getCollection( "test_db" ).validate(); -if( v.ns != "test.test_db" ) { - print("Error: wrong ns name"); - print(tojson(v)); -} -assert (v.ns == "test.test_db",9); -assert (v.ok == 1,10); - -assert.eq(100,v.nrecords,11) - -/* - * test deleteIndex, deleteIndexes - */ - -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).count() == 0,12); -db.getCollection( "test_db" ).dropIndexes(); -assert(db.getCollection( "test_db" ).getIndexes().length == 0,13); - -db.getCollection( "test_db" ).save({a:10}); -assert(db.getCollection( "test_db" ).getIndexes().length == 1,14); - -db.getCollection( "test_db" ).ensureIndex({a:1}); -db.getCollection( "test_db" ).save({a:10}); - -print( tojson( db.getCollection( "test_db" ).getIndexes() ) ); -assert.eq(db.getCollection( "test_db" ).getIndexes().length , 2,15); - -db.getCollection( "test_db" ).dropIndex({a:1}); -assert(db.getCollection( "test_db" ).getIndexes().length == 1,16); - -db.getCollection( "test_db" ).save({a:10}); -db.getCollection( "test_db" ).ensureIndex({a:1}); -db.getCollection( "test_db" ).save({a:10}); - -assert(db.getCollection( "test_db" ).getIndexes().length == 2,17); - -db.getCollection( "test_db" ).dropIndex("a_1"); -assert.eq( db.getCollection( "test_db" ).getIndexes().length , 1,18); - -db.getCollection( "test_db" ).save({a:10, b:11}); -db.getCollection( "test_db" ).ensureIndex({a:1}); -db.getCollection( "test_db" ).ensureIndex({b:1}); -db.getCollection( "test_db" ).save({a:10, b:12}); - 
-assert(db.getCollection( "test_db" ).getIndexes().length == 3,19); - -db.getCollection( "test_db" ).dropIndex({b:1}); -assert(db.getCollection( "test_db" ).getIndexes().length == 2,20); -db.getCollection( "test_db" ).dropIndex({a:1}); -assert(db.getCollection( "test_db" ).getIndexes().length == 1,21); - -db.getCollection( "test_db" ).save({a:10, b:11}); -db.getCollection( "test_db" ).ensureIndex({a:1}); -db.getCollection( "test_db" ).ensureIndex({b:1}); -db.getCollection( "test_db" ).save({a:10, b:12}); - -assert(db.getCollection( "test_db" ).getIndexes().length == 3,22); - -db.getCollection( "test_db" ).dropIndexes(); -assert(db.getCollection( "test_db" ).getIndexes().length == 1,23); - -db.getCollection( "test_db" ).find(); - -db.getCollection( "test_db" ).drop(); -assert(db.getCollection( "test_db" ).getIndexes().length == 0,24); diff --git a/jstests/apply_ops1.js b/jstests/apply_ops1.js deleted file mode 100644 index 9e6cb39f7c7..00000000000 --- a/jstests/apply_ops1.js +++ /dev/null @@ -1,66 +0,0 @@ - -t = db.apply_ops1; -t.drop(); - -assert.eq( 0 , t.find().count() , "A0" ); -a = db.adminCommand( { applyOps : [ { "op" : "i" , "ns" : t.getFullName() , "o" : { _id : 5 , x : 17 } } ] } ) -assert.eq( 1 , t.find().count() , "A1a" ); -assert.eq( true, a.results[0], "A1b" ); - -o = { _id : 5 , x : 17 } -assert.eq( o , t.findOne() , "A2" ); - -res = db.runCommand( { applyOps : [ - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } -] } ) - -o.x++; -o.x++; - -assert.eq( 1 , t.find().count() , "A3" ); -assert.eq( o , t.findOne() , "A4" ); -assert.eq( true, res.results[0], "A1b" ); -assert.eq( true, res.results[1], "A1b" ); - - -res = db.runCommand( { applyOps : - [ - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x 
: 1 } } } - ] - , - preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ] - } ); - -o.x++; -o.x++; - -assert.eq( 1 , t.find().count() , "B1" ); -assert.eq( o , t.findOne() , "B2" ); -assert.eq( true, res.results[0], "B2a" ); -assert.eq( true, res.results[1], "B2b" ); - - -res = db.runCommand( { applyOps : - [ - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } - ] - , - preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ] - } ); - -assert.eq( 1 , t.find().count() , "B3" ); -assert.eq( o , t.findOne() , "B4" ); - -res = db.runCommand( { applyOps : - [ - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , - { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 6 } , "o" : { $inc : { x : 1 } } } - ] - } ); - -assert.eq( true, res.results[0], "B5" ); -assert.eq( true, res.results[1], "B6" ); - diff --git a/jstests/apply_ops2.js b/jstests/apply_ops2.js deleted file mode 100644 index 1a5923c3465..00000000000 --- a/jstests/apply_ops2.js +++ /dev/null @@ -1,71 +0,0 @@ -//Test applyops upsert flag SERVER-7452 - -var t = db.apply_ops2; -t.drop(); - -assert.eq(0, t.find().count(), "test collection not empty"); - -t.insert({_id:1, x:"init"}); - -//alwaysUpsert = true -print("Testing applyOps with alwaysUpsert = true"); - -var res = db.runCommand({ applyOps: [ - { - op: "u", - ns: t.getFullName(), - o2 : { _id: 1 }, - o: { $set: { x: "upsert=true existing" }} - }, - { - op: "u", - ns: t.getFullName(), - o2: { _id: 2 }, - o: { $set : { x: "upsert=true non-existing" }} - }], alwaysUpsert: true }); - -assert.eq(true, res.results[0], "upsert = true, existing doc update failed"); -assert.eq(true, res.results[1], "upsert = true, nonexisting doc not upserted"); -assert.eq(2, t.find().count(), "2 docs expected after upsert"); - -//alwaysUpsert = false 
-print("Testing applyOps with alwaysUpsert = false"); - -res = db.runCommand({ applyOps: [ - { - op: "u", - ns: t.getFullName(), - o2: { _id: 1 }, - o: { $set : { x: "upsert=false existing" }} - }, - { - op: "u", - ns: t.getFullName(), - o2: { _id: 3 }, - o: { $set: { x: "upsert=false non-existing" }} - }], alwaysUpsert: false }); - -assert.eq(true, res.results[0], "upsert = false, existing doc update failed"); -assert.eq(false, res.results[1], "upsert = false, nonexisting doc upserted"); -assert.eq(2, t.find().count(), "2 docs expected after upsert failure"); - -//alwaysUpsert not specified, should default to true -print("Testing applyOps with default alwaysUpsert"); - -res = db.runCommand({ applyOps: [ - { - op: "u", - ns: t.getFullName(), - o2: { _id: 1 }, - o: { $set: { x: "upsert=default existing" }} - }, - { - op: "u", - ns: t.getFullName(), - o2: { _id: 4 }, - o: { $set: { x: "upsert=defaults non-existing" }} - }]}); - -assert.eq(true, res.results[0], "default upsert, existing doc update failed"); -assert.eq(true, res.results[1], "default upsert, nonexisting doc not upserted"); -assert.eq(3, t.find().count(), "2 docs expected after upsert failure"); diff --git a/jstests/array1.js b/jstests/array1.js deleted file mode 100644 index 4409b7bb4d3..00000000000 --- a/jstests/array1.js +++ /dev/null @@ -1,14 +0,0 @@ -t = db.array1 -t.drop() - -x = { a : [ 1 , 2 ] }; - -t.save( { a : [ [1,2] ] } ); -assert.eq( 1 , t.find( x ).count() , "A" ); - -t.save( x ); -delete x._id; -assert.eq( 2 , t.find( x ).count() , "B" ); - -t.ensureIndex( { a : 1 } ); -assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146 diff --git a/jstests/array3.js b/jstests/array3.js deleted file mode 100644 index 3d053f99417..00000000000 --- a/jstests/array3.js +++ /dev/null @@ -1,8 +0,0 @@ - -assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" ) -assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" ) - -arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ] -assert.eq( 5 , Array.avg( arr ) , "C" ) -assert.eq( 2 , 
Array.stdDev( arr ) , "D" ) - diff --git a/jstests/array4.js b/jstests/array4.js deleted file mode 100644 index 1053e160f11..00000000000 --- a/jstests/array4.js +++ /dev/null @@ -1,30 +0,0 @@ - -t = db.array4; -t.drop(); - -t.insert({"a": ["1", "2", "3"]}); -t.insert({"a" : ["2", "1"]}); - -var x = {'a.0' : /1/}; - -assert.eq(t.count(x), 1); - -assert.eq(t.findOne(x).a[0], 1); -assert.eq(t.findOne(x).a[1], 2); - -t.drop(); - -t.insert({"a" : {"0" : "1"}}); -t.insert({"a" : ["2", "1"]}); - -assert.eq(t.count(x), 1); -assert.eq(t.findOne(x).a[0], 1); - -t.drop(); - -t.insert({"a" : ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]}); -t.insert({"a" : ["2", "1"]}); - -x = {"a.12" : /2/}; -assert.eq(t.count(x), 1); -assert.eq(t.findOne(x).a[0], 0); diff --git a/jstests/array_match1.js b/jstests/array_match1.js deleted file mode 100644 index f764fb913b1..00000000000 --- a/jstests/array_match1.js +++ /dev/null @@ -1,31 +0,0 @@ - -t = db.array_match1 -t.drop(); - -t.insert( { _id : 1 , a : [ 5 , 5 ] } ) -t.insert( { _id : 2 , a : [ 6 , 6 ] } ) -t.insert( { _id : 3 , a : [ 5 , 5 ] } ) - -function test( f , m ){ - var q = {}; - - q[f] = [5,5]; - assert.eq( 2 , t.find( q ).itcount() , m + "1" ) - - q[f] = [6,6]; - assert.eq( 1 , t.find( q ).itcount() , m + "2" ) -} - -test( "a" , "A" ); -t.ensureIndex( { a : 1 } ) -test( "a" , "B" ); - -t.drop(); - -t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } ) -t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } ) -t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } ) - -test( "a.b" , "C" ); -t.ensureIndex( { a : 1 } ) -test( "a.b" , "D" ); diff --git a/jstests/array_match2.js b/jstests/array_match2.js deleted file mode 100644 index d254b0a3fdd..00000000000 --- a/jstests/array_match2.js +++ /dev/null @@ -1,20 +0,0 @@ - -t = db.jstests_array_match2; -t.drop(); - -t.save( {a:[{1:4},5]} ); -// When the array index is the last field, both of these match types work. 
-assert.eq( 1, t.count( {'a.1':4} ) ); -assert.eq( 1, t.count( {'a.1':5} ) ); - -t.remove({}); -// When the array index is not the last field, only one of the match types works. -t.save( {a:[{1:{foo:4}},{foo:5}]} ); -assert.eq( 1, t.count( {'a.1.foo':4} ) ); -assert.eq( 1, t.count( {'a.1.foo':5} ) ); - -// Same issue with the $exists operator -t.remove({}); -t.save( {a:[{1:{foo:4}},{}]} ); -assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.1.foo':{$exists:true}} ) ); diff --git a/jstests/array_match3.js b/jstests/array_match3.js deleted file mode 100644 index c8653430770..00000000000 --- a/jstests/array_match3.js +++ /dev/null @@ -1,13 +0,0 @@ -// SERVER-2902 Test indexing of numerically referenced array elements. - -t = db.jstests_array_match3; -t.drop(); - -// Test matching numericallly referenced array element. -t.save( {a:{'0':5}} ); -t.save( {a:[5]} ); -assert.eq( 2, t.count( {'a.0':5} ) ); - -// Test with index. -t.ensureIndex( {'a.0':1} ); -assert.eq( 2, t.count( {'a.0':5} ) ); diff --git a/jstests/array_match4.js b/jstests/array_match4.js deleted file mode 100644 index b4cdec5143a..00000000000 --- a/jstests/array_match4.js +++ /dev/null @@ -1,30 +0,0 @@ -var t = db.array_match4; - -t.drop(); -t.save({a: [1, 2]}); - -var query_gte = {a: {$gte: [1, 2]}}; - -// -// without index -// - -assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (without index)'); -assert.eq(1, t.find(query_gte).itcount(), '$gte (without index)'); - -// -// with index -// - -t.ensureIndex({a: 1}); -assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (with index)'); - -// display explain output (for index bounds) -var explain = t.find(query_gte).explain(); -print('explain for ' + tojson(query_gte, '', true) + ' = ' + tojson(explain)); - -// number of documents returned by indexes query should be consistent -// with non-indexed case. -// XXX: The following assertion documents current behavior. -// XXX: 2.4 and 2.6 both return 0 documents. 
-assert.eq(0, t.find(query_gte).itcount(), '$gte (with index)'); diff --git a/jstests/arrayfind1.js b/jstests/arrayfind1.js deleted file mode 100644 index 539fa6193a1..00000000000 --- a/jstests/arrayfind1.js +++ /dev/null @@ -1,40 +0,0 @@ - -t = db.arrayfind1; -t.drop(); - -t.save( { a : [ { x : 1 } ] } ) -t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } ) -t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } ) - -function test( exptected , q , name ){ - assert.eq( exptected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" ); - assert.eq( exptected , t.find( q ).count() , name + " " + tojson( q ) + " count" ); -} - -test( 3 , {} , "A1" ); -test( 1 , { "a.y" : 2 } , "A2" ); -test( 1 , { "a" : { x : 1 } } , "A3" ); -test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377 - - -t.save( { a : [ { x : 2 } ] } ) -t.save( { a : [ { x : 3 } ] } ) -t.save( { a : [ { x : 4 } ] } ) - -assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" ); -assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" ); - -t.ensureIndex( { "a.x" : 1 } ); -assert( t.find( { "a" : { $elemMatch : { x : 1 } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "C1" ); - -assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" ); - -t.find( { "a.x" : 1 } ).count(); -t.find( { "a.x" : { $gt : 1 } } ).count(); - -res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain() -assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" ); -assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" ); - -assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" ); -assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" ); diff --git a/jstests/arrayfind2.js b/jstests/arrayfind2.js deleted file mode 100644 index c6a78042c3d..00000000000 --- a/jstests/arrayfind2.js +++ /dev/null @@ -1,29 +0,0 @@ - -t = 
db.arrayfind2; -t.drop(); - -function go( prefix ){ - assert.eq( 3 , t.count() , prefix + " A1" ); - assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" ); - assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" ); - assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } , - { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" ); - - assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ) } ); - assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ) } ); - -} - -t.save( { a : [ { x : 1 } , { x : 5 } ] } ) -t.save( { a : [ { x : 3 } , { x : 5 } ] } ) -t.save( { a : [ { x : 3 } , { x : 6 } ] } ) - -go( "no index" ); -t.ensureIndex( { a : 1 } ); -go( "index(a)" ); - -t.ensureIndex( { "a.x": 1 } ); - -assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds ); -// only first $elemMatch used to find bounds -assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } }, { $elemMatch : { y : 5 } } ] } } ).explain().indexBounds ); diff --git a/jstests/arrayfind3.js b/jstests/arrayfind3.js deleted file mode 100644 index de038c84264..00000000000 --- a/jstests/arrayfind3.js +++ /dev/null @@ -1,16 +0,0 @@ - -t = db.arrayfind3; -t.drop() - -t.save({a:[1,2]}) -t.save({a:[1, 2, 6]}) -t.save({a:[1, 4, 6]}) - - -assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "A1" ) -assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "A2" ) - -t.ensureIndex( { a : 1 } ) - -assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "B1" ); -assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "B2" ) diff --git a/jstests/arrayfind4.js b/jstests/arrayfind4.js deleted file mode 100644 index 17b02c8886b..00000000000 --- a/jstests/arrayfind4.js +++ /dev/null @@ -1,22 +0,0 
@@ -// Test query empty array SERVER-2258 - -t = db.jstests_arrayfind4; -t.drop(); - -t.save( {a:[]} ); -t.ensureIndex( {a:1} ); - -assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() ); - -assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() ); - -t.remove({}); -t.save( {a:[[]]} ); - -assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() ); - -assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() ); diff --git a/jstests/arrayfind5.js b/jstests/arrayfind5.js deleted file mode 100644 index 9ff6e2b8a5f..00000000000 --- a/jstests/arrayfind5.js +++ /dev/null @@ -1,23 +0,0 @@ -// Test indexed elemmatch of missing field. - -t = db.jstests_arrayfind5; -t.drop(); - -function check( nullElemMatch ) { - assert.eq( 1, t.find( {'a.b':1} ).itcount() ); - assert.eq( 1, t.find( {a:{$elemMatch:{b:1}}} ).itcount() ); - assert.eq( nullElemMatch ? 1 : 0 , t.find( {'a.b':null} ).itcount() ); - assert.eq( nullElemMatch ? 1 : 0, t.find( {a:{$elemMatch:{b:null}}} ).itcount() ); // see SERVER-3377 -} - -t.save( {a:[{},{b:1}]} ); -check( true ); -t.ensureIndex( {'a.b':1} ); -check( true ); - -t.drop(); - -t.save( {a:[5,{b:1}]} ); -check( false ); -t.ensureIndex( {'a.b':1} ); -check( false ); diff --git a/jstests/arrayfind6.js b/jstests/arrayfind6.js deleted file mode 100644 index f4531cea96a..00000000000 --- a/jstests/arrayfind6.js +++ /dev/null @@ -1,26 +0,0 @@ -// Check index bound determination for $not:$elemMatch queries. 
SERVER-5740 - -t = db.jstests_arrayfind6; -t.drop(); - -t.save( { a:[ { b:1, c:2 } ] } ); - -function checkElemMatchMatches() { - assert.eq( 1, t.count( { a:{ $elemMatch:{ b:1, c:2 } } } ) ); - assert.eq( 0, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:2 } } } } ) ); - assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:3 } } } } ) ); - assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:1 }, c:3 } } } } ) ); - // Index bounds must be determined for $not:$elemMatch, not $not:$ne. In this case if index - // bounds are determined for $not:$ne, the a.b index will be constrained to the interval [2,2] - // and the saved document will not be matched as it should. - assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ) ); -} - -checkElemMatchMatches(); -t.ensureIndex( { 'a.b':1 } ); -checkElemMatchMatches(); - -// We currently never use an index for negations of -// ELEM_MATCH_OBJECT expressions. -var explain = t.find( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ).explain(); -assert.eq( "BasicCursor", explain.cursor ); diff --git a/jstests/arrayfind7.js b/jstests/arrayfind7.js deleted file mode 100644 index 7c44de1dc1d..00000000000 --- a/jstests/arrayfind7.js +++ /dev/null @@ -1,52 +0,0 @@ -// Nested $elemMatch clauses. SERVER-5741 - -t = db.jstests_arrayfind7; -t.drop(); - -t.save( { a:[ { b:[ { c:1, d:2 } ] } ] } ); - -function checkElemMatchMatches() { - assert.eq( 1, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:1, d:2 } } } } } ) ); -} - -// The document is matched using nested $elemMatch expressions, with and without an index. -checkElemMatchMatches(); -t.ensureIndex( { 'a.b.c':1 } ); -checkElemMatchMatches(); - -function checkElemMatch( index, document, query ) { - // The document is matched without an index, and with single and multi key indexes. 
- t.drop(); - t.save( document ); - assert.eq( 1, t.count( query ) ); - t.ensureIndex( index ); - assert.eq( 1, t.count( query ) ); - t.save( { a:{ b:{ c:[ 10, 11 ] } } } ); // Make the index multikey. - assert.eq( 1, t.count( query ) ); -} - -// Two constraints within a nested $elemMatch expression. -checkElemMatch( { 'a.b.c':1 }, - { a:[ { b:[ { c:1 } ] } ] }, - { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } }); - -// Two constraints within a nested $elemMatch expression, one of which contains the other. -checkElemMatch( { 'a.b.c':1 }, - { a:[ { b:[ { c:2 } ] } ] }, - { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $in:[2] } } } } } }); - -// Two nested $elemMatch expressions. -checkElemMatch( { 'a.d.e':1, 'a.b.c':1 }, - { a:[ { b:[ { c:1 } ], d:[ { e:1 } ] } ] }, - { a:{ $elemMatch:{ d:{ $elemMatch:{ e:{ $lte:1 } } }, - b:{ $elemMatch:{ c:{ $gte:1 } } } } } }); - -// A non $elemMatch expression and a nested $elemMatch expression. -checkElemMatch( { 'a.x':1, 'a.b.c':1 }, - { a:[ { b:[ { c:1 } ], x:1 } ] }, - { 'a.x':1, a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1 } } } } } }); - -// $elemMatch is applied directly to a top level field. -checkElemMatch( { 'a.b.c':1 }, - { a:[ { b:[ { c:[ 1 ] } ] } ] }, - { a:{ $elemMatch:{ 'b.c':{ $elemMatch:{ $gte:1, $lte:1 } } } } }); diff --git a/jstests/arrayfind8.js b/jstests/arrayfind8.js deleted file mode 100644 index 07d44ace26e..00000000000 --- a/jstests/arrayfind8.js +++ /dev/null @@ -1,175 +0,0 @@ -// Matching behavior for $elemMatch applied to a top level element. -// SERVER-1264 -// SERVER-4180 - -t = db.jstests_arrayfind8; -t.drop(); - -function debug( x ) { - if ( debuggingEnabled = false ) { - printjson( x ); - } -} - -/** Set index state for the test. 
*/ -function setIndexKey( key ) { - indexKey = key; - indexSpec = {}; - indexSpec[ key ] = 1; -} - -setIndexKey( 'a' ); - -function indexBounds( query ) { - debug( query ); - debug( t.find( query ).hint( indexSpec ).explain() ); - return t.find( query ).hint( indexSpec ).explain().indexBounds[ indexKey ]; -} - -/** Check that the query results match the documents in the 'expected' array. */ -function assertResults( expected, query, context ) { - debug( query ); - assert.eq( expected.length, t.count( query ), 'unexpected count in ' + context ); - results = t.find( query ).toArray(); - for( i in results ) { - found = false; - for( j in expected ) { - if ( friendlyEqual( expected[ j ], results[ i ].a ) ) { - found = true; - } - } - assert( found, 'unexpected result ' + results[ i ] + ' in ' + context ); - } -} - -/** - * Check matching for different query types. - * @param bothMatch - document matched by both standardQuery and elemMatchQuery - * @param elemMatch - document matched by elemMatchQuery but not standardQuery - * @param notElemMatch - document matched by standardQuery but not elemMatchQuery - */ -function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context ) { - - function mayPush( arr, elt ) { - if ( elt ) { - arr.push( elt ); - } - } - - expectedStandardQueryResults = []; - mayPush( expectedStandardQueryResults, bothMatch ); - mayPush( expectedStandardQueryResults, nonElemMatch ); - assertResults( expectedStandardQueryResults, standardQuery, context + ' standard query' ); - - expectedElemMatchQueryResults = []; - mayPush( expectedElemMatchQueryResults, bothMatch ); - mayPush( expectedElemMatchQueryResults, elemMatch ); - assertResults( expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query' ); -} - -/** - * Check matching and for different query types. 
- * @param subQuery - part of a query, to be provided as is for a standard query and within a - * $elemMatch clause for a $elemMatch query - * @param bothMatch - document matched by both standardQuery and elemMatchQuery - * @param elemMatch - document matched by elemMatchQuery but not standardQuery - * @param notElemMatch - document matched by standardQuery but not elemMatchQuery - * @param additionalConstraints - additional query parameters not generated from @param subQuery - */ -function checkQuery( subQuery, bothMatch, elemMatch, nonElemMatch, - additionalConstraints ) { - t.drop(); - additionalConstraints = additionalConstraints || {}; - - // Construct standard and elemMatch queries from subQuery. - firstSubQueryKey = Object.keySet( subQuery )[ 0 ]; - if ( firstSubQueryKey[ 0 ] == '$' ) { - standardQuery = { $and:[ { a:subQuery }, additionalConstraints ] }; - } - else { - // If the subQuery contains a field rather than operators, append to the 'a' field. - modifiedSubQuery = {}; - modifiedSubQuery[ 'a.' + firstSubQueryKey ] = subQuery[ firstSubQueryKey ]; - standardQuery = { $and:[ modifiedSubQuery, additionalConstraints ] }; - } - elemMatchQuery = { $and:[ { a:{ $elemMatch:subQuery } }, additionalConstraints ] }; - debug( elemMatchQuery ); - - function maySave( aValue ) { - if ( aValue ) { - debug( { a:aValue } ); - t.save( { a:aValue } ); - } - } - - // Save all documents and check matching without indexes. - maySave( bothMatch ); - maySave( elemMatch ); - maySave( nonElemMatch ); - - checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed' ); - - // Check matching and index bounds for a single key index. - - t.drop(); - maySave( bothMatch ); - maySave( elemMatch ); - // The nonElemMatch document is not tested here, as it will often make the index multikey. 
- t.ensureIndex( indexSpec ); - checkMatch( bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index' ); - - // Check matching and index bounds for a multikey index. - - // Now the nonElemMatch document is tested. - maySave( nonElemMatch ); - // Force the index to be multikey. - t.save( { a:[ -1, -2 ] } ); - t.save( { a:{ b:[ -1, -2 ] } } ); - checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, - 'multikey index' ); -} - -maxNumber = Infinity; - -// Basic test. -checkQuery( { $gt:4 }, [ 5 ] ); - -// Multiple constraints within a $elemMatch clause. -checkQuery( { $gt:4, $lt:6 }, [ 5 ], null, [ 3, 7 ] ); -checkQuery( { $gt:4, $not:{ $gte:6 } }, [ 5 ] ); -checkQuery( { $gt:4, $not:{ $ne:6 } }, [ 6 ] ); -checkQuery( { $gte:5, $lte:5 }, [ 5 ], null, [ 4, 6 ] ); -checkQuery( { $in:[ 4, 6 ], $gt:5 }, [ 6 ], null, [ 4, 7 ] ); -checkQuery( { $regex:'^a' }, [ 'a' ] ); - -// Some constraints within a $elemMatch clause and other constraints outside of it. -checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $lt:6 } } ); -checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $lte:5 } } ); -checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $gt:5 } } ); - -// Constraints in different $elemMatch clauses. 
-checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } ); -checkQuery( { $gt:4 }, [ 3, 7 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } ); -checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lte:5 } } } ); -checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $elemMatch:{ $gt:5 } } } ); - -// TODO SERVER-1264 -if ( 0 ) { -checkQuery( { $elemMatch:{ $in:[ 5 ] } }, null, [[ 5 ]], [ 5 ], null ); -} - -setIndexKey( 'a.b' ); -checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:1 } ]], - [ { b:1 } ], null ); -checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:[ 0, 2 ] } ]], - [ { b:[ 0, 2 ] } ], null ); - -// Constraints for a top level (SERVER-1264 style) $elemMatch nested within a non top level -// $elemMatch. -checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:1 } } }, [ { b:[ 1 ] } ] ); -checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 1 ] } ] ); - -checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 2 ] } ], null, - null, { 'a.b':{ $in:[ 2, 5 ] } } ); -checkQuery( { b:{ $elemMatch:{ $in:[ 1, 2 ] }, $in:[ 2, 3 ] } }, - [ { b:[ 2 ] } ], null, [ { b:[ 1 ] }, { b:[ 3 ] } ], null ); diff --git a/jstests/arrayfind9.js b/jstests/arrayfind9.js deleted file mode 100644 index 4ee14c56580..00000000000 --- a/jstests/arrayfind9.js +++ /dev/null @@ -1,34 +0,0 @@ -// Assorted $elemMatch behavior checks. - -t = db.jstests_arrayfind9; -t.drop(); - -// Top level field $elemMatch:$not matching -t.save( { a:[ 1 ] } ); -assert.eq( 1, t.count( { a:{ $elemMatch:{ $not:{ $ne:1 } } } } ) ); - -// Top level field object $elemMatch matching. -t.drop(); -t.save( { a:[ {} ] } ); -assert.eq( 1, t.count( { a:{ $elemMatch:{ $gte:{} } } } ) ); - -// Top level field array $elemMatch matching. -t.drop(); -t.save( { a:[ [] ] } ); -assert.eq( 1, t.count( { a:{ $elemMatch:{ $in:[ [] ] } } } ) ); - -// Matching by array index. 
-t.drop(); -t.save( { a:[ [ 'x' ] ] } ); -assert.eq( 1, t.count( { a:{ $elemMatch:{ '0':'x' } } } ) ); - -// Matching multiple values of a nested array. -t.drop(); -t.save( { a:[ { b:[ 0, 2 ] } ] } ); -t.ensureIndex( { a:1 } ); -t.ensureIndex( { 'a.b':1 } ); -plans = [ { $natural:1 }, { a:1 }, { 'a.b':1 } ]; -for( i in plans ) { - p = plans[ i ]; - assert.eq( 1, t.find( { a:{ $elemMatch:{ b:{ $gte:1, $lte:1 } } } } ).hint( p ).itcount() ); -} diff --git a/jstests/arrayfinda.js b/jstests/arrayfinda.js deleted file mode 100644 index 179d3985580..00000000000 --- a/jstests/arrayfinda.js +++ /dev/null @@ -1,21 +0,0 @@ -// Assorted $elemMatch matching behavior checks. - -t = db.jstests_arrayfinda; -t.drop(); - -// $elemMatch only matches elements within arrays (a descriptive, not a normative test). -t.save( { a:[ { b:1 } ] } ); -t.save( { a:{ b:1 } } ); - -function assertExpectedMatch( cursor ) { - assert.eq( [ { b:1 } ], cursor.next().a ); - assert( !cursor.hasNext() ); -} - -assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ) ); -assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ) ); - -// $elemMatch is not used to perform key matching. SERVER-6001 -t.ensureIndex( { a:1 } ); -assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ).hint( { a:1 } ) ); -assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ).hint( { a:1 } ) ); diff --git a/jstests/auth1.js b/jstests/auth1.js deleted file mode 100644 index 4ab26e8d2e5..00000000000 --- a/jstests/auth1.js +++ /dev/null @@ -1,54 +0,0 @@ -var mydb = db.getSiblingDB('auth1_db'); -mydb.dropAllUsers(); - -pass = "a" + Math.random(); -//print( "password [" + pass + "]" ); - -mydb.createUser({user: "eliot" ,pwd: pass, roles: jsTest.basicUserRoles}); - -assert( mydb.auth( "eliot" , pass ) , "auth failed" ); -assert( ! mydb.auth( "eliot" , pass + "a" ) , "auth should have failed" ); - -pass2 = "b" + Math.random(); -mydb.changeUserPassword("eliot", pass2); - -assert( ! 
mydb.auth( "eliot" , pass ) , "failed to change password failed" ); -assert( mydb.auth( "eliot" , pass2 ) , "new password didn't take" ); - -assert( mydb.auth( "eliot" , pass2 ) , "what?" ); -mydb.dropUser( "eliot" ); -assert( ! mydb.auth( "eliot" , pass2 ) , "didn't drop user" ); - - -var a = mydb.getMongo().getDB( "admin" ); -a.dropAllUsers(); -pass = "c" + Math.random(); -a.createUser({user: "super", pwd: pass, roles: jsTest.adminUserRoles}); -assert( a.auth( "super" , pass ) , "auth failed" ); -assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" ); - -mydb.dropAllUsers(); -pass = "a" + Math.random(); - -mydb.createUser({user: "eliot" , pwd: pass, roles: jsTest.basicUserRoles}); - -assert.commandFailed( mydb.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) ); - -// check sanity check SERVER-3003 - -var before = a.system.users.count({db: mydb.getName()}); - -assert.throws( function(){ - mydb.createUser({ user: "" , pwd: "abc", roles: jsTest.basicUserRoles}); -} , null , "C1" ) -assert.throws( function(){ - mydb.createUser({ user: "abc" , pwd: "", roles: jsTest.basicUserRoles}); -} , null , "C2" ) - - -var after = a.system.users.count({db: mydb.getName()}); -assert( before > 0 , "C3" ) -assert.eq( before , after , "C4" ) - -// Clean up after ourselves so other tests using authentication don't get messed up. 
-mydb.dropAllUsers() diff --git a/jstests/auth2.js b/jstests/auth2.js deleted file mode 100644 index 9c2b38f682d..00000000000 --- a/jstests/auth2.js +++ /dev/null @@ -1,9 +0,0 @@ -// just make sure logout doesn't break anything - -// SERVER-724 -db.runCommand({logout : 1}); -x = db.runCommand({logout : 1}); -assert.eq( 1 , x.ok , "A" ) - -x = db.logout(); -assert.eq( 1 , x.ok , "B" ) diff --git a/jstests/auth_copydb.js b/jstests/auth_copydb.js deleted file mode 100644 index f04cd0b0d29..00000000000 --- a/jstests/auth_copydb.js +++ /dev/null @@ -1,19 +0,0 @@ -a = db.getSisterDB( "copydb2-test-a" ); -b = db.getSisterDB( "copydb2-test-b" ); - -a.dropDatabase(); -b.dropDatabase(); -a.dropAllUsers(); -b.dropAllUsers(); - -a.foo.save( { a : 1 } ); - -a.createUser({user: "chevy" , pwd: "chase", roles: jsTest.basicUserRoles}); - -assert.eq( 1 , a.foo.count() , "A" ); -assert.eq( 0 , b.foo.count() , "B" ); - -// SERVER-727 -a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" ); -assert.eq( 1 , a.foo.count() , "C" ); -assert.eq( 1 , b.foo.count() , "D" ); diff --git a/jstests/autoid.js b/jstests/autoid.js deleted file mode 100644 index 6c8062fd093..00000000000 --- a/jstests/autoid.js +++ /dev/null @@ -1,11 +0,0 @@ -f = db.jstests_autoid; -f.drop(); - -f.save( {z:1} ); -a = f.findOne( {z:1} ); -f.update( {z:1}, {z:2} ); -b = f.findOne( {z:2} ); -assert.eq( a._id.str, b._id.str ); -c = f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} ); -c = f.findOne( {} ); -assert.eq( a._id.str, c._id.str ); diff --git a/jstests/bad_index_plugin.js b/jstests/bad_index_plugin.js deleted file mode 100644 index 370eca4c4d7..00000000000 --- a/jstests/bad_index_plugin.js +++ /dev/null @@ -1,11 +0,0 @@ -// SERVER-5826 ensure you can't build an index with a non-existent plugin -t = db.bad_index_plugin; - -assert.eq(t.ensureIndex({good: 1}), undefined); -assert.eq(t.getIndexes().length, 2); // good + _id - -err = t.ensureIndex({bad: 'bad'}); -assert.neq(err, undefined); -assert(err.code >= 0); - 
-assert.eq(t.getIndexes().length, 2); // good + _id (no bad) diff --git a/jstests/basic1.js b/jstests/basic1.js deleted file mode 100644 index e5fa577f0b2..00000000000 --- a/jstests/basic1.js +++ /dev/null @@ -1,21 +0,0 @@ - -t = db.getCollection( "basic1" ); -t.drop(); - -o = { a : 1 }; -t.save( o ); - -assert.eq( 1 , t.findOne().a , "first" ); -assert( o._id , "now had id" ); -assert( o._id.str , "id not a real id" ); - -o.a = 2; -t.save( o ); - -assert.eq( 2 , t.findOne().a , "second" ); - -assert(t.validate().valid); - -// not a very good test of currentOp, but tests that it at least -// is sort of there: -assert( db.currentOp().inprog != null ); diff --git a/jstests/basic2.js b/jstests/basic2.js deleted file mode 100644 index aaa3de4366e..00000000000 --- a/jstests/basic2.js +++ /dev/null @@ -1,16 +0,0 @@ - -t = db.getCollection( "basic2" ); -t.drop(); - -o = { n : 2 }; -t.save( o ); - -assert.eq( 1 , t.find().count() ); - -assert.eq( 2 , t.find( o._id ).toArray()[0].n ); -assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n ); - -t.remove( o._id ); -assert.eq( 0 , t.find().count() ); - -assert(t.validate().valid); diff --git a/jstests/basic3.js b/jstests/basic3.js deleted file mode 100644 index d778974f64a..00000000000 --- a/jstests/basic3.js +++ /dev/null @@ -1,45 +0,0 @@ -// Tests that "." cannot be in field names -t = db.getCollection( "foo_basic3" ); -t.drop() - -//more diagnostics on bad save, if exception fails -doBadSave = function(param) { - print("doing save with " + tojson(param)) - t.save(param); - // Should not get here. - printjson(db.getLastErrorObj()); -} - -//more diagnostics on bad save, if exception fails -doBadUpdate = function(query, update) { - print("doing update with " + tojson(query) + " " + tojson(update)) - t.update(query, update); - // Should not get here. - printjson(db.getLastErrorObj()); -} - -assert.throws(doBadSave, [{"a.b":5}], ". 
in names aren't allowed doesn't work"); - -assert.throws(doBadSave, - [{ "x" : { "a.b" : 5 } }], - ". in embedded names aren't allowed doesn't work"); - -// following tests make sure update keys are checked -t.save({"a": 0,"b": 1}) - -assert.throws(doBadUpdate, [{a:0}, { "b.b" : 1 }], - "must deny '.' in key of update"); - -// upsert with embedded doc -assert.throws(doBadUpdate, [{a:10}, { c: {"b.b" : 1 }}], - "must deny embedded '.' in key of update"); - -// if it is a modifier, it should still go through -t.update({"a": 0}, {$set: { "c.c": 1}}) -t.update({"a": 0}, {$inc: { "c.c": 1}}) - -// edge cases -assert.throws(doBadUpdate, [{a:0}, { "":{"b.b" : 1} }], - "must deny '' embedded '.' in key of update"); -t.update({"a": 0}, {}) - diff --git a/jstests/basic4.js b/jstests/basic4.js deleted file mode 100644 index 0cf7a261e63..00000000000 --- a/jstests/basic4.js +++ /dev/null @@ -1,12 +0,0 @@ -t = db.getCollection( "basic4" ); -t.drop(); - -t.save( { a : 1 , b : 1.0 } ); - -assert( t.findOne() ); -assert( t.findOne( { a : 1 } ) ); -assert( t.findOne( { a : 1.0 } ) ); -assert( t.findOne( { b : 1 } ) ); -assert( t.findOne( { b : 1.0 } ) ); - -assert( ! 
t.findOne( { b : 2.0 } ) ); diff --git a/jstests/basic5.js b/jstests/basic5.js deleted file mode 100644 index bfa40fb8f5e..00000000000 --- a/jstests/basic5.js +++ /dev/null @@ -1,6 +0,0 @@ -t = db.getCollection( "basic5" ); -t.drop(); - -t.save( { a : 1 , b : [ 1 , 2 , 3 ] } ); -assert.eq( 3 , t.findOne().b.length ); - diff --git a/jstests/basic6.js b/jstests/basic6.js deleted file mode 100644 index e0cd6f1586e..00000000000 --- a/jstests/basic6.js +++ /dev/null @@ -1,8 +0,0 @@ - -t = db.basic6; - -t.findOne(); -t.a.findOne(); - -assert.eq( "test.basic6" , t.toString() ); -assert.eq( "test.basic6.a" , t.a.toString() ); diff --git a/jstests/basic7.js b/jstests/basic7.js deleted file mode 100644 index 7bb0d470e82..00000000000 --- a/jstests/basic7.js +++ /dev/null @@ -1,11 +0,0 @@ - -t = db.basic7; -t.drop(); - -t.save( { a : 1 } ) -t.ensureIndex( { a : 1 } ); - -assert.eq( t.find().toArray()[0].a , 1 ); -assert.eq( t.find().arrayAccess(0).a , 1 ); -assert.eq( t.find()[0].a , 1 ); - diff --git a/jstests/basic8.js b/jstests/basic8.js deleted file mode 100644 index 513da0d15d1..00000000000 --- a/jstests/basic8.js +++ /dev/null @@ -1,11 +0,0 @@ - -t = db.basic8; -t.drop(); - -t.save( { a : 1 } ); -o = t.findOne(); -o.b = 2; -t.save( o ); - -assert.eq( 1 , t.find().count() , "A" ); -assert.eq( 2 , t.findOne().b , "B" ); diff --git a/jstests/basic9.js b/jstests/basic9.js deleted file mode 100644 index b8308fba7d0..00000000000 --- a/jstests/basic9.js +++ /dev/null @@ -1,19 +0,0 @@ -// Tests that $ field names are not allowed, but you can use a $ anywhere else. -t = db.getCollection( "foo_basic9" ); -t.drop() - -// more diagnostics on bad save, if exception fails -doBadSave = function(param) { - print("doing save with " + tojson(param)) - t.save(param); - // Should not get here. 
- printjson(db.getLastErrorObj()); -} - -t.save({foo$foo:5}); -t.save({foo$:5}); - -assert.throws(doBadSave, [{$foo:5}], "key names aren't allowed to start with $ doesn't work"); -assert.throws(doBadSave, - [{x:{$foo:5}}], - "embedded key names aren't allowed to start with $ doesn't work"); diff --git a/jstests/basica.js b/jstests/basica.js deleted file mode 100644 index 0cc364beb42..00000000000 --- a/jstests/basica.js +++ /dev/null @@ -1,33 +0,0 @@ - -t = db.basica; - - -t.drop(); - -t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } ); - -x = t.findOne(); -x.b["0"].x = 4; -x.b["0"].z = 4; -x.b[0].m = 9; -x.b[0]["asd"] = 11; -x.a = 2; -x.z = 11; - -tojson( x ); -t.save( x ); -assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" ); - -// ----- - -t.drop(); - -t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } ); - -x = t.findOne(); -x.b["0"].z = 4; - -//printjson( x ); -t.save( x ); -assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" ); - diff --git a/jstests/basicb.js b/jstests/basicb.js deleted file mode 100644 index 0070f70068f..00000000000 --- a/jstests/basicb.js +++ /dev/null @@ -1,7 +0,0 @@ - -t = db.basicb; -t.drop(); - -assert.throws( "t.insert( { '$a' : 5 } );" ); -t.insert( { '$a' : 5 } , 0, true ); - diff --git a/jstests/basicc.js b/jstests/basicc.js deleted file mode 100644 index 0cd71ad32a0..00000000000 --- a/jstests/basicc.js +++ /dev/null @@ -1,21 +0,0 @@ -// test writing to two db's at the same time. 
- -t1 = db.jstests_basicc; -var db = db.getSisterDB("test_basicc"); -t2 = db.jstests_basicc; -t1.drop(); -t2.drop(); - -js = "while( 1 ) { db.jstests.basicc1.save( {} ); }"; -pid = startMongoProgramNoConnect( "mongo" , "--eval" , js , db.getMongo().host ); - -for( var i = 0; i < 1000; ++i ) { - t2.save( {} ); -} -assert.automsg( "!db.getLastError()" ); -stopMongoProgramByPid( pid ); -// put things back the way we found it -t1.drop(); -t2.drop(); -db.dropDatabase(); -db = db.getSisterDB("test"); \ No newline at end of file diff --git a/jstests/batch_size.js b/jstests/batch_size.js deleted file mode 100644 index 2bc144cd554..00000000000 --- a/jstests/batch_size.js +++ /dev/null @@ -1,45 +0,0 @@ -// Test subtleties of batchSize and limit. - -var t = db.jstests_batch_size; -t.drop(); - -for (var i = 0; i < 4; i++) { - t.save({_id: i, a: i}); -} - -function runIndexedTests() { - // With limit, indexed. - assert.eq(2, t.find().limit(2).itcount(), 'G'); - assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'H'); - - // With batchSize, indexed. - // SERVER-12438: If there is an index that provides the sort, - // then a plan with an unindexed sort should never be used. - // Consequently, batchSize will NOT be a hard limit in this case. - // WARNING: the behavior described above may change in the future. - assert.eq(4, t.find().batchSize(2).itcount(), 'I'); - assert.eq(4, t.find().sort({a: 1}).batchSize(2).itcount(), 'J'); -} - -// Without batch size or limit, unindexed. -assert.eq(4, t.find().itcount(), 'A'); -assert.eq(4, t.find().sort({a: 1}).itcount(), 'B'); - -// With limit, unindexed. -assert.eq(2, t.find().limit(2).itcount(), 'C'); -assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'D'); - -// With batchSize, unindexed. -// SERVER-12438: in general batch size does not mean a hard -// limit. With an unindexed sort, however, the server interprets -// batch size as a hard limit so that it can do a top k sort. -// WARNING: this behavior may change in the future. 
-assert.eq(4, t.find().batchSize(2).itcount(), 'E'); -assert.eq(2, t.find().sort({a: 1}).batchSize(2).itcount(), 'F'); - -// Run the tests with the index twice in order to double check plan caching. -t.ensureIndex({a: 1}); -for (var i = 0; i < 2; i++) { - runIndexedTests(); -} - diff --git a/jstests/bench_test1.js b/jstests/bench_test1.js deleted file mode 100644 index bb1423ee8b8..00000000000 --- a/jstests/bench_test1.js +++ /dev/null @@ -1,37 +0,0 @@ - -t = db.bench_test1; -t.drop(); - -t.insert( { _id : 1 , x : 1 } ) -t.insert( { _id : 2 , x : 1 } ) - -ops = [ - { op : "findOne" , ns : t.getFullName() , query : { _id : 1 } } , - { op : "update" , ns : t.getFullName() , query : { _id : 1 } , update : { $inc : { x : 1 } } } -] - -seconds = .7 - -benchArgs = { ops : ops , parallel : 2 , seconds : seconds , host : db.getMongo().host }; - -if (jsTest.options().auth) { - benchArgs['db'] = 'admin'; - benchArgs['username'] = jsTest.options().adminUser; - benchArgs['password'] = jsTest.options().adminPassword; -} -res = benchRun( benchArgs ); - -assert.lte( seconds * res.update , t.findOne( { _id : 1 } ).x * 1.05 , "A1" ) - - -assert.eq( 1 , t.getIndexes().length , "B1" ) -benchArgs['ops']=[ { op : "createIndex" , ns : t.getFullName() , key : { x : 1 } } ]; -benchArgs['parallel']=1; -benchArgs['seconds']=1; -benchRun( benchArgs ); -assert.eq( 2 , t.getIndexes().length , "B2" ) -benchArgs['ops']=[ { op : "dropIndex" , ns : t.getFullName() , key : { x : 1 } } ]; -benchRun( benchArgs ); -assert.soon( function(){ return t.getIndexes().length == 1; } ); - - diff --git a/jstests/bench_test2.js b/jstests/bench_test2.js deleted file mode 100644 index e2057ac693e..00000000000 --- a/jstests/bench_test2.js +++ /dev/null @@ -1,49 +0,0 @@ - -t = db.bench_test2 -t.drop(); - -for ( i=0; i<100; i++ ) - t.insert( { _id : i , x : 0 } ); -db.getLastError(); - -benchArgs = { ops : [ { ns : t.getFullName() , - op : "update" , - query : { _id : { "#RAND_INT" : [ 0 , 100 ] } } , - update : { 
$inc : { x : 1 } } } ] , - parallel : 2 , - seconds : 1 , - totals : true , - host : db.getMongo().host } - -if (jsTest.options().auth) { - benchArgs['db'] = 'admin'; - benchArgs['username'] = jsTest.options().adminUser; - benchArgs['password'] = jsTest.options().adminPassword; -} - -res = benchRun( benchArgs ) -printjson( res ); - -sumsq = 0 -sum = 0 - -min = 1000 -max = 0; -t.find().forEach( - function(z){ - sum += z.x; - sumsq += Math.pow( ( res.update / 100 ) - z.x , 2 ); - min = Math.min( z.x , min ); - max = Math.max( z.x , max ); - } -) - -avg = sum / 100 -std = Math.sqrt( sumsq / 100 ) - -print( "Avg: " + avg ) -print( "Std: " + std ) -print( "Min: " + min ) -print( "Max: " + max ) - - diff --git a/jstests/bench_test3.js b/jstests/bench_test3.js deleted file mode 100644 index 4bc21ed2505..00000000000 --- a/jstests/bench_test3.js +++ /dev/null @@ -1,27 +0,0 @@ -t = db.bench_test3 -t.drop(); - - -benchArgs = { ops : [ { ns : t.getFullName() , - op : "update" , - upsert : true , - query : { _id : { "#RAND_INT" : [ 0 , 5 , 4 ] } } , - update : { $inc : { x : 1 } } } ] , - parallel : 2 , - seconds : 1 , - totals : true , - host : db.getMongo().host } - -if (jsTest.options().auth) { - benchArgs['db'] = 'admin'; - benchArgs['username'] = jsTest.options().adminUser; - benchArgs['password'] = jsTest.options().adminPassword; -} - -res = benchRun( benchArgs ) -printjson( res ); - -var keys = [] -var totals = {} -db.bench_test3.find().sort( { _id : 1 } ).forEach( function(z){ keys.push( z._id ); totals[z._id] = z.x } ); -assert.eq( [ 0 , 4 , 8 , 12 , 16 ] , keys ) diff --git a/jstests/big_object1.js b/jstests/big_object1.js deleted file mode 100644 index 07c4150fb53..00000000000 --- a/jstests/big_object1.js +++ /dev/null @@ -1,54 +0,0 @@ - -t = db.big_object1 -t.drop(); - -if ( db.adminCommand( "buildinfo" ).bits == 64 ){ - - var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - var s = large; - while ( s.length < 850 * 1024 ){ - s += large; - } - x = 0; - while ( true ){ 
- n = { _id : x , a : [] } - for ( i=0; i<14+x; i++ ) - n.a.push( s ) - try { - t.insert( n ) - o = n - } - catch ( e ){ - break; - } - - if ( db.getLastError() != null ) - break; - x++; - } - - printjson( t.stats(1024*1024) ) - - assert.lt( 15 * 1024 * 1024 , Object.bsonsize( o ) , "A1" ) - assert.gt( 17 * 1024 * 1024 , Object.bsonsize( o ) , "A2" ) - - assert.eq( x , t.count() , "A3" ) - - for ( i=0; i js conversion - var a = o.a; - } catch(e) { - assert(false, "Caught exception trying to insert during iteration " + i + ": " + e); - } - assert( o , "B" + i ); - } - - t.drop() -} -else { - print( "skipping big_object1 b/c not 64-bit" ) -} - -print("SUCCESS"); diff --git a/jstests/binData.js b/jstests/binData.js deleted file mode 100644 index 3f037650e05..00000000000 --- a/jstests/binData.js +++ /dev/null @@ -1,14 +0,0 @@ - -var x = new BinData(3, "OEJTfmD8twzaj/LPKLIVkA=="); -assert.eq(x.hex(), "3842537e60fcb70cda8ff2cf28b21590", "bad hex"); -assert.eq(x.base64(), "OEJTfmD8twzaj/LPKLIVkA==", "bad base64"); -assert.eq(x.type, 3, "bad type"); -assert.eq(x.length(), 16, "bad length"); - -x = new BinData(0, "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4="); -assert.eq(x.hex(), 
"4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e", "bad hex"); -assert.eq(x.base64(), "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=", "bad base64"); -assert.eq(x.type, 0, "bad type"); -assert.eq(x.length(), 269, "bad length"); - - diff --git a/jstests/block_check_supported.js b/jstests/block_check_supported.js deleted file mode 100644 index 21d04ca93c7..00000000000 --- a/jstests/block_check_supported.js +++ /dev/null @@ -1,118 +0,0 @@ -// Test that serverStatus() features dependent on the ProcessInfo::blockCheckSupported() routine -// work correctly. These features are db.serverStatus({workingSet:1}).workingSet and -// db.serverStatus().indexCounters. -// Related to SERVER-9242, SERVER-6450. 
- -// Check that an object contains a specific set of fields and only those fields -// NOTE: destroys 'item' -// -var testExpectedFields = function(itemString, item, fieldList) { - print('Testing ' + itemString + ' for expected fields'); - for (var i = 0; i < fieldList.length; ++i) { - var field = fieldList[i]; - if (typeof item[field] == 'undefined') { - doassert('Test FAILED: missing "' + field + '" field'); - } - delete item[field]; - } - if (!friendlyEqual({}, item)) { - doassert('Test FAILED: found unexpected field(s): ' + tojsononeline(item)); - } -} - -// Run test as function to keep cruft out of global namespace -// -var doTest = function () { - - print('Testing workingSet and indexCounters portions of serverStatus'); - var hostInfo = db.hostInfo(); - var isXP = (hostInfo.os.name == 'Windows XP') ? true : false; - var isEmpty = (hostInfo.os.name == '') ? true : false; - - // Check that the serverStatus command returns something for these sub-documents - // - var serverStatus = db.serverStatus({ workingSet: 1 }); - if (!serverStatus) { - doassert('Test FAILED: db.serverStatus({workingSet:1}) did not return a value'); - } - if (!serverStatus.workingSet) { - doassert('Test FAILED: db.serverStatus({workingSet:1}).workingSet was not returned'); - } - if (!serverStatus.indexCounters) { - doassert('Test FAILED: db.serverStatus().indexCounters was not returned'); - } - var workingSet_1 = serverStatus.workingSet; - var indexCounters_1 = serverStatus.indexCounters; - - if (isXP) { - // Windows XP is the only supported platform that should be missing this data; make sure - // that we don't get bogus data back - // - var expectedResult = { info: 'not supported' }; - print('Testing db.serverStatus({workingSet:1}).workingSet on Windows XP -- expecting ' + - tojsononeline(expectedResult)); - assert.eq(expectedResult, workingSet_1, - 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' + - ' did not return the expected value'); - expectedResult = { note: 'not 
supported on this platform' }; - print('Testing db.serverStatus().indexCounters on Windows XP -- expecting ' + - tojsononeline(expectedResult)); - assert.eq(expectedResult, indexCounters_1, - 'Test FAILED: db.serverStatus().indexCounters' + - ' did not return the expected value'); - } - else if (isEmpty) { - // Until SERVER-9325 is fixed, Solaris/SmartOS will also be missing this data; make sure - // that we don't get bogus data back - // - expectedResult = { info: 'not supported' }; - print('Testing db.serverStatus({workingSet:1}).workingSet on "" (Solaris?) -- expecting ' + - tojsononeline(expectedResult)); - assert.eq(expectedResult, workingSet_1, - 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' + - ' did not return the expected value'); - expectedResult = { note: 'not supported on this platform' }; - print('Testing db.serverStatus().indexCounters on "" (Solaris?) -- expecting ' + - tojsononeline(expectedResult)); - assert.eq(expectedResult, indexCounters_1, - 'Test FAILED: db.serverStatus().indexCounters' + - ' did not return the expected value'); - } - else { - // Check that we get both workingSet and indexCounters and that all expected - // fields are present with no unexpected fields - // - testExpectedFields('db.serverStatus({workingSet:1}).workingSet', - workingSet_1, - ['note', 'pagesInMemory', 'computationTimeMicros', 'overSeconds']); - testExpectedFields('db.serverStatus().indexCounters', - indexCounters_1, - ['accesses', 'hits', 'misses', 'resets', 'missRatio']); - - if (0) { // comment out until SERVER-9284 is fixed - // See if we can make the index counters values change - // - print('Testing that indexCounters accesses and hits increase by 1 on indexed find()'); - var blockDB = db.getSiblingDB('block_check_supported'); - blockDB.dropDatabase(); - blockDB.coll.insert({ a: 1 }); - blockDB.coll.ensureIndex({ a: 1 }); - indexCounters_1 = db.serverStatus().indexCounters; - var doc = blockDB.coll.findOne({ a: 1 }); - var indexCounters_2 = 
db.serverStatus().indexCounters; - assert.gt(indexCounters_2.accesses, indexCounters_1.accesses, - 'Test FAILED: db.serverStatus().indexCounters.accesses' + - ' should have had a value greater than ' + indexCounters_1.accesses + - ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) + - ', after find(): ' + tojsononeline(indexCounters_2)); - assert.gt(indexCounters_2.hits, indexCounters_1.hits, - 'Test FAILED: db.serverStatus().indexCounters.hits' + - ' should have had a value greater than ' + indexCounters_1.hits + - ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) + - ', after find(): ' + tojsononeline(indexCounters_2)); - } // comment out until SERVER-9284 is fixed - } - print('Test PASSED!'); -}; - -doTest(); diff --git a/jstests/bulk_insert.js b/jstests/bulk_insert.js deleted file mode 100644 index e26b323c6d9..00000000000 --- a/jstests/bulk_insert.js +++ /dev/null @@ -1,22 +0,0 @@ -// Tests bulk insert of docs from the shell - -var coll = db.bulkInsertTest -coll.drop() - -Random.srand( new Date().getTime() ) - -var bulkSize = Math.floor( Random.rand() * 200 ) + 1 -var numInserts = Math.floor( Random.rand() * 300 ) + 1 - -print( "Inserting " + numInserts + " bulks of " + bulkSize + " documents." 
) - -for( var i = 0; i < numInserts; i++ ){ - var bulk = [] - for( var j = 0; j < bulkSize; j++ ){ - bulk.push({ hi : "there", i : i, j : j }) - } - - coll.insert( bulk ) -} - -assert.eq( coll.count(), bulkSize * numInserts ) diff --git a/jstests/capped.js b/jstests/capped.js deleted file mode 100644 index 421132b6f75..00000000000 --- a/jstests/capped.js +++ /dev/null @@ -1,11 +0,0 @@ -db.jstests_capped.drop(); -db.createCollection("jstests_capped", {capped:true, size:30000}); - -assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_capped"} ).count(), "expected a count of one index for new capped collection" ); -t = db.jstests_capped; - -t.save({x:1}); -t.save({x:2}); - -assert( t.find().sort({$natural:1})[0].x == 1 , "expected obj.x==1"); -assert( t.find().sort({$natural:-1})[0].x == 2, "expected obj.x == 2"); diff --git a/jstests/capped1.js b/jstests/capped1.js deleted file mode 100644 index 0bbeaa40894..00000000000 --- a/jstests/capped1.js +++ /dev/null @@ -1,11 +0,0 @@ - -t = db.capped1; -t.drop(); - -db.createCollection("capped1" , {capped:true, size:1024 }); -v = t.validate(); -assert( v.valid , "A : " + tojson( v ) ); // SERVER-485 - -t.save( { x : 1 } ) -assert( t.validate().valid , "B" ) - diff --git a/jstests/capped2.js b/jstests/capped2.js deleted file mode 100644 index 65bb82f4c07..00000000000 --- a/jstests/capped2.js +++ /dev/null @@ -1,62 +0,0 @@ -db.capped2.drop(); -db._dbCommand( { create: "capped2", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); -tzz = db.capped2; - -function debug( x ) { -// print( x ); -} - -var val = new Array( 2000 ); -var c = ""; -for( i = 0; i < 2000; ++i, c += "---" ) { // bigger and bigger objects through the array... 
- val[ i ] = { a: c }; -} - -function checkIncreasing( i ) { - res = tzz.find().sort( { $natural: -1 } ); - assert( res.hasNext(), "A" ); - var j = i; - while( res.hasNext() ) { - try { - assert.eq( val[ j-- ].a, res.next().a, "B" ); - } catch( e ) { - debug( "capped2 err " + j ); - throw e; - } - } - res = tzz.find().sort( { $natural: 1 } ); - assert( res.hasNext(), "C" ); - while( res.hasNext() ) - assert.eq( val[ ++j ].a, res.next().a, "D" ); - assert.eq( j, i, "E" ); -} - -function checkDecreasing( i ) { - res = tzz.find().sort( { $natural: -1 } ); - assert( res.hasNext(), "F" ); - var j = i; - while( res.hasNext() ) { - assert.eq( val[ j++ ].a, res.next().a, "G" ); - } - res = tzz.find().sort( { $natural: 1 } ); - assert( res.hasNext(), "H" ); - while( res.hasNext() ) - assert.eq( val[ --j ].a, res.next().a, "I" ); - assert.eq( j, i, "J" ); -} - -for( i = 0 ;; ++i ) { - debug( "capped 2: " + i ); - tzz.insert( val[ i ] ); - if ( tzz.count() == 0 ) { - assert( i > 100, "K" ); - break; - } - checkIncreasing( i ); -} - -for( i = 600 ; i >= 0 ; --i ) { - debug( "capped 2: " + i ); - tzz.insert( val[ i ] ); - checkDecreasing( i ); -} diff --git a/jstests/capped3.js b/jstests/capped3.js deleted file mode 100644 index 2e5e6790cb7..00000000000 --- a/jstests/capped3.js +++ /dev/null @@ -1,45 +0,0 @@ -t = db.jstests_capped3; -t2 = db.jstests_capped3_clone; -t.drop(); -t2.drop(); -for( i = 0; i < 1000; ++i ) { - t.save( {i:i} ); -} -assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ), "A" ); -c = t2.find(); -for( i = 0; i < 1000; ++i ) { - assert.eq( i, c.next().i, "B" ); -} -assert( !c.hasNext(), "C" ); - -t.drop(); -t2.drop(); - -for( i = 0; i < 1000; ++i ) { - t.save( {i:i} ); -} -assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ), "D" ); -c = t2.find().sort( {$natural:-1} ); -i = 999; -while( 
c.hasNext() ) { - assert.eq( i--, c.next().i, "E" ); -} -//print( "i: " + i ); -var str = tojson( t2.stats() ); -//print( "stats: " + tojson( t2.stats() ) ); -assert( i < 990, "F" ); - -t.drop(); -t2.drop(); - -for( i = 0; i < 1000; ++i ) { - t.save( {i:i} ); -} -assert.commandWorked( t.convertToCapped( 1000 ), "G" ); -c = t.find().sort( {$natural:-1} ); -i = 999; -while( c.hasNext() ) { - assert.eq( i--, c.next().i, "H" ); -} -assert( i < 990, "I" ); -assert( i > 900, "J" ); diff --git a/jstests/capped5.js b/jstests/capped5.js deleted file mode 100644 index 37b776ee1ca..00000000000 --- a/jstests/capped5.js +++ /dev/null @@ -1,40 +0,0 @@ - -tn = "capped5" - -t = db[tn] -t.drop(); - - -db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); -t.insert( { _id : 5 , x : 11 , z : 52 } ); -assert.eq( 1 , t.getIndexKeys().length , "A0" ) //now we assume _id index even on capped coll -assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" ); - -t.ensureIndex( { _id : 1 } ) -t.ensureIndex( { x : 1 } ) - -assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" ); -assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" ); - -t.drop(); -db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); -t.insert( { _id : 5 , x : 11 } ); -t.insert( { _id : 5 , x : 12 } ); -assert.eq( 1, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index -assert.eq( 1, t.find().toArray().length ); //_id index unique, so second insert fails - -t.drop(); -db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); -t.insert( { _id : 5 , x : 11 } ); -t.insert( { _id : 6 , x : 12 } ); -t.ensureIndex( { x:1 }, {unique:true} ); -assert.eq( 2, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index -assert.eq( 2, t.find().hint( {x:1} ).toArray().length ); - -// SERVER-525 (closed) unique indexes in capped collection -t.drop(); -db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); -t.ensureIndex( { _id:1 } ); // note we assume will be automatically 
unique because it is _id -t.insert( { _id : 5 , x : 11 } ); -t.insert( { _id : 5 , x : 12 } ); -assert.eq( 1, t.find().toArray().length ); diff --git a/jstests/capped6.js b/jstests/capped6.js deleted file mode 100644 index 5db12b2fcf9..00000000000 --- a/jstests/capped6.js +++ /dev/null @@ -1,109 +0,0 @@ -// Test NamespaceDetails::cappedTruncateAfter via 'captrunc' command - -Random.setRandomSeed(); - -db.capped6.drop(); -db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); -tzz = db.capped6; - -function debug( x ) { -// print( x ); -} - -/** - * Check that documents in the collection are in order according to the value - * of a, which corresponds to the insert order. This is a check that the oldest - * document(s) is/are deleted when space is needed for the newest document. The - * check is performed in both forward and reverse directions. - */ -function checkOrder( i ) { - res = tzz.find().sort( { $natural: -1 } ); - assert( res.hasNext(), "A" ); - var j = i; - while( res.hasNext() ) { - try { - assert.eq( val[ j-- ].a, res.next().a, "B" ); - } catch( e ) { - debug( "capped6 err " + j ); - throw e; - } - } - res = tzz.find().sort( { $natural: 1 } ); - assert( res.hasNext(), "C" ); - while( res.hasNext() ) - assert.eq( val[ ++j ].a, res.next().a, "D" ); - assert.eq( j, i, "E" ); -} - -var val = new Array( 500 ); -var c = ""; -for( i = 0; i < 500; ++i, c += "-" ) { - // The a values are strings of increasing length. - val[ i ] = { a: c }; -} - -var oldMax = Random.randInt( 500 ); -var max = 0; - -/** - * Insert new documents until there are 'oldMax' documents in the collection, - * then remove a random number of documents (often all but one) via one or more - * 'captrunc' requests. 
- */ -function doTest() { - for( var i = max; i < oldMax; ++i ) { - tzz.insert( val[ i ] ); - } - max = oldMax; - count = tzz.count(); - - var min = 1; - if ( Random.rand() > 0.3 ) { - min = Random.randInt( count ) + 1; - } - - // Iteratively remove a random number of documents until we have no more - // than 'min' documents. - while( count > min ) { - // 'n' is the number of documents to remove - we must account for the - // possibility that 'inc' will be true, and avoid removing all documents - // from the collection in that case, as removing all documents is not - // allowed by 'captrunc' - var n = Random.randInt( count - min - 1 ); // 0 <= x <= count - min - 1 - var inc = Random.rand() > 0.5; - debug( count + " " + n + " " + inc ); - assert.commandWorked( db.runCommand( { captrunc:"capped6", n:n, inc:inc } ) ); - if ( inc ) { - n += 1; - } - count -= n; - max -= n; - // Validate the remaining documents. - checkOrder( max - 1 ); - } -} - -// Repeatedly add up to 'oldMax' documents and then truncate the newest -// documents. Newer documents take up more space than older documents. -for( var i = 0; i < 10; ++i ) { - doTest(); -} - -// reverse order of values -var val = new Array( 500 ); - -var c = ""; -for( i = 499; i >= 0; --i, c += "-" ) { - val[ i ] = { a: c }; -} -db.capped6.drop(); -db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); -tzz = db.capped6; - -// Same test as above, but now the newer documents take less space than the -// older documents instead of more. 
-for( var i = 0; i < 10; ++i ) { - doTest(); -} - -tzz.drop(); diff --git a/jstests/capped7.js b/jstests/capped7.js deleted file mode 100644 index 693828da85f..00000000000 --- a/jstests/capped7.js +++ /dev/null @@ -1,89 +0,0 @@ -// Test NamespaceDetails::emptyCappedCollection via 'emptycapped' command - -Random.setRandomSeed(); - -db.capped7.drop(); -db._dbCommand( { create: "capped7", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); -tzz = db.capped7; - -var ten = new Array( 11 ).toString().replace( /,/g, "-" ); - -count = 0; - -/** - * Insert new documents until the capped collection loops and the document - * count doesn't increase on insert. - */ -function insertUntilFull() { -count = tzz.count(); - var j = 0; -while( 1 ) { - tzz.save( {i:ten,j:j++} ); - var newCount = tzz.count(); - if ( count == newCount ) { - break; - } - count = newCount; -} -} - -insertUntilFull(); - -// oldCount == count before empty -oldCount = count; - -assert.eq.automsg( "11", "tzz.stats().numExtents" ); - -// oldSize == size before empty -var oldSize = tzz.stats().storageSize; - -assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) ); - -// check that collection storage parameters are the same after empty -assert.eq.automsg( "11", "tzz.stats().numExtents" ); -assert.eq.automsg( "oldSize", "tzz.stats().storageSize" ); - -// check that the collection is empty after empty -assert.eq.automsg( "0", "tzz.find().itcount()" ); -assert.eq.automsg( "0", "tzz.count()" ); - -// check that we can reuse the empty collection, inserting as many documents -// as we were able to the first time through. 
-insertUntilFull(); -assert.eq.automsg( "oldCount", "count" ); -assert.eq.automsg( "oldCount", "tzz.find().itcount()" ); -assert.eq.automsg( "oldCount", "tzz.count()" ); - -assert.eq.automsg( "11", "tzz.stats().numExtents" ); -var oldSize = tzz.stats().storageSize; - -assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) ); - -// check that the collection storage parameters are unchanged after another empty -assert.eq.automsg( "11", "tzz.stats().numExtents" ); -assert.eq.automsg( "oldSize", "tzz.stats().storageSize" ); - -// insert an arbitrary number of documents -var total = Random.randInt( 2000 ); -for( var j = 1; j <= total; ++j ) { - tzz.save( {i:ten,j:j} ); - // occasionally check that only the oldest documents are removed to make room - // for the newest documents - if ( Random.rand() > 0.95 ) { - assert.automsg( "j >= tzz.count()" ); - assert.eq.automsg( "tzz.count()", "tzz.find().itcount()" ); - var c = tzz.find().sort( {$natural:-1} ); - var k = j; - assert.automsg( "c.hasNext()" ); - while( c.hasNext() ) { - assert.eq.automsg( "c.next().j", "k--" ); - } - // check the same thing with a reverse iterator as well - var c = tzz.find().sort( {$natural:1} ); - assert.automsg( "c.hasNext()" ); - while( c.hasNext() ) { - assert.eq.automsg( "c.next().j", "++k" ); - } - assert.eq.automsg( "j", "k" ); - } -} \ No newline at end of file diff --git a/jstests/capped8.js b/jstests/capped8.js deleted file mode 100644 index 0f30e37aebf..00000000000 --- a/jstests/capped8.js +++ /dev/null @@ -1,108 +0,0 @@ -// Test NamespaceDetails::cappedTruncateAfter with empty extents - -Random.setRandomSeed(); - -t = db.jstests_capped8; - -function debug( x ) { -// printjson( x ); -} - -/** Generate an object with a string field of specified length */ -function obj( size, x ) { - return {X:x, a:new Array( size + 1 ).toString()};; -} - -function withinOne( a, b ) { - assert( Math.abs( a - b ) <= 1, "not within one: " + a + ", " + b ) -} - -var X = 0; - -/** - * Insert enough 
documents of the given size spec that the collection will - * contain only documents having this size spec. - */ -function insertManyRollingOver( objsize ) { - // Add some variability, as the precise number can trigger different cases. - X++; - n = 250 + Random.randInt(10); - - assert(t.count() == 0 || t.findOne().X != X); - - for( i = 0; i < n; ++i ) { - t.save( obj( objsize, X ) ); - debug( t.count() ); - } - - if (t.findOne().X != X) { - printjson(t.findOne()); - print("\n\nERROR didn't roll over in insertManyRollingOver " + objsize); - print("approx amountwritten: " + (objsize * n)); - printjson(t.stats()); - assert(false); - } -} - -/** - * Insert some documents in such a way that there may be an empty extent, then - * truncate the capped collection. - */ -function insertAndTruncate( first ) { - myInitialCount = t.count(); - // Insert enough documents to make the capped allocation loop over. - insertManyRollingOver( 150 ); - myFiftyCount = t.count(); - // Insert documents that are too big to fit in the smaller extents. - insertManyRollingOver( 5000 ); - myTwokCount = t.count(); - if ( first ) { - initialCount = myInitialCount; - fiftyCount = myFiftyCount; - twokCount = myTwokCount; - // Sanity checks for collection count - assert( fiftyCount > initialCount ); - assert( fiftyCount > twokCount ); - } else { - // Check that we are able to insert roughly the same number of documents - // after truncating. The exact values are slightly variable as a result - // of the capped allocation algorithm. - withinOne( initialCount, myInitialCount ); - withinOne( fiftyCount, myFiftyCount ); - withinOne( twokCount, myTwokCount ); - } - count = t.count(); - // Check that we can truncate the collection successfully. 
- assert.commandWorked( db.runCommand( { captrunc:"jstests_capped8", n:count - 1, inc:false } ) ); -} - -/** Test truncating and subsequent inserts */ -function testTruncate() { - insertAndTruncate( true ); - insertAndTruncate( false ); - insertAndTruncate( false ); -} - -var pass = 1; - -print("pass " + pass++); -t.drop(); -db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 10000, 4000 ] } ); -testTruncate(); - -print("pass " + pass++); -t.drop(); -db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 1000, 4000 ] } ); -testTruncate(); - -print("pass " + pass++); -t.drop(); -db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 4000 ] } ); -testTruncate(); - -print("pass " + pass++); -t.drop(); -db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000 ] } ); -testTruncate(); - -t.drop(); diff --git a/jstests/capped9.js b/jstests/capped9.js deleted file mode 100644 index 9ea506ce795..00000000000 --- a/jstests/capped9.js +++ /dev/null @@ -1,28 +0,0 @@ - -t = db.capped9; -t.drop(); - -db.createCollection("capped9" , {capped:true, size:1024*50 }); - -t.insert( { _id : 1 , x : 2 , y : 3 } ) - -assert.eq( 1 , t.find( { x : 2 } ).itcount() , "A1" ) -assert.eq( 1 , t.find( { y : 3 } ).itcount() , "A2" ) -//assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064 - -t.update( { _id : 1 } , { $set : { y : 4 } } ) -//assert( db.getLastError() , "B1" ); // SERVER-3064 -//assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064 - -t.ensureIndex( { _id : 1 } ) - -assert.eq( 1 , t.find( { _id : 1 } ).itcount() , "D1" ) - -t.update( { _id : 1 } , { $set : { y : 4 } } ) -assert( null == db.getLastError() , "D1: " + tojson( db.getLastError() ) ) -assert.eq( 4 , t.findOne().y , "D2" ) - - - - - diff --git a/jstests/capped_empty.js b/jstests/capped_empty.js deleted file mode 100644 index 5b0fb6b8f8e..00000000000 --- a/jstests/capped_empty.js +++ /dev/null @@ 
-1,24 +0,0 @@ - -t = db.capped_empty; -t.drop(); - -db.createCollection( t.getName() , { capped : true , size : 100 } ) - -t.insert( { x : 1 } ); -t.insert( { x : 2 } ); -t.insert( { x : 3 } ); -t.ensureIndex( { x : 1 } ); - -assert.eq( 3 , t.count() ); -assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned ); - -t.runCommand( "emptycapped" ); - -assert.eq( 0 , t.count() ); - -t.insert( { x : 1 } ); -t.insert( { x : 2 } ); -t.insert( { x : 3 } ); - -assert.eq( 3 , t.count() ); -assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned ); diff --git a/jstests/capped_max.js b/jstests/capped_max.js deleted file mode 100644 index 1d7cbc3ef23..00000000000 --- a/jstests/capped_max.js +++ /dev/null @@ -1,29 +0,0 @@ - -t = db.capped_max; -sz = 1024 * 16; - -t.drop(); -db.createCollection( t.getName() , {capped: true, size: sz } ); -assert.lt( Math.pow( 2, 62 ), t.stats().max.floatApprox ) - -t.drop(); -db.createCollection( t.getName() , {capped: true, size: sz, max: 123456 } ); -assert.eq( 123456, t.stats().max ); - -// create a collection with the max possible doc cap (2^31-2 docs) -t.drop(); -mm = Math.pow(2, 31) - 2; -db.createCollection( t.getName() , {capped: true, size: sz, max: mm } ); -assert.eq( mm, t.stats().max ); - -// create a collection with the 'no max' value (2^31-1 docs) -t.drop(); -mm = Math.pow(2, 31) - 1; -db.createCollection( t.getName() , {capped: true, size: sz, max: mm } ); -assert.eq(NumberLong("9223372036854775807"), t.stats().max ); - -t.drop(); -res = db.createCollection( t.getName() , {capped: true, size: sz, max: Math.pow(2, 31) } ); -assert.eq( 0, res.ok, tojson(res) ); -assert.eq( 0, t.stats().ok ) - diff --git a/jstests/capped_server2639.js b/jstests/capped_server2639.js deleted file mode 100644 index 465fd4ae874..00000000000 --- a/jstests/capped_server2639.js +++ /dev/null @@ -1,27 +0,0 @@ - -name = "server2639" - -t = db.getCollection( name ); -t.drop(); - - -db.createCollection( name , { capped : true , size : 1 } ); - -size = 
t.stats().storageSize; - -bigString = ""; -while ( bigString.length < size ) - bigString += "."; - -t.insert( { x : 1 } ); - -t.insert( { x : 2 , bigString : bigString } ); -gle = db.getLastErrorObj(); -assert.eq( 16328 , gle.code , tojson( gle ) ) - -assert.eq( 1 , t.count() ); // make sure small doc didn't get deleted -assert.eq( 1 , t.findOne().x ); - -// make sure can still insert -t.insert( { x : 2 } ); -assert.eq( 2 , t.count() ); diff --git a/jstests/capped_server7543.js b/jstests/capped_server7543.js deleted file mode 100644 index 514cd7964b2..00000000000 --- a/jstests/capped_server7543.js +++ /dev/null @@ -1,11 +0,0 @@ - -mydb = db.getSisterDB( "capped_server7543" ); -mydb.dropDatabase(); - -mydb.createCollection( "foo" , { capped : true , size : 12288 } ); - -assert.eq( 12288, mydb.foo.stats().storageSize ); -assert.eq( 1, mydb.foo.validate(true).extentCount ); - -mydb.dropDatabase(); - diff --git a/jstests/cappeda.js b/jstests/cappeda.js deleted file mode 100644 index 4a4b14a64e5..00000000000 --- a/jstests/cappeda.js +++ /dev/null @@ -1,33 +0,0 @@ - -t = db.scan_capped_id; -t.drop() - -x = t.runCommand( "create" , { capped : true , size : 10000 } ) -assert( x.ok ) - -for ( i=0; i<100; i++ ) - t.insert( { _id : i , x : 1 } ) - -function q() { - return t.findOne( { _id : 5 } ) -} - -function u() { - t.update( { _id : 5 } , { $set : { x : 2 } } ); - var gle = db.getLastError(); - if ( gle ) - throw gle; -} - - -// SERVER-3064 -//assert.throws( q , [] , "A1" ); -//assert.throws( u , [] , "B1" ); - -t.ensureIndex( { _id : 1 } ) - -assert.eq( 1 , q().x ) -q() -u() - -assert.eq( 2 , q().x ) diff --git a/jstests/check_shard_index.js b/jstests/check_shard_index.js deleted file mode 100644 index f85071124fb..00000000000 --- a/jstests/check_shard_index.js +++ /dev/null @@ -1,141 +0,0 @@ -// ------------------------- -// CHECKSHARDINGINDEX TEST UTILS -// ------------------------- - -f = db.jstests_shardingindex; -f.drop(); - - -// ------------------------- -// Case 
1: all entries filled or empty should make a valid index -// - -f.drop(); -f.ensureIndex( { x: 1 , y: 1 } ); -assert.eq( 0 , f.count() , "1. initial count should be zero" ); - -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( true , res.ok, "1a" ); - -f.save( { x: 1 , y : 1 } ); -assert.eq( 1 , f.count() , "1. count after initial insert should be 1" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( true , res.ok , "1b" ); - - -// ------------------------- -// Case 2: entry with null values would make an index unsuitable -// - -f.drop(); -f.ensureIndex( { x: 1 , y: 1 } ); -assert.eq( 0 , f.count() , "2. initial count should be zero" ); - -f.save( { x: 1 , y : 1 } ); -f.save( { x: null , y : 1 } ); - -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( true , res.ok , "2a " + tojson(res) ); - -f.save( { y: 2 } ); -assert.eq( 3 , f.count() , "2. count after initial insert should be 3" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "2b " + tojson(res) ); - -// Check _id index -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {_id:1} }); -assert.eq( true , res.ok , "2c " + tojson(res) ); -assert( res.idskip , "2d " + tojson(res) ) - -// ------------------------- -// Case 3: entry with array values would make an index unsuitable -// - -f.drop(); -f.ensureIndex( { x: 1 , y: 1 } ); -assert.eq( 0 , f.count() , "3. initial count should be zero" ); - -f.save( { x: 1 , y : 1 } ); -f.save( { x: [1, 2] , y : 2 } ); - -assert.eq( 2 , f.count() , "3. 
count after initial insert should be 2" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "3a " + tojson(res) ); - -f.remove( { y : 2 } ); -f.reIndex(); - -assert.eq( 1 , f.count() , "3. count after removing array value should be 1" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( true , res.ok , "3b " + tojson(res) ); - -f.save( { x : 2, y : [1, 2] } ) - -assert.eq( 2 , f.count() , "3. count after adding array value should be 2" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "3c " + tojson(res) ); - -// ------------------------- -// Case 4: Handles prefix shard key indexes. -// - -f.drop(); -f.ensureIndex( { x: 1 , y: 1, z: 1 } ); -assert.eq( 0 , f.count() , "4. initial count should be zero" ); - -f.save( { x: 1 , y : 1, z : 1 } ); - -assert.eq( 1 , f.count() , "4. count after initial insert should be 1" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); -assert.eq( true , res.ok , "4a " + tojson(res) ); - -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( true , res.ok , "4b " + tojson(res) ); - -f.save( { x: [1, 2] , y : 2, z : 2 } ); - -assert.eq( 2 , f.count() , "4. 
count after adding array value should be 2" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); -assert.eq( false , res.ok , "4c " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "4d " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); -assert.eq( false , res.ok , "4e " + tojson(res) ); - - -f.remove( { y : 2 } ); -f.reIndex(); - -assert.eq( 1 , f.count() , "4. count after removing array value should be 1" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); -assert.eq( true , res.ok , "4f " + tojson(res) ); - -f.save( { x : 3, y : [1, 2], z : 3 } ) - -assert.eq( 2 , f.count() , "4. count after adding array value on second key should be 2" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); -assert.eq( false , res.ok , "4g " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "4h " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); -assert.eq( false , res.ok , "4i " + tojson(res) ); - -f.remove( { x : 3 } ); -f.reIndex(); // Necessary so that the index is no longer marked as multikey - -assert.eq( 1 , f.count() , "4. count after removing array value should be 1 again" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); -assert.eq( true , res.ok , "4e " + tojson(res) ); - -f.save( { x : 4, y : 4, z : [1, 2] } ) - -assert.eq( 2 , f.count() , "4. 
count after adding array value on third key should be 2" ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); -assert.eq( false , res.ok , "4c " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); -assert.eq( false , res.ok , "4d " + tojson(res) ); -res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); -assert.eq( false , res.ok , "4e " + tojson(res) ); - - -print("PASSED"); diff --git a/jstests/collmod.js b/jstests/collmod.js deleted file mode 100644 index 2dc5555f3ec..00000000000 --- a/jstests/collmod.js +++ /dev/null @@ -1,82 +0,0 @@ -// Basic js tests for the collMod command. -// Test setting the usePowerOf2Sizes flag, and modifying TTL indexes. - -function debug( x ) { - //printjson( x ); -} - -var coll = "collModTest"; -var t = db.getCollection( coll ); -t.drop(); - -db.createCollection( coll ); - - -// Verify the new collection has userFlags set to 1 -printjson(t.stats()); -assert.eq( t.stats().userFlags , 1 , "fresh collection doesn't have userFlags = 1 "); - -// Modify the collection with the usePowerOf2Sizes flag. Verify userFlags now = 0. 
-var res = db.runCommand( { "collMod" : coll, "usePowerOf2Sizes" : false } ); -debug( res ); -assert.eq( res.ok , 1 , "collMod failed" ); -assert.eq( t.stats().userFlags , 0 , "modified collection should have userFlags = 0 "); -var nso = db.system.namespaces.findOne( { name : t.getFullName() } ); -debug( nso ); -assert.eq( 0, nso.options.flags, "options didn't sync to system.namespaces: " + tojson( nso ) ); - -// Try to modify it with some unrecognized value -var res = db.runCommand( { "collMod" : coll, "unrecognized" : true } ); -debug( res ); -assert.eq( res.ok , 0 , "collMod shouldn't return ok with unrecognized value" ); - -// add a TTL index -t.ensureIndex( {a : 1}, { "expireAfterSeconds": 50 } ) -assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 50 } ), - "TTL index not added" ); - -// try to modify it with a bad key pattern -var res = db.runCommand( { "collMod" : coll, - "index" : { "keyPattern" : "bad" , "expireAfterSeconds" : 100 } } ); -debug( res ); -assert.eq( 0 , res.ok , "mod shouldn't work with bad keypattern"); - -// try to modify it without expireAfterSeconds field -var res = db.runCommand( { "collMod" : coll, - "index" : { "keyPattern" : {a : 1} } } ); -debug( res ); -assert.eq( 0 , res.ok , "TTL mod shouldn't work without expireAfterSeconds"); - -// try to modify it with a non-numeric expireAfterSeconds field -var res = db.runCommand( { "collMod" : coll, - "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : "100" } } ); -debug( res ); -assert.eq( 0 , res.ok , "TTL mod shouldn't work with non-numeric expireAfterSeconds"); - -// this time modifying should finally work -var res = db.runCommand( { "collMod" : coll, - "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : 100 } } ); -debug( res ); -assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ), - "TTL index not modified" ); - -// try to modify a faulty TTL index with a non-numeric expireAfterSeconds field -t.dropIndex( {a : 1 } 
); -t.ensureIndex( {a : 1} , { "expireAfterSeconds": "50" } ) -var res = db.runCommand( { "collMod" : coll, - "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } ); -debug( res ); -assert.eq( 0, res.ok, "shouldn't be able to modify faulty index spec" ); - -// try with new index, this time set both expireAfterSeconds and the usePowerOf2Sizes flag -t.dropIndex( {a : 1 } ); -t.ensureIndex( {a : 1} , { "expireAfterSeconds": 50 } ) -var res = db.runCommand( { "collMod" : coll , - "usePowerOf2Sizes" : true, - "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } ); -debug( res ); -assert.eq( 1, res.ok, "should be able to modify both userFlags and expireAfterSeconds" ); -assert.eq( t.stats().userFlags , 1 , "userflags should be 1 now"); -assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ), - "TTL index should be 100 now" ); - diff --git a/jstests/compact.js b/jstests/compact.js deleted file mode 100644 index 2121debc17e..00000000000 --- a/jstests/compact.js +++ /dev/null @@ -1,76 +0,0 @@ -// compact.js - -var mydb = db.getSiblingDB('compact'); -t = mydb.compacttest; -t.drop(); -t.insert({ x: 3 }); -t.insert({ x: 3 }); -t.insert({ x: 5 }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.insert({ x: 4, z: 2, k: 'aaa' }); -t.ensureIndex({ x: 1 }); - -print("1"); - -var res = mydb.runCommand({ compact: 'compacttest', dev: true, force: true }); -printjson(res); -assert(res.ok); -assert(t.count() == 9); -var v = t.validate(true); -assert(v.ok); -assert(v.extentCount == 1); -assert(v.deletedCount == 1); -assert(t.getIndexes().length == 2); -var ssize = t.stats().storageSize; - -print("2"); -res = mydb.runCommand({ compact: 'compacttest', dev: true,paddingBytes:1000, force:true }); -assert(res.ok); -assert(t.count() == 9); -var v = t.validate(true); -assert(v.ok); 
-assert(t.stats().storageSize > ssize, "expected more storage given padding is higher. however it rounds off so if something changed this could be"); -//printjson(t.stats()); - -print("z"); - -t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); -t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); -t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); -t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); -t.insert({ x: 4, z: null, k: { f: "", b: ""} }); -t.insert({ x: 4, z: null, k: { c: ""} }); -t.insert({ x: 4, z: null, k: { h: ""} }); -t.insert({ x: 4, z: null }); -t.insert({ x: 4, z: 3}); -t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); -t.insert({ x: 4, z: null, k: { c: ""} }); -t.insert({ x: 4, z: null, k: { c: ""} }); -t.insert({ x: 4, z: 3, k: { c: ""} }); - -t.ensureIndex({ z: 1, k: 1 }); -//t.ensureIndex({ z: 1, k: 1 }, { unique: true }); -//t.ensureIndex({ z: 1, k: 1 }, { dropDups: true, unique:true }); - -res = mydb.runCommand({ compact: 'compacttest', dev: true, paddingFactor: 1.2, force:true }); -printjson(res); -assert(res.ok); -assert(t.count() > 13); -var v = t.validate(true); -assert(v.ok); - -print("3"); - -// works on an empty collection? -t.remove({}); -assert(mydb.runCommand({ compact: 'compacttest', dev: true, force:true }).ok); -assert(t.count() == 0); -v = t.validate(true); -assert(v.ok); -assert(v.extentCount == 1); -assert(t.getIndexes().length == 3); - diff --git a/jstests/compact2.js b/jstests/compact2.js deleted file mode 100644 index 0a7c343a3f9..00000000000 --- a/jstests/compact2.js +++ /dev/null @@ -1,52 +0,0 @@ -// Compaction of a v0 index converts it to a v1 index using a v1 index comparator during external -// sort. SERVER-6499 - -t = db.jstests_compact2; -t.drop(); - -/** - * Assert that the index is of the expected version and its keys are ordered consistently with this - * version, and that the unique and background fields are set correctly. 
- */ -function assertIndex( expectedVersion, unique, background ) { - indexSpec = db.system.indexes.findOne( { ns:t.toString(), key:{ date:1 } } ); - // The index version is as expected. - assert.eq( expectedVersion, indexSpec.v ); - // The index uniqueness is as expected (treat missing and false unique specs as equivalent). - assert.eq( !unique, !indexSpec.unique ); - // Background is as expected. - assert.eq( !background, !indexSpec.background ); - // Check that 'date' key ordering is consistent with the index version. - dates = t.find().hint( { date:1 } ).toArray().map( function( x ) { return x.date; } ); - if ( expectedVersion == 0 ) { - // Under v0 index comparison, new Date( -1 ) > new Date( 1 ). - assert.eq( [ new Date( 1 ), new Date( -1 ) ], dates ); - } - else { - // Under v1 index comparsion, new Date( -1 ) < new Date( 1 ). - assert.eq( [ new Date( -1 ), new Date( 1 ) ], dates ); - } -} - -/** Compact a collection and check the resulting indexes. */ -function checkCompact( originalVersion, unique, background ) { - t.drop(); - t.save( { date:new Date( 1 ) } ); - t.save( { date:new Date( -1 ) } ); - t.ensureIndex( { date:1 }, { unique:unique, v:originalVersion, background:background } ); - assertIndex( originalVersion, unique, background ); - - // Under SERVER-6499, compact fails when a v0 index is converted to a v1 index and key - // comparisons are inconsistent, as with the date values in this test. - assert.commandWorked( t.runCommand( "compact" ) ); - assert( !db.getLastError() ); - - // Compact built an index with the default index version (v1). Uniqueness is maintained, but - // background always becomes false. 
- assertIndex( 1, unique, false ); -} - -checkCompact( 0, true, true ); -checkCompact( 0, false, false ); -checkCompact( 1, true, false ); -checkCompact( 1, false, true ); diff --git a/jstests/compactPreservePadding.js b/jstests/compactPreservePadding.js deleted file mode 100644 index 4748afb9a82..00000000000 --- a/jstests/compactPreservePadding.js +++ /dev/null @@ -1,26 +0,0 @@ -// test preservePadding - -var mydb = db.getSiblingDB('compactPreservePadding'); -var collName = "compactPreservePadding"; -var t = mydb.getCollection(collName); -t.drop(); - -// use larger keyname to avoid hitting an edge case with extents -for (i = 0; i < 10000; i++) { - t.insert({useLargerKeyName:i}); -} - -// remove half the entries -t.remove({useLargerKeyName:{$mod:[2,0]}}) -printjson(t.stats()); -originalSize = t.stats().size; -originalStorage = t.stats().storageSize; - -// compact! -mydb.runCommand({compact: collName, preservePadding: true}); -printjson(t.stats()); - -// object sizes ('size') should be the same (unless we hit an edge case involving extents, which -// this test doesn't) and storage size should shrink -assert(originalSize == t.stats().size); -assert(originalStorage > t.stats().storageSize); diff --git a/jstests/connection_status.js b/jstests/connection_status.js deleted file mode 100644 index 08d05cbf28d..00000000000 --- a/jstests/connection_status.js +++ /dev/null @@ -1,27 +0,0 @@ -// Tests the connectionStatus command - -var dbName = 'connection_status'; -var myDB = db.getSiblingDB(dbName); -myDB.dropAllUsers(); - -function test(userName) { - myDB.createUser({user: userName, pwd: "weak password", roles: jsTest.basicUserRoles}); - myDB.auth(userName, "weak password"); - - var output = myDB.runCommand("connectionStatus"); - assert.commandWorked(output); - var users = output.authInfo.authenticatedUsers; - - var matches = 0; - for (var i=0; i < users.length; i++) { - if (users[i].db != dbName) - continue; - - assert.eq(users[i].user, userName); - matches++; - } - 
assert.eq(matches, 1); -} - -test("someone"); -test("someone else"); // replaces someone diff --git a/jstests/connection_string_validation.js b/jstests/connection_string_validation.js deleted file mode 100644 index 4ecd1f926ee..00000000000 --- a/jstests/connection_string_validation.js +++ /dev/null @@ -1,106 +0,0 @@ -// Test validation of connection strings passed to the JavaScript "connect()" function. -// Related to SERVER-8030. - -port = "27017" - -if ( db.getMongo().host.indexOf( ":" ) >= 0 ) { - var idx = db.getMongo().host.indexOf( ":" ); - port = db.getMongo().host.substring( idx + 1 ); -} - -var goodStrings = [ - "localhost:" + port + "/test", - "127.0.0.1:" + port + "/test" - ]; - -var badStrings = [ - { s: undefined, r: /^Missing connection string$/ }, - { s: 7, r: /^Incorrect type/ }, - { s: null, r: /^Incorrect type/ }, - { s: "", r: /^Empty connection string$/ }, - { s: " ", r: /^Empty connection string$/ }, - { s: ":", r: /^Missing host name/ }, - { s: "/", r: /^Missing host name/ }, - { s: ":/", r: /^Missing host name/ }, - { s: ":/test", r: /^Missing host name/ }, - { s: ":" + port + "/", r: /^Missing host name/ }, - { s: ":" + port + "/test", r: /^Missing host name/ }, - { s: "/test", r: /^Missing host name/ }, - { s: "localhost:/test", r: /^Missing port number/ }, - { s: "127.0.0.1:/test", r: /^Missing port number/ }, - { s: "127.0.0.1:cat/test", r: /^Invalid port number/ }, - { s: "127.0.0.1:1cat/test", r: /^Invalid port number/ }, - { s: "127.0.0.1:123456/test", r: /^Invalid port number/ }, - { s: "127.0.0.1:65536/test", r: /^Invalid port number/ }, - { s: "::1:65536/test", r: /^Invalid port number/ }, - { s: "127.0.0.1:" + port + "/", r: /^Missing database name/ }, - { s: "::1:" + port + "/", r: /^Missing database name/ } - ]; - -function testGood(i, connectionString) { - print("\nTesting good connection string " + i + " (\"" + connectionString + "\") ..."); - var gotException = false; - var exception; - try { - var connectDB = 
connect(connectionString); - connectDB = null; - } - catch (e) { - gotException = true; - exception = e; - } - if (!gotException) { - print("Good connection string " + i + - " (\"" + connectionString + "\") correctly validated"); - return; - } - var message = "FAILED to correctly validate goodString " + i + - " (\"" + connectionString + "\"): exception was \"" + tojson(exception) + "\""; - doassert(message); -} - -function testBad(i, connectionString, errorRegex) { - print("\nTesting bad connection string " + i + " (\"" + connectionString + "\") ..."); - var gotException = false; - var gotCorrectErrorText = false; - var exception; - try { - var connectDB = connect(connectionString); - connectDB = null; - } - catch (e) { - gotException = true; - exception = e; - if (errorRegex.test(e.message)) { - gotCorrectErrorText = true; - } - } - if (gotCorrectErrorText) { - print("Bad connection string " + i + " (\"" + connectionString + - "\") correctly rejected:\n" + tojson(exception)); - return; - } - var message = "FAILED to generate correct exception for badString " + i + - " (\"" + connectionString + "\"): "; - if (gotException) { - message += "exception was \"" + tojson(exception) + - "\", it should have matched \"" + errorRegex.toString() + "\""; - } - else { - message += "no exception was thrown"; - } - doassert(message); -} - -var i; -jsTest.log("TESTING " + goodStrings.length + " good connection strings"); -for (i = 0; i < goodStrings.length; ++i) { - testGood(i, goodStrings[i]); -} - -jsTest.log("TESTING " + badStrings.length + " bad connection strings"); -for (i = 0; i < badStrings.length; ++i) { - testBad(i, badStrings[i].s, badStrings[i].r); -} - -jsTest.log("SUCCESSFUL test completion"); diff --git a/jstests/constructors.js b/jstests/constructors.js deleted file mode 100644 index 5d4dd177425..00000000000 --- a/jstests/constructors.js +++ /dev/null @@ -1,314 +0,0 @@ -// Tests to see what validity checks are done for 10gen specific object construction - -// Takes 
a list of constructors and returns a new list with an extra entry for each constructor with -// "new" prepended -function addConstructorsWithNew (constructorList) { - function prependNew (constructor) { - return "new " + constructor; - } - - var valid = constructorList.valid; - var invalid = constructorList.invalid; - // We use slice(0) here to make a copy of our lists - var validWithNew = valid.concat(valid.slice(0).map(prependNew)); - var invalidWithNew = invalid.concat(invalid.slice(0).map(prependNew)); - return { "valid" : validWithNew, "invalid" : invalidWithNew }; -} - -function clientEvalConstructorTest (constructorList) { - constructorList = addConstructorsWithNew(constructorList); - constructorList.valid.forEach(function (constructor) { - try { - eval(constructor); - } - catch (e) { - throw ("valid constructor: " + constructor + " failed in eval context: " + e); - } - }); - constructorList.invalid.forEach(function (constructor) { - assert.throws(function () { eval(constructor) }, - [], "invalid constructor did not throw error in eval context: " + constructor); - }); -} - -function dbEvalConstructorTest (constructorList) { - constructorList = addConstructorsWithNew(constructorList); - constructorList.valid.forEach(function (constructor) { - try { - db.eval(constructor); - } - catch (e) { - throw ("valid constructor: " + constructor + " failed in db.eval context: " + e); - } - }); - constructorList.invalid.forEach(function (constructor) { - assert.throws(function () { db.eval(constructor) }, - [], "invalid constructor did not throw error in db.eval context: " + constructor); - }); -} - -function mapReduceConstructorTest (constructorList) { - constructorList = addConstructorsWithNew(constructorList); - t = db.mr_constructors; - t.drop(); - - t.save( { "partner" : 1, "visits" : 9 } ) - t.save( { "partner" : 2, "visits" : 9 } ) - t.save( { "partner" : 1, "visits" : 11 } ) - t.save( { "partner" : 1, "visits" : 30 } ) - t.save( { "partner" : 2, "visits" : 41 } ) 
- t.save( { "partner" : 2, "visits" : 41 } ) - - constructorList.valid.forEach(function (constructor) { - try { - m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }"); - - r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }"); - - res = t.mapReduce( m , r , { out : "mr_constructors_out" , scope : { xx : 1 } } ); - } - catch (e) { - throw ("valid constructor: " + constructor + " failed in mapReduce context: " + e); - } - }); - constructorList.invalid.forEach(function (constructor) { - m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }"); - - r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }"); - - assert.throws(function () { res = t.mapReduce( m , r , - { out : "mr_constructors_out" , scope : { xx : 1 } } ) }, - [], "invalid constructor did not throw error in mapReduce context: " + constructor); - }); - - db.mr_constructors_out.drop(); - t.drop(); -} - -function whereConstructorTest (constructorList) { - constructorList = addConstructorsWithNew(constructorList); - t = db.where_constructors; - t.drop(); - t.insert({ x : 1 }); - assert(!db.getLastError()); - - constructorList.valid.forEach(function (constructor) { - try { - t.findOne({ $where : constructor }); - } - catch (e) { - throw ("valid constructor: " + constructor + " failed in $where query: " + e); - } - }); - constructorList.invalid.forEach(function (constructor) { - assert.throws(function () { t.findOne({ $where : constructor }) }, - [], "invalid constructor did not throw error in $where query: " + constructor); - }); -} - -var dbrefConstructors = { - "valid" : [ - "DBRef(\"namespace\", 0)", - "DBRef(\"namespace\", \"test\")", - "DBRef(\"namespace\", ObjectId())", - "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))", - ], - "invalid" : [ - "DBRef()", - "DBRef(true, ObjectId())", - "DBRef(\"namespace\")", - "DBRef(\"namespace\", ObjectId(), true)", - ] -} - -var dbpointerConstructors = { - "valid" : [ - 
"DBPointer(\"namespace\", ObjectId())", - "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))", - ], - "invalid" : [ - "DBPointer()", - "DBPointer(true, ObjectId())", - "DBPointer(\"namespace\", 0)", - "DBPointer(\"namespace\", \"test\")", - "DBPointer(\"namespace\")", - "DBPointer(\"namespace\", ObjectId(), true)", - ] -} - - -var objectidConstructors = { - "valid" : [ - 'ObjectId()', - 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")', - ], - "invalid" : [ - 'ObjectId(5)', - 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")', - ] -} - -var timestampConstructors = { - "valid" : [ - 'Timestamp()', - 'Timestamp(0,0)', - 'Timestamp(1.0,1.0)', - ], - "invalid" : [ - 'Timestamp(0)', - 'Timestamp(0,0,0)', - 'Timestamp("test","test")', - 'Timestamp("test",0)', - 'Timestamp(0,"test")', - 'Timestamp(true,true)', - 'Timestamp(true,0)', - 'Timestamp(0,true)', - ] -} - -var bindataConstructors = { - "valid" : [ - 'BinData(0,"test")', - ], - "invalid" : [ - 'BinData(0,"test", "test")', - 'BinData()', - 'BinData(-1, "")', - 'BinData(256, "")', - 'BinData("string","aaaa")', - // SERVER-10152 - //'BinData(0, true)', - //'BinData(0, null)', - //'BinData(0, undefined)', - //'BinData(0, {})', - //'BinData(0, [])', - //'BinData(0, function () {})', - ] -} - -var uuidConstructors = { - "valid" : [ - 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', - ], - "invalid" : [ - 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', - 'UUID()', - 'UUID("aa")', - 'UUID("invalidhex")', - // SERVER-9686 - //'UUID("invalidhexbutstilltherequiredlen")', - 'UUID(true)', - 'UUID(null)', - 'UUID(undefined)', - 'UUID({})', - 'UUID([])', - 'UUID(function () {})', - ] -} - -var md5Constructors = { - "valid" : [ - 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', - ], - "invalid" : [ - 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', - 'MD5()', - 'MD5("aa")', - 'MD5("invalidhex")', - // SERVER-9686 - //'MD5("invalidhexbutstilltherequiredlen")', - 'MD5(true)', - 'MD5(null)', - 'MD5(undefined)', - 'MD5({})', - 'MD5([])', - 'MD5(function () 
{})', - ] -} - -var hexdataConstructors = { - "valid" : [ - 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', - // Numbers as the payload are converted to strings, so HexData(0, 100) == HexData(0, "100") - 'HexData(0, 100)', - 'HexData(0, "")', - 'HexData(0, "aaa")', - 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', - 'HexData(0, "000000000000000000000005")', // SERVER-9605 - ], - "invalid" : [ - 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', - 'HexData()', - 'HexData(0)', - 'HexData(-1, "")', - 'HexData(256, "")', - 'HexData("string","aaaa")', - // SERVER-10152 - //'HexData(0, true)', - //'HexData(0, null)', - //'HexData(0, undefined)', - //'HexData(0, {})', - //'HexData(0, [])', - //'HexData(0, function () {})', - // SERVER-9686 - //'HexData(0, "invalidhex")', - ] -} - -var dateConstructors = { - "valid" : [ - 'Date()', - 'Date(0)', - 'Date(0,0)', - 'Date(0,0,0)', - 'Date("foo")', - ], - "invalid" : [ - ] -} - -clientEvalConstructorTest(dbrefConstructors); -clientEvalConstructorTest(dbpointerConstructors); -clientEvalConstructorTest(objectidConstructors); -clientEvalConstructorTest(timestampConstructors); -clientEvalConstructorTest(bindataConstructors); -clientEvalConstructorTest(uuidConstructors); -clientEvalConstructorTest(md5Constructors); -clientEvalConstructorTest(hexdataConstructors); -clientEvalConstructorTest(dateConstructors); - -dbEvalConstructorTest(dbrefConstructors); -dbEvalConstructorTest(dbpointerConstructors); -dbEvalConstructorTest(objectidConstructors); -dbEvalConstructorTest(timestampConstructors); -dbEvalConstructorTest(bindataConstructors); -dbEvalConstructorTest(uuidConstructors); -dbEvalConstructorTest(md5Constructors); -dbEvalConstructorTest(hexdataConstructors); -dbEvalConstructorTest(dateConstructors); - -// SERVER-8963 -if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") { - mapReduceConstructorTest(dbrefConstructors); - mapReduceConstructorTest(dbpointerConstructors); - 
mapReduceConstructorTest(objectidConstructors); - mapReduceConstructorTest(timestampConstructors); - mapReduceConstructorTest(bindataConstructors); - mapReduceConstructorTest(uuidConstructors); - mapReduceConstructorTest(md5Constructors); - mapReduceConstructorTest(hexdataConstructors); -} -mapReduceConstructorTest(dateConstructors); - -// SERVER-8963 -if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") { - whereConstructorTest(dbrefConstructors); - whereConstructorTest(dbpointerConstructors); - whereConstructorTest(objectidConstructors); - whereConstructorTest(timestampConstructors); - whereConstructorTest(bindataConstructors); - whereConstructorTest(uuidConstructors); - whereConstructorTest(md5Constructors); - whereConstructorTest(hexdataConstructors); -} -whereConstructorTest(dateConstructors); diff --git a/jstests/copydb.js b/jstests/copydb.js deleted file mode 100644 index 7c7c02542a4..00000000000 --- a/jstests/copydb.js +++ /dev/null @@ -1,20 +0,0 @@ - - - - -a = db.getSisterDB( "copydb-test-a" ); -b = db.getSisterDB( "copydb-test-b" ); - -a.dropDatabase(); -b.dropDatabase(); - -a.foo.save( { a : 1 } ); - -assert.eq( 1 , a.foo.count() , "A" ); -assert.eq( 0 , b.foo.count() , "B" ); - -a.copyDatabase( a._name , b._name ); - -assert.eq( 1 , a.foo.count() , "C" ); -assert.eq( 1 , b.foo.count() , "D" ); - diff --git a/jstests/core/all.js b/jstests/core/all.js new file mode 100644 index 00000000000..50087882ad1 --- /dev/null +++ b/jstests/core/all.js @@ -0,0 +1,47 @@ +t = db.jstests_all; +t.drop(); + +doTest = function() { + + t.save( { a:[ 1,2,3 ] } ); + t.save( { a:[ 1,2,4 ] } ); + t.save( { a:[ 1,8,5 ] } ); + t.save( { a:[ 1,8,6 ] } ); + t.save( { a:[ 1,9,7 ] } ); + t.save( { a : [] } ); + t.save( {} ); + + assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() ); + assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() ); + assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() ); + assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() ); + 
assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() ); + assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() ); + assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() ); + + t.save( { a: [ 2, 2 ] } ); + assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() ); + + t.save( { a: [ [ 2 ] ] } ); + assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() ); + + t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } ); + assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() ); + assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() ); + + t.save( { a: { b: [ 20, 30 ] } } ); + assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() ); + assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() ); + + + assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" ); + assert.eq( 0 , t.find( { a : { $all : [19] } } ).count() , "E2" ); + assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" ); + + +} + +doTest(); +t.drop(); +t.ensureIndex( {a:1} ); +doTest(); diff --git a/jstests/core/all2.js b/jstests/core/all2.js new file mode 100644 index 00000000000..64372ca5e97 --- /dev/null +++ b/jstests/core/all2.js @@ -0,0 +1,86 @@ + +t = db.all2; +t.drop(); + +t.save( { a : [ { x : 1 } , { x : 2 } ] } ) +t.save( { a : [ { x : 2 } , { x : 3 } ] } ) +t.save( { a : [ { x : 3 } , { x : 4 } ] } ) + +state = "no index"; + +function check( n , q , e ){ + assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state ); + assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount" + state ); +} + +check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" ); +check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" ); + +check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" ); +check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" ); +check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" ); + +check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" ); +check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" ); +check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" ); + +t.ensureIndex( { 
"a.x" : 1 } ); +state = "index"; + +check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" ); +check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" ); + +check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" ); +check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" ); +check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" ); + +check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" ); +check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" ); +check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" ); + +// --- more + +t.drop(); + +t.save( { a : [ 1 , 2 ] } ) +t.save( { a : [ 2 , 3 ] } ) +t.save( { a : [ 3 , 4 ] } ) + +state = "more no index"; + +check( 1 , { "a" : { $in : [ 1 ] } } , "A" ); +check( 2 , { "a" : { $in : [ 2 ] } } , "B" ); + +check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" ); +check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" ); +check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" ); + +check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" ); +check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" ); +check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" ); + +t.ensureIndex( { "a" : 1 } ); +state = "more index"; + +check( 1 , { "a" : { $in : [ 1 ] } } , "A" ); +check( 2 , { "a" : { $in : [ 2 ] } } , "B" ); + +check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" ); +check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" ); +check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" ); + +check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" ); +check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" ); +check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" ); + + +// more 2 + +state = "more 2" + +t.drop(); +t.save( { name : [ "harry","jack","tom" ] } ) +check( 0 , { name : { $all : ["harry","john"] } } , "A" ); +t.ensureIndex( { name : 1 } ); +check( 0 , { name : { $all : ["harry","john"] } } , "B" ); + diff --git a/jstests/core/all3.js b/jstests/core/all3.js new file mode 100644 index 00000000000..b7a05321bbf --- /dev/null +++ b/jstests/core/all3.js @@ -0,0 +1,28 @@ +// Check that $all matching null is consistent with $in - SERVER-3820 + +t = db.jstests_all3; +t.drop(); 
+ +t.save({}); + +assert.eq( 1, t.count( {foo:{$in:[null]}} ) ); +assert.eq( 1, t.count( {foo:{$all:[null]}} ) ); +assert.eq( 0, t.count( {foo:{$not:{$all:[null]}}} ) ); +assert.eq( 0, t.count( {foo:{$not:{$in:[null]}}} ) ); + +t.remove({}); +t.save({foo:1}); +assert.eq( 0, t.count( {foo:{$in:[null]}} ) ); +assert.eq( 0, t.count( {foo:{$all:[null]}} ) ); +assert.eq( 1, t.count( {foo:{$not:{$in:[null]}}} ) ); +assert.eq( 1, t.count( {foo:{$not:{$all:[null]}}} ) ); + +t.remove({}); +t.save( {foo:[0,1]} ); +assert.eq( 1, t.count( {foo:{$in:[[0,1]]}} ) ); +assert.eq( 1, t.count( {foo:{$all:[[0,1]]}} ) ); + +t.remove({}); +t.save( {foo:[]} ); +assert.eq( 1, t.count( {foo:{$in:[[]]}} ) ); +assert.eq( 1, t.count( {foo:{$all:[[]]}} ) ); diff --git a/jstests/core/all4.js b/jstests/core/all4.js new file mode 100644 index 00000000000..109795754bc --- /dev/null +++ b/jstests/core/all4.js @@ -0,0 +1,28 @@ +// Test $all/$elemMatch with missing field - SERVER-4492 + +t = db.jstests_all4; +t.drop(); + +function checkQuery( query, val ) { + assert.eq( val, t.count(query) ); + assert.eq( val, t.find(query).itcount() ); +} + +checkQuery( {a:{$all:[]}}, 0 ); +checkQuery( {a:{$all:[1]}}, 0 ); +checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); + +t.save({}); +checkQuery( {a:{$all:[]}}, 0 ); +checkQuery( {a:{$all:[1]}}, 0 ); +checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); + +t.save({a:1}); +checkQuery( {a:{$all:[]}}, 0 ); +checkQuery( {a:{$all:[1]}}, 1 ); +checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 ); + +t.save({a:[{b:1}]}); +checkQuery( {a:{$all:[]}}, 0 ); +checkQuery( {a:{$all:[1]}}, 1 ); +checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 1 ); diff --git a/jstests/core/all5.js b/jstests/core/all5.js new file mode 100644 index 00000000000..a5d9e312292 --- /dev/null +++ b/jstests/core/all5.js @@ -0,0 +1,28 @@ +// Test $all/$elemMatch/null matching - SERVER-4517 + +t = db.jstests_all5; +t.drop(); + +function checkMatch( doc ) { + t.drop(); + t.save( doc ); + assert.eq( 1, t.count( 
{a:{$elemMatch:{b:null}}} ) ); + assert.eq( 1, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) ); +} + +function checkNoMatch( doc ) { + t.drop(); + t.save( doc ); + assert.eq( 0, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) ); +} + +checkNoMatch( {} ); +checkNoMatch( {a:1} ); + +checkNoMatch( {a:[]} ); +checkNoMatch( {a:[1]} ); + +checkMatch( {a:[{}]} ); +checkMatch( {a:[{c:1}]} ); +checkMatch( {a:[{b:null}]} ); +checkNoMatch( {a:[{b:1}]}, 0 ); diff --git a/jstests/core/and.js b/jstests/core/and.js new file mode 100644 index 00000000000..4d8c2cd7d49 --- /dev/null +++ b/jstests/core/and.js @@ -0,0 +1,85 @@ +// Some tests for $and SERVER-1089 + +t = db.jstests_and; +t.drop(); + +t.save( {a:[1,2]} ); +t.save( {a:'foo'} ); + +function check() { + // $and must be an array + assert.throws( function() { t.find( {$and:4} ).toArray() } ); + // $and array must not be empty + assert.throws( function() { t.find( {$and:[]} ).toArray() } ); + // $and elements must be objects + assert.throws( function() { t.find( {$and:[4]} ).toArray() } ); + + // Check equality matching + assert.eq( 1, t.count( {$and:[{a:1}]} ) ); + assert.eq( 1, t.count( {$and:[{a:1},{a:2}]} ) ); + assert.eq( 0, t.count( {$and:[{a:1},{a:3}]} ) ); + assert.eq( 0, t.count( {$and:[{a:1},{a:2},{a:3}]} ) ); + assert.eq( 1, t.count( {$and:[{a:'foo'}]} ) ); + assert.eq( 0, t.count( {$and:[{a:'foo'},{a:'g'}]} ) ); + + // Check $and with other fields + assert.eq( 1, t.count( {a:2,$and:[{a:1}]} ) ); + assert.eq( 0, t.count( {a:0,$and:[{a:1}]} ) ); + assert.eq( 0, t.count( {a:2,$and:[{a:0}]} ) ); + assert.eq( 1, t.count( {a:1,$and:[{a:1}]} ) ); + + // Check recursive $and + assert.eq( 1, t.count( {a:2,$and:[{$and:[{a:1}]}]} ) ); + assert.eq( 0, t.count( {a:0,$and:[{$and:[{a:1}]}]} ) ); + assert.eq( 0, t.count( {a:2,$and:[{$and:[{a:0}]}]} ) ); + assert.eq( 1, t.count( {a:1,$and:[{$and:[{a:1}]}]} ) ); + + assert.eq( 1, t.count( {$and:[{a:2},{$and:[{a:1}]}]} ) ); + assert.eq( 0, t.count( {$and:[{a:0},{$and:[{a:1}]}]} ) ); + 
assert.eq( 0, t.count( {$and:[{a:2},{$and:[{a:0}]}]} ) ); + assert.eq( 1, t.count( {$and:[{a:1},{$and:[{a:1}]}]} ) ); + + // Some of these cases were more important with an alternative $and syntax + // that was rejected, but they're still valid checks. + + // Check simple regex + assert.eq( 1, t.count( {$and:[{a:/foo/}]} ) ); + // Check multiple regexes + assert.eq( 1, t.count( {$and:[{a:/foo/},{a:/^f/},{a:/o/}]} ) ); + assert.eq( 0, t.count( {$and:[{a:/foo/},{a:/^g/}]} ) ); + assert.eq( 1, t.count( {$and:[{a:/^f/},{a:'foo'}]} ) ); + // Check regex flags + assert.eq( 0, t.count( {$and:[{a:/^F/},{a:'foo'}]} ) ); + assert.eq( 1, t.count( {$and:[{a:/^F/i},{a:'foo'}]} ) ); + + + + // Check operator + assert.eq( 1, t.count( {$and:[{a:{$gt:0}}]} ) ); + + // Check where + assert.eq( 1, t.count( {a:'foo',$where:'this.a=="foo"'} ) ); + assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) ); + assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) ); + + // Nested where ok + assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}]}) ); + assert.eq( 1, t.count({$and:[{a:'foo'},{$where:'this.a=="foo"'}]}) ); + assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}],$where:'this.a=="foo"'}) ); +} + +check(); +t.ensureIndex( {a:1} ); +check(); +var e = t.find( {$and:[{a:1}]} ).explain(); +assert.eq( 'BtreeCursor a_1', e.cursor ); +assert.eq( [[1,1]], e.indexBounds.a ); + +function checkBounds( query ) { + var e = t.find( query ).explain(true); + printjson(e); + assert.eq( 1, e.n ); +} + +checkBounds( {a:1,$and:[{a:2}]} ); +checkBounds( {$and:[{a:1},{a:2}]} ); diff --git a/jstests/core/and2.js b/jstests/core/and2.js new file mode 100644 index 00000000000..0bd13eb7a1d --- /dev/null +++ b/jstests/core/and2.js @@ -0,0 +1,27 @@ +// Test dollar sign operator with $and SERVER-1089 + +t = db.jstests_and2; + +t.drop(); +t.save( {a:[1,2]} ); +t.update( {a:1}, {$set:{'a.$':5}} ); +assert.eq( [5,2], t.findOne().a ); + +t.drop(); +t.save( {a:[1,2]} ); +t.update( 
{$and:[{a:1}]}, {$set:{'a.$':5}} ); +assert.eq( [5,2], t.findOne().a ); + +// Make sure dollar sign operator with $and is consistent with no $and case +t.drop(); +t.save( {a:[1,2],b:[3,4]} ); +t.update( {a:1,b:4}, {$set:{'a.$':5}} ); +// Probably not what we want here, just trying to make sure $and is consistent +assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] ); + +// Make sure dollar sign operator with $and is consistent with no $and case +t.drop(); +t.save( {a:[1,2],b:[3,4]} ); +t.update( {a:1,$and:[{b:4}]}, {$set:{'a.$':5}} ); +// Probably not what we want here, just trying to make sure $and is consistent +assert.eq( {a:[1,5],b:[3,4]}, t.find( {}, {_id:0} ).toArray()[ 0 ] ); diff --git a/jstests/core/and3.js b/jstests/core/and3.js new file mode 100644 index 00000000000..036c63c02f0 --- /dev/null +++ b/jstests/core/and3.js @@ -0,0 +1,67 @@ +// Check key match with sub matchers - part of SERVER-3192 + +t = db.jstests_and3; +t.drop(); + +t.save( {a:1} ); +t.save( {a:'foo'} ); + +t.ensureIndex( {a:1} ); + +function checkScanMatch( query, nscannedObjects, n ) { + var e = t.find( query ).hint( {a:1} ).explain(); + assert.eq( nscannedObjects, e.nscannedObjects ); + assert.eq( n, e.n ); +} + +checkScanMatch( {a:/o/}, 1, 1 ); +checkScanMatch( {a:/a/}, 0, 0 ); +checkScanMatch( {a:{$not:/o/}}, 2, 1 ); +checkScanMatch( {a:{$not:/a/}}, 2, 2 ); + +checkScanMatch( {$and:[{a:/o/}]}, 1, 1 ); +checkScanMatch( {$and:[{a:/a/}]}, 0, 0 ); +checkScanMatch( {$and:[{a:{$not:/o/}}]}, 2, 1 ); +checkScanMatch( {$and:[{a:{$not:/a/}}]}, 2, 2 ); +checkScanMatch( {$and:[{a:/o/},{a:{$not:/o/}}]}, 1, 0 ); +checkScanMatch( {$and:[{a:/o/},{a:{$not:/a/}}]}, 1, 1 ); +checkScanMatch( {$or:[{a:/o/}]}, 1, 1 ); +checkScanMatch( {$or:[{a:/a/}]}, 0, 0 ); +checkScanMatch( {$nor:[{a:/o/}]}, 2, 1 ); +checkScanMatch( {$nor:[{a:/a/}]}, 2, 2 ); + +checkScanMatch( {$and:[{$and:[{a:/o/}]}]}, 1, 1 ); +checkScanMatch( {$and:[{$and:[{a:/a/}]}]}, 0, 0 ); +checkScanMatch( 
{$and:[{$and:[{a:{$not:/o/}}]}]}, 2, 1 ); +checkScanMatch( {$and:[{$and:[{a:{$not:/a/}}]}]}, 2, 2 ); +checkScanMatch( {$and:[{$or:[{a:/o/}]}]}, 1, 1 ); +checkScanMatch( {$and:[{$or:[{a:/a/}]}]}, 0, 0 ); +checkScanMatch( {$or:[{a:{$not:/o/}}]}, 2, 1 ); +checkScanMatch( {$and:[{$or:[{a:{$not:/o/}}]}]}, 2, 1 ); +checkScanMatch( {$and:[{$or:[{a:{$not:/a/}}]}]}, 2, 2 ); +checkScanMatch( {$and:[{$nor:[{a:/o/}]}]}, 2, 1 ); +checkScanMatch( {$and:[{$nor:[{a:/a/}]}]}, 2, 2 ); + +checkScanMatch( {$where:'this.a==1'}, 2, 1 ); +checkScanMatch( {$and:[{$where:'this.a==1'}]}, 2, 1 ); + +checkScanMatch( {a:1,$where:'this.a==1'}, 1, 1 ); +checkScanMatch( {a:1,$and:[{$where:'this.a==1'}]}, 1, 1 ); +checkScanMatch( {$and:[{a:1},{$where:'this.a==1'}]}, 1, 1 ); +checkScanMatch( {$and:[{a:1,$where:'this.a==1'}]}, 1, 1 ); +checkScanMatch( {a:1,$and:[{a:1},{a:1,$where:'this.a==1'}]}, 1, 1 ); + +function checkImpossibleMatch( query ) { + var e = t.find( query ).explain(); + assert.eq( 0, e.n ); + // The explain output should include the indexBounds field. + // The presence of the indexBounds field indicates that the + // query can make use of an index. + assert('indexBounds' in e, 'index bounds are missing'); +} + +// With a single key index, all bounds are utilized. 
+assert.eq( [[1,1]], t.find( {$and:[{a:1}]} ).explain().indexBounds.a ); +assert.eq( [[1,1]], t.find( {a:1,$and:[{a:1}]} ).explain().indexBounds.a ); +checkImpossibleMatch( {a:1,$and:[{a:2}]} ); +checkImpossibleMatch( {$and:[{a:1},{a:2}]} ); diff --git a/jstests/core/andor.js b/jstests/core/andor.js new file mode 100644 index 00000000000..f433ade8228 --- /dev/null +++ b/jstests/core/andor.js @@ -0,0 +1,99 @@ +// SERVER-1089 Test and/or nesting + +t = db.jstests_andor; +t.drop(); + +// not ok +function ok( q ) { + assert.eq( 1, t.find( q ).itcount() ); +} + +t.save( {a:1} ); + +test = function() { + + ok( {a:1} ); + + ok( {$and:[{a:1}]} ); + ok( {$or:[{a:1}]} ); + + ok( {$and:[{$and:[{a:1}]}]} ); + ok( {$or:[{$or:[{a:1}]}]} ); + + ok( {$and:[{$or:[{a:1}]}]} ); + ok( {$or:[{$and:[{a:1}]}]} ); + + ok( {$and:[{$and:[{$or:[{a:1}]}]}]} ); + ok( {$and:[{$or:[{$and:[{a:1}]}]}]} ); + ok( {$or:[{$and:[{$and:[{a:1}]}]}]} ); + + ok( {$or:[{$and:[{$or:[{a:1}]}]}]} ); + + // now test $nor + + ok( {$and:[{a:1}]} ); + ok( {$nor:[{a:2}]} ); + + ok( {$and:[{$and:[{a:1}]}]} ); + ok( {$nor:[{$nor:[{a:1}]}]} ); + + ok( {$and:[{$nor:[{a:2}]}]} ); + ok( {$nor:[{$and:[{a:2}]}]} ); + + ok( {$and:[{$and:[{$nor:[{a:2}]}]}]} ); + ok( {$and:[{$nor:[{$and:[{a:2}]}]}]} ); + ok( {$nor:[{$and:[{$and:[{a:2}]}]}]} ); + + ok( {$nor:[{$and:[{$nor:[{a:1}]}]}]} ); + +} + +test(); +t.ensureIndex( {a:1} ); +test(); + +// Test an inequality base match. 
+ +test = function() { + + ok( {a:{$ne:2}} ); + + ok( {$and:[{a:{$ne:2}}]} ); + ok( {$or:[{a:{$ne:2}}]} ); + + ok( {$and:[{$and:[{a:{$ne:2}}]}]} ); + ok( {$or:[{$or:[{a:{$ne:2}}]}]} ); + + ok( {$and:[{$or:[{a:{$ne:2}}]}]} ); + ok( {$or:[{$and:[{a:{$ne:2}}]}]} ); + + ok( {$and:[{$and:[{$or:[{a:{$ne:2}}]}]}]} ); + ok( {$and:[{$or:[{$and:[{a:{$ne:2}}]}]}]} ); + ok( {$or:[{$and:[{$and:[{a:{$ne:2}}]}]}]} ); + + ok( {$or:[{$and:[{$or:[{a:{$ne:2}}]}]}]} ); + + // now test $nor + + ok( {$and:[{a:{$ne:2}}]} ); + ok( {$nor:[{a:{$ne:1}}]} ); + + ok( {$and:[{$and:[{a:{$ne:2}}]}]} ); + ok( {$nor:[{$nor:[{a:{$ne:2}}]}]} ); + + ok( {$and:[{$nor:[{a:{$ne:1}}]}]} ); + ok( {$nor:[{$and:[{a:{$ne:1}}]}]} ); + + ok( {$and:[{$and:[{$nor:[{a:{$ne:1}}]}]}]} ); + ok( {$and:[{$nor:[{$and:[{a:{$ne:1}}]}]}]} ); + ok( {$nor:[{$and:[{$and:[{a:{$ne:1}}]}]}]} ); + + ok( {$nor:[{$and:[{$nor:[{a:{$ne:2}}]}]}]} ); + +} + +t.drop(); +t.save( {a:1} ); +test(); +t.ensureIndex( {a:1} ); +test(); diff --git a/jstests/core/apitest_db.js b/jstests/core/apitest_db.js new file mode 100644 index 00000000000..c734d67bba7 --- /dev/null +++ b/jstests/core/apitest_db.js @@ -0,0 +1,77 @@ +/** + * Tests for the db object enhancement + */ + +assert( "test" == db, "wrong database currently not test" ); + +dd = function( x ){ + //print( x ); +} + +dd( "a" ); + + +dd( "b" ); + +/* + * be sure the public collection API is complete + */ +assert(db.createCollection , "createCollection" ); +assert(db.getProfilingLevel , "getProfilingLevel" ); +assert(db.setProfilingLevel , "setProfilingLevel" ); +assert(db.dbEval , "dbEval" ); +assert(db.group , "group" ); + +dd( "c" ); + +/* + * test createCollection + */ + +db.getCollection( "test" ).drop(); +db.getCollection( "system.namespaces" ).find().forEach( function(x) { assert(x.name != "test.test"); }); + +dd( "d" ); + +db.createCollection("test"); +var found = false; +db.getCollection( "system.namespaces" ).find().forEach( function(x) { if (x.name == "test.test") found = true; 
}); +assert(found, "found test.test in system.namespaces"); + +dd( "e" ); + +/* + * profile level + */ + +db.setProfilingLevel(0); +assert(db.getProfilingLevel() == 0, "prof level 0"); + +db.setProfilingLevel(1); +assert(db.getProfilingLevel() == 1, "p1"); + +db.setProfilingLevel(2); +assert(db.getProfilingLevel() == 2, "p2"); + +db.setProfilingLevel(0); +assert(db.getProfilingLevel() == 0, "prof level 0"); + +dd( "f" ); +asserted = false; +try { + db.setProfilingLevel(10); + assert(false); +} +catch (e) { + asserted = true; + assert(e.dbSetProfilingException); +} +assert( asserted, "should have asserted" ); + +dd( "g" ); + + + +assert.eq( "foo" , db.getSisterDB( "foo" ).getName() ) +assert.eq( "foo" , db.getSiblingDB( "foo" ).getName() ) + diff --git a/jstests/core/apitest_dbcollection.js b/jstests/core/apitest_dbcollection.js new file mode 100644 index 00000000000..0983b065477 --- /dev/null +++ b/jstests/core/apitest_dbcollection.js @@ -0,0 +1,115 @@ +/** + * Tests for the db collection + */ + + + +/* + * test drop + */ +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).find().length() == 0,1); + +db.getCollection( "test_db" ).save({a:1}); +assert(db.getCollection( "test_db" ).find().length() == 1,2); + +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).find().length() == 0,3); + +/* + * test count + */ + +assert(db.getCollection( "test_db" ).count() == 0,4); +db.getCollection( "test_db" ).save({a:1}); +assert(db.getCollection( "test_db" ).count() == 1,5); +for (i = 0; i < 100; i++) { + db.getCollection( "test_db" ).save({a:1}); +} +assert(db.getCollection( "test_db" ).count() == 101,6); +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).count() == 0,7); + +/* + * test clean (not sure... 
just be sure it doen't blow up, I guess + */ + + db.getCollection( "test_db" ).clean(); + + /* + * test validate + */ + +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).count() == 0,8); + +for (i = 0; i < 100; i++) { + db.getCollection( "test_db" ).save({a:1}); +} + +var v = db.getCollection( "test_db" ).validate(); +if( v.ns != "test.test_db" ) { + print("Error: wrong ns name"); + print(tojson(v)); +} +assert (v.ns == "test.test_db",9); +assert (v.ok == 1,10); + +assert.eq(100,v.nrecords,11) + +/* + * test deleteIndex, deleteIndexes + */ + +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).count() == 0,12); +db.getCollection( "test_db" ).dropIndexes(); +assert(db.getCollection( "test_db" ).getIndexes().length == 0,13); + +db.getCollection( "test_db" ).save({a:10}); +assert(db.getCollection( "test_db" ).getIndexes().length == 1,14); + +db.getCollection( "test_db" ).ensureIndex({a:1}); +db.getCollection( "test_db" ).save({a:10}); + +print( tojson( db.getCollection( "test_db" ).getIndexes() ) ); +assert.eq(db.getCollection( "test_db" ).getIndexes().length , 2,15); + +db.getCollection( "test_db" ).dropIndex({a:1}); +assert(db.getCollection( "test_db" ).getIndexes().length == 1,16); + +db.getCollection( "test_db" ).save({a:10}); +db.getCollection( "test_db" ).ensureIndex({a:1}); +db.getCollection( "test_db" ).save({a:10}); + +assert(db.getCollection( "test_db" ).getIndexes().length == 2,17); + +db.getCollection( "test_db" ).dropIndex("a_1"); +assert.eq( db.getCollection( "test_db" ).getIndexes().length , 1,18); + +db.getCollection( "test_db" ).save({a:10, b:11}); +db.getCollection( "test_db" ).ensureIndex({a:1}); +db.getCollection( "test_db" ).ensureIndex({b:1}); +db.getCollection( "test_db" ).save({a:10, b:12}); + +assert(db.getCollection( "test_db" ).getIndexes().length == 3,19); + +db.getCollection( "test_db" ).dropIndex({b:1}); +assert(db.getCollection( "test_db" ).getIndexes().length == 2,20); 
+db.getCollection( "test_db" ).dropIndex({a:1}); +assert(db.getCollection( "test_db" ).getIndexes().length == 1,21); + +db.getCollection( "test_db" ).save({a:10, b:11}); +db.getCollection( "test_db" ).ensureIndex({a:1}); +db.getCollection( "test_db" ).ensureIndex({b:1}); +db.getCollection( "test_db" ).save({a:10, b:12}); + +assert(db.getCollection( "test_db" ).getIndexes().length == 3,22); + +db.getCollection( "test_db" ).dropIndexes(); +assert(db.getCollection( "test_db" ).getIndexes().length == 1,23); + +db.getCollection( "test_db" ).find(); + +db.getCollection( "test_db" ).drop(); +assert(db.getCollection( "test_db" ).getIndexes().length == 0,24); diff --git a/jstests/core/apply_ops1.js b/jstests/core/apply_ops1.js new file mode 100644 index 00000000000..9e6cb39f7c7 --- /dev/null +++ b/jstests/core/apply_ops1.js @@ -0,0 +1,66 @@ + +t = db.apply_ops1; +t.drop(); + +assert.eq( 0 , t.find().count() , "A0" ); +a = db.adminCommand( { applyOps : [ { "op" : "i" , "ns" : t.getFullName() , "o" : { _id : 5 , x : 17 } } ] } ) +assert.eq( 1 , t.find().count() , "A1a" ); +assert.eq( true, a.results[0], "A1b" ); + +o = { _id : 5 , x : 17 } +assert.eq( o , t.findOne() , "A2" ); + +res = db.runCommand( { applyOps : [ + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } +] } ) + +o.x++; +o.x++; + +assert.eq( 1 , t.find().count() , "A3" ); +assert.eq( o , t.findOne() , "A4" ); +assert.eq( true, res.results[0], "A1b" ); +assert.eq( true, res.results[1], "A1b" ); + + +res = db.runCommand( { applyOps : + [ + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } + ] + , + preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ] + } ); + +o.x++; +o.x++; + +assert.eq( 1 , t.find().count() , "B1" ); 
+assert.eq( o , t.findOne() , "B2" ); +assert.eq( true, res.results[0], "B2a" ); +assert.eq( true, res.results[1], "B2b" ); + + +res = db.runCommand( { applyOps : + [ + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } + ] + , + preCondition : [ { ns : t.getFullName() , q : { _id : 5 } , res : { x : 19 } } ] + } ); + +assert.eq( 1 , t.find().count() , "B3" ); +assert.eq( o , t.findOne() , "B4" ); + +res = db.runCommand( { applyOps : + [ + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 5 } , "o" : { $inc : { x : 1 } } } , + { "op" : "u" , "ns" : t.getFullName() , "o2" : { _id : 6 } , "o" : { $inc : { x : 1 } } } + ] + } ); + +assert.eq( true, res.results[0], "B5" ); +assert.eq( true, res.results[1], "B6" ); + diff --git a/jstests/core/apply_ops2.js b/jstests/core/apply_ops2.js new file mode 100644 index 00000000000..1a5923c3465 --- /dev/null +++ b/jstests/core/apply_ops2.js @@ -0,0 +1,71 @@ +//Test applyops upsert flag SERVER-7452 + +var t = db.apply_ops2; +t.drop(); + +assert.eq(0, t.find().count(), "test collection not empty"); + +t.insert({_id:1, x:"init"}); + +//alwaysUpsert = true +print("Testing applyOps with alwaysUpsert = true"); + +var res = db.runCommand({ applyOps: [ + { + op: "u", + ns: t.getFullName(), + o2 : { _id: 1 }, + o: { $set: { x: "upsert=true existing" }} + }, + { + op: "u", + ns: t.getFullName(), + o2: { _id: 2 }, + o: { $set : { x: "upsert=true non-existing" }} + }], alwaysUpsert: true }); + +assert.eq(true, res.results[0], "upsert = true, existing doc update failed"); +assert.eq(true, res.results[1], "upsert = true, nonexisting doc not upserted"); +assert.eq(2, t.find().count(), "2 docs expected after upsert"); + +//alwaysUpsert = false +print("Testing applyOps with alwaysUpsert = false"); + +res = db.runCommand({ applyOps: [ + { + op: "u", + ns: t.getFullName(), + o2: { _id: 1 }, + o: { $set : { x: 
"upsert=false existing" }} + }, + { + op: "u", + ns: t.getFullName(), + o2: { _id: 3 }, + o: { $set: { x: "upsert=false non-existing" }} + }], alwaysUpsert: false }); + +assert.eq(true, res.results[0], "upsert = false, existing doc update failed"); +assert.eq(false, res.results[1], "upsert = false, nonexisting doc upserted"); +assert.eq(2, t.find().count(), "2 docs expected after upsert failure"); + +//alwaysUpsert not specified, should default to true +print("Testing applyOps with default alwaysUpsert"); + +res = db.runCommand({ applyOps: [ + { + op: "u", + ns: t.getFullName(), + o2: { _id: 1 }, + o: { $set: { x: "upsert=default existing" }} + }, + { + op: "u", + ns: t.getFullName(), + o2: { _id: 4 }, + o: { $set: { x: "upsert=defaults non-existing" }} + }]}); + +assert.eq(true, res.results[0], "default upsert, existing doc update failed"); +assert.eq(true, res.results[1], "default upsert, nonexisting doc not upserted"); +assert.eq(3, t.find().count(), "2 docs expected after upsert failure"); diff --git a/jstests/core/array1.js b/jstests/core/array1.js new file mode 100644 index 00000000000..4409b7bb4d3 --- /dev/null +++ b/jstests/core/array1.js @@ -0,0 +1,14 @@ +t = db.array1 +t.drop() + +x = { a : [ 1 , 2 ] }; + +t.save( { a : [ [1,2] ] } ); +assert.eq( 1 , t.find( x ).count() , "A" ); + +t.save( x ); +delete x._id; +assert.eq( 2 , t.find( x ).count() , "B" ); + +t.ensureIndex( { a : 1 } ); +assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146 diff --git a/jstests/core/array3.js b/jstests/core/array3.js new file mode 100644 index 00000000000..3d053f99417 --- /dev/null +++ b/jstests/core/array3.js @@ -0,0 +1,8 @@ + +assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" ) +assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" ) + +arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ] +assert.eq( 5 , Array.avg( arr ) , "C" ) +assert.eq( 2 , Array.stdDev( arr ) , "D" ) + diff --git a/jstests/core/array4.js b/jstests/core/array4.js new file mode 100644 index 00000000000..1053e160f11 --- 
/dev/null +++ b/jstests/core/array4.js @@ -0,0 +1,30 @@ + +t = db.array4; +t.drop(); + +t.insert({"a": ["1", "2", "3"]}); +t.insert({"a" : ["2", "1"]}); + +var x = {'a.0' : /1/}; + +assert.eq(t.count(x), 1); + +assert.eq(t.findOne(x).a[0], 1); +assert.eq(t.findOne(x).a[1], 2); + +t.drop(); + +t.insert({"a" : {"0" : "1"}}); +t.insert({"a" : ["2", "1"]}); + +assert.eq(t.count(x), 1); +assert.eq(t.findOne(x).a[0], 1); + +t.drop(); + +t.insert({"a" : ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]}); +t.insert({"a" : ["2", "1"]}); + +x = {"a.12" : /2/}; +assert.eq(t.count(x), 1); +assert.eq(t.findOne(x).a[0], 0); diff --git a/jstests/core/array_match1.js b/jstests/core/array_match1.js new file mode 100644 index 00000000000..f764fb913b1 --- /dev/null +++ b/jstests/core/array_match1.js @@ -0,0 +1,31 @@ + +t = db.array_match1 +t.drop(); + +t.insert( { _id : 1 , a : [ 5 , 5 ] } ) +t.insert( { _id : 2 , a : [ 6 , 6 ] } ) +t.insert( { _id : 3 , a : [ 5 , 5 ] } ) + +function test( f , m ){ + var q = {}; + + q[f] = [5,5]; + assert.eq( 2 , t.find( q ).itcount() , m + "1" ) + + q[f] = [6,6]; + assert.eq( 1 , t.find( q ).itcount() , m + "2" ) +} + +test( "a" , "A" ); +t.ensureIndex( { a : 1 } ) +test( "a" , "B" ); + +t.drop(); + +t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } ) +t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } ) +t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } ) + +test( "a.b" , "C" ); +t.ensureIndex( { a : 1 } ) +test( "a.b" , "D" ); diff --git a/jstests/core/array_match2.js b/jstests/core/array_match2.js new file mode 100644 index 00000000000..d254b0a3fdd --- /dev/null +++ b/jstests/core/array_match2.js @@ -0,0 +1,20 @@ + +t = db.jstests_array_match2; +t.drop(); + +t.save( {a:[{1:4},5]} ); +// When the array index is the last field, both of these match types work. +assert.eq( 1, t.count( {'a.1':4} ) ); +assert.eq( 1, t.count( {'a.1':5} ) ); + +t.remove({}); +// When the array index is not the last field, only one of the match types works. 
+t.save( {a:[{1:{foo:4}},{foo:5}]} ); +assert.eq( 1, t.count( {'a.1.foo':4} ) ); +assert.eq( 1, t.count( {'a.1.foo':5} ) ); + +// Same issue with the $exists operator +t.remove({}); +t.save( {a:[{1:{foo:4}},{}]} ); +assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.1.foo':{$exists:true}} ) ); diff --git a/jstests/core/array_match3.js b/jstests/core/array_match3.js new file mode 100644 index 00000000000..c8653430770 --- /dev/null +++ b/jstests/core/array_match3.js @@ -0,0 +1,13 @@ +// SERVER-2902 Test indexing of numerically referenced array elements. + +t = db.jstests_array_match3; +t.drop(); + +// Test matching numericallly referenced array element. +t.save( {a:{'0':5}} ); +t.save( {a:[5]} ); +assert.eq( 2, t.count( {'a.0':5} ) ); + +// Test with index. +t.ensureIndex( {'a.0':1} ); +assert.eq( 2, t.count( {'a.0':5} ) ); diff --git a/jstests/core/array_match4.js b/jstests/core/array_match4.js new file mode 100644 index 00000000000..b4cdec5143a --- /dev/null +++ b/jstests/core/array_match4.js @@ -0,0 +1,30 @@ +var t = db.array_match4; + +t.drop(); +t.save({a: [1, 2]}); + +var query_gte = {a: {$gte: [1, 2]}}; + +// +// without index +// + +assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (without index)'); +assert.eq(1, t.find(query_gte).itcount(), '$gte (without index)'); + +// +// with index +// + +t.ensureIndex({a: 1}); +assert.eq(1, t.find({a: [1, 2]}).count(), '$eq (with index)'); + +// display explain output (for index bounds) +var explain = t.find(query_gte).explain(); +print('explain for ' + tojson(query_gte, '', true) + ' = ' + tojson(explain)); + +// number of documents returned by indexes query should be consistent +// with non-indexed case. +// XXX: The following assertion documents current behavior. +// XXX: 2.4 and 2.6 both return 0 documents. 
+assert.eq(0, t.find(query_gte).itcount(), '$gte (with index)'); diff --git a/jstests/core/arrayfind1.js b/jstests/core/arrayfind1.js new file mode 100644 index 00000000000..539fa6193a1 --- /dev/null +++ b/jstests/core/arrayfind1.js @@ -0,0 +1,40 @@ + +t = db.arrayfind1; +t.drop(); + +t.save( { a : [ { x : 1 } ] } ) +t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } ) +t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } ) + +function test( exptected , q , name ){ + assert.eq( exptected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" ); + assert.eq( exptected , t.find( q ).count() , name + " " + tojson( q ) + " count" ); +} + +test( 3 , {} , "A1" ); +test( 1 , { "a.y" : 2 } , "A2" ); +test( 1 , { "a" : { x : 1 } } , "A3" ); +test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377 + + +t.save( { a : [ { x : 2 } ] } ) +t.save( { a : [ { x : 3 } ] } ) +t.save( { a : [ { x : 4 } ] } ) + +assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" ); +assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" ); + +t.ensureIndex( { "a.x" : 1 } ); +assert( t.find( { "a" : { $elemMatch : { x : 1 } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "C1" ); + +assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" ); + +t.find( { "a.x" : 1 } ).count(); +t.find( { "a.x" : { $gt : 1 } } ).count(); + +res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain() +assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" ); +assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" ); + +assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" ); +assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" ); diff --git a/jstests/core/arrayfind2.js b/jstests/core/arrayfind2.js new file mode 100644 index 00000000000..c6a78042c3d --- /dev/null +++ b/jstests/core/arrayfind2.js @@ 
-0,0 +1,29 @@ + +t = db.arrayfind2; +t.drop(); + +function go( prefix ){ + assert.eq( 3 , t.count() , prefix + " A1" ); + assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" ); + assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" ); + assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } , + { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" ); + + assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ) } ); + assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ) } ); + +} + +t.save( { a : [ { x : 1 } , { x : 5 } ] } ) +t.save( { a : [ { x : 3 } , { x : 5 } ] } ) +t.save( { a : [ { x : 3 } , { x : 6 } ] } ) + +go( "no index" ); +t.ensureIndex( { a : 1 } ); +go( "index(a)" ); + +t.ensureIndex( { "a.x": 1 } ); + +assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds ); +// only first $elemMatch used to find bounds +assert.eq( {"a.x":[[3,3]]}, t.find( { a : { $all : [ { $elemMatch : { x : 3 } }, { $elemMatch : { y : 5 } } ] } } ).explain().indexBounds ); diff --git a/jstests/core/arrayfind3.js b/jstests/core/arrayfind3.js new file mode 100644 index 00000000000..de038c84264 --- /dev/null +++ b/jstests/core/arrayfind3.js @@ -0,0 +1,16 @@ + +t = db.arrayfind3; +t.drop() + +t.save({a:[1,2]}) +t.save({a:[1, 2, 6]}) +t.save({a:[1, 4, 6]}) + + +assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "A1" ) +assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "A2" ) + +t.ensureIndex( { a : 1 } ) + +assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "B1" ); +assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "B2" ) diff --git a/jstests/core/arrayfind4.js b/jstests/core/arrayfind4.js new file mode 100644 index 00000000000..17b02c8886b --- /dev/null +++ 
b/jstests/core/arrayfind4.js @@ -0,0 +1,22 @@ +// Test query empty array SERVER-2258 + +t = db.jstests_arrayfind4; +t.drop(); + +t.save( {a:[]} ); +t.ensureIndex( {a:1} ); + +assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() ); + +assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() ); + +t.remove({}); +t.save( {a:[[]]} ); + +assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() ); + +assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() ); diff --git a/jstests/core/arrayfind5.js b/jstests/core/arrayfind5.js new file mode 100644 index 00000000000..9ff6e2b8a5f --- /dev/null +++ b/jstests/core/arrayfind5.js @@ -0,0 +1,23 @@ +// Test indexed elemmatch of missing field. + +t = db.jstests_arrayfind5; +t.drop(); + +function check( nullElemMatch ) { + assert.eq( 1, t.find( {'a.b':1} ).itcount() ); + assert.eq( 1, t.find( {a:{$elemMatch:{b:1}}} ).itcount() ); + assert.eq( nullElemMatch ? 1 : 0 , t.find( {'a.b':null} ).itcount() ); + assert.eq( nullElemMatch ? 1 : 0, t.find( {a:{$elemMatch:{b:null}}} ).itcount() ); // see SERVER-3377 +} + +t.save( {a:[{},{b:1}]} ); +check( true ); +t.ensureIndex( {'a.b':1} ); +check( true ); + +t.drop(); + +t.save( {a:[5,{b:1}]} ); +check( false ); +t.ensureIndex( {'a.b':1} ); +check( false ); diff --git a/jstests/core/arrayfind6.js b/jstests/core/arrayfind6.js new file mode 100644 index 00000000000..f4531cea96a --- /dev/null +++ b/jstests/core/arrayfind6.js @@ -0,0 +1,26 @@ +// Check index bound determination for $not:$elemMatch queries. 
SERVER-5740 + +t = db.jstests_arrayfind6; +t.drop(); + +t.save( { a:[ { b:1, c:2 } ] } ); + +function checkElemMatchMatches() { + assert.eq( 1, t.count( { a:{ $elemMatch:{ b:1, c:2 } } } ) ); + assert.eq( 0, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:2 } } } } ) ); + assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:3 } } } } ) ); + assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:1 }, c:3 } } } } ) ); + // Index bounds must be determined for $not:$elemMatch, not $not:$ne. In this case if index + // bounds are determined for $not:$ne, the a.b index will be constrained to the interval [2,2] + // and the saved document will not be matched as it should. + assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ) ); +} + +checkElemMatchMatches(); +t.ensureIndex( { 'a.b':1 } ); +checkElemMatchMatches(); + +// We currently never use an index for negations of +// ELEM_MATCH_OBJECT expressions. +var explain = t.find( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ).explain(); +assert.eq( "BasicCursor", explain.cursor ); diff --git a/jstests/core/arrayfind7.js b/jstests/core/arrayfind7.js new file mode 100644 index 00000000000..7c44de1dc1d --- /dev/null +++ b/jstests/core/arrayfind7.js @@ -0,0 +1,52 @@ +// Nested $elemMatch clauses. SERVER-5741 + +t = db.jstests_arrayfind7; +t.drop(); + +t.save( { a:[ { b:[ { c:1, d:2 } ] } ] } ); + +function checkElemMatchMatches() { + assert.eq( 1, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:1, d:2 } } } } } ) ); +} + +// The document is matched using nested $elemMatch expressions, with and without an index. +checkElemMatchMatches(); +t.ensureIndex( { 'a.b.c':1 } ); +checkElemMatchMatches(); + +function checkElemMatch( index, document, query ) { + // The document is matched without an index, and with single and multi key indexes. 
+ t.drop(); + t.save( document ); + assert.eq( 1, t.count( query ) ); + t.ensureIndex( index ); + assert.eq( 1, t.count( query ) ); + t.save( { a:{ b:{ c:[ 10, 11 ] } } } ); // Make the index multikey. + assert.eq( 1, t.count( query ) ); +} + +// Two constraints within a nested $elemMatch expression. +checkElemMatch( { 'a.b.c':1 }, + { a:[ { b:[ { c:1 } ] } ] }, + { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } }); + +// Two constraints within a nested $elemMatch expression, one of which contains the other. +checkElemMatch( { 'a.b.c':1 }, + { a:[ { b:[ { c:2 } ] } ] }, + { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $in:[2] } } } } } }); + +// Two nested $elemMatch expressions. +checkElemMatch( { 'a.d.e':1, 'a.b.c':1 }, + { a:[ { b:[ { c:1 } ], d:[ { e:1 } ] } ] }, + { a:{ $elemMatch:{ d:{ $elemMatch:{ e:{ $lte:1 } } }, + b:{ $elemMatch:{ c:{ $gte:1 } } } } } }); + +// A non $elemMatch expression and a nested $elemMatch expression. +checkElemMatch( { 'a.x':1, 'a.b.c':1 }, + { a:[ { b:[ { c:1 } ], x:1 } ] }, + { 'a.x':1, a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1 } } } } } }); + +// $elemMatch is applied directly to a top level field. +checkElemMatch( { 'a.b.c':1 }, + { a:[ { b:[ { c:[ 1 ] } ] } ] }, + { a:{ $elemMatch:{ 'b.c':{ $elemMatch:{ $gte:1, $lte:1 } } } } }); diff --git a/jstests/core/arrayfind8.js b/jstests/core/arrayfind8.js new file mode 100644 index 00000000000..07d44ace26e --- /dev/null +++ b/jstests/core/arrayfind8.js @@ -0,0 +1,175 @@ +// Matching behavior for $elemMatch applied to a top level element. +// SERVER-1264 +// SERVER-4180 + +t = db.jstests_arrayfind8; +t.drop(); + +function debug( x ) { + if ( debuggingEnabled = false ) { + printjson( x ); + } +} + +/** Set index state for the test. 
*/ +function setIndexKey( key ) { + indexKey = key; + indexSpec = {}; + indexSpec[ key ] = 1; +} + +setIndexKey( 'a' ); + +function indexBounds( query ) { + debug( query ); + debug( t.find( query ).hint( indexSpec ).explain() ); + return t.find( query ).hint( indexSpec ).explain().indexBounds[ indexKey ]; +} + +/** Check that the query results match the documents in the 'expected' array. */ +function assertResults( expected, query, context ) { + debug( query ); + assert.eq( expected.length, t.count( query ), 'unexpected count in ' + context ); + results = t.find( query ).toArray(); + for( i in results ) { + found = false; + for( j in expected ) { + if ( friendlyEqual( expected[ j ], results[ i ].a ) ) { + found = true; + } + } + assert( found, 'unexpected result ' + results[ i ] + ' in ' + context ); + } +} + +/** + * Check matching for different query types. + * @param bothMatch - document matched by both standardQuery and elemMatchQuery + * @param elemMatch - document matched by elemMatchQuery but not standardQuery + * @param notElemMatch - document matched by standardQuery but not elemMatchQuery + */ +function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context ) { + + function mayPush( arr, elt ) { + if ( elt ) { + arr.push( elt ); + } + } + + expectedStandardQueryResults = []; + mayPush( expectedStandardQueryResults, bothMatch ); + mayPush( expectedStandardQueryResults, nonElemMatch ); + assertResults( expectedStandardQueryResults, standardQuery, context + ' standard query' ); + + expectedElemMatchQueryResults = []; + mayPush( expectedElemMatchQueryResults, bothMatch ); + mayPush( expectedElemMatchQueryResults, elemMatch ); + assertResults( expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query' ); +} + +/** + * Check matching and for different query types. 
+ * @param subQuery - part of a query, to be provided as is for a standard query and within a + * $elemMatch clause for a $elemMatch query + * @param bothMatch - document matched by both standardQuery and elemMatchQuery + * @param elemMatch - document matched by elemMatchQuery but not standardQuery + * @param notElemMatch - document matched by standardQuery but not elemMatchQuery + * @param additionalConstraints - additional query parameters not generated from @param subQuery + */ +function checkQuery( subQuery, bothMatch, elemMatch, nonElemMatch, + additionalConstraints ) { + t.drop(); + additionalConstraints = additionalConstraints || {}; + + // Construct standard and elemMatch queries from subQuery. + firstSubQueryKey = Object.keySet( subQuery )[ 0 ]; + if ( firstSubQueryKey[ 0 ] == '$' ) { + standardQuery = { $and:[ { a:subQuery }, additionalConstraints ] }; + } + else { + // If the subQuery contains a field rather than operators, append to the 'a' field. + modifiedSubQuery = {}; + modifiedSubQuery[ 'a.' + firstSubQueryKey ] = subQuery[ firstSubQueryKey ]; + standardQuery = { $and:[ modifiedSubQuery, additionalConstraints ] }; + } + elemMatchQuery = { $and:[ { a:{ $elemMatch:subQuery } }, additionalConstraints ] }; + debug( elemMatchQuery ); + + function maySave( aValue ) { + if ( aValue ) { + debug( { a:aValue } ); + t.save( { a:aValue } ); + } + } + + // Save all documents and check matching without indexes. + maySave( bothMatch ); + maySave( elemMatch ); + maySave( nonElemMatch ); + + checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed' ); + + // Check matching and index bounds for a single key index. + + t.drop(); + maySave( bothMatch ); + maySave( elemMatch ); + // The nonElemMatch document is not tested here, as it will often make the index multikey. 
+ t.ensureIndex( indexSpec ); + checkMatch( bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index' ); + + // Check matching and index bounds for a multikey index. + + // Now the nonElemMatch document is tested. + maySave( nonElemMatch ); + // Force the index to be multikey. + t.save( { a:[ -1, -2 ] } ); + t.save( { a:{ b:[ -1, -2 ] } } ); + checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, + 'multikey index' ); +} + +maxNumber = Infinity; + +// Basic test. +checkQuery( { $gt:4 }, [ 5 ] ); + +// Multiple constraints within a $elemMatch clause. +checkQuery( { $gt:4, $lt:6 }, [ 5 ], null, [ 3, 7 ] ); +checkQuery( { $gt:4, $not:{ $gte:6 } }, [ 5 ] ); +checkQuery( { $gt:4, $not:{ $ne:6 } }, [ 6 ] ); +checkQuery( { $gte:5, $lte:5 }, [ 5 ], null, [ 4, 6 ] ); +checkQuery( { $in:[ 4, 6 ], $gt:5 }, [ 6 ], null, [ 4, 7 ] ); +checkQuery( { $regex:'^a' }, [ 'a' ] ); + +// Some constraints within a $elemMatch clause and other constraints outside of it. +checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $lt:6 } } ); +checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $lte:5 } } ); +checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $gt:5 } } ); + +// Constraints in different $elemMatch clauses. 
+checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } ); +checkQuery( { $gt:4 }, [ 3, 7 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } ); +checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lte:5 } } } ); +checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $elemMatch:{ $gt:5 } } } ); + +// TODO SERVER-1264 +if ( 0 ) { +checkQuery( { $elemMatch:{ $in:[ 5 ] } }, null, [[ 5 ]], [ 5 ], null ); +} + +setIndexKey( 'a.b' ); +checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:1 } ]], + [ { b:1 } ], null ); +checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:[ 0, 2 ] } ]], + [ { b:[ 0, 2 ] } ], null ); + +// Constraints for a top level (SERVER-1264 style) $elemMatch nested within a non top level +// $elemMatch. +checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:1 } } }, [ { b:[ 1 ] } ] ); +checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 1 ] } ] ); + +checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 2 ] } ], null, + null, { 'a.b':{ $in:[ 2, 5 ] } } ); +checkQuery( { b:{ $elemMatch:{ $in:[ 1, 2 ] }, $in:[ 2, 3 ] } }, + [ { b:[ 2 ] } ], null, [ { b:[ 1 ] }, { b:[ 3 ] } ], null ); diff --git a/jstests/core/arrayfind9.js b/jstests/core/arrayfind9.js new file mode 100644 index 00000000000..4ee14c56580 --- /dev/null +++ b/jstests/core/arrayfind9.js @@ -0,0 +1,34 @@ +// Assorted $elemMatch behavior checks. + +t = db.jstests_arrayfind9; +t.drop(); + +// Top level field $elemMatch:$not matching +t.save( { a:[ 1 ] } ); +assert.eq( 1, t.count( { a:{ $elemMatch:{ $not:{ $ne:1 } } } } ) ); + +// Top level field object $elemMatch matching. +t.drop(); +t.save( { a:[ {} ] } ); +assert.eq( 1, t.count( { a:{ $elemMatch:{ $gte:{} } } } ) ); + +// Top level field array $elemMatch matching. +t.drop(); +t.save( { a:[ [] ] } ); +assert.eq( 1, t.count( { a:{ $elemMatch:{ $in:[ [] ] } } } ) ); + +// Matching by array index. 
+t.drop(); +t.save( { a:[ [ 'x' ] ] } ); +assert.eq( 1, t.count( { a:{ $elemMatch:{ '0':'x' } } } ) ); + +// Matching multiple values of a nested array. +t.drop(); +t.save( { a:[ { b:[ 0, 2 ] } ] } ); +t.ensureIndex( { a:1 } ); +t.ensureIndex( { 'a.b':1 } ); +plans = [ { $natural:1 }, { a:1 }, { 'a.b':1 } ]; +for( i in plans ) { + p = plans[ i ]; + assert.eq( 1, t.find( { a:{ $elemMatch:{ b:{ $gte:1, $lte:1 } } } } ).hint( p ).itcount() ); +} diff --git a/jstests/core/arrayfinda.js b/jstests/core/arrayfinda.js new file mode 100644 index 00000000000..179d3985580 --- /dev/null +++ b/jstests/core/arrayfinda.js @@ -0,0 +1,21 @@ +// Assorted $elemMatch matching behavior checks. + +t = db.jstests_arrayfinda; +t.drop(); + +// $elemMatch only matches elements within arrays (a descriptive, not a normative test). +t.save( { a:[ { b:1 } ] } ); +t.save( { a:{ b:1 } } ); + +function assertExpectedMatch( cursor ) { + assert.eq( [ { b:1 } ], cursor.next().a ); + assert( !cursor.hasNext() ); +} + +assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ) ); +assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ) ); + +// $elemMatch is not used to perform key matching. SERVER-6001 +t.ensureIndex( { a:1 } ); +assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ).hint( { a:1 } ) ); +assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ).hint( { a:1 } ) ); diff --git a/jstests/core/auth1.js b/jstests/core/auth1.js new file mode 100644 index 00000000000..4ab26e8d2e5 --- /dev/null +++ b/jstests/core/auth1.js @@ -0,0 +1,54 @@ +var mydb = db.getSiblingDB('auth1_db'); +mydb.dropAllUsers(); + +pass = "a" + Math.random(); +//print( "password [" + pass + "]" ); + +mydb.createUser({user: "eliot" ,pwd: pass, roles: jsTest.basicUserRoles}); + +assert( mydb.auth( "eliot" , pass ) , "auth failed" ); +assert( ! mydb.auth( "eliot" , pass + "a" ) , "auth should have failed" ); + +pass2 = "b" + Math.random(); +mydb.changeUserPassword("eliot", pass2); + +assert( ! 
mydb.auth( "eliot" , pass ) , "failed to change password failed" ); +assert( mydb.auth( "eliot" , pass2 ) , "new password didn't take" ); + +assert( mydb.auth( "eliot" , pass2 ) , "what?" ); +mydb.dropUser( "eliot" ); +assert( ! mydb.auth( "eliot" , pass2 ) , "didn't drop user" ); + + +var a = mydb.getMongo().getDB( "admin" ); +a.dropAllUsers(); +pass = "c" + Math.random(); +a.createUser({user: "super", pwd: pass, roles: jsTest.adminUserRoles}); +assert( a.auth( "super" , pass ) , "auth failed" ); +assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" ); + +mydb.dropAllUsers(); +pass = "a" + Math.random(); + +mydb.createUser({user: "eliot" , pwd: pass, roles: jsTest.basicUserRoles}); + +assert.commandFailed( mydb.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) ); + +// check sanity check SERVER-3003 + +var before = a.system.users.count({db: mydb.getName()}); + +assert.throws( function(){ + mydb.createUser({ user: "" , pwd: "abc", roles: jsTest.basicUserRoles}); +} , null , "C1" ) +assert.throws( function(){ + mydb.createUser({ user: "abc" , pwd: "", roles: jsTest.basicUserRoles}); +} , null , "C2" ) + + +var after = a.system.users.count({db: mydb.getName()}); +assert( before > 0 , "C3" ) +assert.eq( before , after , "C4" ) + +// Clean up after ourselves so other tests using authentication don't get messed up. 
+mydb.dropAllUsers() diff --git a/jstests/core/auth2.js b/jstests/core/auth2.js new file mode 100644 index 00000000000..9c2b38f682d --- /dev/null +++ b/jstests/core/auth2.js @@ -0,0 +1,9 @@ +// just make sure logout doesn't break anything + +// SERVER-724 +db.runCommand({logout : 1}); +x = db.runCommand({logout : 1}); +assert.eq( 1 , x.ok , "A" ) + +x = db.logout(); +assert.eq( 1 , x.ok , "B" ) diff --git a/jstests/core/auth_copydb.js b/jstests/core/auth_copydb.js new file mode 100644 index 00000000000..f04cd0b0d29 --- /dev/null +++ b/jstests/core/auth_copydb.js @@ -0,0 +1,19 @@ +a = db.getSisterDB( "copydb2-test-a" ); +b = db.getSisterDB( "copydb2-test-b" ); + +a.dropDatabase(); +b.dropDatabase(); +a.dropAllUsers(); +b.dropAllUsers(); + +a.foo.save( { a : 1 } ); + +a.createUser({user: "chevy" , pwd: "chase", roles: jsTest.basicUserRoles}); + +assert.eq( 1 , a.foo.count() , "A" ); +assert.eq( 0 , b.foo.count() , "B" ); + +// SERVER-727 +a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" ); +assert.eq( 1 , a.foo.count() , "C" ); +assert.eq( 1 , b.foo.count() , "D" ); diff --git a/jstests/core/autoid.js b/jstests/core/autoid.js new file mode 100644 index 00000000000..6c8062fd093 --- /dev/null +++ b/jstests/core/autoid.js @@ -0,0 +1,11 @@ +f = db.jstests_autoid; +f.drop(); + +f.save( {z:1} ); +a = f.findOne( {z:1} ); +f.update( {z:1}, {z:2} ); +b = f.findOne( {z:2} ); +assert.eq( a._id.str, b._id.str ); +c = f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} ); +c = f.findOne( {} ); +assert.eq( a._id.str, c._id.str ); diff --git a/jstests/core/bad_index_plugin.js b/jstests/core/bad_index_plugin.js new file mode 100644 index 00000000000..98ebdb9bb28 --- /dev/null +++ b/jstests/core/bad_index_plugin.js @@ -0,0 +1,11 @@ +// SERVER-5826 ensure you can't build an index with a non-existent plugin +t = db.bad_index_plugin; + +assert.writeOK(t.ensureIndex({good: 1})); +assert.eq(t.getIndexes().length, 2); // good + _id + +var err = t.ensureIndex({bad: 'bad'}); 
+assert.writeError(err); +assert(err.getWriteError().code >= 0); + +assert.eq(t.getIndexes().length, 2); // good + _id (no bad) diff --git a/jstests/core/basic1.js b/jstests/core/basic1.js new file mode 100644 index 00000000000..e5fa577f0b2 --- /dev/null +++ b/jstests/core/basic1.js @@ -0,0 +1,21 @@ + +t = db.getCollection( "basic1" ); +t.drop(); + +o = { a : 1 }; +t.save( o ); + +assert.eq( 1 , t.findOne().a , "first" ); +assert( o._id , "now had id" ); +assert( o._id.str , "id not a real id" ); + +o.a = 2; +t.save( o ); + +assert.eq( 2 , t.findOne().a , "second" ); + +assert(t.validate().valid); + +// not a very good test of currentOp, but tests that it at least +// is sort of there: +assert( db.currentOp().inprog != null ); diff --git a/jstests/core/basic2.js b/jstests/core/basic2.js new file mode 100644 index 00000000000..aaa3de4366e --- /dev/null +++ b/jstests/core/basic2.js @@ -0,0 +1,16 @@ + +t = db.getCollection( "basic2" ); +t.drop(); + +o = { n : 2 }; +t.save( o ); + +assert.eq( 1 , t.find().count() ); + +assert.eq( 2 , t.find( o._id ).toArray()[0].n ); +assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n ); + +t.remove( o._id ); +assert.eq( 0 , t.find().count() ); + +assert(t.validate().valid); diff --git a/jstests/core/basic3.js b/jstests/core/basic3.js new file mode 100644 index 00000000000..5fb5581a252 --- /dev/null +++ b/jstests/core/basic3.js @@ -0,0 +1,45 @@ +// Tests that "." cannot be in field names +t = db.getCollection( "foo_basic3" ); +t.drop() + +//more diagnostics on bad save, if exception fails +doBadSave = function(param) { + print("doing save with " + tojson(param)) + var res = t.save(param); + // Should not get here. + printjson(res); +} + +//more diagnostics on bad save, if exception fails +doBadUpdate = function(query, update) { + print("doing update with " + tojson(query) + " " + tojson(update)) + var res = t.update(query, update); + // Should not get here. + printjson(res); +} + +assert.throws(doBadSave, [{"a.b":5}], ". 
in names aren't allowed doesn't work"); + +assert.throws(doBadSave, + [{ "x" : { "a.b" : 5 } }], + ". in embedded names aren't allowed doesn't work"); + +// following tests make sure update keys are checked +t.save({"a": 0,"b": 1}) + +assert.throws(doBadUpdate, [{a:0}, { "b.b" : 1 }], + "must deny '.' in key of update"); + +// upsert with embedded doc +assert.throws(doBadUpdate, [{a:10}, { c: {"b.b" : 1 }}], + "must deny embedded '.' in key of update"); + +// if it is a modifier, it should still go through +t.update({"a": 0}, {$set: { "c.c": 1}}) +t.update({"a": 0}, {$inc: { "c.c": 1}}) + +// edge cases +assert.throws(doBadUpdate, [{a:0}, { "":{"b.b" : 1} }], + "must deny '' embedded '.' in key of update"); +t.update({"a": 0}, {}) + diff --git a/jstests/core/basic4.js b/jstests/core/basic4.js new file mode 100644 index 00000000000..0cf7a261e63 --- /dev/null +++ b/jstests/core/basic4.js @@ -0,0 +1,12 @@ +t = db.getCollection( "basic4" ); +t.drop(); + +t.save( { a : 1 , b : 1.0 } ); + +assert( t.findOne() ); +assert( t.findOne( { a : 1 } ) ); +assert( t.findOne( { a : 1.0 } ) ); +assert( t.findOne( { b : 1 } ) ); +assert( t.findOne( { b : 1.0 } ) ); + +assert( ! 
t.findOne( { b : 2.0 } ) ); diff --git a/jstests/core/basic5.js b/jstests/core/basic5.js new file mode 100644 index 00000000000..bfa40fb8f5e --- /dev/null +++ b/jstests/core/basic5.js @@ -0,0 +1,6 @@ +t = db.getCollection( "basic5" ); +t.drop(); + +t.save( { a : 1 , b : [ 1 , 2 , 3 ] } ); +assert.eq( 3 , t.findOne().b.length ); + diff --git a/jstests/core/basic6.js b/jstests/core/basic6.js new file mode 100644 index 00000000000..e0cd6f1586e --- /dev/null +++ b/jstests/core/basic6.js @@ -0,0 +1,8 @@ + +t = db.basic6; + +t.findOne(); +t.a.findOne(); + +assert.eq( "test.basic6" , t.toString() ); +assert.eq( "test.basic6.a" , t.a.toString() ); diff --git a/jstests/core/basic7.js b/jstests/core/basic7.js new file mode 100644 index 00000000000..7bb0d470e82 --- /dev/null +++ b/jstests/core/basic7.js @@ -0,0 +1,11 @@ + +t = db.basic7; +t.drop(); + +t.save( { a : 1 } ) +t.ensureIndex( { a : 1 } ); + +assert.eq( t.find().toArray()[0].a , 1 ); +assert.eq( t.find().arrayAccess(0).a , 1 ); +assert.eq( t.find()[0].a , 1 ); + diff --git a/jstests/core/basic8.js b/jstests/core/basic8.js new file mode 100644 index 00000000000..513da0d15d1 --- /dev/null +++ b/jstests/core/basic8.js @@ -0,0 +1,11 @@ + +t = db.basic8; +t.drop(); + +t.save( { a : 1 } ); +o = t.findOne(); +o.b = 2; +t.save( o ); + +assert.eq( 1 , t.find().count() , "A" ); +assert.eq( 2 , t.findOne().b , "B" ); diff --git a/jstests/core/basic9.js b/jstests/core/basic9.js new file mode 100644 index 00000000000..814b72b2ae7 --- /dev/null +++ b/jstests/core/basic9.js @@ -0,0 +1,19 @@ +// Tests that $ field names are not allowed, but you can use a $ anywhere else. +t = db.getCollection( "foo_basic9" ); +t.drop() + +// more diagnostics on bad save, if exception fails +doBadSave = function(param) { + print("doing save with " + tojson(param)) + var res = t.save(param); + // Should not get here. 
+ print('Should have errored out: ' + tojson(res)); +} + +t.save({foo$foo:5}); +t.save({foo$:5}); + +assert.throws(doBadSave, [{$foo:5}], "key names aren't allowed to start with $ doesn't work"); +assert.throws(doBadSave, + [{x:{$foo:5}}], + "embedded key names aren't allowed to start with $ doesn't work"); diff --git a/jstests/core/basica.js b/jstests/core/basica.js new file mode 100644 index 00000000000..0cc364beb42 --- /dev/null +++ b/jstests/core/basica.js @@ -0,0 +1,33 @@ + +t = db.basica; + + +t.drop(); + +t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } ); + +x = t.findOne(); +x.b["0"].x = 4; +x.b["0"].z = 4; +x.b[0].m = 9; +x.b[0]["asd"] = 11; +x.a = 2; +x.z = 11; + +tojson( x ); +t.save( x ); +assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" ); + +// ----- + +t.drop(); + +t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } ); + +x = t.findOne(); +x.b["0"].z = 4; + +//printjson( x ); +t.save( x ); +assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" ); + diff --git a/jstests/core/basicb.js b/jstests/core/basicb.js new file mode 100644 index 00000000000..95eb60151af --- /dev/null +++ b/jstests/core/basicb.js @@ -0,0 +1,6 @@ + +t = db.basicb; +t.drop(); + +assert.throws( function() { t.insert( { '$a' : 5 } ); }); + diff --git a/jstests/core/basicc.js b/jstests/core/basicc.js new file mode 100644 index 00000000000..8da8c68a8b0 --- /dev/null +++ b/jstests/core/basicc.js @@ -0,0 +1,21 @@ +// test writing to two db's at the same time. 
+ +t1 = db.jstests_basicc; +var db = db.getSisterDB("test_basicc"); +t2 = db.jstests_basicc; +t1.drop(); +t2.drop(); + +js = "while( 1 ) { db.jstests.basicc1.save( {} ); }"; +pid = startMongoProgramNoConnect( "mongo" , "--eval" , js , db.getMongo().host ); + +for( var i = 0; i < 1000; ++i ) { + assert.writeOK(t2.save( {} )); +} + +stopMongoProgramByPid( pid ); +// put things back the way we found it +t1.drop(); +t2.drop(); +db.dropDatabase(); +db = db.getSisterDB("test"); diff --git a/jstests/core/batch_size.js b/jstests/core/batch_size.js new file mode 100644 index 00000000000..2bc144cd554 --- /dev/null +++ b/jstests/core/batch_size.js @@ -0,0 +1,45 @@ +// Test subtleties of batchSize and limit. + +var t = db.jstests_batch_size; +t.drop(); + +for (var i = 0; i < 4; i++) { + t.save({_id: i, a: i}); +} + +function runIndexedTests() { + // With limit, indexed. + assert.eq(2, t.find().limit(2).itcount(), 'G'); + assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'H'); + + // With batchSize, indexed. + // SERVER-12438: If there is an index that provides the sort, + // then a plan with an unindexed sort should never be used. + // Consequently, batchSize will NOT be a hard limit in this case. + // WARNING: the behavior described above may change in the future. + assert.eq(4, t.find().batchSize(2).itcount(), 'I'); + assert.eq(4, t.find().sort({a: 1}).batchSize(2).itcount(), 'J'); +} + +// Without batch size or limit, unindexed. +assert.eq(4, t.find().itcount(), 'A'); +assert.eq(4, t.find().sort({a: 1}).itcount(), 'B'); + +// With limit, unindexed. +assert.eq(2, t.find().limit(2).itcount(), 'C'); +assert.eq(2, t.find().sort({a: 1}).limit(2).itcount(), 'D'); + +// With batchSize, unindexed. +// SERVER-12438: in general batch size does not mean a hard +// limit. With an unindexed sort, however, the server interprets +// batch size as a hard limit so that it can do a top k sort. +// WARNING: this behavior may change in the future. 
+assert.eq(4, t.find().batchSize(2).itcount(), 'E'); +assert.eq(2, t.find().sort({a: 1}).batchSize(2).itcount(), 'F'); + +// Run the tests with the index twice in order to double check plan caching. +t.ensureIndex({a: 1}); +for (var i = 0; i < 2; i++) { + runIndexedTests(); +} + diff --git a/jstests/core/bench_test1.js b/jstests/core/bench_test1.js new file mode 100644 index 00000000000..bb1423ee8b8 --- /dev/null +++ b/jstests/core/bench_test1.js @@ -0,0 +1,37 @@ + +t = db.bench_test1; +t.drop(); + +t.insert( { _id : 1 , x : 1 } ) +t.insert( { _id : 2 , x : 1 } ) + +ops = [ + { op : "findOne" , ns : t.getFullName() , query : { _id : 1 } } , + { op : "update" , ns : t.getFullName() , query : { _id : 1 } , update : { $inc : { x : 1 } } } +] + +seconds = .7 + +benchArgs = { ops : ops , parallel : 2 , seconds : seconds , host : db.getMongo().host }; + +if (jsTest.options().auth) { + benchArgs['db'] = 'admin'; + benchArgs['username'] = jsTest.options().adminUser; + benchArgs['password'] = jsTest.options().adminPassword; +} +res = benchRun( benchArgs ); + +assert.lte( seconds * res.update , t.findOne( { _id : 1 } ).x * 1.05 , "A1" ) + + +assert.eq( 1 , t.getIndexes().length , "B1" ) +benchArgs['ops']=[ { op : "createIndex" , ns : t.getFullName() , key : { x : 1 } } ]; +benchArgs['parallel']=1; +benchArgs['seconds']=1; +benchRun( benchArgs ); +assert.eq( 2 , t.getIndexes().length , "B2" ) +benchArgs['ops']=[ { op : "dropIndex" , ns : t.getFullName() , key : { x : 1 } } ]; +benchRun( benchArgs ); +assert.soon( function(){ return t.getIndexes().length == 1; } ); + + diff --git a/jstests/core/bench_test2.js b/jstests/core/bench_test2.js new file mode 100644 index 00000000000..871b24ca051 --- /dev/null +++ b/jstests/core/bench_test2.js @@ -0,0 +1,48 @@ + +t = db.bench_test2 +t.drop(); + +for ( i=0; i<100; i++ ) + t.insert( { _id : i , x : 0 } ); + +benchArgs = { ops : [ { ns : t.getFullName() , + op : "update" , + query : { _id : { "#RAND_INT" : [ 0 , 100 ] } } , + update : { 
$inc : { x : 1 } } } ] , + parallel : 2 , + seconds : 1 , + totals : true , + host : db.getMongo().host } + +if (jsTest.options().auth) { + benchArgs['db'] = 'admin'; + benchArgs['username'] = jsTest.options().adminUser; + benchArgs['password'] = jsTest.options().adminPassword; +} + +res = benchRun( benchArgs ) +printjson( res ); + +sumsq = 0 +sum = 0 + +min = 1000 +max = 0; +t.find().forEach( + function(z){ + sum += z.x; + sumsq += Math.pow( ( res.update / 100 ) - z.x , 2 ); + min = Math.min( z.x , min ); + max = Math.max( z.x , max ); + } +) + +avg = sum / 100 +std = Math.sqrt( sumsq / 100 ) + +print( "Avg: " + avg ) +print( "Std: " + std ) +print( "Min: " + min ) +print( "Max: " + max ) + + diff --git a/jstests/core/bench_test3.js b/jstests/core/bench_test3.js new file mode 100644 index 00000000000..4bc21ed2505 --- /dev/null +++ b/jstests/core/bench_test3.js @@ -0,0 +1,27 @@ +t = db.bench_test3 +t.drop(); + + +benchArgs = { ops : [ { ns : t.getFullName() , + op : "update" , + upsert : true , + query : { _id : { "#RAND_INT" : [ 0 , 5 , 4 ] } } , + update : { $inc : { x : 1 } } } ] , + parallel : 2 , + seconds : 1 , + totals : true , + host : db.getMongo().host } + +if (jsTest.options().auth) { + benchArgs['db'] = 'admin'; + benchArgs['username'] = jsTest.options().adminUser; + benchArgs['password'] = jsTest.options().adminPassword; +} + +res = benchRun( benchArgs ) +printjson( res ); + +var keys = [] +var totals = {} +db.bench_test3.find().sort( { _id : 1 } ).forEach( function(z){ keys.push( z._id ); totals[z._id] = z.x } ); +assert.eq( [ 0 , 4 , 8 , 12 , 16 ] , keys ) diff --git a/jstests/core/big_object1.js b/jstests/core/big_object1.js new file mode 100644 index 00000000000..be61dbd3041 --- /dev/null +++ b/jstests/core/big_object1.js @@ -0,0 +1,55 @@ + +t = db.big_object1 +t.drop(); + +if ( db.adminCommand( "buildinfo" ).bits == 64 ){ + + var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + var s = large; + while ( s.length < 850 * 1024 ){ + s += large; + } + x = 
0; + while ( true ){ + var result; + n = { _id : x , a : [] } + for ( i=0; i<14+x; i++ ) + n.a.push( s ) + try { + result = t.insert( n ) + o = n + } + catch ( e ){ + break; + } + + if ( result.hasWriteErrors() ) + break; + x++; + } + + printjson( t.stats(1024*1024) ) + + assert.lt( 15 * 1024 * 1024 , Object.bsonsize( o ) , "A1" ) + assert.gt( 17 * 1024 * 1024 , Object.bsonsize( o ) , "A2" ) + + assert.eq( x , t.count() , "A3" ) + + for ( i=0; i js conversion + var a = o.a; + } catch(e) { + assert(false, "Caught exception trying to insert during iteration " + i + ": " + e); + } + assert( o , "B" + i ); + } + + t.drop() +} +else { + print( "skipping big_object1 b/c not 64-bit" ) +} + +print("SUCCESS"); diff --git a/jstests/core/binData.js b/jstests/core/binData.js new file mode 100644 index 00000000000..3f037650e05 --- /dev/null +++ b/jstests/core/binData.js @@ -0,0 +1,14 @@ + +var x = new BinData(3, "OEJTfmD8twzaj/LPKLIVkA=="); +assert.eq(x.hex(), "3842537e60fcb70cda8ff2cf28b21590", "bad hex"); +assert.eq(x.base64(), "OEJTfmD8twzaj/LPKLIVkA==", "bad base64"); +assert.eq(x.type, 3, "bad type"); +assert.eq(x.length(), 16, "bad length"); + +x = new BinData(0, "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4="); +assert.eq(x.hex(), 
"4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e", "bad hex"); +assert.eq(x.base64(), "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=", "bad base64"); +assert.eq(x.type, 0, "bad type"); +assert.eq(x.length(), 269, "bad length"); + + diff --git a/jstests/core/block_check_supported.js b/jstests/core/block_check_supported.js new file mode 100644 index 00000000000..21d04ca93c7 --- /dev/null +++ b/jstests/core/block_check_supported.js @@ -0,0 +1,118 @@ +// Test that serverStatus() features dependent on the ProcessInfo::blockCheckSupported() routine +// work correctly. These features are db.serverStatus({workingSet:1}).workingSet and +// db.serverStatus().indexCounters. +// Related to SERVER-9242, SERVER-6450. 
+ +// Check that an object contains a specific set of fields and only those fields +// NOTE: destroys 'item' +// +var testExpectedFields = function(itemString, item, fieldList) { + print('Testing ' + itemString + ' for expected fields'); + for (var i = 0; i < fieldList.length; ++i) { + var field = fieldList[i]; + if (typeof item[field] == 'undefined') { + doassert('Test FAILED: missing "' + field + '" field'); + } + delete item[field]; + } + if (!friendlyEqual({}, item)) { + doassert('Test FAILED: found unexpected field(s): ' + tojsononeline(item)); + } +} + +// Run test as function to keep cruft out of global namespace +// +var doTest = function () { + + print('Testing workingSet and indexCounters portions of serverStatus'); + var hostInfo = db.hostInfo(); + var isXP = (hostInfo.os.name == 'Windows XP') ? true : false; + var isEmpty = (hostInfo.os.name == '') ? true : false; + + // Check that the serverStatus command returns something for these sub-documents + // + var serverStatus = db.serverStatus({ workingSet: 1 }); + if (!serverStatus) { + doassert('Test FAILED: db.serverStatus({workingSet:1}) did not return a value'); + } + if (!serverStatus.workingSet) { + doassert('Test FAILED: db.serverStatus({workingSet:1}).workingSet was not returned'); + } + if (!serverStatus.indexCounters) { + doassert('Test FAILED: db.serverStatus().indexCounters was not returned'); + } + var workingSet_1 = serverStatus.workingSet; + var indexCounters_1 = serverStatus.indexCounters; + + if (isXP) { + // Windows XP is the only supported platform that should be missing this data; make sure + // that we don't get bogus data back + // + var expectedResult = { info: 'not supported' }; + print('Testing db.serverStatus({workingSet:1}).workingSet on Windows XP -- expecting ' + + tojsononeline(expectedResult)); + assert.eq(expectedResult, workingSet_1, + 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' + + ' did not return the expected value'); + expectedResult = { note: 'not 
supported on this platform' }; + print('Testing db.serverStatus().indexCounters on Windows XP -- expecting ' + + tojsononeline(expectedResult)); + assert.eq(expectedResult, indexCounters_1, + 'Test FAILED: db.serverStatus().indexCounters' + + ' did not return the expected value'); + } + else if (isEmpty) { + // Until SERVER-9325 is fixed, Solaris/SmartOS will also be missing this data; make sure + // that we don't get bogus data back + // + expectedResult = { info: 'not supported' }; + print('Testing db.serverStatus({workingSet:1}).workingSet on "" (Solaris?) -- expecting ' + + tojsononeline(expectedResult)); + assert.eq(expectedResult, workingSet_1, + 'Test FAILED: db.serverStatus({workingSet:1}).workingSet' + + ' did not return the expected value'); + expectedResult = { note: 'not supported on this platform' }; + print('Testing db.serverStatus().indexCounters on "" (Solaris?) -- expecting ' + + tojsononeline(expectedResult)); + assert.eq(expectedResult, indexCounters_1, + 'Test FAILED: db.serverStatus().indexCounters' + + ' did not return the expected value'); + } + else { + // Check that we get both workingSet and indexCounters and that all expected + // fields are present with no unexpected fields + // + testExpectedFields('db.serverStatus({workingSet:1}).workingSet', + workingSet_1, + ['note', 'pagesInMemory', 'computationTimeMicros', 'overSeconds']); + testExpectedFields('db.serverStatus().indexCounters', + indexCounters_1, + ['accesses', 'hits', 'misses', 'resets', 'missRatio']); + + if (0) { // comment out until SERVER-9284 is fixed + // See if we can make the index counters values change + // + print('Testing that indexCounters accesses and hits increase by 1 on indexed find()'); + var blockDB = db.getSiblingDB('block_check_supported'); + blockDB.dropDatabase(); + blockDB.coll.insert({ a: 1 }); + blockDB.coll.ensureIndex({ a: 1 }); + indexCounters_1 = db.serverStatus().indexCounters; + var doc = blockDB.coll.findOne({ a: 1 }); + var indexCounters_2 = 
db.serverStatus().indexCounters; + assert.gt(indexCounters_2.accesses, indexCounters_1.accesses, + 'Test FAILED: db.serverStatus().indexCounters.accesses' + + ' should have had a value greater than ' + indexCounters_1.accesses + + ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) + + ', after find(): ' + tojsononeline(indexCounters_2)); + assert.gt(indexCounters_2.hits, indexCounters_1.hits, + 'Test FAILED: db.serverStatus().indexCounters.hits' + + ' should have had a value greater than ' + indexCounters_1.hits + + ': indexCounters: before find(): ' + tojsononeline(indexCounters_1) + + ', after find(): ' + tojsononeline(indexCounters_2)); + } // comment out until SERVER-9284 is fixed + } + print('Test PASSED!'); +}; + +doTest(); diff --git a/jstests/core/bulk_insert.js b/jstests/core/bulk_insert.js new file mode 100644 index 00000000000..e26b323c6d9 --- /dev/null +++ b/jstests/core/bulk_insert.js @@ -0,0 +1,22 @@ +// Tests bulk insert of docs from the shell + +var coll = db.bulkInsertTest +coll.drop() + +Random.srand( new Date().getTime() ) + +var bulkSize = Math.floor( Random.rand() * 200 ) + 1 +var numInserts = Math.floor( Random.rand() * 300 ) + 1 + +print( "Inserting " + numInserts + " bulks of " + bulkSize + " documents." 
) + +for( var i = 0; i < numInserts; i++ ){ + var bulk = [] + for( var j = 0; j < bulkSize; j++ ){ + bulk.push({ hi : "there", i : i, j : j }) + } + + coll.insert( bulk ) +} + +assert.eq( coll.count(), bulkSize * numInserts ) diff --git a/jstests/core/capped.js b/jstests/core/capped.js new file mode 100644 index 00000000000..421132b6f75 --- /dev/null +++ b/jstests/core/capped.js @@ -0,0 +1,11 @@ +db.jstests_capped.drop(); +db.createCollection("jstests_capped", {capped:true, size:30000}); + +assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_capped"} ).count(), "expected a count of one index for new capped collection" ); +t = db.jstests_capped; + +t.save({x:1}); +t.save({x:2}); + +assert( t.find().sort({$natural:1})[0].x == 1 , "expected obj.x==1"); +assert( t.find().sort({$natural:-1})[0].x == 2, "expected obj.x == 2"); diff --git a/jstests/core/capped1.js b/jstests/core/capped1.js new file mode 100644 index 00000000000..0bbeaa40894 --- /dev/null +++ b/jstests/core/capped1.js @@ -0,0 +1,11 @@ + +t = db.capped1; +t.drop(); + +db.createCollection("capped1" , {capped:true, size:1024 }); +v = t.validate(); +assert( v.valid , "A : " + tojson( v ) ); // SERVER-485 + +t.save( { x : 1 } ) +assert( t.validate().valid , "B" ) + diff --git a/jstests/core/capped2.js b/jstests/core/capped2.js new file mode 100644 index 00000000000..65bb82f4c07 --- /dev/null +++ b/jstests/core/capped2.js @@ -0,0 +1,62 @@ +db.capped2.drop(); +db._dbCommand( { create: "capped2", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); +tzz = db.capped2; + +function debug( x ) { +// print( x ); +} + +var val = new Array( 2000 ); +var c = ""; +for( i = 0; i < 2000; ++i, c += "---" ) { // bigger and bigger objects through the array... 
+ val[ i ] = { a: c }; +} + +function checkIncreasing( i ) { + res = tzz.find().sort( { $natural: -1 } ); + assert( res.hasNext(), "A" ); + var j = i; + while( res.hasNext() ) { + try { + assert.eq( val[ j-- ].a, res.next().a, "B" ); + } catch( e ) { + debug( "capped2 err " + j ); + throw e; + } + } + res = tzz.find().sort( { $natural: 1 } ); + assert( res.hasNext(), "C" ); + while( res.hasNext() ) + assert.eq( val[ ++j ].a, res.next().a, "D" ); + assert.eq( j, i, "E" ); +} + +function checkDecreasing( i ) { + res = tzz.find().sort( { $natural: -1 } ); + assert( res.hasNext(), "F" ); + var j = i; + while( res.hasNext() ) { + assert.eq( val[ j++ ].a, res.next().a, "G" ); + } + res = tzz.find().sort( { $natural: 1 } ); + assert( res.hasNext(), "H" ); + while( res.hasNext() ) + assert.eq( val[ --j ].a, res.next().a, "I" ); + assert.eq( j, i, "J" ); +} + +for( i = 0 ;; ++i ) { + debug( "capped 2: " + i ); + tzz.insert( val[ i ] ); + if ( tzz.count() == 0 ) { + assert( i > 100, "K" ); + break; + } + checkIncreasing( i ); +} + +for( i = 600 ; i >= 0 ; --i ) { + debug( "capped 2: " + i ); + tzz.insert( val[ i ] ); + checkDecreasing( i ); +} diff --git a/jstests/core/capped3.js b/jstests/core/capped3.js new file mode 100644 index 00000000000..2e5e6790cb7 --- /dev/null +++ b/jstests/core/capped3.js @@ -0,0 +1,45 @@ +t = db.jstests_capped3; +t2 = db.jstests_capped3_clone; +t.drop(); +t2.drop(); +for( i = 0; i < 1000; ++i ) { + t.save( {i:i} ); +} +assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ), "A" ); +c = t2.find(); +for( i = 0; i < 1000; ++i ) { + assert.eq( i, c.next().i, "B" ); +} +assert( !c.hasNext(), "C" ); + +t.drop(); +t2.drop(); + +for( i = 0; i < 1000; ++i ) { + t.save( {i:i} ); +} +assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ), "D" ); +c = t2.find().sort( {$natural:-1} ); +i = 999; +while( 
c.hasNext() ) { + assert.eq( i--, c.next().i, "E" ); +} +//print( "i: " + i ); +var str = tojson( t2.stats() ); +//print( "stats: " + tojson( t2.stats() ) ); +assert( i < 990, "F" ); + +t.drop(); +t2.drop(); + +for( i = 0; i < 1000; ++i ) { + t.save( {i:i} ); +} +assert.commandWorked( t.convertToCapped( 1000 ), "G" ); +c = t.find().sort( {$natural:-1} ); +i = 999; +while( c.hasNext() ) { + assert.eq( i--, c.next().i, "H" ); +} +assert( i < 990, "I" ); +assert( i > 900, "J" ); diff --git a/jstests/core/capped5.js b/jstests/core/capped5.js new file mode 100644 index 00000000000..37b776ee1ca --- /dev/null +++ b/jstests/core/capped5.js @@ -0,0 +1,40 @@ + +tn = "capped5" + +t = db[tn] +t.drop(); + + +db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); +t.insert( { _id : 5 , x : 11 , z : 52 } ); +assert.eq( 1 , t.getIndexKeys().length , "A0" ) //now we assume _id index even on capped coll +assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" ); + +t.ensureIndex( { _id : 1 } ) +t.ensureIndex( { x : 1 } ) + +assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" ); +assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" ); + +t.drop(); +db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); +t.insert( { _id : 5 , x : 11 } ); +t.insert( { _id : 5 , x : 12 } ); +assert.eq( 1, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index +assert.eq( 1, t.find().toArray().length ); //_id index unique, so second insert fails + +t.drop(); +db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); +t.insert( { _id : 5 , x : 11 } ); +t.insert( { _id : 6 , x : 12 } ); +t.ensureIndex( { x:1 }, {unique:true} ); +assert.eq( 2, db.system.indexes.count( {ns:"test."+tn} ) ); //now we assume _id index +assert.eq( 2, t.find().hint( {x:1} ).toArray().length ); + +// SERVER-525 (closed) unique indexes in capped collection +t.drop(); +db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } ); +t.ensureIndex( { _id:1 } ); // note we assume will be 
automatically unique because it is _id +t.insert( { _id : 5 , x : 11 } ); +t.insert( { _id : 5 , x : 12 } ); +assert.eq( 1, t.find().toArray().length ); diff --git a/jstests/core/capped6.js b/jstests/core/capped6.js new file mode 100644 index 00000000000..5db12b2fcf9 --- /dev/null +++ b/jstests/core/capped6.js @@ -0,0 +1,109 @@ +// Test NamespaceDetails::cappedTruncateAfter via 'captrunc' command + +Random.setRandomSeed(); + +db.capped6.drop(); +db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); +tzz = db.capped6; + +function debug( x ) { +// print( x ); +} + +/** + * Check that documents in the collection are in order according to the value + * of a, which corresponds to the insert order. This is a check that the oldest + * document(s) is/are deleted when space is needed for the newest document. The + * check is performed in both forward and reverse directions. + */ +function checkOrder( i ) { + res = tzz.find().sort( { $natural: -1 } ); + assert( res.hasNext(), "A" ); + var j = i; + while( res.hasNext() ) { + try { + assert.eq( val[ j-- ].a, res.next().a, "B" ); + } catch( e ) { + debug( "capped6 err " + j ); + throw e; + } + } + res = tzz.find().sort( { $natural: 1 } ); + assert( res.hasNext(), "C" ); + while( res.hasNext() ) + assert.eq( val[ ++j ].a, res.next().a, "D" ); + assert.eq( j, i, "E" ); +} + +var val = new Array( 500 ); +var c = ""; +for( i = 0; i < 500; ++i, c += "-" ) { + // The a values are strings of increasing length. + val[ i ] = { a: c }; +} + +var oldMax = Random.randInt( 500 ); +var max = 0; + +/** + * Insert new documents until there are 'oldMax' documents in the collection, + * then remove a random number of documents (often all but one) via one or more + * 'captrunc' requests. 
+ */ +function doTest() { + for( var i = max; i < oldMax; ++i ) { + tzz.insert( val[ i ] ); + } + max = oldMax; + count = tzz.count(); + + var min = 1; + if ( Random.rand() > 0.3 ) { + min = Random.randInt( count ) + 1; + } + + // Iteratively remove a random number of documents until we have no more + // than 'min' documents. + while( count > min ) { + // 'n' is the number of documents to remove - we must account for the + // possibility that 'inc' will be true, and avoid removing all documents + // from the collection in that case, as removing all documents is not + // allowed by 'captrunc' + var n = Random.randInt( count - min - 1 ); // 0 <= x <= count - min - 1 + var inc = Random.rand() > 0.5; + debug( count + " " + n + " " + inc ); + assert.commandWorked( db.runCommand( { captrunc:"capped6", n:n, inc:inc } ) ); + if ( inc ) { + n += 1; + } + count -= n; + max -= n; + // Validate the remaining documents. + checkOrder( max - 1 ); + } +} + +// Repeatedly add up to 'oldMax' documents and then truncate the newest +// documents. Newer documents take up more space than older documents. +for( var i = 0; i < 10; ++i ) { + doTest(); +} + +// reverse order of values +var val = new Array( 500 ); + +var c = ""; +for( i = 499; i >= 0; --i, c += "-" ) { + val[ i ] = { a: c }; +} +db.capped6.drop(); +db._dbCommand( { create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); +tzz = db.capped6; + +// Same test as above, but now the newer documents take less space than the +// older documents instead of more. 
+for( var i = 0; i < 10; ++i ) { + doTest(); +} + +tzz.drop(); diff --git a/jstests/core/capped7.js b/jstests/core/capped7.js new file mode 100644 index 00000000000..693828da85f --- /dev/null +++ b/jstests/core/capped7.js @@ -0,0 +1,89 @@ +// Test NamespaceDetails::emptyCappedCollection via 'emptycapped' command + +Random.setRandomSeed(); + +db.capped7.drop(); +db._dbCommand( { create: "capped7", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } ); +tzz = db.capped7; + +var ten = new Array( 11 ).toString().replace( /,/g, "-" ); + +count = 0; + +/** + * Insert new documents until the capped collection loops and the document + * count doesn't increase on insert. + */ +function insertUntilFull() { +count = tzz.count(); + var j = 0; +while( 1 ) { + tzz.save( {i:ten,j:j++} ); + var newCount = tzz.count(); + if ( count == newCount ) { + break; + } + count = newCount; +} +} + +insertUntilFull(); + +// oldCount == count before empty +oldCount = count; + +assert.eq.automsg( "11", "tzz.stats().numExtents" ); + +// oldSize == size before empty +var oldSize = tzz.stats().storageSize; + +assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) ); + +// check that collection storage parameters are the same after empty +assert.eq.automsg( "11", "tzz.stats().numExtents" ); +assert.eq.automsg( "oldSize", "tzz.stats().storageSize" ); + +// check that the collection is empty after empty +assert.eq.automsg( "0", "tzz.find().itcount()" ); +assert.eq.automsg( "0", "tzz.count()" ); + +// check that we can reuse the empty collection, inserting as many documents +// as we were able to the first time through. 
+insertUntilFull(); +assert.eq.automsg( "oldCount", "count" ); +assert.eq.automsg( "oldCount", "tzz.find().itcount()" ); +assert.eq.automsg( "oldCount", "tzz.count()" ); + +assert.eq.automsg( "11", "tzz.stats().numExtents" ); +var oldSize = tzz.stats().storageSize; + +assert.commandWorked( db._dbCommand( { emptycapped: "capped7" } ) ); + +// check that the collection storage parameters are unchanged after another empty +assert.eq.automsg( "11", "tzz.stats().numExtents" ); +assert.eq.automsg( "oldSize", "tzz.stats().storageSize" ); + +// insert an arbitrary number of documents +var total = Random.randInt( 2000 ); +for( var j = 1; j <= total; ++j ) { + tzz.save( {i:ten,j:j} ); + // occasionally check that only the oldest documents are removed to make room + // for the newest documents + if ( Random.rand() > 0.95 ) { + assert.automsg( "j >= tzz.count()" ); + assert.eq.automsg( "tzz.count()", "tzz.find().itcount()" ); + var c = tzz.find().sort( {$natural:-1} ); + var k = j; + assert.automsg( "c.hasNext()" ); + while( c.hasNext() ) { + assert.eq.automsg( "c.next().j", "k--" ); + } + // check the same thing with a reverse iterator as well + var c = tzz.find().sort( {$natural:1} ); + assert.automsg( "c.hasNext()" ); + while( c.hasNext() ) { + assert.eq.automsg( "c.next().j", "++k" ); + } + assert.eq.automsg( "j", "k" ); + } +} \ No newline at end of file diff --git a/jstests/core/capped8.js b/jstests/core/capped8.js new file mode 100644 index 00000000000..0f30e37aebf --- /dev/null +++ b/jstests/core/capped8.js @@ -0,0 +1,108 @@ +// Test NamespaceDetails::cappedTruncateAfter with empty extents + +Random.setRandomSeed(); + +t = db.jstests_capped8; + +function debug( x ) { +// printjson( x ); +} + +/** Generate an object with a string field of specified length */ +function obj( size, x ) { + return {X:x, a:new Array( size + 1 ).toString()};; +} + +function withinOne( a, b ) { + assert( Math.abs( a - b ) <= 1, "not within one: " + a + ", " + b ) +} + +var X = 0; + +/** + * 
Insert enough documents of the given size spec that the collection will + * contain only documents having this size spec. + */ +function insertManyRollingOver( objsize ) { + // Add some variability, as the precise number can trigger different cases. + X++; + n = 250 + Random.randInt(10); + + assert(t.count() == 0 || t.findOne().X != X); + + for( i = 0; i < n; ++i ) { + t.save( obj( objsize, X ) ); + debug( t.count() ); + } + + if (t.findOne().X != X) { + printjson(t.findOne()); + print("\n\nERROR didn't roll over in insertManyRollingOver " + objsize); + print("approx amountwritten: " + (objsize * n)); + printjson(t.stats()); + assert(false); + } +} + +/** + * Insert some documents in such a way that there may be an empty extent, then + * truncate the capped collection. + */ +function insertAndTruncate( first ) { + myInitialCount = t.count(); + // Insert enough documents to make the capped allocation loop over. + insertManyRollingOver( 150 ); + myFiftyCount = t.count(); + // Insert documents that are too big to fit in the smaller extents. + insertManyRollingOver( 5000 ); + myTwokCount = t.count(); + if ( first ) { + initialCount = myInitialCount; + fiftyCount = myFiftyCount; + twokCount = myTwokCount; + // Sanity checks for collection count + assert( fiftyCount > initialCount ); + assert( fiftyCount > twokCount ); + } else { + // Check that we are able to insert roughly the same number of documents + // after truncating. The exact values are slightly variable as a result + // of the capped allocation algorithm. + withinOne( initialCount, myInitialCount ); + withinOne( fiftyCount, myFiftyCount ); + withinOne( twokCount, myTwokCount ); + } + count = t.count(); + // Check that we can truncate the collection successfully. 
+ assert.commandWorked( db.runCommand( { captrunc:"jstests_capped8", n:count - 1, inc:false } ) ); +} + +/** Test truncating and subsequent inserts */ +function testTruncate() { + insertAndTruncate( true ); + insertAndTruncate( false ); + insertAndTruncate( false ); +} + +var pass = 1; + +print("pass " + pass++); +t.drop(); +db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 10000, 4000 ] } ); +testTruncate(); + +print("pass " + pass++); +t.drop(); +db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 1000, 4000 ] } ); +testTruncate(); + +print("pass " + pass++); +t.drop(); +db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000, 4000 ] } ); +testTruncate(); + +print("pass " + pass++); +t.drop(); +db._dbCommand( { create:"jstests_capped8", capped: true, $nExtents: [ 10000 ] } ); +testTruncate(); + +t.drop(); diff --git a/jstests/core/capped9.js b/jstests/core/capped9.js new file mode 100644 index 00000000000..2e0c2c74640 --- /dev/null +++ b/jstests/core/capped9.js @@ -0,0 +1,27 @@ + +t = db.capped9; +t.drop(); + +db.createCollection("capped9" , {capped:true, size:1024*50 }); + +t.insert( { _id : 1 , x : 2 , y : 3 } ) + +assert.eq( 1 , t.find( { x : 2 } ).itcount() , "A1" ) +assert.eq( 1 , t.find( { y : 3 } ).itcount() , "A2" ) +//assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064 + +t.update( { _id : 1 } , { $set : { y : 4 } } ) +//assert( db.getLastError() , "B1" ); // SERVER-3064 +//assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064 + +t.ensureIndex( { _id : 1 } ) + +assert.eq( 1 , t.find( { _id : 1 } ).itcount() , "D1" ) + +assert.writeOK( t.update( { _id: 1 }, { $set: { y: 4 } } )); +assert.eq( 4 , t.findOne().y , "D2" ) + + + + + diff --git a/jstests/core/capped_empty.js b/jstests/core/capped_empty.js new file mode 100644 index 00000000000..5b0fb6b8f8e --- /dev/null +++ b/jstests/core/capped_empty.js @@ -0,0 +1,24 @@ + +t = db.capped_empty; 
+t.drop(); + +db.createCollection( t.getName() , { capped : true , size : 100 } ) + +t.insert( { x : 1 } ); +t.insert( { x : 2 } ); +t.insert( { x : 3 } ); +t.ensureIndex( { x : 1 } ); + +assert.eq( 3 , t.count() ); +assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned ); + +t.runCommand( "emptycapped" ); + +assert.eq( 0 , t.count() ); + +t.insert( { x : 1 } ); +t.insert( { x : 2 } ); +t.insert( { x : 3 } ); + +assert.eq( 3 , t.count() ); +assert.eq( 1 , t.find( { x : 2 } ).explain().nscanned ); diff --git a/jstests/core/capped_max.js b/jstests/core/capped_max.js new file mode 100644 index 00000000000..1d7cbc3ef23 --- /dev/null +++ b/jstests/core/capped_max.js @@ -0,0 +1,29 @@ + +t = db.capped_max; +sz = 1024 * 16; + +t.drop(); +db.createCollection( t.getName() , {capped: true, size: sz } ); +assert.lt( Math.pow( 2, 62 ), t.stats().max.floatApprox ) + +t.drop(); +db.createCollection( t.getName() , {capped: true, size: sz, max: 123456 } ); +assert.eq( 123456, t.stats().max ); + +// create a collection with the max possible doc cap (2^31-2 docs) +t.drop(); +mm = Math.pow(2, 31) - 2; +db.createCollection( t.getName() , {capped: true, size: sz, max: mm } ); +assert.eq( mm, t.stats().max ); + +// create a collection with the 'no max' value (2^31-1 docs) +t.drop(); +mm = Math.pow(2, 31) - 1; +db.createCollection( t.getName() , {capped: true, size: sz, max: mm } ); +assert.eq(NumberLong("9223372036854775807"), t.stats().max ); + +t.drop(); +res = db.createCollection( t.getName() , {capped: true, size: sz, max: Math.pow(2, 31) } ); +assert.eq( 0, res.ok, tojson(res) ); +assert.eq( 0, t.stats().ok ) + diff --git a/jstests/core/capped_server2639.js b/jstests/core/capped_server2639.js new file mode 100644 index 00000000000..adc6f994163 --- /dev/null +++ b/jstests/core/capped_server2639.js @@ -0,0 +1,27 @@ + +name = "server2639" + +t = db.getCollection( name ); +t.drop(); + + +db.createCollection( name , { capped : true , size : 1 } ); + +size = t.stats().storageSize; + 
+bigString = ""; +while ( bigString.length < size ) + bigString += "."; + +t.insert( { x : 1 } ); + +var res = t.insert( { x : 2 , bigString : bigString } ); +assert.writeError( res ); +assert.eq( 16328, res.getWriteError().code, res.getWriteError().toString() ); + +assert.eq( 1 , t.count() ); // make sure small doc didn't get deleted +assert.eq( 1 , t.findOne().x ); + +// make sure can still insert +t.insert( { x : 2 } ); +assert.eq( 2 , t.count() ); diff --git a/jstests/core/capped_server7543.js b/jstests/core/capped_server7543.js new file mode 100644 index 00000000000..514cd7964b2 --- /dev/null +++ b/jstests/core/capped_server7543.js @@ -0,0 +1,11 @@ + +mydb = db.getSisterDB( "capped_server7543" ); +mydb.dropDatabase(); + +mydb.createCollection( "foo" , { capped : true , size : 12288 } ); + +assert.eq( 12288, mydb.foo.stats().storageSize ); +assert.eq( 1, mydb.foo.validate(true).extentCount ); + +mydb.dropDatabase(); + diff --git a/jstests/core/cappeda.js b/jstests/core/cappeda.js new file mode 100644 index 00000000000..3244ffae84f --- /dev/null +++ b/jstests/core/cappeda.js @@ -0,0 +1,32 @@ + +t = db.scan_capped_id; +t.drop() + +x = t.runCommand( "create" , { capped : true , size : 10000 } ) +assert( x.ok ) + +for ( i=0; i<100; i++ ) + t.insert( { _id : i , x : 1 } ) + +function q() { + return t.findOne( { _id : 5 } ) +} + +function u() { + var res = t.update( { _id : 5 } , { $set : { x : 2 } } ); + if ( res.hasWriteErrors() ) + throw res; +} + + +// SERVER-3064 +//assert.throws( q , [] , "A1" ); +//assert.throws( u , [] , "B1" ); + +t.ensureIndex( { _id : 1 } ) + +assert.eq( 1 , q().x ) +q() +u() + +assert.eq( 2 , q().x ) diff --git a/jstests/core/check_shard_index.js b/jstests/core/check_shard_index.js new file mode 100644 index 00000000000..f85071124fb --- /dev/null +++ b/jstests/core/check_shard_index.js @@ -0,0 +1,141 @@ +// ------------------------- +// CHECKSHARDINGINDEX TEST UTILS +// ------------------------- + +f = db.jstests_shardingindex; +f.drop(); 
+ + +// ------------------------- +// Case 1: all entries filled or empty should make a valid index +// + +f.drop(); +f.ensureIndex( { x: 1 , y: 1 } ); +assert.eq( 0 , f.count() , "1. initial count should be zero" ); + +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( true , res.ok, "1a" ); + +f.save( { x: 1 , y : 1 } ); +assert.eq( 1 , f.count() , "1. count after initial insert should be 1" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( true , res.ok , "1b" ); + + +// ------------------------- +// Case 2: entry with null values would make an index unsuitable +// + +f.drop(); +f.ensureIndex( { x: 1 , y: 1 } ); +assert.eq( 0 , f.count() , "2. initial count should be zero" ); + +f.save( { x: 1 , y : 1 } ); +f.save( { x: null , y : 1 } ); + +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( true , res.ok , "2a " + tojson(res) ); + +f.save( { y: 2 } ); +assert.eq( 3 , f.count() , "2. count after initial insert should be 3" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "2b " + tojson(res) ); + +// Check _id index +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {_id:1} }); +assert.eq( true , res.ok , "2c " + tojson(res) ); +assert( res.idskip , "2d " + tojson(res) ) + +// ------------------------- +// Case 3: entry with array values would make an index unsuitable +// + +f.drop(); +f.ensureIndex( { x: 1 , y: 1 } ); +assert.eq( 0 , f.count() , "3. initial count should be zero" ); + +f.save( { x: 1 , y : 1 } ); +f.save( { x: [1, 2] , y : 2 } ); + +assert.eq( 2 , f.count() , "3. 
count after initial insert should be 2" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "3a " + tojson(res) ); + +f.remove( { y : 2 } ); +f.reIndex(); + +assert.eq( 1 , f.count() , "3. count after removing array value should be 1" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( true , res.ok , "3b " + tojson(res) ); + +f.save( { x : 2, y : [1, 2] } ) + +assert.eq( 2 , f.count() , "3. count after adding array value should be 2" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "3c " + tojson(res) ); + +// ------------------------- +// Case 4: Handles prefix shard key indexes. +// + +f.drop(); +f.ensureIndex( { x: 1 , y: 1, z: 1 } ); +assert.eq( 0 , f.count() , "4. initial count should be zero" ); + +f.save( { x: 1 , y : 1, z : 1 } ); + +assert.eq( 1 , f.count() , "4. count after initial insert should be 1" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); +assert.eq( true , res.ok , "4a " + tojson(res) ); + +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( true , res.ok , "4b " + tojson(res) ); + +f.save( { x: [1, 2] , y : 2, z : 2 } ); + +assert.eq( 2 , f.count() , "4. 
count after adding array value should be 2" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); +assert.eq( false , res.ok , "4c " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "4d " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); +assert.eq( false , res.ok , "4e " + tojson(res) ); + + +f.remove( { y : 2 } ); +f.reIndex(); + +assert.eq( 1 , f.count() , "4. count after removing array value should be 1" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); +assert.eq( true , res.ok , "4f " + tojson(res) ); + +f.save( { x : 3, y : [1, 2], z : 3 } ) + +assert.eq( 2 , f.count() , "4. count after adding array value on second key should be 2" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); +assert.eq( false , res.ok , "4g " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "4h " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); +assert.eq( false , res.ok , "4i " + tojson(res) ); + +f.remove( { x : 3 } ); +f.reIndex(); // Necessary so that the index is no longer marked as multikey + +assert.eq( 1 , f.count() , "4. count after removing array value should be 1 again" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); +assert.eq( true , res.ok , "4e " + tojson(res) ); + +f.save( { x : 4, y : 4, z : [1, 2] } ) + +assert.eq( 2 , f.count() , "4. 
count after adding array value on third key should be 2" ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} }); +assert.eq( false , res.ok , "4c " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} }); +assert.eq( false , res.ok , "4d " + tojson(res) ); +res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} }); +assert.eq( false , res.ok , "4e " + tojson(res) ); + + +print("PASSED"); diff --git a/jstests/core/collmod.js b/jstests/core/collmod.js new file mode 100644 index 00000000000..2dc5555f3ec --- /dev/null +++ b/jstests/core/collmod.js @@ -0,0 +1,82 @@ +// Basic js tests for the collMod command. +// Test setting the usePowerOf2Sizes flag, and modifying TTL indexes. + +function debug( x ) { + //printjson( x ); +} + +var coll = "collModTest"; +var t = db.getCollection( coll ); +t.drop(); + +db.createCollection( coll ); + + +// Verify the new collection has userFlags set to 1 +printjson(t.stats()); +assert.eq( t.stats().userFlags , 1 , "fresh collection doesn't have userFlags = 1 "); + +// Modify the collection with the usePowerOf2Sizes flag. Verify userFlags now = 0. 
+var res = db.runCommand( { "collMod" : coll, "usePowerOf2Sizes" : false } ); +debug( res ); +assert.eq( res.ok , 1 , "collMod failed" ); +assert.eq( t.stats().userFlags , 0 , "modified collection should have userFlags = 0 "); +var nso = db.system.namespaces.findOne( { name : t.getFullName() } ); +debug( nso ); +assert.eq( 0, nso.options.flags, "options didn't sync to system.namespaces: " + tojson( nso ) ); + +// Try to modify it with some unrecognized value +var res = db.runCommand( { "collMod" : coll, "unrecognized" : true } ); +debug( res ); +assert.eq( res.ok , 0 , "collMod shouldn't return ok with unrecognized value" ); + +// add a TTL index +t.ensureIndex( {a : 1}, { "expireAfterSeconds": 50 } ) +assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 50 } ), + "TTL index not added" ); + +// try to modify it with a bad key pattern +var res = db.runCommand( { "collMod" : coll, + "index" : { "keyPattern" : "bad" , "expireAfterSeconds" : 100 } } ); +debug( res ); +assert.eq( 0 , res.ok , "mod shouldn't work with bad keypattern"); + +// try to modify it without expireAfterSeconds field +var res = db.runCommand( { "collMod" : coll, + "index" : { "keyPattern" : {a : 1} } } ); +debug( res ); +assert.eq( 0 , res.ok , "TTL mod shouldn't work without expireAfterSeconds"); + +// try to modify it with a non-numeric expireAfterSeconds field +var res = db.runCommand( { "collMod" : coll, + "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : "100" } } ); +debug( res ); +assert.eq( 0 , res.ok , "TTL mod shouldn't work with non-numeric expireAfterSeconds"); + +// this time modifying should finally work +var res = db.runCommand( { "collMod" : coll, + "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : 100 } } ); +debug( res ); +assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ), + "TTL index not modified" ); + +// try to modify a faulty TTL index with a non-numeric expireAfterSeconds field +t.dropIndex( {a : 1 } 
); +t.ensureIndex( {a : 1} , { "expireAfterSeconds": "50" } ) +var res = db.runCommand( { "collMod" : coll, + "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } ); +debug( res ); +assert.eq( 0, res.ok, "shouldn't be able to modify faulty index spec" ); + +// try with new index, this time set both expireAfterSeconds and the usePowerOf2Sizes flag +t.dropIndex( {a : 1 } ); +t.ensureIndex( {a : 1} , { "expireAfterSeconds": 50 } ) +var res = db.runCommand( { "collMod" : coll , + "usePowerOf2Sizes" : true, + "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } ); +debug( res ); +assert.eq( 1, res.ok, "should be able to modify both userFlags and expireAfterSeconds" ); +assert.eq( t.stats().userFlags , 1 , "userflags should be 1 now"); +assert.eq( 1, db.system.indexes.count( { key : {a:1}, expireAfterSeconds : 100 } ), + "TTL index should be 100 now" ); + diff --git a/jstests/core/compact.js b/jstests/core/compact.js new file mode 100644 index 00000000000..2121debc17e --- /dev/null +++ b/jstests/core/compact.js @@ -0,0 +1,76 @@ +// compact.js + +var mydb = db.getSiblingDB('compact'); +t = mydb.compacttest; +t.drop(); +t.insert({ x: 3 }); +t.insert({ x: 3 }); +t.insert({ x: 5 }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.insert({ x: 4, z: 2, k: 'aaa' }); +t.ensureIndex({ x: 1 }); + +print("1"); + +var res = mydb.runCommand({ compact: 'compacttest', dev: true, force: true }); +printjson(res); +assert(res.ok); +assert(t.count() == 9); +var v = t.validate(true); +assert(v.ok); +assert(v.extentCount == 1); +assert(v.deletedCount == 1); +assert(t.getIndexes().length == 2); +var ssize = t.stats().storageSize; + +print("2"); +res = mydb.runCommand({ compact: 'compacttest', dev: true,paddingBytes:1000, force:true }); +assert(res.ok); +assert(t.count() == 9); +var v = t.validate(true); +assert(v.ok); 
+assert(t.stats().storageSize > ssize, "expected more storage given padding is higher. however it rounds off so if something changed this could be"); +//printjson(t.stats()); + +print("z"); + +t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); +t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); +t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); +t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); +t.insert({ x: 4, z: null, k: { f: "", b: ""} }); +t.insert({ x: 4, z: null, k: { c: ""} }); +t.insert({ x: 4, z: null, k: { h: ""} }); +t.insert({ x: 4, z: null }); +t.insert({ x: 4, z: 3}); +t.insert({ x: 4, z: 2, k: { a: "", b: ""} }); +t.insert({ x: 4, z: null, k: { c: ""} }); +t.insert({ x: 4, z: null, k: { c: ""} }); +t.insert({ x: 4, z: 3, k: { c: ""} }); + +t.ensureIndex({ z: 1, k: 1 }); +//t.ensureIndex({ z: 1, k: 1 }, { unique: true }); +//t.ensureIndex({ z: 1, k: 1 }, { dropDups: true, unique:true }); + +res = mydb.runCommand({ compact: 'compacttest', dev: true, paddingFactor: 1.2, force:true }); +printjson(res); +assert(res.ok); +assert(t.count() > 13); +var v = t.validate(true); +assert(v.ok); + +print("3"); + +// works on an empty collection? +t.remove({}); +assert(mydb.runCommand({ compact: 'compacttest', dev: true, force:true }).ok); +assert(t.count() == 0); +v = t.validate(true); +assert(v.ok); +assert(v.extentCount == 1); +assert(t.getIndexes().length == 3); + diff --git a/jstests/core/compact2.js b/jstests/core/compact2.js new file mode 100644 index 00000000000..0a7c343a3f9 --- /dev/null +++ b/jstests/core/compact2.js @@ -0,0 +1,52 @@ +// Compaction of a v0 index converts it to a v1 index using a v1 index comparator during external +// sort. SERVER-6499 + +t = db.jstests_compact2; +t.drop(); + +/** + * Assert that the index is of the expected version and its keys are ordered consistently with this + * version, and that the unique and background fields are set correctly. 
+ */ +function assertIndex( expectedVersion, unique, background ) { + indexSpec = db.system.indexes.findOne( { ns:t.toString(), key:{ date:1 } } ); + // The index version is as expected. + assert.eq( expectedVersion, indexSpec.v ); + // The index uniqueness is as expected (treat missing and false unique specs as equivalent). + assert.eq( !unique, !indexSpec.unique ); + // Background is as expected. + assert.eq( !background, !indexSpec.background ); + // Check that 'date' key ordering is consistent with the index version. + dates = t.find().hint( { date:1 } ).toArray().map( function( x ) { return x.date; } ); + if ( expectedVersion == 0 ) { + // Under v0 index comparison, new Date( -1 ) > new Date( 1 ). + assert.eq( [ new Date( 1 ), new Date( -1 ) ], dates ); + } + else { + // Under v1 index comparsion, new Date( -1 ) < new Date( 1 ). + assert.eq( [ new Date( -1 ), new Date( 1 ) ], dates ); + } +} + +/** Compact a collection and check the resulting indexes. */ +function checkCompact( originalVersion, unique, background ) { + t.drop(); + t.save( { date:new Date( 1 ) } ); + t.save( { date:new Date( -1 ) } ); + t.ensureIndex( { date:1 }, { unique:unique, v:originalVersion, background:background } ); + assertIndex( originalVersion, unique, background ); + + // Under SERVER-6499, compact fails when a v0 index is converted to a v1 index and key + // comparisons are inconsistent, as with the date values in this test. + assert.commandWorked( t.runCommand( "compact" ) ); + assert( !db.getLastError() ); + + // Compact built an index with the default index version (v1). Uniqueness is maintained, but + // background always becomes false. 
+ assertIndex( 1, unique, false ); +} + +checkCompact( 0, true, true ); +checkCompact( 0, false, false ); +checkCompact( 1, true, false ); +checkCompact( 1, false, true ); diff --git a/jstests/core/compactPreservePadding.js b/jstests/core/compactPreservePadding.js new file mode 100644 index 00000000000..4748afb9a82 --- /dev/null +++ b/jstests/core/compactPreservePadding.js @@ -0,0 +1,26 @@ +// test preservePadding + +var mydb = db.getSiblingDB('compactPreservePadding'); +var collName = "compactPreservePadding"; +var t = mydb.getCollection(collName); +t.drop(); + +// use larger keyname to avoid hitting an edge case with extents +for (i = 0; i < 10000; i++) { + t.insert({useLargerKeyName:i}); +} + +// remove half the entries +t.remove({useLargerKeyName:{$mod:[2,0]}}) +printjson(t.stats()); +originalSize = t.stats().size; +originalStorage = t.stats().storageSize; + +// compact! +mydb.runCommand({compact: collName, preservePadding: true}); +printjson(t.stats()); + +// object sizes ('size') should be the same (unless we hit an edge case involving extents, which +// this test doesn't) and storage size should shrink +assert(originalSize == t.stats().size); +assert(originalStorage > t.stats().storageSize); diff --git a/jstests/core/connection_status.js b/jstests/core/connection_status.js new file mode 100644 index 00000000000..08d05cbf28d --- /dev/null +++ b/jstests/core/connection_status.js @@ -0,0 +1,27 @@ +// Tests the connectionStatus command + +var dbName = 'connection_status'; +var myDB = db.getSiblingDB(dbName); +myDB.dropAllUsers(); + +function test(userName) { + myDB.createUser({user: userName, pwd: "weak password", roles: jsTest.basicUserRoles}); + myDB.auth(userName, "weak password"); + + var output = myDB.runCommand("connectionStatus"); + assert.commandWorked(output); + var users = output.authInfo.authenticatedUsers; + + var matches = 0; + for (var i=0; i < users.length; i++) { + if (users[i].db != dbName) + continue; + + assert.eq(users[i].user, userName); + 
matches++; + } + assert.eq(matches, 1); +} + +test("someone"); +test("someone else"); // replaces someone diff --git a/jstests/core/connection_string_validation.js b/jstests/core/connection_string_validation.js new file mode 100644 index 00000000000..4ecd1f926ee --- /dev/null +++ b/jstests/core/connection_string_validation.js @@ -0,0 +1,106 @@ +// Test validation of connection strings passed to the JavaScript "connect()" function. +// Related to SERVER-8030. + +port = "27017" + +if ( db.getMongo().host.indexOf( ":" ) >= 0 ) { + var idx = db.getMongo().host.indexOf( ":" ); + port = db.getMongo().host.substring( idx + 1 ); +} + +var goodStrings = [ + "localhost:" + port + "/test", + "127.0.0.1:" + port + "/test" + ]; + +var badStrings = [ + { s: undefined, r: /^Missing connection string$/ }, + { s: 7, r: /^Incorrect type/ }, + { s: null, r: /^Incorrect type/ }, + { s: "", r: /^Empty connection string$/ }, + { s: " ", r: /^Empty connection string$/ }, + { s: ":", r: /^Missing host name/ }, + { s: "/", r: /^Missing host name/ }, + { s: ":/", r: /^Missing host name/ }, + { s: ":/test", r: /^Missing host name/ }, + { s: ":" + port + "/", r: /^Missing host name/ }, + { s: ":" + port + "/test", r: /^Missing host name/ }, + { s: "/test", r: /^Missing host name/ }, + { s: "localhost:/test", r: /^Missing port number/ }, + { s: "127.0.0.1:/test", r: /^Missing port number/ }, + { s: "127.0.0.1:cat/test", r: /^Invalid port number/ }, + { s: "127.0.0.1:1cat/test", r: /^Invalid port number/ }, + { s: "127.0.0.1:123456/test", r: /^Invalid port number/ }, + { s: "127.0.0.1:65536/test", r: /^Invalid port number/ }, + { s: "::1:65536/test", r: /^Invalid port number/ }, + { s: "127.0.0.1:" + port + "/", r: /^Missing database name/ }, + { s: "::1:" + port + "/", r: /^Missing database name/ } + ]; + +function testGood(i, connectionString) { + print("\nTesting good connection string " + i + " (\"" + connectionString + "\") ..."); + var gotException = false; + var exception; + try { + var 
connectDB = connect(connectionString); + connectDB = null; + } + catch (e) { + gotException = true; + exception = e; + } + if (!gotException) { + print("Good connection string " + i + + " (\"" + connectionString + "\") correctly validated"); + return; + } + var message = "FAILED to correctly validate goodString " + i + + " (\"" + connectionString + "\"): exception was \"" + tojson(exception) + "\""; + doassert(message); +} + +function testBad(i, connectionString, errorRegex) { + print("\nTesting bad connection string " + i + " (\"" + connectionString + "\") ..."); + var gotException = false; + var gotCorrectErrorText = false; + var exception; + try { + var connectDB = connect(connectionString); + connectDB = null; + } + catch (e) { + gotException = true; + exception = e; + if (errorRegex.test(e.message)) { + gotCorrectErrorText = true; + } + } + if (gotCorrectErrorText) { + print("Bad connection string " + i + " (\"" + connectionString + + "\") correctly rejected:\n" + tojson(exception)); + return; + } + var message = "FAILED to generate correct exception for badString " + i + + " (\"" + connectionString + "\"): "; + if (gotException) { + message += "exception was \"" + tojson(exception) + + "\", it should have matched \"" + errorRegex.toString() + "\""; + } + else { + message += "no exception was thrown"; + } + doassert(message); +} + +var i; +jsTest.log("TESTING " + goodStrings.length + " good connection strings"); +for (i = 0; i < goodStrings.length; ++i) { + testGood(i, goodStrings[i]); +} + +jsTest.log("TESTING " + badStrings.length + " bad connection strings"); +for (i = 0; i < badStrings.length; ++i) { + testBad(i, badStrings[i].s, badStrings[i].r); +} + +jsTest.log("SUCCESSFUL test completion"); diff --git a/jstests/core/constructors.js b/jstests/core/constructors.js new file mode 100644 index 00000000000..ac8ae08d7c0 --- /dev/null +++ b/jstests/core/constructors.js @@ -0,0 +1,313 @@ +// Tests to see what validity checks are done for 10gen specific object 
construction + +// Takes a list of constructors and returns a new list with an extra entry for each constructor with +// "new" prepended +function addConstructorsWithNew (constructorList) { + function prependNew (constructor) { + return "new " + constructor; + } + + var valid = constructorList.valid; + var invalid = constructorList.invalid; + // We use slice(0) here to make a copy of our lists + var validWithNew = valid.concat(valid.slice(0).map(prependNew)); + var invalidWithNew = invalid.concat(invalid.slice(0).map(prependNew)); + return { "valid" : validWithNew, "invalid" : invalidWithNew }; +} + +function clientEvalConstructorTest (constructorList) { + constructorList = addConstructorsWithNew(constructorList); + constructorList.valid.forEach(function (constructor) { + try { + eval(constructor); + } + catch (e) { + throw ("valid constructor: " + constructor + " failed in eval context: " + e); + } + }); + constructorList.invalid.forEach(function (constructor) { + assert.throws(function () { eval(constructor) }, + [], "invalid constructor did not throw error in eval context: " + constructor); + }); +} + +function dbEvalConstructorTest (constructorList) { + constructorList = addConstructorsWithNew(constructorList); + constructorList.valid.forEach(function (constructor) { + try { + db.eval(constructor); + } + catch (e) { + throw ("valid constructor: " + constructor + " failed in db.eval context: " + e); + } + }); + constructorList.invalid.forEach(function (constructor) { + assert.throws(function () { db.eval(constructor) }, + [], "invalid constructor did not throw error in db.eval context: " + constructor); + }); +} + +function mapReduceConstructorTest (constructorList) { + constructorList = addConstructorsWithNew(constructorList); + t = db.mr_constructors; + t.drop(); + + t.save( { "partner" : 1, "visits" : 9 } ) + t.save( { "partner" : 2, "visits" : 9 } ) + t.save( { "partner" : 1, "visits" : 11 } ) + t.save( { "partner" : 1, "visits" : 30 } ) + t.save( { 
"partner" : 2, "visits" : 41 } ) + t.save( { "partner" : 2, "visits" : 41 } ) + + constructorList.valid.forEach(function (constructor) { + try { + m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }"); + + r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }"); + + res = t.mapReduce( m , r , { out : "mr_constructors_out" , scope : { xx : 1 } } ); + } + catch (e) { + throw ("valid constructor: " + constructor + " failed in mapReduce context: " + e); + } + }); + constructorList.invalid.forEach(function (constructor) { + m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }"); + + r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }"); + + assert.throws(function () { res = t.mapReduce( m , r , + { out : "mr_constructors_out" , scope : { xx : 1 } } ) }, + [], "invalid constructor did not throw error in mapReduce context: " + constructor); + }); + + db.mr_constructors_out.drop(); + t.drop(); +} + +function whereConstructorTest (constructorList) { + constructorList = addConstructorsWithNew(constructorList); + t = db.where_constructors; + t.drop(); + assert.writeOK( t.insert({ x : 1 })); + + constructorList.valid.forEach(function (constructor) { + try { + t.findOne({ $where : constructor }); + } + catch (e) { + throw ("valid constructor: " + constructor + " failed in $where query: " + e); + } + }); + constructorList.invalid.forEach(function (constructor) { + assert.throws(function () { t.findOne({ $where : constructor }) }, + [], "invalid constructor did not throw error in $where query: " + constructor); + }); +} + +var dbrefConstructors = { + "valid" : [ + "DBRef(\"namespace\", 0)", + "DBRef(\"namespace\", \"test\")", + "DBRef(\"namespace\", ObjectId())", + "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))", + ], + "invalid" : [ + "DBRef()", + "DBRef(true, ObjectId())", + "DBRef(\"namespace\")", + "DBRef(\"namespace\", ObjectId(), true)", + ] +} + +var dbpointerConstructors = 
{ + "valid" : [ + "DBPointer(\"namespace\", ObjectId())", + "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))", + ], + "invalid" : [ + "DBPointer()", + "DBPointer(true, ObjectId())", + "DBPointer(\"namespace\", 0)", + "DBPointer(\"namespace\", \"test\")", + "DBPointer(\"namespace\")", + "DBPointer(\"namespace\", ObjectId(), true)", + ] +} + + +var objectidConstructors = { + "valid" : [ + 'ObjectId()', + 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")', + ], + "invalid" : [ + 'ObjectId(5)', + 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")', + ] +} + +var timestampConstructors = { + "valid" : [ + 'Timestamp()', + 'Timestamp(0,0)', + 'Timestamp(1.0,1.0)', + ], + "invalid" : [ + 'Timestamp(0)', + 'Timestamp(0,0,0)', + 'Timestamp("test","test")', + 'Timestamp("test",0)', + 'Timestamp(0,"test")', + 'Timestamp(true,true)', + 'Timestamp(true,0)', + 'Timestamp(0,true)', + ] +} + +var bindataConstructors = { + "valid" : [ + 'BinData(0,"test")', + ], + "invalid" : [ + 'BinData(0,"test", "test")', + 'BinData()', + 'BinData(-1, "")', + 'BinData(256, "")', + 'BinData("string","aaaa")', + // SERVER-10152 + //'BinData(0, true)', + //'BinData(0, null)', + //'BinData(0, undefined)', + //'BinData(0, {})', + //'BinData(0, [])', + //'BinData(0, function () {})', + ] +} + +var uuidConstructors = { + "valid" : [ + 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', + ], + "invalid" : [ + 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', + 'UUID()', + 'UUID("aa")', + 'UUID("invalidhex")', + // SERVER-9686 + //'UUID("invalidhexbutstilltherequiredlen")', + 'UUID(true)', + 'UUID(null)', + 'UUID(undefined)', + 'UUID({})', + 'UUID([])', + 'UUID(function () {})', + ] +} + +var md5Constructors = { + "valid" : [ + 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', + ], + "invalid" : [ + 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', + 'MD5()', + 'MD5("aa")', + 'MD5("invalidhex")', + // SERVER-9686 + //'MD5("invalidhexbutstilltherequiredlen")', + 'MD5(true)', + 'MD5(null)', + 'MD5(undefined)', + 'MD5({})', + 'MD5([])', 
+ 'MD5(function () {})', + ] +} + +var hexdataConstructors = { + "valid" : [ + 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', + // Numbers as the payload are converted to strings, so HexData(0, 100) == HexData(0, "100") + 'HexData(0, 100)', + 'HexData(0, "")', + 'HexData(0, "aaa")', + 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', + 'HexData(0, "000000000000000000000005")', // SERVER-9605 + ], + "invalid" : [ + 'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)', + 'HexData()', + 'HexData(0)', + 'HexData(-1, "")', + 'HexData(256, "")', + 'HexData("string","aaaa")', + // SERVER-10152 + //'HexData(0, true)', + //'HexData(0, null)', + //'HexData(0, undefined)', + //'HexData(0, {})', + //'HexData(0, [])', + //'HexData(0, function () {})', + // SERVER-9686 + //'HexData(0, "invalidhex")', + ] +} + +var dateConstructors = { + "valid" : [ + 'Date()', + 'Date(0)', + 'Date(0,0)', + 'Date(0,0,0)', + 'Date("foo")', + ], + "invalid" : [ + ] +} + +clientEvalConstructorTest(dbrefConstructors); +clientEvalConstructorTest(dbpointerConstructors); +clientEvalConstructorTest(objectidConstructors); +clientEvalConstructorTest(timestampConstructors); +clientEvalConstructorTest(bindataConstructors); +clientEvalConstructorTest(uuidConstructors); +clientEvalConstructorTest(md5Constructors); +clientEvalConstructorTest(hexdataConstructors); +clientEvalConstructorTest(dateConstructors); + +dbEvalConstructorTest(dbrefConstructors); +dbEvalConstructorTest(dbpointerConstructors); +dbEvalConstructorTest(objectidConstructors); +dbEvalConstructorTest(timestampConstructors); +dbEvalConstructorTest(bindataConstructors); +dbEvalConstructorTest(uuidConstructors); +dbEvalConstructorTest(md5Constructors); +dbEvalConstructorTest(hexdataConstructors); +dbEvalConstructorTest(dateConstructors); + +// SERVER-8963 +if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") { + mapReduceConstructorTest(dbrefConstructors); + mapReduceConstructorTest(dbpointerConstructors); + 
mapReduceConstructorTest(objectidConstructors); + mapReduceConstructorTest(timestampConstructors); + mapReduceConstructorTest(bindataConstructors); + mapReduceConstructorTest(uuidConstructors); + mapReduceConstructorTest(md5Constructors); + mapReduceConstructorTest(hexdataConstructors); +} +mapReduceConstructorTest(dateConstructors); + +// SERVER-8963 +if (db.runCommand({buildinfo:1}).javascriptEngine == "V8") { + whereConstructorTest(dbrefConstructors); + whereConstructorTest(dbpointerConstructors); + whereConstructorTest(objectidConstructors); + whereConstructorTest(timestampConstructors); + whereConstructorTest(bindataConstructors); + whereConstructorTest(uuidConstructors); + whereConstructorTest(md5Constructors); + whereConstructorTest(hexdataConstructors); +} +whereConstructorTest(dateConstructors); diff --git a/jstests/core/copydb.js b/jstests/core/copydb.js new file mode 100644 index 00000000000..7c7c02542a4 --- /dev/null +++ b/jstests/core/copydb.js @@ -0,0 +1,20 @@ + + + + +a = db.getSisterDB( "copydb-test-a" ); +b = db.getSisterDB( "copydb-test-b" ); + +a.dropDatabase(); +b.dropDatabase(); + +a.foo.save( { a : 1 } ); + +assert.eq( 1 , a.foo.count() , "A" ); +assert.eq( 0 , b.foo.count() , "B" ); + +a.copyDatabase( a._name , b._name ); + +assert.eq( 1 , a.foo.count() , "C" ); +assert.eq( 1 , b.foo.count() , "D" ); + diff --git a/jstests/core/count.js b/jstests/core/count.js new file mode 100644 index 00000000000..5502d7176c1 --- /dev/null +++ b/jstests/core/count.js @@ -0,0 +1,25 @@ +t = db.jstests_count; + +t.drop(); +t.save( { i: 1 } ); +t.save( { i: 2 } ); +assert.eq( 1, t.find( { i: 1 } ).count(), "A" ); +assert.eq( 1, t.count( { i: 1 } ) , "B" ); +assert.eq( 2, t.find().count() , "C" ); +assert.eq( 2, t.find( undefined ).count() , "D" ); +assert.eq( 2, t.find( null ).count() , "E" ); +assert.eq( 2, t.count() , "F" ); + +t.drop(); +t.save( {a:true,b:false} ); +t.ensureIndex( {b:1,a:1} ); +assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" ); 
+assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" ); + +t.drop(); +t.save( {a:true,b:false} ); +t.ensureIndex( {b:1,a:1,c:1} ); + +assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" ); +assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" ); + diff --git a/jstests/core/count10.js b/jstests/core/count10.js new file mode 100644 index 00000000000..21243b3151d --- /dev/null +++ b/jstests/core/count10.js @@ -0,0 +1,59 @@ +// Test that interrupting a count returns an error code. + +t = db.count10; +t.drop(); + +for ( i=0; i<100; i++ ){ + t.save( { x : i } ); +} + +// Start a parallel shell which repeatedly checks for a count +// query using db.currentOp(). As soon as the op is found, +// kill it via db.killOp(). +s = startParallelShell( + 'assert.soon(function() {' + + ' current = db.currentOp({"ns": db.count10.getFullName(), ' + + ' "query.count": db.count10.getName()}); ' + + + // Check that we found the count op. If not, return false so + // that assert.soon will retry. + ' assert("inprog" in current); ' + + ' if (current.inprog.length === 0) { ' + + ' jsTest.log("count10.js: did not find count op, retrying"); ' + + ' printjson(current); ' + + ' return false; ' + + ' } ' + + ' countOp = current.inprog[0]; ' + + ' if (!countOp) { ' + + ' jsTest.log("count10.js: did not find count op, retrying"); ' + + ' printjson(current); ' + + ' return false; ' + + ' } ' + + + // Found the count op. Try to kill it. 
+ ' jsTest.log("count10.js: found count op:"); ' + + ' printjson(current); ' + + ' printjson(db.killOp(countOp.opid)); ' + + ' return true; ' + + '}, "count10.js: could not find count op after retrying, gave up");' +); + +function getKilledCount() { + try { + db.count10.find("sleep(1000)").count(); + jsTest.log("count10.js: count op completed without being killed"); + } catch (e) { + return e; + } +} + +var res = getKilledCount(); +jsTest.log("count10.js: killed count output start"); +printjson(res); +jsTest.log("count10.js: killed count output end"); +assert(res); +assert(res.match(/count failed/) !== null); +assert(res.match(/\"code\"/) !== null); + +s(); + diff --git a/jstests/core/count2.js b/jstests/core/count2.js new file mode 100644 index 00000000000..4d060aaac20 --- /dev/null +++ b/jstests/core/count2.js @@ -0,0 +1,28 @@ +t = db.count2; +t.drop(); + +for ( var i=0; i<1000; i++ ){ + t.save( { num : i , m : i % 20 } ); +} + +assert.eq( 1000 , t.count() , "A" ) +assert.eq( 1000 , t.find().count() , "B" ) +assert.eq( 1000 , t.find().toArray().length , "C" ) + +assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" ) +assert.eq( 50 , t.find( { m : 5 } ).count() , "E" ) + +assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" ) +assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" ) +assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" ) + +assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" ) +assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" ) +assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" ) + +assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" ) + +// Negative skip values should return error +var negSkipResult = db.runCommand({ count: 't', skip : -2 }); +assert( ! 
negSkipResult.ok , "negative skip value shouldn't work, n = " + negSkipResult.n ); +assert( negSkipResult.errmsg.length > 0 , "no error msg for negative skip" ); diff --git a/jstests/core/count3.js b/jstests/core/count3.js new file mode 100644 index 00000000000..a8c3ef5faad --- /dev/null +++ b/jstests/core/count3.js @@ -0,0 +1,26 @@ + +t = db.count3; + +t.drop(); + +t.save( { a : 1 } ); +t.save( { a : 1 , b : 2 } ); + +assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" ); +assert.eq( 2 , t.find( { a : 1 } ).count() , "B" ); + +assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" ); +assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" ); + +t.drop(); + +t.save( { a : 1 } ); + +assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" ); +assert.eq( 1 , t.find( { a : 1 } ).count() , "F" ); + +assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" ); +assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" ); + + + diff --git a/jstests/core/count4.js b/jstests/core/count4.js new file mode 100644 index 00000000000..7be74362603 --- /dev/null +++ b/jstests/core/count4.js @@ -0,0 +1,17 @@ + +t = db.count4; +t.drop(); + +for ( i=0; i<100; i++ ){ + t.save( { x : i } ); +} + +q = { x : { $gt : 25 , $lte : 75 } } + +assert.eq( 50 , t.find( q ).count() , "A" ); +assert.eq( 50 , t.find( q ).itcount() , "B" ); + +t.ensureIndex( { x : 1 } ); + +assert.eq( 50 , t.find( q ).count() , "C" ); +assert.eq( 50 , t.find( q ).itcount() , "D" ); diff --git a/jstests/core/count5.js b/jstests/core/count5.js new file mode 100644 index 00000000000..b6bbc543352 --- /dev/null +++ b/jstests/core/count5.js @@ -0,0 +1,30 @@ + +t = db.count5; +t.drop(); + +for ( i=0; i<100; i++ ){ + t.save( { x : i } ); +} + +q = { x : { $gt : 25 , $lte : 75 } }; + +assert.eq( 50 , t.find( q ).count() , "A" ); +assert.eq( 50 , t.find( q ).itcount() , "B" ); + +t.ensureIndex( { x : 1 } ); + +assert.eq( 50 , t.find( q ).count() , "C" ); +assert.eq( 50 , t.find( q ).itcount() , "D" ); + 
+assert.eq( 50 , t.find( q ).limit(1).count() , "E" ); +assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" ); + +assert.eq( 5 , t.find( q ).limit(5).size() , "G" ); +assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" ); +assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" ); + +assert.eq( 20 , t.find().limit(20).size() , "J" ); + +assert.eq( 0 , t.find().skip(120).size() , "K" ); +assert.eq( 1 , db.$cmd.findOne( { count: "count5" } )["ok"] , "L" ); +assert.eq( 1 , db.$cmd.findOne( { count: "count5", skip: 120 } )["ok"] , "M" ); diff --git a/jstests/core/count6.js b/jstests/core/count6.js new file mode 100644 index 00000000000..44c5fa33bc7 --- /dev/null +++ b/jstests/core/count6.js @@ -0,0 +1,61 @@ +// Some correctness checks for fast and normal count modes, including with skip and limit. + +t = db.jstests_count6; + +function checkCountForObject( obj ) { + t.drop(); + t.ensureIndex( {b:1,a:1} ); + + function checkCounts( query, expected ) { + assert.eq( expected, t.count( query ) , "A1" ); + assert.eq( expected, t.find( query ).skip( 0 ).limit( 0 ).count( true ) , "A2" ); + // Check proper counts with various skip and limit specs. 
+ for( var skip = 1; skip <= 2; ++skip ) { + for( var limit = 1; limit <= 2; ++limit ) { + assert.eq( Math.max( expected - skip, 0 ), t.find( query ).skip( skip ).count( true ) , "B1" ); + assert.eq( Math.min( expected, limit ), t.find( query ).limit( limit ).count( true ) , "B2" ); + assert.eq( Math.min( Math.max( expected - skip, 0 ), limit ), t.find( query ).skip( skip ).limit( limit ).count( true ) , "B4" ); + + // Check limit(x) = limit(-x) + assert.eq( t.find( query ).limit( limit ).count( true ), + t.find( query ).limit( -limit ).count( true ) , "C1" ); + assert.eq( t.find( query ).skip( skip ).limit( limit ).count( true ), + t.find( query ).skip( skip ).limit( -limit ).count( true ) , "C2" ); + } + } + + // Check limit(0) has no effect + assert.eq( expected, t.find( query ).limit( 0 ).count( true ) , "D1" ); + assert.eq( Math.max( expected - skip, 0 ), + t.find( query ).skip( skip ).limit( 0 ).count( true ) , "D2" ); + assert.eq( expected, t.getDB().runCommand({ count: t.getName(), + query: query, limit: 0 }).n , "D3" ); + assert.eq( Math.max( expected - skip, 0 ), + t.getDB().runCommand({ count: t.getName(), + query: query, limit: 0, skip: skip }).n , "D4" ); + } + + for( var i = 0; i < 5; ++i ) { + checkCounts( {a:obj.a,b:obj.b}, i ); + checkCounts( {b:obj.b,a:obj.a}, i ); + t.insert( obj ); + } + + t.insert( {a:true,b:true} ); + t.insert( {a:true,b:1} ); + t.insert( {a:false,b:1} ); + t.insert( {a:false,b:true} ); + t.insert( {a:false,b:false} ); + + checkCounts( {a:obj.a,b:obj.b}, i ); + checkCounts( {b:obj.b,a:obj.a}, i ); + + // Check with no query + checkCounts( {}, 10 ); +} + +// Check fast count mode. +checkCountForObject( {a:true,b:false} ); + +// Check normal count mode. +checkCountForObject( {a:1,b:0} ); diff --git a/jstests/core/count7.js b/jstests/core/count7.js new file mode 100644 index 00000000000..c2c1260d49b --- /dev/null +++ b/jstests/core/count7.js @@ -0,0 +1,25 @@ +// Check normal count matching and deduping. 
+ +t = db.jstests_count7; +t.drop(); + +t.ensureIndex( {a:1} ); +t.save( {a:'algebra'} ); +t.save( {a:'apple'} ); +t.save( {a:'azores'} ); +t.save( {a:'bumper'} ); +t.save( {a:'supper'} ); +t.save( {a:'termite'} ); +t.save( {a:'zeppelin'} ); +t.save( {a:'ziggurat'} ); +t.save( {a:'zope'} ); + +assert.eq( 5, t.count( {a:/p/} ) ); + +t.remove({}); + +t.save( {a:[1,2,3]} ); +t.save( {a:[1,2,3]} ); +t.save( {a:[1]} ); + +assert.eq( 2, t.count( {a:{$gt:1}} ) ); diff --git a/jstests/core/count9.js b/jstests/core/count9.js new file mode 100644 index 00000000000..888ffe3b544 --- /dev/null +++ b/jstests/core/count9.js @@ -0,0 +1,28 @@ +// Test fast mode count with multikey entries. + +t = db.jstests_count9; +t.drop(); + +t.ensureIndex( {a:1} ); + +t.save( {a:['a','b','a']} ); +assert.eq( 1, t.count( {a:'a'} ) ); + +t.save( {a:['a','b','a']} ); +assert.eq( 2, t.count( {a:'a'} ) ); + +t.drop(); +t.ensureIndex( {a:1,b:1} ); + +t.save( {a:['a','b','a'],b:'r'} ); +assert.eq( 1, t.count( {a:'a',b:'r'} ) ); +assert.eq( 1, t.count( {a:'a'} ) ); + +t.save( {a:['a','b','a'],b:'r'} ); +assert.eq( 2, t.count( {a:'a',b:'r'} ) ); +assert.eq( 2, t.count( {a:'a'} ) ); + +t.drop(); +t.ensureIndex( {'a.b':1,'a.c':1} ); +t.save( {a:[{b:'b',c:'c'},{b:'b',c:'c'}]} ); +assert.eq( 1, t.count( {'a.b':'b','a.c':'c'} ) ); diff --git a/jstests/core/count_hint.js b/jstests/core/count_hint.js new file mode 100644 index 00000000000..93322d282db --- /dev/null +++ b/jstests/core/count_hint.js @@ -0,0 +1,20 @@ +// test passing hint to the count cmd +// hints are ignored if there is no query predicate +t = db.jstests_count_hint; +t.drop(); + +t.save( { i: 1 } ); +t.save( { i: 2 } ); +assert.eq( 2, t.find().count() ); + +t.ensureIndex( { i:1 } ); + +assert.eq( 1, t.find( { i: 1 } ).hint( "_id_" ).count(), "A" ); +assert.eq( 2, t.find().hint( "_id_" ).count(), "B" ); +assert.throws( function() { t.find( { i: 1 } ).hint( "BAD HINT" ).count(); } ); + +// create a sparse index which should have no entries 
+t.ensureIndex( { x:1 }, { sparse:true } ); + +assert.eq( 0, t.find( { i: 1 } ).hint( "x_1" ).count(), "C" ); +assert.eq( 2, t.find().hint( "x_1" ).count(), "D" ); diff --git a/jstests/core/counta.js b/jstests/core/counta.js new file mode 100644 index 00000000000..f0834d455dd --- /dev/null +++ b/jstests/core/counta.js @@ -0,0 +1,14 @@ +// Check that count returns 0 in some exception cases. + +t = db.jstests_counta; +t.drop(); + +for( i = 0; i < 10; ++i ) { + t.save( {a:i} ); +} + +// f() is undefined, causing an assertion +assert.throws( + function(){ + t.count( { $where:function() { if ( this.a < 5 ) { return true; } else { f(); } } } ); + } ); diff --git a/jstests/core/countb.js b/jstests/core/countb.js new file mode 100644 index 00000000000..8f7131a5a6c --- /dev/null +++ b/jstests/core/countb.js @@ -0,0 +1,11 @@ +// Test fast count mode with single key index unsatisfiable constraints on a multi key index. + +t = db.jstests_countb; +t.drop(); + +t.ensureIndex( {a:1} ); +t.save( {a:['a','b']} ); +assert.eq( 0, t.find( {a:{$in:['a'],$gt:'b'}} ).count() ); +assert.eq( 0, t.find( {$and:[{a:'a'},{a:{$gt:'b'}}]} ).count() ); +assert.eq( 1, t.find( {$and:[{a:'a'},{$where:"this.a[1]=='b'"}]} ).count() ); +assert.eq( 0, t.find( {$and:[{a:'a'},{$where:"this.a[1]!='b'"}]} ).count() ); diff --git a/jstests/core/countc.js b/jstests/core/countc.js new file mode 100644 index 00000000000..260dbb1f264 --- /dev/null +++ b/jstests/core/countc.js @@ -0,0 +1,124 @@ +// In fast count mode the Matcher is bypassed when matching can be performed by a BtreeCursor and +// its delegate FieldRangeVector or an IntervalBtreeCursor. The tests below check that fast count +// mode is implemented appropriately in specific cases. +// +// SERVER-1752 + +t = db.jstests_countc; +t.drop(); + + +// Match a subset of inserted values within a $in operator. +t.drop(); +t.ensureIndex( { a:1 } ); +// Save 'a' values 0, 0.5, 1.5, 2.5 ... 97.5, 98.5, 99. 
+t.save( { a:0 } ); +t.save( { a:99 } ); +for( i = 0; i < 99; ++i ) { + t.save( { a:( i + 0.5 ) } ); +} +// Query 'a' values $in 0, 1, 2, ..., 99. +vals = []; +for( i = 0; i < 100; ++i ) { + vals.push( i ); +} +// Only values 0 and 99 of the $in set are present in the collection, so the expected count is 2. +assert.eq( 2, t.count( { a:{ $in:vals } } ) ); + + +// Match 'a' values within upper and lower limits. +t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:[ 1, 2 ] } ); // Will match because 'a' is in range. +t.save( { a:9 } ); // Will not match because 'a' is not in range. +// Only one document matches. +assert.eq( 1, t.count( { a:{ $gt:0, $lt:5 } } ) ); + + +// Match two nested fields within an array. +t.drop(); +t.ensureIndex( { 'a.b':1, 'a.c':1 } ); +t.save( { a:[ { b:2, c:3 }, {} ] } ); +// The document does not match because its c value is 3. +assert.eq( 0, t.count( { 'a.b':2, 'a.c':2 } ) ); + + +// $gt:string only matches strings. +t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:'a' } ); // Will match. +t.save( { a:{} } ); // Will not match because {} is not a string. +// Only one document matches. +assert.eq( 1, t.count( { a:{ $gte:'' } } ) ); + + +// $lte:date only matches dates. +t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:new Date( 1 ) } ); // Will match. +t.save( { a:true } ); // Will not match because 'true' is not a date. +// Only one document matches. +assert.eq( 1, t.count( { a:{ $lte:new Date( 1 ) } } ) ); + + +// Querying for 'undefined' triggers an error. +t.drop(); +t.ensureIndex( { a:1 } ); +assert.throws( function() { t.count( { a:undefined } ); } ); + + +// Count using a descending order index. +t.drop(); +t.ensureIndex( { a:-1 } ); +t.save( { a:1 } ); +t.save( { a:2 } ); +t.save( { a:3 } ); +assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); +assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); +assert.eq( 2, t.count( { a:{ $lte:2 } } ) ); +assert.eq( 2, t.count( { a:{ $lt:3 } } ) ); + + +// Count using a compound index. 
+t.drop(); +t.ensureIndex( { a:1, b:1 } ); +t.save( { a:1, b:2 } ); +t.save( { a:2, b:1 } ); +t.save( { a:2, b:3 } ); +t.save( { a:3, b:4 } ); +assert.eq( 1, t.count( { a:1 })); +assert.eq( 2, t.count( { a:2 })); +assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); +assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); +assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) ); +assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) ); +assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) ); + + +// Count using a compound descending order index. +t.drop(); +t.ensureIndex( { a:1, b:-1 } ); +t.save( { a:1, b:2 } ); +t.save( { a:2, b:1 } ); +t.save( { a:2, b:3 } ); +t.save( { a:3, b:4 } ); +assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); +assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); +assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) ); +assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) ); +assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) ); + + +// Count with a multikey value. +t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:[ 1, 2 ] } ); +assert.eq( 1, t.count( { a:{ $gt:0, $lte:2 } } ) ); + + +// Count with a match constraint on an unindexed field. 
+t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:1, b:1 } ); +t.save( { a:1, b:2 } ); +assert.eq( 1, t.count( { a:1, $where:'this.b == 1' } ) ); diff --git a/jstests/core/coveredIndex1.js b/jstests/core/coveredIndex1.js new file mode 100644 index 00000000000..ce11f89ceed --- /dev/null +++ b/jstests/core/coveredIndex1.js @@ -0,0 +1,64 @@ + +t = db["jstests_coveredIndex1"]; +t.drop(); + +t.save({fn: "john", ln: "doe"}) +t.save({fn: "jack", ln: "doe"}) +t.save({fn: "john", ln: "smith"}) +t.save({fn: "jack", ln: "black"}) +t.save({fn: "bob", ln: "murray"}) +t.save({fn: "aaa", ln: "bbb", obj: {a: 1, b: "blah"}}) +assert.eq( t.findOne({ln: "doe"}).fn, "john", "Cannot find right record" ); +assert.eq( t.count(), 6, "Not right length" ); + +// use simple index +t.ensureIndex({ln: 1}); +assert.eq( t.find({ln: "doe"}).explain().indexOnly, false, "Find using covered index but all fields are returned"); +assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find using covered index but _id is returned"); +assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); + +// this time, without a query spec +// SERVER-2109 +//assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); +assert.eq( t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().indexOnly, true, "Find is not using covered index"); + +// use compound index +t.dropIndex({ln: 1}) +t.ensureIndex({ln: 1, fn: 1}); +// return 1 field +assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); +// return both fields, multiple docs returned +assert.eq( t.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); +// match 1 record using both fields +assert.eq( t.find({ln: "doe", fn: "john"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); +// change ordering +assert.eq( 
t.find({fn: "john", ln: "doe"}, {fn: 1, ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); +// ask from 2nd index key +assert.eq( t.find({fn: "john"}, {fn: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key"); + +// repeat above but with _id field +t.dropIndex({ln: 1, fn: 1}) +t.ensureIndex({_id: 1, ln: 1}); +// return 1 field +assert.eq( t.find({_id: 123, ln: "doe"}, {_id: 1}).explain().indexOnly, true, "Find is not using covered index"); +// match 1 record using both fields +assert.eq( t.find({_id: 123, ln: "doe"}, {ln: 1}).explain().indexOnly, true, "Find is not using covered index"); +// change ordering +assert.eq( t.find({ln: "doe", _id: 123}, {ln: 1, _id: 1}).explain().indexOnly, true, "Find is not using covered index"); +// ask from 2nd index key +assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key"); + +// repeat above but with embedded obj +t.dropIndex({_id: 1, ln: 1}) +t.ensureIndex({obj: 1}); +assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object"); +assert.eq( t.find({obj: {a: 1, b: "blah"}}).explain().indexOnly, false, "Index doesnt have all fields to cover"); +assert.eq( t.find({obj: {a: 1, b: "blah"}}, {obj: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); + +// repeat above but with index on sub obj field +t.dropIndex({obj: 1}); +t.ensureIndex({"obj.a": 1, "obj.b": 1}) +assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object"); + +assert(t.validate().valid); + diff --git a/jstests/core/coveredIndex2.js b/jstests/core/coveredIndex2.js new file mode 100644 index 00000000000..56a23f43211 --- /dev/null +++ b/jstests/core/coveredIndex2.js @@ -0,0 +1,18 @@ +t = db["jstests_coveredIndex2"]; +t.drop(); + +t.save({a: 1}) +t.save({a: 2}) +assert.eq( t.findOne({a: 1}).a, 1, 
"Cannot find right record" ); +assert.eq( t.count(), 2, "Not right length" ); + +// use simple index +t.ensureIndex({a: 1}); +assert.eq( t.find({a:1}).explain().indexOnly, false, "Find using covered index but all fields are returned"); +assert.eq( t.find({a:1}, {a: 1}).explain().indexOnly, false, "Find using covered index but _id is returned"); +assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); + +// add multikey +t.save({a:[3,4]}) +assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index even after multikey insert"); + diff --git a/jstests/core/coveredIndex3.js b/jstests/core/coveredIndex3.js new file mode 100644 index 00000000000..66180342605 --- /dev/null +++ b/jstests/core/coveredIndex3.js @@ -0,0 +1,54 @@ +// Check proper covered index handling when query and processGetMore yield. +// SERVER-4975 + +if ( 0 ) { // SERVER-4975 + +t = db.jstests_coveredIndex3; +t2 = db.jstests_coveredIndex3_other; +t.drop(); +t2.drop(); + +function doTest( batchSize ) { + + // Insert an array, which will make the { a:1 } index multikey and should disable covered index + // matching. + p1 = startParallelShell( + 'for( i = 0; i < 60; ++i ) { \ + db.jstests_coveredIndex3.save( { a:[ 2000, 2001 ] } ); \ + sleep( 300 ); \ + }' + ); + + // Frequent writes cause the find operation to yield. + p2 = startParallelShell( + 'for( i = 0; i < 1800; ++i ) { \ + db.jstests_coveredIndex3_other.save( {} ); \ + sleep( 10 ); \ + }' + ); + + for( i = 0; i < 30; ++i ) { + t.drop(); + t.ensureIndex( { a:1 } ); + + for( j = 0; j < 1000; ++j ) { + t.save( { a:j } ); + } + + c = t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).batchSize( batchSize ); + while( c.hasNext() ) { + o = c.next(); + // If o contains a high numeric 'a' value, it must come from an array saved in p1. + assert( !( o.a > 1500 ), 'improper object returned ' + tojson( o ) ); + } + } + + p1(); + p2(); + +} + +doTest( 2000 ); // Test query. 
+doTest( 500 ); // Try to test getMore - not clear if this will actually trigger the getMore issue. + +} diff --git a/jstests/core/coveredIndex4.js b/jstests/core/coveredIndex4.js new file mode 100644 index 00000000000..136eba603cf --- /dev/null +++ b/jstests/core/coveredIndex4.js @@ -0,0 +1,40 @@ +// Test covered index projection with $or clause, specifically in getMore +// SERVER-4980 + +t = db.jstests_coveredIndex4; +t.drop(); + +t.ensureIndex( { a:1 } ); +t.ensureIndex( { b:1 } ); + +orClause = []; +for( i = 0; i < 200; ++i ) { + if ( i % 2 == 0 ) { + t.save( { a:i } ); + orClause.push( { a:i } ); + } + else { + t.save( { b:i } ); + orClause.push( { b:i } ); + } +} + +c = t.find( { $or:orClause }, { _id:0, a:1 } ); + +// No odd values of a were saved, so we should not see any in the results. +while( c.hasNext() ) { + o = c.next(); + if ( o.a ) { + assert.eq( 0, o.a % 2, 'unexpected result: ' + tojson( o ) ); + } +} + +c = t.find( { $or:orClause }, { _id:0, b:1 } ); + +// No even values of b were saved, so we should not see any in the results. +while( c.hasNext() ) { + o = c.next(); + if ( o.b ) { + assert.eq( 1, o.b % 2, 'unexpected result: ' + tojson( o ) ); + } +} diff --git a/jstests/core/coveredIndex5.js b/jstests/core/coveredIndex5.js new file mode 100644 index 00000000000..ee383cd93e2 --- /dev/null +++ b/jstests/core/coveredIndex5.js @@ -0,0 +1,70 @@ +// Test use of covered indexes when there are multiple candidate indexes. 
+ +t = db.jstests_coveredIndex5; +t.drop(); + +t.ensureIndex( { a:1, b:1 } ); +t.ensureIndex( { a:1, c:1 } ); + +function checkFields( query, projection ) { + t.ensureIndex( { z:1 } ); // clear query patterns + t.dropIndex( { z:1 } ); + + results = t.find( query, projection ).toArray(); + + expectedFields = []; + for ( k in projection ) { + if ( k != '_id' ) { + expectedFields.push( k ); + } + } + + vals = []; + for ( i in results ) { + r = results[ i ]; + printjson(r); + assert.eq( 0, r.a ); + assert.eq( expectedFields, Object.keySet( r ) ); + for ( k in projection ) { + if ( k != '_id' && k != 'a' ) { + vals.push( r[ k ] ); + } + } + } + + if ( vals.length != 0 ) { + vals.sort(); + assert.eq( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], vals ); + } +} + +function checkCursorCovered( cursor, covered, count, query, projection ) { + checkFields( query, projection ); + explain = t.find( query, projection ).explain( true ); + if (covered) { + assert.eq( cursor, explain.cursor ); + } + assert.eq( covered, explain.indexOnly ); + assert.eq( count, explain.n ); +} + +for( i = 0; i < 10; ++i ) { + t.save( { a:0, b:i, c:9-i } ); +} + +checkCursorCovered( 'BtreeCursor a_1_b_1', true, 10, { a:0 }, { _id:0, a:1 } ); + +checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1 } ); +checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, b:1 } ); + +// Covered index on a,c not preferentially selected. 
+checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, c:1 } ); + +t.save( { a:0, c:[ 1, 2 ] } ); +t.save( { a:1 } ); +checkCursorCovered( 'BtreeCursor a_1_b_1', false, 11, { a:0, d:null }, { _id:0, a:1 } ); + +t.save( { a:0, b:[ 1, 2 ] } ); +t.save( { a:1 } ); +checkCursorCovered( 'BtreeCursor a_1_b_1', false, 12, { a:0, d:null }, { _id:0, a:1 } ); + diff --git a/jstests/core/covered_index_compound_1.js b/jstests/core/covered_index_compound_1.js new file mode 100644 index 00000000000..7e529785d12 --- /dev/null +++ b/jstests/core/covered_index_compound_1.js @@ -0,0 +1,45 @@ +// Compound index covered query tests + +var coll = db.getCollection("covered_compound_1") +coll.drop() +for (i=0;i<100;i++) { + coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)}) +} +coll.ensureIndex({a:1,b:-1,c:1}) + +// Test equality - all indexed fields queried and projected +var plan = coll.find({a:10, b:"strvar_10", c:0}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query") + +// Test query on subset of fields queried and project all +var plan = coll.find({a:26, b:"strvar_0"}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.2 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.2 - nscannedObjects should be 0 for covered query") + +// Test query on all fields queried and project subset +var plan = coll.find({a:38, b:"strvar_12", c: 8}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.3 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.3 - nscannedObjects should be 0 for covered query") + +// Test no query +var plan = coll.find({}, {b:1, c:1, _id:0}).hint({a:1, b:-1, 
c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.4 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.4 - nscannedObjects should be 0 for covered query") + +// Test range query +var plan = coll.find({a:{$gt:25,$lt:43}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.5 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.5 - nscannedObjects should be 0 for covered query") + +// Test in query +var plan = coll.find({a:38, b:"strvar_12", c:{$in:[5,8]}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.6 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.6 - nscannedObjects should be 0 for covered query") + +// Test no result +var plan = coll.find({a:38, b:"strvar_12", c:55},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.7 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.7 - nscannedObjects should be 0 for covered query") + +print('all tests passed') diff --git a/jstests/core/covered_index_geo_1.js b/jstests/core/covered_index_geo_1.js new file mode 100644 index 00000000000..1d647dfa94c --- /dev/null +++ b/jstests/core/covered_index_geo_1.js @@ -0,0 +1,18 @@ +var coll = db.getCollection("covered_geo_1") +coll.drop() + +coll.insert({_id : 1, loc : [ 5 , 5 ], type : "type1"}) +coll.insert({_id : 2, loc : [ 6 , 6 ], type : "type2"}) +coll.insert({_id : 3, loc : [ 7 , 7 ], type : "type3"}) + +coll.ensureIndex({loc : "2d", type : 1}); + +var plan = coll.find({loc : [ 6 , 6 ]}, {loc:1, type:1, _id:0}).hint({loc:"2d", type:1}).explain(); +assert.eq(false, plan.indexOnly, "geo.1.1 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "geo.1.1 - nscannedObjects should not be 0 for a non covered query") + +var 
plan = coll.find({loc : [ 6 , 6 ]}, {type:1, _id:0}).hint({loc:"2d", type:1}).explain(); +assert.eq(false, plan.indexOnly, "geo.1.2 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "geo.1.2 - nscannedObjects should not be 0 for a non covered query") + +print("all tests passed") \ No newline at end of file diff --git a/jstests/core/covered_index_geo_2.js b/jstests/core/covered_index_geo_2.js new file mode 100644 index 00000000000..52f610b7e64 --- /dev/null +++ b/jstests/core/covered_index_geo_2.js @@ -0,0 +1,22 @@ +var coll = db.getCollection("covered_geo_2") +coll.drop() + +coll.insert({_id : 1, loc1 : [ 5 , 5 ], type1 : "type1", + loc2 : [ 5 , 5 ], type2 : 1}) +coll.insert({_id : 2, loc1 : [ 6 , 6 ], type1 : "type2", + loc2 : [ 5 , 5 ], type2 : 2}) +coll.insert({_id : 3, loc1 : [ 7 , 7 ], type1 : "type3", + loc2 : [ 5 , 5 ], type2 : 3}) + +coll.ensureIndex({loc1 : "2dsphere", type1 : 1}); +coll.ensureIndex({type2: 1, loc2 : "2dsphere"}); + +var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {loc1:1, type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain(); +assert.eq(false, plan.indexOnly, "geo.2.1 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "geo.2.1 - nscannedObjects should not be 0 for a non covered query") + +var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain(); +assert.eq(false, plan.indexOnly, "geo.2.2 - indexOnly should be false for a non covered query") +assert.neq(0, plan.nscannedObjects, "geo.2.2 - nscannedObjects should not be 0 for a non covered query") + +print("all tests passed") diff --git a/jstests/core/covered_index_negative_1.js b/jstests/core/covered_index_negative_1.js new file mode 100644 index 00000000000..ab03e7566f6 --- /dev/null +++ b/jstests/core/covered_index_negative_1.js @@ -0,0 +1,61 @@ +// Miscellaneous covered query tests. 
Mostly negative tests +// These are tests where we do not expect the query to be a +// covered index query. Hence we expect indexOnly=false and +// nscannedObjects > 0 + +var coll = db.getCollection("covered_negative_1") +coll.drop() +for (i=0;i<100;i++) { + coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10), d: i*10, e: [i, i%10], + f:i}) +} +coll.ensureIndex({a:1,b:-1,c:1}) +coll.ensureIndex({e:1}) +coll.ensureIndex({d:1}) +coll.ensureIndex({f:"hashed"}) + +// Test no projection +var plan = coll.find({a:10, b:"strvar_10", c:0}).hint({a:1, b:-1, c:1}).explain() +assert.eq(false, plan.indexOnly, "negative.1.1 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.1 - nscannedObjects should not be 0 for a non covered query") + +// Test projection and not excluding _id +var plan = coll.find({a:10, b:"strvar_10", c:0},{a:1, b:1, c:1}).hint({a:1, b:-1, c:1}).explain() +assert.eq(false, plan.indexOnly, "negative.1.2 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.2 - nscannedObjects should not be 0 for a non covered query") + +// Test projection of non-indexed field +var plan = coll.find({d:100},{d:1, c:1, _id:0}).hint({d:1}).explain() +assert.eq(false, plan.indexOnly, "negative.1.3 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.3 - nscannedObjects should not be 0 for a non covered query") + +// Test query and projection on a multi-key index +var plan = coll.find({e:99},{e:1, _id:0}).hint({e:1}).explain() +assert.eq(false, plan.indexOnly, "negative.1.4 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.4 - nscannedObjects should not be 0 for a non covered query") + +// Commenting out negative.1.5 and 1.6 pending fix in SERVER-8650 +// // Test projection and $natural sort +// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, 
b:-1, c:1}).explain() +// // indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8561 +// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered query") +// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non covered query") + +// // Test sort on non-indexed field +// var plan = coll.find({d:{$lt:1000}},{d:1, _id:0}).sort({c:1}).hint({d:1}).explain() +// //indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562 +// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered query") +// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non covered query") + +// Test query on non-indexed field +var plan = coll.find({d:{$lt:1000}},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() +//indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562 +// assert.eq(true, plan.indexOnly, "negative.1.7 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.7 - nscannedObjects should not be 0 for a non covered query") + +// Test query on hashed indexed field +var plan = coll.find({f:10},{f:1, _id:0}).hint({f:"hashed"}).explain() +assert.eq(false, plan.indexOnly, "negative.1.8 - indexOnly should be false on a non covered query") +assert.neq(0, plan.nscannedObjects, "negative.1.8 - nscannedObjects should not be 0 for a non covered query") + +print('all tests passed') diff --git a/jstests/core/covered_index_simple_1.js b/jstests/core/covered_index_simple_1.js new file mode 100644 index 00000000000..44e3c00a9f8 --- /dev/null +++ b/jstests/core/covered_index_simple_1.js @@ -0,0 +1,55 @@ +// Simple covered index query test + +var coll = db.getCollection("covered_simple_1") +coll.drop() +for (i=0;i<10;i++) { + coll.insert({foo:i}) +} +for (i=0;i<10;i++) { + coll.insert({foo:i}) +} +for 
(i=0;i<5;i++) { + coll.insert({bar:i}) +} +coll.insert({foo:"string"}) +coll.insert({foo:{bar:1}}) +coll.insert({foo:null}) +coll.ensureIndex({foo:1}) + +// Test equality with int value +var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.1 - nscannedObjects should be 0 for covered query") + +// Test equality with string value +var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.2 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.2 - nscannedObjects should be 0 for covered query") + +// Test equality with doc value +var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.3 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.3 - nscannedObjects should be 0 for covered query") + +// Test no query +var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.4 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.4 - nscannedObjects should be 0 for covered query") + +// Test range query +var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.5 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.5 - nscannedObjects should be 0 for covered query") + +// Test in query +var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.6 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.6 - nscannedObjects should be 0 for covered query") + +// Test no return +var plan = coll.find({foo:"2"}, {foo:1, 
_id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.1.7 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.1.7 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') + diff --git a/jstests/core/covered_index_simple_2.js b/jstests/core/covered_index_simple_2.js new file mode 100644 index 00000000000..313cca439d8 --- /dev/null +++ b/jstests/core/covered_index_simple_2.js @@ -0,0 +1,43 @@ +// Simple covered index query test with unique index + +var coll = db.getCollection("covered_simple_2") +coll.drop() +for (i=0;i<10;i++) { + coll.insert({foo:i}) +} +coll.insert({foo:"string"}) +coll.insert({foo:{bar:1}}) +coll.insert({foo:null}) +coll.ensureIndex({foo:1},{unique:true}) + +// Test equality with int value +var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.1 - nscannedObjects should be 0 for covered query") + +// Test equality with string value +var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.2 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.2 - nscannedObjects should be 0 for covered query") + +// Test equality with int value on a dotted field +var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.3 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.3 - nscannedObjects should be 0 for covered query") + +// Test no query +var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.4 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.4 - nscannedObjects should be 0 for covered query") + +// Test range query +var plan = 
coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.5 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.5 - nscannedObjects should be 0 for covered query") + +// Test in query +var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "simple.2.6 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.2.6 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') diff --git a/jstests/core/covered_index_simple_3.js b/jstests/core/covered_index_simple_3.js new file mode 100644 index 00000000000..ee586540ea4 --- /dev/null +++ b/jstests/core/covered_index_simple_3.js @@ -0,0 +1,57 @@ +// Simple covered index query test with a unique sparse index + +var coll = db.getCollection("covered_simple_3"); +coll.drop(); +for (i=0;i<10;i++) { + coll.insert({foo:i}); +} +for (i=0;i<5;i++) { + coll.insert({bar:i}); +} +coll.insert({foo:"string"}); +coll.insert({foo:{bar:1}}); +coll.insert({foo:null}); +coll.ensureIndex({foo:1}, {sparse:true, unique:true}); + +// Test equality with int value +var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.1 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.1 - nscannedObjects should be 0 for covered query"); + +// Test equality with string value +var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.2 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.2 - nscannedObjects should be 0 for covered query"); + +// Test equality with int value on a dotted field +var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.3 - indexOnly should be true on covered query"); 
+assert.eq(0, plan.nscannedObjects, "simple.3.3 - nscannedObjects should be 0 for covered query"); + +// Test no query +var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.4 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.4 - nscannedObjects should be 0 for covered query"); + +// Test range query +var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.5 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.5 - nscannedObjects should be 0 for covered query"); + +// Test in query +var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.6 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.6 - nscannedObjects should be 0 for covered query"); + +// Test $exists true +var plan = coll.find({foo:{$exists:true}}, {foo:1, _id:0}).hint({foo:1}).explain(); +assert.eq(true, plan.indexOnly, "simple.3.7 - indexOnly should be true on covered query"); +assert.eq(0, plan.nscannedObjects, "simple.3.7 - nscannedObjects should be 0 for covered query"); + +// SERVER-12262: currently $nin will always use a collection scan +//var plan = coll.find({foo:{$nin:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() +//assert.eq(true, plan.indexOnly, "simple.3.8 - indexOnly should be true on covered query") +// this should be 0 but is not due to bug https://jira.mongodb.org/browse/SERVER-3187 +//assert.eq(13, plan.nscannedObjects, "simple.3.8 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') diff --git a/jstests/core/covered_index_simple_id.js b/jstests/core/covered_index_simple_id.js new file mode 100644 index 00000000000..c7f6811a33c --- /dev/null +++ b/jstests/core/covered_index_simple_id.js @@ -0,0 +1,42 @@ +// Simple covered index query test + +var 
coll = db.getCollection("covered_simple_id") +coll.drop() +for (i=0;i<10;i++) { + coll.insert({_id:i}) +} +coll.insert({_id:"string"}) +coll.insert({_id:{bar:1}}) +coll.insert({_id:null}) + +// Test equality with int value +var plan = coll.find({_id:1}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.1 - nscannedObjects should be 0 for covered query") + +// Test equality with string value +var plan = coll.find({_id:"string"}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.2 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.2 - nscannedObjects should be 0 for covered query") + +// Test equality with int value on a dotted field +var plan = coll.find({_id:{bar:1}}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.3 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.3 - nscannedObjects should be 0 for covered query") + +// Test no query +var plan = coll.find({}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.4 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.4 - nscannedObjects should be 0 for covered query") + +// Test range query +var plan = coll.find({_id:{$gt:2,$lt:6}}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.5 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.5 - nscannedObjects should be 0 for covered query") + +// Test in query +var plan = coll.find({_id:{$in:[5,8]}}, {_id:1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "simple.id.6 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "simple.id.6 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') diff --git 
a/jstests/core/covered_index_sort_1.js b/jstests/core/covered_index_sort_1.js new file mode 100644 index 00000000000..adfcb5c6cb6 --- /dev/null +++ b/jstests/core/covered_index_sort_1.js @@ -0,0 +1,34 @@ +// Simple covered index query test with sort + +var coll = db.getCollection("covered_sort_1") +coll.drop() +for (i=0;i<10;i++) { + coll.insert({foo:i}) +} +for (i=0;i<10;i++) { + coll.insert({foo:i}) +} +for (i=0;i<5;i++) { + coll.insert({bar:i}) +} +coll.insert({foo:"1"}) +coll.insert({foo:{bar:1}}) +coll.insert({foo:null}) +coll.ensureIndex({foo:1}) + +// Test no query and sort ascending +var plan = coll.find({}, {foo:1, _id:0}).sort({foo:1}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "sort.1.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "sort.1.1 - nscannedObjects should be 0 for covered query") + +// Test no query and sort descending +var plan = coll.find({}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "sort.1.2 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "sort.1.2 - nscannedObjects should be 0 for covered query") + +// Test range query with sort +var plan = coll.find({foo:{$gt:2}}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain() +assert.eq(true, plan.indexOnly, "sort.1.5 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "sort.1.5 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') \ No newline at end of file diff --git a/jstests/core/covered_index_sort_2.js b/jstests/core/covered_index_sort_2.js new file mode 100644 index 00000000000..e5dd48b47af --- /dev/null +++ b/jstests/core/covered_index_sort_2.js @@ -0,0 +1,17 @@ +// Simple covered index query test with sort on _id + +var coll = db.getCollection("covered_sort_2") +coll.drop() +for (i=0;i<10;i++) { + coll.insert({_id:i}) +} +coll.insert({_id:"1"}) +coll.insert({_id:{bar:1}}) +coll.insert({_id:null}) + +// 
Test no query +var plan = coll.find({}, {_id:1}).sort({_id:-1}).hint({_id:1}).explain() +assert.eq(true, plan.indexOnly, "sort.2.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "sort.2.1 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') \ No newline at end of file diff --git a/jstests/core/covered_index_sort_3.js b/jstests/core/covered_index_sort_3.js new file mode 100644 index 00000000000..8f5986c4d76 --- /dev/null +++ b/jstests/core/covered_index_sort_3.js @@ -0,0 +1,16 @@ +// Compound index covered query tests with sort + +var coll = db.getCollection("covered_sort_3") +coll.drop() +for (i=0;i<100;i++) { + coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)}) +} +coll.insert +coll.ensureIndex({a:1,b:-1,c:1}) + +// Test no query, sort on all fields in index order +var plan = coll.find({}, {b:1, c:1, _id:0}).sort({a:1,b:-1,c:1}).hint({a:1, b:-1, c:1}).explain() +assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query") +assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query") + +print ('all tests pass') diff --git a/jstests/core/create_indexes.js b/jstests/core/create_indexes.js new file mode 100644 index 00000000000..6c54c4de0cb --- /dev/null +++ b/jstests/core/create_indexes.js @@ -0,0 +1,83 @@ +t = db.create_indexes; +t.drop(); + +// TODO: revisit this after createIndexes api stabilizes. 
+var isMongos = ("isdbgrid" == db.runCommand("ismaster").msg); +var extractResult = function(obj) { + if (!isMongos) return obj; + + // Sample mongos format: + // { + // raw: { + // "localhost:30000": { + // createdCollectionAutomatically: false, + // numIndexesBefore: 3, + // numIndexesAfter: 5, + // ok: 1 + // } + // }, + // ok: 1 + // } + + var numFields = 0; + var result = null; + for (var field in obj.raw) { + result = obj.raw[field]; + numFields++; + } + + assert.neq(null, result); + assert.eq(1, numFields); + return result; +}; + + +res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } ); +res = extractResult( res ); +assert( res.createdCollectionAutomatically ); +assert.eq( 1, res.numIndexesBefore ); +assert.eq( 2, res.numIndexesAfter ); + +res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } ); +res = extractResult( res ); +assert.eq( 2, res.numIndexesBefore ); +assert( res.noChangesMade ); + +res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" }, + { key : { "y" : 1 }, name : "y_1" } ] } ); +res = extractResult( res ); +assert( !res.createdCollectionAutomatically ); +assert.eq( 2, res.numIndexesBefore ); +assert.eq( 3, res.numIndexesAfter ); + +res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" }, + { key : { "b" : 1 }, name : "b_1" } ] } ); +res = extractResult( res ); +assert( !res.createdCollectionAutomatically ); +assert.eq( 3, res.numIndexesBefore ); +assert.eq( 5, res.numIndexesAfter ); + +res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" }, + { key : { "b" : 1 }, name : "b_1" } ] } ); +res = extractResult( res ); +assert.eq( 5, res.numIndexesBefore ); +assert( res.noChangesMade ); + +res = t.runCommand( "createIndexes", { indexes : [ {} ] } ); +assert( !res.ok ); + +res = t.runCommand( "createIndexes", { indexes : [ {} , { key : { m : 1 }, name : "asd" } ] } ); +assert( 
!res.ok ); + +assert.eq( 5, t.getIndexes().length ); + +res = t.runCommand( "createIndexes", + { indexes : [ { key : { "c" : 1 }, sparse : true, name : "c_1" } ] } ) +assert.eq( 6, t.getIndexes().length ); +assert.eq( 1, t.getIndexes().filter( function(z){ return z.sparse; } ).length ); + +res = t.runCommand( "createIndexes", + { indexes : [ { key : { "x" : "foo" }, name : "x_1" } ] } ); +assert( !res.ok ) + +assert.eq( 6, t.getIndexes().length ); diff --git a/jstests/core/currentop.js b/jstests/core/currentop.js new file mode 100644 index 00000000000..e1d2e73cc7a --- /dev/null +++ b/jstests/core/currentop.js @@ -0,0 +1,79 @@ +print("BEGIN currentop.js"); + +// test basic currentop functionality + querying of nested documents +t = db.jstests_currentop +t.drop(); + +for(i=0;i<100;i++) { + t.save({ "num": i }); +} + +print("count:" + t.count()); + +function ops(q) { + printjson( db.currentOp().inprog ); + return db.currentOp(q).inprog; +} + +print("start shell"); + +// sleep for a second for each (of 100) documents; can be killed in between documents & test should complete before 100 seconds +s1 = startParallelShell("db.jstests_currentop.count( { '$where': function() { sleep(1000); } } )"); + +print("sleep"); +sleep(1000); + +print("inprog:"); +printjson(db.currentOp().inprog) +print() +sleep(1); +print("inprog:"); +printjson(db.currentOp().inprog) +print() + +// need to wait for read to start +print("wait have some ops"); +assert.soon( function(){ + return ops( { "locks.^test": "r", "ns": "test.jstests_currentop" } ).length + + ops({ "locks.^test": "R", "ns": "test.jstests_currentop" }).length >= 1; +}, "have_some_ops"); +print("ok"); + +s2 = startParallelShell( "db.jstests_currentop.update({ '$where': function() { sleep(150); } }," + + " { 'num': 1 }, false, true );" ); + +o = []; + +function f() { + o = ops({ "ns": "test.jstests_currentop" }); + + printjson(o); + + var writes = ops({ "locks.^test": "w", "ns": "test.jstests_currentop" }).length; + + var readops = 
ops({ "locks.^test": "r", "ns": "test.jstests_currentop" }); + print("readops:"); + printjson(readops); + var reads = readops.length; + + print("total: " + o.length + " w: " + writes + " r:" + reads); + + return o.length > writes && o.length > reads; +} + +print("go"); + +assert.soon( f, "f" ); + +// avoid waiting for the operations to complete (if soon succeeded) +for(var i in o) { + db.killOp(o[i].opid); +} + +start = new Date(); + +s1(); +s2(); + +// don't want to pass if timeout killed the js function +assert( ( new Date() ) - start < 30000 ); diff --git a/jstests/core/cursor1.js b/jstests/core/cursor1.js new file mode 100644 index 00000000000..8448752bb0c --- /dev/null +++ b/jstests/core/cursor1.js @@ -0,0 +1,20 @@ + +t = db.cursor1 +t.drop(); + +big = ""; +while ( big.length < 50000 ) + big += "asdasdasdasdsdsdadsasdasdasD"; + +num = Math.ceil( 10000000 / big.length ); + +for ( var i=0; i 0; }).sort({ _id: -1 }).explain() + num = ex.n + end = new Date() + } + catch (e) { + print("cursora.js FAIL " + e); + join(); + throw e; + } + + join() + + //print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) ) + assert.eq( 0 , t.count() , "after remove: " + tojson( ex ) ) + // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more reliable so cen re-enable assert + if ( n == num ) + print( "cursora.js warning: shouldn't have counted all n: " + n + " num: " + num ); +} + +run( 1500 ) +run( 5000 ) +run( 1500 , true ) +run( 5000 , true ) +print("cursora.js SUCCESS") diff --git a/jstests/core/cursorb.js b/jstests/core/cursorb.js new file mode 100644 index 00000000000..65e356e89cb --- /dev/null +++ b/jstests/core/cursorb.js @@ -0,0 +1,17 @@ +// The 'cursor not found in map -1' warning is not logged when get more exhausts a client cursor. +// SERVER-6931 + +t = db.jstests_cursorb; +t.drop(); + +// Exhaust a client cursor in get more. 
+for( i = 0; i < 200; ++i ) { + t.save( { a:i } ); +} +t.find().itcount(); + +// Check that the 'cursor not found in map -1' message is not printed. This message indicates an +// attempt to look up a cursor with an invalid id and should never appear in the log. +log = db.adminCommand( { getLog:'global' } ).log +log.forEach( function( line ) { assert( !line.match( /cursor not found in map -1 / ), + 'Cursor map lookup with id -1.' ); } ); diff --git a/jstests/core/datasize.js b/jstests/core/datasize.js new file mode 100644 index 00000000000..13e9f11bf0c --- /dev/null +++ b/jstests/core/datasize.js @@ -0,0 +1,35 @@ +// test end-to-end data allocation without powerOf2Sizes enabled +f = db.jstests_datasize; +f.drop(); + +// this test requires usePowerOf2Sizes to be off +db.createCollection( f.getName(), { usePowerOf2Sizes: false } ); +assert.eq(0, f.stats().userFlags); + +assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); +f.save( {qq:'c'} ); +assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); +f.save( {qq:'fg'} ); +assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); + +f.drop(); +db.createCollection( f.getName(), { usePowerOf2Sizes: false} ); + +f.ensureIndex( {qq:1} ); +assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); +f.save( {qq:'c'} ); +assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); +f.save( {qq:'fg'} ); +assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); + +assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}} ).ok ); + +assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'z' }} ).size ); +assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }} ).size ); +assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{qq:1}} ).size ); +assert.eq( 36, db.runCommand( 
{datasize:"test.jstests_datasize", min:{qq:'d'}, max:{qq:'z' }, keyPattern:{qq:1}} ).size ); + +assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'c' }} ).size ); +assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'d' }} ).size ); + +assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{a:1}} ).ok ); diff --git a/jstests/core/datasize2.js b/jstests/core/datasize2.js new file mode 100644 index 00000000000..103cb2096ee --- /dev/null +++ b/jstests/core/datasize2.js @@ -0,0 +1,27 @@ + +t = db.datasize2 +t.drop(); + +N = 1000 +for ( i=0; i after.mem.mapped , "closeAllDatabases does something before:" + tojson( before.mem ) + " after:" + tojson( after.mem ) + " cmd res:" + tojson( cmdres ) ); + print( before.mem.mapped + " -->> " + after.mem.mapped ); +} +else { + print( "can't test serverStatus on this machine" ); +} + +t.save( { x : 1 } ); + +res = db._adminCommand( "listDatabases" ); +assert( res.databases && res.databases.length > 0 , "listDatabases 1 " + tojson(res) ); + +now = new Date(); +x = db._adminCommand( "ismaster" ); +assert( x.ismaster , "ismaster failed: " + tojson( x ) ) +assert( x.localTime, "ismaster didn't include time: " + tojson(x)) +localTimeSkew = x.localTime - now +if ( localTimeSkew >= 50 ) { + print( "Warning: localTimeSkew " + localTimeSkew + " > 50ms." 
) +} +assert.lt( localTimeSkew, 500, "isMaster.localTime" ) + +before = db.runCommand( "serverStatus" ) +print(before.uptimeEstimate); +sleep( 5000 ) +after = db.runCommand( "serverStatus" ) +print(after.uptimeEstimate); +assert.lt( 2 , after.uptimeEstimate , "up1" ) +assert.gt( after.uptimeEstimate , before.uptimeEstimate , "up2" ) + +// Test startup_log +var stats = db.getSisterDB( "local" ).startup_log.stats(); +assert(stats.capped); + +var latestStartUpLog = db.getSisterDB( "local" ).startup_log.find().sort( { $natural: -1 } ).limit(1).next(); +var serverStatus = db._adminCommand( "serverStatus" ); +var cmdLine = db._adminCommand( "getCmdLineOpts" ).parsed; + +// Test that the startup log has the expected keys +var verbose = false; +var expectedKeys = ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"]; +var keys = Object.keySet(latestStartUpLog); +assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed'); + +// Tests _id implicitly - should be comprised of host-timestamp +// Setup expected startTime and startTimeLocal from the supplied timestamp +var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp +var _idUptime = _id.pop(); +var _idHost = _id.join('-'); +var uptimeSinceEpochRounded = Math.floor(_idUptime/1000) * 1000; +var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime + +assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id"); +assert.eq(serverStatus.host.split(':')[0], latestStartUpLog.hostname, "Hostname doesn't match one in server status"); +assert.closeWithinMS(startTime, latestStartUpLog.startTime, + "StartTime doesn't match one from _id", 2000); // Expect less than 2 sec delta +assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts"); +assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus"); + +// Test buildinfo +var buildinfo = db.runCommand( 
"buildinfo" ); +delete buildinfo.ok; // Delete extra meta info not in startup_log +var isMaster = db._adminCommand( "ismaster" ); + +// Test buildinfo has the expected keys +var expectedKeys = ["version", "gitVersion", "OpenSSLVersion", "sysInfo", "loaderFlags", "compilerFlags", "allocator", "versionArray", "javascriptEngine", "bits", "debug", "maxBsonObjectSize"]; +var keys = Object.keySet(latestStartUpLog.buildinfo); +// Disabled to check +assert(arrayIsSubset(expectedKeys, keys), "buildinfo keys failed! \n expected:\t" + expectedKeys + "\n actual:\t" + keys); +assert.eq(buildinfo, latestStartUpLog.buildinfo, "buildinfo doesn't match that from buildinfo command"); + +// Test version and version Array +var version = latestStartUpLog.buildinfo.version.split('-')[0]; +var versionArray = latestStartUpLog.buildinfo.versionArray; +var versionArrayCleaned = []; +// Only create a string with 2 dots (2.5.5, not 2.5.5.0) +for (var i = 0; i < (versionArray.length - 1); i++) if (versionArray[i] >= 0) { versionArrayCleaned.push(versionArray[i]); } + +assert.eq(serverStatus.version, latestStartUpLog.buildinfo.version, "Mongo version doesn't match that from ServerStatus"); +assert.eq(version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray"); +assert(["V8", "SpiderMonkey", "Unknown"].indexOf(latestStartUpLog.buildinfo.javascriptEngine) > -1); +assert.eq(isMaster.maxBsonObjectSize, latestStartUpLog.buildinfo.maxBsonObjectSize, "maxBsonObjectSize doesn't match one from ismaster"); diff --git a/jstests/core/dbcase.js b/jstests/core/dbcase.js new file mode 100644 index 00000000000..c3aa466ba17 --- /dev/null +++ b/jstests/core/dbcase.js @@ -0,0 +1,27 @@ +// Check db name duplication constraint SERVER-2111 + +a = db.getSisterDB( "dbcasetest_dbnamea" ) +b = db.getSisterDB( "dbcasetest_dbnameA" ) + +a.dropDatabase(); +b.dropDatabase(); + +assert.writeOK( a.foo.save( { x : 1 } )); + +res = b.foo.save( { x : 1 } ); +assert.writeError( res ); +assert.eq( 
13297, res.getWriteError().code, res.toString() ); + +assert.neq( -1, db.getMongo().getDBNames().indexOf( a.getName() ) ); +assert.eq( -1, db.getMongo().getDBNames().indexOf( b.getName() ) ); +printjson( db.getMongo().getDBs().databases ); + +a.dropDatabase(); +b.dropDatabase(); + +ai = db.getMongo().getDBNames().indexOf( a.getName() ); +bi = db.getMongo().getDBNames().indexOf( b.getName() ); +// One of these dbs may exist if there is a slave active, but they must +// not both exist. +assert( ai == -1 || bi == -1 ); +printjson( db.getMongo().getDBs().databases ); diff --git a/jstests/core/dbcase2.js b/jstests/core/dbcase2.js new file mode 100644 index 00000000000..f9973d98837 --- /dev/null +++ b/jstests/core/dbcase2.js @@ -0,0 +1,9 @@ +// SERVER-2111 Check that an in memory db name will block creation of a db with a similar but differently cased name. + +a = db.getSisterDB( "dbcase2test_dbnamea" ) +b = db.getSisterDB( "dbcase2test_dbnameA" ) + +a.c.count(); +assert.throws( function() { b.c.count() } ); + +assert.eq( -1, db.getMongo().getDBNames().indexOf( "dbcase2test_dbnameA" ) ); diff --git a/jstests/core/dbhash.js b/jstests/core/dbhash.js new file mode 100644 index 00000000000..7fea4b4d50c --- /dev/null +++ b/jstests/core/dbhash.js @@ -0,0 +1,58 @@ + +a = db.dbhasha; +b = db.dbhashb; + +a.drop(); +b.drop(); + +// debug SERVER-761 +db.getCollectionNames().forEach( function( x ) { + v = db[ x ].validate(); + if ( !v.valid ) { + print( x ); + printjson( v ); + } + } ); + +function dbhash( mydb ) { + var ret = mydb.runCommand( "dbhash" ); + assert.commandWorked( ret, "dbhash failure" ); + return ret; +} + +function gh( coll , mydb ){ + if ( ! mydb ) mydb = db; + var x = dbhash( mydb ).collections[coll.getName()]; + if ( ! 
x ) + return ""; + return x; +} + +function dbh( mydb ){ + return dbhash( mydb ).md5; +} + +assert.eq( gh( a ) , gh( b ) , "A1" ); + +a.insert( { _id : 5 } ); +assert.neq( gh( a ) , gh( b ) , "A2" ); + +b.insert( { _id : 5 } ); +assert.eq( gh( a ) , gh( b ) , "A3" ); + +dba = db.getSisterDB( "dbhasha" ); +dbb = db.getSisterDB( "dbhashb" ); + +dba.dropDatabase(); +dbb.dropDatabase(); + +assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" ); +assert.eq( dbh( dba ) , dbh( dbb ) , "C1" ); + +dba.foo.insert( { _id : 5 } ); +assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" ); +assert.neq( dbh( dba ) , dbh( dbb ) , "C2" ); + +dbb.foo.insert( { _id : 5 } ); +assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" ); +assert.eq( dbh( dba ) , dbh( dbb ) , "C3" ); diff --git a/jstests/core/dbhash2.js b/jstests/core/dbhash2.js new file mode 100644 index 00000000000..ac491291c2b --- /dev/null +++ b/jstests/core/dbhash2.js @@ -0,0 +1,22 @@ + +mydb = db.getSisterDB( "config" ); + +t = mydb.foo; +t.drop(); + +t.insert( { x : 1 } ); +res1 = mydb.runCommand( "dbhash" ); +assert( res1.fromCache.indexOf( "config.foo" ) == -1 ); + +res2 = mydb.runCommand( "dbhash" ); +assert( res2.fromCache.indexOf( "config.foo" ) >= 0 ); +assert.eq( res1.collections.foo, res2.collections.foo ); + +t.insert( { x : 2 } ); +res3 = mydb.runCommand( "dbhash" ); +assert( res3.fromCache.indexOf( "config.foo" ) < 0 ); +assert.neq( res1.collections.foo, res3.collections.foo ); + + + + diff --git a/jstests/core/dbref1.js b/jstests/core/dbref1.js new file mode 100644 index 00000000000..4a827662c1a --- /dev/null +++ b/jstests/core/dbref1.js @@ -0,0 +1,10 @@ + +a = db.dbref1a; +b = db.dbref1b; + +a.drop(); +b.drop(); + +a.save( { name : "eliot" } ); +b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } ); +assert.eq( "eliot" , b.findOne().link.fetch().name , "A" ); diff --git a/jstests/core/dbref2.js b/jstests/core/dbref2.js new file mode 100644 index 
00000000000..d1b4870322d --- /dev/null +++ b/jstests/core/dbref2.js @@ -0,0 +1,20 @@ + +a = db.dbref2a; +b = db.dbref2b; +c = db.dbref2c; + +a.drop(); +b.drop(); +c.drop(); + +a.save( { name : "eliot" } ); +b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } ); +c.save( { num : 1 , links : [ new DBRef( "dbref2a" , a.findOne()._id ) ] } ); + +assert.eq( "eliot" , b.findOne().link.fetch().name , "A" ); +assert.neq( "el" , b.findOne().link.fetch().name , "B" ); + +// $elemMatch value +var doc = c.findOne( { links: { $elemMatch: { $ref : "dbref2a", $id : a.findOne()._id } } } ); +assert.eq( "eliot" , doc.links[0].fetch().name , "C" ); +assert.neq( "el" , doc.links[0].fetch().name , "D" ); diff --git a/jstests/core/dbref3.js b/jstests/core/dbref3.js new file mode 100644 index 00000000000..2f3ab8fa79c --- /dev/null +++ b/jstests/core/dbref3.js @@ -0,0 +1,45 @@ +// Make sure we only make a DBRef object for objects where the first field is a string named $ref +// and the second field is $id with any type. Only the first two fields matter for deciding if it +// is a DBRef. See http://docs.mongodb.org/manual/reference/database-references/#dbrefs. 
+ +var t = db.dbref3; + +t.drop(); + +// true cases +t.insert({sub: {$ref: "foo", $id: "bar"}, dbref: true}); +t.insert({sub: {$ref: "foo", $id: "bar", $db: "baz"}, dbref: true}); +t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted +t.insert({sub: {$ref: "foo", $id: ObjectId()}, dbref: true}); +t.insert({sub: {$ref: "foo", $id: 1}, dbref: true}); + +t.insert({sub: {$ref: 123/*not a string*/, $id: "bar"}, dbref: false}); +t.insert({sub: {$id: "bar", $ref: "foo"}, dbref: false}); +t.insert({sub: {$ref: "foo"}, dbref: false}); +t.insert({sub: {$id: "foo"}, dbref: false}); +t.insert({sub: {other: 1, $ref: "foo", $id: "bar"}, dbref: false}); + +t.find().forEach(function(obj) { + assert.eq(obj.sub.constructor == DBRef, obj.dbref, tojson(obj)); +}); + +// We should be able to run distinct against DBRef fields. +var distinctRefs = t.distinct('sub.$ref'); +print('distinct $ref = ' + distinctRefs); + +var distinctIDs = t.distinct('sub.$id'); +print('distinct $id = ' + distinctIDs); + +var distinctDBs = t.distinct('sub.$db'); +print('distinct $db = ' + distinctDBs); + +// Confirm number of unique values in each DBRef field. +assert.eq(2, distinctRefs.length); +assert.eq(4, distinctIDs.length); +assert.eq(1, distinctDBs.length); + +// $id is an array. perform positional projection on $id. 
+t.insert({sub: {$ref: "foo", $id: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]}}); +var k = t.findOne({'sub.$id': {$elemMatch: {x: 2}}}, {_id: 0, 'sub.$id.$': 1}); +print('k = ' + tojson(k)); +assert.eq({sub: {$id: [{x: 2, y:2}]}}, k); \ No newline at end of file diff --git a/jstests/core/delx.js b/jstests/core/delx.js new file mode 100644 index 00000000000..d28b2063898 --- /dev/null +++ b/jstests/core/delx.js @@ -0,0 +1,30 @@ + +a = db.getSisterDB("delxa" ) +b = db.getSisterDB("delxb" ) + +function setup( mydb ){ + mydb.dropDatabase(); + for ( i=0; i<100; i++ ){ + mydb.foo.insert( { _id : i } ); + } +} + +setup( a ); +setup( b ); + +assert.eq( 100 , a.foo.find().itcount() , "A1" ) +assert.eq( 100 , b.foo.find().itcount() , "A2" ) + +x = a.foo.find().sort( { _id : 1 } ).batchSize( 60 ) +y = b.foo.find().sort( { _id : 1 } ).batchSize( 60 ) + +x.next(); +y.next(); + +a.foo.remove( { _id : { $gt : 50 } } ); + +assert.eq( 51 , a.foo.find().itcount() , "B1" ) +assert.eq( 100 , b.foo.find().itcount() , "B2" ) + +assert.eq( 59 , x.itcount() , "C1" ) +assert.eq( 99 , y.itcount() , "C2" ); // this was asserting because ClientCursor byLoc doesn't take db into consideration diff --git a/jstests/core/depth_limit.js b/jstests/core/depth_limit.js new file mode 100644 index 00000000000..7523a1fc9fe --- /dev/null +++ b/jstests/core/depth_limit.js @@ -0,0 +1,56 @@ +// SERVER-11781 Don't crash when converting deeply nested or cyclical JS objects to BSON. + +function test() { + function assertTooBig(obj) { + // This used to crash rather than throwing an exception. 
+ assert.throws(function(){Object.bsonsize(obj)}); + } + + function assertNotTooBig(obj) { + assert.doesNotThrow(function(){Object.bsonsize(obj)}); + } + + function objWithDepth(depth) { + var out = 1; + while (depth--) { + out = {o: out}; + } + return out; + } + + function arrayWithDepth(depth) { + var out = 1; + while (depth--) { + out = [out]; + } + return out; + } + + assertNotTooBig({}); + assertNotTooBig({array: []}); + + var objCycle = {}; + objCycle.cycle = objCycle; + assertTooBig(objCycle); + + var arrayCycle = []; + arrayCycle.push(arrayCycle); + assertTooBig({array: arrayCycle}); + + var objDepthLimit = 150; + assertNotTooBig(objWithDepth(objDepthLimit - 1)); + assertTooBig(objWithDepth(objDepthLimit)); + + + var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object + assertNotTooBig({array: arrayWithDepth(arrayDepthLimit - 1)}); + assertTooBig({array: arrayWithDepth(arrayDepthLimit)}); +} + +// test in shell +test(); + +// test on server +db.depth_limit.drop(); +db.depth_limit.insert({}); +db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server diff --git a/jstests/core/distinct1.js b/jstests/core/distinct1.js new file mode 100644 index 00000000000..03e425af761 --- /dev/null +++ b/jstests/core/distinct1.js @@ -0,0 +1,40 @@ + +t = db.distinct1; +t.drop(); + +assert.eq( 0 , t.distinct( "a" ).length , "test empty" ); + +t.save( { a : 1 } ) +t.save( { a : 2 } ) +t.save( { a : 2 } ) +t.save( { a : 2 } ) +t.save( { a : 3 } ) + + +res = t.distinct( "a" ); +assert.eq( "1,2,3" , res.toString() , "A1" ); + +assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" ); + +t.drop(); + +t.save( { a : { b : "a" } , c : 12 } ); +t.save( { a : { b : "b" } , c : 12 } ); +t.save( { a : { b : "c" } , c : 12 } ); +t.save( { a : { b : "c" } , c : 12 } ); + +res = t.distinct( "a.b" ); +assert.eq( "a,b,c" , res.toString() , "B1" ); +printjson(t._distinct( "a.b" ).stats); +assert.eq( "BasicCursor" , 
t._distinct( "a.b" ).stats.cursor , "B2" ) + +t.drop(); + +t.save({_id: 1, a: 1}); +t.save({_id: 2, a: 2}); + +// Test distinct with _id. +res = t.distinct( "_id" ); +assert.eq( "1,2", res.toString(), "C1" ); +res = t.distinct( "a", {_id: 1} ); +assert.eq( "1", res.toString(), "C2" ); diff --git a/jstests/core/distinct2.js b/jstests/core/distinct2.js new file mode 100644 index 00000000000..41ee78c5117 --- /dev/null +++ b/jstests/core/distinct2.js @@ -0,0 +1,13 @@ + +t = db.distinct2; +t.drop(); + +t.save({a:null}); +assert.eq( 0 , t.distinct('a.b').length , "A" ); + +t.drop(); +t.save( { a : 1 } ); +assert.eq( [1] , t.distinct( "a" ) , "B" ); +t.save( {} ) +assert.eq( [1] , t.distinct( "a" ) , "C" ); + diff --git a/jstests/core/distinct3.js b/jstests/core/distinct3.js new file mode 100644 index 00000000000..0add7aeb95e --- /dev/null +++ b/jstests/core/distinct3.js @@ -0,0 +1,35 @@ +// Yield and delete test case for query optimizer cursor. SERVER-4401 + +t = db.jstests_distinct3; +t.drop(); + +t.ensureIndex({a:1}); +t.ensureIndex({b:1}); + +var bulk = t.initializeUnorderedBulkOp(); +for( i = 0; i < 50; ++i ) { + for( j = 0; j < 2; ++j ) { + bulk.insert({a:i,c:i,d:j}); + } +} +for( i = 0; i < 100; ++i ) { + bulk.insert({b:i,c:i+50}); +} +assert.writeOK(bulk.execute()); + +// Attempt to remove the last match for the {a:1} index scan while distinct is yielding. 
+p = startParallelShell( 'var bulk = db.jstests_distinct3.initializeUnorderedBulkOp();' + + 'for( i = 0; i < 100; ++i ) { ' + + ' bulk.remove( { a:49 } ); ' + + ' for( j = 0; j < 20; ++j ) { ' + + ' bulk.insert( { a:49, c:49, d:j } ); ' + + ' } ' + + ' bulk.execute(); ' + + '} ' ); + +for( i = 0; i < 100; ++i ) { + count = t.distinct( 'c', {$or:[{a:{$gte:0},d:0},{b:{$gte:0}}]} ).length; + assert.gt( count, 100 ); +} + +p(); diff --git a/jstests/core/distinct_array1.js b/jstests/core/distinct_array1.js new file mode 100644 index 00000000000..2f289ad2e79 --- /dev/null +++ b/jstests/core/distinct_array1.js @@ -0,0 +1,91 @@ +t = db.distinct_array1; +t.drop(); + +t.save( { a : [1,2,3] } ) +t.save( { a : [2,3,4] } ) +t.save( { a : [3,4,5] } ) +t.save( { a : 9 } ) + + +// Without index. +res = t.distinct( "a" ).sort(); +assert.eq( "1,2,3,4,5,9" , res.toString() , "A1" ); + +// Array element 0 without index. +res = t.distinct( "a.0" ).sort(); +assert.eq( "1,2,3" , res.toString() , "A2" ); + +// Array element 1 without index. +res = t.distinct( "a.1" ).sort(); +assert.eq( "2,3,4" , res.toString() , "A3" ); + +// With index. +t.ensureIndex( { a : 1 } ); +res = t.distinct( "a" ).sort(); +assert.eq( "1,2,3,4,5,9" , res.toString() , "A4" ); + +// Array element 0 with index. +res = t.distinct( "a.0" ).sort(); +assert.eq( "1,2,3" , res.toString() , "A5" ); + +// Array element 1 with index. +res = t.distinct( "a.1" ).sort(); +assert.eq( "2,3,4" , res.toString() , "A6" ); + +//t.drop(); + +t.save( { a : [{b:"a"}, {b:"d"}] , c : 12 } ); +t.save( { a : [{b:"b"}, {b:"d"}] , c : 12 } ); +t.save( { a : [{b:"c"}, {b:"e"}] , c : 12 } ); +t.save( { a : [{b:"c"}, {b:"f"}] , c : 12 } ); +t.save( { a : [] , c : 12 } ); +t.save( { a : { b : "z"} , c : 12 } ); + +// Without index. 
+res = t.distinct( "a.b" ).sort(); +assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B1" ); + +// Array element 0 without index +res = t.distinct( "a.0.b" ).sort(); +assert.eq( "a,b,c" , res.toString() , "B2" ); + +// Array element 1 without index +res = t.distinct( "a.1.b" ).sort(); +assert.eq( "d,e,f" , res.toString() , "B3" ); + +// With index. +t.ensureIndex( { "a.b" : 1 } ); +res = t.distinct( "a.b" ); +res.sort() +assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B4" ); + +// _id as an document containing an array +t.save( { _id : { a : [1,2,3] } } ) +t.save( { _id : { a : [2,3,4] } } ) +t.save( { _id : { a : [3,4,5] } } ) +t.save( { _id : { a : 9 } } ) + +// Without index. +res = t.distinct( "_id.a" ).sort(); +assert.eq( "1,2,3,4,5,9" , res.toString() , "C1" ); + +// Array element 0 without index. +res = t.distinct( "_id.a.0" ).sort(); +assert.eq( "1,2,3" , res.toString() , "C2" ); + +// Array element 1 without index. +res = t.distinct( "_id.a.1" ).sort(); +assert.eq( "2,3,4" , res.toString() , "C3" ); + +// With index. +t.ensureIndex( { "_id.a" : 1 } ); +res = t.distinct( "_id.a" ).sort(); +assert.eq( "1,2,3,4,5,9" , res.toString() , "C4" ); + +// Array element 0 with index. +res = t.distinct( "_id.a.0" ).sort(); +assert.eq( "1,2,3" , res.toString() , "C5" ); + +// Array element 1 with index. 
+res = t.distinct( "_id.a.1" ).sort(); +assert.eq( "2,3,4" , res.toString() , "C6" ); diff --git a/jstests/core/distinct_index1.js b/jstests/core/distinct_index1.js new file mode 100644 index 00000000000..6de1a7927e4 --- /dev/null +++ b/jstests/core/distinct_index1.js @@ -0,0 +1,72 @@ + +t = db.distinct_index1 +t.drop(); + +function r( x ){ + return Math.floor( Math.sqrt( x * 123123 ) ) % 10; +} + +function d( k , q ){ + return t.runCommand( "distinct" , { key : k , query : q || {} } ) +} + +for ( i=0; i<1000; i++ ){ + o = { a : r(i*5) , b : r(i) }; + t.insert( o ); +} + +x = d( "a" ); +assert.eq( 1000 , x.stats.n , "AA1" ) +assert.eq( 1000 , x.stats.nscanned , "AA2" ) +assert.eq( 1000 , x.stats.nscannedObjects , "AA3" ) + +x = d( "a" , { a : { $gt : 5 } } ); +assert.eq( 398 , x.stats.n , "AB1" ) +assert.eq( 1000 , x.stats.nscanned , "AB2" ) +assert.eq( 1000 , x.stats.nscannedObjects , "AB3" ) + +x = d( "b" , { a : { $gt : 5 } } ); +assert.eq( 398 , x.stats.n , "AC1" ) +assert.eq( 1000 , x.stats.nscanned , "AC2" ) +assert.eq( 1000 , x.stats.nscannedObjects , "AC3" ) + + + +t.ensureIndex( { a : 1 } ) + +x = d( "a" ); +// There are only 10 values. We use the fast distinct hack and only examine each value once. +assert.eq( 10 , x.stats.n , "BA1" ) +assert.eq( 10 , x.stats.nscanned , "BA2" ) + +x = d( "a" , { a : { $gt : 5 } } ); +// Only 4 values of a are >= 5 and we use the fast distinct hack. +assert.eq(4, x.stats.n , "BB1" ) +assert.eq(4, x.stats.nscanned , "BB2" ) +assert.eq(0, x.stats.nscannedObjects , "BB3" ) + +x = d( "b" , { a : { $gt : 5 } } ); +// We can't use the fast distinct hack here because we're distinct-ing over 'b'. +assert.eq( 398 , x.stats.n , "BC1" ) +assert.eq( 398 , x.stats.nscanned , "BC2" ) +assert.eq( 398 , x.stats.nscannedObjects , "BC3" ) + +// Check proper nscannedObjects count when using a query optimizer cursor. 
+t.dropIndexes(); +t.ensureIndex( { a : 1, b : 1 } ); +x = d( "b" , { a : { $gt : 5 }, b : { $gt : 5 } } ); +printjson(x); +// 171 is the # of results we happen to scan when we don't use a distinct +// hack. When we use the distinct hack we scan 16, currently. +assert.lte(x.stats.n, 171); +assert.eq( 0 , x.stats.nscannedObjects , "BB3" ) + + + +// Cursor name should not be empty when using $or with hashed index. +// +t.dropIndexes(); +t.ensureIndex( { a : "hashed" } ); +x = d( "a", { $or : [ { a : 3 }, { a : 5 } ] } ); +assert.eq( 188, x.stats.n, "DA1" ); +assert.neq( "", x.stats.cursor, "DA2" ); diff --git a/jstests/core/distinct_index2.js b/jstests/core/distinct_index2.js new file mode 100644 index 00000000000..67d28b8b95e --- /dev/null +++ b/jstests/core/distinct_index2.js @@ -0,0 +1,41 @@ +t = db.distinct_index2; +t.drop(); + +t.ensureIndex( { a : 1 , b : 1 } ) +t.ensureIndex( { c : 1 } ) + +// Uniformly distributed dataset. +// If we use a randomly generated dataset, we might not +// generate all the distinct values in the range [0, 10). +for ( var a=0; a<10; a++ ) { + for ( var b=0; b<10; b++ ) { + for ( var c=0; c<10; c++ ) { + t.insert( { a : a , b : b , c : c } ); + } + } +} + +correct = [] +for ( i=0; i<10; i++ ) + correct.push( i ) + +function check( field ){ + res = t.distinct( field ) + res = res.sort() + assert.eq( correct , res , "check: " + field ); + + if ( field != "a" ){ + res = t.distinct( field , { a : 1 } ) + res = res.sort() + assert.eq( correct , res , "check 2: " + field ); + } +} + +check( "a" ) +check( "b" ) +check( "c" ) + +// hashed index should produce same results. 
+t.dropIndexes(); +t.ensureIndex( { a : "hashed" } ); +check( "a" ); diff --git a/jstests/core/distinct_speed1.js b/jstests/core/distinct_speed1.js new file mode 100644 index 00000000000..4cae5b0ae06 --- /dev/null +++ b/jstests/core/distinct_speed1.js @@ -0,0 +1,26 @@ + +t = db.distinct_speed1; + +t.drop(); +for ( var i=0; i<10000; i++ ){ + t.save( { x : i % 10 } ); +} + +assert.eq( 10 , t.distinct("x").length , "A1" ); + +function fast(){ + t.find().explain().millis; +} + +function slow(){ + t.distinct("x"); +} + +for ( i=0; i<3; i++ ){ + print( "it: " + Date.timeFunc( fast ) ); + print( "di: " + Date.timeFunc( slow ) ); +} + + +t.ensureIndex( { x : 1 } ); +t.distinct( "x" , { x : 5 } ) diff --git a/jstests/core/drop.js b/jstests/core/drop.js new file mode 100644 index 00000000000..154c35d1db3 --- /dev/null +++ b/jstests/core/drop.js @@ -0,0 +1,25 @@ +var coll = db.jstests_drop; + +coll.drop(); + +res = coll.runCommand("drop"); +assert( !res.ok, tojson( res ) ); + + +assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "A"); +coll.save({}); +assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "B"); +coll.ensureIndex({a : 1}); +assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "C"); +assert.commandWorked(db.runCommand({drop : coll.getName()})); +assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "D"); + +coll.ensureIndex({a : 1}); +assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "E"); +assert.commandWorked(db.runCommand({deleteIndexes : coll.getName(), index : "*"}), + "delete indexes A"); +assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "G"); + +// make sure we can still use it +coll.save({}); +assert.eq(1, coll.find().hint("_id_").toArray().length, "H"); diff --git a/jstests/core/drop2.js b/jstests/core/drop2.js new file mode 100644 index 00000000000..5eef20adc61 --- /dev/null +++ b/jstests/core/drop2.js @@ -0,0 +1,52 @@ +var coll = db.jstests_drop2; +coll.drop(); + +function debug( x ) { 
+ printjson( x ); +} + +coll.save( {} ); + +function getOpId( drop ) { + var inProg = db.currentOp().inprog; + debug( inProg ); + for ( var id in inProg ) { + var op = inProg[ id ]; + if ( drop ) { + if ( op.query && op.query.drop && op.query.drop == coll.getName() ) { + return op.opid; + } + } else { + if ( op.query && op.query.query && op.query.query.$where && op.ns == (coll + "") ) { + return op.opid; + } + } + } + return null; +} + +var shell1 = startParallelShell( "print(\"Count thread started\");" + + "db.getMongo().getCollection(\"" + + (coll + "") + "\")" + + ".count( { $where: function() {" + + "while( 1 ) { sleep( 1 ); } } } );" + + "print(\"Count thread terminating\");" ); +countOpId = null; +assert.soon( function() { countOpId = getOpId( false ); return countOpId; } ); + +var shell2 = startParallelShell( "print(\"Drop thread started\");" + + "print(\"drop result: \" + " + + "db.getMongo().getCollection(\"" + + (coll + "") + "\")" + + ".drop() );" + + "print(\"Drop thread terminating\")" ); +dropOpId = null; +assert.soon( function() { dropOpId = getOpId( true ); return dropOpId; } ); + +db.killOp( dropOpId ); +db.killOp( countOpId ); + +shell1(); +shell2(); + +coll.drop(); // in SERVER-1818, this fails diff --git a/jstests/core/drop3.js b/jstests/core/drop3.js new file mode 100644 index 00000000000..d97b40989b8 --- /dev/null +++ b/jstests/core/drop3.js @@ -0,0 +1,25 @@ +t = db.jstests_drop3; +sub = t.sub; + +t.drop(); +sub.drop(); + + +for (var i = 0; i < 10; i++){ + t.insert({}); + sub.insert({}); +} + +var cursor = t.find().batchSize(2); +var subcursor = sub.find().batchSize(2); + +cursor.next(); +subcursor.next(); +assert.eq( cursor.objsLeftInBatch(), 1 ); +assert.eq( subcursor.objsLeftInBatch(), 1 ); + +t.drop(); // should invalidate cursor, but not subcursor + +assert.throws( function(){ cursor.itcount() } ); // throws "cursor doesn't exist on server" error on getMore +assert.eq( subcursor.itcount(), 9 ); //one already seen + diff --git 
a/jstests/core/drop_index.js b/jstests/core/drop_index.js new file mode 100644 index 00000000000..8e2278d00c5 --- /dev/null +++ b/jstests/core/drop_index.js @@ -0,0 +1,20 @@ + +t = db.dropIndex; +t.drop(); + +t.insert( { _id : 1 , a : 2 , b : 3 } ); +assert.eq( 1 , t.getIndexes().length , "A1" ); + +t.ensureIndex( { a : 1 } ); +t.ensureIndex( { b : 1 } ); +assert.eq( 3 , t.getIndexes().length , "A2" ); + +x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } ); +assert.eq( 2 , t.getIndexes().length , "B1 " + tojson(x) ); + +x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } ) +assert.eq( 1 , t.getIndexes().length , "B2" ); + +// ensure you can recreate indexes, even if you don't use dropIndex method +t.ensureIndex({a:1}); +assert.eq(2 , t.getIndexes().length); diff --git a/jstests/core/dropdb.js b/jstests/core/dropdb.js new file mode 100644 index 00000000000..0c080ffb286 --- /dev/null +++ b/jstests/core/dropdb.js @@ -0,0 +1,25 @@ +// Test that a db does not exist after it is dropped. +// Disabled in the small oplog suite because the slave may create a master db +// with the same name as the dropped db when requesting a clone. + +m = db.getMongo(); +baseName = "jstests_dropdb"; +ddb = db.getSisterDB( baseName ); + +print("initial dbs: " + tojson(m.getDBNames())); + +function check(shouldExist) { + var dbs = m.getDBNames(); + assert.eq(Array.contains(dbs, baseName), shouldExist, + "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist." 
+ + " dbs: " + tojson(dbs) + "\n" + tojson( m.getDBs() ) ); +} + +ddb.c.save( {} ); +check(true); + +ddb.dropDatabase(); +check(false); + +ddb.dropDatabase(); +check(false); diff --git a/jstests/core/dropdb_race.js b/jstests/core/dropdb_race.js new file mode 100644 index 00000000000..61fa0887ef5 --- /dev/null +++ b/jstests/core/dropdb_race.js @@ -0,0 +1,41 @@ +// test dropping a db with simultaneous commits + +m = db.getMongo(); +baseName = "jstests_dur_droprace"; +d = db.getSisterDB(baseName); +t = d.foo; + +assert(d.adminCommand({ setParameter: 1, syncdelay: 5 }).ok); + +var s = 0; + +var start = new Date(); + +for (var pass = 0; pass < 100; pass++) { + if (pass % 2 == 0) { + // sometimes wait for create db first, to vary the timing of things + var options = ( pass % 4 == 0 )? { writeConcern: { j: true }} : undefined; + t.insert({}, options); + } + t.insert({ x: 1 }); + t.insert({ x: 3 }); + t.ensureIndex({ x: 1 }); + sleep(s); + if (pass % 37 == 0) + d.adminCommand("closeAllDatabases"); + else if (pass % 13 == 0) + t.drop(); + else if (pass % 17 == 0) + t.dropIndexes(); + else + d.dropDatabase(); + if (pass % 7 == 0) + d.runCommand({getLastError:1,j:1}); + d.getLastError(); + s = (s + 1) % 25; + //print(pass); + if ((new Date()) - start > 60000) { + print("stopping early"); + break; + } +} diff --git a/jstests/core/elemMatchProjection.js b/jstests/core/elemMatchProjection.js new file mode 100644 index 00000000000..73088fab699 --- /dev/null +++ b/jstests/core/elemMatchProjection.js @@ -0,0 +1,265 @@ +// Tests for $elemMatch projections and $ positional operator projection. 
+t = db.SERVER828Test; +t.drop(); + +date1 = new Date(); + +// Insert various styles of arrays +for ( i = 0; i < 100; i++ ) { + t.insert({ group: 1, x: [ 1, 2, 3, 4, 5 ] }); + t.insert({ group: 2, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ] }); + t.insert({ group: 3, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ], + y: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] }); + t.insert({ group: 3, x: [ { a: 1, b: 3 }, { a: -6, c: 3 } ] }); + t.insert({ group: 4, x: [ { a: 1, b: 4 }, { a: -6, c: 3 } ] }); + t.insert({ group: 5, x: [ new Date(), 5, 10, 'string', new ObjectId(), 123.456 ] }); + t.insert({ group: 6, x: [ { a: 'string', b: date1 }, + { a: new ObjectId(), b: 1.2345 }, + { a: 'string2', b: date1 } ] }); + t.insert({ group: 7, x: [ { y: [ 1, 2, 3, 4 ] } ] }); + t.insert({ group: 8, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] }); + t.insert({ group: 9, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] }, + { z: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] }); + t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], + y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); + t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], + y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); + t.insert({ group: 11, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ], + covered: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] }); + t.insert({ group: 12, x: { y : [ { a: 1, b: 1 }, { a: 1, b: 2} ] } } ); + t.insert({ group: 13, x: [ { a: 1, b: 1 }, {a: 1, b: 2 } ] } ); + t.insert({ group: 13, x: [ { a: 1, b: 2 }, {a: 1, b: 1 } ] } ); +} +t.ensureIndex({group:1, 'y.d':1}); // for regular index test (not sure if this is really adding anything useful) +t.ensureIndex({group:1, covered:1}); // for covered index test + +// +// SERVER-828: Positional operator ($) projection tests +// +assert.eq( 1, + t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).toArray()[0].x.length, + "single object match (array length match)" ); + +assert.eq( 2, + t.find( { group:3, 'x.a':1 }, { 'x.$':1 } 
).toArray()[0].x[0].b, + "single object match first" ); + +assert.eq( undefined, + t.find( { group:3, 'x.a':2 }, { _id:0, 'x.$':1 } ).toArray()[0]._id, + "single object match with filtered _id" ); + +assert.eq( 1, + t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { _id:1 } ).toArray()[0].x.length, + "sorted single object match with filtered _id (array length match)" ); + +assert.eq( 1, + t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':1 } ).toArray()[0].x.length, + "single object match with elemMatch" ); + +assert.eq( 1, + t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':1} } ).toArray()[0].x.length, + "single object match with elemMatch and positive slice" ); + +assert.eq( 1, + t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':-1} } ).toArray()[0].x.length, + "single object match with elemMatch and negative slice" ); + +assert.eq( 1, + t.find( { 'group':12, 'x.y.a':1 }, { 'x.y.$': 1 } ).toArray()[0].x.y.length, + "single object match with two level dot notation" ); + +assert.eq( 1, + t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { x:1 } ).toArray()[0].x.length, + "sorted object match (array length match)" ); + +assert.eq( { aa:1, dd:5 }, + t.find( { group:3, 'y.dd':5 }, { 'y.$':1 } ).toArray()[0].y[0], + "single object match (value match)" ); + +assert.throws( function() { + t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).toArray(); + }, [], "throw on invalid projection (field mismatch)" ); + +assert.throws( function() { + t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).sort( { x:1 } ).toArray() + }, [], "throw on invalid sorted projection (field mismatch)" ); + +assert.throws( function() {x + t.find( { group:3, 'x.a':2 }, { 'x.$':1, group:0 } ).sort( { x:1 } ).toArray(); + }, [], "throw on invalid projection combination (include and exclude)" ); + +assert.throws( function() { + t.find( { group:3, 'x.a':1, 'y.aa':1 }, { 'x.$':1, 'y.$':1 } ).toArray(); + }, [], "throw on 
multiple projections" ); + +assert.throws( function() { + t.find( { group:3}, { 'g.$':1 } ).toArray() + }, [], "throw on invalid projection (non-array field)" ); + +assert.eq( { aa:1, dd:5 }, + t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).toArray()[0].covered[0], + "single object match (covered index)" ); + +assert.eq( { aa:1, dd:5 }, + t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).sort( { covered:1 } ).toArray()[0].covered[0], + "single object match (sorted covered index)" ); + +assert.eq( 1, + t.find( { group:10, 'y.d': 4 }, { 'y.$':1 } ).toArray()[0].y.length, + "single object match (regular index" ); + +if (false) { + + assert.eq( 2, // SERVER-1013: allow multiple positional operators + t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].y[0].bb, + "multi match, multi proj 1" ); + + assert.eq( 5, // SSERVER-1013: allow multiple positional operators + t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].x[0].d, + "multi match, multi proj 2" ); + + assert.eq( 2, // SERVER-1243: allow multiple results from same matcher + t.find( { group:2, x: { $elemMatchAll: { a:1 } } }, { 'x.$':1 } ).toArray()[0].x.length, + "multi element match, single proj" ); + + assert.eq( 2, // SERVER-1013: multiple array matches with one prositional operator + t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1 } ).toArray()[0].y[0].bb, + "multi match, single proj 1" ); + + assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator + t.find( { group:3, 'y.cc':3, 'x.b':2 }, { 'x.$':1 } ).toArray()[0].x[0].b, + "multi match, single proj 2" ); + +} + +// +// SERVER-2238: $elemMatch projections +// +assert.eq( -6, + t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x[0].a, + "single object match" ); + +assert.eq( 1, + t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x.length, + "filters non-matching array elements" ); + +assert.eq( 1, + t.find( { group:4 }, { x: { 
$elemMatch: { a:-6, c:3 } } } ).toArray()[0].x.length, + "filters non-matching array elements with multiple elemMatch criteria" ); + +assert.eq( 1, + t.find( { group: 13 }, { 'x' : {'$elemMatch' : { a: {$gt: 0, $lt: 2} } } } ).toArray()[0].x.length, + "filters non-matching array elements with multiple criteria for a single element in the array" ); + +assert.eq( 3, + t.find( { group:4 }, { x: { $elemMatch: { a:{ $lt:1 } } } } ).toArray()[0].x[0].c, + "object operator match" ); + +assert.eq( [ 4 ], + t.find( { group:1 }, { x: { $elemMatch: { $in:[100, 4, -123] } } } ).toArray()[0].x, + "$in number match" ); + +assert.eq( [ {a : 1, b : 2} ], + t.find( { group:2 }, { x: { $elemMatch: { a: { $in:[1] } } } } ).toArray()[0].x, + "$in number match" ); + +assert.eq( [1], + t.find( { group:1 }, { x: { $elemMatch: { $nin:[4, 5, 6] } } } ).toArray()[0].x, + "$nin number match" ); + +// but this may become a user assertion, since a single element of an array can't match more than one value +assert.eq( [ 1], + t.find( { group:1 }, { x: { $elemMatch: { $all:[1] } } } ).toArray()[0].x, + "$in number match" ); + +assert.eq( [ { a: 'string', b: date1 } ], + t.find( { group:6 }, { x: { $elemMatch: { a:'string' } } } ).toArray()[0].x, + "mixed object match on string eq" ); + +assert.eq( [ { a: 'string2', b: date1 } ], + t.find( { group:6 }, { x: { $elemMatch: { a:/ring2/ } } } ).toArray()[0].x, + "mixed object match on regexp" ); + +assert.eq( [ { a: 'string', b: date1 } ], + t.find( { group:6 }, { x: { $elemMatch: { a: { $type: 2 } } } } ).toArray()[0].x, + "mixed object match on type" ); + +assert.eq( [ { a : 2, c : 3} ], + t.find( { group:2 }, { x: { $elemMatch: { a: { $ne: 1 } } } } ).toArray()[0].x, + "mixed object match on ne" ); + +assert.eq( [ {a : 1, d : 5} ], + t.find( { group:3 }, { x: { $elemMatch: { d: { $exists: true } } } } ).toArray()[0].x, + "mixed object match on exists" ); + +assert.eq( [ {a : 2, c : 3} ], + t.find( { group:3 }, { x: { $elemMatch: { a: { $mod : [2, 
0 ] } } } } ).toArray()[0].x, + "mixed object match on mod" ); + +assert.eq( {"x" : [ { "a" : 1, "b" : 2 } ], "y" : [ { "c" : 3, "d" : 4 } ] }, + t.find( { group:10 }, { _id : 0, + x: { $elemMatch: { a: 1 } }, + y: { $elemMatch: { c: 3 } } } ).toArray()[0], + "multiple $elemMatch on unique fields 1" ); + +if (false) { + + assert.eq( 2 , // SERVER-1243: handle multiple $elemMatch results + t.find( { group:4 }, { x: { $elemMatchAll: { a:{ $lte:2 } } } } ).toArray()[0].x.length, + "multi object match" ); + + assert.eq( 3 , // SERVER-1243: handle multiple $elemMatch results + t.find( { group:1 }, { x: { $elemMatchAll: { $in:[1, 2, 3] } } } ).toArray()[0].x.length, + "$in number match" ); + + assert.eq( 1 , // SERVER-1243: handle multiple $elemMatch results + t.find( { group:5 }, { x: { $elemMatchAll: { $ne: 5 } } } ).toArray()[0].x.length, + "single mixed type match 1" ); + + assert.eq( 1 , // SERVER-831: handle nested arrays + t.find( { group:9 }, { 'x.y': { $elemMatch: { a: 1 } } } ).toArray()[0].x.length, + "single dotted match" ); + +} + +// +// Batch/getMore tests +// +// test positional operator across multiple batches +a = t.find( { group:3, 'x.b':2 }, { 'x.$':1 } ).batchSize(1) +while ( a.hasNext() ) { + assert.eq( 2, a.next().x[0].b, "positional getMore test"); +} + +// test $elemMatch operator across multiple batches +a = t.find( { group:3 }, { x:{$elemMatch:{a:1}} } ).batchSize(1) +while ( a.hasNext() ) { + assert.eq( 1, a.next().x[0].a, "positional getMore test"); +} + +// verify the positional update operator matches the same element as the the positional find. this +// is to ensure consistent behavior with updates until SERVER-1013 is resolved, at which point the +// following tests should be updated. 
+ +t.update({ group: 10, 'x.a': 3, 'y.c':1 }, { $set:{'x.$':100} }, false, true ); +// updated the wrong element, so the following assertions should be true +assert.eq( 100, + t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0], + "wrong single element match after update" ); + +assert.eq( 100, + t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0], + "wrong single element match after update" ); + +t.remove({ group: 10 }); +t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], + y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); + +t.update({ group: 10, 'y.c':1, 'x.a': 3 }, { $set:{'x.$':100} }, false, true ); +// updated the correct element +assert.eq( 100, + t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0], + "right single element match after update" ); +assert.eq( 100, + t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0], + "right single element match after update" ); diff --git a/jstests/core/error2.js b/jstests/core/error2.js new file mode 100644 index 00000000000..8c27d6250e1 --- /dev/null +++ b/jstests/core/error2.js @@ -0,0 +1,21 @@ +// Test that client gets stack trace on failed invoke + +f = db.jstests_error2; + +f.drop(); + +f.save( {a:1} ); + +assert.throws( + function(){ + c = f.find({$where : function(){ return a() }}); + c.next(); + } +); + +assert.throws( + function(){ + db.eval( function() { return a(); } ); + } +); + diff --git a/jstests/core/error5.js b/jstests/core/error5.js new file mode 100644 index 00000000000..5884d20d8c1 --- /dev/null +++ b/jstests/core/error5.js @@ -0,0 +1,8 @@ + +t = db.error5 +t.drop(); + +assert.throws( function(){ t.save( 4 ); printjson( t.findOne() ) } , null , "A" ); +t.save( { a : 1 } ) +assert.eq( 1 , t.count() , "B" ); + diff --git a/jstests/core/eval0.js b/jstests/core/eval0.js new file mode 100644 index 00000000000..4375cace839 --- /dev/null +++ b/jstests/core/eval0.js @@ -0,0 +1,8 @@ + +assert.eq( 17 , db.eval( function(){ return 11 + 6; 
} ) , "A" ); +assert.eq( 17 , db.eval( function( x ){ return 10 + x; } , 7 ) , "B" ); + +// check that functions in system.js work +db.system.js.insert({_id: "add", value: function(x,y){ return x + y;}}); +assert.eq( 20 , db.eval( "this.add(15, 5);" ) , "C" ); + diff --git a/jstests/core/eval1.js b/jstests/core/eval1.js new file mode 100644 index 00000000000..4a5ca75f09b --- /dev/null +++ b/jstests/core/eval1.js @@ -0,0 +1,17 @@ + +t = db.eval1; +t.drop(); + +t.save( { _id : 1 , name : "eliot" } ); +t.save( { _id : 2 , name : "sara" } ); + +f = function(id){ + return db["eval1"].findOne( { _id : id } ).name; +} + + +assert.eq( "eliot" , f( 1 ) , "A" ); +assert.eq( "sara" , f( 2 ) , "B" ); +assert.eq( "eliot" , db.eval( f , 1 ) , "C" ); +assert.eq( "sara" , db.eval( f , 2 ) , "D" ); + diff --git a/jstests/core/eval2.js b/jstests/core/eval2.js new file mode 100644 index 00000000000..6e39bb4a7bd --- /dev/null +++ b/jstests/core/eval2.js @@ -0,0 +1,28 @@ + +t = db.eval2; +t.drop(); +t.save({a:1}); +t.save({a:1}); + +var f = db.group( + { + ns: t.getName(), + key: { a:true}, + cond: { a:1 }, + reduce: function(obj,prev) { prev.csum++; } , + initial: { csum: 0} + } +); + +assert(f[0].a == 1 && f[0].csum == 2 , "on db" ); + +var f = t.group( + { + key: { a:true}, + cond: { a:1 }, + reduce: function(obj,prev) { prev.csum++; } , + initial: { csum: 0} + } +); + +assert(f[0].a == 1 && f[0].csum == 2 , "on coll" ); diff --git a/jstests/core/eval3.js b/jstests/core/eval3.js new file mode 100644 index 00000000000..404d4d863b7 --- /dev/null +++ b/jstests/core/eval3.js @@ -0,0 +1,21 @@ + +t = db.eval3; +t.drop(); + +t.save( { _id : 1 , name : "eliot" } ); +assert.eq( 1 , t.count() , "A" ); + +function z( a , b ){ + db.eval3.save( { _id : a , name : b } ); + return b; +} + +z( 2 , "sara" ); +assert.eq( 2 , t.count() , "B" ); + +assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() ); + +assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" 
); +assert.eq( 3 , t.count() , "D" ); + +assert.eq( "eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() ); diff --git a/jstests/core/eval4.js b/jstests/core/eval4.js new file mode 100644 index 00000000000..31d6ef0c2a8 --- /dev/null +++ b/jstests/core/eval4.js @@ -0,0 +1,23 @@ + +t = db.eval4; +t.drop(); + +t.save( { a : 1 } ); +t.save( { a : 2 } ); +t.save( { a : 3 } ); + +assert.eq( 3 , t.count() , "A" ); + +function f( x ){ + db.eval4.remove( { a : x } ); +} + +f( 2 ); +assert.eq( 2 , t.count() , "B" ); + +db.eval( f , 2 ); +assert.eq( 2 , t.count() , "C" ); + +db.eval( f , 3 ); +assert.eq( 1 , t.count() , "D" ); + diff --git a/jstests/core/eval5.js b/jstests/core/eval5.js new file mode 100644 index 00000000000..a9223a555a6 --- /dev/null +++ b/jstests/core/eval5.js @@ -0,0 +1,23 @@ + +t = db.eval5; +t.drop(); + +t.save( { a : 1 , b : 2 , c : 3 } ); + +assert.eq( 3 , + db.eval( + function(z){ + return db.eval5.find().toArray()[0].c; + } + ) , + "something weird A" + ); + +assert.isnull( + db.eval( + function(z){ + return db.eval5.find( {} , { a : 1 } ).toArray()[0].c; + } + ), + "field spec didn't work" + ); diff --git a/jstests/core/eval6.js b/jstests/core/eval6.js new file mode 100644 index 00000000000..5fe096974c6 --- /dev/null +++ b/jstests/core/eval6.js @@ -0,0 +1,15 @@ + +t = db.eval6; +t.drop(); + +t.save( { a : 1 } ); + +db.eval( + function(){ + o = db.eval6.findOne(); + o.b = 2; + db.eval6.save( o ); + } +); + +assert.eq( 2 , t.findOne().b ); diff --git a/jstests/core/eval7.js b/jstests/core/eval7.js new file mode 100644 index 00000000000..45e06af276c --- /dev/null +++ b/jstests/core/eval7.js @@ -0,0 +1,3 @@ + +assert.eq( 6 , db.eval( "5 + 1" ) , "A" ) +assert.throws( function(z){ db.eval( "5 + function x; + 1" )} ); diff --git a/jstests/core/eval8.js b/jstests/core/eval8.js new file mode 100644 index 00000000000..072a890e80a --- /dev/null +++ b/jstests/core/eval8.js @@ -0,0 +1,19 @@ + +t = db.eval8; +t.drop(); + 
+x = { a : 1 , b : 2 }; +t.save( x ); +x = t.findOne(); + +assert( x.a && x.b , "A" ); +delete x.b; + +assert( x.a && ! x.b , "B" ) +x.b = 3; +assert( x.a && x.b , "C" ); +assert.eq( 3 , x.b , "D" ); + +t.save( x ); +y = t.findOne(); +assert.eq( tojson( x ) , tojson( y ) , "E" ); diff --git a/jstests/core/eval9.js b/jstests/core/eval9.js new file mode 100644 index 00000000000..9c6642901e4 --- /dev/null +++ b/jstests/core/eval9.js @@ -0,0 +1,22 @@ + +a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ] + +for ( var i=0; i 0, 'C : ' + tojson( o ) ); +} +finally { + + db.setProfilingLevel(0); + db = stddb; +} diff --git a/jstests/core/evalc.js b/jstests/core/evalc.js new file mode 100644 index 00000000000..0320ecd5133 --- /dev/null +++ b/jstests/core/evalc.js @@ -0,0 +1,25 @@ +t = db.jstests_evalc; +t.drop(); + +t2 = db.evalc_done +t2.drop() + +for( i = 0; i < 10; ++i ) { + t.save( {i:i} ); +} + +// SERVER-1610 + +assert.eq( 0 , t2.count() , "X1" ) + +s = startParallelShell( "print( 'starting forked:' + Date() ); for ( i=0; i<50000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); " ) + +print( "starting eval: " + Date() ) +while ( true ) { + db.eval( "db.jstests_evalc.count( {i:10} );" ); + if ( t2.count() > 0 ) + break; +} +print( "end eval: " + Date() ) + +s(); diff --git a/jstests/core/evald.js b/jstests/core/evald.js new file mode 100644 index 00000000000..7bb0eb825b1 --- /dev/null +++ b/jstests/core/evald.js @@ -0,0 +1,97 @@ +t = db.jstests_evald; +t.drop(); + +function debug( x ) { +// printjson( x ); +} + +for( i = 0; i < 10; ++i ) { + t.save( {i:i} ); +} + +function op( ev, where ) { + p = db.currentOp().inprog; + debug( p ); + for ( var i in p ) { + var o = p[ i ]; + if ( where ) { + if ( o.active && o.query && o.query.query && o.query.query.$where && o.ns == "test.jstests_evald" ) { + return o.opid; + } + } else { + if ( o.active && o.query && o.query.$eval && o.query.$eval == ev ) { + return o.opid; + 
} + } + } + return -1; +} + +function doIt( ev, wait, where ) { + + if ( where ) { + s = startParallelShell( ev ); + } else { + s = startParallelShell( "db.eval( '" + ev + "' )" ); + } + + o = null; + assert.soon( function() { o = op( ev, where ); return o != -1 } ); + + if ( wait ) { + sleep( 2000 ); + } + + debug( "going to kill" ); + + db.killOp( o ); + + debug( "sent kill" ); + + s(); + +} + +// nested scope with nested invoke() +doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", true, true); +doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", false, true); + +// simple tight loop tests with callback +doIt("while(1) { sleep(1); }", false); +doIt("while(1) { sleep(1); }", true); + +// simple tight loop tests without callback +doIt("while(1) {;}", false); +doIt("while(1) {;}", true); + +// the for loops are currently required, as a spawned op masks the parent op - see SERVER-1931 +doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", true); +doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", false); +doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", true); +doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", false); + +// try/catch with tight-loop kill tests. Catch testing is important +// due to v8::TerminateExecution internals. 
+// native callback with nested invoke(), drop JS exceptions +doIt("while(1) { " + + " for(var i = 0; i < 10000; ++i) {;} " + + " try { " + + " db.jstests_evald.count({i:10}); " + + " } catch (e) {} " + + "}", true ); + +// native callback, drop JS exceptions +doIt("while(1) { " + + " try { " + + " while(1) { " + + " sleep(1); " + + " } " + + " } catch (e) {} " + + "}", true ); + +// no native callback and drop JS exceptions +doIt("while(1) { " + + " try { " + + " while(1) {;} " + + " } catch (e) {} " + + "}", true ); diff --git a/jstests/core/evale.js b/jstests/core/evale.js new file mode 100644 index 00000000000..af5a303f167 --- /dev/null +++ b/jstests/core/evale.js @@ -0,0 +1,5 @@ +t = db.jstests_evale; +t.drop(); + +db.eval( function() { return db.jstests_evale.count( { $where:function() { return true; } } ) } ); +db.eval( "db.jstests_evale.count( { $where:function() { return true; } } )" ); \ No newline at end of file diff --git a/jstests/core/evalf.js b/jstests/core/evalf.js new file mode 100644 index 00000000000..01b7907ba93 --- /dev/null +++ b/jstests/core/evalf.js @@ -0,0 +1,27 @@ +// test that killing a parent op interrupts the child op + +t = db.jstests_evalf; +t.drop(); + +//if ( typeof _threadInject == "undefined" ) { // don't run in v8 mode - SERVER-1900 + +// the code in eval must be under 512 chars because otherwise it's not displayed in curOp() +try { +db.eval( function() { + opid = null; + while( opid == null ) { + ops = db.currentOp().inprog; + for( i in ops ) { + o = ops[ i ]; + if ( o.active && o.query && o.query.$eval ) { opid = o.opid; } + }} + db.jstests_evalf.save( {"opid":opid} ); + db.jstests_evalf.count( { $where:function() { var id = db.jstests_evalf.findOne().opid; db.killOp( id ); while( 1 ) { ; } } } ); + } ); +} catch (ex) { + // exception is thrown in V8 when job gets killed. Does not seem like bad behavior. 
+} + +// make sure server and JS still work +db.eval( function() { db.jstests_evalf.count(); }); +//} diff --git a/jstests/core/exists.js b/jstests/core/exists.js new file mode 100644 index 00000000000..3f1e904e52f --- /dev/null +++ b/jstests/core/exists.js @@ -0,0 +1,49 @@ +t = db.jstests_exists; +t.drop(); + +t.save( {} ); +t.save( {a:1} ); +t.save( {a:{b:1}} ); +t.save( {a:{b:{c:1}}} ); +t.save( {a:{b:{c:{d:null}}}} ); + +function dotest( n ){ + + assert.eq( 5, t.count() , n ); + assert.eq( 1, t.count( {a:null} ) , n ); + assert.eq( 2, t.count( {'a.b':null} ) , n ); + assert.eq( 3, t.count( {'a.b.c':null} ) , n ); + assert.eq( 5, t.count( {'a.b.c.d':null} ) , n ); + + assert.eq( 5, t.count() , n ); + assert.eq( 4, t.count( {a:{$ne:null}} ) , n ); + assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n ); + assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n ); + assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n ); + + assert.eq( 4, t.count( {a: {$exists:true}} ) , n ); + assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n ); + assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n ); + assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n ); + + assert.eq( 1, t.count( {a: {$exists:false}} ) , n ); + assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n ); + assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n ); + assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n ); +} + +dotest( "before index" ) +t.ensureIndex( { "a" : 1 } ) +t.ensureIndex( { "a.b" : 1 } ) +t.ensureIndex( { "a.b.c" : 1 } ) +t.ensureIndex( { "a.b.c.d" : 1 } ) +dotest( "after index" ) +assert.eq( 1, t.find( {a: {$exists:false}} ).hint( {a:1} ).itcount() ); + +t.drop(); + +t.save( {r:[{s:1}]} ); +assert( t.findOne( {'r.s':{$exists:true}} ) ); +assert( !t.findOne( {'r.s':{$exists:false}} ) ); +assert( !t.findOne( {'r.t':{$exists:true}} ) ); +assert( t.findOne( {'r.t':{$exists:false}} ) ); diff --git a/jstests/core/exists2.js b/jstests/core/exists2.js new file mode 100644 index 
00000000000..e925c168f50 --- /dev/null +++ b/jstests/core/exists2.js @@ -0,0 +1,16 @@ + +t = db.exists2; +t.drop(); + +t.save( { a : 1 , b : 1 } ) +t.save( { a : 1 , b : 1 , c : 1 } ) + +assert.eq( 2 , t.find().itcount() , "A1" ); +assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" ); +assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" ); +assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "A4" ); + +t.ensureIndex( { a : 1 , b : 1 , c : 1 } ) +assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" ); +assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "B2" ); + diff --git a/jstests/core/exists3.js b/jstests/core/exists3.js new file mode 100644 index 00000000000..53a69d6c3bb --- /dev/null +++ b/jstests/core/exists3.js @@ -0,0 +1,21 @@ +// Check exists with non empty document, based on SERVER-2470 example. + +t = db.jstests_exists3; +t.drop(); + +t.insert({a: 1, b: 2}); + +assert.eq( 1, t.find({}).sort({c: -1}).itcount() ); +assert.eq( 1, t.count({c: {$exists: false}}) ); +assert.eq( 1, t.find({c: {$exists: false}}).itcount() ); +assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() ); + +// now we have an index on the sort key +t.ensureIndex({c: -1}) + +assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() ); +assert.eq( 1, t.find({c: {$exists: false}}).itcount() ); +// still ok without the $exists +assert.eq( 1, t.find({}).sort({c: -1}).itcount() ); +// and ok with a convoluted $not $exists +assert.eq( 1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount() ); diff --git a/jstests/core/exists4.js b/jstests/core/exists4.js new file mode 100644 index 00000000000..fb801ed62e9 --- /dev/null +++ b/jstests/core/exists4.js @@ -0,0 +1,20 @@ +// Check various exists cases, based on SERVER-1735 example. 
+ +t = db.jstests_exists4; +t.drop(); + +t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1}); +t.insert({ date: new Date("08/27/2010"), tot_visit: 100}); +t.insert({ date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77}); +t.insert({ date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23}); +t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "and...@spacca.org", tot_visit: 11}); +t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@gmail.com", tot_visit: 5}); +t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@progloedizioni.com", tot_visit: 7}); + +assert.eq( 6, t.find({date: new Date("08/27/2010")}).count() ); +assert.eq( 5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count() ); +assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count() ); +assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: null}).count() ); +assert.eq( 3, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}}).count() ); +assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: false}}).count() ); +assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null}).count() ); diff --git a/jstests/core/exists5.js b/jstests/core/exists5.js new file mode 100644 index 00000000000..a90a94f908f --- /dev/null +++ b/jstests/core/exists5.js @@ -0,0 +1,33 @@ +// Test some $not/$exists cases. 
+ +t = db.jstests_exists5; +t.drop(); + +t.save( {a:1} ); +assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) ); +assert.eq( 1, t.count( {'c.d':{$not:{$exists:true}}} ) ); +assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) ); +assert.eq( 0, t.count( {'c.d':{$not:{$exists:false}}} ) ); + +t.drop(); +t.save( {a:{b:1}} ); +assert.eq( 1, t.count( {'a.b':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) ); +assert.eq( 0, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) ); + +t.drop(); +t.save( {a:[1]} ); +assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) ); +assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) ); + +t.drop(); +t.save( {a:[{b:1}]} ); +assert.eq( 1, t.count( {'a.b':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) ); +assert.eq( 0, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) ); diff --git a/jstests/core/exists6.js b/jstests/core/exists6.js new file mode 100644 index 00000000000..2fa4ba85d49 --- /dev/null +++ b/jstests/core/exists6.js @@ -0,0 +1,79 @@ +// SERVER-393 Test indexed matching with $exists. + +t = db.jstests_exists6; +t.drop(); + +t.ensureIndex( {b:1} ); +t.save( {} ); +t.save( {b:1} ); +t.save( {b:null} ); + +//--------------------------------- + +function checkIndexUse( query, usesIndex, index, bounds ) { + var x = t.find( query ).explain() + if ( usesIndex ) { + assert.eq( x.cursor.indexOf(index), 0 , tojson(x) ); + if ( ! 
x.indexBounds ) x.indexBounds = {} + assert.eq( bounds, x.indexBounds.b , tojson(x) ); + } + else { + assert.eq( 'BasicCursor', x.cursor, tojson(x) ); + } +} + +function checkExists( query, usesIndex, bounds ) { + checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds ); + // Whether we use an index or not, we will always scan all docs. + assert.eq( 3, t.find( query ).explain().nscanned ); + // 2 docs will match. + assert.eq( 2, t.find( query ).itcount() ); +} + +function checkMissing( query, usesIndex, bounds ) { + checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds ); + // Nscanned changes based on index usage. + if ( usesIndex ) assert.eq( 2, t.find( query ).explain().nscanned ); + else assert.eq( 3, t.find( query ).explain().nscanned ); + // 1 doc is missing 'b'. + assert.eq( 1, t.find( query ).itcount() ); +} + +function checkExistsCompound( query, usesIndex, bounds ) { + checkIndexUse( query, usesIndex, 'BtreeCursor', bounds ); + if ( usesIndex ) assert.eq( 3, t.find( query ).explain().nscanned ); + else assert.eq( 3, t.find( query ).explain().nscanned ); + // 2 docs have a:1 and b:exists. + assert.eq( 2, t.find( query ).itcount() ); +} + +function checkMissingCompound( query, usesIndex, bounds ) { + checkIndexUse( query, usesIndex, 'BtreeCursor', bounds ); + // two possible indexes to use + // 1 doc should match + assert.eq( 1, t.find( query ).itcount() ); +} + +//--------------------------------- + +var allValues = [ [ { $minElement:1 }, { $maxElement:1 } ] ]; +var nullNull = [ [ null, null ] ]; + +// Basic cases +checkExists( {b:{$exists:true}}, true, allValues ); +// We change this to not -> not -> exists:true, and get allValue for bounds +// but we use a BasicCursor? +checkExists( {b:{$not:{$exists:false}}}, false, allValues ); +checkMissing( {b:{$exists:false}}, true, nullNull ); +checkMissing( {b:{$not:{$exists:true}}}, true, nullNull ); + +// Now check existence of second compound field. 
+t.ensureIndex( {a:1,b:1} ); +t.save( {a:1} ); +t.save( {a:1,b:1} ); +t.save( {a:1,b:null} ); + +checkExistsCompound( {a:1,b:{$exists:true}}, true, allValues ); +checkExistsCompound( {a:1,b:{$not:{$exists:false}}}, true, allValues ); +checkMissingCompound( {a:1,b:{$exists:false}}, true, nullNull ); +checkMissingCompound( {a:1,b:{$not:{$exists:true}}}, true, nullNull ); diff --git a/jstests/core/exists7.js b/jstests/core/exists7.js new file mode 100644 index 00000000000..91fd589f30d --- /dev/null +++ b/jstests/core/exists7.js @@ -0,0 +1,21 @@ + +// Test that non boolean value types are allowed with $explain spec. SERVER-2322 + +t = db.jstests_explain7; +t.drop(); + +function testIntegerExistsSpec() { + t.remove({}); + t.save( {} ); + t.save( {a:1} ); + t.save( {a:2} ); + t.save( {a:3, b:3} ); + t.save( {a:4, b:4} ); + + assert.eq( 2, t.count( {b:{$exists:1}} ) ); + assert.eq( 3, t.count( {b:{$exists:0}} ) ); +} + +testIntegerExistsSpec(); +t.ensureIndex( {b:1} ); +testIntegerExistsSpec(); diff --git a/jstests/core/exists8.js b/jstests/core/exists8.js new file mode 100644 index 00000000000..ca62ebeb9ab --- /dev/null +++ b/jstests/core/exists8.js @@ -0,0 +1,76 @@ +// Test $exists with array element field names SERVER-2897 + +t = db.jstests_exists8; +t.drop(); + +t.save( {a:[1]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.1':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.1':{$exists:true}} ) ); + +t.remove({}); +t.save( {a:[1,2]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.1':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); +assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); + +t.remove({}); +t.save( {a:[{}]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.1':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); +assert.eq( 0, t.count( 
{'a.1':{$exists:true}} ) ); + +t.remove({}); +t.save( {a:[{},{}]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.1':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); +assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); + +t.remove({}); +t.save( {a:[{'b':2},{'a':1}]} ); +assert.eq( 1, t.count( {'a.a':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.1.a':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.0.a':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[1]]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.0.0':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.0.0':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.0.0.0':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.0.0.0':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[[1]]]} ); +assert.eq( 1, t.count( {'a.0.0.0':{$exists:true}} ) ); + +t.remove({}); +t.save( {a:[[{b:1}]]} ); +assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 1, t.count( {'a.0.b':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.0.b':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[],[{b:1}]]} ); +assert.eq( 0, t.count( {'a.0.b':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.0.b':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[],[{b:1}]]} ); +assert.eq( 1, t.count( {'a.1.b':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.1.b':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[],[{b:1}]]} ); +assert.eq( 1, t.count( {'a.1.0.b':{$exists:true}} ) ); +assert.eq( 0, t.count( {'a.1.0.b':{$exists:false}} ) ); + +t.remove({}); +t.save( {a:[[],[{b:1}]]} ); +assert.eq( 0, t.count( {'a.1.1.b':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.1.1.b':{$exists:false}} ) ); diff --git a/jstests/core/exists9.js b/jstests/core/exists9.js new file mode 100644 index 00000000000..66378d1b424 --- /dev/null +++ b/jstests/core/exists9.js @@ -0,0 +1,41 @@ +// SERVER-393 Test exists with 
various empty array and empty object cases. + +t = db.jstests_exists9; +t.drop(); + +// Check existence of missing nested field. +t.save( {a:{}} ); +assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); +assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); + +// With index. +t.ensureIndex( {'a.b':1} ); +assert.eq( 1, t.find( {'a.b':{$exists:false}} ).hint( {'a.b':1} ).itcount() ); +assert.eq( 0, t.find( {'a.b':{$exists:true}} ).hint( {'a.b':1} ).itcount() ); + +t.drop(); + +// Check that an empty array 'exists'. +t.save( {} ); +t.save( {a:[]} ); +assert.eq( 1, t.count( {a:{$exists:true}} ) ); +assert.eq( 1, t.count( {a:{$exists:false}} ) ); + +// With index. +t.ensureIndex( {a:1} ); +assert.eq( 1, t.find( {a:{$exists:true}} ).hint( {a:1} ).itcount() ); +assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).itcount() ); +assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).explain().nscanned ); + +t.drop(); + +// Check that an indexed field within an empty array does not exist. +t.save( {a:{'0':1}} ); +t.save( {a:[]} ); +assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); +assert.eq( 1, t.count( {'a.0':{$exists:false}} ) ); + +// With index. +t.ensureIndex( {'a.0':1} ); +assert.eq( 1, t.find( {'a.0':{$exists:true}} ).hint( {'a.0':1} ).itcount() ); +assert.eq( 1, t.find( {'a.0':{$exists:false}} ).hint( {'a.0':1} ).itcount() ); diff --git a/jstests/core/existsa.js b/jstests/core/existsa.js new file mode 100644 index 00000000000..9ef7e9f374c --- /dev/null +++ b/jstests/core/existsa.js @@ -0,0 +1,114 @@ +// Sparse indexes are disallowed for $exists:false queries. SERVER-3918 + +t = db.jstests_existsa; +t.drop(); + +t.save( {} ); +t.save( { a:1 } ); +t.save( { a:{ x:1 }, b:1 } ); + +/** Configure testing of an index { :1 }. 
*/ +function setIndex( _indexKeyField ) { + indexKeyField = _indexKeyField; + indexKeySpec = {}; + indexKeySpec[ indexKeyField ] = 1; + t.ensureIndex( indexKeySpec, { sparse:true } ); + indexCursorName = 'BtreeCursor ' + indexKeyField + '_1'; +} +setIndex( 'a' ); + +/** Validate the prefix of 'str'. */ +function assertPrefix( prefix, str ) { + assert.eq( prefix, str.substring( 0, prefix.length ) ); +} + +/** @return count when hinting the index to use. */ +function hintedCount( query ) { + assertPrefix( indexCursorName, t.find( query ).hint( indexKeySpec ).explain().cursor ); + return t.find( query ).hint( indexKeySpec ).itcount(); +} + +/** The query field does not exist and the sparse index is not used without a hint. */ +function assertMissing( query, expectedMissing, expectedIndexedMissing ) { + expectedMissing = expectedMissing || 1; + expectedIndexedMissing = expectedIndexedMissing || 0; + assert.eq( expectedMissing, t.count( query ) ); + assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); + // We also shouldn't get a different count depending on whether + // an index is used or not. + assert.eq( expectedIndexedMissing, hintedCount( query ) ); +} + +/** The query field exists and the sparse index is used without a hint. */ +function assertExists( query, expectedExists ) { + expectedExists = expectedExists || 2; + assert.eq( expectedExists, t.count( query ) ); + assert.eq( 0, t.find( query ).explain().cursor.indexOf('BtreeCursor') ); + // An $exists:true predicate generates no index filters. Add another predicate on the index key + // to trigger use of the index. + andClause = {} + andClause[ indexKeyField ] = { $ne:null }; + Object.extend( query, { $and:[ andClause ] } ); + assert.eq( expectedExists, t.count( query ) ); + assertPrefix( indexCursorName, t.find( query ).explain().cursor ); + assert.eq( expectedExists, hintedCount( query ) ); +} + +/** The query field exists and the sparse index is not used without a hint. 
*/ +function assertExistsUnindexed( query, expectedExists ) { + expectedExists = expectedExists || 2; + assert.eq( expectedExists, t.count( query ) ); + assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); + // Even with another predicate on the index key, the sparse index is disallowed. + andClause = {} + andClause[ indexKeyField ] = { $ne:null }; + Object.extend( query, { $and:[ andClause ] } ); + assert.eq( expectedExists, t.count( query ) ); + assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); + assert.eq( expectedExists, hintedCount( query ) ); +} + +// $exists:false queries match the proper number of documents and disallow the sparse index. +assertMissing( { a:{ $exists:false } } ); +assertMissing( { a:{ $not:{ $exists:true } } } ); +assertMissing( { $and:[ { a:{ $exists:false } } ] } ); +assertMissing( { $or:[ { a:{ $exists:false } } ] } ); +assertMissing( { $nor:[ { a:{ $exists:true } } ] } ); +assertMissing( { 'a.x':{ $exists:false } }, 2, 1 ); + +// Currently a sparse index is disallowed even if the $exists:false query is on a different field. +assertMissing( { b:{ $exists:false } }, 2, 1 ); +assertMissing( { b:{ $exists:false }, a:{ $ne:6 } }, 2, 1 ); +assertMissing( { b:{ $not:{ $exists:true } } }, 2, 1 ); + +// Top level $exists:true queries match the proper number of documents +// and use the sparse index on { a : 1 }. +assertExists( { a:{ $exists:true } } ); + +// Nested $exists queries match the proper number of documents and disallow the sparse index. +assertExistsUnindexed( { $nor:[ { a:{ $exists:false } } ] } ); +assertExistsUnindexed( { $nor:[ { 'a.x':{ $exists:false } } ] }, 1 ); +assertExistsUnindexed( { a:{ $not:{ $exists:false } } } ); + +// Nested $exists queries disallow the sparse index in some cases where it is not strictly +// necessary to do so. (Descriptive tests.) +assertExistsUnindexed( { $nor:[ { b:{ $exists:false } } ] }, 1 ); // Unindexed field. 
+assertExists( { $or:[ { a:{ $exists:true } } ] } ); // $exists:true not $exists:false. + +// Behavior is similar with $elemMatch. +t.drop(); +t.save( { a:[ {} ] } ); +t.save( { a:[ { b:1 } ] } ); +t.save( { a:[ { b:1 } ] } ); +setIndex( 'a.b' ); + +assertMissing( { a:{ $elemMatch:{ b:{ $exists:false } } } } ); +// A $elemMatch predicate is treated as nested, and the index should be used for $exists:true. +assertExists( { a:{ $elemMatch:{ b:{ $exists:true } } } } ); + +// A non sparse index will not be disallowed. +t.drop(); +t.save( {} ); +t.ensureIndex( { a:1 } ); +assert.eq( 1, t.find( { a:{ $exists:false } } ).itcount() ); +assert.eq( 'BtreeCursor a_1', t.find( { a:{ $exists:false } } ).explain().cursor ); diff --git a/jstests/core/existsb.js b/jstests/core/existsb.js new file mode 100644 index 00000000000..a212be145c0 --- /dev/null +++ b/jstests/core/existsb.js @@ -0,0 +1,76 @@ +// Tests for $exists against documents that store a null value +// +// A document with a missing value for an indexed field +// is indexed *as if* it had the value 'null' explicitly. +// Therefore: +// { b : 1 } +// { a : null, b : 1 } +// look identical based on a standard index on { a : 1 }. +// +// -- HOWEVER!! -- +// A sparse index on { a : 1 } would include { a : null, b : 1 }, +// but would not include { b : 1 }. In this case, the two documents +// are treated equally. +// +// Also, super special edge case around sparse, compound indexes +// from Mathias: +// If we have a sparse index on { a : 1, b : 1 } +// And we insert docs {}, { a : 1 }, +// { b : 1 }, and { a : 1, b : 1 } +// everything but {} will have an index entry. +// Let's make sure we handle this properly! 
+ +t = db.jstests_existsb; +t.drop(); + +t.save( {} ); +t.save( { a: 1 } ); +t.save( { b: 1 } ); +t.save( { a: 1, b: null } ); +t.save( { a: 1, b: 1 } ); + +/** run a series of checks, just on the number of docs found */ +function checkExistsNull() { + // Basic cases + assert.eq( 3, t.count({ a:{ $exists: true }}) ); + assert.eq( 2, t.count({ a:{ $exists: false }}) ); + assert.eq( 3, t.count({ b:{ $exists: true }}) ); + assert.eq( 2, t.count({ b:{ $exists: false }}) ); + // With negations + assert.eq( 3, t.count({ a:{ $not:{ $exists: false }}}) ); + assert.eq( 2, t.count({ a:{ $not:{ $exists: true }}}) ); + assert.eq( 3, t.count({ b:{ $not:{ $exists: false }}}) ); + assert.eq( 2, t.count({ b:{ $not:{ $exists: true }}}) ); + // Both fields + assert.eq( 2, t.count({ a:1, b: { $exists: true }}) ); + assert.eq( 1, t.count({ a:1, b: { $exists: false }}) ); + assert.eq( 1, t.count({ a:{ $exists: true }, b:1}) ); + assert.eq( 1, t.count({ a:{ $exists: false }, b:1}) ); + // Both fields, both $exists + assert.eq( 2, t.count({ a:{ $exists: true }, b:{ $exists: true }}) ); + assert.eq( 1, t.count({ a:{ $exists: true }, b:{ $exists: false }}) ); + assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: true }}) ); + assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: false }}) ); +} + +// with no index, make sure we get correct results +checkExistsNull(); + +// try with a standard index +t.ensureIndex({ a : 1 }); +checkExistsNull(); + +// try with a sparse index +t.dropIndexes(); +t.ensureIndex({ a : 1 }, { sparse:true }); +checkExistsNull(); + +// try with a compound index +t.dropIndexes(); +t.ensureIndex({ a : 1, b : 1 }); +checkExistsNull(); + +// try with sparse compound index +t.dropIndexes(); +t.ensureIndex({ a : 1, b : 1 }, { sparse:true }); +checkExistsNull(); diff --git a/jstests/core/explain1.js b/jstests/core/explain1.js new file mode 100644 index 00000000000..4c92b102e38 --- /dev/null +++ b/jstests/core/explain1.js @@ -0,0 +1,48 @@ + +t = db.explain1; 
+t.drop(); + +for ( var i=0; i<100; i++ ){ + t.save( { x : i } ); +} + +q = { x : { $gt : 50 } }; + +assert.eq( 49 , t.find( q ).count() , "A" ); +assert.eq( 49 , t.find( q ).itcount() , "B" ); +assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" ); + +t.ensureIndex( { x : 1 } ); + +assert.eq( 49 , t.find( q ).count() , "D" ); +assert.eq( 49 , t.find( q ).itcount() , "E" ); +assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" ); + +assert.eq( 49 , t.find(q).explain().n , "G" ); +assert.eq( 20 , t.find(q).limit(20).explain().n , "H" ); +assert.eq( 20 , t.find(q).limit(-20).explain().n , "I" ); +assert.eq( 49 , t.find(q).batchSize(20).explain().n , "J" ); + +// verbose explain output with stats +// display index bounds + +var explainGt = t.find({x: {$gt: 5}}).explain(true); +var boundsVerboseGt = explainGt.stats.children[0].boundsVerbose; + +print('explain stats for $gt = ' + tojson(explainGt.stats)); + +var explainGte = t.find({x: {$gte: 5}}).explain(true); +var boundsVerboseGte = explainGte.stats.children[0].boundsVerbose; + +print('explain stats for $gte = ' + tojson(explainGte.stats)); + +print('index bounds for $gt = ' + tojson(explainGt.indexBounds)); +print('index bounds for $gte = ' + tojson(explainGte.indexBounds)); + +print('verbose bounds for $gt = ' + tojson(boundsVerboseGt)); +print('verbose bounds for $gte = ' + tojson(boundsVerboseGte)); + +// Since the verbose bounds are opaque, all we try to confirm is that the +// verbose bounds for $gt is different from those generated for $gte. 
+assert.neq(boundsVerboseGt, boundsVerboseGte, + 'verbose bounds for $gt and $gte should not be the same'); diff --git a/jstests/core/explain2.js b/jstests/core/explain2.js new file mode 100644 index 00000000000..b70ffdc0b1e --- /dev/null +++ b/jstests/core/explain2.js @@ -0,0 +1,27 @@ + +t = db.explain2 +t.drop(); + +t.ensureIndex( { a : 1 , b : 1 } ); + +for ( i=1; i<10; i++ ){ + t.insert( { _id : i , a : i , b : i , c : i } ); +} + +function go( q , c , b , o ){ + var e = t.find( q ).hint( {a:1,b:1} ).explain(); + assert.eq( c , e.n , "count " + tojson( q ) ) + assert.eq( b , e.nscanned , "nscanned " + tojson( q ) ) + assert.eq( o , e.nscannedObjects , "nscannedObjects " + tojson( q ) ) +} + +q = { a : { $gt : 3 } } +go( q , 6 , 6 , 6 ); + +q.b = 5 +go( q , 1 , 6 , 1 ); + +delete q.b +q.c = 5 +go( q , 1 , 6 , 6 ); + diff --git a/jstests/core/explain3.js b/jstests/core/explain3.js new file mode 100644 index 00000000000..c205e57252c --- /dev/null +++ b/jstests/core/explain3.js @@ -0,0 +1,23 @@ +/** SERVER-2451 Kill cursor while explain is yielding */ + +t = db.jstests_explain3; +t.drop(); + +t.ensureIndex( {i:1} ); +for( var i = 0; i < 10000; ++i ) { + t.save( {i:i,j:0} ); +} + +s = startParallelShell( "sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );" ); + +try { + t.find( {i:{$gt:-1},j:1} ).hint( {i:1} ).explain() +} catch (e) { + print( "got exception" ); + printjson( e ); +} + +s(); + +// Sanity check to make sure mongod didn't seg fault. +assert.eq( 10000, t.count() ); diff --git a/jstests/core/explain4.js b/jstests/core/explain4.js new file mode 100644 index 00000000000..d6d3d818a72 --- /dev/null +++ b/jstests/core/explain4.js @@ -0,0 +1,68 @@ +// Basic validation of explain output fields. + +t = db.jstests_explain4; +t.drop(); + +function checkField( explain, name, value ) { + assert( explain.hasOwnProperty( name ) ); + if ( value != null ) { + assert.eq( value, explain[ name ], name ); + // Check that the value is of the expected type. 
SERVER-5288 + assert.eq( typeof( value ), typeof( explain[ name ] ), 'type ' + name ); + } +} + +function checkNonCursorPlanFields( explain, matches, n ) { + checkField( explain, "n", n ); + checkField( explain, "nscannedObjects", matches ); + checkField( explain, "nscanned", matches ); +} + +function checkPlanFields( explain, matches, n ) { + checkField( explain, "cursor", "BasicCursor" ); + // index related fields do not appear in non-indexed plan + assert(!("indexBounds" in explain)); + checkNonCursorPlanFields( explain, matches, n ); +} + +function checkFields( matches, sort, limit ) { + cursor = t.find(); + if ( sort ) { + print("sort is {a:1}"); + cursor.sort({a:1}); + } + if ( limit ) { + print("limit = " + limit); + cursor.limit( limit ); + } + explain = cursor.explain( true ); + printjson( explain ); + checkPlanFields( explain, matches, matches > 0 ? 1 : 0 ); + checkField( explain, "scanAndOrder", sort ); + checkField( explain, "millis" ); + checkField( explain, "nYields" ); + checkField( explain, "nChunkSkips", 0 ); + checkField( explain, "isMultiKey", false ); + checkField( explain, "indexOnly", false ); + checkField( explain, "server" ); + checkField( explain, "allPlans" ); + explain.allPlans.forEach( function( x ) { checkPlanFields( x, matches, matches ); } ); +} + +checkFields( 0, false ); + +// If there's nothing in the collection, there's no point in verifying that a sort +// is done. +// checkFields( 0, true ); + +t.save( {} ); +checkFields( 1, false ); +checkFields( 1, true ); + +t.save( {} ); +checkFields( 1, false, 1 ); + +// Check basic fields with multiple clauses. +t.save( { _id:0 } ); +explain = t.find( { $or:[ { _id:0 }, { _id:1 } ] } ).explain( true ); +checkNonCursorPlanFields( explain, 1, 1 ); diff --git a/jstests/core/explain5.js b/jstests/core/explain5.js new file mode 100644 index 00000000000..a90f0726317 --- /dev/null +++ b/jstests/core/explain5.js @@ -0,0 +1,38 @@ +// Check that the explain result count does proper deduping. 
+ +t = db.jstests_explain5; +t.drop(); + +t.ensureIndex( {a:1} ); +t.ensureIndex( {b:1} ); + +t.save( {a:[1,2,3],b:[4,5,6]} ); +for( i = 0; i < 10; ++i ) { + t.save( {} ); +} + +// Check with a single in order plan. + +explain = t.find( {a:{$gt:0}} ).explain( true ); +assert.eq( 1, explain.n ); +assert.eq( 1, explain.allPlans[ 0 ].n ); + +// Check with a single out of order plan. + +explain = t.find( {a:{$gt:0}} ).sort( {z:1} ).hint( {a:1} ).explain( true ); +assert.eq( 1, explain.n ); +assert.eq( 1, explain.allPlans[ 0 ].n ); + +// Check with multiple plans. + +explain = t.find( {a:{$gt:0},b:{$gt:0}} ).explain( true ); +assert.eq( 1, explain.n ); +assert.eq( 1, explain.allPlans[ 0 ].n ); +assert.eq( 1, explain.allPlans[ 1 ].n ); + +explain = t.find( {$or:[{a:{$gt:0},b:{$gt:0}},{a:{$gt:-1},b:{$gt:-1}}]} ).explain( true ); +assert.eq( 1, explain.n ); +// Check 'n' for every alternative query plan. +for (var i = 0; i < explain.allPlans.length; ++i) { + assert.eq( 1, explain.allPlans[i].n ); +} diff --git a/jstests/core/explain6.js b/jstests/core/explain6.js new file mode 100644 index 00000000000..47d8d2fd731 --- /dev/null +++ b/jstests/core/explain6.js @@ -0,0 +1,25 @@ +// Test explain result count when a skip parameter is used. + +t = db.jstests_explain6; +t.drop(); + +t.save( {} ); +explain = t.find().skip( 1 ).explain( true ); +assert.eq( 0, explain.n ); +// With only one plan, the skip information is known for the plan. This is an arbitrary +// implementation detail, but it changes the way n is calculated. +assert.eq( 0, explain.allPlans[ 0 ].n ); + +t.ensureIndex( {a:1} ); +explain = t.find( {a:null,b:null} ).skip( 1 ).explain( true ); +assert.eq( 0, explain.n ); + +printjson( explain ); +assert.eq( 0, explain.allPlans[ 0 ].n ); + +t.dropIndexes(); +explain = t.find().skip( 1 ).sort({a:1}).explain( true ); +// Skip is applied for an in memory sort. 
+assert.eq( 0, explain.n ); +printjson(explain); +assert.eq( 0, explain.allPlans[ 0 ].n ); diff --git a/jstests/core/explain7.js b/jstests/core/explain7.js new file mode 100644 index 00000000000..df277aaf211 --- /dev/null +++ b/jstests/core/explain7.js @@ -0,0 +1,181 @@ +// Test cases for explain()'s nscannedObjects. SERVER-4161 + +t = db.jstests_explain7; +t.drop(); + +t.save( { a:1 } ); +t.ensureIndex( { a:1 } ); + +function assertExplain( expected, explain, checkAllPlans ) { + for( field in expected ) { + assert.eq( expected[ field ], explain[ field ], field ); + } + if ( checkAllPlans && explain.allPlans && explain.allPlans.length == 1 ) { + for( field in expected ) { + assert.eq( expected[ field ], explain.allPlans[ 0 ][ field ], field ); + } + } + return explain; +} + +function assertHintedExplain( expected, cursor ) { + return assertExplain( expected, cursor.hint( { a:1 } ).explain( true ), true ); +} + +function assertUnhintedExplain( expected, cursor, checkAllPlans ) { + return assertExplain( expected, cursor.explain( true ), checkAllPlans ); +} + +// Standard query. +assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, + t.find( { a:1 } ) ); + +// Covered index query. +assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 /* no object loaded */ }, + t.find( { a:1 }, { _id:0, a:1 } ) ); + +// Covered index query, but matching requires loading document. +assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, + t.find( { a:1, b:null }, { _id:0, a:1 } ) ); + +// $returnKey query. +assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 }, + t.find( { a:1 } )._addSpecial( "$returnKey", true ) ); + +// $returnKey query but matching requires loading document. +assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, + t.find( { a:1, b:null } )._addSpecial( "$returnKey", true ) ); + +// Skip a result. +assertHintedExplain( { n:0, nscanned:1, nscannedObjects:1 }, + t.find( { a:1 } ).skip( 1 ) ); + +// Cursor sorted covered index query. 
+assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0, scanAndOrder:false }, + t.find( { a:1 }, { _id:0, a:1 } ).sort( { a:1 } ) ); + +t.dropIndex( { a:1 } ); +t.ensureIndex( { a:1, b:1 } ); + +// In memory sort covered index query. +assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1, scanAndOrder:true }, + t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) + .hint( { a:1, b:1 } ) ); + +// In memory sort $returnKey query. +assertUnhintedExplain( { n:1, nscanned:1, scanAndOrder:true }, + t.find( { a:{ $gt:0 } } )._addSpecial( "$returnKey", true ).sort( { b:1 } ) + .hint( { a:1, b:1 } ) ); + +// In memory sort with skip. +assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 /* The record is still loaded. */ }, + t.find( { a:{ $gt:0 } } ).sort( { b:1 } ).skip( 1 ).hint( { a:1, b:1 } ), + false ); + +// With a multikey index. +t.drop(); +t.ensureIndex( { a:1 } ); +t.save( { a:[ 1, 2 ] } ); + +assertHintedExplain( { n:1, scanAndOrder:false }, + t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ) ); +assertHintedExplain( { n:1, scanAndOrder:true }, + t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) ); + +// Dedup matches from multiple query plans. +t.drop(); +t.ensureIndex( { a:1, b:1 } ); +t.ensureIndex( { b:1, a:1 } ); +t.save( { a:1, b:1 } ); + +// Document matched by three query plans. +assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, + t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ) ); + +// Document matched by three query plans, with sorting. +assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, + t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).sort( { c:1 } ) ); + +// Document matched by three query plans, with a skip. +assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 }, + t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).skip( 1 ) ); + +// Hybrid ordered and unordered plans. + +t.drop(); +t.ensureIndex( { a:1, b:1 } ); +t.ensureIndex( { b:1 } ); +for( i = 0; i < 30; ++i ) { + t.save( { a:i, b:i } ); +} + +// Ordered plan chosen. 
+assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:30, + scanAndOrder:false }, + t.find( { b:{ $gte:0 } } ).sort( { a:1 } ) ); + +// SERVER-12769: When an index is used to provide a sort, our covering +// analysis isn't good. This could execute as a covered query, but currently +// does not. +/* +// Ordered plan chosen with a covered index. +//assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:0, + //scanAndOrder:false }, + //t.find( { b:{ $gte:0 } }, { _id:0, b:1 } ).sort( { a:1 } ) ); +*/ + +// Ordered plan chosen, with a skip. Skip is not included in counting nscannedObjects for a single +// plan. +assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:29, nscanned:30, nscannedObjects:30, + scanAndOrder:false }, + t.find( { b:{ $gte:0 } } ).sort( { a:1 } ).skip( 1 ) ); + +// Unordered plan chosen. +assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, + //nscannedObjects:1, nscannedObjectsAllPlans:2, + scanAndOrder:true }, + t.find( { b:1 } ).sort( { a:1 } ) ); + +// Unordered plan chosen and projected. +assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1, + scanAndOrder:true }, + t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) ); + +// Unordered plan chosen, with a skip. +// Note that all plans are equally unproductive here, so we can't test which one is picked reliably. +assertUnhintedExplain( { n:0 }, + t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ).skip( 1 ) ); + +// Unordered plan chosen, $returnKey specified. +assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, scanAndOrder:true }, + t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) + ._addSpecial( "$returnKey", true ) ); + +// Unordered plan chosen, $returnKey specified, matching requires loading document. 
+assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1, + scanAndOrder:true }, + t.find( { b:1, c:null }, { _id:0, b:1 } ).sort( { a:1 } ) + ._addSpecial( "$returnKey", true ) ); + +t.ensureIndex( { a:1, b:1, c:1 } ); + +// Documents matched by four query plans. +assertUnhintedExplain( { n:30, nscanned:30, nscannedObjects:30, + //nscannedObjectsAllPlans:90 // Not 120 because deduping occurs before + // loading results. + }, + t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).sort( { b:1 } ) ); + +for( i = 30; i < 150; ++i ) { + t.save( { a:i, b:i } ); +} + +explain = assertUnhintedExplain( { n:150}, + t.find( { $or:[ { a:{ $gte:-1, $lte:200 }, + b:{ $gte:0, $lte:201 } }, + { a:{ $gte:0, $lte:201 }, + b:{ $gte:-1, $lte:200 } } ] }, + { _id:0, a:1, b:1 } ).hint( { a:1, b:1 } ) ); +printjson(explain); +// Check nscannedObjects for each clause. +assert.eq( 0, explain.clauses[ 0 ].nscannedObjects ); diff --git a/jstests/core/explain8.js b/jstests/core/explain8.js new file mode 100644 index 00000000000..fde6adbd8f4 --- /dev/null +++ b/jstests/core/explain8.js @@ -0,0 +1,24 @@ +// Test calculation of the 'millis' field in explain output. + +t = db.jstests_explain8; +t.drop(); + +t.ensureIndex( { a:1 } ); +for( i = 1000; i < 4000; i += 1000 ) { + t.save( { a:i } ); +} + +// Run a query with one $or clause per a-value, each of which sleeps for 'a' milliseconds. +function slow() { + sleep( this.a ); + return true; +} +clauses = []; +for( i = 1000; i < 4000; i += 1000 ) { + clauses.push( { a:i, $where:slow } ); +} +explain = t.find( { $or:clauses } ).explain( true ); +//printjson( explain ); + +// Verify the duration of the whole query, and of each clause. 
+assert.gt( explain.millis, 1000 - 500 + 2000 - 500 + 3000 - 500 ); diff --git a/jstests/core/explain9.js b/jstests/core/explain9.js new file mode 100644 index 00000000000..80cab856aa7 --- /dev/null +++ b/jstests/core/explain9.js @@ -0,0 +1,24 @@ +// Test that limit is applied by explain when there are both in order and out of order candidate +// plans. SERVER-4150 + +t = db.jstests_explain9; +t.drop(); + +t.ensureIndex( { a:1 } ); + +for( i = 0; i < 10; ++i ) { + t.save( { a:i, b:0 } ); +} + +explain = t.find( { a:{ $gte:0 }, b:0 } ).sort( { a:1 } ).limit( 5 ).explain( true ); +// Five results are expected, matching the limit spec. +assert.eq( 5, explain.n ); +explain.allPlans.forEach( function( x ) { + // Five results are expected for the in order plan. + if ( x.cursor == "BtreeCursor a_1" ) { + assert.eq( 5, x.n ); + } + else { + assert.gte( 5, x.n ); + } + } ); diff --git a/jstests/core/explain_batch_size.js b/jstests/core/explain_batch_size.js new file mode 100644 index 00000000000..65bc1df40d7 --- /dev/null +++ b/jstests/core/explain_batch_size.js @@ -0,0 +1,19 @@ +// minimal test to check handling of batch size when explain info is requested +// expected behavior is to return explain.n = total number of documents matching query +// batch size is also tested in another smoke test jstest/explain1.js but that test +// also covers the use of an indexed collection and includes a couple of test cases +// using limit() + +t = db.explain_batch_size; +t.drop(); + +n = 3 +for (i=0; i 110 ) { + cursor = makeCursor( query, {}, sort, batchSize, true ); + lastNonAIndexResult = -1; + for( i = 0; i < expectedLeftInBatch; ++i ) { + next = cursor.next(); + // Identify the query plan used by checking the fields of a returnKey query. + if ( !friendlyEqual( [ 'a', '_id' ], Object.keySet( next ) ) ) { + lastNonAIndexResult = i; + } + } + // The last results should come from the a,_id index. 
+ assert.lt( lastNonAIndexResult, expectedLeftInBatch - 5 ); + } +} + +function queryWithPlanTypes( withDups ) { + t.drop(); + for( i = 1; i < numDocs; ++i ) { + t.save( { _id:i, a:i, b:0 } ); + } + if ( withDups ) { + t.save( { _id:0, a:[ 0, numDocs ], b:0 } ); // Add a dup on a:1 index. + } + else { + t.save( { _id:0, a:0, b:0 } ); + } + t.ensureIndex( { a:1, _id:1 } ); // Include _id for a covered index projection. + + // All plans in order. + checkCursorWithBatchSize( { a:{ $gte:0 } }, null, 150, 150 ); + + // All plans out of order. + checkCursorWithBatchSize( { a:{ $gte:0 } }, { c:1 }, null, 101 ); + + // Some plans in order, some out of order. + checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, 150, 150 ); + checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, null, 101 ); +} + +queryWithPlanTypes( false ); +queryWithPlanTypes( true ); diff --git a/jstests/core/fm1.js b/jstests/core/fm1.js new file mode 100644 index 00000000000..bc60a3d8911 --- /dev/null +++ b/jstests/core/fm1.js @@ -0,0 +1,12 @@ + +t = db.fm1; +t.drop(); + +t.insert({foo:{bar:1}}) +t.find({},{foo:1}).toArray(); +t.find({},{'foo.bar':1}).toArray(); +t.find({},{'baz':1}).toArray(); +t.find({},{'baz.qux':1}).toArray(); +t.find({},{'foo.qux':1}).toArray(); + + diff --git a/jstests/core/fm2.js b/jstests/core/fm2.js new file mode 100644 index 00000000000..00ccdf4afee --- /dev/null +++ b/jstests/core/fm2.js @@ -0,0 +1,9 @@ + +t = db.fm2 +t.drop(); + +t.insert( { "one" : { "two" : {"three":"four"} } } ); + +x = t.find({},{"one.two":1})[0] +assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" ); + diff --git a/jstests/core/fm3.js b/jstests/core/fm3.js new file mode 100644 index 00000000000..8ccde6d5ab3 --- /dev/null +++ b/jstests/core/fm3.js @@ -0,0 +1,37 @@ +t = db.fm3 +t.drop(); + +t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} ); + + +res = t.findOne({}, {a:1}); +assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a"); +assert.eq(res.b, undefined, "one b"); + +res = t.findOne({}, 
{a:0}); +assert.eq(res.a, undefined, "two a"); +assert.eq(res.b, 1, "two b"); + +res = t.findOne({}, {'a.d':1}); +assert.eq(res.a, [{}, {d:2}], "three a"); +assert.eq(res.b, undefined, "three b"); + +res = t.findOne({}, {'a.d':0}); +assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a"); +assert.eq(res.b, 1, "four b"); + +res = t.findOne({}, {'a.c':1}); +assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a"); +assert.eq(res.b, undefined, "five b"); + +res = t.findOne({}, {'a.c':0}); +assert.eq(res.a, [{}, {d:2}, 'z'], "six a"); +assert.eq(res.b, 1, "six b"); + +res = t.findOne({}, {'a.c.e':1}); +assert.eq(res.a, [{c:{e:1}}, {}], "seven a"); +assert.eq(res.b, undefined, "seven b"); + +res = t.findOne({}, {'a.c.e':0}); +assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a"); +assert.eq(res.b, 1, "eight b"); diff --git a/jstests/core/fm4.js b/jstests/core/fm4.js new file mode 100644 index 00000000000..1ce947ad5e7 --- /dev/null +++ b/jstests/core/fm4.js @@ -0,0 +1,16 @@ +t = db.fm4 +t.drop(); + +t.insert({_id:1, a:1, b:1}); + +assert.eq( t.findOne({}, {_id:1}), {_id:1}, 1) +assert.eq( t.findOne({}, {_id:0}), {a:1, b:1}, 2) + +assert.eq( t.findOne({}, {_id:1, a:1}), {_id:1, a:1}, 3) +assert.eq( t.findOne({}, {_id:0, a:1}), {a:1}, 4) + +assert.eq( t.findOne({}, {_id:0, a:0}), {b:1}, 6) +assert.eq( t.findOne({}, { a:0}), {_id:1, b:1}, 5) + +// not sure if we want to suport this since it is the same as above +//assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5) diff --git a/jstests/core/fsync.js b/jstests/core/fsync.js new file mode 100644 index 00000000000..9238c992466 --- /dev/null +++ b/jstests/core/fsync.js @@ -0,0 +1,21 @@ +// test the lock/unlock snapshotting feature a bit + +x=db.runCommand({fsync:1,lock:1}); // not on admin db +assert(!x.ok,"D"); + +x=db.fsyncLock(); // uses admin automatically + +assert(x.ok,"C"); + +y = db.currentOp(); +assert(y.fsyncLock,"B"); + +z = db.fsyncUnlock(); +assert( db.currentOp().fsyncLock == null, "A2" ); + +// make sure the db is 
unlocked +db.jstests_fsync.insert({x:1}); + +assert( db.currentOp().fsyncLock == null, "A" ); + +assert( !db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail." ) diff --git a/jstests/core/fts1.js b/jstests/core/fts1.js new file mode 100644 index 00000000000..6bd138d6c25 --- /dev/null +++ b/jstests/core/fts1.js @@ -0,0 +1,29 @@ +load( "jstests/libs/fts.js" ); + +t = db.text1; +t.drop(); + +// this test requires usePowerOf2Sizes to be off +db.createCollection( t.getName(), {"usePowerOf2Sizes" : false } ); +assert.eq(0, t.stats().userFlags); + +assert.eq( [] , queryIDS( t , "az" ) , "A0" ); + +t.save( { _id : 1 , x : "az b c" } ); +t.save( { _id : 2 , x : "az b" } ); +t.save( { _id : 3 , x : "b c" } ); +t.save( { _id : 4 , x : "b c d" } ); + +assert.eq(t.stats().userFlags, 0, + "A new collection should not have power-of-2 storage allocation strategy"); +t.ensureIndex( { x : "text" } ); +assert.eq(t.stats().userFlags, 1, + "Creating a text index on a collection should change the allocation strategy " + + "to power-of-2."); + +assert.eq( [1,2,3,4] , queryIDS( t , "c az" ) , "A1" ); +assert.eq( [4] , queryIDS( t , "d" ) , "A2" ); + +idx = db.system.indexes.findOne( { ns: t.getFullName(), "weights.x" : 1 } ) +assert( idx.v >= 1, tojson( idx ) ) +assert( idx.textIndexVersion >= 1, tojson( idx ) ) diff --git a/jstests/core/fts2.js b/jstests/core/fts2.js new file mode 100644 index 00000000000..e0e7469fa5e --- /dev/null +++ b/jstests/core/fts2.js @@ -0,0 +1,24 @@ + +load( "jstests/libs/fts.js" ); + +t = db.text2; +t.drop(); + +t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } ); +t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } ); + +t.ensureIndex( { x : "text" } , { weights : { x : 10 , y : 1 } } ); + +assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); +assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); + +assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); +assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); + +assert.eq( [1] , queryIDS( t , "az" , { z : 1 } 
) , "B1" ); +assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" ); + +printjson(lastCommadResult); +assert.eq( 2 , lastCommadResult.stats.nscannedObjects , "B3" ); +assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); + diff --git a/jstests/core/fts3.js b/jstests/core/fts3.js new file mode 100644 index 00000000000..f5f72c4df0a --- /dev/null +++ b/jstests/core/fts3.js @@ -0,0 +1,22 @@ + +load( "jstests/libs/fts.js" ); + +t = db.text3; +t.drop(); + +t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } ); +t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } ); + +t.ensureIndex( { x : "text" , z : 1 } , { weights : { x : 10 , y : 1 } } ); + +assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); +assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); + +assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); +assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); + +assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" ); +assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" ); + +assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); +assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/core/fts4.js b/jstests/core/fts4.js new file mode 100644 index 00000000000..8598457b033 --- /dev/null +++ b/jstests/core/fts4.js @@ -0,0 +1,22 @@ + +load( "jstests/libs/fts.js" ); + +t = db.text4; +t.drop(); + +t.save( { _id : 1 , x : [ "az" , "b" , "x" ] , y : [ "c" , "d" , "m" ] , z : 1 } ); +t.save( { _id : 2 , x : [ "c" , "d" , "y" ] , y : [ "az" , "b" , "n" ] , z : 2 } ); + +t.ensureIndex( { y : "text" , z : 1 } , { weights : { x : 10 } } ); + +assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); +assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); + +assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); +assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); + +assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" ); +assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" ); + +assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); +assert.eq( 2 , 
lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/core/fts5.js b/jstests/core/fts5.js new file mode 100644 index 00000000000..a3097b47a4a --- /dev/null +++ b/jstests/core/fts5.js @@ -0,0 +1,22 @@ + +load( "jstests/libs/fts.js" ); + +t = db.text5; +t.drop(); + +t.save( { _id: 1 , x: [ { a: "az" } , { a: "b" } , { a: "x" } ] , y: [ "c" , "d" , "m" ] , z: 1 } ); +t.save( { _id: 2 , x: [ { a: "c" } , { a: "d" } , { a: "y" } ] , y: [ "az" , "b" , "n" ] , z: 2 } ); + +t.ensureIndex( { y: "text" , z: 1 } , { weights: { "x.a": 10 } } ); + +assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); +assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); + +assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); +assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); + +assert.eq( [1] , queryIDS( t , "az" , { z: 1 } ) , "B1" ); +assert.eq( [1] , queryIDS( t , "d" , { z: 1 } ) , "B2" ); + +assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); +assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/core/fts_blog.js b/jstests/core/fts_blog.js new file mode 100644 index 00000000000..38cbb826eff --- /dev/null +++ b/jstests/core/fts_blog.js @@ -0,0 +1,26 @@ +t = db.text_blog; +t.drop(); + +t.save( { _id : 1 , title : "my blog post" , text : "this is a new blog i am writing. yay" } ); +t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am writing. yay" } ); +t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. 
yay" } ); + +// default weight is 1 +// specify weights if you want a field to be more meaningull +t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } ); + +res = t.runCommand( "text" , { search : "blog" } ) +assert.eq( 3, res.results.length ); +assert.eq( 1, res.results[0].obj._id ); + +res = t.runCommand( "text" , { search : "write" } ) +assert.eq( 3, res.results.length ); +assert.eq( res.results[0].score, res.results[1].score ); +assert.eq( res.results[0].score, res.results[2].score ); + + + + + + + diff --git a/jstests/core/fts_blogwild.js b/jstests/core/fts_blogwild.js new file mode 100644 index 00000000000..ecad0ce0b19 --- /dev/null +++ b/jstests/core/fts_blogwild.js @@ -0,0 +1,40 @@ +t = db.text_blogwild; +t.drop(); + +t.save( { _id: 1 , title: "my blog post" , text: "this is a new blog i am writing. yay eliot" } ); +t.save( { _id: 2 , title: "my 2nd post" , text: "this is a new blog i am writing. yay" } ); +t.save( { _id: 3 , title: "knives are Fun for writing eliot" , text: "this is a new blog i am writing. 
yay" } ); + +// default weight is 1 +// specify weights if you want a field to be more meaningull +t.ensureIndex( { dummy: "text" } , { weights: "$**" } ); + +res = t.runCommand( "text" , { search: "blog" } ); +assert.eq( 3 , res.stats.n , "A1" ); + +res = t.runCommand( "text" , { search: "write" } ); +assert.eq( 3 , res.stats.n , "B1" ); + +// mixing +t.dropIndex( "dummy_text" ); +assert.eq( 1 , t.getIndexKeys().length , "C1" ); +t.ensureIndex( { dummy: "text" } , { weights: { "$**": 1 , title: 2 } } ); + + +res = t.runCommand( "text" , { search: "write" } ); +assert.eq( 3 , res.stats.n , "C2" ); +assert.eq( 3 , res.results[0].obj._id , "C3" ); + +res = t.runCommand( "text" , { search: "blog" } ); +assert.eq( 3 , res.stats.n , "D1" ); +assert.eq( 1 , res.results[0].obj._id , "D2" ); + +res = t.runCommand( "text" , { search: "eliot" } ); +assert.eq( 2 , res.stats.n , "E1" ); +assert.eq( 3 , res.results[0].obj._id , "E2" ); + + + + + + diff --git a/jstests/core/fts_enabled.js b/jstests/core/fts_enabled.js new file mode 100644 index 00000000000..8617caff59f --- /dev/null +++ b/jstests/core/fts_enabled.js @@ -0,0 +1,5 @@ +// Test that the textSearchEnabled server parameter works correctly (now deprecated). + +// Value true is accepted, value false is rejected. +assert.commandWorked(db.adminCommand({setParameter: 1, textSearchEnabled: true})); +assert.commandFailed(db.adminCommand({setParameter: 1, textSearchEnabled: false})); diff --git a/jstests/core/fts_explain.js b/jstests/core/fts_explain.js new file mode 100644 index 00000000000..0d9c1fd7a9d --- /dev/null +++ b/jstests/core/fts_explain.js @@ -0,0 +1,18 @@ +// Test $text explain. SERVER-12037. 
+ +var coll = db.fts_explain; + +coll.drop(); +coll.ensureIndex({content: "text"}, {default_language: "none"}); +assert.gleSuccess(db); + +coll.insert({content: "some data"}); +assert.gleSuccess(db); + +var explain = coll.find({$text:{$search: "\"a\" -b -\"c\""}}).explain(true); +assert.eq(explain.cursor, "TextCursor"); +assert.eq(explain.stats.type, "TEXT"); +assert.eq(explain.stats.parsedTextQuery.terms, ["a"]); +assert.eq(explain.stats.parsedTextQuery.negatedTerms, ["b"]); +assert.eq(explain.stats.parsedTextQuery.phrases, ["a"]); +assert.eq(explain.stats.parsedTextQuery.negatedPhrases, ["c"]); diff --git a/jstests/core/fts_index.js b/jstests/core/fts_index.js new file mode 100644 index 00000000000..ab1971816bd --- /dev/null +++ b/jstests/core/fts_index.js @@ -0,0 +1,110 @@ +// Test that: +// 1. Text indexes properly validate the index spec used to create them. +// 2. Text indexes properly enforce a schema on the language_override field. +// 3. Collections may have at most one text index. +// 4. Text indexes properly handle large documents. + +var coll = db.fts_index; +var indexName = "textIndex"; +coll.drop(); +coll.getDB().createCollection(coll.getName()); + +// +// 1. Text indexes properly validate the index spec used to create them. +// + +// Spec passes text-specific index validation. +assert.writeOK(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanish"})); +assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); +coll.dropIndexes(); + +// Spec fails text-specific index validation ("spanglish" unrecognized). +assert.writeError(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanglish"})); +assert.eq(0, coll.system.indexes.count({ns: coll.getFullName(), name: indexName})); +coll.dropIndexes(); + +// Spec passes general index validation. 
+assert.writeOK(coll.ensureIndex({"$**": "text"}, {name: indexName})); +assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); +coll.dropIndexes(); + +// Spec fails general index validation ("a.$**" invalid field name for key). +assert.writeError(coll.ensureIndex({"a.$**": "text"}, {name: indexName})); +assert.eq(0, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); +coll.dropIndexes(); + +// +// 2. Text indexes properly enforce a schema on the language_override field. +// + +// Can create a text index on a collection where no documents have invalid language_override. +coll.insert({a: ""}); +coll.insert({a: "", language: "spanish"}); +assert.writeOK(coll.ensureIndex({a: "text"})); +coll.drop(); + +// Can't create a text index on a collection containing document with an invalid language_override. +coll.insert({a: "", language: "spanglish"}); +assert.writeError(coll.ensureIndex({a: "text"})); +coll.drop(); + +// Can insert documents with valid language_override into text-indexed collection. +assert.writeOK(coll.ensureIndex({a: "text"})); +coll.insert({a: ""}); +assert.writeOK( coll.insert({a: "", language: "spanish"})); +coll.drop(); + +// Can't insert documents with invalid language_override into text-indexed collection. +assert.writeOK(coll.ensureIndex({a: "text"})); +assert.writeError( coll.insert({a: "", language: "spanglish"})); +coll.drop(); + +// +// 3. Collections may have at most one text index. +// +assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1})); +assert.eq(2, coll.getIndexes().length); + +// ensureIndex() becomes a no-op on an equivalent index spec. 
+assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1})); +assert.eq(2, coll.getIndexes().length); +assert.writeOK(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {b: 1}})); +assert.eq(2, coll.getIndexes().length); +assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "english"})); +assert.eq(2, coll.getIndexes().length); +assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 2})); +assert.eq(2, coll.getIndexes().length); +assert.writeOK(coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "language"})); +assert.eq(2, coll.getIndexes().length); + +// ensureIndex() fails if a second text index would be built. +assert.writeError(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {d: 1}})); +assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "none"})); +assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 1})); +assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "idioma"})); +assert.writeError(coll.ensureIndex({a: 1, b: "text", c: 1}, {weights: {d: 1}})); +assert.writeError(coll.ensureIndex({a: 1, b: "text", d: 1})); +assert.writeError(coll.ensureIndex({a: 1, d: "text", c: 1})); +assert.writeError(coll.ensureIndex({b: "text"})); +assert.writeError(coll.ensureIndex({b: "text", c: 1})); +assert.writeError(coll.ensureIndex({a: 1, b: "text"})); + +coll.dropIndexes(); + +// +// 4. Text indexes properly handle large keys. 
+// + +assert.writeOK(coll.ensureIndex({a: "text"})); + +var longstring = ""; +var longstring2 = ""; +for(var i = 0; i < 1024 * 1024; ++i) { + longstring = longstring + "a"; + longstring2 = longstring2 + "b"; +} +coll.insert({a: longstring}); +coll.insert({a: longstring2}); +assert.eq(1, coll.find({$text: {$search: longstring}}).itcount(), "long string not found in index"); + +coll.drop(); diff --git a/jstests/core/fts_mix.js b/jstests/core/fts_mix.js new file mode 100644 index 00000000000..56da123cdc3 --- /dev/null +++ b/jstests/core/fts_mix.js @@ -0,0 +1,159 @@ + +load( "jstests/libs/fts.js" ); + +// test collection +tc = db.text_mix; +tc.drop(); + +// creation of collection documents +// content generated using wikipedia random article +tc.save( { _id: 1, title: "Olivia Shakespear",text: "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr." } ); +tc.save( { _id: 2, title: "Mahim Bora", text: "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty." 
} ); +tc.save( { _id: 3, title: "A break away!", text: "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s." } ); +tc.save( { _id: 4, title: "Linn-Kristin Riegelhuth Koren", text: "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse." } ); +tc.save( { _id: 5, title: "Morten Jensen", text: "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller." 
} ); +tc.save( { _id: 6, title: "Janet Laurence", text: "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize." } ); +tc.save( { _id: 7, title: "Glen-Coats Baronets", text: "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet." } ); +tc.save( { _id: 8, title: "Grapeleaf Skeletonizer", text: "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. 
It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin." } ); +tc.save( { _id: 9, title: "Physics World", text: "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter." } ); +tc.save( { _id: 10, title: "Mallacoota, Victoria", text: "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. 
It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia." } ); + +// begin tests + +// -------------------------------------------- INDEXING & WEIGHTING ------------------------------- + +// start with basic index, one item with default weight +tc.ensureIndex( { "title": "text" } ); + +// test the single result case.. 
+res = tc.runCommand( "text", { search: "Victoria" } ); +assert.eq( 1, res.results.length ); +assert.eq( 10, res.results[0].obj._id ); + +tc.dropIndexes(); + +// now let's see about multiple fields, with specific weighting +tc.ensureIndex( { "title": "text", "text": "text" }, { weights: { "title": 10 } } ); +assert.eq( [9,7,8], queryIDS( tc, "members physics" ) ); + +tc.dropIndexes(); + +// test all-1 weighting with "$**" +tc.ensureIndex( { "$**": "text" } ); +assert.eq( [2,8,7], queryIDS( tc, "family tea estate" ) ); + +tc.dropIndexes(); + +// non-1 weight on "$**" + other weight specified for some field +tc.ensureIndex( { "$**": "text" }, { weights: { "$**": 10, "text": 2 } } ); +assert.eq( [7,5], queryIDS( tc, "Olympic Games gold medal" ) ); + +tc.dropIndexes(); + +// -------------------------------------------- SEARCHING ------------------------------------------ + +// go back to "$**": 1, "title": 10.. and test more specific search functionality! +tc.ensureIndex( { "$**": "text" }, { weights: { "title": 10 } } ); + +// -------------------------------------------- STEMMING ------------------------------------------- + +// tests stemming for basic plural case +res = tc.runCommand( "text", { search: "member" } ); +res2 = tc.runCommand( "text", { search: "members" } ); +assert.eq( getIDS( res ), getIDS( res2 ) ); + +// search for something with potential 's bug. 
+res = tc.runCommand( "text", { search: "magazine's" } ); +res2 = tc.runCommand( "text", { search: "magazine" } ); +assert.eq( getIDS( res ), getIDS( res2 ) ); + +// -------------------------------------------- LANGUAGE ------------------------------------------- + +res = tc.runCommand( "text", { search: "member", language: "spanglish" } ); +assert.commandFailed( res ); +res = tc.runCommand( "text", { search: "member", language: "english" } ); +assert.commandWorked( res ); + +// -------------------------------------------- LIMIT RESULTS -------------------------------------- + +// ensure limit limits results +assert.eq( [2], queryIDS( tc, "rural river dam", null , { limit : 1 } ) ); + +// ensure top results are the same regardless of limit +// make sure that this uses a case where it wouldn't be otherwise.. +res = tc.runCommand( "text", { search: "united kingdom british princes", limit: 1 } ); +res2 = tc.runCommand( "text", { search: "united kingdom british princes" } ); +assert.eq( 1, res.results.length ); +assert.eq( 4, res2.results.length ); +assert.eq( res.results[0].obj._id, res2.results[0].obj._id ); + +// -------------------------------------------- PROJECTION ----------------------------------------- + +// test projection.. show just title and id +res = tc.runCommand( "text", { search: "Morten Jensen", project: { title: 1 } } ); +assert.eq( 1, res.results.length ); +assert.eq( 5, res.results[0].obj._id ); +assert.eq( null, res.results[0].obj.text ); +assert.neq( null, res.results[0].obj.title ); +assert.neq( null, res.results[0].obj._id ); + +// test negative projection, ie. 
show everything but text +res = tc.runCommand( "text", { search: "handball", project: { text: 0 } } ); +assert.eq( 1, res.results.length ); +assert.eq( 4, res.results[0].obj._id ); +assert.eq( null, res.results[0].obj.text ); +assert.neq( null, res.results[0].obj.title ); +assert.neq( null, res.results[0].obj._id ); + +// test projection only title, no id +res = tc.runCommand( "text", { search: "Mahim Bora", project: { _id: 0, title: 1 } } ); +assert.eq( 1, res.results.length ); +assert.eq( "Mahim Bora", res.results[0].obj.title ); +assert.eq( null, res.results[0].obj.text ); +assert.neq( null, res.results[0].obj.title ); +assert.eq( null, res.results[0].obj._id ); + +// -------------------------------------------- NEGATION ------------------------------------------- + +// test negation +assert.eq( [8], queryIDS( tc, "United -Kingdom" ) ); +assert.eq( -1, tc.findOne( { _id : 8 } ).text.search(/Kingdom/i) ); + +// test negation edge cases... hyphens, double dash, etc. +assert.eq( [4], queryIDS( tc, "Linn-Kristin" ) ); + +// -------------------------------------------- PHRASE MATCHING ------------------------------------ + +// test exact phrase matching on +assert.eq( [7], queryIDS( tc, "\"Summer Olympics\"" ) ); +assert.neq( -1, tc.findOne( { _id: 7 } ).text.indexOf("Summer Olympics") ); + +// phrasematch with other stuff.. negation, other terms, etc. 
+assert.eq( [10], queryIDS( tc, "\"wild flowers\" Sydney" ) ); + +assert.eq( [3], queryIDS( tc, "\"industry\" -Melbourne -Physics" ) ); + +// -------------------------------------------- EDGE CASES ----------------------------------------- + +// test empty string +res = tc.runCommand( "text", { search: "" } ); +assert.eq( 0, res.ok ) + +// test string with a space in it +res = tc.runCommand( "text", { search: " " } ); +assert.eq( 0, res.results.length ); + +// -------------------------------------------- FILTERING ------------------------------------------ + +assert.eq( [2], queryIDS( tc, "Mahim" ) ); +assert.eq( [2], queryIDS( tc, "Mahim", { _id: 2 } ) ); +assert.eq( [], queryIDS( tc, "Mahim", { _id: 1 } ) ); +assert.eq( [], queryIDS( tc, "Mahim", { _id: { $gte: 4 } } ) ); +assert.eq( [2], queryIDS( tc, "Mahim", { _id: { $lte: 4 } } ) ); + +// using regex conditional filtering +assert.eq( [9], queryIDS( tc, "members", { title: { $regex: /Phy.*/i } } ) ); + +// ------------------------------------------------------------------------------------------------- + +assert( tc.validate().valid ); diff --git a/jstests/core/fts_partition1.js b/jstests/core/fts_partition1.js new file mode 100644 index 00000000000..f1b4c437c3c --- /dev/null +++ b/jstests/core/fts_partition1.js @@ -0,0 +1,23 @@ +load( "jstests/libs/fts.js" ) + +t = db.text_parition1; +t.drop(); + +t.insert( { _id : 1 , x : 1 , y : "foo" } ); +t.insert( { _id : 2 , x : 1 , y : "bar" } ); +t.insert( { _id : 3 , x : 2 , y : "foo" } ); +t.insert( { _id : 4 , x : 2 , y : "bar" } ); + +t.ensureIndex( { x : 1, y : "text" } ); + +res = t.runCommand( "text", { search : "foo" } ); +assert.eq( 0, res.ok, tojson(res) ); + +assert.eq( [ 1 ], queryIDS( t, "foo" , { x : 1 } ) ); + +res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } } ); +assert( res.results[0].score > 0, tojson( res ) ) + +// repeat search with "language" specified, SERVER-8999 +res = t.runCommand( "text", { search : "foo" , filter : { x : 1 
} , language : "english" } ); +assert( res.results[0].score > 0, tojson( res ) ) diff --git a/jstests/core/fts_partition_no_multikey.js b/jstests/core/fts_partition_no_multikey.js new file mode 100644 index 00000000000..29b9c371612 --- /dev/null +++ b/jstests/core/fts_partition_no_multikey.js @@ -0,0 +1,13 @@ + +t = db.fts_partition_no_multikey; +t.drop(); + +t.ensureIndex( { x : 1, y : "text" } ) + +assert.writeOK( t.insert( { x : 5 , y : "this is fun" } )); + +assert.writeError( t.insert( { x : [] , y : "this is fun" } )); + +assert.writeError( t.insert( { x : [1] , y : "this is fun" } )); + +assert.writeError( t.insert( { x : [1,2] , y : "this is fun" } )); diff --git a/jstests/core/fts_phrase.js b/jstests/core/fts_phrase.js new file mode 100644 index 00000000000..0b58bef817e --- /dev/null +++ b/jstests/core/fts_phrase.js @@ -0,0 +1,25 @@ + +t = db.text_phrase; +t.drop() + +t.save( { _id : 1 , title : "my blog post" , text : "i am writing a blog. yay" } ); +t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am typing. yay" } ); +t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. 
yay" } ); + +t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } ); + +res = t.runCommand( "text" , { search : "blog write" } ); +assert.eq( 3, res.results.length ); +assert.eq( 1, res.results[0].obj._id ); +assert( res.results[0].score > (res.results[1].score*2), tojson(res) ); + +res = t.runCommand( "text" , { search : "write blog" } ); +assert.eq( 3, res.results.length ); +assert.eq( 1, res.results[0].obj._id ); +assert( res.results[0].score > (res.results[1].score*2), tojson(res) ); + + + + + + diff --git a/jstests/core/fts_proj.js b/jstests/core/fts_proj.js new file mode 100644 index 00000000000..1ecc6688d1b --- /dev/null +++ b/jstests/core/fts_proj.js @@ -0,0 +1,20 @@ +t = db.text_proj; +t.drop(); + +t.save( { _id : 1 , x : "a", y: "b", z : "c"}); +t.save( { _id : 2 , x : "d", y: "e", z : "f"}); +t.save( { _id : 3 , x : "a", y: "g", z : "h"}); + +t.ensureIndex( { x : "text"} , { default_language : "none" } ); + +res = t.runCommand("text", {search : "a"}); +assert.eq( 2, res.results.length ); +assert( res.results[0].obj.y, tojson(res) ); + +res = t.runCommand("text", {search : "a", project: {x: 1}}); +assert.eq( 2, res.results.length ); +assert( !res.results[0].obj.y, tojson(res) ); + + + + diff --git a/jstests/core/fts_projection.js b/jstests/core/fts_projection.js new file mode 100644 index 00000000000..9bdb9dbca8a --- /dev/null +++ b/jstests/core/fts_projection.js @@ -0,0 +1,99 @@ +// Test $text with $textScore projection. + +var t = db.getSiblingDB("test").getCollection("fts_projection"); +t.drop(); + +db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); + +t.insert({_id: 0, a: "textual content"}); +t.insert({_id: 1, a: "additional content", b: -1}); +t.insert({_id: 2, a: "irrelevant content"}); +t.ensureIndex({a:"text"}); + +// Project the text score. 
+var results = t.find({$text: {$search: "textual content -irrelevant"}}, {_idCopy:0, score:{$meta: "textScore"}}).toArray(); +// printjson(results); +// Scores should exist. +assert.eq(results.length, 2); +assert(results[0].score); +assert(results[1].score); + +// indexed by _id. +var scores = [0, 0, 0]; +scores[results[0]._id] = results[0].score; +scores[results[1]._id] = results[1].score; + +// +// Edge/error cases: +// + +// Project text score into 2 fields. +results = t.find({$text: {$search: "textual content -irrelevant"}}, {otherScore: {$meta: "textScore"}, score:{$meta: "textScore"}}).toArray(); +assert.eq(2, results.length); +for (var i = 0; i < results.length; ++i) { + assert.close(scores[results[i]._id], results[i].score); + assert.close(scores[results[i]._id], results[i].otherScore); +} + +// printjson(results); + +// Project text score into "x.$" shouldn't crash +assert.throws(function() { t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}}).toArray(); }); + +// TODO: We can't project 'x.y':1 and 'x':1 (yet). + +// Clobber an existing field and behave nicely. +results = t.find({$text: {$search: "textual content -irrelevant"}}, + {b: {$meta: "textScore"}}).toArray(); +assert.eq(2, results.length); +for (var i = 0; i < results.length; ++i) { + assert.close(scores[results[i]._id], results[i].b, + i + ': existing field in ' + tojson(results[i], '', true) + + ' is not clobbered with score'); +} + +assert.neq(-1, results[0].b); + +// Don't crash if we have no text score. +var results = t.find({a: /text/}, {score: {$meta: "textScore"}}).toArray(); +// printjson(results); + +// No textScore proj. 
with nested fields +assert.throws(function() { t.find({$text: {$search: "blah"}}, {'x.y':{$meta: "textScore"}}).toArray(); }); + +// SERVER-12173 +// When $text operator is in $or, should evaluate first +results = t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {_id: 1}]}, + {_idCopy:0, score:{$meta: "textScore"}}).toArray(); +printjson(results); +assert.eq(2, results.length); +for (var i = 0; i < results.length; ++i) { + assert.close(scores[results[i]._id], results[i].score, + i + ': TEXT under OR invalid score: ' + tojson(results[i], '', true)); +} + +// SERVER-12592 +// When $text operator is in $or, all non-$text children must be indexed. Otherwise, we should produce +// a readable error. +var errorMessage = ''; +assert.throws( function() { + try { + t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {b: 1}]}).itcount(); + } + catch (e) { + errorMessage = e; + throw e; + } +}, null, 'Expected error from failed TEXT under OR planning'); +assert.neq(-1, errorMessage.indexOf('TEXT'), + 'message from failed text planning does not mention TEXT: ' + errorMessage); +assert.neq(-1, errorMessage.indexOf('OR'), + 'message from failed text planning does not mention OR: ' + errorMessage); + +// Scores should exist. +assert.eq(results.length, 2); +assert(results[0].score, + "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or"); +assert(results[1].score, + "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or"); + diff --git a/jstests/core/fts_querylang.js b/jstests/core/fts_querylang.js new file mode 100644 index 00000000000..2a139f5b766 --- /dev/null +++ b/jstests/core/fts_querylang.js @@ -0,0 +1,93 @@ +// Test $text query operator. 
+ +var t = db.getSiblingDB("test").getCollection("fts_querylang"); +var cursor; +var results; + +db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); + +t.drop(); + +t.insert({_id: 0, unindexedField: 0, a: "textual content"}); +t.insert({_id: 1, unindexedField: 1, a: "additional content"}); +t.insert({_id: 2, unindexedField: 2, a: "irrelevant content"}); +t.ensureIndex({a: "text"}); + +// Test text query with no results. +assert.eq(false, t.find({$text: {$search: "words"}}).hasNext()); + +// Test basic text query. +results = t.find({$text: {$search: "textual content -irrelevant"}}).toArray(); +assert.eq(results.length, 2); +assert.neq(results[0]._id, 2); +assert.neq(results[1]._id, 2); + +// Test sort with basic text query. +results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray(); +assert.eq(results.length, 2); +assert.eq(results[0]._id, 0); +assert.eq(results[1]._id, 1); + +// Test skip with basic text query. +results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).skip(1).toArray(); +assert.eq(results.length, 1); +assert.eq(results[0]._id, 1); + +// Test limit with basic text query. +results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).limit(1).toArray(); +assert.eq(results.length, 1); +assert.eq(results[0]._id, 0); + +// TODO Test basic text query with sort, once sort is enabled in the new query framework. + +// TODO Test basic text query with projection, once projection is enabled in the new query +// framework. + +// Test $and of basic text query with indexed expression. +results = t.find({$text: {$search: "content -irrelevant"}, + _id: 1}).toArray(); +assert.eq(results.length, 1); +assert.eq(results[0]._id, 1); + +// Test $and of basic text query with unindexed expression. 
+results = t.find({$text: {$search: "content -irrelevant"}, + unindexedField: 1}).toArray(); +assert.eq(results.length, 1); +assert.eq(results[0]._id, 1); + +// TODO Test invalid inputs for $text, $search, $language. + +// Test $language. +cursor = t.find({$text: {$search: "contents", $language: "none"}}); +assert.eq(false, cursor.hasNext()); + +cursor = t.find({$text: {$search: "contents", $language: "EN"}}); +assert.eq(true, cursor.hasNext()); + +cursor = t.find({$text: {$search: "contents", $language: "spanglish"}}); +assert.throws(function() { cursor.next() }); + +// TODO Test $and of basic text query with geo expression. + +// Test update with $text. +t.update({$text: {$search: "textual content -irrelevant"}}, {$set: {b: 1}}, {multi: true}); +assert.eq(2, t.find({b: 1}).itcount(), + 'incorrect number of documents updated'); + +// TODO Test remove with $text, once it is enabled with the new query framework. + +// TODO Test count with $text, once it is enabled with the new query framework. + +// TODO Test findAndModify with $text, once it is enabled with the new query framework. + +// TODO Test aggregate with $text, once it is enabled with the new query framework. + +// TODO Test that old query framework rejects $text queries. + +// TODO Test that $text fails without a text index. + +// TODO Test that $text accepts a hint of the text index. + +// TODO Test that $text fails if a different index is hinted. + +// TODO Test $text with {$natural:1} sort, {$natural:1} hint. diff --git a/jstests/core/fts_score_sort.js b/jstests/core/fts_score_sort.js new file mode 100644 index 00000000000..59fb852a774 --- /dev/null +++ b/jstests/core/fts_score_sort.js @@ -0,0 +1,28 @@ +// Test sorting with text score metadata. 
+ +var t = db.getSiblingDB("test").getCollection("fts_score_sort"); +t.drop(); + +db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); + +t.insert({_id: 0, a: "textual content"}); +t.insert({_id: 1, a: "additional content"}); +t.insert({_id: 2, a: "irrelevant content"}); +t.ensureIndex({a:"text"}); + +// Sort by the text score. +var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({score: {$meta: "textScore"}}).toArray(); +// printjson(results); +assert.eq(results.length, 2); +assert.eq(results[0]._id, 0); +assert.eq(results[1]._id, 1); +assert(results[0].score > results[1].score); + +// Sort by {_id descending, score} and verify the order is right. +var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({_id: -1, score: {$meta: "textScore"}}).toArray(); +printjson(results); +assert.eq(results.length, 2); +assert.eq(results[0]._id, 1); +assert.eq(results[1]._id, 0); +// Note the reversal from above. 
+assert(results[0].score < results[1].score); diff --git a/jstests/core/fts_spanish.js b/jstests/core/fts_spanish.js new file mode 100644 index 00000000000..b322c369f3f --- /dev/null +++ b/jstests/core/fts_spanish.js @@ -0,0 +1,30 @@ + +load( "jstests/libs/fts.js" ); + +t = db.text_spanish; +t.drop(); + +t.save( { _id: 1, title: "mi blog", text: "Este es un blog de prueba" } ); +t.save( { _id: 2, title: "mi segundo post", text: "Este es un blog de prueba" } ); +t.save( { _id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed" } ); +t.save( { _id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english" } ); + +// default weight is 1 +// specify weights if you want a field to be more meaningull +t.ensureIndex( { "title": "text", text: "text" }, { weights: { title: 10 }, + default_language: "es" } ); + +res = t.runCommand( "text", { search: "blog" } ); +assert.eq( 4, res.results.length ); + +assert.eq( [4], queryIDS( t, "stem" ) ); +assert.eq( [3], queryIDS( t, "stemmed" ) ); +assert.eq( [4], queryIDS( t, "stemmed", null, { language : "en" } ) ); + +assert.eq( [1,2], queryIDS( t, "prueba" ) ); + +assert.writeError( t.save( { _id: 5, language: "spanglish", title: "", text: "" } )); + +t.dropIndexes(); +res = t.ensureIndex( { "title": "text", text: "text" }, { default_language: "spanglish" } ); +assert.neq(null, res); diff --git a/jstests/core/geo1.js b/jstests/core/geo1.js new file mode 100644 index 00000000000..5e28713d581 --- /dev/null +++ b/jstests/core/geo1.js @@ -0,0 +1,37 @@ + +t = db.geo1 +t.drop(); + +idx = { loc : "2d" , zip : 1 } + +t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } ) +t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } ) +assert.writeOK( t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } )); + +// test "2d" has to be first +assert.eq( 1 , t.getIndexKeys().length , "S1" ); +t.ensureIndex( { zip : 1 , loc : "2d" } ); +assert.eq( 1 , t.getIndexKeys().length , 
"S2" ); + +t.ensureIndex( idx ); +assert.eq( 2 , t.getIndexKeys().length , "S3" ); + +assert.eq( 3 , t.count() , "B1" ); +assert.writeError( t.insert( { loc : [ 200 , 200 ] } )); +assert.eq( 3 , t.count() , "B3" ); + +// test normal access + +wb = t.findOne( { zip : "06525" } ) +assert( wb , "C1" ); + +assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" ) +assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" ) +// assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" ) + +// test config options + +t.drop(); + +t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } ); +assert.writeOK( t.insert( { loc : [ 200 , 200 ] } )); diff --git a/jstests/core/geo10.js b/jstests/core/geo10.js new file mode 100644 index 00000000000..b122da99f2a --- /dev/null +++ b/jstests/core/geo10.js @@ -0,0 +1,15 @@ +// Test for SERVER-2746 + +coll = db.geo10 +coll.drop(); + +assert.writeOK( db.geo10.ensureIndex( { c : '2d', t : 1 }, { min : 0, max : Math.pow( 2, 40 ) } )); +assert( db.system.indexes.count({ ns : "test.geo10" }) == 2, "A3" ) + +printjson( db.system.indexes.find().toArray() ) + +assert.writeOK( db.geo10.insert( { c : [ 1, 1 ], t : 1 } )); +assert.writeOK( db.geo10.insert( { c : [ 3600, 3600 ], t : 1 } )); +assert.writeOK( db.geo10.insert( { c : [ 0.001, 0.001 ], t : 1 } )); + +printjson( db.geo10.find({ c : { $within : { $box : [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]] } }, t : 1 }).toArray() ) diff --git a/jstests/core/geo2.js b/jstests/core/geo2.js new file mode 100644 index 00000000000..f9632ebd16d --- /dev/null +++ b/jstests/core/geo2.js @@ -0,0 +1,40 @@ + +t = db.geo2 +t.drop(); + +n = 1 +for ( var x=-100; x<100; x+=2 ){ + for ( var y=-100; y<100; y+=2 ){ + t.insert( { _id : n++ , loc : [ x , y ] } ) + } +} + +t.ensureIndex( { loc : "2d" } ) + +fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } ); + +function a( cur ){ + var total = 0; + var 
outof = 0; + while ( cur.hasNext() ){ + var o = cur.next(); + total += Geo.distance( [ 50 , 50 ] , o.loc ); + outof++; + } + return total/outof; +} + +assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" ) +assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" ); +assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" ); + +printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() ) + + +assert.lt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(50) ) , "C1" ) +assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 , 3 ] } } ).limit(50) ) , "C2" ) +assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] , $maxDistance : 3 } } ).limit(50) ) , "C3" ) + +// SERVER-8974 - test if $geoNear operator works with 2d index as well +var geoNear_cursor = t.find( { loc : { $geoNear : [50, 50] } } ); +assert.eq( geoNear_cursor.count(), 100 ) diff --git a/jstests/core/geo3.js b/jstests/core/geo3.js new file mode 100644 index 00000000000..47637783f5b --- /dev/null +++ b/jstests/core/geo3.js @@ -0,0 +1,77 @@ + +t = db.geo3 +t.drop(); + +n = 1 +for ( var x=-100; x<100; x+=2 ){ + for ( var y=-100; y<100; y+=2 ){ + t.insert( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } ) + } +} + + +t.ensureIndex( { loc : "2d" } ) + +fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } ); + +// test filter + +filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } ); +assert.eq( 10 , filtered1.results.length , "B1" ); +filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } ) +//printjson( filtered1.stats ); + +function avgA( q , len ){ + if ( ! 
len ) + len = 10; + var realq = { loc : { $near : [ 50 , 50 ] } }; + if ( q ) + Object.extend( realq , q ); + var as = + t.find( realq ).limit(len).map( + function(z){ + return z.a; + } + ); + assert.eq( len , as.length , "length in avgA" ); + return Array.avg( as ); +} + +function testFiltering( msg ){ + assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " ); + assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " ); + assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " ); +} + +testFiltering( "just loc" ); + +t.dropIndex( { loc : "2d" } ) +assert.eq( 1 , t.getIndexKeys().length , "setup 3a" ) +t.ensureIndex( { loc : "2d" , a : 1 } ) +assert.eq( 2 , t.getIndexKeys().length , "setup 3b" ) + +filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } ); +assert.eq( 10 , filtered2.results.length , "B3" ); +filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } ) + +assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" ) +assert.eq( filtered1.stats.nscanned , filtered2.stats.nscanned , "C3" ) +assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" ) + +testFiltering( "loc and a" ); + +t.dropIndex( { loc : "2d" , a : 1 } ) +assert.eq( 1 , t.getIndexKeys().length , "setup 4a" ) +t.ensureIndex( { loc : "2d" , b : 1 } ) +assert.eq( 2 , t.getIndexKeys().length , "setup 4b" ) + +testFiltering( "loc and b" ); + + +q = { loc : { $near : [ 50 , 50 ] } } +assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" ) +assert.eq( 100 , t.find( q ).limit(100).count() , "D2" ) + +assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" ) +assert.eq( 20 , t.find( q ).limit(20).size() , "D4" ) + diff --git a/jstests/core/geo4.js b/jstests/core/geo4.js new file mode 100644 index 00000000000..c1be468bb52 --- /dev/null +++ b/jstests/core/geo4.js @@ -0,0 +1,11 @@ +var t = db.geo4; +t.drop(); + +t.insert( { zip : "06525" , loc : [ 41.352964 , 
73.01212 ] } ); + +var err = t.ensureIndex( { loc : "2d" }, { bits : 33 } ); +assert.writeError(err); +assert( err.getWriteError().errmsg.indexOf("bits in geo index must be between 1 and 32") >= 0, + tojson( err )); + +assert.writeOK(t.ensureIndex( { loc : "2d" }, { bits : 32 } )); diff --git a/jstests/core/geo5.js b/jstests/core/geo5.js new file mode 100644 index 00000000000..67b00f85b44 --- /dev/null +++ b/jstests/core/geo5.js @@ -0,0 +1,18 @@ +t = db.geo5; +t.drop(); + +t.insert( { p : [ 0,0 ] } ) +t.ensureIndex( { p : "2d" } ) + +res = t.runCommand( "geoNear" , { near : [1,1] } ); +assert.eq( 1 , res.results.length , "A1" ); + +t.insert( { p : [ 1,1 ] } ) +t.insert( { p : [ -1,-1 ] } ) +res = t.runCommand( "geoNear" , { near : [50,50] } ); +assert.eq( 3 , res.results.length , "A2" ); + +t.insert( { p : [ -1,-1 ] } ) +res = t.runCommand( "geoNear" , { near : [50,50] } ); +assert.eq( 4 , res.results.length , "A3" ); + diff --git a/jstests/core/geo6.js b/jstests/core/geo6.js new file mode 100644 index 00000000000..185795c57ba --- /dev/null +++ b/jstests/core/geo6.js @@ -0,0 +1,24 @@ + +t = db.geo6; +t.drop(); + +t.ensureIndex( { loc : "2d" } ); + +assert.eq( 0 , t.find().itcount() , "pre0" ); +assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" ) + +t.insert( { _id : 1 , loc : [ 1 , 1 ] } ) +t.insert( { _id : 2 , loc : [ 1 , 2 ] } ) +t.insert( { _id : 3 } ) + +assert.eq( 3 , t.find().itcount() , "A1" ) +assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" ) +assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" ) + +t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).forEach(printjson); +assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" ) +assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B1" ) + + +t.insert( { _id : 4 , loc : [] } ) +assert.eq( 4 , t.find().itcount() , "C1" ) diff --git a/jstests/core/geo7.js 
b/jstests/core/geo7.js new file mode 100644 index 00000000000..c220da54249 --- /dev/null +++ b/jstests/core/geo7.js @@ -0,0 +1,20 @@ + +t = db.geo7; +t.drop(); + +t.insert({_id:1,y:[1,1]}) +t.insert({_id:2,y:[1,1],z:3}) +t.insert({_id:3,y:[1,1],z:4}) +t.insert({_id:4,y:[1,1],z:5}) + +t.ensureIndex({y:"2d",z:1}) + +assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" ); + +t.dropIndex({y:"2d",z:1}) + +t.ensureIndex({y:"2d"}) +assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" ); + +t.insert( { _id : 5 , y : 5 } ); +assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" ); diff --git a/jstests/core/geo8.js b/jstests/core/geo8.js new file mode 100644 index 00000000000..301f3bcc0d1 --- /dev/null +++ b/jstests/core/geo8.js @@ -0,0 +1,13 @@ + +t = db.geo8 +t.drop() + +t.insert( { loc : [ 5 , 5 ] } ) +t.insert( { loc : [ 5 , 6 ] } ) +t.insert( { loc : [ 5 , 7 ] } ) +t.insert( { loc : [ 4 , 5 ] } ) +t.insert( { loc : [ 100 , 100 ] } ) + +t.ensureIndex( { loc : "2d" } ) + +t.runCommand( "geoWalk" ); diff --git a/jstests/core/geo9.js b/jstests/core/geo9.js new file mode 100644 index 00000000000..8b6510f03b5 --- /dev/null +++ b/jstests/core/geo9.js @@ -0,0 +1,28 @@ + +t = db.geo9 +t.drop(); + +t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } ) +t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } ) +t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } ) + +t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } ) +t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } ) +t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } ) + +t.ensureIndex( { a : "2d" } ) +t.ensureIndex( { b : "2d" } ) + +function check( field ){ + var q = {} + q[field] = { $near : [ 11 , 11 ] } + arr = t.find( q ).limit(3).map( + function(z){ + return Geo.distance( [ 11 , 11 ] , z[field] ); + } + ); + assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field ); +} + +check( "a" ) +check( "b" ) diff --git a/jstests/core/geo_2d_explain.js b/jstests/core/geo_2d_explain.js new file mode 
100644 index 00000000000..8195642aabc --- /dev/null +++ b/jstests/core/geo_2d_explain.js @@ -0,0 +1,29 @@ +var t = db.geo_2d_explain; + +t.drop(); + +var n = 1000; + +// insert n documents with integer _id, a can be 1-5, loc is close to [40, 40] +t.drop() +t.ensureIndex({loc: "2d", _id: 1}) + +var x = 40; +var y = 40; +for (var i = 0; i < n; i++) { + // random number in range [1, 5] + var a = Math.floor(Math.random() * 5) + 1; + var dist = 4.0; + var dx = (Math.random() - 0.5) * dist; + var dy = (Math.random() - 0.5) * dist; + var loc = [x + dx, y + dy]; + t.save({_id: i, a: a, loc: loc}); +} + +var explain = t.find({loc: {$near: [40, 40]}, _id: {$lt: 50}}).explain(); + +print('explain = ' + tojson(explain)); + +assert.eq({}, explain.indexBounds); +assert.eq(explain.n, explain.nscannedObjects); +assert.lte(explain.n, explain.nscanned); diff --git a/jstests/core/geo_2d_with_geojson_point.js b/jstests/core/geo_2d_with_geojson_point.js new file mode 100644 index 00000000000..b5afc8b77b8 --- /dev/null +++ b/jstests/core/geo_2d_with_geojson_point.js @@ -0,0 +1,20 @@ +/* + * Use of GeoJSON points should be prohibited with a 2d index, SERVER-10636. + */ + +var t = db.geo_2d_with_geojson_point; +t.drop(); +t.ensureIndex({loc: '2d'}); + +var geoJSONPoint = { + type: 'Point', + coordinates: [0, 0] +}; + +print(assert.throws( + function() { + t.findOne({ + loc: {$near: {$geometry: geoJSONPoint}}}); + }, + [], + 'querying 2d index with GeoJSON point.')); diff --git a/jstests/core/geo_allowedcomparisons.js b/jstests/core/geo_allowedcomparisons.js new file mode 100644 index 00000000000..61eb3f43e52 --- /dev/null +++ b/jstests/core/geo_allowedcomparisons.js @@ -0,0 +1,95 @@ +// A test for what geometries can interact with what other geometries. +t = db.geo_allowedcomparisons; + +// Any GeoJSON object can intersect with any geojson object. 
+geojsonPoint = { "type" : "Point", "coordinates": [ 0, 0 ] }; +oldPoint = [0,0]; + +// GeoJSON polygons can contain any geojson object and OLD points. +geojsonPoly = { "type" : "Polygon", + "coordinates" : [ [ [-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]]}; + +// This can be contained by GJ polygons, intersected by anything GJ and old points. +geojsonLine = { "type" : "LineString", "coordinates": [ [ 0, 0], [1, 1]]} + +// $centerSphere can contain old or new points. +oldCenterSphere = [[0, 0], Math.PI / 180]; +// $box can contain old points. +oldBox = [[-5,-5], [5,5]]; +// $polygon can contain old points. +oldPolygon = [[-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]] +// $center can contain old points. +oldCenter = [[0, 0], 1]; + +t.drop(); +t.ensureIndex({geo: "2d"}); +// 2d doesn't know what to do w/this +assert.writeError(t.insert({geo: geojsonPoint})); +// Old points are OK. +assert.writeOK(t.insert({geo: oldPoint})); +// Lines not OK in 2d +assert.writeError(t.insert({geo: geojsonLine})); +// Shapes are not OK to insert in 2d +assert.writeError(t.insert({geo: geojsonPoly})); +assert.writeError(t.insert({geo: oldCenterSphere})); +assert.writeError(t.insert({geo: oldCenter})); +// If we try to insert a polygon, it thinks it's an array of points. Let's not +// do that. Ditto for the box. + +// Verify that even if we can't index them, we can use them in a matcher. +t.insert({gj: geojsonLine}) +t.insert({gj: geojsonPoly}) +geojsonPoint2 = { "type" : "Point", "coordinates": [ 0, 0.001 ] }; +t.insert({gjp: geojsonPoint2}) + +// We convert between old and new style points. +assert.eq(1, t.find({gjp: {$geoWithin: {$box: oldBox}}}).itcount()); +assert.eq(1, t.find({gjp: {$geoWithin: {$polygon: oldPolygon}}}).itcount()); +assert.eq(1, t.find({gjp: {$geoWithin: {$center: oldCenter}}}).itcount()); +assert.eq(1, t.find({gjp: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount()) + +function runTests() { + // Each find the box, the polygon, and the old point. 
+ assert.eq(1, t.find({geo: {$geoWithin: {$box: oldBox}}}).itcount()) + assert.eq(1, t.find({geo: {$geoWithin: {$polygon: oldPolygon}}}).itcount()) + // Each find the old point. + assert.eq(1, t.find({geo: {$geoWithin: {$center: oldCenter}}}).itcount()) + assert.eq(1, t.find({geo: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount()) + // Using geojson with 2d-style geoWithin syntax should choke. + assert.throws(function() { return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}}) + .itcount();}) + // Using old polygon w/new syntax should choke too. + assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}}) + .itcount();}) + assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldBox}}}) + .itcount();}) + assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenter}}}) + .itcount();}) + assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}}) + .itcount();}) + // Even if we only have a 2d index, the 2d suitability function should + // allow the matcher to deal with this. If we have a 2dsphere index we use it. + assert.eq(1, t.find({geo: {$geoWithin: {$geometry: geojsonPoly}}}).itcount()) + assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoly}}}).itcount()) + assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: oldPoint}}}).itcount()) + assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoint}}}).itcount()) +} + +// We have a 2d index right now. Let's see what it does. +runTests(); + +// No index now. +t.dropIndex({geo: "2d"}) +runTests(); + +// 2dsphere index now. +assert.writeOK( t.ensureIndex({geo: "2dsphere"}) ); +// 2dsphere does not support arrays of points. +assert.writeError(t.insert({geo: [geojsonPoint2, geojsonPoint]})); +runTests(); + +// Old stuff is not GeoJSON (or old-style point). All should fail. 
+assert.writeError(t.insert({geo: oldBox})); +assert.writeError(t.insert({geo: oldPolygon})); +assert.writeError(t.insert({geo: oldCenter})); +assert.writeError(t.insert({geo: oldCenterSphere})); diff --git a/jstests/core/geo_array0.js b/jstests/core/geo_array0.js new file mode 100644 index 00000000000..39c7b10c083 --- /dev/null +++ b/jstests/core/geo_array0.js @@ -0,0 +1,26 @@ +// Make sure the very basics of geo arrays are sane by creating a few multi location docs +t = db.geoarray + +function test(index) { + t.drop(); + t.insert( { zip : "10001", loc : { home : [ 10, 10 ], work : [ 50, 50 ] } } ) + t.insert( { zip : "10002", loc : { home : [ 20, 20 ], work : [ 50, 50 ] } } ) + var res = t.insert( { zip : "10003", loc : { home : [ 30, 30 ], work : [ 50, 50 ] } } ); + assert.writeOK( res ); + + if (index) { + assert.writeOK(t.ensureIndex( { loc : "2d", zip : 1 } )); + assert.eq( 2, t.getIndexKeys().length ) + } + + res = t.insert( { zip : "10004", loc : { home : [ 40, 40 ], work : [ 50, 50 ] } } ); + assert.writeOK( res ); + + // test normal access + printjson( t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() ) + assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() ); + assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() ); +} + +//test(false); // this was removed as part of SERVER-6400 +test(true) diff --git a/jstests/core/geo_array1.js b/jstests/core/geo_array1.js new file mode 100644 index 00000000000..ca61050c888 --- /dev/null +++ b/jstests/core/geo_array1.js @@ -0,0 +1,38 @@ +// Make sure many locations in one doc works, in the form of an array + +t = db.geoarray1 +function test(index) { + t.drop(); + + var locObj = [] + // Add locations everywhere + for ( var i = 0; i < 10; i++ ) { + for ( var j = 0; j < 10; j++ ) { + if ( j % 2 == 0 ) + locObj.push( [ i, j ] ) + else + locObj.push( { x : i, y : j } ) + } + } + + // Add docs with all these 
locations + for( var i = 0; i < 300; i++ ){ + t.insert( { loc : locObj } ) + } + + if (index) { + t.ensureIndex( { loc : "2d" } ) + } + + // Pull them back + for ( var i = 0; i < 10; i++ ) { + for ( var j = 0; j < 10; j++ ) { + assert.eq(300, t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5 ], + [i + 0.5,j + 0.5]]}}}) + .count()) + } + } +} + +test(true) +test(false) diff --git a/jstests/core/geo_array2.js b/jstests/core/geo_array2.js new file mode 100644 index 00000000000..acfc6a15abf --- /dev/null +++ b/jstests/core/geo_array2.js @@ -0,0 +1,161 @@ +// Check the semantics of near calls with multiple locations + +t = db.geoarray2 +t.drop(); + +var numObjs = 10; +var numLocs = 100; + +// Test the semantics of near / nearSphere / etc. queries with multiple keys per object + +for( var i = -1; i < 2; i++ ){ + for(var j = -1; j < 2; j++ ){ + + locObj = [] + + if( i != 0 || j != 0 ) + locObj.push( { x : i * 50 + Random.rand(), + y : j * 50 + Random.rand() } ) + locObj.push( { x : Random.rand(), + y : Random.rand() } ) + locObj.push( { x : Random.rand(), + y : Random.rand() } ) + + t.insert({ name : "" + i + "" + j , loc : locObj , type : "A" }) + t.insert({ name : "" + i + "" + j , loc : locObj , type : "B" }) + } +} + +assert.writeOK(t.ensureIndex({ loc : "2d" , type : 1 })); + +print( "Starting testing phase... ") + +for( var t = 0; t < 2; t++ ){ + +var type = t == 0 ? "A" : "B" + +for( var i = -1; i < 2; i++ ){ + for(var j = -1; j < 2; j++ ){ + + var center = [ i * 50 , j * 50 ] + var count = i == 0 && j == 0 ? 
9 : 1 + var objCount = 1 + + // Do near check + + var nearResults = db.runCommand( { geoNear : "geoarray2" , + near : center , + num : count, + query : { type : type } } ).results + //printjson( nearResults ) + + var objsFound = {} + var lastResult = 0; + for( var k = 0; k < nearResults.length; k++ ){ + + // All distances should be small, for the # of results + assert.gt( 1.5 , nearResults[k].dis ) + // Distances should be increasing + assert.lte( lastResult, nearResults[k].dis ) + // Objs should be of the right type + assert.eq( type, nearResults[k].obj.type ) + + lastResult = nearResults[k].dis + + var objKey = "" + nearResults[k].obj._id + + if( objKey in objsFound ) objsFound[ objKey ]++ + else objsFound[ objKey ] = 1 + + } + + // Make sure we found the right objects each time + // Note: Multiple objects could be found for diff distances. + for( var q in objsFound ){ + assert.eq( objCount , objsFound[q] ) + } + + + // Do nearSphere check + + // Earth Radius + var eRad = 6371 + + nearResults = db.geoarray2.find( { loc : { $nearSphere : center , $maxDistance : 500 /* km */ / eRad }, type : type } ).toArray() + + assert.eq( nearResults.length , count ) + + objsFound = {} + lastResult = 0; + for( var k = 0; k < nearResults.length; k++ ){ + var objKey = "" + nearResults[k]._id + if( objKey in objsFound ) objsFound[ objKey ]++ + else objsFound[ objKey ] = 1 + + } + + // Make sure we found the right objects each time + for( var q in objsFound ){ + assert.eq( objCount , objsFound[q] ) + } + + + + // Within results do not return duplicate documents + + var count = i == 0 && j == 0 ? 9 : 1 + var objCount = i == 0 && j == 0 ? 
1 : 1 + + // Do within check + objsFound = {} + + var box = [ [center[0] - 1, center[1] - 1] , [center[0] + 1, center[1] + 1] ] + + //printjson( box ) + + var withinResults = db.geoarray2.find({ loc : { $within : { $box : box } } , type : type }).toArray() + + assert.eq( withinResults.length , count ) + + for( var k = 0; k < withinResults.length; k++ ){ + var objKey = "" + withinResults[k]._id + if( objKey in objsFound ) objsFound[ objKey ]++ + else objsFound[ objKey ] = 1 + } + + //printjson( objsFound ) + + // Make sure we found the right objects each time + for( var q in objsFound ){ + assert.eq( objCount , objsFound[q] ) + } + + + // Do within check (circle) + objsFound = {} + + withinResults = db.geoarray2.find({ loc : { $within : { $center : [ center, 1.5 ] } } , type : type }).toArray() + + assert.eq( withinResults.length , count ) + + for( var k = 0; k < withinResults.length; k++ ){ + var objKey = "" + withinResults[k]._id + if( objKey in objsFound ) objsFound[ objKey ]++ + else objsFound[ objKey ] = 1 + } + + // Make sure we found the right objects each time + for( var q in objsFound ){ + assert.eq( objCount , objsFound[q] ) + } + + + + } +} + +} + + + + diff --git a/jstests/core/geo_borders.js b/jstests/core/geo_borders.js new file mode 100644 index 00000000000..20781409b1e --- /dev/null +++ b/jstests/core/geo_borders.js @@ -0,0 +1,162 @@ +t = db.borders +t.drop() + +epsilon = 0.0001; + +// For these tests, *required* that step ends exactly on max +min = -1 +max = 1 +step = 1 +numItems = 0; + +for ( var x = min; x <= max; x += step ) { + for ( var y = min; y <= max; y += step ) { + t.insert( { loc : { x : x, y : y } } ) + numItems++; + } +} + +overallMin = -1 +overallMax = 1 + +// Create a point index slightly smaller than the points we have +var res = t.ensureIndex({ loc: "2d" }, + { max: overallMax - epsilon / 2, + min: overallMin + epsilon / 2 }); +assert.neq(null, res); + +// Create a point index only slightly bigger than the points we have +res = 
t.ensureIndex( { loc : "2d" }, { max : overallMax + epsilon, min : overallMin - epsilon } ); +assert.writeOK(res); + +// ************ +// Box Tests +// ************ + +// If the bounds are bigger than the box itself, just clip at the borders +assert.eq( numItems, t.find( + { loc : { $within : { $box : [ + [ overallMin - 2 * epsilon, overallMin - 2 * epsilon ], + [ overallMax + 2 * epsilon, overallMax + 2 * epsilon ] ] } } } ).count() ); + +// Check this works also for bounds where only a single dimension is off-bounds +assert.eq( numItems - 5, t.find( + { loc : { $within : { $box : [ + [ overallMin - 2 * epsilon, overallMin - 0.5 * epsilon ], + [ overallMax - epsilon, overallMax - epsilon ] ] } } } ).count() ); + +// Make sure we can get at least close to the bounds of the index +assert.eq( numItems, t.find( + { loc : { $within : { $box : [ + [ overallMin - epsilon / 2, overallMin - epsilon / 2 ], + [ overallMax + epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() ); + +// Make sure we can get at least close to the bounds of the index +assert.eq( numItems, t.find( + { loc : { $within : { $box : [ + [ overallMax + epsilon / 2, overallMax + epsilon / 2 ], + [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() ); + +// Check that swapping min/max has good behavior +assert.eq( numItems, t.find( + { loc : { $within : { $box : [ + [ overallMax + epsilon / 2, overallMax + epsilon / 2 ], + [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() ); + +assert.eq( numItems, t.find( + { loc : { $within : { $box : [ + [ overallMax + epsilon / 2, overallMin - epsilon / 2 ], + [ overallMin - epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() ); + +// ************** +// Circle tests +// ************** + +center = ( overallMax + overallMin ) / 2 +center = [ center, center ] +radius = overallMax + +offCenter = [ center[0] + radius, center[1] + radius ] +onBounds = [ offCenter[0] + epsilon, offCenter[1] + epsilon ] +offBounds = [ 
onBounds[0] + epsilon, onBounds[1] + epsilon ] +onBoundsNeg = [ -onBounds[0], -onBounds[1] ] + +// Make sure we can get all points when radius is exactly at full bounds +assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + epsilon ] } } } ).count() ); + +// Make sure we can get points when radius is over full bounds +assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + 2 * epsilon ] } } } ).count() ); + +// Make sure we can get points when radius is over full bounds, off-centered +assert.lt( 0, t.find( { loc : { $within : { $center : [ offCenter, radius + 2 * epsilon ] } } } ).count() ); + +// Make sure we get correct corner point when center is in bounds +// (x bounds wrap, so could get other corner) +cornerPt = t.findOne( { loc : { $within : { $center : [ offCenter, step / 2 ] } } } ); +assert.eq( cornerPt.loc.y, overallMax ) + +// Make sure we get correct corner point when center is on bounds +// NOTE: Only valid points on MIN bounds +cornerPt = t + .findOne( { loc : { $within : { $center : [ onBoundsNeg, Math.sqrt( 2 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); +assert.eq( cornerPt.loc.y, overallMin ) + +// Make sure we can't get corner point when center is over bounds +try { + t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); + assert( false ) +} catch (e) { +} + +// Make sure we can't get corner point when center is on max bounds +try { + t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); + assert( false ) +} catch (e) { +} + +// *********** +// Near tests +// *********** + +// Make sure we can get all nearby points to point in range +assert.eq( overallMax, t.find( { loc : { $near : offCenter } } ).next().loc.y ); + +// Make sure we can get all nearby points to point on boundary +assert.eq( overallMin, t.find( { loc : { $near : onBoundsNeg } } ).next().loc.y ); + +// Make sure we can't 
get all nearby points to point over boundary +try { + t.findOne( { loc : { $near : offBounds } } ) + assert( false ) +} catch (e) { +} +// Make sure we can't get all nearby points to point on max boundary +try { + t.findOne( { loc : { $near : onBoundsNeg } } ) + assert( false ) +} catch (e) { +} + +// Make sure we can get all nearby points within one step (4 points in top +// corner) +assert.eq( 4, t.find( { loc : { $near : offCenter, $maxDistance : step * 1.9 } } ).count() ); + +// ************** +// Command Tests +// ************** +// Make sure we can get all nearby points to point in range +assert.eq( overallMax, db.runCommand( { geoNear : "borders", near : offCenter } ).results[0].obj.loc.y ); + +// Make sure we can get all nearby points to point on boundary +assert.eq( overallMin, db.runCommand( { geoNear : "borders", near : onBoundsNeg } ).results[0].obj.loc.y ); + +// Make sure we can't get all nearby points to point over boundary +assert.commandFailed( db.runCommand( { geoNear : "borders", near : offBounds } )); + +// Make sure we can't get all nearby points to point on max boundary +assert.commandWorked( db.runCommand( { geoNear : "borders", near : onBounds } )); + +// Make sure we can get all nearby points within one step (4 points in top +// corner) +assert.eq( 4, db.runCommand( { geoNear : "borders", near : offCenter, maxDistance : step * 1.5 } ).results.length ); diff --git a/jstests/core/geo_box1.js b/jstests/core/geo_box1.js new file mode 100644 index 00000000000..5ef335158e1 --- /dev/null +++ b/jstests/core/geo_box1.js @@ -0,0 +1,43 @@ + +t = db.geo_box1; +t.drop(); + +num = 0; +for ( x=0; x<=20; x++ ){ + for ( y=0; y<=20; y++ ){ + o = { _id : num++ , loc : [ x , y ] } + t.save( o ) + } +} + +t.ensureIndex( { loc : "2d" } ); + +searches = [ + [ [ 1 , 2 ] , [ 4 , 5 ] ] , + [ [ 1 , 1 ] , [ 2 , 2 ] ] , + [ [ 0 , 2 ] , [ 4 , 5 ] ] , + [ [ 1 , 1 ] , [ 2 , 8 ] ] , +]; + + +for ( i=0; i> ' + + '(' + covering[1][0] + ',' + covering[1][1] + ')'); +// 
Compare covering against $box coordinates. +// min X +assert.lte(covering[0][0], 4); +// min Y +assert.lte(covering[0][1], 4); +// max X +assert.gte(covering[1][0], 6); +// max Y +assert.gte(covering[1][1], 6); diff --git a/jstests/core/geo_box3.js b/jstests/core/geo_box3.js new file mode 100644 index 00000000000..8941f637518 --- /dev/null +++ b/jstests/core/geo_box3.js @@ -0,0 +1,36 @@ +// How to construct a test to stress the flaw in SERVER-994: +// construct an index, think up a bounding box inside the index that +// doesn't include the center of the index, and put a point inside the +// bounding box. + +// This is the bug reported in SERVER-994. +t=db.geo_box3; +t.drop(); +t.insert({ point : { x : -15000000, y : 10000000 } }); +t.ensureIndex( { point : "2d" } , { min : -21000000 , max : 21000000 } ); +var c=t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]} } }); +assert.eq(1, c.count(), "A1"); + +// Same thing, modulo 1000000. +t=db.geo_box3; +t.drop(); +t.insert({ point : { x : -15, y : 10 } }); +t.ensureIndex( { point : "2d" } , { min : -21 , max : 21 } ); +var c=t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]} } }); +assert.eq(1, c.count(), "B1"); + +// Two more examples, one where the index is centered at the origin, +// one not. 
+t=db.geo_box3; +t.drop(); +t.insert({ point : { x : 1.0 , y : 1.0 } }); +t.ensureIndex( { point : "2d" } , { min : -2 , max : 2 } ); +var c=t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]} } }); +assert.eq(1, c.count(), "C1"); + +t=db.geo_box3; +t.drop(); +t.insert({ point : { x : 3.9 , y : 3.9 } }); +t.ensureIndex( { point : "2d" } , { min : 0 , max : 4 } ); +var c=t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]} } }); +assert.eq(1, c.count(), "D1"); diff --git a/jstests/core/geo_center_sphere1.js b/jstests/core/geo_center_sphere1.js new file mode 100644 index 00000000000..4e4a658f058 --- /dev/null +++ b/jstests/core/geo_center_sphere1.js @@ -0,0 +1,98 @@ +t = db.geo_center_sphere1; + +function test(index) { + t.drop(); + skip = 8 // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4) + + searches = [ + // x , y rad + [ [ 5 , 0 ] , 0.05 ] , // ~200 miles + [ [ 135 , 0 ] , 0.05 ] , + + [ [ 5 , 70 ] , 0.05 ] , + [ [ 135 , 70 ] , 0.05 ] , + [ [ 5 , 85 ] , 0.05 ] , + + [ [ 20 , 0 ] , 0.25 ] , // ~1000 miles + [ [ 20 , -45 ] , 0.25 ] , + [ [ -20 , 60 ] , 0.25 ] , + [ [ -20 , -70 ] , 0.25 ] , + ]; + correct = searches.map( function(z){ return []; } ); + + num = 0; + + var bulk = t.initializeUnorderedBulkOp(); + for ( x=-179; x<=179; x += skip ){ + for ( y=-89; y<=89; y += skip ){ + o = { _id : num++ , loc : [ x , y ] } + bulk.insert( o ); + for ( i=0; i= distance ) { + minNewDistance = newDistance; + } + } + + //print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is " + // + minNewDistance + " vs " + radius ) + + assert.lte( minNewDistance, radius ); + assert.gte( minNewDistance, distance ); + distance = minNewDistance; + } + + // geoNear + results = db.runCommand({ geoNear: "sphere", near: startPoint, maxDistance: radius, + num : 2 * pointsIn, spherical : true } ).results; + + /* + printjson( results ); + + for ( var j = 0; j < results[0].obj.loc.length; j++ ) { + var newDistance = 
Geo.sphereDistance( startPoint, results[0].obj.loc[j] ) + if( newDistance <= radius ) print( results[0].obj.loc[j] + " : " + newDistance ) + } + */ + + assert.eq( docsIn, results.length ); + + var distance = 0; + for ( var i = 0; i < results.length; i++ ) { + var retDistance = results[i].dis + + // print( "Dist from : " + results[i].loc + " to " + startPoint + " is " + // + retDistance + " vs " + radius ) + + var distInObj = false; + for ( var j = 0; j < results[i].obj.loc.length && distInObj == false; j++ ) { + var newDistance = Geo.sphereDistance( startPoint, results[i].obj.loc[j] ); + distInObj = ( newDistance >= retDistance - 0.0001 && + newDistance <= retDistance + 0.0001 ); + } + + assert( distInObj ); + assert.lte( retDistance, radius ); + assert.gte( retDistance, distance ); + distance = retDistance; + } +} + diff --git a/jstests/core/geo_circle1.js b/jstests/core/geo_circle1.js new file mode 100644 index 00000000000..852b60d186b --- /dev/null +++ b/jstests/core/geo_circle1.js @@ -0,0 +1,43 @@ + +t = db.geo_circle1; +t.drop(); + +searches = [ + [ [ 5 , 5 ] , 3 ] , + [ [ 5 , 5 ] , 1 ] , + [ [ 5 , 5 ] , 5 ] , + [ [ 0 , 5 ] , 5 ] , +]; +correct = searches.map( function(z){ return []; } ); + +num = 0; + +for ( x=0; x<=20; x++ ){ + for ( y=0; y<=20; y++ ){ + o = { _id : num++ , loc : [ x , y ] } + t.save( o ) + for ( i=0; i queries[i].maxDistance ) + continue; + if ( queries[i].search.z != n % 5 ) + continue; + answers[i].results.push( { _id : n , loc : [ x , y ]} ) + answers[i].totalDistance += d; + } + + n++; + } +} + +t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } ); + +for ( i=0; i queries[i].maxDistance ) + continue; + if ( queries[i].search.z != n % 10 && + queries[i].search.z != ( n + 5 ) % 10 ) + continue; + answers[i].results.push( { _id : n , loc : [ x , y ] } ) + answers[i].totalDistance += d; + } + + n++; + } +} + +t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } ); + +for ( i=0; i1 from us but <1.5 +// These 
points are (-+1, -+1) +resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {$and: [{geo: {$within: {$center: [[0, 0], 1.5]}}}, + {geo: {$not: {$within: {$center: [[0,0], 1]}}}}]}}) +assert.eq(resNear.results.length, 4) diff --git a/jstests/core/geo_or.js b/jstests/core/geo_or.js new file mode 100644 index 00000000000..fd9b7234a21 --- /dev/null +++ b/jstests/core/geo_or.js @@ -0,0 +1,62 @@ +// multiple geo clauses with $or + +t = db.geoor; + +t.drop(); + +var p = [-71.34895, 42.46037]; +var q = [1.48736, 42.55327]; + +t.save({loc: p}); +t.save({loc: q}); + +var indexname = "2dsphere"; + +t.ensureIndex({loc: indexname}) + +assert.eq(1, t.find({loc: p}).itcount(), indexname); + +// $or supports at most one $near clause +assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(), + 'geo query not supported by $or. index type: ' + indexname); +assert.throws(function() { + assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}, + {loc: {$nearSphere: q}}]}).itcount(), + 'geo query not supported by $or. index type: ' + indexname); +}, null, '$or with multiple $near clauses'); + +// the following tests should match the points in the collection + +assert.eq(2, t.find({$or: [ + {loc: {$geoWithin: {$centerSphere: [p, 10]}}}, + {loc: {$geoWithin: {$centerSphere: [p, 10]}}} + ]}).itcount(), + 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname); +assert.eq(2, t.find({$or: [ + {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}}, + {loc: {$geoIntersects: {$geometry: {type: 'LineString', + coordinates: [[0,0], [1,1]]}}}} + ]}).itcount(), + 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' + indexname); +assert.eq(2, t.find({$or: [ + {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}}, + {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}} + ]}).itcount(), + 'multiple $geoIntersects Point clauses not supported by $or. 
index type: ' + indexname); +assert.eq(2, t.find({$or: [ + {loc: {$geoIntersects: {$geometry: {type: 'Polygon', + coordinates: [[[0, 0], p, q, [0, 0]]]}}}}, + {loc: {$geoIntersects: {$geometry: + {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}}}} + ]}).itcount(), + 'multiple $geoIntersects Polygon clauses not supported by $or. index type: ' + indexname); + +t.dropIndexes(); + +var indexname = "2d"; + +t.ensureIndex({loc: indexname}) + +assert.eq(2, t.find({$or: [{loc: {$geoWithin: {$centerSphere: [p, 10]}}}, + {loc: {$geoWithin: {$centerSphere: [p, 10]}}}]}).itcount(), + 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname); diff --git a/jstests/core/geo_poly_edge.js b/jstests/core/geo_poly_edge.js new file mode 100644 index 00000000000..31a0849e67d --- /dev/null +++ b/jstests/core/geo_poly_edge.js @@ -0,0 +1,22 @@ +// +// Tests polygon edge cases +// + +var coll = db.getCollection( 'jstests_geo_poly_edge' ) +coll.drop(); + +coll.ensureIndex({ loc : "2d" }) + +coll.insert({ loc : [10, 10] }) +coll.insert({ loc : [10, -10] }) + +assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, -10 ]] } } }).itcount(), 2 ) + +assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, 10 ]] } } }).itcount(), 1 ) + + +coll.insert({ loc : [179, 0] }) +coll.insert({ loc : [0, 179] }) + +assert.eq( coll.find({ loc : { $within : { $polygon : [[0, 0], [1000, 0], [1000, 1000], [0, 1000]] } } }).itcount(), 3 ) + diff --git a/jstests/core/geo_poly_line.js b/jstests/core/geo_poly_line.js new file mode 100644 index 00000000000..aca77b6ab0a --- /dev/null +++ b/jstests/core/geo_poly_line.js @@ -0,0 +1,17 @@ +// Test that weird polygons work SERVER-3725 + +t = db.geo_polygon5; +t.drop(); + +t.insert({loc:[0,0]}) +t.insert({loc:[1,0]}) +t.insert({loc:[2,0]}) +t.insert({loc:[3,0]}) +t.insert({loc:[4,0]}) + +t.ensureIndex( { loc : "2d" } ); + +printjson( t.find({ loc: { "$within": { "$polygon" : 
[[0,0], [2,0], [4,0]] } } }).toArray() ) + +assert.eq( 5, t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).itcount() ) + diff --git a/jstests/core/geo_polygon1.js b/jstests/core/geo_polygon1.js new file mode 100644 index 00000000000..11f17910306 --- /dev/null +++ b/jstests/core/geo_polygon1.js @@ -0,0 +1,73 @@ +// +// Tests for N-dimensional polygon querying +// + +t = db.geo_polygon1; +t.drop(); + +num = 0; +for ( x=1; x < 9; x++ ){ + for ( y= 1; y < 9; y++ ){ + o = { _id : num++ , loc : [ x , y ] }; + t.save( o ); + } +} + +t.ensureIndex( { loc : "2d" } ); + +triangle = [[0,0], [1,1], [0,2]]; + +// Look at only a small slice of the data within a triangle +assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" ); + +boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; + +assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" ); + +//Make sure we can add object-based polygons +assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() ) + +// Look in a box much bigger than the one we have data in +boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; +assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" ); + +t.drop(); + +pacman = [ + [0,2], [0,4], [2,6], [4,6], // Head + [6,4], [4,3], [6,2], // Mouth + [4,0], [2,0] // Bottom + ]; + +t.save({loc: [1,3] }); // Add a point that's in +assert.writeOK(t.ensureIndex( { loc : "2d" } )); + +assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" ); + +t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening +t.save({ loc : [3, 7] }) // Add a point above the center of the head +t.save({ loc : [3,-1] }) // Add a point below the center of the bottom + +assert.eq( 1 , t.find({loc : { $within : { $polygon : 
pacman }}} ).count() , "Pacman double point" ); + +// Make sure we can't add bad polygons +okay = true +try{ + t.find( { loc : { $within : { $polygon : [1, 2] } } } ).toArray() + okay = false +} +catch(e){} +assert(okay) +try{ + t.find( { loc : { $within : { $polygon : [[1, 2]] } } } ).toArray() + okay = false +} +catch(e){} +assert(okay) +try{ + t.find( { loc : { $within : { $polygon : [[1, 2], [2, 3]] } } } ).toArray() + okay = false +} +catch(e){} +assert(okay) + diff --git a/jstests/core/geo_polygon1_noindex.js b/jstests/core/geo_polygon1_noindex.js new file mode 100644 index 00000000000..4fc7135c2f9 --- /dev/null +++ b/jstests/core/geo_polygon1_noindex.js @@ -0,0 +1,46 @@ +// SERVER-7343: allow $within without a geo index. + +t = db.geo_polygon1_noindex; +t.drop(); + +num = 0; +for ( x=1; x < 9; x++ ){ + for ( y= 1; y < 9; y++ ){ + o = { _id : num++ , loc : [ x , y ] }; + t.save( o ); + } +} + +triangle = [[0,0], [1,1], [0,2]]; + +// Look at only a small slice of the data within a triangle +assert.eq( 1 , t.find({ loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" ); + +boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; + +assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" ); + +//Make sure we can add object-based polygons +assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() ) + +// Look in a box much bigger than the one we have data in +boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; +assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" ); + +t.drop(); + +pacman = [ + [0,2], [0,4], [2,6], [4,6], // Head + [6,4], [4,3], [6,2], // Mouth + [4,0], [2,0] // Bottom + ]; + +assert.writeOK(t.save({loc: [1,3] })); // Add a point that's in + +assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single 
point" ); + +t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening +t.save({ loc : [3, 7] }) // Add a point above the center of the head +t.save({ loc : [3,-1] }) // Add a point below the center of the bottom + +assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" ); diff --git a/jstests/core/geo_polygon2.js b/jstests/core/geo_polygon2.js new file mode 100644 index 00000000000..c626064f153 --- /dev/null +++ b/jstests/core/geo_polygon2.js @@ -0,0 +1,263 @@ +// +// More tests for N-dimensional polygon querying +// + +// Create a polygon of some shape (no holes) +// using turtle graphics. Basically, will look like a very contorted octopus (quad-pus?) shape. +// There are no holes, but some edges will probably touch. + +var numTests = 4; + +for ( var test = 0; test < numTests; test++ ) { + + Random.srand( 1337 + test ); + + var numTurtles = 4; + var gridSize = [ 20, 20 ]; + var turtleSteps = 500; + var bounds = [ Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001 ]; + var rotation = Math.PI * Random.rand(); + var bits = Math.floor( Random.rand() * 32 ); + + printjson( { test : test, rotation : rotation, bits : bits }); + + var rotatePoint = function( x, y ) { + + if( y == undefined ){ + y = x[1]; + x = x[0]; + } + + xp = x * Math.cos( rotation ) - y * Math.sin( rotation ); + yp = y * Math.cos( rotation ) + x * Math.sin( rotation ); + + var scaleX = (bounds[1] - bounds[0]) / 360; + var scaleY = (bounds[1] - bounds[0]) / 360; + + x *= scaleX; + y *= scaleY; + + return [xp, yp]; + }; + + var grid = []; + for ( var i = 0; i < gridSize[0]; i++ ) { + grid.push( new Array( gridSize[1] ) ); + } + + grid.toString = function() { + + var gridStr = ""; + for ( var j = grid[0].length - 1; j >= -1; j-- ) { + for ( var i = 0; i < grid.length; i++ ) { + if ( i == 0 ) + gridStr += ( j == -1 ? " " : ( j % 10) ) + ": "; + if ( j != -1 ) + gridStr += "[" + ( grid[i][j] != undefined ? 
grid[i][j] : " " ) + "]"; + else + gridStr += " " + ( i % 10 ) + " "; + } + gridStr += "\n"; + } + + return gridStr; + }; + + var turtles = []; + for ( var i = 0; i < numTurtles; i++ ) { + + var up = ( i % 2 == 0 ) ? i - 1 : 0; + var left = ( i % 2 == 1 ) ? ( i - 1 ) - 1 : 0; + + turtles[i] = [ + [ Math.floor( gridSize[0] / 2 ), Math.floor( gridSize[1] / 2 ) ], + [ Math.floor( gridSize[0] / 2 ) + left, Math.floor( gridSize[1] / 2 ) + up ] ]; + + grid[turtles[i][1][0]][turtles[i][1][1]] = i; + + } + + grid[Math.floor( gridSize[0] / 2 )][Math.floor( gridSize[1] / 2 )] = "S"; + + // print( grid.toString() ) + + var pickDirections = function() { + + var up = Math.floor( Random.rand() * 3 ); + if ( up == 2 ) + up = -1; + + if ( up == 0 ) { + var left = Math.floor( Random.rand() * 3 ); + if ( left == 2 ) + left = -1; + } else + left = 0; + + if ( Random.rand() < 0.5 ) { + var swap = left; + left = up; + up = swap; + } + + return [ left, up ]; + }; + + for ( var s = 0; s < turtleSteps; s++ ) { + + for ( var t = 0; t < numTurtles; t++ ) { + + var dirs = pickDirections(); + var up = dirs[0]; + var left = dirs[1]; + + var lastTurtle = turtles[t][turtles[t].length - 1]; + var nextTurtle = [ lastTurtle[0] + left, lastTurtle[1] + up ]; + + if ( nextTurtle[0] >= gridSize[0] || + nextTurtle[1] >= gridSize[1] || + nextTurtle[0] < 0 || + nextTurtle[1] < 0 ) + continue; + + if ( grid[nextTurtle[0]][nextTurtle[1]] == undefined ) { + turtles[t].push( nextTurtle ); + grid[nextTurtle[0]][nextTurtle[1]] = t; + } + + } + } + + turtlePaths = []; + for ( var t = 0; t < numTurtles; t++ ) { + + turtlePath = []; + + var nextSeg = function(currTurtle, prevTurtle) { + + var pathX = currTurtle[0] + + if ( currTurtle[1] < prevTurtle[1] ) { + pathX = currTurtle[0] + 1; + pathY = prevTurtle[1] + } else if ( currTurtle[1] > prevTurtle[1] ) { + pathX = currTurtle[0]; + pathY = currTurtle[1]; + } else if ( currTurtle[0] < prevTurtle[0] ) { + pathX = prevTurtle[0]; + pathY = currTurtle[1]; + } else if ( 
currTurtle[0] > prevTurtle[0] ) { + pathX = currTurtle[0]; + pathY = currTurtle[1] + 1; + } + + // print( " Prev : " + prevTurtle + " Curr : " + currTurtle + " path + // : " + // + [pathX, pathY]); + + return [ pathX, pathY ] + }; + + for ( var s = 1; s < turtles[t].length; s++ ) { + + currTurtle = turtles[t][s]; + prevTurtle = turtles[t][s - 1]; + + turtlePath.push( nextSeg( currTurtle, prevTurtle ) ); + + } + + for ( var s = turtles[t].length - 2; s >= 0; s-- ) { + + currTurtle = turtles[t][s]; + prevTurtle = turtles[t][s + 1]; + + turtlePath.push( nextSeg( currTurtle, prevTurtle ) ); + + } + + // printjson( turtlePath ) + + // End of the line is not inside our polygon. + var lastTurtle = turtles[t][turtles[t].length - 1]; + grid[lastTurtle[0]][lastTurtle[1]] = undefined; + + fixedTurtlePath = []; + for ( var s = 1; s < turtlePath.length; s++ ) { + + if ( turtlePath[s - 1][0] == turtlePath[s][0] && + turtlePath[s - 1][1] == turtlePath[s][1] ) { + continue; + } + + var up = turtlePath[s][1] - turtlePath[s - 1][1]; + var right = turtlePath[s][0] - turtlePath[s - 1][0]; + var addPoint = ( up != 0 && right != 0 ); + + if ( addPoint && up != right ) { + fixedTurtlePath.push( [ turtlePath[s][0], turtlePath[s - 1][1] ] ); + } else if ( addPoint ) { + fixedTurtlePath.push( [ turtlePath[s - 1][0], turtlePath[s][1] ] ); + } + + fixedTurtlePath.push( turtlePath[s] ); + } + + // printjson( fixedTurtlePath ) + + turtlePaths.push( fixedTurtlePath ); + } + + // Uncomment to print polygon shape + // print( grid.toString() ) + + var polygon = []; + for ( var t = 0; t < turtlePaths.length; t++ ) { + for ( var s = 0; s < turtlePaths[t].length; s++ ) { + polygon.push( rotatePoint( turtlePaths[t][s] ) ); + } + } + + // Uncomment to print out polygon + // printjson( polygon ) + + t = db.polytest2; + t.drop(); + + // Test single and multi-location documents + var pointsIn = 0; + var pointsOut = 0; + var allPointsIn = []; + var allPointsOut = []; + + for ( var j = grid[0].length - 1; j 
>= 0; j-- ) { + for ( var i = 0; i < grid.length; i++ ) { + var point = rotatePoint( [ i + 0.5, j + 0.5 ] ); + + t.insert( { loc : point } ); + if ( grid[i][j] != undefined ){ + allPointsIn.push( point ); + pointsIn++; + } + else{ + allPointsOut.push( point ); + pointsOut++; + } + } + } + + var res = t.ensureIndex({ loc: "2d" }, { bits: 1 + bits, max: bounds[1], min: bounds[0] }); + assert.writeOK( res ); + + t.insert( { loc : allPointsIn } ); + t.insert( { loc : allPointsOut } ); + allPoints = allPointsIn.concat( allPointsOut ); + t.insert( { loc : allPoints } ); + + print( "Points : " ); + printjson( { pointsIn : pointsIn, pointsOut : pointsOut } ); + //print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() ) + + assert.eq( gridSize[0] * gridSize[1] + 3, t.find().count() ); + assert.eq( 2 + pointsIn, t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() ); +} diff --git a/jstests/core/geo_polygon3.js b/jstests/core/geo_polygon3.js new file mode 100644 index 00000000000..b144bfbc589 --- /dev/null +++ b/jstests/core/geo_polygon3.js @@ -0,0 +1,54 @@ +// +// Tests for polygon querying with varying levels of accuracy +// + +var numTests = 31; + +for( var n = 0; n < numTests; n++ ){ + + t = db.geo_polygon3; + t.drop(); + + num = 0; + for ( x=1; x < 9; x++ ){ + for ( y= 1; y < 9; y++ ){ + o = { _id : num++ , loc : [ x , y ] }; + t.save( o ); + } + } + + t.ensureIndex( { loc : "2d" }, { bits : 2 + n } ); + + triangle = [[0,0], [1,1], [0,2]]; + + // Look at only a small slice of the data within a triangle + assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).itcount() , "Triangle Test" ); + + + boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; + + assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Bounding Box Test" ); + + // Look in a box much bigger than the one we have data in + boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; + assert.eq( num , t.find( { loc 
: { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Big Bounding Box Test" ); + + t.drop(); + + pacman = [ + [0,2], [0,4], [2,6], [4,6], // Head + [6,4], [4,3], [6,2], // Mouth + [4,0], [2,0] // Bottom + ]; + + t.save({loc: [1,3] }); // Add a point that's in + t.ensureIndex( { loc : "2d" }, { bits : 2 + t } ); + + assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman single point" ); + + t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening + t.save({ loc : [3, 7] }) // Add a point above the center of the head + t.save({ loc : [3,-1] }) // Add a point below the center of the bottom + + assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman double point" ); +} diff --git a/jstests/core/geo_queryoptimizer.js b/jstests/core/geo_queryoptimizer.js new file mode 100644 index 00000000000..7a438bce8fb --- /dev/null +++ b/jstests/core/geo_queryoptimizer.js @@ -0,0 +1,27 @@ + +t = db.geo_qo1; +t.drop() + +t.ensureIndex({loc:"2d"}) + +t.insert({'issue':0}) +t.insert({'issue':1}) +t.insert({'issue':2}) +t.insert({'issue':2, 'loc':[30.12,-118]}) +t.insert({'issue':1, 'loc':[30.12,-118]}) +t.insert({'issue':0, 'loc':[30.12,-118]}) + +assert.eq( 6 , t.find().itcount() , "A1" ) + +assert.eq( 2 , t.find({'issue':0}).itcount() , "A2" ) + +assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "A3" ) + +assert.eq( 2 , t.find({'issue':0}).itcount() , "B1" ) + +assert.eq( 6 , t.find().itcount() , "B2" ) + +assert.eq( 2 , t.find({'issue':0}).itcount() , "B3" ) + +assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "B4" ) + diff --git a/jstests/core/geo_regex0.js b/jstests/core/geo_regex0.js new file mode 100644 index 00000000000..79042b9074e --- /dev/null +++ b/jstests/core/geo_regex0.js @@ -0,0 +1,18 @@ +// From SERVER-2247 +// Tests to make sure regex works with geo indices + +t = db.regex0 +t.drop() + +t.ensureIndex( { point : '2d', words : 1 } ) 
+t.insert( { point : [ 1, 1 ], words : [ 'foo', 'bar' ] } ) + +regex = { words : /^f/ } +geo = { point : { $near : [ 1, 1 ] } } +both = { point : { $near : [ 1, 1 ] }, words : /^f/ } + +assert.eq(1, t.find( regex ).count() ) +assert.eq(1, t.find( geo ).count() ) +assert.eq(1, t.find( both ).count() ) + + diff --git a/jstests/core/geo_s2cursorlimitskip.js b/jstests/core/geo_s2cursorlimitskip.js new file mode 100644 index 00000000000..2417d41f24c --- /dev/null +++ b/jstests/core/geo_s2cursorlimitskip.js @@ -0,0 +1,68 @@ +// Test various cursor behaviors +var t = db.geo_s2getmmm +t.drop(); +t.ensureIndex({geo: "2dsphere"}); + +Random.setRandomSeed(); +var random = Random.rand; + +/* + * To test that getmore is working within 2dsphere index. + * We insert a bunch of points, get a cursor, and fetch some + * of the points. Then we insert a bunch more points, and + * finally fetch a bunch more. + * If the final fetches work successfully, then getmore should + * be working + */ +function sign() { return random() > 0.5 ? 1 : -1; } +function insertRandomPoints(num, minDist, maxDist){ + for(var i = 0; i < num; i++){ + var lat = sign() * (minDist + random() * (maxDist - minDist)); + var lng = sign() * (minDist + random() * (maxDist - minDist)); + var point = { geo: { type: "Point", coordinates: [lng, lat] } }; + assert.writeOK(t.insert(point)); + } +} + +var initialPointCount = 200 +var smallBit = 10 +var secondPointCount = 100 + +// Insert points between 0.01 and 1.0 away. +insertRandomPoints(initialPointCount, 0.01, 1.0); + +var cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).batchSize(4); +assert.eq(cursor.count(), initialPointCount); + +for(var j = 0; j < smallBit; j++){ + assert(cursor.hasNext()); + cursor.next(); +} +// We looked at (initialPointCount - smallBit) points, should be more. 
+assert(cursor.hasNext()) + +// Insert points outside of the shell we've tested thus far +insertRandomPoints(secondPointCount, 2.01, 3.0); +assert.eq(cursor.count(), initialPointCount + secondPointCount) + +for(var k = 0; k < initialPointCount + secondPointCount - smallBit; k++){ + assert(cursor.hasNext()) + var tmpPoint = cursor.next(); +} +// Shouldn't be any more points to look at now. +assert(!cursor.hasNext()) + +var someLimit = 23; +// Make sure limit does something. +cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit) +// Count doesn't work here -- ignores limit/skip, so we use itcount. +assert.eq(cursor.itcount(), someLimit) +// Make sure skip works by skipping some stuff ourselves. +var someSkip = 3; +cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit + someSkip) +for (var i = 0; i < someSkip; ++i) { cursor.next(); } +var cursor2 = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).skip(someSkip).limit(someLimit) +while (cursor.hasNext()) { + assert(cursor2.hasNext()); + assert.eq(cursor.next(), cursor2.next()); +} diff --git a/jstests/core/geo_s2dedupnear.js b/jstests/core/geo_s2dedupnear.js new file mode 100644 index 00000000000..ac31e082891 --- /dev/null +++ b/jstests/core/geo_s2dedupnear.js @@ -0,0 +1,11 @@ +// Make sure that we don't return several of the same result due to faulty +// assumptions about the btree cursor. That is, don't return duplicate results. 
+t = db.geo_s2dedupnear +t.drop() + +t.ensureIndex( { geo : "2dsphere" } ) +var x = { "type" : "Polygon", + "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]} +t.insert({geo: x}) +res = t.find({geo: {$geoNear: {"type" : "Point", "coordinates" : [31, 41]}}}) +assert.eq(res.itcount(), 1) diff --git a/jstests/core/geo_s2descindex.js b/jstests/core/geo_s2descindex.js new file mode 100644 index 00000000000..39d153a6e55 --- /dev/null +++ b/jstests/core/geo_s2descindex.js @@ -0,0 +1,64 @@ +// +// Tests 2dsphere with descending fields, ensures correct lookup +// + +var coll = db.getCollection("twodspheredesc"); + +var descriptors = [["field1", -1], ["field2", -1], ["coordinates", "2dsphere"]] +var docA = {field1 : "a", field2 : 1, coordinates : [-118.2400013, 34.073893]} +var docB = {field1 : "b", field2 : 1, coordinates : [-118.2400012, 34.073894]} + +// Try both regular and near index cursors +var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893], + 0.44915760491198753]}}}; +var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0, 0]}}}; + +// +// The idea here is we try "2dsphere" indexes in combination with descending +// other fields in various +// positions and ensure that we return correct results. 
+// + +for ( var t = 0; t < descriptors.length; t++) { + + var descriptor = {}; + for ( var i = 0; i < descriptors.length; i++) { + descriptor[descriptors[i][0]] = descriptors[i][1]; + } + + jsTest.log("Trying 2dsphere index with descriptor " + tojson(descriptor)); + + coll.drop(); + coll.ensureIndex(descriptor); + + coll.insert(docA); + coll.insert(docB); + + assert.eq(1, coll.count(Object.merge(query, {field1 : "a"}))); + assert.eq(1, coll.count(Object.merge(query, {field1 : "b"}))); + assert.eq(2, coll.count(Object.merge(query, {field2 : 1}))); + assert.eq(0, coll.count(Object.merge(query, {field2 : 0}))); + + var firstEls = descriptors.splice(1); + descriptors = firstEls.concat(descriptors); +} + +// +// Data taken from previously-hanging result +// + +jsTest.log("Trying case found in wild..."); + +coll.drop(); +coll.ensureIndex({coordinates : "2dsphere", field : -1}); +coll.insert({coordinates : [-118.240013, 34.073893]}); +var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893], + 0.44915760491198753]}}, + field : 1}; + +assert.eq(null, coll.findOne(query)); +coll.remove({}) +coll.insert({coordinates : [-118.240013, 34.073893], field : 1}); +assert.neq(null, coll.findOne(query)); + +jsTest.log("Success!"); diff --git a/jstests/core/geo_s2disjoint_holes.js b/jstests/core/geo_s2disjoint_holes.js new file mode 100644 index 00000000000..26d94d9343a --- /dev/null +++ b/jstests/core/geo_s2disjoint_holes.js @@ -0,0 +1,81 @@ +// +// We should prohibit polygons with holes not bounded by their exterior shells. +// +// From spec: +// +// "For Polygons with multiple rings, the first must be the exterior ring and +// any others must be interior rings or holes." +// http://geojson.org/geojson-spec.html#polygon +// + +var t = db.geo_s2disjoint_holes, + coordinates = [ + // One square. + [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]], + // Another disjoint square. 
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]] + ], + poly = { + type: 'Polygon', + coordinates: coordinates + }, + multiPoly = { + type: 'MultiPolygon', + // Multi-polygon's coordinates are wrapped in one more array. + coordinates: [coordinates] + }; + +t.drop(); + +jsTest.log("We're going to print some error messages, don't be alarmed."); + +// +// Can't query with a polygon or multi-polygon that has a non-contained hole. +// +print(assert.throws( + function() { + t.findOne({geo: {$geoWithin: {$geometry: poly}}}); + }, + [], + "parsing a polygon with non-overlapping holes.")); + +print(assert.throws( + function() { + t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}}); + }, + [], + "parsing a multi-polygon with non-overlapping holes.")); + +// +// Can't insert a bad polygon or a bad multi-polygon with a 2dsphere index. +// +t.createIndex({p: '2dsphere'}); +assert.writeError(t.insert({p: poly})); +assert.writeError(t.insert({p: multiPoly})); + +// +// Can't create a 2dsphere index when the collection contains a bad polygon or +// bad multi-polygon. +// +t.drop(); +t.insert({p: poly}); +res = t.createIndex({p: '2dsphere'}); +assert(!res.ok, tojson(res)); +assert.eq(1, t.getIndexes().length); + +t.drop(); +t.insert({p: multiPoly}); +res = t.createIndex({p: '2dsphere'}); +assert(!res.ok, tojson(res)); +assert.eq(1, t.getIndexes().length); + +// +// But with no index we can insert bad polygons and bad multi-polygons. +// +t.drop(); +assert.writeOK(t.insert({p: poly})); +assert.writeOK(t.insert({p: multiPoly})); + +t.drop(); + +jsTest.log("Success.") diff --git a/jstests/core/geo_s2dupe_points.js b/jstests/core/geo_s2dupe_points.js new file mode 100644 index 00000000000..8dd6e804c78 --- /dev/null +++ b/jstests/core/geo_s2dupe_points.js @@ -0,0 +1,71 @@ +// See: SERVER-9240, SERVER-9401. +// s2 rejects shapes with duplicate adjacent points as invalid, but they are +// valid in GeoJSON. We store the duplicates, but internally remove them +// before indexing or querying. 
+t = db.geo_s2dupe_points +t.drop() +t.ensureIndex({geo: "2dsphere"}) + +function testDuplicates(shapeName, shapeWithDupes, shapeWithoutDupes) { + // insert a doc with dupes + assert.writeOK(t.insert(shapeWithDupes)); + + // duplicates are preserved when the document is fetched by _id + assert.eq(shapeWithDupes, t.findOne({_id: shapeName})); + assert.neq(shapeWithoutDupes, t.findOne({_id: shapeName}).geo); + + // can query with $geoIntersects inserted doc using both the duplicated and de-duplicated docs + assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithDupes.geo } } } ).itcount(), 1); + assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithoutDupes } } } ).itcount(), 1); + + // direct document equality in queries is preserved + assert.eq(t.find({ geo: shapeWithoutDupes} ).itcount(), 0); + assert.eq(t.find({ geo: shapeWithDupes.geo } ).itcount(), 1); +} + +// LineString +var lineWithDupes = { _id: "line", geo: { type: "LineString", + coordinates: [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6] ] + } +}; +var lineWithoutDupes = { type: "LineString", coordinates: [ [40,5], [41,6] ] }; + +// Polygon +var polygonWithDupes = { _id: "poly", geo: { type: "Polygon", + coordinates: [ + [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ], + [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], [-2.0, -2.0] ] + ] } +}; +var polygonWithoutDupes = { type: "Polygon", + coordinates: [ + [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ], + [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0] ] + ] +}; + +// MultiPolygon +var multiPolygonWithDupes = { _id: "multi", geo: { type: "MultiPolygon", coordinates: [ + [ + [ [102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ] + ], + [ + [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], + [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.8, 0.8], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] 
] + ] + ] +} }; +var multiPolygonWithoutDupes = { type: "MultiPolygon", coordinates: [ + [ + [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ] + ], + [ + [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], + [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] + ] + ] +}; + +testDuplicates("line", lineWithDupes, lineWithoutDupes); +testDuplicates("poly", polygonWithDupes, polygonWithoutDupes); +testDuplicates("multi", multiPolygonWithDupes, multiPolygonWithoutDupes); diff --git a/jstests/core/geo_s2edgecases.js b/jstests/core/geo_s2edgecases.js new file mode 100755 index 00000000000..bf46baba744 --- /dev/null +++ b/jstests/core/geo_s2edgecases.js @@ -0,0 +1,40 @@ +t = db.geo_s2edgecases +t.drop() + +roundworldpoint = { "type" : "Point", "coordinates": [ 180, 0 ] } + +// Opposite the equator +roundworld = { "type" : "Polygon", + "coordinates" : [ [ [179,1], [-179,1], [-179,-1], [179,-1], [179,1]]]} +t.insert({geo : roundworld}) + +roundworld2 = { "type" : "Polygon", + "coordinates" : [ [ [179,1], [179,-1], [-179,-1], [-179,1], [179,1]]]} +t.insert({geo : roundworld2}) + +// North pole +santapoint = { "type" : "Point", "coordinates": [ 180, 90 ] } +santa = { "type" : "Polygon", + "coordinates" : [ [ [179,89], [179,90], [-179,90], [-179,89], [179,89]]]} +t.insert({geo : santa}) +santa2 = { "type" : "Polygon", + "coordinates" : [ [ [179,89], [-179,89], [-179,90], [179,90], [179,89]]]} +t.insert({geo : santa2}) + +// South pole +penguinpoint = { "type" : "Point", "coordinates": [ 0, -90 ] } +penguin1 = { "type" : "Polygon", + "coordinates" : [ [ [0,-89], [0,-90], [179,-90], [179,-89], [0,-89]]]} +t.insert({geo : penguin1}) +penguin2 = { "type" : "Polygon", + "coordinates" : [ [ [0,-89], [179,-89], [179,-90], [0,-90], [0,-89]]]} +t.insert({geo : penguin2}) + +t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ) + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : roundworldpoint} } }); 
+assert.eq(res.count(), 2); +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : santapoint} } }); +assert.eq(res.count(), 2); +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : penguinpoint} } }); +assert.eq(res.count(), 2); diff --git a/jstests/core/geo_s2exact.js b/jstests/core/geo_s2exact.js new file mode 100644 index 00000000000..a7cf9627765 --- /dev/null +++ b/jstests/core/geo_s2exact.js @@ -0,0 +1,21 @@ +// Queries on exact geometry should return the exact geometry. +t = db.geo_s2exact +t.drop() + +function test(geometry) { + t.insert({geo: geometry}) + assert.eq(1, t.find({geo: geometry}).itcount(), geometry) + t.ensureIndex({geo: "2dsphere"}) + assert.eq(1, t.find({geo: geometry}).itcount(), geometry) + t.dropIndex({geo: "2dsphere"}) +} + +pointA = { "type" : "Point", "coordinates": [ 40, 5 ] } +test(pointA) + +someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]} +test(someline) + +somepoly = { "type" : "Polygon", + "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} +test(somepoly) diff --git a/jstests/core/geo_s2holesameasshell.js b/jstests/core/geo_s2holesameasshell.js new file mode 100644 index 00000000000..89ba5ef571b --- /dev/null +++ b/jstests/core/geo_s2holesameasshell.js @@ -0,0 +1,44 @@ +// If polygons have holes, the holes cannot be equal to the entire geometry. +var t = db.geo_s2holessameasshell +t.drop(); +t.ensureIndex({geo: "2dsphere"}); + +var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]}; +var edgePoint = {"type": "Point", "coordinates": [0, 0.5]}; +var cornerPoint = {"type": "Point", "coordinates": [0, 0]}; + +// Various "edge" cases. None of them should be returned by the non-polygon +// polygon below. +t.insert({geo : centerPoint}); +t.insert({geo : edgePoint}); +t.insert({geo : cornerPoint}); + +// This generates an empty covering. 
+var polygonWithFullHole = { "type" : "Polygon", "coordinates": [ + [[0,0], [0,1], [1, 1], [1, 0], [0, 0]], + [[0,0], [0,1], [1, 1], [1, 0], [0, 0]] + ] +}; + +// No keys for insert should error. +assert.writeError(t.insert({geo: polygonWithFullHole})); + +// No covering to search over should give an empty result set. +assert.throws(function() { + return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count()}) + +// Similar polygon to the one above, but is covered by two holes instead of +// one. +var polygonWithTwoHolesCoveringWholeArea = {"type" : "Polygon", "coordinates": [ + [[0,0], [0,1], [1, 1], [1, 0], [0, 0]], + [[0,0], [0,0.5], [1, 0.5], [1, 0], [0, 0]], + [[0,0.5], [0,1], [1, 1], [1, 0.5], [0, 0.5]] + ] +}; + +// No keys for insert should error. +assert.writeError(t.insert({geo: polygonWithTwoHolesCoveringWholeArea})); + +// No covering to search over should give an empty result set. +assert.throws(function() { + return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count()}) diff --git a/jstests/core/geo_s2index.js b/jstests/core/geo_s2index.js new file mode 100755 index 00000000000..974e4578dce --- /dev/null +++ b/jstests/core/geo_s2index.js @@ -0,0 +1,114 @@ +t = db.geo_s2index +t.drop() + +// We internally drop adjacent duplicate points in lines. 
+someline = { "type" : "LineString", "coordinates": [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6]]} +t.insert( {geo : someline , nonGeo: "someline"}) +t.ensureIndex({geo: "2dsphere"}) +foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40,5]}}}}).next(); +assert.eq(foo.geo, someline); +t.dropIndex({geo: "2dsphere"}) + +pointA = { "type" : "Point", "coordinates": [ 40, 5 ] } +t.insert( {geo : pointA , nonGeo: "pointA"}) + +pointD = { "type" : "Point", "coordinates": [ 41.001, 6.001 ] } +t.insert( {geo : pointD , nonGeo: "pointD"}) + +pointB = { "type" : "Point", "coordinates": [ 41, 6 ] } +t.insert( {geo : pointB , nonGeo: "pointB"}) + +pointC = { "type" : "Point", "coordinates": [ 41, 6 ] } +t.insert( {geo : pointC} ) + +// Add a point within the polygon but not on the border. Don't want to be on +// the path of the polyline. +pointE = { "type" : "Point", "coordinates": [ 40.6, 5.4 ] } +t.insert( {geo : pointE} ) + +// Make sure we can index this without error. +t.insert({nonGeo: "noGeoField!"}) + +somepoly = { "type" : "Polygon", + "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} +t.insert( {geo : somepoly, nonGeo: "somepoly" }) + +var res = t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ); +// We have a point without any geo data. Don't error. 
+assert.writeOK(res); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } }); +assert.eq(res.itcount(), 3); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } }); +assert.eq(res.itcount(), 4); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } }); +assert.eq(res.itcount(), 1); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } }) +assert.eq(res.itcount(), 5); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 6); + +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 6); + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1) +assert.eq(res.itcount(), 1); + +res = t.find({ "nonGeo": "pointA", + "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); + +// Don't crash mongod if we give it bad input. +t.drop() +t.ensureIndex({loc: "2dsphere", x:1}) +t.save({loc: [0,0]}) +assert.throws(function() { return t.count({loc: {$foo:[0,0]}}) }) +assert.throws(function() { return t.find({ "nonGeo": "pointA", + "geo" : { "$geoIntersects" : { "$geometry" : somepoly}, + "$near": {"$geometry" : somepoly }}}).count()}) + +// If we specify a datum, it has to be valid (WGS84). 
+t.drop() +t.ensureIndex({loc: "2dsphere"}) +res = t.insert({ loc: { type: 'Point', + coordinates: [40, 5], + crs: { type: 'name', properties: { name: 'EPSG:2000' }}}}); +assert.writeError(res); +assert.eq(0, t.find().itcount()) +res = t.insert({ loc: { type: 'Point', coordinates: [40, 5] }}); +assert.writeOK(res); +res = t.insert({ loc: { type: 'Point', + coordinates: [40, 5], + crs: { type: 'name', properties: {name :'EPSG:4326' }}}}); +assert.writeOK(res); +res = t.insert({ loc: { type:'Point', + coordinates: [40, 5], + crs: { type: 'name', + properties: { name: 'urn:ogc:def:crs:OGC:1.3:CRS84'}}}}); +assert.writeOK(res); + +// We can pass level parameters and we verify that they're valid. +// 0 <= coarsestIndexedLevel <= finestIndexedLevel <= 30. +t.drop(); +t.save({loc: [0,0]}) +res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 17, coarsestIndexedLevel: 5 }); +assert.writeOK(res); + +t.drop(); +t.save({loc: [0,0]}) +res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 31, coarsestIndexedLevel: 5 }); +assert.writeError(res); + +t.drop(); +t.save({loc: [0,0]}) +res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: 0 }); +assert.writeOK(res); + +t.drop(); +t.save({loc: [0,0]}) +res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: -1 }); +assert.writeError(res); diff --git a/jstests/core/geo_s2indexoldformat.js b/jstests/core/geo_s2indexoldformat.js new file mode 100755 index 00000000000..e2cc1f353ee --- /dev/null +++ b/jstests/core/geo_s2indexoldformat.js @@ -0,0 +1,28 @@ +// Make sure that the 2dsphere index can deal with non-GeoJSON points. +// 2dsphere does not accept legacy shapes, only legacy points. 
+t = db.geo_s2indexoldformat +t.drop() + +t.insert( {geo : [40, 5], nonGeo: ["pointA"]}) +t.insert( {geo : [41.001, 6.001], nonGeo: ["pointD"]}) +t.insert( {geo : [41, 6], nonGeo: ["pointB"]}) +t.insert( {geo : [41, 6]} ) +t.insert( {geo : {x:40.6, y:5.4}} ) + +t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ) + +res = t.find({ "geo" : { "$geoIntersects" : { "$geometry": {x:40, y:5}}}}) +assert.eq(res.count(), 1); + +res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [41,6]}}}) +assert.eq(res.count(), 2); + +// We don't support legacy polygons in 2dsphere. +assert.writeError(t.insert( {geo : [[40,5],[40,6],[41,6],[41,5]], nonGeo: ["somepoly"] })); +assert.writeError(t.insert( {geo : {a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}})); + +// Test "Can't canonicalize query: BadValue bad geo query" error. +assert.throws(function() { + t.findOne({ "geo" : { "$geoIntersects" : {"$geometry": [[40,5],[40,6],[41,6],[41,5]]}}}); +}); + diff --git a/jstests/core/geo_s2indexversion1.js b/jstests/core/geo_s2indexversion1.js new file mode 100644 index 00000000000..8524faeddbd --- /dev/null +++ b/jstests/core/geo_s2indexversion1.js @@ -0,0 +1,150 @@ +// Tests 2dsphere index option "2dsphereIndexVersion". Verifies that GeoJSON objects that are new +// in version 2 are not allowed in version 1. + +var coll = db.getCollection("geo_s2indexversion1"); +coll.drop(); + +// +// Index build should fail for invalid values of "2dsphereIndexVersion". 
+// + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": -1}); +assert.gleError(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 0}); +assert.gleError(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 3}); +assert.gleError(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": Infinity}); +assert.gleError(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": "foo"}); +assert.gleError(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": {a: 1}}); +assert.gleError(db); +coll.drop(); + +// +// Index build should succeed for valid values of "2dsphereIndexVersion". +// + +coll.ensureIndex({geo: "2dsphere"}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(1)}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(1)}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(2)}); +assert.gleSuccess(db); +coll.drop(); + +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(2)}); +assert.gleSuccess(db); +coll.drop(); + +// +// {2dsphereIndexVersion: 2} should be the default for new indexes. +// + +coll.ensureIndex({geo: "2dsphere"}); +assert.gleSuccess(db); +var specObj = coll.getDB().system.indexes.findOne({ns: coll.getFullName(), name: "geo_2dsphere"}); +assert.eq(2, specObj["2dsphereIndexVersion"]); +coll.drop(); + +// +// Test compatibility of various GeoJSON objects with both 2dsphere index versions. 
+// + +var pointDoc = {geo: {type: "Point", coordinates: [40, 5]}}; +var lineStringDoc = {geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}}; +var polygonDoc = {geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}}; +var multiPointDoc = {geo: {type: "MultiPoint", + coordinates: [[-73.9580, 40.8003], [-73.9498, 40.7968], + [-73.9737, 40.7648], [-73.9814, 40.7681]]}}; +var multiLineStringDoc = {geo: {type: "MultiLineString", + coordinates: [[[-73.96943, 40.78519], [-73.96082, 40.78095]], + [[-73.96415, 40.79229], [-73.95544, 40.78854]], + [[-73.97162, 40.78205], [-73.96374, 40.77715]], + [[-73.97880, 40.77247], [-73.97036, 40.76811]]]}}; +var multiPolygonDoc = {geo: {type: "MultiPolygon", + coordinates: [[[[-73.958, 40.8003], [-73.9498, 40.7968], + [-73.9737, 40.7648], [-73.9814, 40.7681], + [-73.958, 40.8003]]], + [[[-73.958, 40.8003], [-73.9498, 40.7968], + [-73.9737, 40.7648], [-73.958, 40.8003]]]]}}; +var geometryCollectionDoc = {geo: {type: "GeometryCollection", + geometries: [{type: "MultiPoint", + coordinates: [[-73.9580, 40.8003], + [-73.9498, 40.7968], + [-73.9737, 40.7648], + [-73.9814, 40.7681]]}, + {type: "MultiLineString", + coordinates: [[[-73.96943, 40.78519], + [-73.96082, 40.78095]], + [[-73.96415, 40.79229], + [-73.95544, 40.78854]], + [[-73.97162, 40.78205], + [-73.96374, 40.77715]], + [[-73.97880, 40.77247], + [-73.97036, 40.76811]]]}]}}; + +// {2dsphereIndexVersion: 2} indexes allow all supported GeoJSON objects. 
+coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2}); +assert.gleSuccess(db); +coll.insert(pointDoc); +assert.gleSuccess(db); +coll.insert(lineStringDoc); +assert.gleSuccess(db); +coll.insert(polygonDoc); +assert.gleSuccess(db); +coll.insert(multiPointDoc); +assert.gleSuccess(db); +coll.insert(multiLineStringDoc); +assert.gleSuccess(db); +coll.insert(multiPolygonDoc); +assert.gleSuccess(db); +coll.insert(geometryCollectionDoc); +assert.gleSuccess(db); +coll.drop(); + +// {2dsphereIndexVersion: 1} indexes allow only Point, LineString, and Polygon. +coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1}); +assert.gleSuccess(db); +coll.insert(pointDoc); +assert.gleSuccess(db); +coll.insert(lineStringDoc); +assert.gleSuccess(db); +coll.insert(polygonDoc); +assert.gleSuccess(db); +coll.insert(multiPointDoc); +assert.gleError(db); +coll.insert(multiLineStringDoc); +assert.gleError(db); +coll.insert(multiPolygonDoc); +assert.gleError(db); +coll.insert(geometryCollectionDoc); +assert.gleError(db); +coll.drop(); diff --git a/jstests/core/geo_s2intersection.js b/jstests/core/geo_s2intersection.js new file mode 100644 index 00000000000..42abacca98d --- /dev/null +++ b/jstests/core/geo_s2intersection.js @@ -0,0 +1,141 @@ +var t = db.geo_s2intersectinglines +t.drop() +t.ensureIndex( { geo : "2dsphere" } ); + +/* All the tests in this file are generally confirming intersections based upon + * these three geo objects. + */ +var canonLine = { + name: 'canonLine', + geo: { + type: "LineString", + coordinates: [[0.0, 0.0], [1.0, 0.0]] + } +}; + +var canonPoint = { + name: 'canonPoint', + geo: { + type: "Point", + coordinates: [10.0, 10.0] + } +}; + +var canonPoly = { + name: 'canonPoly', + geo: { + type: "Polygon", + coordinates: [ + [[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]] + ] + } +}; + +t.insert(canonLine); +t.insert(canonPoint); +t.insert(canonPoly); + + +//Case 1: Basic sanity intersection. 
+var testLine = {type: "LineString", + coordinates: [[0.5, 0.5], [0.5, -0.5]]}; + +var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonLine'); + + +//Case 2: Basic Polygon intersection. +// we expect that the canonLine should intersect with this polygon. +var testPoly = {type: "Polygon", + coordinates: [ + [[0.4, -0.1],[0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]] + ]} + +result = t.find({geo: {$geoIntersects: {$geometry: testPoly}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonLine'); + + +//Case 3: Intersects the vertex of a line. +// When a line intersects the vertex of a line, we expect this to +// count as a geoIntersection. +testLine = {type: "LineString", + coordinates: [[0.0, 0.5], [0.0, -0.5]]}; + +result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonLine'); + +// Case 4: Sanity no intersection. +// This line just misses the canonLine in the negative direction. This +// should not count as a geoIntersection. +testLine = {type: "LineString", + coordinates: [[-0.1, 0.5], [-0.1, -0.5]]}; + +result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 0); + + +// Case 5: Overlapping line - only partially overlaps. +// Undefined behaviour: does intersect +testLine = {type: "LineString", + coordinates: [[-0.5, 0.0], [0.5, 0.0]]}; + +var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonLine'); + + +// Case 6: Contained line - this line is fully contained by the canonLine +// Undefined behaviour: doesn't intersect. +testLine = {type: "LineString", + coordinates: [[0.1, 0.0], [0.9, 0.0]]}; + +result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 0); + +// Case 7: Identical line in the identical position. 
+// Undefined behaviour: does intersect. +testLine = {type: "LineString", + coordinates: [[0.0, 0.0], [1.0, 0.0]]}; + +result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonLine'); + +// Case 8: Point intersection - we search with a line that intersects +// with the canonPoint. +testLine = {type: "LineString", + coordinates: [[10.0, 11.0], [10.0, 9.0]]}; + +result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonPoint'); + +// Case 9: Point point intersection +// as above but with an identical point to the canonPoint. We expect an +// intersection here. +testPoint = {type: "Point", + coordinates: [10.0, 10.0]} + +result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonPoint'); + + +//Case 10: Sanity point non-intersection. +var testPoint = {type: "Point", + coordinates: [12.0, 12.0]} + +result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); +assert.eq(result.count(), 0); + +// Case 11: Point polygon intersection +// verify that a point inside a polygon $geoIntersects. +testPoint = {type: "Point", + coordinates: [50.5, 50.5]} + +result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); +assert.eq(result.count(), 1); +assert.eq(result[0]['name'], 'canonPoly'); diff --git a/jstests/core/geo_s2largewithin.js b/jstests/core/geo_s2largewithin.js new file mode 100644 index 00000000000..2327f1fb02d --- /dev/null +++ b/jstests/core/geo_s2largewithin.js @@ -0,0 +1,45 @@ +// If our $within is enormous, create a coarse covering for the search so it +// doesn't take forever. 
+t = db.geo_s2largewithin +t.drop() +t.ensureIndex( { geo : "2dsphere" } ); + +testPoint = { + name: "origin", + geo: { + type: "Point", + coordinates: [0.0, 0.0] + } +}; + +testHorizLine = { + name: "horiz", + geo: { + type: "LineString", + coordinates: [[-2.0, 10.0], [2.0, 10.0]] + } +}; + +testVertLine = { + name: "vert", + geo: { + type: "LineString", + coordinates: [[10.0, -2.0], [10.0, 2.0]] + } +}; + +t.insert(testPoint); +t.insert(testHorizLine); +t.insert(testVertLine); + +//Test a poly that runs horizontally along the equator. + +longPoly = {type: "Polygon", + coordinates: [ + [[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]] + ]}; + +result = t.find({geo: {$geoWithin: {$geometry: longPoly}}}); +assert.eq(result.itcount(), 1); +result = t.find({geo: {$geoWithin: {$geometry: longPoly}}}); +assert.eq("origin", result[0].name) diff --git a/jstests/core/geo_s2meridian.js b/jstests/core/geo_s2meridian.js new file mode 100644 index 00000000000..6bc7dc735f2 --- /dev/null +++ b/jstests/core/geo_s2meridian.js @@ -0,0 +1,108 @@ +t = db.geo_s2meridian; +t.drop(); +t.ensureIndex({geo: "2dsphere"}); + +/* + * Test 1: check that intersection works on the meridian. We insert a line + * that crosses the meridian, and then run a geoIntersect with a line + * that runs along the meridian. + */ + +meridianCrossingLine = { + geo: { + type: "LineString", + coordinates: [ + [-178.0, 10.0], + [178.0, 10.0]] + } +}; + +assert.writeOK(t.insert(meridianCrossingLine)); + +lineAlongMeridian = { + type: "LineString", + coordinates: [ + [180.0, 11.0], + [180.0, 9.0] + ] +} + +result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}}); +assert.eq(result.itcount(), 1); + +t.drop(); +t.ensureIndex({geo: "2dsphere"}); +/* + * Test 2: check that $geoWithin works across the meridian. We insert points + * on the meridian, and immediately on either side, and confirm that a poly + * covering all of them returns them all. 
+ */ +pointOnNegativeSideOfMeridian = { + geo: { + type: "Point", + coordinates: [-179.0, 1.0] + } +}; +pointOnMeridian = { + geo: { + type: "Point", + coordinates: [180.0, 1.0] + } +}; +pointOnPositiveSideOfMeridian = { + geo: { + type: "Point", + coordinates: [179.0, 1.0] + } +}; + +t.insert(pointOnMeridian); +t.insert(pointOnNegativeSideOfMeridian); +t.insert(pointOnPositiveSideOfMeridian); + +meridianCrossingPoly = { + type: "Polygon", + coordinates: [ + [[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]] + ] +}; + +result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}}); +assert.eq(result.itcount(), 3); + +t.drop(); +t.ensureIndex({geo: "2dsphere"}); +/* + * Test 3: Check that near works around the meridian. Insert two points, one + * closer, but across the meridian, and confirm they both come back, and + * that the order is correct. + */ +pointOnNegativeSideOfMerid = { + name: "closer", + geo: { + type: "Point", + coordinates: [-179.0, 0.0] + } +}; + +pointOnPositiveSideOfMerid = { + name: "farther", + geo: { + type: "Point", + coordinates: [176.0, 0.0] + } +}; + +t.insert(pointOnNegativeSideOfMerid); +t.insert(pointOnPositiveSideOfMerid); + +pointOnPositiveSideOfMeridian = { + type: "Point", + coordinates: [179.0, 0.0] +}; + +result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}}); +assert.eq(result.itcount(), 2); +result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}}); +assert.eq(result[0].name, "closer"); +assert.eq(result[1].name, "farther"); diff --git a/jstests/core/geo_s2multi.js b/jstests/core/geo_s2multi.js new file mode 100644 index 00000000000..56e3ef77068 --- /dev/null +++ b/jstests/core/geo_s2multi.js @@ -0,0 +1,46 @@ +t = db.geo_s2index +t.drop() + +t.ensureIndex({geo: "2dsphere"}) + +// Let's try the examples in the GeoJSON spec. 
+multiPointA = { "type": "MultiPoint", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] } +assert.writeOK(t.insert({geo: multiPointA})); + +multiLineStringA = { "type": "MultiLineString", "coordinates": [ [ [100.0, 0.0], [101.0, 1.0] ], + [ [102.0, 2.0], [103.0, 3.0] ]]} +assert.writeOK(t.insert({geo: multiLineStringA})); + +multiPolygonA = { "type": "MultiPolygon", "coordinates": [ + [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]], + [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], + [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]]} +assert.writeOK(t.insert({geo: multiPolygonA})) + +assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: + {"type": "Point", "coordinates": [100,0]}}}}).itcount()); +assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: + {"type": "Point", "coordinates": [101.0,1.0]}}}}).itcount()); + +// Inside the hole in multiPolygonA +assert.eq(0, t.find({geo: {$geoIntersects: {$geometry: + {"type": "Point", "coordinates": [100.21,0.21]}}}}).itcount()); + +// One point inside the hole, one out. +assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: + {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21]]}}}}).itcount()); +assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: + {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21],[101,1]]}}}}).itcount()); +// Polygon contains itself and the multipoint. +assert.eq(2, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount()); + +partialPolygonA = { "type": "Polygon", "coordinates": + [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ] }; +assert.writeOK(t.insert({geo: partialPolygonA})); +// Polygon contains itself, the partial poly, and the multipoint +assert.eq(3, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount()); + +assert.eq(1, t.find({geo: {$geoWithin: {$geometry: partialPolygonA}}}).itcount()); + +// Itself, the multi poly, the multipoint... 
+assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: partialPolygonA}}}).itcount()); diff --git a/jstests/core/geo_s2near.js b/jstests/core/geo_s2near.js new file mode 100644 index 00000000000..136e821b4b8 --- /dev/null +++ b/jstests/core/geo_s2near.js @@ -0,0 +1,84 @@ +// Test 2dsphere near search, called via find and geoNear. +t = db.geo_s2near +t.drop(); + +// Make sure that geoNear gives us back loc +goldenPoint = {type: "Point", coordinates: [ 31.0, 41.0]} +t.insert({geo: goldenPoint}) +t.ensureIndex({ geo : "2dsphere" }) +resNear = db.runCommand({geoNear : t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true}) +assert.eq(resNear.results[0].loc, goldenPoint) + +// FYI: +// One degree of long @ 0 is 111km or so. +// One degree of lat @ 0 is 110km or so. +lat = 0 +lng = 0 +points = 10 +for (var x = -points; x < points; x += 1) { + for (var y = -points; y < points; y += 1) { + t.insert({geo : { "type" : "Point", "coordinates" : [lng + x/1000.0, lat + y/1000.0]}}) + } +} + +origin = { "type" : "Point", "coordinates": [ lng, lat ] } + +t.ensureIndex({ geo : "2dsphere" }) + +// Near only works when the query is a point. +someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]} +somepoly = { "type" : "Polygon", + "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} +assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : someline } } }).count()}) +assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : somepoly } } }).count()}) +assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: someline, spherical:true }).results.length}) +assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: somepoly, spherical:true }).results.length}) + +// Do some basic near searches. 
+res = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 2000} } }).limit(10) +resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, maxDistance: Math.PI, spherical: true}) +assert.eq(res.itcount(), resNear.results.length, 10) + +res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10) +resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, spherical: true}) +assert.eq(res.itcount(), resNear.results.length, 10) + +// Find all the points! +res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000) +resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true}) +assert.eq(resNear.results.length, res.itcount(), (2 * points) * (2 * points)) + +// longitude goes -180 to 180 +// latitude goes -90 to 90 +// Let's put in some perverse (polar) data and make sure we get it back. +// Points go long, lat. +t.insert({geo: { "type" : "Point", "coordinates" : [-180, -90]}}) +t.insert({geo: { "type" : "Point", "coordinates" : [180, -90]}}) +t.insert({geo: { "type" : "Point", "coordinates" : [180, 90]}}) +t.insert({geo: { "type" : "Point", "coordinates" : [-180, 90]}}) +res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000) +resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true}) +assert.eq(res.itcount(), resNear.results.length, (2 * points) * (2 * points) + 4) + +function testRadAndDegreesOK(distance) { + // Distance for old style points is radians. + resRadians = t.find({geo: {$nearSphere: [0,0], $maxDistance: (distance / (6378.1 * 1000))}}) + // Distance for new style points is meters. + resMeters = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: distance} } }) + // And we should get the same # of results no matter what. + assert.eq(resRadians.itcount(), resMeters.itcount()) + + // Also, geoNear should behave the same way. 
+ resGNMeters = db.runCommand({geoNear : t.getName(), near: origin, maxDistance: distance, spherical: true}) + resGNRadians = db.runCommand({geoNear : t.getName(), near: [0,0], maxDistance: (distance / (6378.1 * 1000)), spherical: true}) + assert.eq(resGNRadians.results.length, resGNMeters.results.length) + for (var i = 0; i < resGNRadians.length; ++i) { + // Radius of earth * radians = distance in meters. + assert.close(resGNRadians.results[i].dis * 6378.1 * 1000, resGNMeters.results[i].dis) + } +} + +testRadAndDegreesOK(1); +testRadAndDegreesOK(10) +testRadAndDegreesOK(50) +testRadAndDegreesOK(10000) diff --git a/jstests/core/geo_s2nearComplex.js b/jstests/core/geo_s2nearComplex.js new file mode 100644 index 00000000000..835dfe88481 --- /dev/null +++ b/jstests/core/geo_s2nearComplex.js @@ -0,0 +1,268 @@ +var t = db.get_s2nearcomplex +t.drop() +t.ensureIndex({geo: "2dsphere"}) + +/* Short names for math operations */ +Random.setRandomSeed(); +var random = Random.rand; +var PI = Math.PI; +var asin = Math.asin; +var sin = Math.sin; +var cos = Math.cos; +var atan2 = Math.atan2 + + +var originGeo = {type: "Point", coordinates: [20.0, 20.0]}; +// Center point for all tests. +var origin = { + name: "origin", + geo: originGeo +} + + +/* + * Convenience function for checking that coordinates match. threshold let's you + * specify how accurate equals should be. + */ +function coordinateEqual(first, second, threshold){ + threshold = threshold || 0.001 + first = first['geo']['coordinates'] + second = second['geo']['coordinates'] + if(Math.abs(first[0] - second[0]) <= threshold){ + if(Math.abs(first[1] - second[1]) <= threshold){ + return true; + } + } + return false; +} + +/* + * Creates `count` random and uniformly distributed points centered around `origin` + * no points will be closer to origin than minDist, and no points will be further + * than maxDist. Points will be inserted into the global `t` collection, and will + * be returned. 
+ * based on this algorithm: http://williams.best.vwh.net/avform.htm#LL + */ +function uniformPoints(origin, count, minDist, maxDist){ + var i; + var lng = origin['geo']['coordinates'][0]; + var lat = origin['geo']['coordinates'][1]; + var distances = []; + var points = []; + for(i=0; i < count; i++){ + distances.push((random() * (maxDist - minDist)) + minDist); + } + distances.sort(); + while(points.length < count){ + var angle = random() * 2 * PI; + var distance = distances[points.length]; + var pointLat = asin((sin(lat) * cos(distance)) + (cos(lat) * sin(distance) * cos(angle))); + var pointDLng = atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat)); + var pointLng = ((lng - pointDLng + PI) % 2*PI) - PI; + + // Latitude must be [-90, 90] + var newLat = lat + pointLat; + if (newLat > 90) newLat -= 180; + if (newLat < -90) newLat += 180; + + // Longitude must be [-180, 180] + var newLng = lng + pointLng; + if (newLng > 180) newLng -= 360; + if (newLng < -180) newLng += 360; + + var newPoint = { + geo: { + type: "Point", + //coordinates: [lng + pointLng, lat + pointLat] + coordinates: [newLng, newLat] + } + }; + + points.push(newPoint); + } + for(i=0; i < points.length; i++){ + t.insert(points[i]); + } + return points; +} + +/* + * Creates a random uniform field as above, excepting for `numberOfHoles` gaps that + * have `sizeOfHoles` points missing centered around a random point. + */ +function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles){ + var points = uniformPoints(origin, count, minDist, maxDist); + var i; + for(i=0; i smallQuery[1]); + +// Let's just index one field. 
+var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere"}, + {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}}); +print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]); +// assert(smallQuery[0] > smallQuery[1]); + +// And the other one. +var smallQuery = timeWithoutAndWithAnIndex({from: "2dsphere"}, + {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}}); +print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]); +// assert(smallQuery[0] > smallQuery[1]); diff --git a/jstests/core/geo_s2validindex.js b/jstests/core/geo_s2validindex.js new file mode 100644 index 00000000000..bc8a569e559 --- /dev/null +++ b/jstests/core/geo_s2validindex.js @@ -0,0 +1,26 @@ +// +// Tests valid cases for creation of 2dsphere index +// + +var coll = db.getCollection("twodspherevalid"); + +// Valid index +coll.drop(); +assert.writeOK(coll.ensureIndex({geo : "2dsphere", other : 1})); + +// Valid index +coll.drop(); +assert.writeOK(coll.ensureIndex({geo : "2dsphere", other : 1, geo2 : "2dsphere"})); + +// Invalid index, using hash with 2dsphere +coll.drop(); +assert.writeError(coll.ensureIndex({geo : "2dsphere", other : "hash"})); + +// Invalid index, using 2d with 2dsphere +coll.drop(); +assert.writeError(coll.ensureIndex({geo : "2dsphere", other : "2d"})); + +jsTest.log("Success!"); + +// Ensure the empty collection is gone, so that small_oplog passes. +coll.drop(); diff --git a/jstests/core/geo_s2within.js b/jstests/core/geo_s2within.js new file mode 100644 index 00000000000..87fd32a7676 --- /dev/null +++ b/jstests/core/geo_s2within.js @@ -0,0 +1,36 @@ +// Test some cases that might be iffy with $within, mostly related to polygon w/holes. 
+t = db.geo_s2within +t.drop() +t.ensureIndex({geo: "2dsphere"}) + +somepoly = { "type" : "Polygon", + "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} + +t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.2]]}}) +// This is only partially contained within the polygon. +t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}}) + +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); + +t.drop() +t.ensureIndex({geo: "2dsphere"}) +somepoly = { "type" : "Polygon", + "coordinates" : [ [ [40,5], [40,8], [43,8], [43,5], [40,5]], + [ [41,6], [42,6], [42,7], [41,7], [41,6]]]} + +t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }}) +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); +// In the hole. Shouldn't find it. +t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }}) +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); +// Also in the hole. +t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}}) +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); +// Half-hole, half-not. Shouldn't be $within. 
+t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}}) +res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) +assert.eq(res.itcount(), 1); diff --git a/jstests/core/geo_small_large.js b/jstests/core/geo_small_large.js new file mode 100644 index 00000000000..e927e8d5402 --- /dev/null +++ b/jstests/core/geo_small_large.js @@ -0,0 +1,158 @@ +// SERVER-2386, general geo-indexing using very large and very small bounds + +load( "jstests/libs/geo_near_random.js" ); + +// Do some random tests (for near queries) with very large and small ranges + +var test = new GeoNearRandomTest( "geo_small_large" ); + +bounds = { min : -Math.pow( 2, 34 ), max : Math.pow( 2, 34 ) }; + +test.insertPts( 50, bounds ); + +printjson( db["geo_small_large"].find().limit( 10 ).toArray() ); + +test.testPt( [ 0, 0 ] ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); + +test = new GeoNearRandomTest( "geo_small_large" ); + +bounds = { min : -Math.pow( 2, -34 ), max : Math.pow( 2, -34 ) }; + +test.insertPts( 50, bounds ); + +printjson( db["geo_small_large"].find().limit( 10 ).toArray() ); + +test.testPt( [ 0, 0 ] ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); +test.testPt( test.mkPt( undefined, bounds ) ); + + +// Check that our box and circle queries also work +var scales = [ + Math.pow( 2, 40 ), + Math.pow( 2, -40 ), + Math.pow(2, 2), + Math.pow(3, -15), + Math.pow(3, 15) +]; + +for ( var i = 0; i < scales.length; i++ ) { + + var scale = scales[i]; + + var eps = Math.pow( 2, -7 ) * scale; + var radius = 5 * scale; + var max = 10 * scale; + var min = -max; + var range = max - min; + var bits = 2 + Math.random() * 30; + + var t = db["geo_small_large"]; + t.drop(); + t.ensureIndex( { p : "2d" }, { min : min, max : 
max, bits : bits }); + + var outPoints = 0; + var inPoints = 0; + + printjson({ eps : eps, radius : radius, max : max, min : min, range : range, bits : bits }); + + // Put a point slightly inside and outside our range + for ( var j = 0; j < 2; j++ ) { + var currRad = ( j % 2 == 0 ? radius + eps : radius - eps ); + var res = t.insert( { p : { x : currRad, y : 0 } } ); + print( res.toString() ); + } + + printjson( t.find().toArray() ); + + assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1, + "Incorrect center points found!" ); + assert.eq( t.count( { p : { $within : { $box : [ [ -radius, -radius ], [ radius, radius ] ] } } } ), 1, + "Incorrect box points found!" ); + + var shouldFind = []; + var randoms = []; + + for ( var j = 0; j < 2; j++ ) { + + var randX = Math.random(); // randoms[j].randX + var randY = Math.random(); // randoms[j].randY + + randoms.push({ randX : randX, randY : randY }); + + var x = randX * ( range - eps ) + eps + min; + var y = randY * ( range - eps ) + eps + min; + + t.insert( { p : [ x, y ] } ); + + if ( x * x + y * y > radius * radius ){ + // print( "out point "); + // printjson({ x : x, y : y }) + outPoints++; + } + else{ + // print( "in point "); + // printjson({ x : x, y : y }) + inPoints++; + shouldFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) }); + } + } + + /* + function printDiff( didFind, shouldFind ){ + + for( var i = 0; i < shouldFind.length; i++ ){ + var beenFound = false; + for( var j = 0; j < didFind.length && !beenFound ; j++ ){ + beenFound = shouldFind[i].x == didFind[j].x && + shouldFind[i].y == didFind[j].y + } + + if( !beenFound ){ + print( "Could not find: " ) + shouldFind[i].inRadius = ( radius - shouldFind[i].radius >= 0 ) + printjson( shouldFind[i] ) + } + } + } + + print( "Finding random pts... 
") + var found = t.find( { p : { $within : { $center : [[0, 0], radius ] } } } ).toArray() + var didFind = [] + for( var f = 0; f < found.length; f++ ){ + //printjson( found[f] ) + var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0] + var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1] + didFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) }) + } + + print( "Did not find but should: ") + printDiff( didFind, shouldFind ) + print( "Found but should not have: ") + printDiff( shouldFind, didFind ) + */ + + assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1 + inPoints, + "Incorrect random center points found!\n" + tojson( randoms ) ); + + print("Found " + inPoints + " points in and " + outPoints + " points out."); + + var found = t.find( { p : { $near : [0, 0], $maxDistance : radius } } ).toArray(); + var dist = 0; + for( var f = 0; f < found.length; f++ ){ + var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0]; + var y = found[f].p.y != undefined ? 
found[f].p.y : found[f].p[1]; + print( "Dist: x : " + x + " y : " + y + " dist : " + + Math.sqrt( x * x + y * y) + " radius : " + radius ); + } + + assert.eq( t.count( { p : { $near : [0, 0], $maxDistance : radius } } ), 1 + inPoints, + "Incorrect random center points found near!\n" + tojson( randoms ) ); +} + diff --git a/jstests/core/geo_sort1.js b/jstests/core/geo_sort1.js new file mode 100644 index 00000000000..67de80e65c7 --- /dev/null +++ b/jstests/core/geo_sort1.js @@ -0,0 +1,22 @@ + +t = db.geo_sort1 +t.drop(); + +for ( x=0; x<10; x++ ){ + for ( y=0; y<10; y++ ){ + t.insert( { loc : [ x , y ] , foo : x * x * y } ); + } +} + +t.ensureIndex( { loc : "2d" , foo : 1 } ) + +q = t.find( { loc : { $near : [ 5 , 5 ] } , foo : { $gt : 20 } } ) +m = function(z){ return z.foo; } + +a = q.clone().map( m ); +b = q.clone().sort( { foo : 1 } ).map( m ); + +assert.neq( a , b , "A" ); +a.sort(); +b.sort(); +assert.eq( a , b , "B" ); diff --git a/jstests/core/geo_uniqueDocs.js b/jstests/core/geo_uniqueDocs.js new file mode 100644 index 00000000000..61f1a40522d --- /dev/null +++ b/jstests/core/geo_uniqueDocs.js @@ -0,0 +1,40 @@ +// Test uniqueDocs option for $within and geoNear queries SERVER-3139 +// SERVER-12120 uniqueDocs is deprecated. Server always returns unique documents. + +collName = 'geo_uniqueDocs_test' +t = db.geo_uniqueDocs_test +t.drop() + +t.save( { locs : [ [0,2], [3,4]] } ) +t.save( { locs : [ [6,8], [10,10] ] } ) + +t.ensureIndex( { locs : '2d' } ) + +// geoNear tests +// uniqueDocs option is ignored. 
+assert.eq(2, db.runCommand({geoNear:collName, near:[0,0]}).results.length) +assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:false}).results.length) +assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:true}).results.length) +results = db.runCommand({geoNear:collName, near:[0,0], num:2}).results +assert.eq(2, results.length) +assert.close(2, results[0].dis) +assert.close(10, results[1].dis) +results = db.runCommand({geoNear:collName, near:[0,0], num:2, uniqueDocs:true}).results +assert.eq(2, results.length) +assert.close(2, results[0].dis) +assert.close(10, results[1].dis) + +// $within tests + +assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]]}}}).itcount()) +assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : true}}}).itcount()) +assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : false}}}).itcount()) + +assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : true}}}).itcount()) +assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : false}}}).itcount()) + +assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : true}}}).itcount()) +assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : false}}}).itcount()) + +assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : true}}}).itcount()) +assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : false}}}).itcount()) diff --git a/jstests/core/geo_uniqueDocs2.js b/jstests/core/geo_uniqueDocs2.js new file mode 100644 index 00000000000..f9b95113f78 --- /dev/null +++ b/jstests/core/geo_uniqueDocs2.js @@ -0,0 +1,80 @@ +// Additional checks for geo uniqueDocs and includeLocs SERVER-3139. +// SERVER-12120 uniqueDocs is deprecated. 
+// Server always returns results with implied uniqueDocs=true + +collName = 'jstests_geo_uniqueDocs2'; +t = db[collName]; +t.drop(); + +t.save( {loc:[[20,30],[40,50]]} ); +t.ensureIndex( {loc:'2d'} ); + +// Check exact matches of different locations. +assert.eq( 1, t.count( { loc : [20,30] } ) ); +assert.eq( 1, t.count( { loc : [40,50] } ) ); + +// Check behavior for $near, where $uniqueDocs mode is unavailable. +assert.eq( [t.findOne()], t.find( { loc: { $near: [50,50] } } ).toArray() ); + +// Check correct number of matches for $within / $uniqueDocs. +// uniqueDocs ignored - does not affect results. +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40] } } } ) ); +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : true } } } ) ); +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ) ); + +// For $within / $uniqueDocs, limit applies to docs. +assert.eq( 1, t.find( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ).limit(1).itcount() ); + +// Now check a circle only containing one of the locs. +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10] } } } ) ); +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : true } } } ) ); +assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : false } } } ) ); + +// Check number and character of results with geoNear / uniqueDocs / includeLocs. 
+notUniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : false } ); +uniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : false } ); +notUniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ); +uniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : true } ); + +// Check that only unique docs are returned. +assert.eq( 1, notUniqueNotInclude.results.length ); +assert.eq( 1, uniqueNotInclude.results.length ); +assert.eq( 1, notUniqueInclude.results.length ); +assert.eq( 1, uniqueInclude.results.length ); + +// Check that locs are included. +assert( !notUniqueNotInclude.results[0].loc ); +assert( !uniqueNotInclude.results[0].loc ); +assert( notUniqueInclude.results[0].loc ); +assert( uniqueInclude.results[0].loc ); + +// For geoNear / uniqueDocs, 'num' limit seems to apply to locs. +assert.eq( 1, db.runCommand( { geoNear : collName , near : [50,50], num : 1, uniqueDocs : false, includeLocs : false } ).results.length ); + +// Check locs returned in includeLocs mode. +t.remove({}); +objLocs = [{x:20,y:30,z:['loc1','loca']},{x:40,y:50,z:['loc2','locb']}]; +t.save( {loc:objLocs} ); +results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results; +assert.contains( results[0].loc, objLocs ); + +// Check locs returned in includeLocs mode, where locs are arrays. +t.remove({}); +arrLocs = [[20,30],[40,50]]; +t.save( {loc:arrLocs} ); +results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results; +// The original loc arrays are returned as objects. +expectedLocs = arrLocs + +assert.contains( results[0].loc, expectedLocs ); + +// Test a large number of locations in the array. 
+t.drop(); +arr = []; +for( i = 0; i < 10000; ++i ) { + arr.push( [10,10] ); +} +arr.push( [100,100] ); +t.save( {loc:arr} ); +t.ensureIndex( {loc:'2d'} ); +assert.eq( 1, t.count( { loc : { $within : { $center : [[99, 99], 5] } } } ) ); diff --git a/jstests/core/geo_update.js b/jstests/core/geo_update.js new file mode 100644 index 00000000000..dd4b28c8374 --- /dev/null +++ b/jstests/core/geo_update.js @@ -0,0 +1,37 @@ +// Tests geo queries w/ update & upsert +// from SERVER-3428 + +var coll = db.testGeoUpdate +coll.drop() + +coll.ensureIndex({ loc : "2d" }) + +// Test normal update +print( "Updating..." ) + +coll.insert({ loc : [1.0, 2.0] }) + +coll.update({ loc : { $near : [1.0, 2.0] } }, + { x : true, loc : [1.0, 2.0] }) + +// Test upsert +print( "Upserting..." ) + +coll.update({ loc : { $within : { $center : [[10, 20], 1] } } }, + { x : true }, + true) + +coll.update({ loc : { $near : [10.0, 20.0], $maxDistance : 1 } }, + { x : true }, + true) + + +coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } }, + { $set : { loc : [100, 100] }, $push : { people : "chris" } }, + true) + +coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } }, + { $set : { loc : [100, 100] }, $push : { people : "john" } }, + true) + +assert.eq( 4, coll.find().itcount() ) diff --git a/jstests/core/geo_update1.js b/jstests/core/geo_update1.js new file mode 100644 index 00000000000..6352ef0aa19 --- /dev/null +++ b/jstests/core/geo_update1.js @@ -0,0 +1,36 @@ + +t = db.geo_update1 +t.drop() + +for(var x = 0; x < 10; x++ ) { + for(var y = 0; y < 10; y++ ) { + t.insert({"loc": [x, y] , x : x , y : y , z : 1 }); + } +} + +t.ensureIndex( { loc : "2d" } ) + +function p(){ + print( "--------------" ); + for ( var y=0; y<10; y++ ){ + var c = t.find( { y : y } ).sort( { x : 1 } ) + var s = ""; + while ( c.hasNext() ) + s += c.next().z + " "; + print( s ) + } + print( "--------------" ); +} + +p() + +var res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, 
false, true); +assert.writeOK( res ); +p() + +assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true)); +p() + +res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, false, true); +assert.writeOK( res ); +p() diff --git a/jstests/core/geo_update2.js b/jstests/core/geo_update2.js new file mode 100644 index 00000000000..6a42619ac98 --- /dev/null +++ b/jstests/core/geo_update2.js @@ -0,0 +1,39 @@ + +t = db.geo_update2 +t.drop() + +for(var x = 0; x < 10; x++ ) { + for(var y = 0; y < 10; y++ ) { + t.insert({"loc": [x, y] , x : x , y : y }); + } +} + +t.ensureIndex( { loc : "2d" } ) + +function p(){ + print( "--------------" ); + for ( var y=0; y<10; y++ ){ + var c = t.find( { y : y } ).sort( { x : 1 } ) + var s = ""; + while ( c.hasNext() ) + s += c.next().z + " "; + print( s ) + } + print( "--------------" ); +} + +p() + + +assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, + {'$inc' : { 'z' : 1}}, false, true)); +p() + +assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true)); +p() + + +assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, + {'$inc' : { 'z' : 1}}, false, true)); +p() + diff --git a/jstests/core/geo_update_btree.js b/jstests/core/geo_update_btree.js new file mode 100644 index 00000000000..225a6635903 --- /dev/null +++ b/jstests/core/geo_update_btree.js @@ -0,0 +1,28 @@ +// Tests whether the geospatial search is stable under btree updates + +var coll = db.getCollection( "jstests_geo_update_btree" ) +coll.drop() + +coll.ensureIndex( { loc : '2d' } ) + +var big = new Array( 3000 ).toString() + +if (testingReplication) { + coll.setWriteConcern({ w: 2 }); +} + +var parallelInsert = startParallelShell( + "for ( var i = 0; i < 1000; i++ ) {" + + " var doc = { loc: [ Random.rand() * 180, Random.rand() * 180 ], v: '' }" + + " db.jstests_geo_update_btree.insert(doc);" + + "}"); + +for ( i = 0; i < 1000; i++ ) { + coll.update( + { loc : { $within : { $center : [ [ Random.rand() 
* 180, Random.rand() * 180 ], Random.rand() * 50 ] } } }, + { $set : { v : big } }, false, true ) + + if( i % 10 == 0 ) print( i ); +} + +parallelInsert(); diff --git a/jstests/core/geo_update_btree2.js b/jstests/core/geo_update_btree2.js new file mode 100644 index 00000000000..d99970c73e0 --- /dev/null +++ b/jstests/core/geo_update_btree2.js @@ -0,0 +1,71 @@ +// Tests whether the geospatial search is stable under btree updates +// +// Tests the implementation of the 2d search, not the behavior we promise. MongoDB currently +// promises no isolation, so there is no guarantee that we get the results we expect in this file. + +// The old query system, if it saw a 2d query, would never consider a collscan. +// +// The new query system can answer the queries in this file with a collscan and ranks +// the collscan against the indexed result. +// +// In order to expose the specific NON GUARANTEED isolation behavior this file tests +// we disable table scans to ensure that the new query system only looks at the 2d +// scan. +assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) ); + +var status = function( msg ){ + print( "\n\n###\n" + msg + "\n###\n\n" ) +} + +var coll = db.getCollection( "jstests_geo_update_btree2" ) +coll.drop() + +coll.ensureIndex( { loc : '2d' } ) + +status( "Inserting points..." ) + +var numPoints = 10 +for ( i = 0; i < numPoints; i++ ) { + coll.insert( { _id : i, loc : [ Random.rand() * 180, Random.rand() * 180 ], i : i % 2 } ); +} + +status( "Starting long query..." ) + +var query = coll.find({ loc : { $within : { $box : [[-180, -180], [180, 180]] } } }).batchSize( 2 ) +var firstValues = [ query.next()._id, query.next()._id ] +printjson( firstValues ) + +status( "Removing points not returned by query..." 
) + +var allQuery = coll.find() +var removeIds = [] +while( allQuery.hasNext() ){ + var id = allQuery.next()._id + if( firstValues.indexOf( id ) < 0 ){ + removeIds.push( id ) + } +} + +var updateIds = [] +for( var i = 0, max = removeIds.length / 2; i < max; i++ ) updateIds.push( removeIds.pop() ) + +printjson( removeIds ) +coll.remove({ _id : { $in : removeIds } }) + +status( "Updating points returned by query..." ) +printjson(updateIds); + +var big = new Array( 3000 ).toString() +for( var i = 0; i < updateIds.length; i++ ) + coll.update({ _id : updateIds[i] }, { $set : { data : big } }) + +status( "Counting final points..." ) + +// It's not defined whether or not we return documents that are modified during a query. We +// shouldn't crash, but it's not defined how many results we get back. This test is modifying every +// doc not returned by the query, and since we currently handle the invalidation by removing them, +// we won't return them. But we shouldn't crash. +// assert.eq( ( numPoints - 2 ) / 2, query.itcount() ) +query.itcount(); + +assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false} ) ); diff --git a/jstests/core/geo_update_dedup.js b/jstests/core/geo_update_dedup.js new file mode 100644 index 00000000000..def93a839e4 --- /dev/null +++ b/jstests/core/geo_update_dedup.js @@ -0,0 +1,60 @@ +// Test that updates with geo queries which match +// the same document multiple times only apply +// the update once + +var t = db.jstests_geo_update_dedup;; + +// 2d index with $near +t.drop(); +t.ensureIndex({locs: "2d"}); +t.save({locs: [[49.999,49.999], [50.0,50.0], [50.001,50.001]]}); + +var q = {locs: {$near: [50.0, 50.0]}}; +assert.eq(1, t.find(q).itcount(), 'duplicates returned from query'); + +var res = t.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true); +assert.eq(1, res.nMatched); +assert.eq(1, t.findOne().touchCount); + +t.drop(); +t.ensureIndex({locs: "2d"}); +t.save({locs: [{x:49.999,y:49.999}, 
{x:50.0,y:50.0}, {x:50.001,y:50.001}]}); +res = t.update({locs: {$near: {x:50.0, y:50.0}}}, {$inc: {touchCount: 1}}); +assert.eq(1, res.nMatched); +assert.eq(1, t.findOne().touchCount); + +// 2d index with $within +t.drop(); +t.ensureIndex({loc: "2d"}); +t.save({loc: [[0, 0], [1, 1]]}); + +res = t.update({loc: {$within: {$center: [[0, 0], 2]}}}, {$inc: {touchCount: 1}}, false, true); +assert.eq(1, res.nMatched); +assert.eq(1, t.findOne().touchCount); + +// 2dsphere index with $geoNear +t.drop(); +t.ensureIndex({geo: "2dsphere"}); +var x = { "type" : "Polygon", + "coordinates" : [[[49.999,49.999], [50.0,50.0], [50.001,50.001], [49.999,49.999]]]} +t.save({geo: x}) + +res = t.update({geo: {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}}, + {$inc: {touchCount: 1}}, false, true); +assert.eq(1, res.nMatched); +assert.eq(1, t.findOne().touchCount); + +t.drop(); +var locdata = [ + {geo: {type: "Point", coordinates: [49.999,49.999]}}, + {geo: {type: "Point", coordinates: [50.000,50.000]}}, + {geo: {type: "Point", coordinates: [50.001,50.001]}} +]; +t.save({locdata: locdata, count: 0}) +t.ensureIndex({"locdata.geo": "2dsphere"}); + +res = t.update({"locdata.geo": {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}}, + {$inc: {touchCount: 1}}, false, true); +assert.eq(1, res.nMatched); +assert.eq(1, t.findOne().touchCount); + diff --git a/jstests/core/geo_withinquery.js b/jstests/core/geo_withinquery.js new file mode 100644 index 00000000000..11701d34c62 --- /dev/null +++ b/jstests/core/geo_withinquery.js @@ -0,0 +1,15 @@ +// SERVER-7343: allow $within without a geo index. 
+t = db.geo_withinquery; +t.drop(); + +num = 0; +for ( x=0; x<=20; x++ ){ + for ( y=0; y<=20; y++ ){ + o = { _id : num++ , loc : [ x , y ] } + t.save( o ) + } +} + +assert.eq(21 * 21 - 1, t.find({ $and: [ {loc: {$ne:[0,0]}}, + {loc: {$within: {$box: [[0,0], [100,100]]}}}, + ]}).itcount(), "UHOH!") diff --git a/jstests/core/geoa.js b/jstests/core/geoa.js new file mode 100644 index 00000000000..3081f6c5c2e --- /dev/null +++ b/jstests/core/geoa.js @@ -0,0 +1,12 @@ + +t = db.geoa +t.drop(); + +t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } ) +t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } ) +t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } ) + +t.ensureIndex( { "a.loc" : "2d" } ); + +cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } ); +assert.eq( 2 , cur.next()._id , "A1" ); diff --git a/jstests/core/geob.js b/jstests/core/geob.js new file mode 100644 index 00000000000..0dcc2658ba2 --- /dev/null +++ b/jstests/core/geob.js @@ -0,0 +1,35 @@ +var t = db.geob; +t.drop(); + +var a = {p: [0, 0]}; +var b = {p: [1, 0]}; +var c = {p: [3, 4]}; +var d = {p: [0, 6]}; + +t.save(a); +t.save(b); +t.save(c); +t.save(d); +t.ensureIndex({p: "2d"}); + +var res = t.runCommand("geoNear", {near: [0,0]}); +assert.close(3, res.stats.avgDistance, "A"); + +assert.close(0, res.results[0].dis, "B1"); +assert.eq(a._id, res.results[0].obj._id, "B2"); + +assert.close(1, res.results[1].dis, "C1"); +assert.eq(b._id, res.results[1].obj._id, "C2"); + +assert.close(5, res.results[2].dis, "D1"); +assert.eq(c._id, res.results[2].obj._id, "D2"); + +assert.close(6, res.results[3].dis, "E1"); +assert.eq(d._id, res.results[3].obj._id, "E2"); + +res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2}); +assert.close(6, res.stats.avgDistance, "F"); +assert.close(0, res.results[0].dis, "G"); +assert.close(2, res.results[1].dis, "H"); +assert.close(10, res.results[2].dis, "I"); +assert.close(12, res.results[3].dis, "J"); diff --git a/jstests/core/geoc.js b/jstests/core/geoc.js new file mode 100644 index 
00000000000..8b0178095e8 --- /dev/null +++ b/jstests/core/geoc.js @@ -0,0 +1,24 @@ + +t = db.geoc; +t.drop() + +N = 1000; + +for (var i=0; i + +contains = function(arr,obj) { + var i = arr.length; + while (i--) { + if (arr[i] === obj) { + return true; + } + } + return false; +} + +var resp = db.adminCommand({getLog:"*"}) +assert( resp.ok == 1, "error executing getLog command" ); +assert( resp.names, "no names field" ); +assert( resp.names.length > 0, "names array is empty" ); +assert( contains(resp.names,"global") , "missing global category" ); +assert( !contains(resp.names,"butty") , "missing butty category" ); + +resp = db.adminCommand({getLog:"global"}) +assert( resp.ok == 1, "error executing getLog command" ); +assert( resp.log, "no log field" ); +assert( resp.log.length > 0 , "no log lines" ); diff --git a/jstests/core/getlog2.js b/jstests/core/getlog2.js new file mode 100644 index 00000000000..846f0548309 --- /dev/null +++ b/jstests/core/getlog2.js @@ -0,0 +1,46 @@ +// tests getlog as well as slow querying logging + +glcol = db.getLogTest2; +glcol.drop() + +contains = function(arr, func) { + var i = arr.length; + while (i--) { + if (func(arr[i])) { + return true; + } + } + return false; +} + +// test doesn't work when talking to mongos +if(db.isMaster().msg != "isdbgrid") { + // run a slow query + glcol.save({ "SENTINEL": 1 }); + glcol.findOne({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }); + + // run a slow update + glcol.update({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }, { "x": "x" }); + + var resp = db.adminCommand({getLog:"global"}); + assert( resp.ok == 1, "error executing getLog command" ); + assert( resp.log, "no log field" ); + assert( resp.log.length > 0 , "no log lines" ); + + // ensure that slow query is logged in detail + assert( contains(resp.log, function(v) { + print(v); + return v.indexOf(" query ") != -1 && v.indexOf("query:") != -1 && + v.indexOf("nscanned:") != -1 && + 
v.indexOf("nscannedObjects:") != -1 && + v.indexOf("SENTINEL") != -1; + }) ); + + // same, but for update + assert( contains(resp.log, function(v) { + return v.indexOf(" update ") != -1 && v.indexOf("query") != -1 && + v.indexOf("nscanned:") != -1 && + v.indexOf("nscannedObjects:") != -1 && + v.indexOf("SENTINEL") != -1; + }) ); +} diff --git a/jstests/core/group1.js b/jstests/core/group1.js new file mode 100644 index 00000000000..c4147c0d89a --- /dev/null +++ b/jstests/core/group1.js @@ -0,0 +1,64 @@ +t = db.group1; +t.drop(); + +t.save( { n : 1 , a : 1 } ); +t.save( { n : 2 , a : 1 } ); +t.save( { n : 3 , a : 2 } ); +t.save( { n : 4 , a : 2 } ); +t.save( { n : 5 , a : 2 } ); + +var p = { key : { a : true } , + reduce : function(obj,prev) { prev.count++; }, + initial: { count: 0 } + }; + +res = t.group( p ); + +assert( res.length == 2 , "A" ); +assert( res[0].a == 1 , "B" ); +assert( res[0].count == 2 , "C" ); +assert( res[1].a == 2 , "D" ); +assert( res[1].count == 3 , "E" ); + +assert.eq( res , t.groupcmd( p ) , "ZZ" ); + +ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } ); +assert.eq( 1 , ret.length , "ZZ 2" ); +assert.eq( 5 , ret[0].count , "ZZ 3" ); + +ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n } , initial : { sum : 0 } } ); +assert.eq( 1 , ret.length , "ZZ 4" ); +assert.eq( 15 , ret[0].sum , "ZZ 5" ); + +t.drop(); + +t.save( { "a" : 2 } ); +t.save( { "b" : 5 } ); +t.save( { "a" : 1 } ); +t.save( { "a" : 2 } ); + +c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } }; + +assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" ); + + +t.drop(); + +t.save( { name : { first : "a" , last : "A" } } ); +t.save( { name : { first : "b" , last : "B" } } ); +t.save( { name : { first : "a" , last : "A" } } ); + + +p = { key : { 'name.first' : true } , + reduce : function(obj,prev) { prev.count++; }, + initial: { count: 0 } + }; + +res = t.group( p ); +assert.eq( 2 , 
res.length , "Z1" ); +assert.eq( "a" , res[0]['name.first'] , "Z2" ) +assert.eq( "b" , res[1]['name.first'] , "Z3" ) +assert.eq( 2 , res[0].count , "Z4" ) +assert.eq( 1 , res[1].count , "Z5" ) + + diff --git a/jstests/core/group2.js b/jstests/core/group2.js new file mode 100644 index 00000000000..a8e6653470a --- /dev/null +++ b/jstests/core/group2.js @@ -0,0 +1,38 @@ +t = db.group2; +t.drop(); + +t.save({a: 2}); +t.save({b: 5}); +t.save({a: 1}); + +cmd = { key: {a: 1}, + initial: {count: 0}, + reduce: function(obj, prev) { + prev.count++; + } + }; + +result = t.group(cmd); + +assert.eq(3, result.length, "A"); +assert.eq(null, result[1].a, "C"); +assert("a" in result[1], "D"); +assert.eq(1, result[2].a, "E"); + +assert.eq(1, result[0].count, "F"); +assert.eq(1, result[1].count, "G"); +assert.eq(1, result[2].count, "H"); + + +delete cmd.key +cmd["$keyf"] = function(x){ return { a : x.a }; }; +result2 = t.group( cmd ); + +assert.eq( result , result2, "check result2" ); + + +delete cmd.$keyf +cmd["keyf"] = function(x){ return { a : x.a }; }; +result3 = t.group( cmd ); + +assert.eq( result , result3, "check result3" ); diff --git a/jstests/core/group3.js b/jstests/core/group3.js new file mode 100644 index 00000000000..d113b9d570f --- /dev/null +++ b/jstests/core/group3.js @@ -0,0 +1,43 @@ +t = db.group3; +t.drop(); + +t.save({a: 1}); +t.save({a: 2}); +t.save({a: 3}); +t.save({a: 4}); + + +cmd = { initial: {count: 0, sum: 0}, + reduce: function(obj, prev) { + prev.count++; + prev.sum += obj.a; + }, + finalize: function(obj) { + if (obj.count){ + obj.avg = obj.sum / obj.count; + }else{ + obj.avg = 0; + } + }, + }; + +result1 = t.group(cmd); + +assert.eq(1, result1.length, "test1"); +assert.eq(10, result1[0].sum, "test1"); +assert.eq(4, result1[0].count, "test1"); +assert.eq(2.5, result1[0].avg, "test1"); + + +cmd['finalize'] = function(obj) { + if (obj.count){ + return obj.sum / obj.count; + }else{ + return 0; + } +}; + +result2 = t.group(cmd); + +assert.eq(1, 
result2.length, "test2"); +assert.eq(2.5, result2[0], "test2"); diff --git a/jstests/core/group4.js b/jstests/core/group4.js new file mode 100644 index 00000000000..e75c0d1ae2c --- /dev/null +++ b/jstests/core/group4.js @@ -0,0 +1,45 @@ + +t = db.group4 +t.drop(); + +function test( c , n ){ + var x = {}; + c.forEach( + function(z){ + assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) ); + } + ); +} + +t.insert({name:'bob',foo:1}) +t.insert({name:'bob',foo:2}) +t.insert({name:'alice',foo:1}) +t.insert({name:'alice',foo:3}) +t.insert({name:'fred',foo:3}) +t.insert({name:'fred',foo:4}) + +x = t.group( + { + key: {foo:1}, + initial: {count:0,values:[]}, + reduce: function (obj, prev){ + prev.count++ + prev.values.push(obj.name) + } + } +); +test( x , "A" ); + +x = t.group( + { + key: {foo:1}, + initial: {count:0}, + reduce: function (obj, prev){ + if (!prev.values) {prev.values = [];} + prev.count++; + prev.values.push(obj.name); + } + } +); +test( x , "B" ); + diff --git a/jstests/core/group5.js b/jstests/core/group5.js new file mode 100644 index 00000000000..3534fe5f030 --- /dev/null +++ b/jstests/core/group5.js @@ -0,0 +1,38 @@ + +t = db.group5; +t.drop(); + +// each group has groupnum+1 5 users +for ( var group=0; group<10; group++ ){ + for ( var i=0; i<5+group; i++ ){ + t.save( { group : "group" + group , user : i } ) + } +} + +function c( group ){ + return t.group( + { + key : { group : 1 } , + q : { group : "group" + group } , + initial : { users : {} }, + reduce : function(obj,prev){ + prev.users[obj.user] = true; // add this user to the hash + }, + finalize : function(x){ + var count = 0; + for (var key in x.users){ + count++; + } + + //replace user obj with count + //count add new field and keep users + x.users = count; + return x; + } + })[0]; // returns array +} + +assert.eq( "group0" , c(0).group , "g0" ); +assert.eq( 5 , c(0).users , "g0 a" ); +assert.eq( "group5" , c(5).group , "g5" ); +assert.eq( 10 , c(5).users , "g5 a" ); diff --git 
a/jstests/core/group6.js b/jstests/core/group6.js new file mode 100644 index 00000000000..b77a37a5d11 --- /dev/null +++ b/jstests/core/group6.js @@ -0,0 +1,32 @@ +t = db.jstests_group6; +t.drop(); + +for( i = 1; i <= 10; ++i ) { + t.save( {i:new NumberLong( i ),y:1} ); +} + +assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); + +t.drop(); +for( i = 1; i <= 10; ++i ) { + if ( i % 2 == 0 ) { + t.save( {i:new NumberLong( i ),y:1} ); + } else { + t.save( {i:i,y:1} ); + } +} + +assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); + +t.drop(); +for( i = 1; i <= 10; ++i ) { + if ( i % 2 == 1 ) { + t.save( {i:new NumberLong( i ),y:1} ); + } else { + t.save( {i:i,y:1} ); + } +} + +assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); + +assert.eq.automsg( "NumberLong(10)", "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count" ); \ No newline at end of file diff --git a/jstests/core/group7.js b/jstests/core/group7.js new file mode 100644 index 00000000000..1413000079c --- /dev/null +++ b/jstests/core/group7.js @@ -0,0 +1,47 @@ +// Test yielding group command SERVER-1395 + +t = db.jstests_group7; +t.drop(); + +function checkForYield( docs, updates ) { + t.drop(); + a = 0; + for( var i = 0; i < docs; ++i ) { + t.save( {a:a} ); + } + + // Iteratively update all a values atomically. + p = startParallelShell( + 'for( a = 0; a < ' + updates + '; ++a ) {' + + 'db.jstests_group7.update({ $atomic: true }, { $set: { a: a }}, false, true);' + + '}' ); + + for( var i = 0; i < updates; ++i ) { + print("running group " + i + " of " + updates); + ret = t.group({key:{a:1},reduce:function(){},initial:{}}); + // Check if group sees more than one a value, indicating that it yielded. 
+ if ( ret.length > 1 ) { + p(); + return true; + } + printjson( ret ); + } + + p(); + return false; +} + +var yielded = false; +var docs = 1500; +var updates = 50; +for( var j = 1; j <= 6; ++j ) { + print("Iteration " + j + " docs = " + docs + " updates = " + updates); + if ( checkForYield( docs, updates ) ) { + yielded = true; + break; + } + // Increase docs and updates to encourage yielding. + docs *= 2; + updates *= 2; +} +assert( yielded ); diff --git a/jstests/core/group_empty.js b/jstests/core/group_empty.js new file mode 100644 index 00000000000..62a734ed0f8 --- /dev/null +++ b/jstests/core/group_empty.js @@ -0,0 +1,8 @@ + +t = db.group_empty; +t.drop(); + +res1 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}}); +t.ensureIndex( { x : 1 } ); +res2 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}}); +assert.eq( res1, res2 ); diff --git a/jstests/core/grow_hash_table.js b/jstests/core/grow_hash_table.js new file mode 100644 index 00000000000..1f96820d61f --- /dev/null +++ b/jstests/core/grow_hash_table.js @@ -0,0 +1,42 @@ +// This test creates a large projection, which causes a set of field names to +// be stored in a StringMap (based on UnorderedFastKeyTable). The hash table +// starts with 20 slots, but must be grown repeatedly to hold the complete set +// of fields. This test verifies that we can grow the hash table repeatedly +// with no failures. +// +// Related to SERVER-9824. + +var testDB = db.getSiblingDB('grow_hash_table'); + +var doTest = function(count) { + print('Testing with count of ' + count); + testDB.dropDatabase(); + var id = { data: 1 }; + var doc = { _id: id }; + var projection = { }; + + // Create a document and a projection with fields r1, r2, r3 ... 
+ for (var i = 1; i <= count; ++i) { + var r = 'r' + i; + doc[r] = i; + projection[r] = 1; + } + + // Store the document + assert.writeOK(testDB.collection.insert(doc)); + + // Try to read the document using a large projection + try { + var findCount = testDB.collection.find({ _id: id }, projection).itcount(); + assert(findCount == 1, + 'Failed to find single stored document, find().itcount() == ' + findCount); + } + catch (e) { + testDB.dropDatabase(); + doassert('Test FAILED! Caught exception ' + tojsononeline(e)); + } + testDB.dropDatabase(); + jsTest.log('Test PASSED'); +} + +doTest(10000); diff --git a/jstests/core/hashindex1.js b/jstests/core/hashindex1.js new file mode 100644 index 00000000000..34bd6dc0725 --- /dev/null +++ b/jstests/core/hashindex1.js @@ -0,0 +1,94 @@ +var t = db.hashindex1; +t.drop() + +//test non-single field hashed indexes don't get created (maybe change later) +var badspec = {a : "hashed" , b : 1}; +t.ensureIndex( badspec ); +assert.eq( t.getIndexes().length , 1 , "only _id index should be created"); + +//test unique index not created (maybe change later) +var goodspec = {a : "hashed"}; +t.ensureIndex( goodspec , {"unique" : true}); +assert.eq( t.getIndexes().length , 1 , "unique index got created."); + +//now test that non-unique index does get created +t.ensureIndex(goodspec); +assert.eq( t.getIndexes().length , 2 , "hashed index didn't get created"); + +//test basic inserts +for(i=0; i < 10; i++ ){ + t.insert( {a:i } ); +} +assert.eq( t.find().count() , 10 , "basic insert didn't work"); +assert.eq( t.find().hint(goodspec).toArray().length , 10 , "basic insert didn't work"); +assert.eq( t.find({a : 3}).hint({_id : 1}).toArray()[0]._id , + t.find({a : 3}).hint(goodspec).toArray()[0]._id , + "hashindex lookup didn't work" ); + + +//make sure things with the same hash are not both returned +t.insert( {a: 3.1} ); +assert.eq( t.find().count() , 11 , "additional insert didn't work"); +assert.eq( t.find({a : 
3.1}).hint(goodspec).toArray().length , 1); +assert.eq( t.find({a : 3}).hint(goodspec).toArray().length , 1); +//test right obj is found +assert.eq( t.find({a : 3.1}).hint(goodspec).toArray()[0].a , 3.1); + +//test that hashed cursor is used when it should be +var cursorname = "BtreeCursor a_hashed"; +assert.eq( t.find({a : 1}).explain().cursor , + cursorname , + "not using hashed cursor"); + +// SERVER-12222 +//printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() ) +//assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor , +// cursorname , +// "not using hashed cursor"); +assert.neq( t.find({c : 1}).explain().cursor , + cursorname , + "using irrelevant hashed cursor"); + +printjson( t.find({a : {$in : [1,2]}}).explain() ) +// Hash index used with a $in set membership predicate. +assert.eq( t.find({a : {$in : [1,2]}}).explain()["cursor"], + "BtreeCursor a_hashed", + "not using hashed cursor"); + +// Hash index used with a singleton $and predicate conjunction. +assert.eq( t.find({$and : [{a : 1}]}).explain()["cursor"], + "BtreeCursor a_hashed", + "not using hashed cursor"); + +// Hash index used with a non singleton $and predicate conjunction. 
+assert.eq( t.find({$and : [{a : {$in : [1,2]}},{a : {$gt : 1}}]}).explain()["cursor"], + "BtreeCursor a_hashed", + "not using hashed cursor"); + +//test creation of index based on hash of _id index +var goodspec2 = {'_id' : "hashed"}; +t.ensureIndex( goodspec2 ); +assert.eq( t.getIndexes().length , 3 , "_id index didn't get created"); + +var newid = t.findOne()["_id"]; +assert.eq( t.find( {_id : newid} ).hint( {_id : 1} ).toArray()[0]._id , + t.find( {_id : newid} ).hint( goodspec2 ).toArray()[0]._id, + "using hashed index and different index returns different docs"); + + +//test creation of sparse hashed index +var sparseindex = {b : "hashed"}; +t.ensureIndex( sparseindex , {"sparse" : true}); +assert.eq( t.getIndexes().length , 4 , "sparse index didn't get created"); + +//test sparse index has smaller total items on after inserts +for(i=0; i < 10; i++ ){ + t.insert( {b : i} ); +} +var totalb = t.find().hint(sparseindex).toArray().length; +assert.eq( totalb , 10 , "sparse index has wrong total"); + +var total = t.find().hint({"_id" : 1}).toArray().length; +var totala = t.find().hint(goodspec).toArray().length; +assert.eq(total , totala , "non-sparse index has wrong total"); +assert.lt(totalb , totala , "sparse index should have smaller total"); diff --git a/jstests/core/hashtest1.js b/jstests/core/hashtest1.js new file mode 100644 index 00000000000..981a0c36877 --- /dev/null +++ b/jstests/core/hashtest1.js @@ -0,0 +1,78 @@ +//hashtest1.js +//Simple tests to check hashing of various types +//make sure that different numeric types hash to same thing, and other sanity checks + +var hash = function( v , seed ){ + if (seed) + return db.runCommand({"_hashBSONElement" : v , "seed" : seed})["out"]; + else + return db.runCommand({"_hashBSONElement" : v})["out"]; +}; + +var oidHash = hash( ObjectId() ); +var oidHash2 = hash( ObjectId() ); +var oidHash3 = hash( ObjectId() ); +assert(! 
friendlyEqual( oidHash, oidHash2) , "ObjectIDs should hash to different things"); +assert(! friendlyEqual( oidHash, oidHash3) , "ObjectIDs should hash to different things"); +assert(! friendlyEqual( oidHash2, oidHash3) , "ObjectIDs should hash to different things"); + +var intHash = hash( NumberInt(3) ); +var doubHash = hash( 3 ); +var doubHash2 = hash( 3.0 ); +var longHash = hash( NumberLong(3) ); +var fracHash = hash( NumberInt(3.5) ); +assert.eq( intHash , doubHash ); +assert.eq( intHash , doubHash2 ); +assert.eq( intHash , longHash ); +assert.eq( intHash , fracHash ); + +var trueHash = hash( true ); +var falseHash = hash( false ); +assert(! friendlyEqual( trueHash, falseHash) , "true and false should hash to different things"); + +var nullHash = hash( null ); +assert(! friendlyEqual( falseHash , nullHash ) , "false and null should hash to different things"); + +var dateHash = hash( new Date() ); +sleep(1); +var isodateHash = hash( ISODate() ); +assert(! friendlyEqual( dateHash, isodateHash) , "different dates should hash to different things"); + +var stringHash = hash( "3" ); +assert(! friendlyEqual( intHash , stringHash ), "3 and \"3\" should hash to different things"); + +var regExpHash = hash( RegExp("3") ); +assert(! friendlyEqual( stringHash , regExpHash) , "\"3\" and RegExp(3) should hash to different things"); + +var intHash4 = hash( 4 ); +assert(! friendlyEqual( intHash , intHash4 ), "3 and 4 should hash to different things"); + +var intHashSeeded = hash( 4 , 3 ); +assert(! friendlyEqual(intHash4 , intHashSeeded ), "different seeds should make different hashes"); + +var minkeyHash = hash( MinKey ); +var maxkeyHash = hash( MaxKey ); +assert(! 
friendlyEqual(minkeyHash , maxkeyHash ), "minkey and maxkey should hash to different things"); + +var arrayHash = hash( [0,1.0,NumberLong(2)] ); +var arrayHash2 = hash( [0,NumberInt(1),2] ); +assert.eq( arrayHash , arrayHash2 , "didn't squash numeric types in array"); + +var objectHash = hash( {"0":0, "1" : NumberInt(1), "2" : 2} ); +assert(! friendlyEqual(objectHash , arrayHash2) , "arrays and sub-objects should hash to different things"); + +var c = hash( {a : {}, b : 1} ); +var d = hash( {a : {b : 1}} ); +assert(! friendlyEqual( c , d ) , "hashing doesn't group sub-docs and fields correctly"); + +var e = hash( {a : 3 , b : [NumberLong(3), {c : NumberInt(3)}]} ); +var f = hash( {a : NumberLong(3) , b : [NumberInt(3), {c : 3.0}]} ); +assert.eq( e , f , "recursive number squashing doesn't work"); + +var nanHash = hash( 0/0 ); +var zeroHash = hash( 0 ); +assert.eq( nanHash , zeroHash , "NaN and Zero should hash to the same thing"); + + +//should also test that CodeWScope hashes correctly +//but waiting for SERVER-3391 (CodeWScope support in shell) \ No newline at end of file diff --git a/jstests/core/hint1.js b/jstests/core/hint1.js new file mode 100644 index 00000000000..b5a580f2b93 --- /dev/null +++ b/jstests/core/hint1.js @@ -0,0 +1,16 @@ + +p = db.jstests_hint1; +p.drop(); + +p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } ); +p.ensureIndex( { ts: 1 } ); + +e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: "alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain(); +assert.eq(e.indexBounds.ts[0][0].getTime(), new Date(1234119308272).getTime(), "A"); + +//printjson(e); + +assert.eq( /*just below min date is bool true*/true, e.indexBounds.ts[0][1], "B"); + +assert.eq(1, p.find({ live: true, ts: { $lt: new Date(1234119308272) }, cls: "entry", verticals: "alleyinsider" }).sort({ ts: -1 }).hint({ ts: 1 }).count()); + diff --git a/jstests/core/hostinfo.js b/jstests/core/hostinfo.js new 
file mode 100644 index 00000000000..16c3810b2c4 --- /dev/null +++ b/jstests/core/hostinfo.js @@ -0,0 +1,33 @@ +// SERVER-4615: Ensure hostInfo() command returns expected results on each platform + +assert.commandWorked( db.hostInfo() ); +var hostinfo = db.hostInfo(); + +// test for os-specific fields +if (hostinfo.os.type == "Windows") { + assert.neq( hostinfo.os.name, "" || null, "Missing Windows os name" ); + assert.neq( hostinfo.os.version, "" || null, "Missing Windows version" ); + +} else if (hostinfo.os.type == "Linux") { + assert.neq( hostinfo.os.name, "" || null, "Missing Linux os/distro name" ); + assert.neq( hostinfo.os.version, "" || null, "Missing Lindows version" ); + +} else if (hostinfo.os.type == "Darwin") { + assert.neq( hostinfo.os.name, "" || null, "Missing Darwin os name" ); + assert.neq( hostinfo.os.version, "" || null, "Missing Darwin version" ); + +} else if (hostinfo.os.type == "BSD") { + assert.neq( hostinfo.os.name, "" || null, "Missing FreeBSD os name" ); + assert.neq( hostinfo.os.version, "" || null, "Missing FreeBSD version" ); +} + +// comment out this block for systems which have not implemented hostinfo. 
+if (hostinfo.os.type != "") { + assert.neq( hostinfo.system.hostname, "" || null, "Missing Hostname" ); + assert.neq( hostinfo.system.currentTime, "" || null, "Missing Current Time" ); + assert.neq( hostinfo.system.cpuAddrSize, "" || null || 0, "Missing CPU Address Size" ); + assert.neq( hostinfo.system.memSizeMB, "" || null, "Missing Memory Size" ); + assert.neq( hostinfo.system.numCores, "" || null || 0, "Missing Number of Cores" ); + assert.neq( hostinfo.system.cpuArch, "" || null, "Missing CPU Architecture" ); + assert.neq( hostinfo.system.numaEnabled, "" || null, "Missing NUMA flag" ); +} diff --git a/jstests/core/id1.js b/jstests/core/id1.js new file mode 100644 index 00000000000..9236340e4ec --- /dev/null +++ b/jstests/core/id1.js @@ -0,0 +1,16 @@ + +t = db.id1 +t.drop(); + +t.save( { _id : { a : 1 , b : 2 } , x : "a" } ); +t.save( { _id : { a : 1 , b : 2 } , x : "b" } ); +t.save( { _id : { a : 3 , b : 2 } , x : "c" } ); +t.save( { _id : { a : 4 , b : 2 } , x : "d" } ); +t.save( { _id : { a : 4 , b : 2 } , x : "e" } ); +t.save( { _id : { a : 2 , b : 2 } , x : "f" } ); + +assert.eq( 4 , t.find().count() , "A" ); +assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x ); +assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x ); +assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x ); +assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x ); diff --git a/jstests/core/idhack.js b/jstests/core/idhack.js new file mode 100644 index 00000000000..21409645489 --- /dev/null +++ b/jstests/core/idhack.js @@ -0,0 +1,43 @@ + +t = db.idhack +t.drop() + + +t.insert( { _id : { x : 1 } , z : 1 } ) +t.insert( { _id : { x : 2 } , z : 2 } ) +t.insert( { _id : { x : 3 } , z : 3 } ) +t.insert( { _id : 1 , z : 4 } ) +t.insert( { _id : 2 , z : 5 } ) +t.insert( { _id : 3 , z : 6 } ) + +assert.eq( 2 , t.findOne( { _id : { x : 2 } } ).z , "A1" ) +assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).count() , "A2" ) +assert.eq( 2 , t.find( { _id : { $gte : 2 } } 
).itcount() , "A3" ) + +t.update( { _id : { x : 2 } } , { $set : { z : 7 } } ) +assert.eq( 7 , t.findOne( { _id : { x : 2 } } ).z , "B1" ) + +t.update( { _id : { $gte : 2 } } , { $set : { z : 8 } } , false , true ) +assert.eq( 4 , t.findOne( { _id : 1 } ).z , "C1" ) +assert.eq( 8 , t.findOne( { _id : 2 } ).z , "C2" ) +assert.eq( 8 , t.findOne( { _id : 3 } ).z , "C3" ) + +// explain output should show that the ID hack was applied. +var query = { _id : { x : 2 } }; +var explain = t.find( query ).explain( true ); +print( "explain for " + tojson( query , "" , true ) + " = " + tojson( explain ) ); +assert.eq( 1 , explain.n , "D1" ); +assert.eq( 1 , explain.nscanned , "D2" ); +assert.neq( undefined , explain.cursor , "D3" ); +assert.neq( "" , explain.cursor , "D4" ); +assert.neq( undefined , explain.indexBounds , "D5" ); +assert.neq( {} , explain.indexBounds , "D6" ); + +// ID hack cannot be used with hint(). +var query = { _id : { x : 2 } }; +var explain = t.find( query ).explain(); +t.ensureIndex( { _id : 1 , a : 1 } ); +var hintExplain = t.find( query ).hint( { _id : 1 , a : 1 } ).explain(); +print( "explain for hinted query = " + tojson( hintExplain ) ); +assert.neq( explain.cursor, hintExplain.cursor, "E1" ); + diff --git a/jstests/core/in.js b/jstests/core/in.js new file mode 100644 index 00000000000..da1313692e1 --- /dev/null +++ b/jstests/core/in.js @@ -0,0 +1,24 @@ + +t = db.in1; +t.drop(); + +t.save( { a : 1 } ); +t.save( { a : 2 } ); + +// $in must take an array as argument: SERVER-7445 +assert.throws( function() { return t.find( { a : { $in : { x : 1 } } } ).itcount(); } ); +assert.throws( function() { return t.find( { a : { $in : 1 } } ).itcount(); } ); + +assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" ); +assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" ); +assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" ); + +t.ensureIndex( { a : 1 } ); + +assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" 
); +assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" ); +assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" ); + +assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" ); + +assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "H" ); diff --git a/jstests/core/in2.js b/jstests/core/in2.js new file mode 100644 index 00000000000..66b90daa25a --- /dev/null +++ b/jstests/core/in2.js @@ -0,0 +1,33 @@ + +t = db.in2; + +function go( name , index ){ + + t.drop(); + + t.save( { a : 1 , b : 1 } ); + t.save( { a : 1 , b : 2 } ); + t.save( { a : 1 , b : 3 } ); + + t.save( { a : 1 , b : 1 } ); + t.save( { a : 2 , b : 2 } ); + t.save( { a : 3 , b : 3 } ); + + t.save( { a : 1 , b : 1 } ); + t.save( { a : 2 , b : 1 } ); + t.save( { a : 3 , b : 1 } ); + + if ( index ) + t.ensureIndex( index ); + + assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" ); + + assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" ); +} + +go( "no index" ); +go( "index on a" , { a : 1 } ); +go( "index on b" , { b : 1 } ); +go( "index on a&b" , { a : 1 , b : 1 } ); + + diff --git a/jstests/core/in3.js b/jstests/core/in3.js new file mode 100644 index 00000000000..b0a8bb7b81f --- /dev/null +++ b/jstests/core/in3.js @@ -0,0 +1,11 @@ +t = db.jstests_in3; + +t.drop(); +t.ensureIndex( {i:1} ); +assert.eq( {i:[[3,3]]}, t.find( {i:{$in:[3]}} ).explain().indexBounds , "A1" ); +assert.eq( {i:[[3,3],[6,6]]}, t.find( {i:{$in:[3,6]}} ).explain().indexBounds , "A2" ); + +for ( var i=0; i<20; i++ ) + t.insert( { i : i } ); + +assert.eq( 3 , t.find( {i:{$in:[3,6]}} ).explain().nscanned , "B1" ) diff --git a/jstests/core/in4.js b/jstests/core/in4.js new file mode 100644 index 00000000000..3e3dca29528 --- /dev/null +++ b/jstests/core/in4.js @@ -0,0 +1,42 @@ +t = db.jstests_in4; + +function checkRanges( a, b ) { + assert.eq( a, b ); +} + +t.drop(); +t.ensureIndex( {a:1,b:1} ); +checkRanges( 
{a:[[2,2]],b:[[3,3]]}, t.find( {a:2,b:3} ).explain().indexBounds ); +checkRanges( {a:[[2,2],[3,3]],b:[[4,4]]}, t.find( {a:{$in:[2,3]},b:4} ).explain().indexBounds ); +checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds ); +checkRanges( {a:[[2,2],[3,3]],b:[[4,4],[5,5]]}, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().indexBounds ); + +checkRanges( {a:[[2,2],[3,3]],b:[[4,10]]}, t.find( {a:{$in:[2,3]},b:{$gt:4,$lt:10}} ).explain().indexBounds ); + +t.save( {a:1,b:1} ); +t.save( {a:2,b:4.5} ); +t.save( {a:2,b:4} ); +assert.eq( 2, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).hint( {a:1,b:1} ).explain().nscanned ); +assert.eq( 2, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).a ); +assert.eq( 4, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).b ); + +t.drop(); +t.ensureIndex( {a:1,b:1,c:1} ); +checkRanges( {a:[[2,2]],b:[[3,3],[4,4]],c:[[5,5]]}, t.find( {a:2,b:{$in:[3,4]},c:5} ).explain().indexBounds ); + +t.save( {a:2,b:3,c:5} ); +t.save( {a:2,b:3,c:4} ); +assert.eq( 1, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned ); +t.remove({}); +t.save( {a:2,b:4,c:5} ); +t.save( {a:2,b:4,c:4} ); +assert.eq( 2, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned ); + +t.drop(); +t.ensureIndex( {a:1,b:-1} ); +ib = t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds; +checkRanges( {a:[[2,2]],b:[[4,4],[3,3]]}, ib ); +assert( ib.b[ 0 ][ 0 ] > ib.b[ 1 ][ 0 ] ); +ib = t.find( {a:2,b:{$in:[3,4]}} ).sort( {a:-1,b:1} ).explain().indexBounds; +checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, ib ); +assert( ib.b[ 0 ][ 0 ] < ib.b[ 1 ][ 0 ] ); diff --git a/jstests/core/in5.js b/jstests/core/in5.js new file mode 100644 index 00000000000..435c8864004 --- /dev/null +++ b/jstests/core/in5.js @@ -0,0 +1,56 @@ + +t = db.in5 + +function go( fn ){ + t.drop(); + o = {}; + o[fn] = { a : 1 , b : 2 }; + t.insert( o ); + + x = {}; + x[fn] = { a : 1 , b : 2 }; + assert.eq( 1 , t.find( x ).itcount() , "A1 - " + fn ); + + + y = {}; + 
y[fn] = { $in : [ { a : 1 , b : 2 } ] } + assert.eq( 1 , t.find( y ).itcount() , "A2 - " + fn ); + + + z = {}; + z[fn+".a"] = 1; + z[fn+".b"] = { $in : [ 2 ] } + assert.eq( 1 , t.find( z ).itcount() , "A3 - " + fn ); // SERVER-1366 + + + i = {} + i[fn] = 1 + t.ensureIndex( i ) + + assert.eq( 1 , t.find( x ).itcount() , "B1 - " + fn ); + assert.eq( 1 , t.find( y ).itcount() , "B2 - " + fn ); + assert.eq( 1 , t.find( z ).itcount() , "B3 - " + fn ); // SERVER-1366 + + t.dropIndex( i ) + + assert.eq( 1 , t.getIndexes().length , "T2" ); + + i = {} + i[fn + ".a" ] = 1; + t.ensureIndex( i ) + assert.eq( 2 , t.getIndexes().length , "T3" ); + + assert.eq( 1 , t.find( x ).itcount() , "C1 - " + fn ); + assert.eq( 1 , t.find( y ).itcount() , "C2 - " + fn ); + assert.eq( 1 , t.find( z ).itcount() , "C3 - " + fn ); // SERVER-1366 + + t.dropIndex( i ) + + +} + +go( "x" ); +go( "_id" ) + + + diff --git a/jstests/core/in6.js b/jstests/core/in6.js new file mode 100644 index 00000000000..f114d93442a --- /dev/null +++ b/jstests/core/in6.js @@ -0,0 +1,13 @@ +t = db.jstests_in6; +t.drop(); + +t.save( {} ); + +function doTest() { + assert.eq.automsg( "1", "t.count( {i:null} )" ); + assert.eq.automsg( "1", "t.count( {i:{$in:[null]}} )" ); +} + +doTest(); +t.ensureIndex( {i:1} ); +doTest(); diff --git a/jstests/core/in8.js b/jstests/core/in8.js new file mode 100644 index 00000000000..5e7e587629f --- /dev/null +++ b/jstests/core/in8.js @@ -0,0 +1,23 @@ +// SERVER-2829 Test arrays matching themselves within a $in expression. 
+ +t = db.jstests_in8; +t.drop(); + +t.save( {key: [1]} ); +t.save( {key: ['1']} ); +t.save( {key: [[2]]} ); + +function doTest() { + assert.eq( 1, t.count( {key:[1]} ) ); + assert.eq( 1, t.count( {key:{$in:[[1]]}} ) ); + assert.eq( 1, t.count( {key:{$in:[[1]],$ne:[2]}} ) ); + assert.eq( 1, t.count( {key:{$in:[['1']],$type:2}} ) ); + assert.eq( 1, t.count( {key:['1']} ) ); + assert.eq( 1, t.count( {key:{$in:[['1']]}} ) ); + assert.eq( 1, t.count( {key:[2]} ) ); + assert.eq( 1, t.count( {key:{$in:[[2]]}} ) ); +} + +doTest(); +t.ensureIndex( {key:1} ); +doTest(); diff --git a/jstests/core/in9.js b/jstests/core/in9.js new file mode 100644 index 00000000000..cbe28e2e2df --- /dev/null +++ b/jstests/core/in9.js @@ -0,0 +1,35 @@ +// SERVER-2343 Test $in empty array matching. + +t = db.jstests_in9; +t.drop(); + +function someData() { + t.remove({}); + t.save( {key: []} ); +} + +function moreData() { + someData(); + t.save( {key: [1]} ); + t.save( {key: ['1']} ); + t.save( {key: null} ); + t.save( {} ); +} + +function check() { + assert.eq( 1, t.count( {key:[]} ) ); + assert.eq( 1, t.count( {key:{$in:[[]]}} ) ); +} + +function doTest() { + someData(); + check(); + moreData(); + check(); +} + +doTest(); + +// SERVER-1943 not fixed yet +t.ensureIndex( {key:1} ); +doTest(); diff --git a/jstests/core/ina.js b/jstests/core/ina.js new file mode 100644 index 00000000000..cf614ab994d --- /dev/null +++ b/jstests/core/ina.js @@ -0,0 +1,15 @@ +// Uassert when $elemMatch is attempted within $in SERVER-3545 + +t = db.jstests_ina; +t.drop(); +t.save( {} ); + +assert.throws( function() { t.find( {a:{$in:[{$elemMatch:{b:1}}]}} ).itcount(); } ); +assert.throws( function() { t.find( {a:{$not:{$in:[{$elemMatch:{b:1}}]}}} ).itcount(); } ); + +assert.throws( function() { t.find( {a:{$nin:[{$elemMatch:{b:1}}]}} ).itcount(); } ); +assert.throws( function() { t.find( {a:{$not:{$nin:[{$elemMatch:{b:1}}]}}} ).itcount(); } ); + +// NOTE Above we don't check cases like {b:2,$elemMatch:{b:3,4}} - 
generally +// we assume that the first key is $elemMatch if any key is, and validating +// every key is expensive in some cases. \ No newline at end of file diff --git a/jstests/core/inb.js b/jstests/core/inb.js new file mode 100644 index 00000000000..34ec843d36c --- /dev/null +++ b/jstests/core/inb.js @@ -0,0 +1,19 @@ +// Test $in regular expressions with overlapping index bounds. SERVER-4677 + +t = db.jstests_inb; +t.drop(); + +function checkBoundsAndResults( query ) { + assert.eq( [ 'a', 'b' ], t.find( query ).explain().indexBounds.x[0] ); + assert.eq( 4, t.count( query ) ); + assert.eq( 4, t.find( query ).itcount() ); +} + +t.ensureIndex( {x:1} ); +t.save( {x:'aa'} ); +t.save( {x:'ab'} ); +t.save( {x:'ac'} ); +t.save( {x:'ad'} ); + +checkBoundsAndResults( {x:{$in:[/^a/,/^ab/]}} ); +checkBoundsAndResults( {x:{$in:[/^ab/,/^a/]}} ); diff --git a/jstests/core/inc-SERVER-7446.js b/jstests/core/inc-SERVER-7446.js new file mode 100644 index 00000000000..c8066a8e491 --- /dev/null +++ b/jstests/core/inc-SERVER-7446.js @@ -0,0 +1,39 @@ +var c = db.incSERVER7446 + +// A 32 bit overflow spills to 64 bits +c.drop(); +c.save( { a: NumberInt( "2147483647" ) } ); +var updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } ); +assert.eq(1, updateResult.nMatched, "Object not modified"); +var res = c.findOne(); +assert.eq(NumberLong, res.a.constructor, + "NumberInt incremented beyond std::numeric_limits::max() not NumberLong"); +assert.eq(NumberLong("2147483648"), res.a, + "NumberInt incremented beyond std::numeric_limits::max() has wrong value"); + +// A 32 bit underflow spills to 64 bits +c.drop(); +c.save( { a: NumberInt( "-2147483648" ) } ); +updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } ); +assert.eq(1, updateResult.nMatched, "Object not modified"); +res = c.findOne(); +assert.eq(NumberLong, res.a.constructor, + "NumberInt decremented beyond std::numeric_limits::min() not NumberLong"); +assert.eq(NumberLong("-2147483649"), res.a, + "NumberInt decremented 
beyond std::numeric_limits::min() has wrong value"); + +// A 64 bit overflow is an error +c.drop(); +c.save( { a: NumberLong( "9223372036854775807" ) } ); +updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } ); +assert.eq(0, updateResult.nMatched, + "Did not fail to increment a NumberLong past std::numeric_limits::max()"); + +// A 64 bit underflow is an error +c.drop(); +c.save( { a: NumberLong( "-9223372036854775808" ) } ); +updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } ); +assert.eq(0, updateResult.nMatched, + "Did not fail to decrement a NumberLong past std::numeric_limits::min()"); + +c.drop() diff --git a/jstests/core/inc1.js b/jstests/core/inc1.js new file mode 100644 index 00000000000..027f307a476 --- /dev/null +++ b/jstests/core/inc1.js @@ -0,0 +1,32 @@ + +t = db.inc1; +t.drop(); + +function test( num , name ){ + assert.eq( 1 , t.count() , name + " count" ); + assert.eq( num , t.findOne().x , name + " value" ); +} + +t.save( { _id : 1 , x : 1 } ); +test( 1 , "A" ); + +t.update( { _id : 1 } , { $inc : { x : 1 } } ); +test( 2 , "B" ); + +t.update( { _id : 1 } , { $inc : { x : 1 } } ); +test( 3 , "C" ); + +t.update( { _id : 2 } , { $inc : { x : 1 } } ); +test( 3 , "D" ); + +t.update( { _id : 1 } , { $inc : { x : 2 } } ); +test( 5 , "E" ); + +t.update( { _id : 1 } , { $inc : { x : -1 } } ); +test( 4 , "F" ); + +t.ensureIndex( { x : 1 } ); + +t.update( { _id : 1 } , { $inc : { x : 1 } } ); +test( 5 , "G" ); + diff --git a/jstests/core/inc2.js b/jstests/core/inc2.js new file mode 100644 index 00000000000..75a8e65a384 --- /dev/null +++ b/jstests/core/inc2.js @@ -0,0 +1,22 @@ + +t = db.inc2 +t.drop(); + +t.save( { _id : 1 , x : 1 } ); +t.save( { _id : 2 , x : 2 } ); +t.save( { _id : 3 , x : 3 } ); + +function order(){ + return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } ); +} + +assert.eq( "1,2,3" , order() , "A" ); + +t.update( { _id : 1 } , { $inc : { x : 4 } } ); +assert.eq( "2,3,1" , order() , "B" ); + +t.ensureIndex( { x : 
1 } ); +assert.eq( "2,3,1" , order() , "C" ); + +t.update( { _id : 3 } , { $inc : { x : 4 } } ); +assert.eq( "2,1,3" , order() , "D" ); diff --git a/jstests/core/inc3.js b/jstests/core/inc3.js new file mode 100644 index 00000000000..baeeb198cf4 --- /dev/null +++ b/jstests/core/inc3.js @@ -0,0 +1,16 @@ + +t = db.inc3; + +t.drop(); +t.save( { _id : 1 , z : 1 , a : 1 } ); +t.update( {} , { $inc : { z : 1 , a : 1 } } ); +t.update( {} , { $inc : { a : 1 , z : 1 } } ); +assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" ) + + +t.drop(); +t.save( { _id : 1 , a : 1 , z : 1 } ); +t.update( {} , { $inc : { z : 1 , a : 1 } } ); +t.update( {} , { $inc : { a : 1 , z : 1 } } ); +assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" ) + diff --git a/jstests/core/index1.js b/jstests/core/index1.js new file mode 100644 index 00000000000..64bbfa8732b --- /dev/null +++ b/jstests/core/index1.js @@ -0,0 +1,24 @@ + +t = db.embeddedIndexTest; + +t.remove( {} ); + +o = { name : "foo" , z : { a : 17 , b : 4} }; +t.save( o ); + +assert( t.findOne().z.a == 17 ); +assert( t.findOne( { z : { a : 17 } } ) == null); + +t.ensureIndex( { "z.a" : 1 } ); + +assert( t.findOne().z.a == 17 ); +assert( t.findOne( { z : { a : 17 } } ) == null); + +o = { name : "bar" , z : { a : 18 } }; +t.save( o ); + +assert.eq.automsg( "2", "t.find().length()" ); +assert.eq.automsg( "2", "t.find().sort( { 'z.a' : 1 } ).length()" ); +assert.eq.automsg( "2", "t.find().sort( { 'z.a' : -1 } ).length()" ); + +assert(t.validate().valid); diff --git a/jstests/core/index10.js b/jstests/core/index10.js new file mode 100644 index 00000000000..d86402e41af --- /dev/null +++ b/jstests/core/index10.js @@ -0,0 +1,32 @@ +// unique index, drop dups + +t = db.jstests_index10; +t.drop(); + +t.save( {i:1} ); +t.save( {i:2} ); +t.save( {i:1} ); +t.save( {i:3} ); +t.save( {i:1} ); + +t.ensureIndex( {i:1} ); +assert.eq( 5, t.count() ); +t.dropIndexes(); +var err = t.ensureIndex( {i:1}, true ); +assert.writeError(err) +assert.eq( 
11000, err.getWriteError().code ); + +assert( 1 == db.system.indexes.count( {ns:"test.jstests_index10" } ), "only id index" ); +// t.dropIndexes(); + +ts = t.totalIndexSize(); +t.ensureIndex( {i:1}, [ true, true ] ); +ts2 = t.totalIndexSize(); + +assert.eq( ts * 2, ts2, "totalIndexSize fail" ); + +assert.eq( 3, t.count() ); +assert.eq( 1, t.count( {i:1} ) ); + +t.ensureIndex( {j:1}, [ true, true ] ); +assert.eq( 1, t.count() ); diff --git a/jstests/core/index13.js b/jstests/core/index13.js new file mode 100644 index 00000000000..7e317d90d94 --- /dev/null +++ b/jstests/core/index13.js @@ -0,0 +1,147 @@ +// Top level match fields within an $elemMatch clause may constrain multiple subfields from a +// compound multikey index. SERVER-3104 +// +// Given a multikey index { 'a.b':1, 'a.c':1 } and query { 'a.b':3, 'a.c':3 } only the index field +// 'a.b' is constrained to the range [3, 3], while the index field 'a.c' is just constrained +// to be within minkey and maxkey. This implementation ensures that the document +// { a:[ { b:3 }, { c:3 } ] }, which generates index keys { 'a.b':3, 'a.c':null } and +// { 'a.b':null and 'a.c':3 } will be retrieved for the query. (See SERVER-958 for more +// information.) +// +// If the query is instead { a:{ $elemMatch:{ b:3, c:3 } } } then the document +// { a:[ { b:3 }, { c:3 } ] } does not match. Until SERVER-3104 was implemented, the index +// constraints would be [3,3] on the 'a.b' field and [minkey,maxkey] on the 'a.c' field, the same as +// for the non $elemMatch query in the previous paragraph. With the SERVER-3104 implementation, +// constraints on two fields within a $elemMatch parent can both be applied to an index. Due to the +// SERVER-3104 implementation, the index constraints become [3,3] on the 'a.b' field _and_ [3,3] on +// the 'a.c' field. 
+ +t = db.jstests_index13; +t.drop(); + +function assertConsistentResults( query ) { + assert.eq( t.find( query ).hint( { $natural:1 } ).sort( { _id:1 } ).toArray(), + t.find( query ).hint( index ).sort( { _id:1 } ).toArray() ); +} + +function assertResults( query ) { + explain = t.find( query ).hint( index ).explain(); + // printjson( explain ); // debug + assertConsistentResults( query ); +} + +// Cases with single dotted index fied names. +index = { 'a.b':1, 'a.c':1 }; +t.ensureIndex( index ); +t.save( { a:[ { b:1 }, { c:1 } ] } ); +t.save( { a:[ { b:1, c:1 } ] } ); +assert.eq( 2, t.count() ); +// Without $elemMatch. +assertResults( { 'a.b':1, 'a.c':1 } ); +// With $elemMatch. +assertResults( { a:{ $elemMatch:{ b:1, c:1 } } } ); + +// Without shared $elemMatch. +assertResults( { 'a.b':1, a:{ $elemMatch:{ c:1 } } } ); +// Two different $elemMatch expressions. +assertResults( { $and:[ { a:{ $elemMatch:{ b:1 } } }, + { a:{ $elemMatch:{ c:1 } } } ] } ); + + +// Cases relating to parse order and inclusion of intersected ranges. +assertResults( { 'a.b':1, a:{ $elemMatch:{ b:{ $gt:0 }, c:1 } } } ); +assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':1 } ); +assertResults( { 'a.c':1, a:{ $elemMatch:{ b:1, c:1 } } } ); +assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':{ $gt:0 } } ); + +// Cases with $elemMatch on multiple fields. +t.remove({}); +index = { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 }; +t.ensureIndex( index ); +t.insert( { a:[ { b:1 }, { c:1 } ], d: { e:1, f:1 } } ); +t.insert( { a:[ { b:1, c:1 } ], d: { e:1, f:1 } } ); +t.insert( { a:{ b:1, c:1 }, d:[ { e:1, f:1 } ] } ); +t.insert( { a:{ b:1, c:1 }, d:[ { e:1 }, { f:1 } ] } ); + +assert.eq( 4, t.count() ); + +// Without $elemMatch. +assertResults( { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 } ); +// With $elemMatch. 
+assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd': { $elemMatch:{ e:1, f:1 } } } ); +assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd.e': 1, 'd.f' : 1 } ); +assertResults( { 'a.b': 1, 'a.c' : 1, 'd': { $elemMatch:{ e:1, f:1 } } } ); + + +// Cases with nested $elemMatch. +t.remove({}) +index = { 'a.b.c':1, 'a.b.d' :1 }; +t.ensureIndex( index ); +t.insert( { a:[ { b: [ { c : 1, d : 1 } ] } ] } ) ; +t.insert( { a:[ { b: [ { c : 1 } , { d : 1 } ] } ] } ) ; +assert.eq( 2, t.count() ); +// Without $elemMatch. +assertResults( { 'a.b.c':1, 'a.b.d':1 } ); +// With $elemMatch. +assertResults( { "a" : { $elemMatch : { "b" : { $elemMatch : { c : 1, d : 1 } } } } } ); + +// Cases with double dotted index field names. +t.drop(); +index = { 'a.b.x':1, 'a.b.y':1 }; +t.ensureIndex( index ); +t.save( { a:{ b:{ x:1, y:1 } } } ); +t.save( { a:[ { b:{ x:1 } }, { b:{ y:1 } } ] } ); +t.save( { a:[ { b:[ { x:1 }, { y:1 } ] } ] } ); +t.save( { a:[ { b:[ { x:1, y:1 } ] } ] } ); +assert.eq( 4, t.count() ); + +// No $elemMatch. +assertResults( { 'a.b.x':1, 'a.b.y':1 } ); +// $elemMatch with dotted children. +assertResults( { a:{ $elemMatch:{ 'b.x':1, 'b.y':1 } } } ); +// $elemMatch with undotted children. +assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); + +// Cases where a field is indexed along with its children. +t.dropIndexes(); +index = { 'a':1, 'a.b.x':1, 'a.b.y':1 }; +t.ensureIndex( index ); + +// With $ne. +assertResults( { a:{ $ne:4 }, 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); + +// No constraint on a prior parent field. +assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); + +// Cases with double dotted index field names branching to different fields at each dot. 
+t.drop(); +index = { 'a.b.c':1, 'a.e.f':1, 'a.b.d':1, 'a.e.g':1 } +t.ensureIndex( index ); +t.save( { a:{ b:{ c:1, d:1 }, e:{ f:1, g:1 } } } ); +t.save( { a:[ { b:{ c:1 }, e:{ f:1 } }, { b:{ d:1 }, e:{ g:1 } } ] } ); +t.save( { a:[ { b:{ c:1 } }, { e:{ f:1 } }, { b:{ d:1 } }, { e:{ g:1 } } ] } ); +t.save( { a:[ { b:[ { c:1 }, { d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } ); +t.save( { a:[ { b:[ { c:[ 1 ] }, { d:[ 1 ] } ] }, { e:[ { f:[ 1 ] }, { g:[ 1 ] } ] } ] } ); +t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } ); +t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1, g:1 } ] } ] } ); +assert.eq( 7, t.count() ); + +// Constraint on a prior cousin field. +assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, + 'a.e':{ $elemMatch:{ f:1, g:1 } } } ); + +// Different constraint on a prior cousin field. +assertResults( { 'a.b':{ $elemMatch:{ d:1 } }, + 'a.e':{ $elemMatch:{ f:1, g:1 } } } ); + + +// Cases with double dotted index field names branching to different fields at each dot, and the +// same field name strings after the second dot. +t.drop(); +index = { 'a.b.c':1, 'a.e.c':1, 'a.b.d':1, 'a.e.d':1 } +t.ensureIndex( index ); +t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { c:1, d:1 } ] } ] } ); +assert.eq( 1, t.count() ); + +// Constraint on a prior cousin field with the same field names. +assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, 'a.e':{ $elemMatch:{ c:1, d:1 } } } ); diff --git a/jstests/core/index2.js b/jstests/core/index2.js new file mode 100644 index 00000000000..b54abcaa792 --- /dev/null +++ b/jstests/core/index2.js @@ -0,0 +1,40 @@ +/* test indexing where the key is an embedded object. 
+ */ + +t = db.embeddedIndexTest2; + +t.drop(); +assert( t.findOne() == null ); + +o = { name : "foo" , z : { a : 17 } }; +p = { name : "foo" , z : { a : 17 } }; +q = { name : "barrr" , z : { a : 18 } }; +r = { name : "barrr" , z : { k : "zzz", L:[1,2] } }; + +t.save( o ); + +assert( t.findOne().z.a == 17 ); + +t.save( p ); +t.save( q ); + +assert( t.findOne({z:{a:17}}).z.a==17 ); +assert( t.find({z:{a:17}}).length() == 2 ); +assert( t.find({z:{a:18}}).length() == 1 ); + +t.save( r ); + +assert( t.findOne({z:{a:17}}).z.a==17 ); +assert( t.find({z:{a:17}}).length() == 2 ); +assert( t.find({z:{a:18}}).length() == 1 ); + +t.ensureIndex( { z : 1 } ); + +assert( t.findOne({z:{a:17}}).z.a==17 ); +assert( t.find({z:{a:17}}).length() == 2 ); +assert( t.find({z:{a:18}}).length() == 1 ); + +assert( t.find().sort( { z : 1 } ).length() == 4 ); +assert( t.find().sort( { z : -1 } ).length() == 4 ); + +assert(t.validate().valid); diff --git a/jstests/core/index3.js b/jstests/core/index3.js new file mode 100644 index 00000000000..80139460cb4 --- /dev/null +++ b/jstests/core/index3.js @@ -0,0 +1,16 @@ + + +t = db.index3; +t.drop(); + +assert( t.getIndexes().length == 0 ); + +t.ensureIndex( { name : 1 } ); + +t.save( { name : "a" } ); + +t.ensureIndex( { name : 1 } ); + +assert( t.getIndexes().length == 2 ); + +assert(t.validate().valid); diff --git a/jstests/core/index4.js b/jstests/core/index4.js new file mode 100644 index 00000000000..9dd731c83ee --- /dev/null +++ b/jstests/core/index4.js @@ -0,0 +1,33 @@ +// index4.js + + +t = db.index4; +t.drop(); + +t.save( { name : "alleyinsider" , + instances : [ + { pool : "prod1" } , + { pool : "dev1" } + ] + } ); + +t.save( { name : "clusterstock" , + instances : [ + { pool : "dev1" } + ] + } ); + + +// this should fail, not allowed -- we confirm that. 
+t.ensureIndex( { instances : { pool : 1 } } ); +assert.eq( 0, db.system.indexes.find( {ns:"test.index4",name:{$ne:"_id_"}} ).count(), "no indexes should be here yet"); + +t.ensureIndex( { "instances.pool" : 1 } ); + +sleep( 10 ); + +a = t.find( { instances : { pool : "prod1" } } ); +assert( a.length() == 1, "len1" ); +assert( a[0].name == "alleyinsider", "alley" ); + +assert(t.validate().valid, "valid" ); diff --git a/jstests/core/index5.js b/jstests/core/index5.js new file mode 100644 index 00000000000..841ac12ed45 --- /dev/null +++ b/jstests/core/index5.js @@ -0,0 +1,24 @@ +// index5.js - test reverse direction index + +function validate() { + assert.eq( 2, t.find().count() ); + f = t.find().sort( { a: 1 } ); + assert.eq( 2, t.count() ); + assert.eq( 1, f[ 0 ].a ); + assert.eq( 2, f[ 1 ].a ); + r = t.find().sort( { a: -1 } ); + assert.eq( 2, r.count() ); + assert.eq( 2, r[ 0 ].a ); + assert.eq( 1, r[ 1 ].a ); +} + +t = db.index5; +t.drop(); + +t.save( { a: 1 } ); +t.save( { a: 2 } ); + +validate(); + +t.ensureIndex( { a: -1 } ); +validate(); diff --git a/jstests/core/index6.js b/jstests/core/index6.js new file mode 100644 index 00000000000..8dbd8f74fcf --- /dev/null +++ b/jstests/core/index6.js @@ -0,0 +1,8 @@ +// index6.js Test indexes on array subelements. + +r = db.ed.db.index6; +r.drop(); + +r.save( { comments : [ { name : "eliot", foo : 1 } ] } ); +r.ensureIndex( { "comments.name": 1 } ); +assert( r.findOne( { "comments.name": "eliot" } ) ); diff --git a/jstests/core/index7.js b/jstests/core/index7.js new file mode 100644 index 00000000000..9e3a6c66d11 --- /dev/null +++ b/jstests/core/index7.js @@ -0,0 +1,67 @@ +// index7.js Test that we use an index when and only when we expect to. 
+ +function index( q ) { + assert( q.explain().cursor.match( /^BtreeCursor/ ) , "index assert" ); +} + +function noIndex( q ) { + assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" ); +} + +function start( k, q, rev) { + var exp = q.explain().indexBounds; + var s = {a:exp.a[rev?1:0][0],b:exp.b[0][0]}; + assert.eq( k.a, s.a ); + assert.eq( k.b, s.b ); +} +function end( k, q, rev) { + var exp = q.explain().indexBounds + var e = {a:exp.a[rev?1:0][1],b:exp.b[0][1]}; + assert.eq( k.a, e.a ); + assert.eq( k.b, e.b ); +} +function both( k, q ) { + start( k, q ); + end( k, q ); +} + +f = db.ed_db_index7; +f.drop(); + +f.save( { a : 5 } ) +f.ensureIndex( { a: 1 } ); +index( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { a: 1 } ) ); +noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) ); +f.drop(); + +f.ensureIndex( { a: 1, b: 1 } ); +assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] ); +assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] ); +assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] ); +assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] ); +assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c ); +assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c ); + +start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) ); +start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); +start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true ); +start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) ); +end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) ); +end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); +end( { a: "a", 
b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true ); +end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) ); + +start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) ); +end( { a: "{", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) ); + +start( { a: "az", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) ); +end( { a: "a{", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) ); + +both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { a: 1, b: 1 } ) ); + +both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).hint( { a: 1, b: 1 } ) ); +both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); + +f.drop(); +f.ensureIndex( { b: 1, a: 1 } ); +both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { b: 1, a: 1 } ) ); diff --git a/jstests/core/index8.js b/jstests/core/index8.js new file mode 100644 index 00000000000..719ad2dd2cb --- /dev/null +++ b/jstests/core/index8.js @@ -0,0 +1,62 @@ +// Test key uniqueness + +t = db.jstests_index8; +t.drop(); + +t.ensureIndex( { a: 1 } ); +t.ensureIndex( { b: 1 }, true ); +t.ensureIndex( { c: 1 }, [ false, "cIndex" ] ); + +checkIndexes = function( num ) { +// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() ); + indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } ).toArray(); + var start = 0; + if ( indexes[0].name == "_id_" ) + start = 1; + assert( !indexes[ start ].unique , "A" + num ); + assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) ); + assert( !indexes[ start + 2 ].unique , "C" + num ); + assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num ); +} + +checkIndexes( 1 ); + +t.reIndex(); +checkIndexes( 2 ); + +t.save( { a: 2, b: 1 } ); +t.save( { a: 2 } ); +assert.eq( 2, t.find().count() ); + +t.save( { b: 4 } ); +t.save( { b: 4 } ); +assert.eq( 3, 
t.find().count() ); +assert.eq( 3, t.find().hint( {c:1} ).toArray().length ); +assert.eq( 3, t.find().hint( {b:1} ).toArray().length ); +assert.eq( 3, t.find().hint( {a:1} ).toArray().length ); + +t.drop(); +t.ensureIndex( { a: 1, b: -1 }, true ); +t.save( { a: 2, b: 3 } ); +t.save( { a: 2, b: 3 } ); +t.save( { a: 2, b: 4 } ); +t.save( { a: 1, b: 3 } ); +assert.eq( 3, t.find().count() ); + +t.drop(); +t.ensureIndex( { a: 1 }, true ); +t.save( { a: [ 2, 3 ] } ); +t.save( { a: 2 } ); +assert.eq( 1, t.find().count() ); + +t.drop(); +t.ensureIndex( { a: 1 }, true ); +t.save( { a: 2 } ); +t.save( { a: [ 1, 2, 3 ] } ); +t.save( { a: [ 3, 2, 1 ] } ); +assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length ); +assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length ); + +assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" ); +assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" ); + diff --git a/jstests/core/index9.js b/jstests/core/index9.js new file mode 100644 index 00000000000..04b900949ec --- /dev/null +++ b/jstests/core/index9.js @@ -0,0 +1,25 @@ +t = db.jstests_index9; + +t.drop(); +db.createCollection( "jstests_index9" ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index with default collection" ); +t.drop(); +db.createCollection( "jstests_index9", {autoIndexId: true} ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index if autoIndexId: true" ); + +t.drop(); +db.createCollection( "jstests_index9", {autoIndexId:false} ); +assert.eq( 0, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 0 index if autoIndexId: false" ); +t.createIndex( { _id:1 } ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); +t.createIndex( { _id:1 } ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); + 
+t.drop(); +t.createIndex( { _id:1 } ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); + +t.drop(); +t.save( {a:1} ); +t.createIndex( { _id:1 } ); +assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); diff --git a/jstests/core/indexOtherNamespace.js b/jstests/core/indexOtherNamespace.js new file mode 100644 index 00000000000..7df55188606 --- /dev/null +++ b/jstests/core/indexOtherNamespace.js @@ -0,0 +1,27 @@ +// SERVER-8814: Test that only the system.indexes namespace can be used to build indexes. + +var otherDB = db.getSiblingDB("indexOtherNS"); +otherDB.dropDatabase(); + +otherDB.foo.insert({a:1}) +assert.eq(1, otherDB.system.indexes.count()); +assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor); + +if (db.getMongo().writeMode() == 'commands') { + assert.throws(function() { + otherDB.randomNS.system.indexes.insert({ ns: "indexOtherNS.foo", + key: { a: 1}, name: "a_1" }); + }); +} +else { + assert.writeError(otherDB.randomNS.system.indexes.insert({ ns: "indexOtherNS.foo", + key: { a: 1 }, name: "a_1"})); +} + + + +// Assert that index didn't actually get built +assert.eq(1, otherDB.system.indexes.count()); +assert.eq(null, otherDB.system.namespaces.findOne({name : "indexOtherNS.foo.$a_1"})); +assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor); +otherDB.dropDatabase(); diff --git a/jstests/core/indexStatsCommand.js b/jstests/core/indexStatsCommand.js new file mode 100644 index 00000000000..9c055e37e26 --- /dev/null +++ b/jstests/core/indexStatsCommand.js @@ -0,0 +1,88 @@ +db.jstests_commands.drop(); +db.createCollection("jstests_commands"); + +t = db.jstests_commands; + +for (var i = 0; i < 3000; ++i) { + t.insert({i: i, d: i % 13}); +} + +function textWithIndexVersion(version) { + var indexName = 'test_d_' + version; + t.ensureIndex({d: 1}, {v: version, name: indexName}); + + var result = t.indexStats({index: indexName}); + if (result["bad cmd"]) { + print("storageDetails command not 
available: skipping"); + return; + } + + assert.commandWorked(result); + + assert(result.index === indexName); + assert(result.isIdIndex === false); + assert(isObject(result.keyPattern)); + assert.neq(result.keyPattern, null); + assert(isString(result.storageNs)); + assert(isNumber(result.bucketBodyBytes)); + assert.eq(result.depth, 1); + assert(isObject(result.overall)); + assert.neq(result.overall, null); + + function checkStats(data) { + assert(data.count instanceof NumberLong); + assert(isNumber(data.mean)); + assert(isNumber(data.stddev)); + assert(isNumber(data.min)); + assert(isNumber(data.max)); + } + + function checkAreaStats(data) { + assert(isNumber(data.numBuckets)); + + assert(isObject(data.keyCount)); + assert.neq(data.keyCount, null); + checkStats(data.keyCount); + + assert(isObject(data.usedKeyCount)); + assert.neq(data.usedKeyCount, null); + checkStats(data.usedKeyCount); + + assert(isObject(data.bsonRatio)); + assert.neq(data.bsonRatio, null); + checkStats(data.bsonRatio); + + assert(isObject(data.keyNodeRatio)); + assert.neq(data.keyNodeRatio, null); + checkStats(data.keyNodeRatio); + + assert(isObject(data.fillRatio)); + assert.neq(data.fillRatio, null); + checkStats(data.fillRatio); + } + + assert(isObject(result.overall)); + checkAreaStats(result.overall); + + assert(result.perLevel instanceof Array); + for (var i = 0; i < result.perLevel.length; ++i) { + assert(isObject(result.perLevel[i])); + checkAreaStats(result.perLevel[i]); + } + + result = t.indexStats(); + assert.commandFailed(result); + assert(result.errmsg.match(/index name is required/)); + + result = t.indexStats({index: "nonexistent"}) + assert.commandFailed(result); + assert(result.errmsg.match(/index does not exist/)); + + result = t.indexStats({index: "_id_", expandNodes: ['string']}) + assert.commandFailed(result); + assert(result.errmsg.match(/expandNodes.*numbers/)); + + t.dropIndex(indexName); +} + +[0, 1].map(textWithIndexVersion); diff --git a/jstests/core/index_arr1.js 
b/jstests/core/index_arr1.js new file mode 100644 index 00000000000..d35cb80a83f --- /dev/null +++ b/jstests/core/index_arr1.js @@ -0,0 +1,23 @@ + +t = db.index_arr1 +t.drop() + +t.insert( { _id : 1 , a : 5 , b : [ { x : 1 } ] } ) +t.insert( { _id : 2 , a : 5 , b : [] } ) +t.insert( { _id : 3 , a : 5 } ) + +assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A1" ) + +t.ensureIndex( { a : 1 , "b.x" : 1 } ) + +//t.find().sort( { a : 1 } )._addSpecial( "$returnKey" , 1 ).forEach( printjson ) +//t.find( { a : 5 } ).forEach( printjson ) + +assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A2" ); // SERVER-1082 + + +assert.eq( 2 , t.getIndexes().length , "B1" ) +t.insert( { _id : 4 , a : 5 , b : [] } ) +t.ensureIndex( { a : 1 , "b.a" : 1 , "b.c" : 1 } ) +assert.eq( 3 , t.getIndexes().length , "B2" ) + diff --git a/jstests/core/index_arr2.js b/jstests/core/index_arr2.js new file mode 100644 index 00000000000..101655f2ce9 --- /dev/null +++ b/jstests/core/index_arr2.js @@ -0,0 +1,51 @@ +NUM = 20; +M = 5; + +t = db.jstests_arr2; + +function test( withIndex ){ + t.drop(); + + // insert a bunch of items to force queries to use the index. + newObject = { + _id : 1, + a : [ + { b : { c : 1 } } + ] + } + + now = (new Date()).getTime() / 1000; + for (created = now - NUM; created <= now; created++ ) { + newObject['created'] = created; + t.insert(newObject); + newObject['_id'] ++; + } + + // change the last M items. + query = { + 'created' : { '$gte' : now - M } + } + + Z = t.find( query ).count(); + + if ( withIndex ){ + //t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } ) + //t.ensureIndex( { created : -1 } ) + t.ensureIndex( { 'a.b.c' : 1 } , { name : "x" } ) + } + + var res = t.update(query, { '$set' : { "a.0.b.c" : 0 } } , false , true ); + assert.eq( Z, res.nMatched, "num updated withIndex:" + withIndex ); + + // now see how many were actually updated. 
+ query['a.b.c'] = 0; + + count = t.count(query); + + assert.eq( Z , count , "count after withIndex:" + withIndex ); +} + +test( false ) +test( true ); + + diff --git a/jstests/core/index_big1.js b/jstests/core/index_big1.js new file mode 100644 index 00000000000..6fbffa4415e --- /dev/null +++ b/jstests/core/index_big1.js @@ -0,0 +1,38 @@ +// check where "key to big" happens + +t = db.index_big1; + +N = 3200; +t.drop(); + +var s = ""; + +t.ensureIndex( { a : 1 , x : 1 } ) + +var bulk = t.initializeUnorderedBulkOp(); +for ( i=0; i= 0; i--) { + t.insert({ _id: i, k: keys[i] }); + } + } +} + +var expect = null; + +function check() { + assert(t.validate().valid); + assert.eq( 5, t.count() ); + + var c = t.find({ k: /^a/ }).count(); + assert.eq( 5, c ); +} + +function runTest( order ) { + t.drop(); + t.ensureIndex({ k: 1 }); + doInsert( order ); + check(); // check incremental addition + + t.reIndex(); + check(); // check bottom up + + t.drop(); + doInsert( order ); + assert.eq( 1, t.getIndexes().length ); + t.ensureIndex({ k: 1 }); + assert.eq( 1, t.getIndexes().length ); + + t.drop(); + doInsert( order ); + assert.eq( 1, t.getIndexes().length ); + t.ensureIndex({ k: 1 }, { background: true }); + assert.eq( 1, t.getIndexes().length ); +} + +runTest( 1 ); +runTest( 2 ); diff --git a/jstests/core/index_bigkeys_update.js b/jstests/core/index_bigkeys_update.js new file mode 100644 index 00000000000..6bdaf033542 --- /dev/null +++ b/jstests/core/index_bigkeys_update.js @@ -0,0 +1,18 @@ + +bigString = ""; +while ( bigString.length < 16000 ) + bigString += "."; + +t = db.index_bigkeys_update; +t.drop(); + +t.insert( { _id : 0, x : "asd" } ); +t.ensureIndex( { x : 1 } ); + +assert.eq( 1, t.count() ); + +assert.writeError(t.update( {} , { $set : { x : bigString } } )); + +assert.eq( 1, t.count() ); +assert.eq( "asd", t.findOne().x ); // make sure doc is the old version +assert.eq( "asd", t.findOne( { _id : 0 } ).x ); // make sure doc is the old version diff --git 
a/jstests/core/index_bounds_number_edge_cases.js b/jstests/core/index_bounds_number_edge_cases.js new file mode 100644 index 00000000000..0ab482028ed --- /dev/null +++ b/jstests/core/index_bounds_number_edge_cases.js @@ -0,0 +1,50 @@ +// end-to-end tests on index bounds for numerical values +// should handle numerical extremes +// such as Number.MAX_VALUE and Infinity + +t = db.indexboundsnumberedgecases; + +t.drop(); + +t.ensureIndex({a: 1}); + +t.save({a: -Infinity}); +t.save({a: -Number.MAX_VALUE}); +t.save({a: 1}); +t.save({a: Number.MAX_VALUE}); +t.save({a: Infinity}); + +// index bounds generated by query planner are +// validated in unit tests + +// lte + +assert.eq(1, t.find({a: {$lte: -Infinity}}).itcount()); +assert.eq(2, t.find({a: {$lte: -Number.MAX_VALUE}}).itcount()); +assert.eq(3, t.find({a: {$lte: 1}}).itcount()); +assert.eq(4, t.find({a: {$lte: Number.MAX_VALUE}}).itcount()); +assert.eq(5, t.find({a: {$lte: Infinity}}).itcount()); + +// lt + +assert.eq(0, t.find({a: {$lt: -Infinity}}).itcount()); +assert.eq(1, t.find({a: {$lt: -Number.MAX_VALUE}}).itcount()); +assert.eq(2, t.find({a: {$lt: 1}}).itcount()); +assert.eq(3, t.find({a: {$lt: Number.MAX_VALUE}}).itcount()); +assert.eq(4, t.find({a: {$lt: Infinity}}).itcount()); + +// gt + +assert.eq(0, t.find({a: {$gt: Infinity}}).itcount()); +assert.eq(1, t.find({a: {$gt: Number.MAX_VALUE}}).itcount()); +assert.eq(2, t.find({a: {$gt: 1}}).itcount()); +assert.eq(3, t.find({a: {$gt: -Number.MAX_VALUE}}).itcount()); +assert.eq(4, t.find({a: {$gt: -Infinity}}).itcount()); + +// gte + +assert.eq(1, t.find({a: {$gte: Infinity}}).itcount()); +assert.eq(2, t.find({a: {$gte: Number.MAX_VALUE}}).itcount()); +assert.eq(3, t.find({a: {$gte: 1}}).itcount()); +assert.eq(4, t.find({a: {$gte: -Number.MAX_VALUE}}).itcount()); +assert.eq(5, t.find({a: {$gte: -Infinity}}).itcount()); diff --git a/jstests/core/index_check1.js b/jstests/core/index_check1.js new file mode 100644 index 00000000000..7113dff0877 --- /dev/null 
+++ b/jstests/core/index_check1.js @@ -0,0 +1,31 @@ + +db.somecollection.drop(); + +assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 1); + +db.somecollection.save({a:1}); + +assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 2); + +db.somecollection.ensureIndex({a:1}); + +var z = db.system.namespaces.find({name:/somecollection/}).length(); +assert( z >= 1 , 3 ); + +if( z == 1 ) + print("warning: z==1, should only happen with alternate storage engines"); + +db.somecollection.drop(); + +assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 4); + +db.somecollection.save({a:1}); + +assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 5); + +db.somecollection.ensureIndex({a:1}); + +var x = db.system.namespaces.find({name:/somecollection/}).length(); +assert( x == 2 || x == z, 6); + +assert(db.somecollection.validate().valid, 7); diff --git a/jstests/core/index_check2.js b/jstests/core/index_check2.js new file mode 100644 index 00000000000..eed3b8e42b7 --- /dev/null +++ b/jstests/core/index_check2.js @@ -0,0 +1,41 @@ + +t = db.index_check2; +t.drop(); + +for ( var i=0; i<1000; i++ ){ + var a = []; + for ( var j=1; j<5; j++ ){ + a.push( "tag" + ( i * j % 50 )); + } + t.save( { num : i , tags : a } ); +} + +q1 = { tags : "tag6" }; +q2 = { tags : "tag12" }; +q3 = { tags : { $all : [ "tag6" , "tag12" ] } } + +assert.eq( 120 , t.find( q1 ).itcount() , "q1 a"); +assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" ); +assert.eq( 60 , t.find( q3 ).itcount() , "q3 a"); + +t.ensureIndex( { tags : 1 } ); + +assert.eq( 120 , t.find( q1 ).itcount() , "q1 a"); +assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" ); +assert.eq( 60 , t.find( q3 ).itcount() , "q3 a"); + +assert.eq( "BtreeCursor tags_1" , t.find( q1 ).explain().cursor , "e1" ); +assert.eq( "BtreeCursor tags_1" , t.find( q2 ).explain().cursor , "e2" ); +assert.eq( "BtreeCursor tags_1" , t.find( q3 ).explain().cursor , "e3" ); + +scanned1 = 
t.find(q1).explain().nscanned; +scanned2 = t.find(q2).explain().nscanned; +scanned3 = t.find(q3).explain().nscanned; + +//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 ); + +// $all should just iterate either of the words +assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" ); + +exp3 = t.find( q3 ).explain(); +assert.eq( exp3.indexBounds.tags[0][0], exp3.indexBounds.tags[0][1], "$all range not a single key" ); diff --git a/jstests/core/index_check3.js b/jstests/core/index_check3.js new file mode 100644 index 00000000000..55515aff3f5 --- /dev/null +++ b/jstests/core/index_check3.js @@ -0,0 +1,63 @@ + + +t = db.index_check3; +t.drop(); + + + +t.save( { a : 1 } ); +t.save( { a : 2 } ); +t.save( { a : 3 } ); +t.save( { a : "z" } ); + +assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" ); +assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" ); + +t.ensureIndex( { a : 1 } ); + +assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" ); +assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" ); + +t.drop(); + +for ( var i=0; i<100; i++ ){ + var o = { i : i }; + if ( i % 2 == 0 ) + o.foo = i; + t.save( o ); +} + +t.ensureIndex( { foo : 1 } ); + +//printjson( t.find( { foo : { $lt : 50 } } ).explain() ); +assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" ); +//printjson( t.find( { foo : { $gt : 50 } } ).explain() ); +assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" ); + + +t.drop(); +t.save( {i:'a'} ); +for( var i=0; i < 10; ++i ) { + t.save( {} ); +} + +t.ensureIndex( { i : 1 } ); + +//printjson( t.find( { i : { $lte : 'a' } } ).explain() ); +assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" ); +//printjson( t.find( { i : { $gte : 'a' } } ).explain() ); +// bug SERVER-99 +assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" ); +assert.eq( 1 , t.find( { i : { $gte 
: 'a' } } ).count() , "gte a" ); +assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" ); +assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" ); +assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" ); + +t.save( { i : "b" } ); + +assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" ); +assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" ); +assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" ); +assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" ); +assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" ); +assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" ); diff --git a/jstests/core/index_check5.js b/jstests/core/index_check5.js new file mode 100644 index 00000000000..eabb929749f --- /dev/null +++ b/jstests/core/index_check5.js @@ -0,0 +1,17 @@ + +t = db.index_check5 +t.drop(); + +t.save( { "name" : "Player1" , + "scores" : [{"level" : 1 , "score" : 100}, + {"level" : 2 , "score" : 50}], + "total" : 150 } ); +t.save( { "name" : "Player2" , + "total" : 90 , + "scores" : [ {"level" : 1 , "score" : 90}, + {"level" : 2 , "score" : 0} ] + } ); + +assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" ); +t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } ); +assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" ); diff --git a/jstests/core/index_check6.js b/jstests/core/index_check6.js new file mode 100644 index 00000000000..be395fb3d2e --- /dev/null +++ b/jstests/core/index_check6.js @@ -0,0 +1,82 @@ + +t = db.index_check6; +t.drop(); + +t.ensureIndex( { age : 1 , rating : 1 } ); + +for ( var age=10; age<50; age++ ){ + for ( var rating=0; rating<10; rating++ ){ + t.save( { age : age , rating : rating } ); + } +} + +assert.eq( 
10 , t.find( { age : 30 } ).explain().nscanned , "A" ); +assert.eq( 20 , t.find( { age : { $gte : 29 , $lte : 30 } } ).explain().nscanned , "B" ); +assert.eq( 18 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,9] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C1" ); +assert.eq( 23 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C2" ); +assert.eq( 28 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [1,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C3" ); + +assert.eq( 4 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : 5 } ).hint( {age:1,rating:1} ).explain().nscanned , "C" ); // SERVER-371 +assert.eq( 6 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } } ).hint( {age:1,rating:1} ).explain().nscanned , "D" ); // SERVER-371 + +assert.eq.automsg( "2", "t.find( { age:30, rating:{ $gte:4, $lte:5} } ).explain().nscanned" ); + +t.drop(); + +for ( var a=1; a<10; a++ ){ + for ( var b=0; b<10; b++ ){ + for ( var c=0; c<10; c++ ) { + t.save( { a:a, b:b, c:c } ); + } + } +} + +function doQuery( count, query, sort, index ) { + var nscanned = t.find( query ).hint( index ).sort( sort ).explain().nscanned; + assert(Math.abs(count - nscanned) <= 2); +} + +function doTest( sort, index ) { + doQuery( 1, { a:5, b:5, c:5 }, sort, index ); + doQuery( 2, { a:5, b:5, c:{$gte:5,$lte:6} }, sort, index ); + doQuery( 1, { a:5, b:5, c:{$gte:5.5,$lte:6} }, sort, index ); + doQuery( 1, { a:5, b:5, c:{$gte:5,$lte:5.5} }, sort, index ); + doQuery( 3, { a:5, b:5, c:{$gte:5,$lte:7} }, sort, index ); + doQuery( 4, { a:5, b:{$gte:5,$lte:6}, c:5 }, sort, index ); + if ( sort.b > 0 ) { + doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index ); + doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index ); + } else { + doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index ); + doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index ); + } + doQuery( 
7, { a:5, b:{$gte:5,$lte:7}, c:5 }, sort, index ); + doQuery( 4, { a:{$gte:5,$lte:6}, b:5, c:5 }, sort, index ); + if ( sort.a > 0 ) { + doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index ); + doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index ); + doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); + } else { + doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index ); + doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index ); + doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); + } + doQuery( 7, { a:{$gte:5,$lte:7}, b:5, c:5 }, sort, index ); + doQuery( 6, { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); + doQuery( 6, { a:5, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index ); + doQuery( 10, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:5 }, sort, index ); + doQuery( 14, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index ); +} + +for ( var a = -1; a <= 1; a += 2 ) { + for( var b = -1; b <= 1; b += 2 ) { + for( var c = -1; c <= 1; c += 2 ) { + t.dropIndexes(); + var spec = {a:a,b:b,c:c}; + t.ensureIndex( spec ); + doTest( spec, spec ); + doTest( {a:-a,b:-b,c:-c}, spec ); + } + } +} + diff --git a/jstests/core/index_check7.js b/jstests/core/index_check7.js new file mode 100644 index 00000000000..1d0aaebba35 --- /dev/null +++ b/jstests/core/index_check7.js @@ -0,0 +1,15 @@ + +t = db.index_check7 +t.drop() + +for ( var i=0; i<100; i++ ) + t.save( { x : i } ) + +t.ensureIndex( { x : 1 } ) +assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" ) + +t.ensureIndex( { x : -1 } ) +assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "B" ) + +assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" ); + diff --git a/jstests/core/index_check8.js b/jstests/core/index_check8.js new file mode 100644 index 00000000000..1964ecbe7fc --- /dev/null +++ b/jstests/core/index_check8.js @@ -0,0 +1,21 @@ + +t = db.index_check8 +t.drop(); + +t.insert( { a : 1 
, b : 1 , c : 1 , d : 1 , e : 1 } ) +t.ensureIndex( { a : 1 , b : 1 , c : 1 } ) +t.ensureIndex({ a: 1, b: 1, d: 1, e: 1 }) + +// this block could be added to many tests in theory... +if ((new Date()) % 10 == 0) { + var coll = t.toString().substring(db.toString().length + 1); + print("compacting " + coll + " before continuing testing"); + // don't check return code - false for mongos + print("ok: " + db.runCommand({ compact: coll, dev: true })); +} + +x = t.find( { a : 1 , b : 1 , d : 1 } ).sort( { e : 1 } ).explain() +assert( ! x.scanAndOrder , "A : " + tojson( x ) ) + +x = t.find( { a : 1 , b : 1 , c : 1 , d : 1 } ).sort( { e : 1 } ).explain() +//assert( ! x.scanAndOrder , "B : " + tojson( x ) ) diff --git a/jstests/core/index_diag.js b/jstests/core/index_diag.js new file mode 100644 index 00000000000..21840682e7f --- /dev/null +++ b/jstests/core/index_diag.js @@ -0,0 +1,50 @@ + +t = db.index_diag +t.drop(); + +t.ensureIndex( { x : 1 } ); + +all = [] +ids = [] +xs = [] + +function r( a ){ + var n = [] + for ( var x=a.length-1; x>=0; x-- ) + n.push( a[x] ); + return n; +} + +for ( i=1; i<4; i++ ){ + o = { _id : i , x : -i } + t.insert( o ); + all.push( o ); + ids.push( { _id : i } ); + xs.push( { x : -i } ); +} + +assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" ); +assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" ); + +assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" ); +assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" ); + +assert.eq( ids , t.find().sort( { _id : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B1" ) +assert.eq( r( ids ) , t.find().sort( { _id : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B2" ) +assert.eq( xs , t.find().sort( { x : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B3" ) +assert.eq( r( xs ) , t.find().sort( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" ) + +assert.eq( r( xs ) , t.find().hint( { x : 1 } )._addSpecial( "$returnKey" 
, true ).toArray() , "B4" ) + +// SERVER-4981 +t.ensureIndex( { _id : 1 , x : 1 } ); +assert.eq( all , + t.find().hint( { _id : 1 , x : 1 } )._addSpecial( "$returnKey" , true ).toArray() + ) +assert.eq( r( all ) , + t.find().hint( { _id : 1 , x : 1 } ).sort( { x : 1 } ) + ._addSpecial( "$returnKey" , true ).toArray() + ) + +assert.eq( [ {} , {} , {} ], + t.find().hint( { $natural : 1 } )._addSpecial( "$returnKey" , true ).toArray() ) diff --git a/jstests/core/index_elemmatch1.js b/jstests/core/index_elemmatch1.js new file mode 100644 index 00000000000..263eb252364 --- /dev/null +++ b/jstests/core/index_elemmatch1.js @@ -0,0 +1,43 @@ + +t = db.index_elemmatch1 +t.drop() + +x = 0 +y = 0 +var bulk = t.initializeUnorderedBulkOp(); +for ( a=0; a<100; a++ ){ + for ( b=0; b<100; b++ ){ + bulk.insert( { a : a , b : b % 10 , arr : [ { x : x++ % 10 , y : y++ % 10 } ] } ); + } +} +assert.writeOK(bulk.execute()); + +t.ensureIndex( { a : 1 , b : 1 } ) +t.ensureIndex( { "arr.x" : 1 , a : 1 } ) + +assert.eq( 100 , t.find( { a : 55 } ).itcount() , "A1" ); +assert.eq( 10 , t.find( { a : 55 , b : 7 } ).itcount() , "A2" ); + +q = { a : 55 , b : { $in : [ 1 , 5 , 8 ] } } +assert.eq( 30 , t.find( q ).itcount() , "A3" ) + +q.arr = { $elemMatch : { x : 5 , y : 5 } } +assert.eq( 10 , t.find( q ).itcount() , "A4" ) + +function nscannedForCursor( explain, cursor ) { + plans = explain.allPlans; + for( i in plans ) { + if ( plans[ i ].cursor == cursor ) { + return plans[ i ].nscanned; + } + } + return -1; +} + +assert.eq( t.find(q).itcount(), + nscannedForCursor( t.find(q).explain(true), 'BtreeCursor arr.x_1_a_1' ), "A5" ); + +printjson(t.find(q).explain()); +print("Num results:"); +assert.eq(10, t.find(q).itcount()); +printjson(t.find(q).itcount()); diff --git a/jstests/core/index_filter_commands.js b/jstests/core/index_filter_commands.js new file mode 100644 index 00000000000..cec2437fff0 --- /dev/null +++ b/jstests/core/index_filter_commands.js @@ -0,0 +1,167 @@ +/** + * Index Filter 
commands + * + * Commands: + * - planCacheListFilters + * Displays index filters for all query shapes in a collection. + * + * - planCacheClearFilters + * Clears index filter for a single query shape or, + * if the query shape is omitted, all filters for the collection. + * + * - planCacheSetFilter + * Sets index filter for a query shape. Overrides existing filter. + * + * Not a lot of data access in this test suite. Hint commands + * manage a non-persistent mapping in the server of + * query shape to list of index specs. + * + * Only time we might need to execute a query is to check the plan + * cache state. We would do this with the planCacheListPlans command + * on the same query shape with the index filters. + * + */ + +var t = db.jstests_index_filter_commands; + +t.drop(); + +t.save({a: 1}); + +// Add 2 indexes. +// 1st index is more efficient. +// 2nd and 3rd indexes will be used to test index filters. +var indexA1 = {a: 1}; +var indexA1B1 = {a: 1, b: 1}; +var indexA1C1 = {a: 1, c: 1}; +t.ensureIndex(indexA1); +t.ensureIndex(indexA1B1); +t.ensureIndex(indexA1C1); + +var queryA1 = {a: 1}; +var projectionA1 = {_id: 0, a: 1}; +var sortA1 = {a: -1}; + +// +// Tests for planCacheListFilters, planCacheClearFilters, planCacheSetFilter +// + +// Utility function to list index filters. +function getFilters() { + var res = t.runCommand('planCacheListFilters'); + print('planCacheListFilters() = ' + tojson(res)); + assert.commandWorked(res, 'planCacheListFilters failed'); + assert(res.hasOwnProperty('filters'), 'filters missing from planCacheListFilters result'); + return res.filters; + +} + +// Check if key is in plan cache. +function planCacheContains(shape) { + var res = t.runCommand('planCacheListPlans', shape); + return res.ok; +} + +// Utility function to list plans for a query. 
+function getPlans(shape) { + var res = t.runCommand('planCacheListPlans', shape); + assert.commandWorked(res, 'planCacheListPlans(' + tojson(shape, '', true) + ' failed'); + assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' + + tojson(shape, '', true) + ') result'); + return res.plans; +} + +// It is an error to retrieve index filters on a non-existent collection. +var missingCollection = db.jstests_index_filter_commands_missing; +missingCollection.drop(); +assert.commandFailed(missingCollection.runCommand('planCacheListFilters')); + +// Retrieve index filters from an empty test collection. +var filters = getFilters(); +assert.eq(0, filters.length, 'unexpected number of index filters in planCacheListFilters result'); + +// Check details of winning plan in plan cache before setting index filter. +assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count'); +var shape = {query: queryA1, sort: sortA1, projection: projectionA1}; +var planBeforeSetFilter = getPlans(shape)[0]; +print('Winning plan (before setting index filters) = ' + tojson(planBeforeSetFilter)); +// Check filterSet field in plan details +assert.eq(false, planBeforeSetFilter.filterSet, 'missing or invalid filterSet field in plan details'); + +// Add index filters for simple query. 
+assert.commandWorked(t.runCommand('planCacheSetFilter', + {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]})); +filters = getFilters(); +assert.eq(1, filters.length, 'no change in query settings after successfully setting index filters'); +assert.eq(queryA1, filters[0].query, 'unexpected query in filters'); +assert.eq(sortA1, filters[0].sort, 'unexpected sort in filters'); +assert.eq(projectionA1, filters[0].projection, 'unexpected projection in filters'); +assert.eq(2, filters[0].indexes.length, 'unexpected number of indexes in filters'); +assert.eq(indexA1B1, filters[0].indexes[0], 'unexpected first index'); +assert.eq(indexA1C1, filters[0].indexes[1], 'unexpected first index'); + +// Plans for query shape should be removed after setting index filter. +assert(!planCacheContains(shape), 'plan cache for query shape not flushed after updating filter'); + +// Check details of winning plan in plan cache after setting filter and re-executing query. +assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count'); +planAfterSetFilter = getPlans(shape)[0]; +print('Winning plan (after setting index filter) = ' + tojson(planAfterSetFilter)); +// Check filterSet field in plan details +assert.eq(true, planAfterSetFilter.filterSet, 'missing or invalid filterSet field in plan details'); + +// Execute query with cursor.hint(). Check that user-provided hint is overridden. +// Applying the index filters will remove the user requested index from the list +// of indexes provided to the planner. +// If the planner still tries to use the user hint, we will get a 'bad hint' error. 
+t.find(queryA1, projectionA1).sort(sortA1).hint(indexA1).itcount(); + +// Clear filters +assert.commandWorked(t.runCommand('planCacheClearFilters')); +filters = getFilters(); +assert.eq(0, filters.length, 'filters not cleared after successful planCacheClearFilters command'); + +// Plans should be removed after clearing filters +assert(!planCacheContains(shape), 'plan cache for query shape not flushed after clearing filters'); + +print('Plan details before setting filter = ' + tojson(planBeforeSetFilter.details, '', true)); +print('Plan details after setting filter = ' + tojson(planAfterSetFilter.details, '', true)); + +// +// explain.filterSet +// cursor.explain() should indicate if index filter has been applied. +// The following 3 runners should always provide a value for 'filterSet': +// - SingleSolutionRunner +// - MultiPlanRunner +// - CachedPlanRuner +// + +// No filter set. + +t.getPlanCache().clear(); +// SingleSolutionRunner +assert.eq(false, t.find({z: 1}).explain().filterSet, + 'missing or invalid filterSet field in SingleSolutionRunner explain'); +// MultiPlanRunner +assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, + 'missing or invalid filterSet field in MultiPlanRunner explain'); +// CachedPlanRunner +assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, + 'missing or invalid filterSet field in CachedPlanRunner explain'); + +// Add index filter. +assert.commandWorked(t.runCommand('planCacheSetFilter', + {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]})); +// Index filter with non-existent index key pattern to force use of single solution runner. 
+assert.commandWorked(t.runCommand('planCacheSetFilter', {query: {z: 1}, indexes: [{z: 1}]})); + +t.getPlanCache().clear(); +// SingleSolutionRunner +assert.eq(true, t.find({z: 1}).explain().filterSet, + 'missing or invalid filterSet field in SingleSolutionRunner explain'); +// MultiPlanRunner +assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, + 'missing or invalid filterSet field in MultiPlanRunner explain'); +// CachedPlanRunner +assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, + 'missing or invalid filterSet field in CachedPlanRunner explain'); diff --git a/jstests/core/index_many.js b/jstests/core/index_many.js new file mode 100644 index 00000000000..f14f3c3e0fc --- /dev/null +++ b/jstests/core/index_many.js @@ -0,0 +1,51 @@ +/* test using lots of indexes on one collection */ + +t = db.many; + +function f() { + + t.drop(); + db.many2.drop(); + + t.save({ x: 9, y : 99 }); + t.save({ x: 19, y : 99 }); + + x = 2; + var lastErr = null; + while (x < 70) { + patt = {}; + patt[x] = 1; + if (x == 20) + patt = { x: 1 }; + if (x == 64) + patt = { y: 1 }; + lastErr = t.ensureIndex(patt); + x++; + } + + assert.writeError(lastErr, "should have got an error 'too many indexes'"); + + // 40 is the limit currently + lim = t.getIndexes().length; + if (lim != 64) { + print("# of indexes should be 64 but is : " + lim); + return; + } + assert(lim == 64, "not 64 indexes"); + + assert(t.find({ x: 9 }).length() == 1, "b"); + assert(t.find({ x: 9 }).explain().cursor.match(/Btree/), "not using index?"); + + assert(t.find({ y: 99 }).length() == 2, "y idx"); + assert(t.find({ y: 99 }).explain().cursor.match(/Btree/), "not using y index?"); + + /* check that renamecollection remaps all the indexes right */ + assert(t.renameCollection("many2").ok, "rename failed"); + assert(t.find({ x: 9 }).length() == 0, "many2a"); + assert(db.many2.find({ x: 9 }).length() == 1, "many2b"); + assert(t.find({ y: 99 }).length() == 0, "many2c"); + 
assert(db.many2.find({ y: 99 }).length() == 2, "many2d"); + +} + +f(); diff --git a/jstests/core/index_many2.js b/jstests/core/index_many2.js new file mode 100644 index 00000000000..f113b8b87ed --- /dev/null +++ b/jstests/core/index_many2.js @@ -0,0 +1,31 @@ + +t = db.index_many2; +t.drop() + +t.save( { x : 1 } ) + +assert.eq( 1 , t.getIndexKeys().length , "A1" ) + +function make( n ){ + var x = {} + x["x"+n] = 1; + return x; +} + +for ( i=1; i<1000; i++ ){ + t.ensureIndex( make(i) ); +} + +assert.eq( 64 , t.getIndexKeys().length , "A2" ) + + +num = t.getIndexKeys().length + +t.dropIndex( make(num-1) ) +assert.eq( num - 1 , t.getIndexKeys().length , "B0" ) + +t.ensureIndex( { z : 1 } ) +assert.eq( num , t.getIndexKeys().length , "B1" ) + +t.dropIndex( "*" ); +assert.eq( 1 , t.getIndexKeys().length , "C1" ) diff --git a/jstests/core/index_sparse1.js b/jstests/core/index_sparse1.js new file mode 100644 index 00000000000..fbcc20a9217 --- /dev/null +++ b/jstests/core/index_sparse1.js @@ -0,0 +1,45 @@ + +t = db.index_sparse1; +t.drop(); + +t.insert( { _id : 1 , x : 1 } ) +t.insert( { _id : 2 , x : 2 } ) +t.insert( { _id : 3 , x : 2 } ) +t.insert( { _id : 4 } ) +t.insert( { _id : 5 } ) + +assert.eq( 5 , t.count() , "A1" ) +assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "A2" ) + +t.ensureIndex( { x : 1 } ) +assert.eq( 2 , t.getIndexes().length , "B1" ) +assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "B2" ) +t.dropIndex( { x : 1 } ) +assert.eq( 1 , t.getIndexes().length , "B3" ) + +t.ensureIndex( { x : 1 } , { sparse : 1 } ) +assert.eq( 2 , t.getIndexes().length , "C1" ) +assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "C2" ) +t.dropIndex( { x : 1 } ) +assert.eq( 1 , t.getIndexes().length , "C3" ) + +// -- sparse & unique + +t.remove( { _id : 2 } ) + +// test that we can't create a unique index without sparse +assert.writeError( t.ensureIndex( { x : 1 } , { unique : 1 } )); +assert.eq( 1 , t.getIndexes().length , "D2" ) + + +t.ensureIndex( { x : 1 } , { 
unique : 1 , sparse : 1 } ) +assert.eq( 2 , t.getIndexes().length , "E1" ) +t.dropIndex( { x : 1 } ) +assert.eq( 1 , t.getIndexes().length , "E3" ) + + +t.insert( { _id : 2 , x : 2 } ) +t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } ) +assert.eq( 1 , t.getIndexes().length , "F1" ) + + diff --git a/jstests/core/index_sparse2.js b/jstests/core/index_sparse2.js new file mode 100644 index 00000000000..56a59db3711 --- /dev/null +++ b/jstests/core/index_sparse2.js @@ -0,0 +1,23 @@ +t = db.index_sparse2; +t.drop(); + +t.insert( { _id : 1 , x : 1 , y : 1 } ) +t.insert( { _id : 2 , x : 2 } ) +t.insert( { _id : 3 } ) + +t.ensureIndex( { x : 1 , y : 1 } ) +assert.eq( 2 , t.getIndexes().length , "A1" ) +assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "A2 count()" ) +assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "A2 itcount()" ) +t.dropIndex( { x : 1 , y : 1 } ) +assert.eq( 1 , t.getIndexes().length , "A3" ) + +t.ensureIndex( { x : 1 , y : 1 } , { sparse : 1 } ) +assert.eq( 2 , t.getIndexes().length , "B1" ) +assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "B2 count()" ) +assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "B2 itcount()" ) +t.dropIndex( { x : 1 , y : 1 } ) +assert.eq( 1 , t.getIndexes().length , "B3" ) + + + diff --git a/jstests/core/indexa.js b/jstests/core/indexa.js new file mode 100644 index 00000000000..7602183adb2 --- /dev/null +++ b/jstests/core/indexa.js @@ -0,0 +1,22 @@ +// unique index constraint test for updates +// case where object doesn't grow tested here + +t = db.indexa; +t.drop(); + +t.ensureIndex( { x:1 }, true ); + +t.insert( { 'x':'A' } ); +t.insert( { 'x':'B' } ); +t.insert( { 'x':'A' } ); + +assert.eq( 2 , t.count() , "indexa 1" ); + +t.update( {x:'B'}, { x:'A' } ); + +a = t.find().toArray(); +u = Array.unique( a.map( function(z){ return z.x } ) ); +assert.eq( 2 , t.count() , "indexa 2" ); + +assert( a.length == u.length , "unique index update is broken" ); + diff --git 
a/jstests/core/indexapi.js b/jstests/core/indexapi.js new file mode 100644 index 00000000000..3e0b70ff15f --- /dev/null +++ b/jstests/core/indexapi.js @@ -0,0 +1,48 @@ + +t = db.indexapi; +t.drop(); + +key = { x : 1 }; + +c = { ns : t._fullName , key : key , name : t._genIndexName( key ) }; +assert.eq( c , t._indexSpec( { x : 1 } ) , "A" ); + +c.name = "bob"; +assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" ); + +c.name = t._genIndexName( key ); +assert.eq( c , t._indexSpec( { x : 1 } ) , "C" ); + +c.unique = true; +assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" ); +assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" ); +assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" ); + +c.dropDups = true; +assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" ); +assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "F" ); + +t.ensureIndex( { x : 1 } , { unique : true } ); +idx = t.getIndexes(); +assert.eq( 2 , idx.length , "M1" ); +assert.eq( key , idx[1].key , "M2" ); +assert( idx[1].unique , "M3" ); + +t.drop(); +t.ensureIndex( { x : 1 } , { unique : 1 } ); +idx = t.getIndexes(); +assert.eq( 2 , idx.length , "M1" ); +assert.eq( key , idx[1].key , "M2" ); +assert( idx[1].unique , "M3" ); +//printjson( idx ); + +// Test that attempting to create index in an invalid namespace fails. 
+if (db.getMongo().writeMode() == 'commands') { + assert.throws(function() { + db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } ); + }); +} +else { + assert.writeError(db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } )); +} + diff --git a/jstests/core/indexb.js b/jstests/core/indexb.js new file mode 100644 index 00000000000..d7d2e8c9f05 --- /dev/null +++ b/jstests/core/indexb.js @@ -0,0 +1,29 @@ +// unique index test for a case where the object grows +// and must move + +// see indexa.js for the test case for an update with dup id check +// when it doesn't move + + +t = db.indexb; +t.drop(); +t.ensureIndex({a:1},true); + +t.insert({a:1}); + +x = { a : 2 }; +t.save(x); + +{ + + assert( t.count() == 2, "count wrong B"); + + x.a = 1; + x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + t.save(x); // should fail, not unique. + + assert( t.count() == 2,"count wrong" ); + assert( t.find({a:1}).count() == 1,"bfail1" ); + assert( t.find({a:2}).count() == 1,"bfail2" ); + +} diff --git a/jstests/core/indexc.js b/jstests/core/indexc.js new file mode 100644 index 00000000000..b099e2d2823 --- /dev/null +++ b/jstests/core/indexc.js @@ -0,0 +1,20 @@ + +t = db.indexc; +t.drop(); + +for ( var i=1; i<100; i++ ){ + var d = new Date( ( new Date() ).getTime() + i ); + t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } ); + if ( i == 51 ) + mid = d; +} + +assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" ); +assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" ); + +t.ensureIndex( { ts : 1 , cats : 1 } ); +t.ensureIndex( { cats : 1 } ); + +// multi-key bug was firing here (related to getsetdup()): +assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" ); +assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" ); diff --git a/jstests/core/indexd.js b/jstests/core/indexd.js new file mode 100644 index 00000000000..33246ad9812 
--- /dev/null +++ b/jstests/core/indexd.js @@ -0,0 +1,10 @@ + +t = db.indexd; +t.drop(); + +t.save( { a : 1 } ); +t.ensureIndex( { a : 1 } ); +assert.throws( function(){ db.indexd.$_id_.drop(); } ); +assert( t.drop() ); + +//db.indexd.$_id_.remove({}); diff --git a/jstests/core/indexe.js b/jstests/core/indexe.js new file mode 100644 index 00000000000..e84322c6510 --- /dev/null +++ b/jstests/core/indexe.js @@ -0,0 +1,22 @@ + +t = db.indexe; +t.drop(); + +var num = 1000; + +for ( i=0; i4 is worse than >5 +// assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] ); + +printjson(t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain()) + +// SERVER-12281: We should know that in[1,2] is better than in[1,2,3]. +// assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a ); diff --git a/jstests/core/indexr.js b/jstests/core/indexr.js new file mode 100644 index 00000000000..c3eecd045c8 --- /dev/null +++ b/jstests/core/indexr.js @@ -0,0 +1,44 @@ +// Check multikey index cases with parallel nested fields SERVER-958. + +t = db.jstests_indexr; +t.drop(); + +// Check without indexes. +t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } ); +assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); + +// Check with single key indexes. 
+t.remove({}); +t.ensureIndex( {'a.b':1,'a.c':1} ); +t.ensureIndex( {a:1,'a.c':1} ); +assert.eq( 0, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 0, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); +assert.eq( 4, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); + +t.save( { a: { b: 3, c: 3 } } ); +assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 1, t.count( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); +assert.eq( 4, t.find( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); + +// Check with multikey indexes. +t.remove({}); +t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } ); + +assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); +assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); +assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] ); +assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] ); + +// Check reverse direction. +assert.eq( 1, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).itcount() ); +assert.eq( 1, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).itcount() ); + +assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).explain().indexBounds['a.c'] ); +assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).explain().indexBounds['a.c'] ); + +// Check second field is constrained if first is not. 
+assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {'a.b':1,'a.c':1} ).itcount() ); +assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {a:1,'a.c':1} ).itcount() ); diff --git a/jstests/core/indexs.js b/jstests/core/indexs.js new file mode 100644 index 00000000000..609f912affe --- /dev/null +++ b/jstests/core/indexs.js @@ -0,0 +1,21 @@ +// Test index key generation issue with parent and nested fields in same index and array containing subobject SERVER-3005. + +t = db.jstests_indexs; + +t.drop(); +t.ensureIndex( {a:1} ); +t.save( { a: [ { b: 3 } ] } ); +assert.eq( 1, t.count( { a:{ b:3 } } ) ); + +t.drop(); +t.ensureIndex( {a:1,'a.b':1} ); +t.save( { a: { b: 3 } } ); +assert.eq( 1, t.count( { a:{ b:3 } } ) ); +ib = t.find( { a:{ b:3 } } ).explain().indexBounds; + +t.drop(); +t.ensureIndex( {a:1,'a.b':1} ); +t.save( { a: [ { b: 3 } ] } ); +assert.eq( ib, t.find( { a:{ b:3 } } ).explain().indexBounds ); +assert.eq( 1, t.find( { a:{ b:3 } } ).explain().nscanned ); +assert.eq( 1, t.count( { a:{ b:3 } } ) ); diff --git a/jstests/core/indext.js b/jstests/core/indext.js new file mode 100644 index 00000000000..e418dc2e959 --- /dev/null +++ b/jstests/core/indext.js @@ -0,0 +1,21 @@ +// Sparse indexes with arrays SERVER-3216 + +t = db.jstests_indext; +t.drop(); + +t.ensureIndex( {'a.b':1}, {sparse:true} ); +t.save( {a:[]} ); +t.save( {a:1} ); +assert.eq( 0, t.find().hint( {'a.b':1} ).itcount() ); +assert.eq( 0, t.find().hint( {'a.b':1} ).explain().nscanned ); + +t.ensureIndex( {'a.b':1,'a.c':1}, {sparse:true} ); +t.save( {a:[]} ); +t.save( {a:1} ); +assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).itcount() ); +assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned ); + +t.save( {a:[{b:1}]} ); +t.save( {a:1} ); +assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).itcount() ); +assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned ); diff --git a/jstests/core/indexu.js b/jstests/core/indexu.js new file mode 100644 index 00000000000..9031d827bf4 --- 
/dev/null +++ b/jstests/core/indexu.js @@ -0,0 +1,108 @@ +// Test index key generation with duplicate values addressed by array index and +// object field. SERVER-2902 + +t = db.jstests_indexu; +t.drop(); + +var dupDoc = {a:[{'0':1}]}; // There are two 'a.0' fields in this doc. +var dupDoc2 = {a:[{'1':1},'c']}; +var noDupDoc = {a:[{'1':1}]}; + +// Test that we can't index dupDoc. +assert.writeOK( t.save( dupDoc )); +assert.writeError(t.ensureIndex( {'a.0':1} )); + +t.remove({}); +assert.writeOK(t.ensureIndex( {'a.0':1} )); +assert.writeError( t.save( dupDoc )); + +// Test that we can't index dupDoc2. +t.drop(); +assert.writeOK(t.save( dupDoc2 )); +assert.writeError(t.ensureIndex( {'a.1':1} )); + +t.remove({}); +assert.writeOK(t.ensureIndex( {'a.1':1} )); +assert.writeError(t.save( dupDoc2 )); + +// Test that we can index dupDoc with a different index. +t.drop(); +t.ensureIndex( {'a.b':1} ); +assert.writeOK(t.save( dupDoc )); + +// Test number field starting with hyphen. +t.drop(); +t.ensureIndex( {'a.-1':1} ); +assert.writeOK(t.save( {a:[{'-1':1}]} )); + +// Test number field starting with zero. +t.drop(); +t.ensureIndex( {'a.00':1} ); +assert.writeOK( t.save( {a:[{'00':1}]} )); + +// Test multiple array indexes +t.drop(); +t.ensureIndex( {'a.0':1,'a.1':1} ); +assert.writeOK( t.save( {a:[{'1':1}]} )); +assert.writeError( t.save( {a:[{'1':1},4]} )); + +// Test that we can index noDupDoc. +t.drop(); +t.save( noDupDoc ); +assert.writeOK(t.ensureIndex( {'a.0':1} )); +assert.writeOK(t.ensureIndex( {'a.1':1} )); + +t.drop(); +t.ensureIndex( {'a.0':1} ); +t.ensureIndex( {'a.1':1} ); +assert.writeOK(t.save( noDupDoc )); + +// Test that we can query noDupDoc. 
+assert.eq( 1, t.find( {'a.1':1} ).hint( {'a.1':1} ).itcount() ); +assert.eq( 1, t.find( {'a.1':1} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {'a.0':1} ).itcount() ); +assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {$natural:1} ).itcount() ); + +// Check multiple nested array fields. +t.drop(); +t.save( {a:[[1]]} ); +assert.writeOK(t.ensureIndex( {'a.0.0':1} )); +assert.eq( 1, t.find( {'a.0.0':1} ).hint( {$natural:1} ).itcount() ); +assert.eq( 1, t.find( {'a.0.0':1} ).hint( {'a.0.0':1} ).itcount() ); + +// Check where there is a duplicate for a partially addressed field but not for a fully addressed field. +t.drop(); +t.save( {a:[[1],{'0':1}]} ); +assert.writeError(t.ensureIndex( {'a.0.0':1} )); + +// Check where there is a duplicate for a fully addressed field. +t.drop(); +assert.writeOK( t.save( {a:[[1],{'0':[1]}]} )); +assert.writeError(t.ensureIndex( {'a.0.0':1} )); + +// Two ways of addressing parse to an array. +t.drop(); +t.save( {a:[{'0':1}]} ); +assert.writeError(t.ensureIndex( {'a.0.0':1} )); + +// Test several key depths - with same arrays being found. +t.drop(); +t.save( {a:[{'0':[{'0':1}]}]} ); +assert.writeError(t.ensureIndex( {'a.0.0.0.0.0.0':1} )); +assert.writeError(t.ensureIndex( {'a.0.0.0.0.0':1} )); +assert.writeError(t.ensureIndex( {'a.0.0.0.0':1} )); +assert.writeError(t.ensureIndex( {'a.0.0.0':1} )); +assert.writeError(t.ensureIndex( {'a.0.0':1} )); +assert.writeError(t.ensureIndex( {'a.0':1} )); +assert.writeOK(t.ensureIndex( {'a':1} )); + +// Two prefixes extract docs, but one terminates extraction before array. 
+t.drop(); +t.save( {a:[{'0':{'c':[]}}]} ); +assert.writeError(t.ensureIndex( {'a.0.c':1} )); + +t.drop(); +t.save( {a:[[{'b':1}]]} ); +assert.eq( 1, t.find( {'a.0.b':1} ).itcount() ); +t.ensureIndex( {'a.0.b':1} ); +assert.eq( 1, t.find( {'a.0.b':1} ).itcount() ); diff --git a/jstests/core/indexv.js b/jstests/core/indexv.js new file mode 100644 index 00000000000..334ec432d74 --- /dev/null +++ b/jstests/core/indexv.js @@ -0,0 +1,18 @@ +// Check null key generation. + +t = db.jstests_indexv; +t.drop(); + +t.ensureIndex( {'a.b':1} ); + +t.save( {a:[{},{b:1}]} ); +var e = t.find( {'a.b':null} ).explain(); +assert.eq( 1, e.n ); +assert.eq( 1, e.nscanned ); + +t.drop(); +t.ensureIndex( {'a.b.c':1} ); +t.save( {a:[{b:[]},{b:{c:1}}]} ); +var e = t.find( {'a.b.c':null} ).explain(); +assert.eq( 0, e.n ); +assert.eq( 1, e.nscanned ); diff --git a/jstests/core/indexw.js b/jstests/core/indexw.js new file mode 100644 index 00000000000..bd7c75b8b08 --- /dev/null +++ b/jstests/core/indexw.js @@ -0,0 +1,15 @@ +// Check that v0 keys are generated for v0 indexes SERVER-3375 + +t = db.jstests_indexw; +t.drop(); + +t.save( {a:[]} ); +assert.eq( 1, t.count( {a:[]} ) ); +t.ensureIndex( {a:1} ); +assert.eq( 1, t.count( {a:[]} ) ); +t.dropIndexes(); + +// The count result is incorrect - just checking here that v0 key generation is used. +t.ensureIndex( {a:1}, {v:0} ); +// QUERY_MIGRATION: WE GET THIS RIGHT...BY CHANCE? 
+// assert.eq( 0, t.count( {a:[]} ) ); diff --git a/jstests/core/insert1.js b/jstests/core/insert1.js new file mode 100644 index 00000000000..e4ce6a4e10e --- /dev/null +++ b/jstests/core/insert1.js @@ -0,0 +1,34 @@ +t = db.insert1; +t.drop(); + +var o = {a:1}; +t.insert(o); +var doc = t.findOne(); +assert.eq(1, doc.a); +assert(doc._id != null, tojson(doc)); + +t.drop(); +o = {a:2, _id:new ObjectId()}; +var id = o._id; +t.insert(o); +doc = t.findOne(); +assert.eq(2, doc.a); +assert.eq(id, doc._id); + +t.drop(); +o = {a:3, _id:"asdf"}; +id = o._id; +t.insert(o); +doc = t.findOne(); +assert.eq(3, doc.a); +assert.eq(id, doc._id); + +t.drop(); +o = {a:4, _id:null}; +t.insert(o); +doc = t.findOne(); +assert.eq(4, doc.a); +assert.eq(null, doc._id, tojson(doc)); + +var stats = db.runCommand({ collstats: "insert1" }); +assert(stats.paddingFactor == 1.0); diff --git a/jstests/core/insert2.js b/jstests/core/insert2.js new file mode 100644 index 00000000000..8ce4b25c25f --- /dev/null +++ b/jstests/core/insert2.js @@ -0,0 +1,13 @@ +// Create a new connection object so it won't affect the global connection when we modify +// its settings. 
+var conn = new Mongo(db.getMongo().host); +conn._skipValidation = true; +conn.forceWriteMode(db.getMongo().writeMode()); + +t = conn.getDB(db.getName()).insert2; +t.drop(); + +assert.isnull( t.findOne() , "A" ) +assert.writeError(t.insert( { z : 1 , $inc : { x : 1 } } , 0, true )); +assert.isnull( t.findOne() , "B" ) + diff --git a/jstests/core/insert_id_undefined.js b/jstests/core/insert_id_undefined.js new file mode 100644 index 00000000000..77d7d983549 --- /dev/null +++ b/jstests/core/insert_id_undefined.js @@ -0,0 +1,5 @@ +// ensure a document with _id undefined cannot be saved +t = db.insert_id_undefined; +t.drop(); +t.insert({_id:undefined}); +assert.eq(t.count(), 0); diff --git a/jstests/core/insert_illegal_doc.js b/jstests/core/insert_illegal_doc.js new file mode 100644 index 00000000000..2b4d326e9ce --- /dev/null +++ b/jstests/core/insert_illegal_doc.js @@ -0,0 +1,22 @@ +// SERVER-12185: Do not allow insertion or update of docs which will fail the +// "parallel indexing of arrays" test +var coll = db.insert_illegal_doc; +coll.drop(); +coll.ensureIndex({a: 1, b: 1}); + +// test upsert +coll.update({}, {_id: 1, a: [1, 2, 3], b: [4, 5, 6]}, true); +assert.gleErrorCode(db, 10088); +assert.eq(0, coll.find().itcount(), "should not be a doc"); + +// test insert +coll.insert({_id: 1, a: [1, 2, 3], b: [4, 5, 6]}); +assert.gleErrorCode(db, 10088); +assert.eq(0, coll.find().itcount(), "should not be a doc"); + +// test update +coll.insert({_id: 1}); +assert.gleSuccess(db, "insert failed"); +coll.update({_id: 1}, {$set : { a : [1, 2, 3], b: [4, 5, 6]}}); +assert.gleErrorCode(db, 10088); +assert.eq(undefined, coll.findOne().a, "update should have failed"); diff --git a/jstests/core/insert_long_index_key.js b/jstests/core/insert_long_index_key.js new file mode 100644 index 00000000000..6379c36fb4a --- /dev/null +++ b/jstests/core/insert_long_index_key.js @@ -0,0 +1,10 @@ +t = db.insert_long_index_key; +t.drop(); + +var s = new Array(2000).toString(); +t.ensureIndex( { 
x : 1 } ); + +t.insert({ x: 1 }); +t.insert({ x: s }); + +assert.eq( 1, t.count() ); diff --git a/jstests/core/ismaster.js b/jstests/core/ismaster.js new file mode 100644 index 00000000000..0c385b02d7c --- /dev/null +++ b/jstests/core/ismaster.js @@ -0,0 +1,28 @@ +var res = db.isMaster(); +// check that the fields that should be there are there and have proper values +assert( res.maxBsonObjectSize && + isNumber(res.maxBsonObjectSize) && + res.maxBsonObjectSize > 0, "maxBsonObjectSize possibly missing:" + tojson(res)); +assert( res.maxMessageSizeBytes && + isNumber(res.maxMessageSizeBytes) && + res.maxBsonObjectSize > 0, "maxMessageSizeBytes possibly missing:" + tojson(res)); +assert( res.maxWriteBatchSize && + isNumber(res.maxWriteBatchSize) && + res.maxWriteBatchSize > 0, "maxWriteBatchSize possibly missing:" + tojson(res)); +assert(res.ismaster, "ismaster missing or false:" + tojson(res)); +assert(res.localTime, "localTime possibly missing:" + tojson(res)); +var unwantedFields = ["setName", "setVersion", "secondary", "hosts", "passives", "arbiters", + "primary", "aribterOnly", "passive", "slaveDelay", "hidden", "tags", + "buildIndexes", "me"]; +// check that the fields that shouldn't be there are not there +var badFields = []; +for (field in res) { + if (!res.hasOwnProperty(field)){ + continue; + } + if (Array.contains(unwantedFields, field)) { + badFields.push(field); + } +} +assert(badFields.length === 0, "\nthe result:\n" + tojson(res) + + "\ncontained fields it shouldn't have: " + badFields); diff --git a/jstests/count.js b/jstests/count.js deleted file mode 100644 index 5502d7176c1..00000000000 --- a/jstests/count.js +++ /dev/null @@ -1,25 +0,0 @@ -t = db.jstests_count; - -t.drop(); -t.save( { i: 1 } ); -t.save( { i: 2 } ); -assert.eq( 1, t.find( { i: 1 } ).count(), "A" ); -assert.eq( 1, t.count( { i: 1 } ) , "B" ); -assert.eq( 2, t.find().count() , "C" ); -assert.eq( 2, t.find( undefined ).count() , "D" ); -assert.eq( 2, t.find( null ).count() , "E" ); 
-assert.eq( 2, t.count() , "F" ); - -t.drop(); -t.save( {a:true,b:false} ); -t.ensureIndex( {b:1,a:1} ); -assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" ); -assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" ); - -t.drop(); -t.save( {a:true,b:false} ); -t.ensureIndex( {b:1,a:1,c:1} ); - -assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" ); -assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" ); - diff --git a/jstests/count10.js b/jstests/count10.js deleted file mode 100644 index ed966ffe3fe..00000000000 --- a/jstests/count10.js +++ /dev/null @@ -1,61 +0,0 @@ -// Test that interrupting a count returns an error code. - -t = db.count10; -t.drop(); - -for ( i=0; i<100; i++ ){ - t.save( { x : i } ); -} -// Make sure data is written. -db.getLastError(); - -// Start a parallel shell which repeatedly checks for a count -// query using db.currentOp(). As soon as the op is found, -// kill it via db.killOp(). -s = startParallelShell( - 'assert.soon(function() {' + - ' current = db.currentOp({"ns": db.count10.getFullName(), ' + - ' "query.count": db.count10.getName()}); ' + - - // Check that we found the count op. If not, return false so - // that assert.soon will retry. - ' assert("inprog" in current); ' + - ' if (current.inprog.length === 0) { ' + - ' jsTest.log("count10.js: did not find count op, retrying"); ' + - ' printjson(current); ' + - ' return false; ' + - ' } ' + - ' countOp = current.inprog[0]; ' + - ' if (!countOp) { ' + - ' jsTest.log("count10.js: did not find count op, retrying"); ' + - ' printjson(current); ' + - ' return false; ' + - ' } ' + - - // Found the count op. Try to kill it. 
- ' jsTest.log("count10.js: found count op:"); ' + - ' printjson(current); ' + - ' printjson(db.killOp(countOp.opid)); ' + - ' return true; ' + - '}, "count10.js: could not find count op after retrying, gave up");' -); - -function getKilledCount() { - try { - db.count10.find("sleep(1000)").count(); - jsTest.log("count10.js: count op completed without being killed"); - } catch (e) { - return e; - } -} - -var res = getKilledCount(); -jsTest.log("count10.js: killed count output start"); -printjson(res); -jsTest.log("count10.js: killed count output end"); -assert(res); -assert(res.match(/count failed/) !== null); -assert(res.match(/\"code\"/) !== null); - -s(); - diff --git a/jstests/count2.js b/jstests/count2.js deleted file mode 100644 index 4d060aaac20..00000000000 --- a/jstests/count2.js +++ /dev/null @@ -1,28 +0,0 @@ -t = db.count2; -t.drop(); - -for ( var i=0; i<1000; i++ ){ - t.save( { num : i , m : i % 20 } ); -} - -assert.eq( 1000 , t.count() , "A" ) -assert.eq( 1000 , t.find().count() , "B" ) -assert.eq( 1000 , t.find().toArray().length , "C" ) - -assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" ) -assert.eq( 50 , t.find( { m : 5 } ).count() , "E" ) - -assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" ) -assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" ) -assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" ) - -assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" ) -assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" ) -assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" ) - -assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" ) - -// Negative skip values should return error -var negSkipResult = db.runCommand({ count: 't', skip : -2 }); -assert( ! 
negSkipResult.ok , "negative skip value shouldn't work, n = " + negSkipResult.n ); -assert( negSkipResult.errmsg.length > 0 , "no error msg for negative skip" ); diff --git a/jstests/count3.js b/jstests/count3.js deleted file mode 100644 index a8c3ef5faad..00000000000 --- a/jstests/count3.js +++ /dev/null @@ -1,26 +0,0 @@ - -t = db.count3; - -t.drop(); - -t.save( { a : 1 } ); -t.save( { a : 1 , b : 2 } ); - -assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" ); -assert.eq( 2 , t.find( { a : 1 } ).count() , "B" ); - -assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" ); -assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" ); - -t.drop(); - -t.save( { a : 1 } ); - -assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" ); -assert.eq( 1 , t.find( { a : 1 } ).count() , "F" ); - -assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" ); -assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" ); - - - diff --git a/jstests/count4.js b/jstests/count4.js deleted file mode 100644 index 7be74362603..00000000000 --- a/jstests/count4.js +++ /dev/null @@ -1,17 +0,0 @@ - -t = db.count4; -t.drop(); - -for ( i=0; i<100; i++ ){ - t.save( { x : i } ); -} - -q = { x : { $gt : 25 , $lte : 75 } } - -assert.eq( 50 , t.find( q ).count() , "A" ); -assert.eq( 50 , t.find( q ).itcount() , "B" ); - -t.ensureIndex( { x : 1 } ); - -assert.eq( 50 , t.find( q ).count() , "C" ); -assert.eq( 50 , t.find( q ).itcount() , "D" ); diff --git a/jstests/count5.js b/jstests/count5.js deleted file mode 100644 index b6bbc543352..00000000000 --- a/jstests/count5.js +++ /dev/null @@ -1,30 +0,0 @@ - -t = db.count5; -t.drop(); - -for ( i=0; i<100; i++ ){ - t.save( { x : i } ); -} - -q = { x : { $gt : 25 , $lte : 75 } }; - -assert.eq( 50 , t.find( q ).count() , "A" ); -assert.eq( 50 , t.find( q ).itcount() , "B" ); - -t.ensureIndex( { x : 1 } ); - -assert.eq( 50 , t.find( q ).count() , "C" ); -assert.eq( 50 , t.find( q ).itcount() , "D" ); - -assert.eq( 50 , t.find( q 
).limit(1).count() , "E" ); -assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" ); - -assert.eq( 5 , t.find( q ).limit(5).size() , "G" ); -assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" ); -assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" ); - -assert.eq( 20 , t.find().limit(20).size() , "J" ); - -assert.eq( 0 , t.find().skip(120).size() , "K" ); -assert.eq( 1 , db.$cmd.findOne( { count: "count5" } )["ok"] , "L" ); -assert.eq( 1 , db.$cmd.findOne( { count: "count5", skip: 120 } )["ok"] , "M" ); diff --git a/jstests/count6.js b/jstests/count6.js deleted file mode 100644 index 44c5fa33bc7..00000000000 --- a/jstests/count6.js +++ /dev/null @@ -1,61 +0,0 @@ -// Some correctness checks for fast and normal count modes, including with skip and limit. - -t = db.jstests_count6; - -function checkCountForObject( obj ) { - t.drop(); - t.ensureIndex( {b:1,a:1} ); - - function checkCounts( query, expected ) { - assert.eq( expected, t.count( query ) , "A1" ); - assert.eq( expected, t.find( query ).skip( 0 ).limit( 0 ).count( true ) , "A2" ); - // Check proper counts with various skip and limit specs. 
- for( var skip = 1; skip <= 2; ++skip ) { - for( var limit = 1; limit <= 2; ++limit ) { - assert.eq( Math.max( expected - skip, 0 ), t.find( query ).skip( skip ).count( true ) , "B1" ); - assert.eq( Math.min( expected, limit ), t.find( query ).limit( limit ).count( true ) , "B2" ); - assert.eq( Math.min( Math.max( expected - skip, 0 ), limit ), t.find( query ).skip( skip ).limit( limit ).count( true ) , "B4" ); - - // Check limit(x) = limit(-x) - assert.eq( t.find( query ).limit( limit ).count( true ), - t.find( query ).limit( -limit ).count( true ) , "C1" ); - assert.eq( t.find( query ).skip( skip ).limit( limit ).count( true ), - t.find( query ).skip( skip ).limit( -limit ).count( true ) , "C2" ); - } - } - - // Check limit(0) has no effect - assert.eq( expected, t.find( query ).limit( 0 ).count( true ) , "D1" ); - assert.eq( Math.max( expected - skip, 0 ), - t.find( query ).skip( skip ).limit( 0 ).count( true ) , "D2" ); - assert.eq( expected, t.getDB().runCommand({ count: t.getName(), - query: query, limit: 0 }).n , "D3" ); - assert.eq( Math.max( expected - skip, 0 ), - t.getDB().runCommand({ count: t.getName(), - query: query, limit: 0, skip: skip }).n , "D4" ); - } - - for( var i = 0; i < 5; ++i ) { - checkCounts( {a:obj.a,b:obj.b}, i ); - checkCounts( {b:obj.b,a:obj.a}, i ); - t.insert( obj ); - } - - t.insert( {a:true,b:true} ); - t.insert( {a:true,b:1} ); - t.insert( {a:false,b:1} ); - t.insert( {a:false,b:true} ); - t.insert( {a:false,b:false} ); - - checkCounts( {a:obj.a,b:obj.b}, i ); - checkCounts( {b:obj.b,a:obj.a}, i ); - - // Check with no query - checkCounts( {}, 10 ); -} - -// Check fast count mode. -checkCountForObject( {a:true,b:false} ); - -// Check normal count mode. -checkCountForObject( {a:1,b:0} ); diff --git a/jstests/count7.js b/jstests/count7.js deleted file mode 100644 index c2c1260d49b..00000000000 --- a/jstests/count7.js +++ /dev/null @@ -1,25 +0,0 @@ -// Check normal count matching and deduping. 
- -t = db.jstests_count7; -t.drop(); - -t.ensureIndex( {a:1} ); -t.save( {a:'algebra'} ); -t.save( {a:'apple'} ); -t.save( {a:'azores'} ); -t.save( {a:'bumper'} ); -t.save( {a:'supper'} ); -t.save( {a:'termite'} ); -t.save( {a:'zeppelin'} ); -t.save( {a:'ziggurat'} ); -t.save( {a:'zope'} ); - -assert.eq( 5, t.count( {a:/p/} ) ); - -t.remove({}); - -t.save( {a:[1,2,3]} ); -t.save( {a:[1,2,3]} ); -t.save( {a:[1]} ); - -assert.eq( 2, t.count( {a:{$gt:1}} ) ); diff --git a/jstests/count9.js b/jstests/count9.js deleted file mode 100644 index 888ffe3b544..00000000000 --- a/jstests/count9.js +++ /dev/null @@ -1,28 +0,0 @@ -// Test fast mode count with multikey entries. - -t = db.jstests_count9; -t.drop(); - -t.ensureIndex( {a:1} ); - -t.save( {a:['a','b','a']} ); -assert.eq( 1, t.count( {a:'a'} ) ); - -t.save( {a:['a','b','a']} ); -assert.eq( 2, t.count( {a:'a'} ) ); - -t.drop(); -t.ensureIndex( {a:1,b:1} ); - -t.save( {a:['a','b','a'],b:'r'} ); -assert.eq( 1, t.count( {a:'a',b:'r'} ) ); -assert.eq( 1, t.count( {a:'a'} ) ); - -t.save( {a:['a','b','a'],b:'r'} ); -assert.eq( 2, t.count( {a:'a',b:'r'} ) ); -assert.eq( 2, t.count( {a:'a'} ) ); - -t.drop(); -t.ensureIndex( {'a.b':1,'a.c':1} ); -t.save( {a:[{b:'b',c:'c'},{b:'b',c:'c'}]} ); -assert.eq( 1, t.count( {'a.b':'b','a.c':'c'} ) ); diff --git a/jstests/count_hint.js b/jstests/count_hint.js deleted file mode 100644 index 93322d282db..00000000000 --- a/jstests/count_hint.js +++ /dev/null @@ -1,20 +0,0 @@ -// test passing hint to the count cmd -// hints are ignored if there is no query predicate -t = db.jstests_count_hint; -t.drop(); - -t.save( { i: 1 } ); -t.save( { i: 2 } ); -assert.eq( 2, t.find().count() ); - -t.ensureIndex( { i:1 } ); - -assert.eq( 1, t.find( { i: 1 } ).hint( "_id_" ).count(), "A" ); -assert.eq( 2, t.find().hint( "_id_" ).count(), "B" ); -assert.throws( function() { t.find( { i: 1 } ).hint( "BAD HINT" ).count(); } ); - -// create a sparse index which should have no entries -t.ensureIndex( { x:1 }, { 
sparse:true } ); - -assert.eq( 0, t.find( { i: 1 } ).hint( "x_1" ).count(), "C" ); -assert.eq( 2, t.find().hint( "x_1" ).count(), "D" ); diff --git a/jstests/counta.js b/jstests/counta.js deleted file mode 100644 index f0834d455dd..00000000000 --- a/jstests/counta.js +++ /dev/null @@ -1,14 +0,0 @@ -// Check that count returns 0 in some exception cases. - -t = db.jstests_counta; -t.drop(); - -for( i = 0; i < 10; ++i ) { - t.save( {a:i} ); -} - -// f() is undefined, causing an assertion -assert.throws( - function(){ - t.count( { $where:function() { if ( this.a < 5 ) { return true; } else { f(); } } } ); - } ); diff --git a/jstests/countb.js b/jstests/countb.js deleted file mode 100644 index 8f7131a5a6c..00000000000 --- a/jstests/countb.js +++ /dev/null @@ -1,11 +0,0 @@ -// Test fast count mode with single key index unsatisfiable constraints on a multi key index. - -t = db.jstests_countb; -t.drop(); - -t.ensureIndex( {a:1} ); -t.save( {a:['a','b']} ); -assert.eq( 0, t.find( {a:{$in:['a'],$gt:'b'}} ).count() ); -assert.eq( 0, t.find( {$and:[{a:'a'},{a:{$gt:'b'}}]} ).count() ); -assert.eq( 1, t.find( {$and:[{a:'a'},{$where:"this.a[1]=='b'"}]} ).count() ); -assert.eq( 0, t.find( {$and:[{a:'a'},{$where:"this.a[1]!='b'"}]} ).count() ); diff --git a/jstests/countc.js b/jstests/countc.js deleted file mode 100644 index 260dbb1f264..00000000000 --- a/jstests/countc.js +++ /dev/null @@ -1,124 +0,0 @@ -// In fast count mode the Matcher is bypassed when matching can be performed by a BtreeCursor and -// its delegate FieldRangeVector or an IntervalBtreeCursor. The tests below check that fast count -// mode is implemented appropriately in specific cases. -// -// SERVER-1752 - -t = db.jstests_countc; -t.drop(); - - -// Match a subset of inserted values within a $in operator. -t.drop(); -t.ensureIndex( { a:1 } ); -// Save 'a' values 0, 0.5, 1.5, 2.5 ... 97.5, 98.5, 99. 
-t.save( { a:0 } ); -t.save( { a:99 } ); -for( i = 0; i < 99; ++i ) { - t.save( { a:( i + 0.5 ) } ); -} -// Query 'a' values $in 0, 1, 2, ..., 99. -vals = []; -for( i = 0; i < 100; ++i ) { - vals.push( i ); -} -// Only values 0 and 99 of the $in set are present in the collection, so the expected count is 2. -assert.eq( 2, t.count( { a:{ $in:vals } } ) ); - - -// Match 'a' values within upper and lower limits. -t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:[ 1, 2 ] } ); // Will match because 'a' is in range. -t.save( { a:9 } ); // Will not match because 'a' is not in range. -// Only one document matches. -assert.eq( 1, t.count( { a:{ $gt:0, $lt:5 } } ) ); - - -// Match two nested fields within an array. -t.drop(); -t.ensureIndex( { 'a.b':1, 'a.c':1 } ); -t.save( { a:[ { b:2, c:3 }, {} ] } ); -// The document does not match because its c value is 3. -assert.eq( 0, t.count( { 'a.b':2, 'a.c':2 } ) ); - - -// $gt:string only matches strings. -t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:'a' } ); // Will match. -t.save( { a:{} } ); // Will not match because {} is not a string. -// Only one document matches. -assert.eq( 1, t.count( { a:{ $gte:'' } } ) ); - - -// $lte:date only matches dates. -t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:new Date( 1 ) } ); // Will match. -t.save( { a:true } ); // Will not match because 'true' is not a date. -// Only one document matches. -assert.eq( 1, t.count( { a:{ $lte:new Date( 1 ) } } ) ); - - -// Querying for 'undefined' triggers an error. -t.drop(); -t.ensureIndex( { a:1 } ); -assert.throws( function() { t.count( { a:undefined } ); } ); - - -// Count using a descending order index. -t.drop(); -t.ensureIndex( { a:-1 } ); -t.save( { a:1 } ); -t.save( { a:2 } ); -t.save( { a:3 } ); -assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); -assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); -assert.eq( 2, t.count( { a:{ $lte:2 } } ) ); -assert.eq( 2, t.count( { a:{ $lt:3 } } ) ); - - -// Count using a compound index. 
-t.drop(); -t.ensureIndex( { a:1, b:1 } ); -t.save( { a:1, b:2 } ); -t.save( { a:2, b:1 } ); -t.save( { a:2, b:3 } ); -t.save( { a:3, b:4 } ); -assert.eq( 1, t.count( { a:1 })); -assert.eq( 2, t.count( { a:2 })); -assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); -assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); -assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) ); -assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) ); -assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) ); - - -// Count using a compound descending order index. -t.drop(); -t.ensureIndex( { a:1, b:-1 } ); -t.save( { a:1, b:2 } ); -t.save( { a:2, b:1 } ); -t.save( { a:2, b:3 } ); -t.save( { a:3, b:4 } ); -assert.eq( 1, t.count( { a:{ $gt:2 } } ) ); -assert.eq( 1, t.count( { a:{ $lt:2 } } ) ); -assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) ); -assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) ); -assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) ); - - -// Count with a multikey value. -t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:[ 1, 2 ] } ); -assert.eq( 1, t.count( { a:{ $gt:0, $lte:2 } } ) ); - - -// Count with a match constraint on an unindexed field. 
-t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:1, b:1 } ); -t.save( { a:1, b:2 } ); -assert.eq( 1, t.count( { a:1, $where:'this.b == 1' } ) ); diff --git a/jstests/coveredIndex1.js b/jstests/coveredIndex1.js deleted file mode 100644 index ce11f89ceed..00000000000 --- a/jstests/coveredIndex1.js +++ /dev/null @@ -1,64 +0,0 @@ - -t = db["jstests_coveredIndex1"]; -t.drop(); - -t.save({fn: "john", ln: "doe"}) -t.save({fn: "jack", ln: "doe"}) -t.save({fn: "john", ln: "smith"}) -t.save({fn: "jack", ln: "black"}) -t.save({fn: "bob", ln: "murray"}) -t.save({fn: "aaa", ln: "bbb", obj: {a: 1, b: "blah"}}) -assert.eq( t.findOne({ln: "doe"}).fn, "john", "Cannot find right record" ); -assert.eq( t.count(), 6, "Not right length" ); - -// use simple index -t.ensureIndex({ln: 1}); -assert.eq( t.find({ln: "doe"}).explain().indexOnly, false, "Find using covered index but all fields are returned"); -assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find using covered index but _id is returned"); -assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); - -// this time, without a query spec -// SERVER-2109 -//assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); -assert.eq( t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().indexOnly, true, "Find is not using covered index"); - -// use compound index -t.dropIndex({ln: 1}) -t.ensureIndex({ln: 1, fn: 1}); -// return 1 field -assert.eq( t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); -// return both fields, multiple docs returned -assert.eq( t.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); -// match 1 record using both fields -assert.eq( t.find({ln: "doe", fn: "john"}, {ln: 1, fn: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); -// change ordering -assert.eq( t.find({fn: "john", 
ln: "doe"}, {fn: 1, ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); -// ask from 2nd index key -assert.eq( t.find({fn: "john"}, {fn: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key"); - -// repeat above but with _id field -t.dropIndex({ln: 1, fn: 1}) -t.ensureIndex({_id: 1, ln: 1}); -// return 1 field -assert.eq( t.find({_id: 123, ln: "doe"}, {_id: 1}).explain().indexOnly, true, "Find is not using covered index"); -// match 1 record using both fields -assert.eq( t.find({_id: 123, ln: "doe"}, {ln: 1}).explain().indexOnly, true, "Find is not using covered index"); -// change ordering -assert.eq( t.find({ln: "doe", _id: 123}, {ln: 1, _id: 1}).explain().indexOnly, true, "Find is not using covered index"); -// ask from 2nd index key -assert.eq( t.find({ln: "doe"}, {ln: 1}).explain().indexOnly, false, "Find is using covered index, but doesnt have 1st key"); - -// repeat above but with embedded obj -t.dropIndex({_id: 1, ln: 1}) -t.ensureIndex({obj: 1}); -assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object"); -assert.eq( t.find({obj: {a: 1, b: "blah"}}).explain().indexOnly, false, "Index doesnt have all fields to cover"); -assert.eq( t.find({obj: {a: 1, b: "blah"}}, {obj: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); - -// repeat above but with index on sub obj field -t.dropIndex({obj: 1}); -t.ensureIndex({"obj.a": 1, "obj.b": 1}) -assert.eq( t.find({"obj.a": 1}, {obj: 1}).explain().indexOnly, false, "Shouldnt use index when introspecting object"); - -assert(t.validate().valid); - diff --git a/jstests/coveredIndex2.js b/jstests/coveredIndex2.js deleted file mode 100644 index 56a23f43211..00000000000 --- a/jstests/coveredIndex2.js +++ /dev/null @@ -1,18 +0,0 @@ -t = db["jstests_coveredIndex2"]; -t.drop(); - -t.save({a: 1}) -t.save({a: 2}) -assert.eq( t.findOne({a: 1}).a, 1, "Cannot find right record" ); 
-assert.eq( t.count(), 2, "Not right length" ); - -// use simple index -t.ensureIndex({a: 1}); -assert.eq( t.find({a:1}).explain().indexOnly, false, "Find using covered index but all fields are returned"); -assert.eq( t.find({a:1}, {a: 1}).explain().indexOnly, false, "Find using covered index but _id is returned"); -assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index"); - -// add multikey -t.save({a:[3,4]}) -assert.eq( t.find({a:1}, {a: 1, _id: 0}).explain().indexOnly, false, "Find is using covered index even after multikey insert"); - diff --git a/jstests/coveredIndex3.js b/jstests/coveredIndex3.js deleted file mode 100644 index 66180342605..00000000000 --- a/jstests/coveredIndex3.js +++ /dev/null @@ -1,54 +0,0 @@ -// Check proper covered index handling when query and processGetMore yield. -// SERVER-4975 - -if ( 0 ) { // SERVER-4975 - -t = db.jstests_coveredIndex3; -t2 = db.jstests_coveredIndex3_other; -t.drop(); -t2.drop(); - -function doTest( batchSize ) { - - // Insert an array, which will make the { a:1 } index multikey and should disable covered index - // matching. - p1 = startParallelShell( - 'for( i = 0; i < 60; ++i ) { \ - db.jstests_coveredIndex3.save( { a:[ 2000, 2001 ] } ); \ - sleep( 300 ); \ - }' - ); - - // Frequent writes cause the find operation to yield. - p2 = startParallelShell( - 'for( i = 0; i < 1800; ++i ) { \ - db.jstests_coveredIndex3_other.save( {} ); \ - sleep( 10 ); \ - }' - ); - - for( i = 0; i < 30; ++i ) { - t.drop(); - t.ensureIndex( { a:1 } ); - - for( j = 0; j < 1000; ++j ) { - t.save( { a:j } ); - } - - c = t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).batchSize( batchSize ); - while( c.hasNext() ) { - o = c.next(); - // If o contains a high numeric 'a' value, it must come from an array saved in p1. - assert( !( o.a > 1500 ), 'improper object returned ' + tojson( o ) ); - } - } - - p1(); - p2(); - -} - -doTest( 2000 ); // Test query. 
-doTest( 500 ); // Try to test getMore - not clear if this will actually trigger the getMore issue. - -} diff --git a/jstests/coveredIndex4.js b/jstests/coveredIndex4.js deleted file mode 100644 index 136eba603cf..00000000000 --- a/jstests/coveredIndex4.js +++ /dev/null @@ -1,40 +0,0 @@ -// Test covered index projection with $or clause, specifically in getMore -// SERVER-4980 - -t = db.jstests_coveredIndex4; -t.drop(); - -t.ensureIndex( { a:1 } ); -t.ensureIndex( { b:1 } ); - -orClause = []; -for( i = 0; i < 200; ++i ) { - if ( i % 2 == 0 ) { - t.save( { a:i } ); - orClause.push( { a:i } ); - } - else { - t.save( { b:i } ); - orClause.push( { b:i } ); - } -} - -c = t.find( { $or:orClause }, { _id:0, a:1 } ); - -// No odd values of a were saved, so we should not see any in the results. -while( c.hasNext() ) { - o = c.next(); - if ( o.a ) { - assert.eq( 0, o.a % 2, 'unexpected result: ' + tojson( o ) ); - } -} - -c = t.find( { $or:orClause }, { _id:0, b:1 } ); - -// No even values of b were saved, so we should not see any in the results. -while( c.hasNext() ) { - o = c.next(); - if ( o.b ) { - assert.eq( 1, o.b % 2, 'unexpected result: ' + tojson( o ) ); - } -} diff --git a/jstests/coveredIndex5.js b/jstests/coveredIndex5.js deleted file mode 100644 index ee383cd93e2..00000000000 --- a/jstests/coveredIndex5.js +++ /dev/null @@ -1,70 +0,0 @@ -// Test use of covered indexes when there are multiple candidate indexes. 
- -t = db.jstests_coveredIndex5; -t.drop(); - -t.ensureIndex( { a:1, b:1 } ); -t.ensureIndex( { a:1, c:1 } ); - -function checkFields( query, projection ) { - t.ensureIndex( { z:1 } ); // clear query patterns - t.dropIndex( { z:1 } ); - - results = t.find( query, projection ).toArray(); - - expectedFields = []; - for ( k in projection ) { - if ( k != '_id' ) { - expectedFields.push( k ); - } - } - - vals = []; - for ( i in results ) { - r = results[ i ]; - printjson(r); - assert.eq( 0, r.a ); - assert.eq( expectedFields, Object.keySet( r ) ); - for ( k in projection ) { - if ( k != '_id' && k != 'a' ) { - vals.push( r[ k ] ); - } - } - } - - if ( vals.length != 0 ) { - vals.sort(); - assert.eq( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], vals ); - } -} - -function checkCursorCovered( cursor, covered, count, query, projection ) { - checkFields( query, projection ); - explain = t.find( query, projection ).explain( true ); - if (covered) { - assert.eq( cursor, explain.cursor ); - } - assert.eq( covered, explain.indexOnly ); - assert.eq( count, explain.n ); -} - -for( i = 0; i < 10; ++i ) { - t.save( { a:0, b:i, c:9-i } ); -} - -checkCursorCovered( 'BtreeCursor a_1_b_1', true, 10, { a:0 }, { _id:0, a:1 } ); - -checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1 } ); -checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, b:1 } ); - -// Covered index on a,c not preferentially selected. 
-checkCursorCovered( 'BtreeCursor a_1_b_1', false, 10, { a:0, d:null }, { _id:0, a:1, c:1 } ); - -t.save( { a:0, c:[ 1, 2 ] } ); -t.save( { a:1 } ); -checkCursorCovered( 'BtreeCursor a_1_b_1', false, 11, { a:0, d:null }, { _id:0, a:1 } ); - -t.save( { a:0, b:[ 1, 2 ] } ); -t.save( { a:1 } ); -checkCursorCovered( 'BtreeCursor a_1_b_1', false, 12, { a:0, d:null }, { _id:0, a:1 } ); - diff --git a/jstests/covered_index_compound_1.js b/jstests/covered_index_compound_1.js deleted file mode 100644 index 7e529785d12..00000000000 --- a/jstests/covered_index_compound_1.js +++ /dev/null @@ -1,45 +0,0 @@ -// Compound index covered query tests - -var coll = db.getCollection("covered_compound_1") -coll.drop() -for (i=0;i<100;i++) { - coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)}) -} -coll.ensureIndex({a:1,b:-1,c:1}) - -// Test equality - all indexed fields queried and projected -var plan = coll.find({a:10, b:"strvar_10", c:0}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query") - -// Test query on subset of fields queried and project all -var plan = coll.find({a:26, b:"strvar_0"}, {a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.2 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.2 - nscannedObjects should be 0 for covered query") - -// Test query on all fields queried and project subset -var plan = coll.find({a:38, b:"strvar_12", c: 8}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.3 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.3 - nscannedObjects should be 0 for covered query") - -// Test no query -var plan = coll.find({}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() 
-assert.eq(true, plan.indexOnly, "compound.1.4 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.4 - nscannedObjects should be 0 for covered query") - -// Test range query -var plan = coll.find({a:{$gt:25,$lt:43}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.5 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.5 - nscannedObjects should be 0 for covered query") - -// Test in query -var plan = coll.find({a:38, b:"strvar_12", c:{$in:[5,8]}}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.6 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.6 - nscannedObjects should be 0 for covered query") - -// Test no result -var plan = coll.find({a:38, b:"strvar_12", c:55},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.7 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.7 - nscannedObjects should be 0 for covered query") - -print('all tests passed') diff --git a/jstests/covered_index_geo_1.js b/jstests/covered_index_geo_1.js deleted file mode 100644 index 1d647dfa94c..00000000000 --- a/jstests/covered_index_geo_1.js +++ /dev/null @@ -1,18 +0,0 @@ -var coll = db.getCollection("covered_geo_1") -coll.drop() - -coll.insert({_id : 1, loc : [ 5 , 5 ], type : "type1"}) -coll.insert({_id : 2, loc : [ 6 , 6 ], type : "type2"}) -coll.insert({_id : 3, loc : [ 7 , 7 ], type : "type3"}) - -coll.ensureIndex({loc : "2d", type : 1}); - -var plan = coll.find({loc : [ 6 , 6 ]}, {loc:1, type:1, _id:0}).hint({loc:"2d", type:1}).explain(); -assert.eq(false, plan.indexOnly, "geo.1.1 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "geo.1.1 - nscannedObjects should not be 0 for a non covered query") - -var plan = coll.find({loc : [ 6 , 
6 ]}, {type:1, _id:0}).hint({loc:"2d", type:1}).explain(); -assert.eq(false, plan.indexOnly, "geo.1.2 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "geo.1.2 - nscannedObjects should not be 0 for a non covered query") - -print("all tests passed") \ No newline at end of file diff --git a/jstests/covered_index_geo_2.js b/jstests/covered_index_geo_2.js deleted file mode 100644 index 52f610b7e64..00000000000 --- a/jstests/covered_index_geo_2.js +++ /dev/null @@ -1,22 +0,0 @@ -var coll = db.getCollection("covered_geo_2") -coll.drop() - -coll.insert({_id : 1, loc1 : [ 5 , 5 ], type1 : "type1", - loc2 : [ 5 , 5 ], type2 : 1}) -coll.insert({_id : 2, loc1 : [ 6 , 6 ], type1 : "type2", - loc2 : [ 5 , 5 ], type2 : 2}) -coll.insert({_id : 3, loc1 : [ 7 , 7 ], type1 : "type3", - loc2 : [ 5 , 5 ], type2 : 3}) - -coll.ensureIndex({loc1 : "2dsphere", type1 : 1}); -coll.ensureIndex({type2: 1, loc2 : "2dsphere"}); - -var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {loc1:1, type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain(); -assert.eq(false, plan.indexOnly, "geo.2.1 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "geo.2.1 - nscannedObjects should not be 0 for a non covered query") - -var plan = coll.find({loc1 : {$nearSphere: [ 6 , 6 ]}}, {type1:1, _id:0}).hint({loc1:"2dsphere", type1:1}).explain(); -assert.eq(false, plan.indexOnly, "geo.2.2 - indexOnly should be false for a non covered query") -assert.neq(0, plan.nscannedObjects, "geo.2.2 - nscannedObjects should not be 0 for a non covered query") - -print("all tests passed") diff --git a/jstests/covered_index_negative_1.js b/jstests/covered_index_negative_1.js deleted file mode 100644 index ab03e7566f6..00000000000 --- a/jstests/covered_index_negative_1.js +++ /dev/null @@ -1,61 +0,0 @@ -// Miscellaneous covered query tests. Mostly negative tests -// These are tests where we do not expect the query to be a -// covered index query. 
Hence we expect indexOnly=false and -// nscannedObjects > 0 - -var coll = db.getCollection("covered_negative_1") -coll.drop() -for (i=0;i<100;i++) { - coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10), d: i*10, e: [i, i%10], - f:i}) -} -coll.ensureIndex({a:1,b:-1,c:1}) -coll.ensureIndex({e:1}) -coll.ensureIndex({d:1}) -coll.ensureIndex({f:"hashed"}) - -// Test no projection -var plan = coll.find({a:10, b:"strvar_10", c:0}).hint({a:1, b:-1, c:1}).explain() -assert.eq(false, plan.indexOnly, "negative.1.1 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.1 - nscannedObjects should not be 0 for a non covered query") - -// Test projection and not excluding _id -var plan = coll.find({a:10, b:"strvar_10", c:0},{a:1, b:1, c:1}).hint({a:1, b:-1, c:1}).explain() -assert.eq(false, plan.indexOnly, "negative.1.2 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.2 - nscannedObjects should not be 0 for a non covered query") - -// Test projection of non-indexed field -var plan = coll.find({d:100},{d:1, c:1, _id:0}).hint({d:1}).explain() -assert.eq(false, plan.indexOnly, "negative.1.3 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.3 - nscannedObjects should not be 0 for a non covered query") - -// Test query and projection on a multi-key index -var plan = coll.find({e:99},{e:1, _id:0}).hint({e:1}).explain() -assert.eq(false, plan.indexOnly, "negative.1.4 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.4 - nscannedObjects should not be 0 for a non covered query") - -// Commenting out negative.1.5 and 1.6 pending fix in SERVER-8650 -// // Test projection and $natural sort -// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, b:-1, c:1}).explain() -// // indexOnly should be false but is not due to bug 
https://jira.mongodb.org/browse/SERVER-8561 -// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered query") -// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non covered query") - -// // Test sort on non-indexed field -// var plan = coll.find({d:{$lt:1000}},{d:1, _id:0}).sort({c:1}).hint({d:1}).explain() -// //indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562 -// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered query") -// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non covered query") - -// Test query on non-indexed field -var plan = coll.find({d:{$lt:1000}},{a:1, b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain() -//indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562 -// assert.eq(true, plan.indexOnly, "negative.1.7 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.7 - nscannedObjects should not be 0 for a non covered query") - -// Test query on hashed indexed field -var plan = coll.find({f:10},{f:1, _id:0}).hint({f:"hashed"}).explain() -assert.eq(false, plan.indexOnly, "negative.1.8 - indexOnly should be false on a non covered query") -assert.neq(0, plan.nscannedObjects, "negative.1.8 - nscannedObjects should not be 0 for a non covered query") - -print('all tests passed') diff --git a/jstests/covered_index_simple_1.js b/jstests/covered_index_simple_1.js deleted file mode 100644 index 44e3c00a9f8..00000000000 --- a/jstests/covered_index_simple_1.js +++ /dev/null @@ -1,55 +0,0 @@ -// Simple covered index query test - -var coll = db.getCollection("covered_simple_1") -coll.drop() -for (i=0;i<10;i++) { - coll.insert({foo:i}) -} -for (i=0;i<10;i++) { - coll.insert({foo:i}) -} -for (i=0;i<5;i++) { - coll.insert({bar:i}) -} -coll.insert({foo:"string"}) 
-coll.insert({foo:{bar:1}}) -coll.insert({foo:null}) -coll.ensureIndex({foo:1}) - -// Test equality with int value -var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.1 - nscannedObjects should be 0 for covered query") - -// Test equality with string value -var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.2 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.2 - nscannedObjects should be 0 for covered query") - -// Test equality with doc value -var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.3 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.3 - nscannedObjects should be 0 for covered query") - -// Test no query -var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.4 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.4 - nscannedObjects should be 0 for covered query") - -// Test range query -var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.5 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.5 - nscannedObjects should be 0 for covered query") - -// Test in query -var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.6 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.6 - nscannedObjects should be 0 for covered query") - -// Test no return -var plan = coll.find({foo:"2"}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.1.7 - indexOnly should 
be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.1.7 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') - diff --git a/jstests/covered_index_simple_2.js b/jstests/covered_index_simple_2.js deleted file mode 100644 index 313cca439d8..00000000000 --- a/jstests/covered_index_simple_2.js +++ /dev/null @@ -1,43 +0,0 @@ -// Simple covered index query test with unique index - -var coll = db.getCollection("covered_simple_2") -coll.drop() -for (i=0;i<10;i++) { - coll.insert({foo:i}) -} -coll.insert({foo:"string"}) -coll.insert({foo:{bar:1}}) -coll.insert({foo:null}) -coll.ensureIndex({foo:1},{unique:true}) - -// Test equality with int value -var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.1 - nscannedObjects should be 0 for covered query") - -// Test equality with string value -var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.2 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.2 - nscannedObjects should be 0 for covered query") - -// Test equality with int value on a dotted field -var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.3 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.3 - nscannedObjects should be 0 for covered query") - -// Test no query -var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.4 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.4 - nscannedObjects should be 0 for covered query") - -// Test range query -var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.5 
- indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.5 - nscannedObjects should be 0 for covered query") - -// Test in query -var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "simple.2.6 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.2.6 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') diff --git a/jstests/covered_index_simple_3.js b/jstests/covered_index_simple_3.js deleted file mode 100644 index ee586540ea4..00000000000 --- a/jstests/covered_index_simple_3.js +++ /dev/null @@ -1,57 +0,0 @@ -// Simple covered index query test with a unique sparse index - -var coll = db.getCollection("covered_simple_3"); -coll.drop(); -for (i=0;i<10;i++) { - coll.insert({foo:i}); -} -for (i=0;i<5;i++) { - coll.insert({bar:i}); -} -coll.insert({foo:"string"}); -coll.insert({foo:{bar:1}}); -coll.insert({foo:null}); -coll.ensureIndex({foo:1}, {sparse:true, unique:true}); - -// Test equality with int value -var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.1 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.1 - nscannedObjects should be 0 for covered query"); - -// Test equality with string value -var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.2 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.2 - nscannedObjects should be 0 for covered query"); - -// Test equality with int value on a dotted field -var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.3 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.3 - nscannedObjects should be 0 for covered query"); - -// Test no query -var plan = 
coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.4 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.4 - nscannedObjects should be 0 for covered query"); - -// Test range query -var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.5 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.5 - nscannedObjects should be 0 for covered query"); - -// Test in query -var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.6 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.6 - nscannedObjects should be 0 for covered query"); - -// Test $exists true -var plan = coll.find({foo:{$exists:true}}, {foo:1, _id:0}).hint({foo:1}).explain(); -assert.eq(true, plan.indexOnly, "simple.3.7 - indexOnly should be true on covered query"); -assert.eq(0, plan.nscannedObjects, "simple.3.7 - nscannedObjects should be 0 for covered query"); - -// SERVER-12262: currently $nin will always use a collection scan -//var plan = coll.find({foo:{$nin:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain() -//assert.eq(true, plan.indexOnly, "simple.3.8 - indexOnly should be true on covered query") -// this should be 0 but is not due to bug https://jira.mongodb.org/browse/SERVER-3187 -//assert.eq(13, plan.nscannedObjects, "simple.3.8 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') diff --git a/jstests/covered_index_simple_id.js b/jstests/covered_index_simple_id.js deleted file mode 100644 index c7f6811a33c..00000000000 --- a/jstests/covered_index_simple_id.js +++ /dev/null @@ -1,42 +0,0 @@ -// Simple covered index query test - -var coll = db.getCollection("covered_simple_id") -coll.drop() -for (i=0;i<10;i++) { - coll.insert({_id:i}) -} -coll.insert({_id:"string"}) 
-coll.insert({_id:{bar:1}}) -coll.insert({_id:null}) - -// Test equality with int value -var plan = coll.find({_id:1}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.1 - nscannedObjects should be 0 for covered query") - -// Test equality with string value -var plan = coll.find({_id:"string"}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.2 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.2 - nscannedObjects should be 0 for covered query") - -// Test equality with int value on a dotted field -var plan = coll.find({_id:{bar:1}}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.3 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.3 - nscannedObjects should be 0 for covered query") - -// Test no query -var plan = coll.find({}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.4 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.4 - nscannedObjects should be 0 for covered query") - -// Test range query -var plan = coll.find({_id:{$gt:2,$lt:6}}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.5 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.5 - nscannedObjects should be 0 for covered query") - -// Test in query -var plan = coll.find({_id:{$in:[5,8]}}, {_id:1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "simple.id.6 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "simple.id.6 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') diff --git a/jstests/covered_index_sort_1.js b/jstests/covered_index_sort_1.js deleted file mode 100644 index adfcb5c6cb6..00000000000 --- 
a/jstests/covered_index_sort_1.js +++ /dev/null @@ -1,34 +0,0 @@ -// Simple covered index query test with sort - -var coll = db.getCollection("covered_sort_1") -coll.drop() -for (i=0;i<10;i++) { - coll.insert({foo:i}) -} -for (i=0;i<10;i++) { - coll.insert({foo:i}) -} -for (i=0;i<5;i++) { - coll.insert({bar:i}) -} -coll.insert({foo:"1"}) -coll.insert({foo:{bar:1}}) -coll.insert({foo:null}) -coll.ensureIndex({foo:1}) - -// Test no query and sort ascending -var plan = coll.find({}, {foo:1, _id:0}).sort({foo:1}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "sort.1.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "sort.1.1 - nscannedObjects should be 0 for covered query") - -// Test no query and sort descending -var plan = coll.find({}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "sort.1.2 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "sort.1.2 - nscannedObjects should be 0 for covered query") - -// Test range query with sort -var plan = coll.find({foo:{$gt:2}}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain() -assert.eq(true, plan.indexOnly, "sort.1.5 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "sort.1.5 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') \ No newline at end of file diff --git a/jstests/covered_index_sort_2.js b/jstests/covered_index_sort_2.js deleted file mode 100644 index e5dd48b47af..00000000000 --- a/jstests/covered_index_sort_2.js +++ /dev/null @@ -1,17 +0,0 @@ -// Simple covered index query test with sort on _id - -var coll = db.getCollection("covered_sort_2") -coll.drop() -for (i=0;i<10;i++) { - coll.insert({_id:i}) -} -coll.insert({_id:"1"}) -coll.insert({_id:{bar:1}}) -coll.insert({_id:null}) - -// Test no query -var plan = coll.find({}, {_id:1}).sort({_id:-1}).hint({_id:1}).explain() -assert.eq(true, plan.indexOnly, "sort.2.1 - indexOnly should be 
true on covered query") -assert.eq(0, plan.nscannedObjects, "sort.2.1 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') \ No newline at end of file diff --git a/jstests/covered_index_sort_3.js b/jstests/covered_index_sort_3.js deleted file mode 100644 index 8f5986c4d76..00000000000 --- a/jstests/covered_index_sort_3.js +++ /dev/null @@ -1,16 +0,0 @@ -// Compound index covered query tests with sort - -var coll = db.getCollection("covered_sort_3") -coll.drop() -for (i=0;i<100;i++) { - coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)}) -} -coll.insert -coll.ensureIndex({a:1,b:-1,c:1}) - -// Test no query, sort on all fields in index order -var plan = coll.find({}, {b:1, c:1, _id:0}).sort({a:1,b:-1,c:1}).hint({a:1, b:-1, c:1}).explain() -assert.eq(true, plan.indexOnly, "compound.1.1 - indexOnly should be true on covered query") -assert.eq(0, plan.nscannedObjects, "compound.1.1 - nscannedObjects should be 0 for covered query") - -print ('all tests pass') diff --git a/jstests/create_indexes.js b/jstests/create_indexes.js deleted file mode 100644 index e83799a57ab..00000000000 --- a/jstests/create_indexes.js +++ /dev/null @@ -1,48 +0,0 @@ - -t = db.create_indexes; -t.drop(); - -res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } ); -assert( res.createdCollectionAutomatically ); -assert.eq( 1, res.numIndexesBefore ); -assert.eq( 2, res.numIndexesAfter ); - -res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" } ] } ); -assert.eq( 2, res.numIndexesBefore ); -assert( res.noChangesMade ); - -res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" }, - { key : { "y" : 1 }, name : "y_1" } ] } ); -assert( !res.createdCollectionAutomatically ); -assert.eq( 2, res.numIndexesBefore ); -assert.eq( 3, res.numIndexesAfter ); - -res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" }, - { key : { "b" : 1 }, name : "b_1" } 
] } ); -assert( !res.createdCollectionAutomatically ); -assert.eq( 3, res.numIndexesBefore ); -assert.eq( 5, res.numIndexesAfter ); - -res = t.runCommand( "createIndexes", { indexes : [ { key : { "a" : 1 }, name : "a_1" }, - { key : { "b" : 1 }, name : "b_1" } ] } ); -assert.eq( 5, res.numIndexesBefore ); -assert( res.noChangesMade ); - -res = t.runCommand( "createIndexes", { indexes : [ {} ] } ); -assert( !res.ok ); - -res = t.runCommand( "createIndexes", { indexes : [ {} , { key : { m : 1 }, name : "asd" } ] } ); -assert( !res.ok ); - -assert.eq( 5, t.getIndexes().length ); - -res = t.runCommand( "createIndexes", - { indexes : [ { key : { "c" : 1 }, sparse : true, name : "c_1" } ] } ) -assert.eq( 6, t.getIndexes().length ); -assert.eq( 1, t.getIndexes().filter( function(z){ return z.sparse; } ).length ); - -res = t.runCommand( "createIndexes", - { indexes : [ { key : { "x" : "foo" }, name : "x_1" } ] } ); -assert( !res.ok ) - -assert.eq( 6, t.getIndexes().length ); diff --git a/jstests/currentop.js b/jstests/currentop.js deleted file mode 100644 index be822bf88f2..00000000000 --- a/jstests/currentop.js +++ /dev/null @@ -1,80 +0,0 @@ -print("BEGIN currentop.js"); - -// test basic currentop functionality + querying of nested documents -t = db.jstests_currentop -t.drop(); - -for(i=0;i<100;i++) { - t.save({ "num": i }); -} -// Make sure data is written before we start reading it in parallel shells. 
-db.getLastError(); - -print("count:" + t.count()); - -function ops(q) { - printjson( db.currentOp().inprog ); - return db.currentOp(q).inprog; -} - -print("start shell"); - -// sleep for a second for each (of 100) documents; can be killed in between documents & test should complete before 100 seconds -s1 = startParallelShell("db.jstests_currentop.count( { '$where': function() { sleep(1000); } } )"); - -print("sleep"); -sleep(1000); - -print("inprog:"); -printjson(db.currentOp().inprog) -print() -sleep(1); -print("inprog:"); -printjson(db.currentOp().inprog) -print() - -// need to wait for read to start -print("wait have some ops"); -assert.soon( function(){ - return ops( { "locks.^test": "r", "ns": "test.jstests_currentop" } ).length + - ops({ "locks.^test": "R", "ns": "test.jstests_currentop" }).length >= 1; -}, "have_some_ops"); -print("ok"); - -s2 = startParallelShell( "db.jstests_currentop.update( { '$where': function() { sleep(150); } }, { 'num': 1 }, false, true ); db.getLastError()" ); - -o = []; - -function f() { - o = ops({ "ns": "test.jstests_currentop" }); - - printjson(o); - - var writes = ops({ "locks.^test": "w", "ns": "test.jstests_currentop" }).length; - - var readops = ops({ "locks.^test": "r", "ns": "test.jstests_currentop" }); - print("readops:"); - printjson(readops); - var reads = readops.length; - - print("total: " + o.length + " w: " + writes + " r:" + reads); - - return o.length > writes && o.length > reads; -} - -print("go"); - -assert.soon( f, "f" ); - -// avoid waiting for the operations to complete (if soon succeeded) -for(var i in o) { - db.killOp(o[i].opid); -} - -start = new Date(); - -s1(); -s2(); - -// don't want to pass if timeout killed the js function -assert( ( new Date() ) - start < 30000 ); diff --git a/jstests/cursor1.js b/jstests/cursor1.js deleted file mode 100644 index 8448752bb0c..00000000000 --- a/jstests/cursor1.js +++ /dev/null @@ -1,20 +0,0 @@ - -t = db.cursor1 -t.drop(); - -big = ""; -while ( big.length < 50000 ) - 
big += "asdasdasdasdsdsdadsasdasdasD"; - -num = Math.ceil( 10000000 / big.length ); - -for ( var i=0; i 0; }).sort({ _id: -1 }).explain() - num = ex.n - end = new Date() - } - catch (e) { - print("cursora.js FAIL " + e); - join(); - throw e; - } - - join() - - //print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) ) - assert.eq( 0 , t.count() , "after remove: " + tojson( ex ) ) - // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more reliable so cen re-enable assert - if ( n == num ) - print( "cursora.js warning: shouldn't have counted all n: " + n + " num: " + num ); -} - -run( 1500 ) -run( 5000 ) -run( 1500 , true ) -run( 5000 , true ) -print("cursora.js SUCCESS") diff --git a/jstests/cursorb.js b/jstests/cursorb.js deleted file mode 100644 index 65e356e89cb..00000000000 --- a/jstests/cursorb.js +++ /dev/null @@ -1,17 +0,0 @@ -// The 'cursor not found in map -1' warning is not logged when get more exhausts a client cursor. -// SERVER-6931 - -t = db.jstests_cursorb; -t.drop(); - -// Exhaust a client cursor in get more. -for( i = 0; i < 200; ++i ) { - t.save( { a:i } ); -} -t.find().itcount(); - -// Check that the 'cursor not found in map -1' message is not printed. This message indicates an -// attempt to look up a cursor with an invalid id and should never appear in the log. -log = db.adminCommand( { getLog:'global' } ).log -log.forEach( function( line ) { assert( !line.match( /cursor not found in map -1 / ), - 'Cursor map lookup with id -1.' 
); } ); diff --git a/jstests/datasize.js b/jstests/datasize.js deleted file mode 100644 index 13e9f11bf0c..00000000000 --- a/jstests/datasize.js +++ /dev/null @@ -1,35 +0,0 @@ -// test end-to-end data allocation without powerOf2Sizes enabled -f = db.jstests_datasize; -f.drop(); - -// this test requires usePowerOf2Sizes to be off -db.createCollection( f.getName(), { usePowerOf2Sizes: false } ); -assert.eq(0, f.stats().userFlags); - -assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); -f.save( {qq:'c'} ); -assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); -f.save( {qq:'fg'} ); -assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); - -f.drop(); -db.createCollection( f.getName(), { usePowerOf2Sizes: false} ); - -f.ensureIndex( {qq:1} ); -assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); -f.save( {qq:'c'} ); -assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); -f.save( {qq:'fg'} ); -assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize"} ).size ); - -assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}} ).ok ); - -assert.eq( 68, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'z' }} ).size ); -assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }} ).size ); -assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{qq:1}} ).size ); -assert.eq( 36, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'d'}, max:{qq:'z' }, keyPattern:{qq:1}} ).size ); - -assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'c' }} ).size ); -assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'d' }} ).size ); - -assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{a:1}} ).ok ); diff --git a/jstests/datasize2.js 
b/jstests/datasize2.js deleted file mode 100644 index 103cb2096ee..00000000000 --- a/jstests/datasize2.js +++ /dev/null @@ -1,27 +0,0 @@ - -t = db.datasize2 -t.drop(); - -N = 1000 -for ( i=0; i after.mem.mapped , "closeAllDatabases does something before:" + tojson( before.mem ) + " after:" + tojson( after.mem ) + " cmd res:" + tojson( cmdres ) ); - print( before.mem.mapped + " -->> " + after.mem.mapped ); -} -else { - print( "can't test serverStatus on this machine" ); -} - -t.save( { x : 1 } ); - -res = db._adminCommand( "listDatabases" ); -assert( res.databases && res.databases.length > 0 , "listDatabases 1 " + tojson(res) ); - -now = new Date(); -x = db._adminCommand( "ismaster" ); -assert( x.ismaster , "ismaster failed: " + tojson( x ) ) -assert( x.localTime, "ismaster didn't include time: " + tojson(x)) -localTimeSkew = x.localTime - now -if ( localTimeSkew >= 50 ) { - print( "Warning: localTimeSkew " + localTimeSkew + " > 50ms." ) -} -assert.lt( localTimeSkew, 500, "isMaster.localTime" ) - -before = db.runCommand( "serverStatus" ) -print(before.uptimeEstimate); -sleep( 5000 ) -after = db.runCommand( "serverStatus" ) -print(after.uptimeEstimate); -assert.lt( 2 , after.uptimeEstimate , "up1" ) -assert.gt( after.uptimeEstimate , before.uptimeEstimate , "up2" ) - -// Test startup_log -var stats = db.getSisterDB( "local" ).startup_log.stats(); -assert(stats.capped); - -var latestStartUpLog = db.getSisterDB( "local" ).startup_log.find().sort( { $natural: -1 } ).limit(1).next(); -var serverStatus = db._adminCommand( "serverStatus" ); -var cmdLine = db._adminCommand( "getCmdLineOpts" ).parsed; - -// Test that the startup log has the expected keys -var verbose = false; -var expectedKeys = ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"]; -var keys = Object.keySet(latestStartUpLog); -assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed'); - -// Tests _id implicitly - should be comprised of host-timestamp -// Setup 
expected startTime and startTimeLocal from the supplied timestamp -var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp -var _idUptime = _id.pop(); -var _idHost = _id.join('-'); -var uptimeSinceEpochRounded = Math.floor(_idUptime/1000) * 1000; -var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime - -assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id"); -assert.eq(serverStatus.host.split(':')[0], latestStartUpLog.hostname, "Hostname doesn't match one in server status"); -assert.closeWithinMS(startTime, latestStartUpLog.startTime, - "StartTime doesn't match one from _id", 2000); // Expect less than 2 sec delta -assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts"); -assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus"); - -// Test buildinfo -var buildinfo = db.runCommand( "buildinfo" ); -delete buildinfo.ok; // Delete extra meta info not in startup_log -var isMaster = db._adminCommand( "ismaster" ); - -// Test buildinfo has the expected keys -var expectedKeys = ["version", "gitVersion", "OpenSSLVersion", "sysInfo", "loaderFlags", "compilerFlags", "allocator", "versionArray", "javascriptEngine", "bits", "debug", "maxBsonObjectSize"]; -var keys = Object.keySet(latestStartUpLog.buildinfo); -// Disabled to check -assert(arrayIsSubset(expectedKeys, keys), "buildinfo keys failed! 
\n expected:\t" + expectedKeys + "\n actual:\t" + keys); -assert.eq(buildinfo, latestStartUpLog.buildinfo, "buildinfo doesn't match that from buildinfo command"); - -// Test version and version Array -var version = latestStartUpLog.buildinfo.version.split('-')[0]; -var versionArray = latestStartUpLog.buildinfo.versionArray; -var versionArrayCleaned = []; -// Only create a string with 2 dots (2.5.5, not 2.5.5.0) -for (var i = 0; i < (versionArray.length - 1); i++) if (versionArray[i] >= 0) { versionArrayCleaned.push(versionArray[i]); } - -assert.eq(serverStatus.version, latestStartUpLog.buildinfo.version, "Mongo version doesn't match that from ServerStatus"); -assert.eq(version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray"); -assert(["V8", "SpiderMonkey", "Unknown"].indexOf(latestStartUpLog.buildinfo.javascriptEngine) > -1); -assert.eq(isMaster.maxBsonObjectSize, latestStartUpLog.buildinfo.maxBsonObjectSize, "maxBsonObjectSize doesn't match one from ismaster"); diff --git a/jstests/dbcase.js b/jstests/dbcase.js deleted file mode 100644 index 25c0bcab37a..00000000000 --- a/jstests/dbcase.js +++ /dev/null @@ -1,29 +0,0 @@ -// Check db name duplication constraint SERVER-2111 - -a = db.getSisterDB( "dbcasetest_dbnamea" ) -b = db.getSisterDB( "dbcasetest_dbnameA" ) - -a.dropDatabase(); -b.dropDatabase(); - -a.foo.save( { x : 1 } ) -z = db.getLastErrorObj(); -assert.eq( 0 , z.code || 0 , "A : " + tojson(z) ) - -b.foo.save( { x : 1 } ) -z = db.getLastErrorObj(); -assert.eq( 13297 , z.code || 0 , "B : " + tojson(z) ) - -assert.neq( -1, db.getMongo().getDBNames().indexOf( a.getName() ) ); -assert.eq( -1, db.getMongo().getDBNames().indexOf( b.getName() ) ); -printjson( db.getMongo().getDBs().databases ); - -a.dropDatabase(); -b.dropDatabase(); - -ai = db.getMongo().getDBNames().indexOf( a.getName() ); -bi = db.getMongo().getDBNames().indexOf( b.getName() ); -// One of these dbs may exist if there is a slave active, but they must -// not 
both exist. -assert( ai == -1 || bi == -1 ); -printjson( db.getMongo().getDBs().databases ); diff --git a/jstests/dbcase2.js b/jstests/dbcase2.js deleted file mode 100644 index f9973d98837..00000000000 --- a/jstests/dbcase2.js +++ /dev/null @@ -1,9 +0,0 @@ -// SERVER-2111 Check that an in memory db name will block creation of a db with a similar but differently cased name. - -a = db.getSisterDB( "dbcase2test_dbnamea" ) -b = db.getSisterDB( "dbcase2test_dbnameA" ) - -a.c.count(); -assert.throws( function() { b.c.count() } ); - -assert.eq( -1, db.getMongo().getDBNames().indexOf( "dbcase2test_dbnameA" ) ); diff --git a/jstests/dbhash.js b/jstests/dbhash.js deleted file mode 100644 index 7fea4b4d50c..00000000000 --- a/jstests/dbhash.js +++ /dev/null @@ -1,58 +0,0 @@ - -a = db.dbhasha; -b = db.dbhashb; - -a.drop(); -b.drop(); - -// debug SERVER-761 -db.getCollectionNames().forEach( function( x ) { - v = db[ x ].validate(); - if ( !v.valid ) { - print( x ); - printjson( v ); - } - } ); - -function dbhash( mydb ) { - var ret = mydb.runCommand( "dbhash" ); - assert.commandWorked( ret, "dbhash failure" ); - return ret; -} - -function gh( coll , mydb ){ - if ( ! mydb ) mydb = db; - var x = dbhash( mydb ).collections[coll.getName()]; - if ( ! 
x ) - return ""; - return x; -} - -function dbh( mydb ){ - return dbhash( mydb ).md5; -} - -assert.eq( gh( a ) , gh( b ) , "A1" ); - -a.insert( { _id : 5 } ); -assert.neq( gh( a ) , gh( b ) , "A2" ); - -b.insert( { _id : 5 } ); -assert.eq( gh( a ) , gh( b ) , "A3" ); - -dba = db.getSisterDB( "dbhasha" ); -dbb = db.getSisterDB( "dbhashb" ); - -dba.dropDatabase(); -dbb.dropDatabase(); - -assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" ); -assert.eq( dbh( dba ) , dbh( dbb ) , "C1" ); - -dba.foo.insert( { _id : 5 } ); -assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" ); -assert.neq( dbh( dba ) , dbh( dbb ) , "C2" ); - -dbb.foo.insert( { _id : 5 } ); -assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" ); -assert.eq( dbh( dba ) , dbh( dbb ) , "C3" ); diff --git a/jstests/dbhash2.js b/jstests/dbhash2.js deleted file mode 100644 index ac491291c2b..00000000000 --- a/jstests/dbhash2.js +++ /dev/null @@ -1,22 +0,0 @@ - -mydb = db.getSisterDB( "config" ); - -t = mydb.foo; -t.drop(); - -t.insert( { x : 1 } ); -res1 = mydb.runCommand( "dbhash" ); -assert( res1.fromCache.indexOf( "config.foo" ) == -1 ); - -res2 = mydb.runCommand( "dbhash" ); -assert( res2.fromCache.indexOf( "config.foo" ) >= 0 ); -assert.eq( res1.collections.foo, res2.collections.foo ); - -t.insert( { x : 2 } ); -res3 = mydb.runCommand( "dbhash" ); -assert( res3.fromCache.indexOf( "config.foo" ) < 0 ); -assert.neq( res1.collections.foo, res3.collections.foo ); - - - - diff --git a/jstests/dbref1.js b/jstests/dbref1.js deleted file mode 100644 index 4a827662c1a..00000000000 --- a/jstests/dbref1.js +++ /dev/null @@ -1,10 +0,0 @@ - -a = db.dbref1a; -b = db.dbref1b; - -a.drop(); -b.drop(); - -a.save( { name : "eliot" } ); -b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } ); -assert.eq( "eliot" , b.findOne().link.fetch().name , "A" ); diff --git a/jstests/dbref2.js b/jstests/dbref2.js deleted file mode 100644 index d1b4870322d..00000000000 --- 
a/jstests/dbref2.js +++ /dev/null @@ -1,20 +0,0 @@ - -a = db.dbref2a; -b = db.dbref2b; -c = db.dbref2c; - -a.drop(); -b.drop(); -c.drop(); - -a.save( { name : "eliot" } ); -b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } ); -c.save( { num : 1 , links : [ new DBRef( "dbref2a" , a.findOne()._id ) ] } ); - -assert.eq( "eliot" , b.findOne().link.fetch().name , "A" ); -assert.neq( "el" , b.findOne().link.fetch().name , "B" ); - -// $elemMatch value -var doc = c.findOne( { links: { $elemMatch: { $ref : "dbref2a", $id : a.findOne()._id } } } ); -assert.eq( "eliot" , doc.links[0].fetch().name , "C" ); -assert.neq( "el" , doc.links[0].fetch().name , "D" ); diff --git a/jstests/dbref3.js b/jstests/dbref3.js deleted file mode 100644 index 2f3ab8fa79c..00000000000 --- a/jstests/dbref3.js +++ /dev/null @@ -1,45 +0,0 @@ -// Make sure we only make a DBRef object for objects where the first field is a string named $ref -// and the second field is $id with any type. Only the first two fields matter for deciding if it -// is a DBRef. See http://docs.mongodb.org/manual/reference/database-references/#dbrefs. - -var t = db.dbref3; - -t.drop(); - -// true cases -t.insert({sub: {$ref: "foo", $id: "bar"}, dbref: true}); -t.insert({sub: {$ref: "foo", $id: "bar", $db: "baz"}, dbref: true}); -t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted -t.insert({sub: {$ref: "foo", $id: ObjectId()}, dbref: true}); -t.insert({sub: {$ref: "foo", $id: 1}, dbref: true}); - -t.insert({sub: {$ref: 123/*not a string*/, $id: "bar"}, dbref: false}); -t.insert({sub: {$id: "bar", $ref: "foo"}, dbref: false}); -t.insert({sub: {$ref: "foo"}, dbref: false}); -t.insert({sub: {$id: "foo"}, dbref: false}); -t.insert({sub: {other: 1, $ref: "foo", $id: "bar"}, dbref: false}); - -t.find().forEach(function(obj) { - assert.eq(obj.sub.constructor == DBRef, obj.dbref, tojson(obj)); -}); - -// We should be able to run distinct against DBRef fields. 
-var distinctRefs = t.distinct('sub.$ref'); -print('distinct $ref = ' + distinctRefs); - -var distinctIDs = t.distinct('sub.$id'); -print('distinct $id = ' + distinctIDs); - -var distinctDBs = t.distinct('sub.$db'); -print('distinct $db = ' + distinctDBs); - -// Confirm number of unique values in each DBRef field. -assert.eq(2, distinctRefs.length); -assert.eq(4, distinctIDs.length); -assert.eq(1, distinctDBs.length); - -// $id is an array. perform positional projection on $id. -t.insert({sub: {$ref: "foo", $id: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]}}); -var k = t.findOne({'sub.$id': {$elemMatch: {x: 2}}}, {_id: 0, 'sub.$id.$': 1}); -print('k = ' + tojson(k)); -assert.eq({sub: {$id: [{x: 2, y:2}]}}, k); \ No newline at end of file diff --git a/jstests/delx.js b/jstests/delx.js deleted file mode 100644 index aa858e92cbd..00000000000 --- a/jstests/delx.js +++ /dev/null @@ -1,32 +0,0 @@ - -a = db.getSisterDB("delxa" ) -b = db.getSisterDB("delxb" ) - -function setup( mydb ){ - mydb.dropDatabase(); - for ( i=0; i<100; i++ ){ - mydb.foo.insert( { _id : i } ); - } - mydb.getLastError(); -} - -setup( a ); -setup( b ); - -assert.eq( 100 , a.foo.find().itcount() , "A1" ) -assert.eq( 100 , b.foo.find().itcount() , "A2" ) - -x = a.foo.find().sort( { _id : 1 } ).batchSize( 60 ) -y = b.foo.find().sort( { _id : 1 } ).batchSize( 60 ) - -x.next(); -y.next(); - -a.foo.remove( { _id : { $gt : 50 } } ); -db.getLastError(); - -assert.eq( 51 , a.foo.find().itcount() , "B1" ) -assert.eq( 100 , b.foo.find().itcount() , "B2" ) - -assert.eq( 59 , x.itcount() , "C1" ) -assert.eq( 99 , y.itcount() , "C2" ); // this was asserting because ClientCursor byLoc doesn't take db into consideration diff --git a/jstests/depth_limit.js b/jstests/depth_limit.js deleted file mode 100644 index 7523a1fc9fe..00000000000 --- a/jstests/depth_limit.js +++ /dev/null @@ -1,56 +0,0 @@ -// SERVER-11781 Don't crash when converting deeply nested or cyclical JS objects to BSON. 
- -function test() { - function assertTooBig(obj) { - // This used to crash rather than throwing an exception. - assert.throws(function(){Object.bsonsize(obj)}); - } - - function assertNotTooBig(obj) { - assert.doesNotThrow(function(){Object.bsonsize(obj)}); - } - - function objWithDepth(depth) { - var out = 1; - while (depth--) { - out = {o: out}; - } - return out; - } - - function arrayWithDepth(depth) { - var out = 1; - while (depth--) { - out = [out]; - } - return out; - } - - assertNotTooBig({}); - assertNotTooBig({array: []}); - - var objCycle = {}; - objCycle.cycle = objCycle; - assertTooBig(objCycle); - - var arrayCycle = []; - arrayCycle.push(arrayCycle); - assertTooBig({array: arrayCycle}); - - var objDepthLimit = 150; - assertNotTooBig(objWithDepth(objDepthLimit - 1)); - assertTooBig(objWithDepth(objDepthLimit)); - - - var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object - assertNotTooBig({array: arrayWithDepth(arrayDepthLimit - 1)}); - assertTooBig({array: arrayWithDepth(arrayDepthLimit)}); -} - -// test in shell -test(); - -// test on server -db.depth_limit.drop(); -db.depth_limit.insert({}); -db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server diff --git a/jstests/distinct1.js b/jstests/distinct1.js deleted file mode 100644 index 03e425af761..00000000000 --- a/jstests/distinct1.js +++ /dev/null @@ -1,40 +0,0 @@ - -t = db.distinct1; -t.drop(); - -assert.eq( 0 , t.distinct( "a" ).length , "test empty" ); - -t.save( { a : 1 } ) -t.save( { a : 2 } ) -t.save( { a : 2 } ) -t.save( { a : 2 } ) -t.save( { a : 3 } ) - - -res = t.distinct( "a" ); -assert.eq( "1,2,3" , res.toString() , "A1" ); - -assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" ); - -t.drop(); - -t.save( { a : { b : "a" } , c : 12 } ); -t.save( { a : { b : "b" } , c : 12 } ); -t.save( { a : { b : "c" } , c : 12 } ); -t.save( { a : { b : "c" } , c : 12 } ); - -res = t.distinct( "a.b" ); -assert.eq( "a,b,c" , 
res.toString() , "B1" ); -printjson(t._distinct( "a.b" ).stats); -assert.eq( "BasicCursor" , t._distinct( "a.b" ).stats.cursor , "B2" ) - -t.drop(); - -t.save({_id: 1, a: 1}); -t.save({_id: 2, a: 2}); - -// Test distinct with _id. -res = t.distinct( "_id" ); -assert.eq( "1,2", res.toString(), "C1" ); -res = t.distinct( "a", {_id: 1} ); -assert.eq( "1", res.toString(), "C2" ); diff --git a/jstests/distinct2.js b/jstests/distinct2.js deleted file mode 100644 index 41ee78c5117..00000000000 --- a/jstests/distinct2.js +++ /dev/null @@ -1,13 +0,0 @@ - -t = db.distinct2; -t.drop(); - -t.save({a:null}); -assert.eq( 0 , t.distinct('a.b').length , "A" ); - -t.drop(); -t.save( { a : 1 } ); -assert.eq( [1] , t.distinct( "a" ) , "B" ); -t.save( {} ) -assert.eq( [1] , t.distinct( "a" ) , "C" ); - diff --git a/jstests/distinct3.js b/jstests/distinct3.js deleted file mode 100644 index 336663d3b0d..00000000000 --- a/jstests/distinct3.js +++ /dev/null @@ -1,33 +0,0 @@ -// Yield and delete test case for query optimizer cursor. SERVER-4401 - -t = db.jstests_distinct3; -t.drop(); - -t.ensureIndex({a:1}); -t.ensureIndex({b:1}); - -for( i = 0; i < 50; ++i ) { - for( j = 0; j < 20; ++j ) { - t.save({a:i,c:i,d:j}); - } -} -for( i = 0; i < 1000; ++i ) { - t.save({b:i,c:i+50}); -} -db.getLastError(); - -// Attempt to remove the last match for the {a:1} index scan while distinct is yielding. 
-p = startParallelShell( 'for( i = 0; i < 2500; ++i ) { ' + - ' db.jstests_distinct3.remove( { a:49 } ); ' + - ' for( j = 0; j < 20; ++j ) { ' + - ' db.jstests_distinct3.save( { a:49, c:49, d:j } ); ' + - ' } ' + - '} ' + - 'db.getLastError(); ' ); - -for( i = 0; i < 100; ++i ) { - count = t.distinct( 'c', {$or:[{a:{$gte:0},d:0},{b:{$gte:0}}]} ).length; - assert.gt( count, 1000 ); -} - -p(); diff --git a/jstests/distinct_array1.js b/jstests/distinct_array1.js deleted file mode 100644 index 2f289ad2e79..00000000000 --- a/jstests/distinct_array1.js +++ /dev/null @@ -1,91 +0,0 @@ -t = db.distinct_array1; -t.drop(); - -t.save( { a : [1,2,3] } ) -t.save( { a : [2,3,4] } ) -t.save( { a : [3,4,5] } ) -t.save( { a : 9 } ) - - -// Without index. -res = t.distinct( "a" ).sort(); -assert.eq( "1,2,3,4,5,9" , res.toString() , "A1" ); - -// Array element 0 without index. -res = t.distinct( "a.0" ).sort(); -assert.eq( "1,2,3" , res.toString() , "A2" ); - -// Array element 1 without index. -res = t.distinct( "a.1" ).sort(); -assert.eq( "2,3,4" , res.toString() , "A3" ); - -// With index. -t.ensureIndex( { a : 1 } ); -res = t.distinct( "a" ).sort(); -assert.eq( "1,2,3,4,5,9" , res.toString() , "A4" ); - -// Array element 0 with index. -res = t.distinct( "a.0" ).sort(); -assert.eq( "1,2,3" , res.toString() , "A5" ); - -// Array element 1 with index. -res = t.distinct( "a.1" ).sort(); -assert.eq( "2,3,4" , res.toString() , "A6" ); - -//t.drop(); - -t.save( { a : [{b:"a"}, {b:"d"}] , c : 12 } ); -t.save( { a : [{b:"b"}, {b:"d"}] , c : 12 } ); -t.save( { a : [{b:"c"}, {b:"e"}] , c : 12 } ); -t.save( { a : [{b:"c"}, {b:"f"}] , c : 12 } ); -t.save( { a : [] , c : 12 } ); -t.save( { a : { b : "z"} , c : 12 } ); - -// Without index. 
-res = t.distinct( "a.b" ).sort(); -assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B1" ); - -// Array element 0 without index -res = t.distinct( "a.0.b" ).sort(); -assert.eq( "a,b,c" , res.toString() , "B2" ); - -// Array element 1 without index -res = t.distinct( "a.1.b" ).sort(); -assert.eq( "d,e,f" , res.toString() , "B3" ); - -// With index. -t.ensureIndex( { "a.b" : 1 } ); -res = t.distinct( "a.b" ); -res.sort() -assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B4" ); - -// _id as an document containing an array -t.save( { _id : { a : [1,2,3] } } ) -t.save( { _id : { a : [2,3,4] } } ) -t.save( { _id : { a : [3,4,5] } } ) -t.save( { _id : { a : 9 } } ) - -// Without index. -res = t.distinct( "_id.a" ).sort(); -assert.eq( "1,2,3,4,5,9" , res.toString() , "C1" ); - -// Array element 0 without index. -res = t.distinct( "_id.a.0" ).sort(); -assert.eq( "1,2,3" , res.toString() , "C2" ); - -// Array element 1 without index. -res = t.distinct( "_id.a.1" ).sort(); -assert.eq( "2,3,4" , res.toString() , "C3" ); - -// With index. -t.ensureIndex( { "_id.a" : 1 } ); -res = t.distinct( "_id.a" ).sort(); -assert.eq( "1,2,3,4,5,9" , res.toString() , "C4" ); - -// Array element 0 with index. -res = t.distinct( "_id.a.0" ).sort(); -assert.eq( "1,2,3" , res.toString() , "C5" ); - -// Array element 1 with index. 
-res = t.distinct( "_id.a.1" ).sort(); -assert.eq( "2,3,4" , res.toString() , "C6" ); diff --git a/jstests/distinct_index1.js b/jstests/distinct_index1.js deleted file mode 100644 index 6de1a7927e4..00000000000 --- a/jstests/distinct_index1.js +++ /dev/null @@ -1,72 +0,0 @@ - -t = db.distinct_index1 -t.drop(); - -function r( x ){ - return Math.floor( Math.sqrt( x * 123123 ) ) % 10; -} - -function d( k , q ){ - return t.runCommand( "distinct" , { key : k , query : q || {} } ) -} - -for ( i=0; i<1000; i++ ){ - o = { a : r(i*5) , b : r(i) }; - t.insert( o ); -} - -x = d( "a" ); -assert.eq( 1000 , x.stats.n , "AA1" ) -assert.eq( 1000 , x.stats.nscanned , "AA2" ) -assert.eq( 1000 , x.stats.nscannedObjects , "AA3" ) - -x = d( "a" , { a : { $gt : 5 } } ); -assert.eq( 398 , x.stats.n , "AB1" ) -assert.eq( 1000 , x.stats.nscanned , "AB2" ) -assert.eq( 1000 , x.stats.nscannedObjects , "AB3" ) - -x = d( "b" , { a : { $gt : 5 } } ); -assert.eq( 398 , x.stats.n , "AC1" ) -assert.eq( 1000 , x.stats.nscanned , "AC2" ) -assert.eq( 1000 , x.stats.nscannedObjects , "AC3" ) - - - -t.ensureIndex( { a : 1 } ) - -x = d( "a" ); -// There are only 10 values. We use the fast distinct hack and only examine each value once. -assert.eq( 10 , x.stats.n , "BA1" ) -assert.eq( 10 , x.stats.nscanned , "BA2" ) - -x = d( "a" , { a : { $gt : 5 } } ); -// Only 4 values of a are >= 5 and we use the fast distinct hack. -assert.eq(4, x.stats.n , "BB1" ) -assert.eq(4, x.stats.nscanned , "BB2" ) -assert.eq(0, x.stats.nscannedObjects , "BB3" ) - -x = d( "b" , { a : { $gt : 5 } } ); -// We can't use the fast distinct hack here because we're distinct-ing over 'b'. -assert.eq( 398 , x.stats.n , "BC1" ) -assert.eq( 398 , x.stats.nscanned , "BC2" ) -assert.eq( 398 , x.stats.nscannedObjects , "BC3" ) - -// Check proper nscannedObjects count when using a query optimizer cursor. 
-t.dropIndexes(); -t.ensureIndex( { a : 1, b : 1 } ); -x = d( "b" , { a : { $gt : 5 }, b : { $gt : 5 } } ); -printjson(x); -// 171 is the # of results we happen to scan when we don't use a distinct -// hack. When we use the distinct hack we scan 16, currently. -assert.lte(x.stats.n, 171); -assert.eq( 0 , x.stats.nscannedObjects , "BB3" ) - - - -// Cursor name should not be empty when using $or with hashed index. -// -t.dropIndexes(); -t.ensureIndex( { a : "hashed" } ); -x = d( "a", { $or : [ { a : 3 }, { a : 5 } ] } ); -assert.eq( 188, x.stats.n, "DA1" ); -assert.neq( "", x.stats.cursor, "DA2" ); diff --git a/jstests/distinct_index2.js b/jstests/distinct_index2.js deleted file mode 100644 index 67d28b8b95e..00000000000 --- a/jstests/distinct_index2.js +++ /dev/null @@ -1,41 +0,0 @@ -t = db.distinct_index2; -t.drop(); - -t.ensureIndex( { a : 1 , b : 1 } ) -t.ensureIndex( { c : 1 } ) - -// Uniformly distributed dataset. -// If we use a randomly generated dataset, we might not -// generate all the distinct values in the range [0, 10). -for ( var a=0; a<10; a++ ) { - for ( var b=0; b<10; b++ ) { - for ( var c=0; c<10; c++ ) { - t.insert( { a : a , b : b , c : c } ); - } - } -} - -correct = [] -for ( i=0; i<10; i++ ) - correct.push( i ) - -function check( field ){ - res = t.distinct( field ) - res = res.sort() - assert.eq( correct , res , "check: " + field ); - - if ( field != "a" ){ - res = t.distinct( field , { a : 1 } ) - res = res.sort() - assert.eq( correct , res , "check 2: " + field ); - } -} - -check( "a" ) -check( "b" ) -check( "c" ) - -// hashed index should produce same results. 
-t.dropIndexes(); -t.ensureIndex( { a : "hashed" } ); -check( "a" ); diff --git a/jstests/distinct_speed1.js b/jstests/distinct_speed1.js deleted file mode 100644 index 4cae5b0ae06..00000000000 --- a/jstests/distinct_speed1.js +++ /dev/null @@ -1,26 +0,0 @@ - -t = db.distinct_speed1; - -t.drop(); -for ( var i=0; i<10000; i++ ){ - t.save( { x : i % 10 } ); -} - -assert.eq( 10 , t.distinct("x").length , "A1" ); - -function fast(){ - t.find().explain().millis; -} - -function slow(){ - t.distinct("x"); -} - -for ( i=0; i<3; i++ ){ - print( "it: " + Date.timeFunc( fast ) ); - print( "di: " + Date.timeFunc( slow ) ); -} - - -t.ensureIndex( { x : 1 } ); -t.distinct( "x" , { x : 5 } ) diff --git a/jstests/drop.js b/jstests/drop.js deleted file mode 100644 index 154c35d1db3..00000000000 --- a/jstests/drop.js +++ /dev/null @@ -1,25 +0,0 @@ -var coll = db.jstests_drop; - -coll.drop(); - -res = coll.runCommand("drop"); -assert( !res.ok, tojson( res ) ); - - -assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "A"); -coll.save({}); -assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "B"); -coll.ensureIndex({a : 1}); -assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "C"); -assert.commandWorked(db.runCommand({drop : coll.getName()})); -assert.eq(0, db.system.indexes.find({ns : coll + ""}).count(), "D"); - -coll.ensureIndex({a : 1}); -assert.eq(2, db.system.indexes.find({ns : coll + ""}).count(), "E"); -assert.commandWorked(db.runCommand({deleteIndexes : coll.getName(), index : "*"}), - "delete indexes A"); -assert.eq(1, db.system.indexes.find({ns : coll + ""}).count(), "G"); - -// make sure we can still use it -coll.save({}); -assert.eq(1, coll.find().hint("_id_").toArray().length, "H"); diff --git a/jstests/drop2.js b/jstests/drop2.js deleted file mode 100644 index 9eb3aef93cd..00000000000 --- a/jstests/drop2.js +++ /dev/null @@ -1,53 +0,0 @@ -var coll = db.jstests_drop2; -coll.drop(); - -function debug( x ) { - printjson( x ); -} - 
-coll.save( {} ); -db.getLastError(); - -function getOpId( drop ) { - var inProg = db.currentOp().inprog; - debug( inProg ); - for ( var id in inProg ) { - var op = inProg[ id ]; - if ( drop ) { - if ( op.query && op.query.drop && op.query.drop == coll.getName() ) { - return op.opid; - } - } else { - if ( op.query && op.query.query && op.query.query.$where && op.ns == (coll + "") ) { - return op.opid; - } - } - } - return null; -} - -var shell1 = startParallelShell( "print(\"Count thread started\");" - + "db.getMongo().getCollection(\"" - + (coll + "") + "\")" - + ".count( { $where: function() {" - + "while( 1 ) { sleep( 1 ); } } } );" - + "print(\"Count thread terminating\");" ); -countOpId = null; -assert.soon( function() { countOpId = getOpId( false ); return countOpId; } ); - -var shell2 = startParallelShell( "print(\"Drop thread started\");" - + "print(\"drop result: \" + " - + "db.getMongo().getCollection(\"" - + (coll + "") + "\")" - + ".drop() );" - + "print(\"Drop thread terminating\")" ); -dropOpId = null; -assert.soon( function() { dropOpId = getOpId( true ); return dropOpId; } ); - -db.killOp( dropOpId ); -db.killOp( countOpId ); - -shell1(); -shell2(); - -coll.drop(); // in SERVER-1818, this fails diff --git a/jstests/drop3.js b/jstests/drop3.js deleted file mode 100644 index b2ca94a1550..00000000000 --- a/jstests/drop3.js +++ /dev/null @@ -1,29 +0,0 @@ -t = db.jstests_drop3; -sub = t.sub; - -t.drop(); -sub.drop(); - - -for (var i = 0; i < 10; i++){ - t.insert({}); - sub.insert({}); -} - -var cursor = t.find().batchSize(2); -var subcursor = sub.find().batchSize(2); - -cursor.next(); -subcursor.next(); -assert.eq( cursor.objsLeftInBatch(), 1 ); -assert.eq( subcursor.objsLeftInBatch(), 1 ); - -t.drop(); // should invalidate cursor, but not subcursor -db.getLastError(); - -assert.throws( function(){ cursor.itcount() } ); // throws "cursor doesn't exist on server" error on getMore -assert.eq( subcursor.itcount(), 9 ); //one already seen - - - - diff --git 
a/jstests/drop_index.js b/jstests/drop_index.js deleted file mode 100644 index 8e2278d00c5..00000000000 --- a/jstests/drop_index.js +++ /dev/null @@ -1,20 +0,0 @@ - -t = db.dropIndex; -t.drop(); - -t.insert( { _id : 1 , a : 2 , b : 3 } ); -assert.eq( 1 , t.getIndexes().length , "A1" ); - -t.ensureIndex( { a : 1 } ); -t.ensureIndex( { b : 1 } ); -assert.eq( 3 , t.getIndexes().length , "A2" ); - -x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } ); -assert.eq( 2 , t.getIndexes().length , "B1 " + tojson(x) ); - -x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } ) -assert.eq( 1 , t.getIndexes().length , "B2" ); - -// ensure you can recreate indexes, even if you don't use dropIndex method -t.ensureIndex({a:1}); -assert.eq(2 , t.getIndexes().length); diff --git a/jstests/dropdb.js b/jstests/dropdb.js deleted file mode 100644 index 58e3dd9fdaa..00000000000 --- a/jstests/dropdb.js +++ /dev/null @@ -1,26 +0,0 @@ -// Test that a db does not exist after it is dropped. -// Disabled in the small oplog suite because the slave may create a master db -// with the same name as the dropped db when requesting a clone. - -m = db.getMongo(); -baseName = "jstests_dropdb"; -ddb = db.getSisterDB( baseName ); - -print("initial dbs: " + tojson(m.getDBNames())); - -function check(shouldExist) { - var dbs = m.getDBNames(); - assert.eq(Array.contains(dbs, baseName), shouldExist, - "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist." 
- + " dbs: " + tojson(dbs) + "\n" + tojson( m.getDBs() ) ); -} - -ddb.c.save( {} ); -ddb.getLastError(); -check(true); - -ddb.dropDatabase(); -check(false); - -ddb.dropDatabase(); -check(false); diff --git a/jstests/dropdb_race.js b/jstests/dropdb_race.js deleted file mode 100644 index bff7980011a..00000000000 --- a/jstests/dropdb_race.js +++ /dev/null @@ -1,44 +0,0 @@ -// test dropping a db with simultaneous commits - -m = db.getMongo(); -baseName = "jstests_dur_droprace"; -d = db.getSisterDB(baseName); -t = d.foo; - -assert(d.adminCommand({ setParameter: 1, syncdelay: 5 }).ok); - -var s = 0; - -var start = new Date(); - -for (var pass = 0; pass < 100; pass++) { - if (pass % 2 == 0) { - // sometimes wait for create db first, to vary the timing of things - t.insert({}); - if( pass % 4 == 0 ) - d.runCommand({getLastError:1,j:1}); - else - d.getLastError(); - } - t.insert({ x: 1 }); - t.insert({ x: 3 }); - t.ensureIndex({ x: 1 }); - sleep(s); - if (pass % 37 == 0) - d.adminCommand("closeAllDatabases"); - else if (pass % 13 == 0) - t.drop(); - else if (pass % 17 == 0) - t.dropIndexes(); - else - d.dropDatabase(); - if (pass % 7 == 0) - d.runCommand({getLastError:1,j:1}); - d.getLastError(); - s = (s + 1) % 25; - //print(pass); - if ((new Date()) - start > 60000) { - print("stopping early"); - break; - } -} diff --git a/jstests/elemMatchProjection.js b/jstests/elemMatchProjection.js deleted file mode 100644 index 73088fab699..00000000000 --- a/jstests/elemMatchProjection.js +++ /dev/null @@ -1,265 +0,0 @@ -// Tests for $elemMatch projections and $ positional operator projection. 
-t = db.SERVER828Test; -t.drop(); - -date1 = new Date(); - -// Insert various styles of arrays -for ( i = 0; i < 100; i++ ) { - t.insert({ group: 1, x: [ 1, 2, 3, 4, 5 ] }); - t.insert({ group: 2, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ] }); - t.insert({ group: 3, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ], - y: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] }); - t.insert({ group: 3, x: [ { a: 1, b: 3 }, { a: -6, c: 3 } ] }); - t.insert({ group: 4, x: [ { a: 1, b: 4 }, { a: -6, c: 3 } ] }); - t.insert({ group: 5, x: [ new Date(), 5, 10, 'string', new ObjectId(), 123.456 ] }); - t.insert({ group: 6, x: [ { a: 'string', b: date1 }, - { a: new ObjectId(), b: 1.2345 }, - { a: 'string2', b: date1 } ] }); - t.insert({ group: 7, x: [ { y: [ 1, 2, 3, 4 ] } ] }); - t.insert({ group: 8, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] }); - t.insert({ group: 9, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] }, - { z: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] }); - t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], - y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); - t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], - y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); - t.insert({ group: 11, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ], - covered: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] }); - t.insert({ group: 12, x: { y : [ { a: 1, b: 1 }, { a: 1, b: 2} ] } } ); - t.insert({ group: 13, x: [ { a: 1, b: 1 }, {a: 1, b: 2 } ] } ); - t.insert({ group: 13, x: [ { a: 1, b: 2 }, {a: 1, b: 1 } ] } ); -} -t.ensureIndex({group:1, 'y.d':1}); // for regular index test (not sure if this is really adding anything useful) -t.ensureIndex({group:1, covered:1}); // for covered index test - -// -// SERVER-828: Positional operator ($) projection tests -// -assert.eq( 1, - t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).toArray()[0].x.length, - "single object match (array length match)" ); - -assert.eq( 2, - t.find( { group:3, 'x.a':1 }, { 'x.$':1 } 
).toArray()[0].x[0].b, - "single object match first" ); - -assert.eq( undefined, - t.find( { group:3, 'x.a':2 }, { _id:0, 'x.$':1 } ).toArray()[0]._id, - "single object match with filtered _id" ); - -assert.eq( 1, - t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { _id:1 } ).toArray()[0].x.length, - "sorted single object match with filtered _id (array length match)" ); - -assert.eq( 1, - t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':1 } ).toArray()[0].x.length, - "single object match with elemMatch" ); - -assert.eq( 1, - t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':1} } ).toArray()[0].x.length, - "single object match with elemMatch and positive slice" ); - -assert.eq( 1, - t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':-1} } ).toArray()[0].x.length, - "single object match with elemMatch and negative slice" ); - -assert.eq( 1, - t.find( { 'group':12, 'x.y.a':1 }, { 'x.y.$': 1 } ).toArray()[0].x.y.length, - "single object match with two level dot notation" ); - -assert.eq( 1, - t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { x:1 } ).toArray()[0].x.length, - "sorted object match (array length match)" ); - -assert.eq( { aa:1, dd:5 }, - t.find( { group:3, 'y.dd':5 }, { 'y.$':1 } ).toArray()[0].y[0], - "single object match (value match)" ); - -assert.throws( function() { - t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).toArray(); - }, [], "throw on invalid projection (field mismatch)" ); - -assert.throws( function() { - t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).sort( { x:1 } ).toArray() - }, [], "throw on invalid sorted projection (field mismatch)" ); - -assert.throws( function() {x - t.find( { group:3, 'x.a':2 }, { 'x.$':1, group:0 } ).sort( { x:1 } ).toArray(); - }, [], "throw on invalid projection combination (include and exclude)" ); - -assert.throws( function() { - t.find( { group:3, 'x.a':1, 'y.aa':1 }, { 'x.$':1, 'y.$':1 } ).toArray(); - }, [], "throw on 
multiple projections" ); - -assert.throws( function() { - t.find( { group:3}, { 'g.$':1 } ).toArray() - }, [], "throw on invalid projection (non-array field)" ); - -assert.eq( { aa:1, dd:5 }, - t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).toArray()[0].covered[0], - "single object match (covered index)" ); - -assert.eq( { aa:1, dd:5 }, - t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).sort( { covered:1 } ).toArray()[0].covered[0], - "single object match (sorted covered index)" ); - -assert.eq( 1, - t.find( { group:10, 'y.d': 4 }, { 'y.$':1 } ).toArray()[0].y.length, - "single object match (regular index" ); - -if (false) { - - assert.eq( 2, // SERVER-1013: allow multiple positional operators - t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].y[0].bb, - "multi match, multi proj 1" ); - - assert.eq( 5, // SSERVER-1013: allow multiple positional operators - t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].x[0].d, - "multi match, multi proj 2" ); - - assert.eq( 2, // SERVER-1243: allow multiple results from same matcher - t.find( { group:2, x: { $elemMatchAll: { a:1 } } }, { 'x.$':1 } ).toArray()[0].x.length, - "multi element match, single proj" ); - - assert.eq( 2, // SERVER-1013: multiple array matches with one prositional operator - t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1 } ).toArray()[0].y[0].bb, - "multi match, single proj 1" ); - - assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator - t.find( { group:3, 'y.cc':3, 'x.b':2 }, { 'x.$':1 } ).toArray()[0].x[0].b, - "multi match, single proj 2" ); - -} - -// -// SERVER-2238: $elemMatch projections -// -assert.eq( -6, - t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x[0].a, - "single object match" ); - -assert.eq( 1, - t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x.length, - "filters non-matching array elements" ); - -assert.eq( 1, - t.find( { group:4 }, { x: { 
$elemMatch: { a:-6, c:3 } } } ).toArray()[0].x.length, - "filters non-matching array elements with multiple elemMatch criteria" ); - -assert.eq( 1, - t.find( { group: 13 }, { 'x' : {'$elemMatch' : { a: {$gt: 0, $lt: 2} } } } ).toArray()[0].x.length, - "filters non-matching array elements with multiple criteria for a single element in the array" ); - -assert.eq( 3, - t.find( { group:4 }, { x: { $elemMatch: { a:{ $lt:1 } } } } ).toArray()[0].x[0].c, - "object operator match" ); - -assert.eq( [ 4 ], - t.find( { group:1 }, { x: { $elemMatch: { $in:[100, 4, -123] } } } ).toArray()[0].x, - "$in number match" ); - -assert.eq( [ {a : 1, b : 2} ], - t.find( { group:2 }, { x: { $elemMatch: { a: { $in:[1] } } } } ).toArray()[0].x, - "$in number match" ); - -assert.eq( [1], - t.find( { group:1 }, { x: { $elemMatch: { $nin:[4, 5, 6] } } } ).toArray()[0].x, - "$nin number match" ); - -// but this may become a user assertion, since a single element of an array can't match more than one value -assert.eq( [ 1], - t.find( { group:1 }, { x: { $elemMatch: { $all:[1] } } } ).toArray()[0].x, - "$in number match" ); - -assert.eq( [ { a: 'string', b: date1 } ], - t.find( { group:6 }, { x: { $elemMatch: { a:'string' } } } ).toArray()[0].x, - "mixed object match on string eq" ); - -assert.eq( [ { a: 'string2', b: date1 } ], - t.find( { group:6 }, { x: { $elemMatch: { a:/ring2/ } } } ).toArray()[0].x, - "mixed object match on regexp" ); - -assert.eq( [ { a: 'string', b: date1 } ], - t.find( { group:6 }, { x: { $elemMatch: { a: { $type: 2 } } } } ).toArray()[0].x, - "mixed object match on type" ); - -assert.eq( [ { a : 2, c : 3} ], - t.find( { group:2 }, { x: { $elemMatch: { a: { $ne: 1 } } } } ).toArray()[0].x, - "mixed object match on ne" ); - -assert.eq( [ {a : 1, d : 5} ], - t.find( { group:3 }, { x: { $elemMatch: { d: { $exists: true } } } } ).toArray()[0].x, - "mixed object match on exists" ); - -assert.eq( [ {a : 2, c : 3} ], - t.find( { group:3 }, { x: { $elemMatch: { a: { $mod : [2, 
0 ] } } } } ).toArray()[0].x, - "mixed object match on mod" ); - -assert.eq( {"x" : [ { "a" : 1, "b" : 2 } ], "y" : [ { "c" : 3, "d" : 4 } ] }, - t.find( { group:10 }, { _id : 0, - x: { $elemMatch: { a: 1 } }, - y: { $elemMatch: { c: 3 } } } ).toArray()[0], - "multiple $elemMatch on unique fields 1" ); - -if (false) { - - assert.eq( 2 , // SERVER-1243: handle multiple $elemMatch results - t.find( { group:4 }, { x: { $elemMatchAll: { a:{ $lte:2 } } } } ).toArray()[0].x.length, - "multi object match" ); - - assert.eq( 3 , // SERVER-1243: handle multiple $elemMatch results - t.find( { group:1 }, { x: { $elemMatchAll: { $in:[1, 2, 3] } } } ).toArray()[0].x.length, - "$in number match" ); - - assert.eq( 1 , // SERVER-1243: handle multiple $elemMatch results - t.find( { group:5 }, { x: { $elemMatchAll: { $ne: 5 } } } ).toArray()[0].x.length, - "single mixed type match 1" ); - - assert.eq( 1 , // SERVER-831: handle nested arrays - t.find( { group:9 }, { 'x.y': { $elemMatch: { a: 1 } } } ).toArray()[0].x.length, - "single dotted match" ); - -} - -// -// Batch/getMore tests -// -// test positional operator across multiple batches -a = t.find( { group:3, 'x.b':2 }, { 'x.$':1 } ).batchSize(1) -while ( a.hasNext() ) { - assert.eq( 2, a.next().x[0].b, "positional getMore test"); -} - -// test $elemMatch operator across multiple batches -a = t.find( { group:3 }, { x:{$elemMatch:{a:1}} } ).batchSize(1) -while ( a.hasNext() ) { - assert.eq( 1, a.next().x[0].a, "positional getMore test"); -} - -// verify the positional update operator matches the same element as the the positional find. this -// is to ensure consistent behavior with updates until SERVER-1013 is resolved, at which point the -// following tests should be updated. 
- -t.update({ group: 10, 'x.a': 3, 'y.c':1 }, { $set:{'x.$':100} }, false, true ); -// updated the wrong element, so the following assertions should be true -assert.eq( 100, - t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0], - "wrong single element match after update" ); - -assert.eq( 100, - t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0], - "wrong single element match after update" ); - -t.remove({ group: 10 }); -t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ], - y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] }); - -t.update({ group: 10, 'y.c':1, 'x.a': 3 }, { $set:{'x.$':100} }, false, true ); -// updated the correct element -assert.eq( 100, - t.find( { group:10, 'y.c':1 , x:100 }, { 'x.$':1 } ).toArray()[0].x[0], - "right single element match after update" ); -assert.eq( 100, - t.find( { group:10 , x:100 , 'y.c':1 }, { 'x.$':1 } ).toArray()[0].x[0], - "right single element match after update" ); diff --git a/jstests/error2.js b/jstests/error2.js deleted file mode 100644 index 8c27d6250e1..00000000000 --- a/jstests/error2.js +++ /dev/null @@ -1,21 +0,0 @@ -// Test that client gets stack trace on failed invoke - -f = db.jstests_error2; - -f.drop(); - -f.save( {a:1} ); - -assert.throws( - function(){ - c = f.find({$where : function(){ return a() }}); - c.next(); - } -); - -assert.throws( - function(){ - db.eval( function() { return a(); } ); - } -); - diff --git a/jstests/error5.js b/jstests/error5.js deleted file mode 100644 index 5884d20d8c1..00000000000 --- a/jstests/error5.js +++ /dev/null @@ -1,8 +0,0 @@ - -t = db.error5 -t.drop(); - -assert.throws( function(){ t.save( 4 ); printjson( t.findOne() ) } , null , "A" ); -t.save( { a : 1 } ) -assert.eq( 1 , t.count() , "B" ); - diff --git a/jstests/eval0.js b/jstests/eval0.js deleted file mode 100644 index 4375cace839..00000000000 --- a/jstests/eval0.js +++ /dev/null @@ -1,8 +0,0 @@ - -assert.eq( 17 , db.eval( function(){ return 11 + 6; } ) , "A" ); -assert.eq( 17 , 
db.eval( function( x ){ return 10 + x; } , 7 ) , "B" ); - -// check that functions in system.js work -db.system.js.insert({_id: "add", value: function(x,y){ return x + y;}}); -assert.eq( 20 , db.eval( "this.add(15, 5);" ) , "C" ); - diff --git a/jstests/eval1.js b/jstests/eval1.js deleted file mode 100644 index 4a5ca75f09b..00000000000 --- a/jstests/eval1.js +++ /dev/null @@ -1,17 +0,0 @@ - -t = db.eval1; -t.drop(); - -t.save( { _id : 1 , name : "eliot" } ); -t.save( { _id : 2 , name : "sara" } ); - -f = function(id){ - return db["eval1"].findOne( { _id : id } ).name; -} - - -assert.eq( "eliot" , f( 1 ) , "A" ); -assert.eq( "sara" , f( 2 ) , "B" ); -assert.eq( "eliot" , db.eval( f , 1 ) , "C" ); -assert.eq( "sara" , db.eval( f , 2 ) , "D" ); - diff --git a/jstests/eval2.js b/jstests/eval2.js deleted file mode 100644 index 6e39bb4a7bd..00000000000 --- a/jstests/eval2.js +++ /dev/null @@ -1,28 +0,0 @@ - -t = db.eval2; -t.drop(); -t.save({a:1}); -t.save({a:1}); - -var f = db.group( - { - ns: t.getName(), - key: { a:true}, - cond: { a:1 }, - reduce: function(obj,prev) { prev.csum++; } , - initial: { csum: 0} - } -); - -assert(f[0].a == 1 && f[0].csum == 2 , "on db" ); - -var f = t.group( - { - key: { a:true}, - cond: { a:1 }, - reduce: function(obj,prev) { prev.csum++; } , - initial: { csum: 0} - } -); - -assert(f[0].a == 1 && f[0].csum == 2 , "on coll" ); diff --git a/jstests/eval3.js b/jstests/eval3.js deleted file mode 100644 index 404d4d863b7..00000000000 --- a/jstests/eval3.js +++ /dev/null @@ -1,21 +0,0 @@ - -t = db.eval3; -t.drop(); - -t.save( { _id : 1 , name : "eliot" } ); -assert.eq( 1 , t.count() , "A" ); - -function z( a , b ){ - db.eval3.save( { _id : a , name : b } ); - return b; -} - -z( 2 , "sara" ); -assert.eq( 2 , t.count() , "B" ); - -assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() ); - -assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" ); -assert.eq( 3 , t.count() , "D" ); - -assert.eq( 
"eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() ); diff --git a/jstests/eval4.js b/jstests/eval4.js deleted file mode 100644 index 31d6ef0c2a8..00000000000 --- a/jstests/eval4.js +++ /dev/null @@ -1,23 +0,0 @@ - -t = db.eval4; -t.drop(); - -t.save( { a : 1 } ); -t.save( { a : 2 } ); -t.save( { a : 3 } ); - -assert.eq( 3 , t.count() , "A" ); - -function f( x ){ - db.eval4.remove( { a : x } ); -} - -f( 2 ); -assert.eq( 2 , t.count() , "B" ); - -db.eval( f , 2 ); -assert.eq( 2 , t.count() , "C" ); - -db.eval( f , 3 ); -assert.eq( 1 , t.count() , "D" ); - diff --git a/jstests/eval5.js b/jstests/eval5.js deleted file mode 100644 index a9223a555a6..00000000000 --- a/jstests/eval5.js +++ /dev/null @@ -1,23 +0,0 @@ - -t = db.eval5; -t.drop(); - -t.save( { a : 1 , b : 2 , c : 3 } ); - -assert.eq( 3 , - db.eval( - function(z){ - return db.eval5.find().toArray()[0].c; - } - ) , - "something weird A" - ); - -assert.isnull( - db.eval( - function(z){ - return db.eval5.find( {} , { a : 1 } ).toArray()[0].c; - } - ), - "field spec didn't work" - ); diff --git a/jstests/eval6.js b/jstests/eval6.js deleted file mode 100644 index 5fe096974c6..00000000000 --- a/jstests/eval6.js +++ /dev/null @@ -1,15 +0,0 @@ - -t = db.eval6; -t.drop(); - -t.save( { a : 1 } ); - -db.eval( - function(){ - o = db.eval6.findOne(); - o.b = 2; - db.eval6.save( o ); - } -); - -assert.eq( 2 , t.findOne().b ); diff --git a/jstests/eval7.js b/jstests/eval7.js deleted file mode 100644 index 45e06af276c..00000000000 --- a/jstests/eval7.js +++ /dev/null @@ -1,3 +0,0 @@ - -assert.eq( 6 , db.eval( "5 + 1" ) , "A" ) -assert.throws( function(z){ db.eval( "5 + function x; + 1" )} ); diff --git a/jstests/eval8.js b/jstests/eval8.js deleted file mode 100644 index 072a890e80a..00000000000 --- a/jstests/eval8.js +++ /dev/null @@ -1,19 +0,0 @@ - -t = db.eval8; -t.drop(); - -x = { a : 1 , b : 2 }; -t.save( x ); -x = t.findOne(); - -assert( x.a && x.b , "A" ); -delete x.b; - 
-assert( x.a && ! x.b , "B" ) -x.b = 3; -assert( x.a && x.b , "C" ); -assert.eq( 3 , x.b , "D" ); - -t.save( x ); -y = t.findOne(); -assert.eq( tojson( x ) , tojson( y ) , "E" ); diff --git a/jstests/eval9.js b/jstests/eval9.js deleted file mode 100644 index 9c6642901e4..00000000000 --- a/jstests/eval9.js +++ /dev/null @@ -1,22 +0,0 @@ - -a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ] - -for ( var i=0; i 0, 'C : ' + tojson( o ) ); -} -finally { - - db.setProfilingLevel(0); - db = stddb; -} diff --git a/jstests/evalc.js b/jstests/evalc.js deleted file mode 100644 index 0320ecd5133..00000000000 --- a/jstests/evalc.js +++ /dev/null @@ -1,25 +0,0 @@ -t = db.jstests_evalc; -t.drop(); - -t2 = db.evalc_done -t2.drop() - -for( i = 0; i < 10; ++i ) { - t.save( {i:i} ); -} - -// SERVER-1610 - -assert.eq( 0 , t2.count() , "X1" ) - -s = startParallelShell( "print( 'starting forked:' + Date() ); for ( i=0; i<50000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); " ) - -print( "starting eval: " + Date() ) -while ( true ) { - db.eval( "db.jstests_evalc.count( {i:10} );" ); - if ( t2.count() > 0 ) - break; -} -print( "end eval: " + Date() ) - -s(); diff --git a/jstests/evald.js b/jstests/evald.js deleted file mode 100644 index 77b1f42d52b..00000000000 --- a/jstests/evald.js +++ /dev/null @@ -1,98 +0,0 @@ -t = db.jstests_evald; -t.drop(); - -function debug( x ) { -// printjson( x ); -} - -for( i = 0; i < 10; ++i ) { - t.save( {i:i} ); -} -db.getLastError(); - -function op( ev, where ) { - p = db.currentOp().inprog; - debug( p ); - for ( var i in p ) { - var o = p[ i ]; - if ( where ) { - if ( o.active && o.query && o.query.query && o.query.query.$where && o.ns == "test.jstests_evald" ) { - return o.opid; - } - } else { - if ( o.active && o.query && o.query.$eval && o.query.$eval == ev ) { - return o.opid; - } - } - } - return -1; -} - -function doIt( ev, wait, where ) { - - if ( where ) { - s = startParallelShell( ev ); 
- } else { - s = startParallelShell( "db.eval( '" + ev + "' )" ); - } - - o = null; - assert.soon( function() { o = op( ev, where ); return o != -1 } ); - - if ( wait ) { - sleep( 2000 ); - } - - debug( "going to kill" ); - - db.killOp( o ); - - debug( "sent kill" ); - - s(); - -} - -// nested scope with nested invoke() -doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", true, true); -doIt("db.jstests_evald.count( { $where: function() { while(1) { sleep(1); } } } )", false, true); - -// simple tight loop tests with callback -doIt("while(1) { sleep(1); }", false); -doIt("while(1) { sleep(1); }", true); - -// simple tight loop tests without callback -doIt("while(1) {;}", false); -doIt("while(1) {;}", true); - -// the for loops are currently required, as a spawned op masks the parent op - see SERVER-1931 -doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", true); -doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count({i:10}); }", false); -doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", true); -doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count(); }", false); - -// try/catch with tight-loop kill tests. Catch testing is important -// due to v8::TerminateExecution internals. 
-// native callback with nested invoke(), drop JS exceptions -doIt("while(1) { " + - " for(var i = 0; i < 10000; ++i) {;} " + - " try { " + - " db.jstests_evald.count({i:10}); " + - " } catch (e) {} " + - "}", true ); - -// native callback, drop JS exceptions -doIt("while(1) { " + - " try { " + - " while(1) { " + - " sleep(1); " + - " } " + - " } catch (e) {} " + - "}", true ); - -// no native callback and drop JS exceptions -doIt("while(1) { " + - " try { " + - " while(1) {;} " + - " } catch (e) {} " + - "}", true ); diff --git a/jstests/evale.js b/jstests/evale.js deleted file mode 100644 index af5a303f167..00000000000 --- a/jstests/evale.js +++ /dev/null @@ -1,5 +0,0 @@ -t = db.jstests_evale; -t.drop(); - -db.eval( function() { return db.jstests_evale.count( { $where:function() { return true; } } ) } ); -db.eval( "db.jstests_evale.count( { $where:function() { return true; } } )" ); \ No newline at end of file diff --git a/jstests/evalf.js b/jstests/evalf.js deleted file mode 100644 index 01b7907ba93..00000000000 --- a/jstests/evalf.js +++ /dev/null @@ -1,27 +0,0 @@ -// test that killing a parent op interrupts the child op - -t = db.jstests_evalf; -t.drop(); - -//if ( typeof _threadInject == "undefined" ) { // don't run in v8 mode - SERVER-1900 - -// the code in eval must be under 512 chars because otherwise it's not displayed in curOp() -try { -db.eval( function() { - opid = null; - while( opid == null ) { - ops = db.currentOp().inprog; - for( i in ops ) { - o = ops[ i ]; - if ( o.active && o.query && o.query.$eval ) { opid = o.opid; } - }} - db.jstests_evalf.save( {"opid":opid} ); - db.jstests_evalf.count( { $where:function() { var id = db.jstests_evalf.findOne().opid; db.killOp( id ); while( 1 ) { ; } } } ); - } ); -} catch (ex) { - // exception is thrown in V8 when job gets killed. Does not seem like bad behavior. 
-} - -// make sure server and JS still work -db.eval( function() { db.jstests_evalf.count(); }); -//} diff --git a/jstests/exists.js b/jstests/exists.js deleted file mode 100644 index 3f1e904e52f..00000000000 --- a/jstests/exists.js +++ /dev/null @@ -1,49 +0,0 @@ -t = db.jstests_exists; -t.drop(); - -t.save( {} ); -t.save( {a:1} ); -t.save( {a:{b:1}} ); -t.save( {a:{b:{c:1}}} ); -t.save( {a:{b:{c:{d:null}}}} ); - -function dotest( n ){ - - assert.eq( 5, t.count() , n ); - assert.eq( 1, t.count( {a:null} ) , n ); - assert.eq( 2, t.count( {'a.b':null} ) , n ); - assert.eq( 3, t.count( {'a.b.c':null} ) , n ); - assert.eq( 5, t.count( {'a.b.c.d':null} ) , n ); - - assert.eq( 5, t.count() , n ); - assert.eq( 4, t.count( {a:{$ne:null}} ) , n ); - assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n ); - assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n ); - assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n ); - - assert.eq( 4, t.count( {a: {$exists:true}} ) , n ); - assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n ); - assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n ); - assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n ); - - assert.eq( 1, t.count( {a: {$exists:false}} ) , n ); - assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n ); - assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n ); - assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n ); -} - -dotest( "before index" ) -t.ensureIndex( { "a" : 1 } ) -t.ensureIndex( { "a.b" : 1 } ) -t.ensureIndex( { "a.b.c" : 1 } ) -t.ensureIndex( { "a.b.c.d" : 1 } ) -dotest( "after index" ) -assert.eq( 1, t.find( {a: {$exists:false}} ).hint( {a:1} ).itcount() ); - -t.drop(); - -t.save( {r:[{s:1}]} ); -assert( t.findOne( {'r.s':{$exists:true}} ) ); -assert( !t.findOne( {'r.s':{$exists:false}} ) ); -assert( !t.findOne( {'r.t':{$exists:true}} ) ); -assert( t.findOne( {'r.t':{$exists:false}} ) ); diff --git a/jstests/exists2.js b/jstests/exists2.js deleted file mode 100644 index e925c168f50..00000000000 --- 
a/jstests/exists2.js +++ /dev/null @@ -1,16 +0,0 @@ - -t = db.exists2; -t.drop(); - -t.save( { a : 1 , b : 1 } ) -t.save( { a : 1 , b : 1 , c : 1 } ) - -assert.eq( 2 , t.find().itcount() , "A1" ); -assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" ); -assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" ); -assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "A4" ); - -t.ensureIndex( { a : 1 , b : 1 , c : 1 } ) -assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" ); -assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "B2" ); - diff --git a/jstests/exists3.js b/jstests/exists3.js deleted file mode 100644 index 53a69d6c3bb..00000000000 --- a/jstests/exists3.js +++ /dev/null @@ -1,21 +0,0 @@ -// Check exists with non empty document, based on SERVER-2470 example. - -t = db.jstests_exists3; -t.drop(); - -t.insert({a: 1, b: 2}); - -assert.eq( 1, t.find({}).sort({c: -1}).itcount() ); -assert.eq( 1, t.count({c: {$exists: false}}) ); -assert.eq( 1, t.find({c: {$exists: false}}).itcount() ); -assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() ); - -// now we have an index on the sort key -t.ensureIndex({c: -1}) - -assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() ); -assert.eq( 1, t.find({c: {$exists: false}}).itcount() ); -// still ok without the $exists -assert.eq( 1, t.find({}).sort({c: -1}).itcount() ); -// and ok with a convoluted $not $exists -assert.eq( 1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount() ); diff --git a/jstests/exists4.js b/jstests/exists4.js deleted file mode 100644 index fb801ed62e9..00000000000 --- a/jstests/exists4.js +++ /dev/null @@ -1,20 +0,0 @@ -// Check various exists cases, based on SERVER-1735 example. 
- -t = db.jstests_exists4; -t.drop(); - -t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1}); -t.insert({ date: new Date("08/27/2010"), tot_visit: 100}); -t.insert({ date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77}); -t.insert({ date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23}); -t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "and...@spacca.org", tot_visit: 11}); -t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@gmail.com", tot_visit: 5}); -t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@progloedizioni.com", tot_visit: 7}); - -assert.eq( 6, t.find({date: new Date("08/27/2010")}).count() ); -assert.eq( 5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count() ); -assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count() ); -assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: null}).count() ); -assert.eq( 3, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}}).count() ); -assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: false}}).count() ); -assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null}).count() ); diff --git a/jstests/exists5.js b/jstests/exists5.js deleted file mode 100644 index a90a94f908f..00000000000 --- a/jstests/exists5.js +++ /dev/null @@ -1,33 +0,0 @@ -// Test some $not/$exists cases. 
- -t = db.jstests_exists5; -t.drop(); - -t.save( {a:1} ); -assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) ); -assert.eq( 1, t.count( {'c.d':{$not:{$exists:true}}} ) ); -assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) ); -assert.eq( 0, t.count( {'c.d':{$not:{$exists:false}}} ) ); - -t.drop(); -t.save( {a:{b:1}} ); -assert.eq( 1, t.count( {'a.b':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) ); -assert.eq( 0, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) ); - -t.drop(); -t.save( {a:[1]} ); -assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) ); -assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) ); - -t.drop(); -t.save( {a:[{b:1}]} ); -assert.eq( 1, t.count( {'a.b':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) ); -assert.eq( 0, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) ); diff --git a/jstests/exists6.js b/jstests/exists6.js deleted file mode 100644 index 2fa4ba85d49..00000000000 --- a/jstests/exists6.js +++ /dev/null @@ -1,79 +0,0 @@ -// SERVER-393 Test indexed matching with $exists. - -t = db.jstests_exists6; -t.drop(); - -t.ensureIndex( {b:1} ); -t.save( {} ); -t.save( {b:1} ); -t.save( {b:null} ); - -//--------------------------------- - -function checkIndexUse( query, usesIndex, index, bounds ) { - var x = t.find( query ).explain() - if ( usesIndex ) { - assert.eq( x.cursor.indexOf(index), 0 , tojson(x) ); - if ( ! 
x.indexBounds ) x.indexBounds = {} - assert.eq( bounds, x.indexBounds.b , tojson(x) ); - } - else { - assert.eq( 'BasicCursor', x.cursor, tojson(x) ); - } -} - -function checkExists( query, usesIndex, bounds ) { - checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds ); - // Whether we use an index or not, we will always scan all docs. - assert.eq( 3, t.find( query ).explain().nscanned ); - // 2 docs will match. - assert.eq( 2, t.find( query ).itcount() ); -} - -function checkMissing( query, usesIndex, bounds ) { - checkIndexUse( query, usesIndex, 'BtreeCursor b_1', bounds ); - // Nscanned changes based on index usage. - if ( usesIndex ) assert.eq( 2, t.find( query ).explain().nscanned ); - else assert.eq( 3, t.find( query ).explain().nscanned ); - // 1 doc is missing 'b'. - assert.eq( 1, t.find( query ).itcount() ); -} - -function checkExistsCompound( query, usesIndex, bounds ) { - checkIndexUse( query, usesIndex, 'BtreeCursor', bounds ); - if ( usesIndex ) assert.eq( 3, t.find( query ).explain().nscanned ); - else assert.eq( 3, t.find( query ).explain().nscanned ); - // 2 docs have a:1 and b:exists. - assert.eq( 2, t.find( query ).itcount() ); -} - -function checkMissingCompound( query, usesIndex, bounds ) { - checkIndexUse( query, usesIndex, 'BtreeCursor', bounds ); - // two possible indexes to use - // 1 doc should match - assert.eq( 1, t.find( query ).itcount() ); -} - -//--------------------------------- - -var allValues = [ [ { $minElement:1 }, { $maxElement:1 } ] ]; -var nullNull = [ [ null, null ] ]; - -// Basic cases -checkExists( {b:{$exists:true}}, true, allValues ); -// We change this to not -> not -> exists:true, and get allValue for bounds -// but we use a BasicCursor? -checkExists( {b:{$not:{$exists:false}}}, false, allValues ); -checkMissing( {b:{$exists:false}}, true, nullNull ); -checkMissing( {b:{$not:{$exists:true}}}, true, nullNull ); - -// Now check existence of second compound field. 
-t.ensureIndex( {a:1,b:1} ); -t.save( {a:1} ); -t.save( {a:1,b:1} ); -t.save( {a:1,b:null} ); - -checkExistsCompound( {a:1,b:{$exists:true}}, true, allValues ); -checkExistsCompound( {a:1,b:{$not:{$exists:false}}}, true, allValues ); -checkMissingCompound( {a:1,b:{$exists:false}}, true, nullNull ); -checkMissingCompound( {a:1,b:{$not:{$exists:true}}}, true, nullNull ); diff --git a/jstests/exists7.js b/jstests/exists7.js deleted file mode 100644 index 91fd589f30d..00000000000 --- a/jstests/exists7.js +++ /dev/null @@ -1,21 +0,0 @@ - -// Test that non boolean value types are allowed with $explain spec. SERVER-2322 - -t = db.jstests_explain7; -t.drop(); - -function testIntegerExistsSpec() { - t.remove({}); - t.save( {} ); - t.save( {a:1} ); - t.save( {a:2} ); - t.save( {a:3, b:3} ); - t.save( {a:4, b:4} ); - - assert.eq( 2, t.count( {b:{$exists:1}} ) ); - assert.eq( 3, t.count( {b:{$exists:0}} ) ); -} - -testIntegerExistsSpec(); -t.ensureIndex( {b:1} ); -testIntegerExistsSpec(); diff --git a/jstests/exists8.js b/jstests/exists8.js deleted file mode 100644 index ca62ebeb9ab..00000000000 --- a/jstests/exists8.js +++ /dev/null @@ -1,76 +0,0 @@ -// Test $exists with array element field names SERVER-2897 - -t = db.jstests_exists8; -t.drop(); - -t.save( {a:[1]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.1':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.1':{$exists:true}} ) ); - -t.remove({}); -t.save( {a:[1,2]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.1':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); -assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); - -t.remove({}); -t.save( {a:[{}]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.1':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.1':{$exists:true}} ) ); - 
-t.remove({}); -t.save( {a:[{},{}]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.1':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.0':{$exists:false}} ) ); -assert.eq( 1, t.count( {'a.1':{$exists:true}} ) ); - -t.remove({}); -t.save( {a:[{'b':2},{'a':1}]} ); -assert.eq( 1, t.count( {'a.a':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.1.a':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.0.a':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[1]]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.0.0':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.0.0':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.0.0.0':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.0.0.0':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[[1]]]} ); -assert.eq( 1, t.count( {'a.0.0.0':{$exists:true}} ) ); - -t.remove({}); -t.save( {a:[[{b:1}]]} ); -assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 1, t.count( {'a.0.b':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.0.b':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[],[{b:1}]]} ); -assert.eq( 0, t.count( {'a.0.b':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.0.b':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[],[{b:1}]]} ); -assert.eq( 1, t.count( {'a.1.b':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.1.b':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[],[{b:1}]]} ); -assert.eq( 1, t.count( {'a.1.0.b':{$exists:true}} ) ); -assert.eq( 0, t.count( {'a.1.0.b':{$exists:false}} ) ); - -t.remove({}); -t.save( {a:[[],[{b:1}]]} ); -assert.eq( 0, t.count( {'a.1.1.b':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.1.1.b':{$exists:false}} ) ); diff --git a/jstests/exists9.js b/jstests/exists9.js deleted file mode 100644 index 66378d1b424..00000000000 --- a/jstests/exists9.js +++ /dev/null @@ -1,41 +0,0 @@ -// SERVER-393 Test exists with various empty array and empty object cases. 
- -t = db.jstests_exists9; -t.drop(); - -// Check existence of missing nested field. -t.save( {a:{}} ); -assert.eq( 1, t.count( {'a.b':{$exists:false}} ) ); -assert.eq( 0, t.count( {'a.b':{$exists:true}} ) ); - -// With index. -t.ensureIndex( {'a.b':1} ); -assert.eq( 1, t.find( {'a.b':{$exists:false}} ).hint( {'a.b':1} ).itcount() ); -assert.eq( 0, t.find( {'a.b':{$exists:true}} ).hint( {'a.b':1} ).itcount() ); - -t.drop(); - -// Check that an empty array 'exists'. -t.save( {} ); -t.save( {a:[]} ); -assert.eq( 1, t.count( {a:{$exists:true}} ) ); -assert.eq( 1, t.count( {a:{$exists:false}} ) ); - -// With index. -t.ensureIndex( {a:1} ); -assert.eq( 1, t.find( {a:{$exists:true}} ).hint( {a:1} ).itcount() ); -assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).itcount() ); -assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).explain().nscanned ); - -t.drop(); - -// Check that an indexed field within an empty array does not exist. -t.save( {a:{'0':1}} ); -t.save( {a:[]} ); -assert.eq( 1, t.count( {'a.0':{$exists:true}} ) ); -assert.eq( 1, t.count( {'a.0':{$exists:false}} ) ); - -// With index. -t.ensureIndex( {'a.0':1} ); -assert.eq( 1, t.find( {'a.0':{$exists:true}} ).hint( {'a.0':1} ).itcount() ); -assert.eq( 1, t.find( {'a.0':{$exists:false}} ).hint( {'a.0':1} ).itcount() ); diff --git a/jstests/existsa.js b/jstests/existsa.js deleted file mode 100644 index 9ef7e9f374c..00000000000 --- a/jstests/existsa.js +++ /dev/null @@ -1,114 +0,0 @@ -// Sparse indexes are disallowed for $exists:false queries. SERVER-3918 - -t = db.jstests_existsa; -t.drop(); - -t.save( {} ); -t.save( { a:1 } ); -t.save( { a:{ x:1 }, b:1 } ); - -/** Configure testing of an index { :1 }. */ -function setIndex( _indexKeyField ) { - indexKeyField = _indexKeyField; - indexKeySpec = {}; - indexKeySpec[ indexKeyField ] = 1; - t.ensureIndex( indexKeySpec, { sparse:true } ); - indexCursorName = 'BtreeCursor ' + indexKeyField + '_1'; -} -setIndex( 'a' ); - -/** Validate the prefix of 'str'. 
*/ -function assertPrefix( prefix, str ) { - assert.eq( prefix, str.substring( 0, prefix.length ) ); -} - -/** @return count when hinting the index to use. */ -function hintedCount( query ) { - assertPrefix( indexCursorName, t.find( query ).hint( indexKeySpec ).explain().cursor ); - return t.find( query ).hint( indexKeySpec ).itcount(); -} - -/** The query field does not exist and the sparse index is not used without a hint. */ -function assertMissing( query, expectedMissing, expectedIndexedMissing ) { - expectedMissing = expectedMissing || 1; - expectedIndexedMissing = expectedIndexedMissing || 0; - assert.eq( expectedMissing, t.count( query ) ); - assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); - // We also shouldn't get a different count depending on whether - // an index is used or not. - assert.eq( expectedIndexedMissing, hintedCount( query ) ); -} - -/** The query field exists and the sparse index is used without a hint. */ -function assertExists( query, expectedExists ) { - expectedExists = expectedExists || 2; - assert.eq( expectedExists, t.count( query ) ); - assert.eq( 0, t.find( query ).explain().cursor.indexOf('BtreeCursor') ); - // An $exists:true predicate generates no index filters. Add another predicate on the index key - // to trigger use of the index. - andClause = {} - andClause[ indexKeyField ] = { $ne:null }; - Object.extend( query, { $and:[ andClause ] } ); - assert.eq( expectedExists, t.count( query ) ); - assertPrefix( indexCursorName, t.find( query ).explain().cursor ); - assert.eq( expectedExists, hintedCount( query ) ); -} - -/** The query field exists and the sparse index is not used without a hint. */ -function assertExistsUnindexed( query, expectedExists ) { - expectedExists = expectedExists || 2; - assert.eq( expectedExists, t.count( query ) ); - assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); - // Even with another predicate on the index key, the sparse index is disallowed. 
- andClause = {} - andClause[ indexKeyField ] = { $ne:null }; - Object.extend( query, { $and:[ andClause ] } ); - assert.eq( expectedExists, t.count( query ) ); - assert.eq( 'BasicCursor', t.find( query ).explain().cursor ); - assert.eq( expectedExists, hintedCount( query ) ); -} - -// $exists:false queries match the proper number of documents and disallow the sparse index. -assertMissing( { a:{ $exists:false } } ); -assertMissing( { a:{ $not:{ $exists:true } } } ); -assertMissing( { $and:[ { a:{ $exists:false } } ] } ); -assertMissing( { $or:[ { a:{ $exists:false } } ] } ); -assertMissing( { $nor:[ { a:{ $exists:true } } ] } ); -assertMissing( { 'a.x':{ $exists:false } }, 2, 1 ); - -// Currently a sparse index is disallowed even if the $exists:false query is on a different field. -assertMissing( { b:{ $exists:false } }, 2, 1 ); -assertMissing( { b:{ $exists:false }, a:{ $ne:6 } }, 2, 1 ); -assertMissing( { b:{ $not:{ $exists:true } } }, 2, 1 ); - -// Top level $exists:true queries match the proper number of documents -// and use the sparse index on { a : 1 }. -assertExists( { a:{ $exists:true } } ); - -// Nested $exists queries match the proper number of documents and disallow the sparse index. -assertExistsUnindexed( { $nor:[ { a:{ $exists:false } } ] } ); -assertExistsUnindexed( { $nor:[ { 'a.x':{ $exists:false } } ] }, 1 ); -assertExistsUnindexed( { a:{ $not:{ $exists:false } } } ); - -// Nested $exists queries disallow the sparse index in some cases where it is not strictly -// necessary to do so. (Descriptive tests.) -assertExistsUnindexed( { $nor:[ { b:{ $exists:false } } ] }, 1 ); // Unindexed field. -assertExists( { $or:[ { a:{ $exists:true } } ] } ); // $exists:true not $exists:false. - -// Behavior is similar with $elemMatch. 
-t.drop(); -t.save( { a:[ {} ] } ); -t.save( { a:[ { b:1 } ] } ); -t.save( { a:[ { b:1 } ] } ); -setIndex( 'a.b' ); - -assertMissing( { a:{ $elemMatch:{ b:{ $exists:false } } } } ); -// A $elemMatch predicate is treated as nested, and the index should be used for $exists:true. -assertExists( { a:{ $elemMatch:{ b:{ $exists:true } } } } ); - -// A non sparse index will not be disallowed. -t.drop(); -t.save( {} ); -t.ensureIndex( { a:1 } ); -assert.eq( 1, t.find( { a:{ $exists:false } } ).itcount() ); -assert.eq( 'BtreeCursor a_1', t.find( { a:{ $exists:false } } ).explain().cursor ); diff --git a/jstests/existsb.js b/jstests/existsb.js deleted file mode 100644 index a212be145c0..00000000000 --- a/jstests/existsb.js +++ /dev/null @@ -1,76 +0,0 @@ -// Tests for $exists against documents that store a null value -// -// A document with a missing value for an indexed field -// is indexed *as if* it had the value 'null' explicitly. -// Therefore: -// { b : 1 } -// { a : null, b : 1 } -// look identical based on a standard index on { a : 1 }. -// -// -- HOWEVER!! -- -// A sparse index on { a : 1 } would include { a : null, b : 1 }, -// but would not include { b : 1 }. In this case, the two documents -// are treated equally. -// -// Also, super special edge case around sparse, compound indexes -// from Mathias: -// If we have a sparse index on { a : 1, b : 1 } -// And we insert docs {}, { a : 1 }, -// { b : 1 }, and { a : 1, b : 1 } -// everything but {} will have an index entry. -// Let's make sure we handle this properly! 
- -t = db.jstests_existsb; -t.drop(); - -t.save( {} ); -t.save( { a: 1 } ); -t.save( { b: 1 } ); -t.save( { a: 1, b: null } ); -t.save( { a: 1, b: 1 } ); - -/** run a series of checks, just on the number of docs found */ -function checkExistsNull() { - // Basic cases - assert.eq( 3, t.count({ a:{ $exists: true }}) ); - assert.eq( 2, t.count({ a:{ $exists: false }}) ); - assert.eq( 3, t.count({ b:{ $exists: true }}) ); - assert.eq( 2, t.count({ b:{ $exists: false }}) ); - // With negations - assert.eq( 3, t.count({ a:{ $not:{ $exists: false }}}) ); - assert.eq( 2, t.count({ a:{ $not:{ $exists: true }}}) ); - assert.eq( 3, t.count({ b:{ $not:{ $exists: false }}}) ); - assert.eq( 2, t.count({ b:{ $not:{ $exists: true }}}) ); - // Both fields - assert.eq( 2, t.count({ a:1, b: { $exists: true }}) ); - assert.eq( 1, t.count({ a:1, b: { $exists: false }}) ); - assert.eq( 1, t.count({ a:{ $exists: true }, b:1}) ); - assert.eq( 1, t.count({ a:{ $exists: false }, b:1}) ); - // Both fields, both $exists - assert.eq( 2, t.count({ a:{ $exists: true }, b:{ $exists: true }}) ); - assert.eq( 1, t.count({ a:{ $exists: true }, b:{ $exists: false }}) ); - assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: true }}) ); - assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: false }}) ); -} - -// with no index, make sure we get correct results -checkExistsNull(); - -// try with a standard index -t.ensureIndex({ a : 1 }); -checkExistsNull(); - -// try with a sparse index -t.dropIndexes(); -t.ensureIndex({ a : 1 }, { sparse:true }); -checkExistsNull(); - -// try with a compound index -t.dropIndexes(); -t.ensureIndex({ a : 1, b : 1 }); -checkExistsNull(); - -// try with sparse compound index -t.dropIndexes(); -t.ensureIndex({ a : 1, b : 1 }, { sparse:true }); -checkExistsNull(); diff --git a/jstests/explain1.js b/jstests/explain1.js deleted file mode 100644 index 4c92b102e38..00000000000 --- a/jstests/explain1.js +++ /dev/null @@ -1,48 +0,0 @@ - -t = db.explain1; -t.drop(); - 
-for ( var i=0; i<100; i++ ){ - t.save( { x : i } ); -} - -q = { x : { $gt : 50 } }; - -assert.eq( 49 , t.find( q ).count() , "A" ); -assert.eq( 49 , t.find( q ).itcount() , "B" ); -assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" ); - -t.ensureIndex( { x : 1 } ); - -assert.eq( 49 , t.find( q ).count() , "D" ); -assert.eq( 49 , t.find( q ).itcount() , "E" ); -assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" ); - -assert.eq( 49 , t.find(q).explain().n , "G" ); -assert.eq( 20 , t.find(q).limit(20).explain().n , "H" ); -assert.eq( 20 , t.find(q).limit(-20).explain().n , "I" ); -assert.eq( 49 , t.find(q).batchSize(20).explain().n , "J" ); - -// verbose explain output with stats -// display index bounds - -var explainGt = t.find({x: {$gt: 5}}).explain(true); -var boundsVerboseGt = explainGt.stats.children[0].boundsVerbose; - -print('explain stats for $gt = ' + tojson(explainGt.stats)); - -var explainGte = t.find({x: {$gte: 5}}).explain(true); -var boundsVerboseGte = explainGte.stats.children[0].boundsVerbose; - -print('explain stats for $gte = ' + tojson(explainGte.stats)); - -print('index bounds for $gt = ' + tojson(explainGt.indexBounds)); -print('index bounds for $gte = ' + tojson(explainGte.indexBounds)); - -print('verbose bounds for $gt = ' + tojson(boundsVerboseGt)); -print('verbose bounds for $gte = ' + tojson(boundsVerboseGte)); - -// Since the verbose bounds are opaque, all we try to confirm is that the -// verbose bounds for $gt is different from those generated for $gte. 
-assert.neq(boundsVerboseGt, boundsVerboseGte, - 'verbose bounds for $gt and $gte should not be the same'); diff --git a/jstests/explain2.js b/jstests/explain2.js deleted file mode 100644 index b70ffdc0b1e..00000000000 --- a/jstests/explain2.js +++ /dev/null @@ -1,27 +0,0 @@ - -t = db.explain2 -t.drop(); - -t.ensureIndex( { a : 1 , b : 1 } ); - -for ( i=1; i<10; i++ ){ - t.insert( { _id : i , a : i , b : i , c : i } ); -} - -function go( q , c , b , o ){ - var e = t.find( q ).hint( {a:1,b:1} ).explain(); - assert.eq( c , e.n , "count " + tojson( q ) ) - assert.eq( b , e.nscanned , "nscanned " + tojson( q ) ) - assert.eq( o , e.nscannedObjects , "nscannedObjects " + tojson( q ) ) -} - -q = { a : { $gt : 3 } } -go( q , 6 , 6 , 6 ); - -q.b = 5 -go( q , 1 , 6 , 1 ); - -delete q.b -q.c = 5 -go( q , 1 , 6 , 6 ); - diff --git a/jstests/explain3.js b/jstests/explain3.js deleted file mode 100644 index 69dcac531b9..00000000000 --- a/jstests/explain3.js +++ /dev/null @@ -1,24 +0,0 @@ -/** SERVER-2451 Kill cursor while explain is yielding */ - -t = db.jstests_explain3; -t.drop(); - -t.ensureIndex( {i:1} ); -for( var i = 0; i < 10000; ++i ) { - t.save( {i:i,j:0} ); -} -db.getLastError(); - -s = startParallelShell( "sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );" ); - -try { - t.find( {i:{$gt:-1},j:1} ).hint( {i:1} ).explain() -} catch (e) { - print( "got exception" ); - printjson( e ); -} - -s(); - -// Sanity check to make sure mongod didn't seg fault. -assert.eq( 10000, t.count() ); \ No newline at end of file diff --git a/jstests/explain4.js b/jstests/explain4.js deleted file mode 100644 index d6d3d818a72..00000000000 --- a/jstests/explain4.js +++ /dev/null @@ -1,68 +0,0 @@ -// Basic validation of explain output fields. - -t = db.jstests_explain4; -t.drop(); - -function checkField( explain, name, value ) { - assert( explain.hasOwnProperty( name ) ); - if ( value != null ) { - assert.eq( value, explain[ name ], name ); - // Check that the value is of the expected type. 
SERVER-5288 - assert.eq( typeof( value ), typeof( explain[ name ] ), 'type ' + name ); - } -} - -function checkNonCursorPlanFields( explain, matches, n ) { - checkField( explain, "n", n ); - checkField( explain, "nscannedObjects", matches ); - checkField( explain, "nscanned", matches ); -} - -function checkPlanFields( explain, matches, n ) { - checkField( explain, "cursor", "BasicCursor" ); - // index related fields do not appear in non-indexed plan - assert(!("indexBounds" in explain)); - checkNonCursorPlanFields( explain, matches, n ); -} - -function checkFields( matches, sort, limit ) { - cursor = t.find(); - if ( sort ) { - print("sort is {a:1}"); - cursor.sort({a:1}); - } - if ( limit ) { - print("limit = " + limit); - cursor.limit( limit ); - } - explain = cursor.explain( true ); - printjson( explain ); - checkPlanFields( explain, matches, matches > 0 ? 1 : 0 ); - checkField( explain, "scanAndOrder", sort ); - checkField( explain, "millis" ); - checkField( explain, "nYields" ); - checkField( explain, "nChunkSkips", 0 ); - checkField( explain, "isMultiKey", false ); - checkField( explain, "indexOnly", false ); - checkField( explain, "server" ); - checkField( explain, "allPlans" ); - explain.allPlans.forEach( function( x ) { checkPlanFields( x, matches, matches ); } ); -} - -checkFields( 0, false ); - -// If there's nothing in the collection, there's no point in verifying that a sort -// is done. -// checkFields( 0, true ); - -t.save( {} ); -checkFields( 1, false ); -checkFields( 1, true ); - -t.save( {} ); -checkFields( 1, false, 1 ); - -// Check basic fields with multiple clauses. -t.save( { _id:0 } ); -explain = t.find( { $or:[ { _id:0 }, { _id:1 } ] } ).explain( true ); -checkNonCursorPlanFields( explain, 1, 1 ); diff --git a/jstests/explain5.js b/jstests/explain5.js deleted file mode 100644 index a90f0726317..00000000000 --- a/jstests/explain5.js +++ /dev/null @@ -1,38 +0,0 @@ -// Check that the explain result count does proper deduping. 
- -t = db.jstests_explain5; -t.drop(); - -t.ensureIndex( {a:1} ); -t.ensureIndex( {b:1} ); - -t.save( {a:[1,2,3],b:[4,5,6]} ); -for( i = 0; i < 10; ++i ) { - t.save( {} ); -} - -// Check with a single in order plan. - -explain = t.find( {a:{$gt:0}} ).explain( true ); -assert.eq( 1, explain.n ); -assert.eq( 1, explain.allPlans[ 0 ].n ); - -// Check with a single out of order plan. - -explain = t.find( {a:{$gt:0}} ).sort( {z:1} ).hint( {a:1} ).explain( true ); -assert.eq( 1, explain.n ); -assert.eq( 1, explain.allPlans[ 0 ].n ); - -// Check with multiple plans. - -explain = t.find( {a:{$gt:0},b:{$gt:0}} ).explain( true ); -assert.eq( 1, explain.n ); -assert.eq( 1, explain.allPlans[ 0 ].n ); -assert.eq( 1, explain.allPlans[ 1 ].n ); - -explain = t.find( {$or:[{a:{$gt:0},b:{$gt:0}},{a:{$gt:-1},b:{$gt:-1}}]} ).explain( true ); -assert.eq( 1, explain.n ); -// Check 'n' for every alternative query plan. -for (var i = 0; i < explain.allPlans.length; ++i) { - assert.eq( 1, explain.allPlans[i].n ); -} diff --git a/jstests/explain6.js b/jstests/explain6.js deleted file mode 100644 index 47d8d2fd731..00000000000 --- a/jstests/explain6.js +++ /dev/null @@ -1,25 +0,0 @@ -// Test explain result count when a skip parameter is used. - -t = db.jstests_explain6; -t.drop(); - -t.save( {} ); -explain = t.find().skip( 1 ).explain( true ); -assert.eq( 0, explain.n ); -// With only one plan, the skip information is known for the plan. This is an arbitrary -// implementation detail, but it changes the way n is calculated. -assert.eq( 0, explain.allPlans[ 0 ].n ); - -t.ensureIndex( {a:1} ); -explain = t.find( {a:null,b:null} ).skip( 1 ).explain( true ); -assert.eq( 0, explain.n ); - -printjson( explain ); -assert.eq( 0, explain.allPlans[ 0 ].n ); - -t.dropIndexes(); -explain = t.find().skip( 1 ).sort({a:1}).explain( true ); -// Skip is applied for an in memory sort. 
-assert.eq( 0, explain.n ); -printjson(explain); -assert.eq( 0, explain.allPlans[ 0 ].n ); diff --git a/jstests/explain7.js b/jstests/explain7.js deleted file mode 100644 index df277aaf211..00000000000 --- a/jstests/explain7.js +++ /dev/null @@ -1,181 +0,0 @@ -// Test cases for explain()'s nscannedObjects. SERVER-4161 - -t = db.jstests_explain7; -t.drop(); - -t.save( { a:1 } ); -t.ensureIndex( { a:1 } ); - -function assertExplain( expected, explain, checkAllPlans ) { - for( field in expected ) { - assert.eq( expected[ field ], explain[ field ], field ); - } - if ( checkAllPlans && explain.allPlans && explain.allPlans.length == 1 ) { - for( field in expected ) { - assert.eq( expected[ field ], explain.allPlans[ 0 ][ field ], field ); - } - } - return explain; -} - -function assertHintedExplain( expected, cursor ) { - return assertExplain( expected, cursor.hint( { a:1 } ).explain( true ), true ); -} - -function assertUnhintedExplain( expected, cursor, checkAllPlans ) { - return assertExplain( expected, cursor.explain( true ), checkAllPlans ); -} - -// Standard query. -assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, - t.find( { a:1 } ) ); - -// Covered index query. -assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 /* no object loaded */ }, - t.find( { a:1 }, { _id:0, a:1 } ) ); - -// Covered index query, but matching requires loading document. -assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, - t.find( { a:1, b:null }, { _id:0, a:1 } ) ); - -// $returnKey query. -assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0 }, - t.find( { a:1 } )._addSpecial( "$returnKey", true ) ); - -// $returnKey query but matching requires loading document. -assertHintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, - t.find( { a:1, b:null } )._addSpecial( "$returnKey", true ) ); - -// Skip a result. -assertHintedExplain( { n:0, nscanned:1, nscannedObjects:1 }, - t.find( { a:1 } ).skip( 1 ) ); - -// Cursor sorted covered index query. 
-assertHintedExplain( { n:1, nscanned:1, nscannedObjects:0, scanAndOrder:false }, - t.find( { a:1 }, { _id:0, a:1 } ).sort( { a:1 } ) ); - -t.dropIndex( { a:1 } ); -t.ensureIndex( { a:1, b:1 } ); - -// In memory sort covered index query. -assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1, scanAndOrder:true }, - t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) - .hint( { a:1, b:1 } ) ); - -// In memory sort $returnKey query. -assertUnhintedExplain( { n:1, nscanned:1, scanAndOrder:true }, - t.find( { a:{ $gt:0 } } )._addSpecial( "$returnKey", true ).sort( { b:1 } ) - .hint( { a:1, b:1 } ) ); - -// In memory sort with skip. -assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 /* The record is still loaded. */ }, - t.find( { a:{ $gt:0 } } ).sort( { b:1 } ).skip( 1 ).hint( { a:1, b:1 } ), - false ); - -// With a multikey index. -t.drop(); -t.ensureIndex( { a:1 } ); -t.save( { a:[ 1, 2 ] } ); - -assertHintedExplain( { n:1, scanAndOrder:false }, - t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ) ); -assertHintedExplain( { n:1, scanAndOrder:true }, - t.find( { a:{ $gt:0 } }, { _id:0, a:1 } ).sort( { b:1 } ) ); - -// Dedup matches from multiple query plans. -t.drop(); -t.ensureIndex( { a:1, b:1 } ); -t.ensureIndex( { b:1, a:1 } ); -t.save( { a:1, b:1 } ); - -// Document matched by three query plans. -assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, - t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ) ); - -// Document matched by three query plans, with sorting. -assertUnhintedExplain( { n:1, nscanned:1, nscannedObjects:1 }, - t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).sort( { c:1 } ) ); - -// Document matched by three query plans, with a skip. -assertUnhintedExplain( { n:0, nscanned:1, nscannedObjects:1 }, - t.find( { a:{ $gt:0 }, b:{ $gt:0 } } ).skip( 1 ) ); - -// Hybrid ordered and unordered plans. - -t.drop(); -t.ensureIndex( { a:1, b:1 } ); -t.ensureIndex( { b:1 } ); -for( i = 0; i < 30; ++i ) { - t.save( { a:i, b:i } ); -} - -// Ordered plan chosen. 
-assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:30, - scanAndOrder:false }, - t.find( { b:{ $gte:0 } } ).sort( { a:1 } ) ); - -// SERVER-12769: When an index is used to provide a sort, our covering -// analysis isn't good. This could execute as a covered query, but currently -// does not. -/* -// Ordered plan chosen with a covered index. -//assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:30, nscanned:30, nscannedObjects:0, - //scanAndOrder:false }, - //t.find( { b:{ $gte:0 } }, { _id:0, b:1 } ).sort( { a:1 } ) ); -*/ - -// Ordered plan chosen, with a skip. Skip is not included in counting nscannedObjects for a single -// plan. -assertUnhintedExplain( { cursor:'BtreeCursor a_1_b_1', n:29, nscanned:30, nscannedObjects:30, - scanAndOrder:false }, - t.find( { b:{ $gte:0 } } ).sort( { a:1 } ).skip( 1 ) ); - -// Unordered plan chosen. -assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, - //nscannedObjects:1, nscannedObjectsAllPlans:2, - scanAndOrder:true }, - t.find( { b:1 } ).sort( { a:1 } ) ); - -// Unordered plan chosen and projected. -assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1, - scanAndOrder:true }, - t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) ); - -// Unordered plan chosen, with a skip. -// Note that all plans are equally unproductive here, so we can't test which one is picked reliably. -assertUnhintedExplain( { n:0 }, - t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ).skip( 1 ) ); - -// Unordered plan chosen, $returnKey specified. -assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, scanAndOrder:true }, - t.find( { b:1 }, { _id:0, b:1 } ).sort( { a:1 } ) - ._addSpecial( "$returnKey", true ) ); - -// Unordered plan chosen, $returnKey specified, matching requires loading document. 
-assertUnhintedExplain( { cursor:'BtreeCursor b_1', n:1, nscanned:1, nscannedObjects:1, - scanAndOrder:true }, - t.find( { b:1, c:null }, { _id:0, b:1 } ).sort( { a:1 } ) - ._addSpecial( "$returnKey", true ) ); - -t.ensureIndex( { a:1, b:1, c:1 } ); - -// Documents matched by four query plans. -assertUnhintedExplain( { n:30, nscanned:30, nscannedObjects:30, - //nscannedObjectsAllPlans:90 // Not 120 because deduping occurs before - // loading results. - }, - t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).sort( { b:1 } ) ); - -for( i = 30; i < 150; ++i ) { - t.save( { a:i, b:i } ); -} - -explain = assertUnhintedExplain( { n:150}, - t.find( { $or:[ { a:{ $gte:-1, $lte:200 }, - b:{ $gte:0, $lte:201 } }, - { a:{ $gte:0, $lte:201 }, - b:{ $gte:-1, $lte:200 } } ] }, - { _id:0, a:1, b:1 } ).hint( { a:1, b:1 } ) ); -printjson(explain); -// Check nscannedObjects for each clause. -assert.eq( 0, explain.clauses[ 0 ].nscannedObjects ); diff --git a/jstests/explain8.js b/jstests/explain8.js deleted file mode 100644 index fde6adbd8f4..00000000000 --- a/jstests/explain8.js +++ /dev/null @@ -1,24 +0,0 @@ -// Test calculation of the 'millis' field in explain output. - -t = db.jstests_explain8; -t.drop(); - -t.ensureIndex( { a:1 } ); -for( i = 1000; i < 4000; i += 1000 ) { - t.save( { a:i } ); -} - -// Run a query with one $or clause per a-value, each of which sleeps for 'a' milliseconds. -function slow() { - sleep( this.a ); - return true; -} -clauses = []; -for( i = 1000; i < 4000; i += 1000 ) { - clauses.push( { a:i, $where:slow } ); -} -explain = t.find( { $or:clauses } ).explain( true ); -//printjson( explain ); - -// Verify the duration of the whole query, and of each clause. 
-assert.gt( explain.millis, 1000 - 500 + 2000 - 500 + 3000 - 500 ); diff --git a/jstests/explain9.js b/jstests/explain9.js deleted file mode 100644 index 80cab856aa7..00000000000 --- a/jstests/explain9.js +++ /dev/null @@ -1,24 +0,0 @@ -// Test that limit is applied by explain when there are both in order and out of order candidate -// plans. SERVER-4150 - -t = db.jstests_explain9; -t.drop(); - -t.ensureIndex( { a:1 } ); - -for( i = 0; i < 10; ++i ) { - t.save( { a:i, b:0 } ); -} - -explain = t.find( { a:{ $gte:0 }, b:0 } ).sort( { a:1 } ).limit( 5 ).explain( true ); -// Five results are expected, matching the limit spec. -assert.eq( 5, explain.n ); -explain.allPlans.forEach( function( x ) { - // Five results are expected for the in order plan. - if ( x.cursor == "BtreeCursor a_1" ) { - assert.eq( 5, x.n ); - } - else { - assert.gte( 5, x.n ); - } - } ); diff --git a/jstests/explain_batch_size.js b/jstests/explain_batch_size.js deleted file mode 100644 index 65bc1df40d7..00000000000 --- a/jstests/explain_batch_size.js +++ /dev/null @@ -1,19 +0,0 @@ -// minimal test to check handling of batch size when explain info is requested -// expected behavior is to return explain.n = total number of documents matching query -// batch size is also tested in another smoke test jstest/explain1.js but that test -// also covers the use of an indexed collection and includes a couple of test cases -// using limit() - -t = db.explain_batch_size; -t.drop(); - -n = 3 -for (i=0; i 110 ) { - cursor = makeCursor( query, {}, sort, batchSize, true ); - lastNonAIndexResult = -1; - for( i = 0; i < expectedLeftInBatch; ++i ) { - next = cursor.next(); - // Identify the query plan used by checking the fields of a returnKey query. - if ( !friendlyEqual( [ 'a', '_id' ], Object.keySet( next ) ) ) { - lastNonAIndexResult = i; - } - } - // The last results should come from the a,_id index. 
- assert.lt( lastNonAIndexResult, expectedLeftInBatch - 5 ); - } -} - -function queryWithPlanTypes( withDups ) { - t.drop(); - for( i = 1; i < numDocs; ++i ) { - t.save( { _id:i, a:i, b:0 } ); - } - if ( withDups ) { - t.save( { _id:0, a:[ 0, numDocs ], b:0 } ); // Add a dup on a:1 index. - } - else { - t.save( { _id:0, a:0, b:0 } ); - } - t.ensureIndex( { a:1, _id:1 } ); // Include _id for a covered index projection. - - // All plans in order. - checkCursorWithBatchSize( { a:{ $gte:0 } }, null, 150, 150 ); - - // All plans out of order. - checkCursorWithBatchSize( { a:{ $gte:0 } }, { c:1 }, null, 101 ); - - // Some plans in order, some out of order. - checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, 150, 150 ); - checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, null, 101 ); -} - -queryWithPlanTypes( false ); -queryWithPlanTypes( true ); diff --git a/jstests/fm1.js b/jstests/fm1.js deleted file mode 100644 index bc60a3d8911..00000000000 --- a/jstests/fm1.js +++ /dev/null @@ -1,12 +0,0 @@ - -t = db.fm1; -t.drop(); - -t.insert({foo:{bar:1}}) -t.find({},{foo:1}).toArray(); -t.find({},{'foo.bar':1}).toArray(); -t.find({},{'baz':1}).toArray(); -t.find({},{'baz.qux':1}).toArray(); -t.find({},{'foo.qux':1}).toArray(); - - diff --git a/jstests/fm2.js b/jstests/fm2.js deleted file mode 100644 index 00ccdf4afee..00000000000 --- a/jstests/fm2.js +++ /dev/null @@ -1,9 +0,0 @@ - -t = db.fm2 -t.drop(); - -t.insert( { "one" : { "two" : {"three":"four"} } } ); - -x = t.find({},{"one.two":1})[0] -assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" ); - diff --git a/jstests/fm3.js b/jstests/fm3.js deleted file mode 100644 index 8ccde6d5ab3..00000000000 --- a/jstests/fm3.js +++ /dev/null @@ -1,37 +0,0 @@ -t = db.fm3 -t.drop(); - -t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} ); - - -res = t.findOne({}, {a:1}); -assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a"); -assert.eq(res.b, undefined, "one b"); - -res = t.findOne({}, {a:0}); -assert.eq(res.a, 
undefined, "two a"); -assert.eq(res.b, 1, "two b"); - -res = t.findOne({}, {'a.d':1}); -assert.eq(res.a, [{}, {d:2}], "three a"); -assert.eq(res.b, undefined, "three b"); - -res = t.findOne({}, {'a.d':0}); -assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a"); -assert.eq(res.b, 1, "four b"); - -res = t.findOne({}, {'a.c':1}); -assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a"); -assert.eq(res.b, undefined, "five b"); - -res = t.findOne({}, {'a.c':0}); -assert.eq(res.a, [{}, {d:2}, 'z'], "six a"); -assert.eq(res.b, 1, "six b"); - -res = t.findOne({}, {'a.c.e':1}); -assert.eq(res.a, [{c:{e:1}}, {}], "seven a"); -assert.eq(res.b, undefined, "seven b"); - -res = t.findOne({}, {'a.c.e':0}); -assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a"); -assert.eq(res.b, 1, "eight b"); diff --git a/jstests/fm4.js b/jstests/fm4.js deleted file mode 100644 index 1ce947ad5e7..00000000000 --- a/jstests/fm4.js +++ /dev/null @@ -1,16 +0,0 @@ -t = db.fm4 -t.drop(); - -t.insert({_id:1, a:1, b:1}); - -assert.eq( t.findOne({}, {_id:1}), {_id:1}, 1) -assert.eq( t.findOne({}, {_id:0}), {a:1, b:1}, 2) - -assert.eq( t.findOne({}, {_id:1, a:1}), {_id:1, a:1}, 3) -assert.eq( t.findOne({}, {_id:0, a:1}), {a:1}, 4) - -assert.eq( t.findOne({}, {_id:0, a:0}), {b:1}, 6) -assert.eq( t.findOne({}, { a:0}), {_id:1, b:1}, 5) - -// not sure if we want to suport this since it is the same as above -//assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5) diff --git a/jstests/fsync.js b/jstests/fsync.js deleted file mode 100644 index 0cfece75c10..00000000000 --- a/jstests/fsync.js +++ /dev/null @@ -1,22 +0,0 @@ -// test the lock/unlock snapshotting feature a bit - -x=db.runCommand({fsync:1,lock:1}); // not on admin db -assert(!x.ok,"D"); - -x=db.fsyncLock(); // uses admin automatically - -assert(x.ok,"C"); - -y = db.currentOp(); -assert(y.fsyncLock,"B"); - -z = db.fsyncUnlock(); -assert( db.currentOp().fsyncLock == null, "A2" ); - -// make sure the db is unlocked -db.jstests_fsync.insert({x:1}); 
-db.getLastError(); - -assert( db.currentOp().fsyncLock == null, "A" ); - -assert( !db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail." ) diff --git a/jstests/fts1.js b/jstests/fts1.js deleted file mode 100644 index 6bd138d6c25..00000000000 --- a/jstests/fts1.js +++ /dev/null @@ -1,29 +0,0 @@ -load( "jstests/libs/fts.js" ); - -t = db.text1; -t.drop(); - -// this test requires usePowerOf2Sizes to be off -db.createCollection( t.getName(), {"usePowerOf2Sizes" : false } ); -assert.eq(0, t.stats().userFlags); - -assert.eq( [] , queryIDS( t , "az" ) , "A0" ); - -t.save( { _id : 1 , x : "az b c" } ); -t.save( { _id : 2 , x : "az b" } ); -t.save( { _id : 3 , x : "b c" } ); -t.save( { _id : 4 , x : "b c d" } ); - -assert.eq(t.stats().userFlags, 0, - "A new collection should not have power-of-2 storage allocation strategy"); -t.ensureIndex( { x : "text" } ); -assert.eq(t.stats().userFlags, 1, - "Creating a text index on a collection should change the allocation strategy " + - "to power-of-2."); - -assert.eq( [1,2,3,4] , queryIDS( t , "c az" ) , "A1" ); -assert.eq( [4] , queryIDS( t , "d" ) , "A2" ); - -idx = db.system.indexes.findOne( { ns: t.getFullName(), "weights.x" : 1 } ) -assert( idx.v >= 1, tojson( idx ) ) -assert( idx.textIndexVersion >= 1, tojson( idx ) ) diff --git a/jstests/fts2.js b/jstests/fts2.js deleted file mode 100644 index e0e7469fa5e..00000000000 --- a/jstests/fts2.js +++ /dev/null @@ -1,24 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -t = db.text2; -t.drop(); - -t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } ); -t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } ); - -t.ensureIndex( { x : "text" } , { weights : { x : 10 , y : 1 } } ); - -assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); -assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); - -assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); -assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); - -assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" ); -assert.eq( [1] , queryIDS( t , 
"d" , { z : 1 } ) , "B2" ); - -printjson(lastCommadResult); -assert.eq( 2 , lastCommadResult.stats.nscannedObjects , "B3" ); -assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); - diff --git a/jstests/fts3.js b/jstests/fts3.js deleted file mode 100644 index f5f72c4df0a..00000000000 --- a/jstests/fts3.js +++ /dev/null @@ -1,22 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -t = db.text3; -t.drop(); - -t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } ); -t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } ); - -t.ensureIndex( { x : "text" , z : 1 } , { weights : { x : 10 , y : 1 } } ); - -assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); -assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); - -assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); -assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); - -assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" ); -assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" ); - -assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); -assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/fts4.js b/jstests/fts4.js deleted file mode 100644 index 8598457b033..00000000000 --- a/jstests/fts4.js +++ /dev/null @@ -1,22 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -t = db.text4; -t.drop(); - -t.save( { _id : 1 , x : [ "az" , "b" , "x" ] , y : [ "c" , "d" , "m" ] , z : 1 } ); -t.save( { _id : 2 , x : [ "c" , "d" , "y" ] , y : [ "az" , "b" , "n" ] , z : 2 } ); - -t.ensureIndex( { y : "text" , z : 1 } , { weights : { x : 10 } } ); - -assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); -assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); - -assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); -assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); - -assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" ); -assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" ); - -assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); -assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/fts5.js 
b/jstests/fts5.js deleted file mode 100644 index a3097b47a4a..00000000000 --- a/jstests/fts5.js +++ /dev/null @@ -1,22 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -t = db.text5; -t.drop(); - -t.save( { _id: 1 , x: [ { a: "az" } , { a: "b" } , { a: "x" } ] , y: [ "c" , "d" , "m" ] , z: 1 } ); -t.save( { _id: 2 , x: [ { a: "c" } , { a: "d" } , { a: "y" } ] , y: [ "az" , "b" , "n" ] , z: 2 } ); - -t.ensureIndex( { y: "text" , z: 1 } , { weights: { "x.a": 10 } } ); - -assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" ); -assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" ); - -assert.eq( [1] , queryIDS( t , "x" ) , "A3" ); -assert.eq( [2] , queryIDS( t , "y" ) , "A4" ); - -assert.eq( [1] , queryIDS( t , "az" , { z: 1 } ) , "B1" ); -assert.eq( [1] , queryIDS( t , "d" , { z: 1 } ) , "B2" ); - -assert.eq( 0 , lastCommadResult.stats.nscannedObjects , "B3" ); -assert.eq( 2 , lastCommadResult.stats.nscanned , "B4" ); diff --git a/jstests/fts_blog.js b/jstests/fts_blog.js deleted file mode 100644 index 38cbb826eff..00000000000 --- a/jstests/fts_blog.js +++ /dev/null @@ -1,26 +0,0 @@ -t = db.text_blog; -t.drop(); - -t.save( { _id : 1 , title : "my blog post" , text : "this is a new blog i am writing. yay" } ); -t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am writing. yay" } ); -t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. 
yay" } ); - -// default weight is 1 -// specify weights if you want a field to be more meaningull -t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } ); - -res = t.runCommand( "text" , { search : "blog" } ) -assert.eq( 3, res.results.length ); -assert.eq( 1, res.results[0].obj._id ); - -res = t.runCommand( "text" , { search : "write" } ) -assert.eq( 3, res.results.length ); -assert.eq( res.results[0].score, res.results[1].score ); -assert.eq( res.results[0].score, res.results[2].score ); - - - - - - - diff --git a/jstests/fts_blogwild.js b/jstests/fts_blogwild.js deleted file mode 100644 index ecad0ce0b19..00000000000 --- a/jstests/fts_blogwild.js +++ /dev/null @@ -1,40 +0,0 @@ -t = db.text_blogwild; -t.drop(); - -t.save( { _id: 1 , title: "my blog post" , text: "this is a new blog i am writing. yay eliot" } ); -t.save( { _id: 2 , title: "my 2nd post" , text: "this is a new blog i am writing. yay" } ); -t.save( { _id: 3 , title: "knives are Fun for writing eliot" , text: "this is a new blog i am writing. 
yay" } ); - -// default weight is 1 -// specify weights if you want a field to be more meaningull -t.ensureIndex( { dummy: "text" } , { weights: "$**" } ); - -res = t.runCommand( "text" , { search: "blog" } ); -assert.eq( 3 , res.stats.n , "A1" ); - -res = t.runCommand( "text" , { search: "write" } ); -assert.eq( 3 , res.stats.n , "B1" ); - -// mixing -t.dropIndex( "dummy_text" ); -assert.eq( 1 , t.getIndexKeys().length , "C1" ); -t.ensureIndex( { dummy: "text" } , { weights: { "$**": 1 , title: 2 } } ); - - -res = t.runCommand( "text" , { search: "write" } ); -assert.eq( 3 , res.stats.n , "C2" ); -assert.eq( 3 , res.results[0].obj._id , "C3" ); - -res = t.runCommand( "text" , { search: "blog" } ); -assert.eq( 3 , res.stats.n , "D1" ); -assert.eq( 1 , res.results[0].obj._id , "D2" ); - -res = t.runCommand( "text" , { search: "eliot" } ); -assert.eq( 2 , res.stats.n , "E1" ); -assert.eq( 3 , res.results[0].obj._id , "E2" ); - - - - - - diff --git a/jstests/fts_enabled.js b/jstests/fts_enabled.js deleted file mode 100644 index 8617caff59f..00000000000 --- a/jstests/fts_enabled.js +++ /dev/null @@ -1,5 +0,0 @@ -// Test that the textSearchEnabled server parameter works correctly (now deprecated). - -// Value true is accepted, value false is rejected. -assert.commandWorked(db.adminCommand({setParameter: 1, textSearchEnabled: true})); -assert.commandFailed(db.adminCommand({setParameter: 1, textSearchEnabled: false})); diff --git a/jstests/fts_explain.js b/jstests/fts_explain.js deleted file mode 100644 index 0d9c1fd7a9d..00000000000 --- a/jstests/fts_explain.js +++ /dev/null @@ -1,18 +0,0 @@ -// Test $text explain. SERVER-12037. 
- -var coll = db.fts_explain; - -coll.drop(); -coll.ensureIndex({content: "text"}, {default_language: "none"}); -assert.gleSuccess(db); - -coll.insert({content: "some data"}); -assert.gleSuccess(db); - -var explain = coll.find({$text:{$search: "\"a\" -b -\"c\""}}).explain(true); -assert.eq(explain.cursor, "TextCursor"); -assert.eq(explain.stats.type, "TEXT"); -assert.eq(explain.stats.parsedTextQuery.terms, ["a"]); -assert.eq(explain.stats.parsedTextQuery.negatedTerms, ["b"]); -assert.eq(explain.stats.parsedTextQuery.phrases, ["a"]); -assert.eq(explain.stats.parsedTextQuery.negatedPhrases, ["c"]); diff --git a/jstests/fts_index.js b/jstests/fts_index.js deleted file mode 100644 index 480bfb05fa2..00000000000 --- a/jstests/fts_index.js +++ /dev/null @@ -1,138 +0,0 @@ -// Test that: -// 1. Text indexes properly validate the index spec used to create them. -// 2. Text indexes properly enforce a schema on the language_override field. -// 3. Collections may have at most one text index. -// 4. Text indexes properly handle large documents. - -var coll = db.fts_index; -var indexName = "textIndex"; -coll.drop(); -coll.getDB().createCollection(coll.getName()); - -// -// 1. Text indexes properly validate the index spec used to create them. -// - -// Spec passes text-specific index validation. -coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanish"}); -assert(!db.getLastError()); -assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); -coll.dropIndexes(); - -// Spec fails text-specific index validation ("spanglish" unrecognized). -coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanglish"}); -assert(db.getLastError()); -assert.eq(0, coll.system.indexes.count({ns: coll.getFullName(), name: indexName})); -coll.dropIndexes(); - -// Spec passes general index validation. 
-coll.ensureIndex({"$**": "text"}, {name: indexName}); -assert(!db.getLastError()); -assert.eq(1, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); -coll.dropIndexes(); - -// Spec fails general index validation ("a.$**" invalid field name for key). -coll.ensureIndex({"a.$**": "text"}, {name: indexName}); -assert(db.getLastError()); -assert.eq(0, coll.getDB().system.indexes.count({ns: coll.getFullName(), name: indexName})); -coll.dropIndexes(); - -// -// 2. Text indexes properly enforce a schema on the language_override field. -// - -// Can create a text index on a collection where no documents have invalid language_override. -coll.insert({a: ""}); -coll.insert({a: "", language: "spanish"}); -coll.ensureIndex({a: "text"}); -assert(!db.getLastError()); -coll.drop(); - -// Can't create a text index on a collection containing document with an invalid language_override. -coll.insert({a: "", language: "spanglish"}); -coll.ensureIndex({a: "text"}); -assert(db.getLastError()); -coll.drop(); - -// Can insert documents with valid language_override into text-indexed collection. -coll.ensureIndex({a: "text"}); -assert(!db.getLastError()); -coll.insert({a: ""}); -coll.insert({a: "", language: "spanish"}); -assert(!db.getLastError()); -coll.drop(); - -// Can't insert documents with invalid language_override into text-indexed collection. -coll.ensureIndex({a: "text"}); -assert(!db.getLastError()); -coll.insert({a: "", language: "spanglish"}); -assert(db.getLastError()); -coll.drop(); - -// -// 3. Collections may have at most one text index. -// - -coll.ensureIndex({a: 1, b: "text", c: 1}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); - -// ensureIndex() becomes a no-op on an equivalent index spec. 
-coll.ensureIndex({a: 1, b: "text", c: 1}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); -coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {b: 1}}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); -coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "english"}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); -coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 2}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); -coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "language"}); -assert(!db.getLastError()); -assert.eq(2, coll.getIndexes().length); - -// ensureIndex() fails if a second text index would be built. -coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {d: 1}}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text", c: 1}, {default_language: "none"}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text", c: 1}, {textIndexVersion: 1}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text", c: 1}, {language_override: "idioma"}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text", c: 1}, {weights: {d: 1}}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text", d: 1}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, d: "text", c: 1}); -assert(db.getLastError()); -coll.ensureIndex({b: "text"}); -assert(db.getLastError()); -coll.ensureIndex({b: "text", c: 1}); -assert(db.getLastError()); -coll.ensureIndex({a: 1, b: "text"}); -assert(db.getLastError()); - -coll.dropIndexes(); - -// -// 4. Text indexes properly handle large keys. 
-// - -coll.ensureIndex({a: "text"}); -assert(!db.getLastError()); - -var longstring = ""; -var longstring2 = ""; -for(var i = 0; i < 1024 * 1024; ++i) { - longstring = longstring + "a"; - longstring2 = longstring2 + "b"; -} -coll.insert({a: longstring}); -coll.insert({a: longstring2}); -assert.eq(1, coll.find({$text: {$search: longstring}}).itcount(), "long string not found in index"); - -coll.drop(); diff --git a/jstests/fts_mix.js b/jstests/fts_mix.js deleted file mode 100644 index 56da123cdc3..00000000000 --- a/jstests/fts_mix.js +++ /dev/null @@ -1,159 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -// test collection -tc = db.text_mix; -tc.drop(); - -// creation of collection documents -// content generated using wikipedia random article -tc.save( { _id: 1, title: "Olivia Shakespear",text: "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr." } ); -tc.save( { _id: 2, title: "Mahim Bora", text: "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty." 
} ); -tc.save( { _id: 3, title: "A break away!", text: "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s." } ); -tc.save( { _id: 4, title: "Linn-Kristin Riegelhuth Koren", text: "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse." } ); -tc.save( { _id: 5, title: "Morten Jensen", text: "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller." 
} ); -tc.save( { _id: 6, title: "Janet Laurence", text: "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize." } ); -tc.save( { _id: 7, title: "Glen-Coats Baronets", text: "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet." } ); -tc.save( { _id: 8, title: "Grapeleaf Skeletonizer", text: "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. 
It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin." } ); -tc.save( { _id: 9, title: "Physics World", text: "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter." } ); -tc.save( { _id: 10, title: "Mallacoota, Victoria", text: "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. 
It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia." } ); - -// begin tests - -// -------------------------------------------- INDEXING & WEIGHTING ------------------------------- - -// start with basic index, one item with default weight -tc.ensureIndex( { "title": "text" } ); - -// test the single result case.. 
-res = tc.runCommand( "text", { search: "Victoria" } ); -assert.eq( 1, res.results.length ); -assert.eq( 10, res.results[0].obj._id ); - -tc.dropIndexes(); - -// now let's see about multiple fields, with specific weighting -tc.ensureIndex( { "title": "text", "text": "text" }, { weights: { "title": 10 } } ); -assert.eq( [9,7,8], queryIDS( tc, "members physics" ) ); - -tc.dropIndexes(); - -// test all-1 weighting with "$**" -tc.ensureIndex( { "$**": "text" } ); -assert.eq( [2,8,7], queryIDS( tc, "family tea estate" ) ); - -tc.dropIndexes(); - -// non-1 weight on "$**" + other weight specified for some field -tc.ensureIndex( { "$**": "text" }, { weights: { "$**": 10, "text": 2 } } ); -assert.eq( [7,5], queryIDS( tc, "Olympic Games gold medal" ) ); - -tc.dropIndexes(); - -// -------------------------------------------- SEARCHING ------------------------------------------ - -// go back to "$**": 1, "title": 10.. and test more specific search functionality! -tc.ensureIndex( { "$**": "text" }, { weights: { "title": 10 } } ); - -// -------------------------------------------- STEMMING ------------------------------------------- - -// tests stemming for basic plural case -res = tc.runCommand( "text", { search: "member" } ); -res2 = tc.runCommand( "text", { search: "members" } ); -assert.eq( getIDS( res ), getIDS( res2 ) ); - -// search for something with potential 's bug. 
-res = tc.runCommand( "text", { search: "magazine's" } ); -res2 = tc.runCommand( "text", { search: "magazine" } ); -assert.eq( getIDS( res ), getIDS( res2 ) ); - -// -------------------------------------------- LANGUAGE ------------------------------------------- - -res = tc.runCommand( "text", { search: "member", language: "spanglish" } ); -assert.commandFailed( res ); -res = tc.runCommand( "text", { search: "member", language: "english" } ); -assert.commandWorked( res ); - -// -------------------------------------------- LIMIT RESULTS -------------------------------------- - -// ensure limit limits results -assert.eq( [2], queryIDS( tc, "rural river dam", null , { limit : 1 } ) ); - -// ensure top results are the same regardless of limit -// make sure that this uses a case where it wouldn't be otherwise.. -res = tc.runCommand( "text", { search: "united kingdom british princes", limit: 1 } ); -res2 = tc.runCommand( "text", { search: "united kingdom british princes" } ); -assert.eq( 1, res.results.length ); -assert.eq( 4, res2.results.length ); -assert.eq( res.results[0].obj._id, res2.results[0].obj._id ); - -// -------------------------------------------- PROJECTION ----------------------------------------- - -// test projection.. show just title and id -res = tc.runCommand( "text", { search: "Morten Jensen", project: { title: 1 } } ); -assert.eq( 1, res.results.length ); -assert.eq( 5, res.results[0].obj._id ); -assert.eq( null, res.results[0].obj.text ); -assert.neq( null, res.results[0].obj.title ); -assert.neq( null, res.results[0].obj._id ); - -// test negative projection, ie. 
show everything but text -res = tc.runCommand( "text", { search: "handball", project: { text: 0 } } ); -assert.eq( 1, res.results.length ); -assert.eq( 4, res.results[0].obj._id ); -assert.eq( null, res.results[0].obj.text ); -assert.neq( null, res.results[0].obj.title ); -assert.neq( null, res.results[0].obj._id ); - -// test projection only title, no id -res = tc.runCommand( "text", { search: "Mahim Bora", project: { _id: 0, title: 1 } } ); -assert.eq( 1, res.results.length ); -assert.eq( "Mahim Bora", res.results[0].obj.title ); -assert.eq( null, res.results[0].obj.text ); -assert.neq( null, res.results[0].obj.title ); -assert.eq( null, res.results[0].obj._id ); - -// -------------------------------------------- NEGATION ------------------------------------------- - -// test negation -assert.eq( [8], queryIDS( tc, "United -Kingdom" ) ); -assert.eq( -1, tc.findOne( { _id : 8 } ).text.search(/Kingdom/i) ); - -// test negation edge cases... hyphens, double dash, etc. -assert.eq( [4], queryIDS( tc, "Linn-Kristin" ) ); - -// -------------------------------------------- PHRASE MATCHING ------------------------------------ - -// test exact phrase matching on -assert.eq( [7], queryIDS( tc, "\"Summer Olympics\"" ) ); -assert.neq( -1, tc.findOne( { _id: 7 } ).text.indexOf("Summer Olympics") ); - -// phrasematch with other stuff.. negation, other terms, etc. 
-assert.eq( [10], queryIDS( tc, "\"wild flowers\" Sydney" ) ); - -assert.eq( [3], queryIDS( tc, "\"industry\" -Melbourne -Physics" ) ); - -// -------------------------------------------- EDGE CASES ----------------------------------------- - -// test empty string -res = tc.runCommand( "text", { search: "" } ); -assert.eq( 0, res.ok ) - -// test string with a space in it -res = tc.runCommand( "text", { search: " " } ); -assert.eq( 0, res.results.length ); - -// -------------------------------------------- FILTERING ------------------------------------------ - -assert.eq( [2], queryIDS( tc, "Mahim" ) ); -assert.eq( [2], queryIDS( tc, "Mahim", { _id: 2 } ) ); -assert.eq( [], queryIDS( tc, "Mahim", { _id: 1 } ) ); -assert.eq( [], queryIDS( tc, "Mahim", { _id: { $gte: 4 } } ) ); -assert.eq( [2], queryIDS( tc, "Mahim", { _id: { $lte: 4 } } ) ); - -// using regex conditional filtering -assert.eq( [9], queryIDS( tc, "members", { title: { $regex: /Phy.*/i } } ) ); - -// ------------------------------------------------------------------------------------------------- - -assert( tc.validate().valid ); diff --git a/jstests/fts_partition1.js b/jstests/fts_partition1.js deleted file mode 100644 index f1b4c437c3c..00000000000 --- a/jstests/fts_partition1.js +++ /dev/null @@ -1,23 +0,0 @@ -load( "jstests/libs/fts.js" ) - -t = db.text_parition1; -t.drop(); - -t.insert( { _id : 1 , x : 1 , y : "foo" } ); -t.insert( { _id : 2 , x : 1 , y : "bar" } ); -t.insert( { _id : 3 , x : 2 , y : "foo" } ); -t.insert( { _id : 4 , x : 2 , y : "bar" } ); - -t.ensureIndex( { x : 1, y : "text" } ); - -res = t.runCommand( "text", { search : "foo" } ); -assert.eq( 0, res.ok, tojson(res) ); - -assert.eq( [ 1 ], queryIDS( t, "foo" , { x : 1 } ) ); - -res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } } ); -assert( res.results[0].score > 0, tojson( res ) ) - -// repeat search with "language" specified, SERVER-8999 -res = t.runCommand( "text", { search : "foo" , filter : { x : 1 } , 
language : "english" } ); -assert( res.results[0].score > 0, tojson( res ) ) diff --git a/jstests/fts_partition_no_multikey.js b/jstests/fts_partition_no_multikey.js deleted file mode 100644 index a6320cc4a9e..00000000000 --- a/jstests/fts_partition_no_multikey.js +++ /dev/null @@ -1,17 +0,0 @@ - -t = db.fts_partition_no_multikey; -t.drop(); - -t.ensureIndex( { x : 1, y : "text" } ) - -t.insert( { x : 5 , y : "this is fun" } ); -assert.isnull( db.getLastError() ); - -t.insert( { x : [] , y : "this is fun" } ); -assert( db.getLastError() ); - -t.insert( { x : [1] , y : "this is fun" } ); -assert( db.getLastError() ); - -t.insert( { x : [1,2] , y : "this is fun" } ); -assert( db.getLastError() ); diff --git a/jstests/fts_phrase.js b/jstests/fts_phrase.js deleted file mode 100644 index 0b58bef817e..00000000000 --- a/jstests/fts_phrase.js +++ /dev/null @@ -1,25 +0,0 @@ - -t = db.text_phrase; -t.drop() - -t.save( { _id : 1 , title : "my blog post" , text : "i am writing a blog. yay" } ); -t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am typing. yay" } ); -t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. 
yay" } ); - -t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } ); - -res = t.runCommand( "text" , { search : "blog write" } ); -assert.eq( 3, res.results.length ); -assert.eq( 1, res.results[0].obj._id ); -assert( res.results[0].score > (res.results[1].score*2), tojson(res) ); - -res = t.runCommand( "text" , { search : "write blog" } ); -assert.eq( 3, res.results.length ); -assert.eq( 1, res.results[0].obj._id ); -assert( res.results[0].score > (res.results[1].score*2), tojson(res) ); - - - - - - diff --git a/jstests/fts_proj.js b/jstests/fts_proj.js deleted file mode 100644 index 1ecc6688d1b..00000000000 --- a/jstests/fts_proj.js +++ /dev/null @@ -1,20 +0,0 @@ -t = db.text_proj; -t.drop(); - -t.save( { _id : 1 , x : "a", y: "b", z : "c"}); -t.save( { _id : 2 , x : "d", y: "e", z : "f"}); -t.save( { _id : 3 , x : "a", y: "g", z : "h"}); - -t.ensureIndex( { x : "text"} , { default_language : "none" } ); - -res = t.runCommand("text", {search : "a"}); -assert.eq( 2, res.results.length ); -assert( res.results[0].obj.y, tojson(res) ); - -res = t.runCommand("text", {search : "a", project: {x: 1}}); -assert.eq( 2, res.results.length ); -assert( !res.results[0].obj.y, tojson(res) ); - - - - diff --git a/jstests/fts_projection.js b/jstests/fts_projection.js deleted file mode 100644 index 9bdb9dbca8a..00000000000 --- a/jstests/fts_projection.js +++ /dev/null @@ -1,99 +0,0 @@ -// Test $text with $textScore projection. - -var t = db.getSiblingDB("test").getCollection("fts_projection"); -t.drop(); - -db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); - -t.insert({_id: 0, a: "textual content"}); -t.insert({_id: 1, a: "additional content", b: -1}); -t.insert({_id: 2, a: "irrelevant content"}); -t.ensureIndex({a:"text"}); - -// Project the text score. -var results = t.find({$text: {$search: "textual content -irrelevant"}}, {_idCopy:0, score:{$meta: "textScore"}}).toArray(); -// printjson(results); -// Scores should exist. 
-assert.eq(results.length, 2); -assert(results[0].score); -assert(results[1].score); - -// indexed by _id. -var scores = [0, 0, 0]; -scores[results[0]._id] = results[0].score; -scores[results[1]._id] = results[1].score; - -// -// Edge/error cases: -// - -// Project text score into 2 fields. -results = t.find({$text: {$search: "textual content -irrelevant"}}, {otherScore: {$meta: "textScore"}, score:{$meta: "textScore"}}).toArray(); -assert.eq(2, results.length); -for (var i = 0; i < results.length; ++i) { - assert.close(scores[results[i]._id], results[i].score); - assert.close(scores[results[i]._id], results[i].otherScore); -} - -// printjson(results); - -// Project text score into "x.$" shouldn't crash -assert.throws(function() { t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}}).toArray(); }); - -// TODO: We can't project 'x.y':1 and 'x':1 (yet). - -// Clobber an existing field and behave nicely. -results = t.find({$text: {$search: "textual content -irrelevant"}}, - {b: {$meta: "textScore"}}).toArray(); -assert.eq(2, results.length); -for (var i = 0; i < results.length; ++i) { - assert.close(scores[results[i]._id], results[i].b, - i + ': existing field in ' + tojson(results[i], '', true) + - ' is not clobbered with score'); -} - -assert.neq(-1, results[0].b); - -// Don't crash if we have no text score. -var results = t.find({a: /text/}, {score: {$meta: "textScore"}}).toArray(); -// printjson(results); - -// No textScore proj. 
with nested fields -assert.throws(function() { t.find({$text: {$search: "blah"}}, {'x.y':{$meta: "textScore"}}).toArray(); }); - -// SERVER-12173 -// When $text operator is in $or, should evaluate first -results = t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {_id: 1}]}, - {_idCopy:0, score:{$meta: "textScore"}}).toArray(); -printjson(results); -assert.eq(2, results.length); -for (var i = 0; i < results.length; ++i) { - assert.close(scores[results[i]._id], results[i].score, - i + ': TEXT under OR invalid score: ' + tojson(results[i], '', true)); -} - -// SERVER-12592 -// When $text operator is in $or, all non-$text children must be indexed. Otherwise, we should produce -// a readable error. -var errorMessage = ''; -assert.throws( function() { - try { - t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {b: 1}]}).itcount(); - } - catch (e) { - errorMessage = e; - throw e; - } -}, null, 'Expected error from failed TEXT under OR planning'); -assert.neq(-1, errorMessage.indexOf('TEXT'), - 'message from failed text planning does not mention TEXT: ' + errorMessage); -assert.neq(-1, errorMessage.indexOf('OR'), - 'message from failed text planning does not mention OR: ' + errorMessage); - -// Scores should exist. -assert.eq(results.length, 2); -assert(results[0].score, - "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or"); -assert(results[1].score, - "invalid text score for " + tojson(results[0], '', true) + " when $text is in $or"); - diff --git a/jstests/fts_querylang.js b/jstests/fts_querylang.js deleted file mode 100644 index 2a139f5b766..00000000000 --- a/jstests/fts_querylang.js +++ /dev/null @@ -1,93 +0,0 @@ -// Test $text query operator. 
- -var t = db.getSiblingDB("test").getCollection("fts_querylang"); -var cursor; -var results; - -db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); - -t.drop(); - -t.insert({_id: 0, unindexedField: 0, a: "textual content"}); -t.insert({_id: 1, unindexedField: 1, a: "additional content"}); -t.insert({_id: 2, unindexedField: 2, a: "irrelevant content"}); -t.ensureIndex({a: "text"}); - -// Test text query with no results. -assert.eq(false, t.find({$text: {$search: "words"}}).hasNext()); - -// Test basic text query. -results = t.find({$text: {$search: "textual content -irrelevant"}}).toArray(); -assert.eq(results.length, 2); -assert.neq(results[0]._id, 2); -assert.neq(results[1]._id, 2); - -// Test sort with basic text query. -results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray(); -assert.eq(results.length, 2); -assert.eq(results[0]._id, 0); -assert.eq(results[1]._id, 1); - -// Test skip with basic text query. -results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).skip(1).toArray(); -assert.eq(results.length, 1); -assert.eq(results[0]._id, 1); - -// Test limit with basic text query. -results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).limit(1).toArray(); -assert.eq(results.length, 1); -assert.eq(results[0]._id, 0); - -// TODO Test basic text query with sort, once sort is enabled in the new query framework. - -// TODO Test basic text query with projection, once projection is enabled in the new query -// framework. - -// Test $and of basic text query with indexed expression. -results = t.find({$text: {$search: "content -irrelevant"}, - _id: 1}).toArray(); -assert.eq(results.length, 1); -assert.eq(results[0]._id, 1); - -// Test $and of basic text query with unindexed expression. 
-results = t.find({$text: {$search: "content -irrelevant"}, - unindexedField: 1}).toArray(); -assert.eq(results.length, 1); -assert.eq(results[0]._id, 1); - -// TODO Test invalid inputs for $text, $search, $language. - -// Test $language. -cursor = t.find({$text: {$search: "contents", $language: "none"}}); -assert.eq(false, cursor.hasNext()); - -cursor = t.find({$text: {$search: "contents", $language: "EN"}}); -assert.eq(true, cursor.hasNext()); - -cursor = t.find({$text: {$search: "contents", $language: "spanglish"}}); -assert.throws(function() { cursor.next() }); - -// TODO Test $and of basic text query with geo expression. - -// Test update with $text. -t.update({$text: {$search: "textual content -irrelevant"}}, {$set: {b: 1}}, {multi: true}); -assert.eq(2, t.find({b: 1}).itcount(), - 'incorrect number of documents updated'); - -// TODO Test remove with $text, once it is enabled with the new query framework. - -// TODO Test count with $text, once it is enabled with the new query framework. - -// TODO Test findAndModify with $text, once it is enabled with the new query framework. - -// TODO Test aggregate with $text, once it is enabled with the new query framework. - -// TODO Test that old query framework rejects $text queries. - -// TODO Test that $text fails without a text index. - -// TODO Test that $text accepts a hint of the text index. - -// TODO Test that $text fails if a different index is hinted. - -// TODO Test $text with {$natural:1} sort, {$natural:1} hint. diff --git a/jstests/fts_score_sort.js b/jstests/fts_score_sort.js deleted file mode 100644 index 59fb852a774..00000000000 --- a/jstests/fts_score_sort.js +++ /dev/null @@ -1,28 +0,0 @@ -// Test sorting with text score metadata. 
- -var t = db.getSiblingDB("test").getCollection("fts_score_sort"); -t.drop(); - -db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true}); - -t.insert({_id: 0, a: "textual content"}); -t.insert({_id: 1, a: "additional content"}); -t.insert({_id: 2, a: "irrelevant content"}); -t.ensureIndex({a:"text"}); - -// Sort by the text score. -var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({score: {$meta: "textScore"}}).toArray(); -// printjson(results); -assert.eq(results.length, 2); -assert.eq(results[0]._id, 0); -assert.eq(results[1]._id, 1); -assert(results[0].score > results[1].score); - -// Sort by {_id descending, score} and verify the order is right. -var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({_id: -1, score: {$meta: "textScore"}}).toArray(); -printjson(results); -assert.eq(results.length, 2); -assert.eq(results[0]._id, 1); -assert.eq(results[1]._id, 0); -// Note the reversal from above. 
-assert(results[0].score < results[1].score); diff --git a/jstests/fts_spanish.js b/jstests/fts_spanish.js deleted file mode 100644 index cdf73343b5f..00000000000 --- a/jstests/fts_spanish.js +++ /dev/null @@ -1,31 +0,0 @@ - -load( "jstests/libs/fts.js" ); - -t = db.text_spanish; -t.drop(); - -t.save( { _id: 1, title: "mi blog", text: "Este es un blog de prueba" } ); -t.save( { _id: 2, title: "mi segundo post", text: "Este es un blog de prueba" } ); -t.save( { _id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed" } ); -t.save( { _id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english" } ); - -// default weight is 1 -// specify weights if you want a field to be more meaningull -t.ensureIndex( { "title": "text", text: "text" }, { weights: { title: 10 }, - default_language: "es" } ); - -res = t.runCommand( "text", { search: "blog" } ); -assert.eq( 4, res.results.length ); - -assert.eq( [4], queryIDS( t, "stem" ) ); -assert.eq( [3], queryIDS( t, "stemmed" ) ); -assert.eq( [4], queryIDS( t, "stemmed", null, { language : "en" } ) ); - -assert.eq( [1,2], queryIDS( t, "prueba" ) ); - -t.save( { _id: 5, language: "spanglish", title: "", text: "" } ); -assert( db.getLastError() ); - -t.dropIndexes(); -t.ensureIndex( { "title": "text", text: "text" }, { default_language: "spanglish" } ); -assert( db.getLastError() ); diff --git a/jstests/geo1.js b/jstests/geo1.js deleted file mode 100644 index 338d96eb23c..00000000000 --- a/jstests/geo1.js +++ /dev/null @@ -1,41 +0,0 @@ - -t = db.geo1 -t.drop(); - -idx = { loc : "2d" , zip : 1 } - -t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } ) -t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } ) -t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } ) -assert.isnull( db.getLastError() ) - -// test "2d" has to be first -assert.eq( 1 , t.getIndexKeys().length , "S1" ); -t.ensureIndex( { zip : 1 , loc : "2d" } ); -assert.eq( 1 , 
t.getIndexKeys().length , "S2" ); - -t.ensureIndex( idx ); -assert.eq( 2 , t.getIndexKeys().length , "S3" ); - -assert.eq( 3 , t.count() , "B1" ); -t.insert( { loc : [ 200 , 200 ] } ) -assert( db.getLastError() , "B2" ) -assert.eq( 3 , t.count() , "B3" ); - -// test normal access - -wb = t.findOne( { zip : "06525" } ) -assert( wb , "C1" ); - -assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" ) -assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" ) -// assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" ) - -// test config options - -t.drop(); - -t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } ); -t.insert( { loc : [ 200 , 200 ] } ) -assert.isnull( db.getLastError() , "D1" ) - diff --git a/jstests/geo10.js b/jstests/geo10.js deleted file mode 100644 index 39da09fb9ef..00000000000 --- a/jstests/geo10.js +++ /dev/null @@ -1,21 +0,0 @@ -// Test for SERVER-2746 - -coll = db.geo10 -coll.drop(); - -db.geo10.ensureIndex( { c : '2d', t : 1 }, { min : 0, max : Math.pow( 2, 40 ) } ) -assert( db.getLastError() == null, "B" ) -assert( db.system.indexes.count({ ns : "test.geo10" }) == 2, "A3" ) - -printjson( db.system.indexes.find().toArray() ) - -db.geo10.insert( { c : [ 1, 1 ], t : 1 } ) -assert.eq( db.getLastError(), null, "C" ) - -db.geo10.insert( { c : [ 3600, 3600 ], t : 1 } ) -assert( db.getLastError() == null, "D" ) - -db.geo10.insert( { c : [ 0.001, 0.001 ], t : 1 } ) -assert( db.getLastError() == null, "E" ) - -printjson( db.geo10.find({ c : { $within : { $box : [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]] } }, t : 1 }).toArray() ) diff --git a/jstests/geo2.js b/jstests/geo2.js deleted file mode 100644 index f9632ebd16d..00000000000 --- a/jstests/geo2.js +++ /dev/null @@ -1,40 +0,0 @@ - -t = db.geo2 -t.drop(); - -n = 1 -for ( var x=-100; x<100; x+=2 ){ - for ( var y=-100; y<100; y+=2 ){ - t.insert( { _id : n++ , loc : [ x , y ] } ) - } -} - -t.ensureIndex( { 
loc : "2d" } ) - -fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } ); - -function a( cur ){ - var total = 0; - var outof = 0; - while ( cur.hasNext() ){ - var o = cur.next(); - total += Geo.distance( [ 50 , 50 ] , o.loc ); - outof++; - } - return total/outof; -} - -assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" ) -assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" ); -assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" ); - -printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() ) - - -assert.lt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(50) ) , "C1" ) -assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 , 3 ] } } ).limit(50) ) , "C2" ) -assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] , $maxDistance : 3 } } ).limit(50) ) , "C3" ) - -// SERVER-8974 - test if $geoNear operator works with 2d index as well -var geoNear_cursor = t.find( { loc : { $geoNear : [50, 50] } } ); -assert.eq( geoNear_cursor.count(), 100 ) diff --git a/jstests/geo3.js b/jstests/geo3.js deleted file mode 100644 index 47637783f5b..00000000000 --- a/jstests/geo3.js +++ /dev/null @@ -1,77 +0,0 @@ - -t = db.geo3 -t.drop(); - -n = 1 -for ( var x=-100; x<100; x+=2 ){ - for ( var y=-100; y<100; y+=2 ){ - t.insert( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } ) - } -} - - -t.ensureIndex( { loc : "2d" } ) - -fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } ); - -// test filter - -filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } ); -assert.eq( 10 , filtered1.results.length , "B1" ); -filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } ) -//printjson( filtered1.stats ); - -function avgA( q , len ){ - if ( ! 
len ) - len = 10; - var realq = { loc : { $near : [ 50 , 50 ] } }; - if ( q ) - Object.extend( realq , q ); - var as = - t.find( realq ).limit(len).map( - function(z){ - return z.a; - } - ); - assert.eq( len , as.length , "length in avgA" ); - return Array.avg( as ); -} - -function testFiltering( msg ){ - assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " ); - assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " ); - assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " ); -} - -testFiltering( "just loc" ); - -t.dropIndex( { loc : "2d" } ) -assert.eq( 1 , t.getIndexKeys().length , "setup 3a" ) -t.ensureIndex( { loc : "2d" , a : 1 } ) -assert.eq( 2 , t.getIndexKeys().length , "setup 3b" ) - -filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } ); -assert.eq( 10 , filtered2.results.length , "B3" ); -filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } ) - -assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" ) -assert.eq( filtered1.stats.nscanned , filtered2.stats.nscanned , "C3" ) -assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" ) - -testFiltering( "loc and a" ); - -t.dropIndex( { loc : "2d" , a : 1 } ) -assert.eq( 1 , t.getIndexKeys().length , "setup 4a" ) -t.ensureIndex( { loc : "2d" , b : 1 } ) -assert.eq( 2 , t.getIndexKeys().length , "setup 4b" ) - -testFiltering( "loc and b" ); - - -q = { loc : { $near : [ 50 , 50 ] } } -assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" ) -assert.eq( 100 , t.find( q ).limit(100).count() , "D2" ) - -assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" ) -assert.eq( 20 , t.find( q ).limit(20).size() , "D4" ) - diff --git a/jstests/geo4.js b/jstests/geo4.js deleted file mode 100644 index 78404ab720c..00000000000 --- a/jstests/geo4.js +++ /dev/null @@ -1,10 +0,0 @@ -var t = db.geo4; -t.drop(); - -t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } ); - 
-t.ensureIndex( { loc : "2d" }, { bits : 33 } ); -assert.eq( db.getLastError() , "bits in geo index must be between 1 and 32" , "a" ); - -t.ensureIndex( { loc : "2d" }, { bits : 32 } ); -assert( !db.getLastError(), "b" ); diff --git a/jstests/geo5.js b/jstests/geo5.js deleted file mode 100644 index 67b00f85b44..00000000000 --- a/jstests/geo5.js +++ /dev/null @@ -1,18 +0,0 @@ -t = db.geo5; -t.drop(); - -t.insert( { p : [ 0,0 ] } ) -t.ensureIndex( { p : "2d" } ) - -res = t.runCommand( "geoNear" , { near : [1,1] } ); -assert.eq( 1 , res.results.length , "A1" ); - -t.insert( { p : [ 1,1 ] } ) -t.insert( { p : [ -1,-1 ] } ) -res = t.runCommand( "geoNear" , { near : [50,50] } ); -assert.eq( 3 , res.results.length , "A2" ); - -t.insert( { p : [ -1,-1 ] } ) -res = t.runCommand( "geoNear" , { near : [50,50] } ); -assert.eq( 4 , res.results.length , "A3" ); - diff --git a/jstests/geo6.js b/jstests/geo6.js deleted file mode 100644 index 185795c57ba..00000000000 --- a/jstests/geo6.js +++ /dev/null @@ -1,24 +0,0 @@ - -t = db.geo6; -t.drop(); - -t.ensureIndex( { loc : "2d" } ); - -assert.eq( 0 , t.find().itcount() , "pre0" ); -assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" ) - -t.insert( { _id : 1 , loc : [ 1 , 1 ] } ) -t.insert( { _id : 2 , loc : [ 1 , 2 ] } ) -t.insert( { _id : 3 } ) - -assert.eq( 3 , t.find().itcount() , "A1" ) -assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" ) -assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" ) - -t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).forEach(printjson); -assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" ) -assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B1" ) - - -t.insert( { _id : 4 , loc : [] } ) -assert.eq( 4 , t.find().itcount() , "C1" ) diff --git a/jstests/geo7.js b/jstests/geo7.js deleted file mode 100644 index c220da54249..00000000000 --- a/jstests/geo7.js +++ 
/dev/null @@ -1,20 +0,0 @@ - -t = db.geo7; -t.drop(); - -t.insert({_id:1,y:[1,1]}) -t.insert({_id:2,y:[1,1],z:3}) -t.insert({_id:3,y:[1,1],z:4}) -t.insert({_id:4,y:[1,1],z:5}) - -t.ensureIndex({y:"2d",z:1}) - -assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" ); - -t.dropIndex({y:"2d",z:1}) - -t.ensureIndex({y:"2d"}) -assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" ); - -t.insert( { _id : 5 , y : 5 } ); -assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" ); diff --git a/jstests/geo8.js b/jstests/geo8.js deleted file mode 100644 index 301f3bcc0d1..00000000000 --- a/jstests/geo8.js +++ /dev/null @@ -1,13 +0,0 @@ - -t = db.geo8 -t.drop() - -t.insert( { loc : [ 5 , 5 ] } ) -t.insert( { loc : [ 5 , 6 ] } ) -t.insert( { loc : [ 5 , 7 ] } ) -t.insert( { loc : [ 4 , 5 ] } ) -t.insert( { loc : [ 100 , 100 ] } ) - -t.ensureIndex( { loc : "2d" } ) - -t.runCommand( "geoWalk" ); diff --git a/jstests/geo9.js b/jstests/geo9.js deleted file mode 100644 index 8b6510f03b5..00000000000 --- a/jstests/geo9.js +++ /dev/null @@ -1,28 +0,0 @@ - -t = db.geo9 -t.drop(); - -t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } ) -t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } ) -t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } ) - -t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } ) -t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } ) -t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } ) - -t.ensureIndex( { a : "2d" } ) -t.ensureIndex( { b : "2d" } ) - -function check( field ){ - var q = {} - q[field] = { $near : [ 11 , 11 ] } - arr = t.find( q ).limit(3).map( - function(z){ - return Geo.distance( [ 11 , 11 ] , z[field] ); - } - ); - assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field ); -} - -check( "a" ) -check( "b" ) diff --git a/jstests/geo_2d_explain.js b/jstests/geo_2d_explain.js deleted file mode 100644 index 8195642aabc..00000000000 --- a/jstests/geo_2d_explain.js +++ /dev/null @@ -1,29 +0,0 @@ -var t = db.geo_2d_explain; - 
-t.drop(); - -var n = 1000; - -// insert n documents with integer _id, a can be 1-5, loc is close to [40, 40] -t.drop() -t.ensureIndex({loc: "2d", _id: 1}) - -var x = 40; -var y = 40; -for (var i = 0; i < n; i++) { - // random number in range [1, 5] - var a = Math.floor(Math.random() * 5) + 1; - var dist = 4.0; - var dx = (Math.random() - 0.5) * dist; - var dy = (Math.random() - 0.5) * dist; - var loc = [x + dx, y + dy]; - t.save({_id: i, a: a, loc: loc}); -} - -var explain = t.find({loc: {$near: [40, 40]}, _id: {$lt: 50}}).explain(); - -print('explain = ' + tojson(explain)); - -assert.eq({}, explain.indexBounds); -assert.eq(explain.n, explain.nscannedObjects); -assert.lte(explain.n, explain.nscanned); diff --git a/jstests/geo_2d_with_geojson_point.js b/jstests/geo_2d_with_geojson_point.js deleted file mode 100644 index b5afc8b77b8..00000000000 --- a/jstests/geo_2d_with_geojson_point.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Use of GeoJSON points should be prohibited with a 2d index, SERVER-10636. - */ - -var t = db.geo_2d_with_geojson_point; -t.drop(); -t.ensureIndex({loc: '2d'}); - -var geoJSONPoint = { - type: 'Point', - coordinates: [0, 0] -}; - -print(assert.throws( - function() { - t.findOne({ - loc: {$near: {$geometry: geoJSONPoint}}}); - }, - [], - 'querying 2d index with GeoJSON point.')); diff --git a/jstests/geo_allowedcomparisons.js b/jstests/geo_allowedcomparisons.js deleted file mode 100644 index 171178d0c7b..00000000000 --- a/jstests/geo_allowedcomparisons.js +++ /dev/null @@ -1,107 +0,0 @@ -// A test for what geometries can interact with what other geometries. -t = db.geo_allowedcomparisons; - -// Any GeoJSON object can intersect with any geojson object. -geojsonPoint = { "type" : "Point", "coordinates": [ 0, 0 ] }; -oldPoint = [0,0]; - -// GeoJSON polygons can contain any geojson object and OLD points. 
-geojsonPoly = { "type" : "Polygon", - "coordinates" : [ [ [-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]]}; - -// This can be contained by GJ polygons, intersected by anything GJ and old points. -geojsonLine = { "type" : "LineString", "coordinates": [ [ 0, 0], [1, 1]]} - -// $centerSphere can contain old or new points. -oldCenterSphere = [[0, 0], Math.PI / 180]; -// $box can contain old points. -oldBox = [[-5,-5], [5,5]]; -// $polygon can contain old points. -oldPolygon = [[-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]] -// $center can contain old points. -oldCenter = [[0, 0], 1]; - -t.drop(); -t.ensureIndex({geo: "2d"}); -// 2d doesn't know what to do w/this -t.insert({geo: geojsonPoint}); -assert(db.getLastError()); -// Old points are OK. -t.insert({geo: oldPoint}) -assert(!db.getLastError()); -// Lines not OK in 2d -t.insert({geo: geojsonLine}) -assert(db.getLastError()) -// Shapes are not OK to insert in 2d -t.insert({geo: geojsonPoly}) -assert(db.getLastError()); -t.insert({geo: oldCenterSphere}) -assert(db.getLastError()); -t.insert({geo: oldCenter}) -assert(db.getLastError()); -// If we try to insert a polygon, it thinks it's an array of points. Let's not -// do that. Ditto for the box. - -// Verify that even if we can't index them, we can use them in a matcher. -t.insert({gj: geojsonLine}) -t.insert({gj: geojsonPoly}) -geojsonPoint2 = { "type" : "Point", "coordinates": [ 0, 0.001 ] }; -t.insert({gjp: geojsonPoint2}) - -// We convert between old and new style points. -assert.eq(1, t.find({gjp: {$geoWithin: {$box: oldBox}}}).itcount()); -assert.eq(1, t.find({gjp: {$geoWithin: {$polygon: oldPolygon}}}).itcount()); -assert.eq(1, t.find({gjp: {$geoWithin: {$center: oldCenter}}}).itcount()); -assert.eq(1, t.find({gjp: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount()) - -function runTests() { - // Each find the box, the polygon, and the old point. 
- assert.eq(1, t.find({geo: {$geoWithin: {$box: oldBox}}}).itcount()) - assert.eq(1, t.find({geo: {$geoWithin: {$polygon: oldPolygon}}}).itcount()) - // Each find the old point. - assert.eq(1, t.find({geo: {$geoWithin: {$center: oldCenter}}}).itcount()) - assert.eq(1, t.find({geo: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount()) - // Using geojson with 2d-style geoWithin syntax should choke. - assert.throws(function() { return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}}) - .itcount();}) - // Using old polygon w/new syntax should choke too. - assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}}) - .itcount();}) - assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldBox}}}) - .itcount();}) - assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenter}}}) - .itcount();}) - assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}}) - .itcount();}) - // Even if we only have a 2d index, the 2d suitability function should - // allow the matcher to deal with this. If we have a 2dsphere index we use it. - assert.eq(1, t.find({geo: {$geoWithin: {$geometry: geojsonPoly}}}).itcount()) - assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoly}}}).itcount()) - assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: oldPoint}}}).itcount()) - assert.eq(1, t.find({geo: {$geoIntersects: {$geometry: geojsonPoint}}}).itcount()) -} - -// We have a 2d index right now. Let's see what it does. -runTests(); - -// No index now. -t.dropIndex({geo: "2d"}) -runTests(); - -// 2dsphere index now. -t.ensureIndex({geo: "2dsphere"}) -assert(!db.getLastError()) -// 2dsphere does not support arrays of points. -t.insert({geo: [geojsonPoint2, geojsonPoint]}) -assert(db.getLastError()) -runTests(); - -// Old stuff is not GeoJSON (or old-style point). All should fail. 
-t.insert({geo: oldBox}) -assert(db.getLastError()) -t.insert({geo: oldPolygon}) -assert(db.getLastError()) -t.insert({geo: oldCenter}) -assert(db.getLastError()) -t.insert({geo: oldCenterSphere}) -assert(db.getLastError()) diff --git a/jstests/geo_array0.js b/jstests/geo_array0.js deleted file mode 100644 index 5fe46781d1d..00000000000 --- a/jstests/geo_array0.js +++ /dev/null @@ -1,27 +0,0 @@ -// Make sure the very basics of geo arrays are sane by creating a few multi location docs -t = db.geoarray - -function test(index) { - t.drop(); - t.insert( { zip : "10001", loc : { home : [ 10, 10 ], work : [ 50, 50 ] } } ) - t.insert( { zip : "10002", loc : { home : [ 20, 20 ], work : [ 50, 50 ] } } ) - t.insert( { zip : "10003", loc : { home : [ 30, 30 ], work : [ 50, 50 ] } } ) - assert.isnull( db.getLastError() ) - - if (index) { - t.ensureIndex( { loc : "2d", zip : 1 } ); - assert.isnull( db.getLastError() ) - assert.eq( 2, t.getIndexKeys().length ) - } - - t.insert( { zip : "10004", loc : { home : [ 40, 40 ], work : [ 50, 50 ] } } ) - assert.isnull( db.getLastError() ) - - // test normal access - printjson( t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() ) - assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() ); - assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() ); -} - -//test(false); // this was removed as part of SERVER-6400 -test(true) diff --git a/jstests/geo_array1.js b/jstests/geo_array1.js deleted file mode 100644 index ca61050c888..00000000000 --- a/jstests/geo_array1.js +++ /dev/null @@ -1,38 +0,0 @@ -// Make sure many locations in one doc works, in the form of an array - -t = db.geoarray1 -function test(index) { - t.drop(); - - var locObj = [] - // Add locations everywhere - for ( var i = 0; i < 10; i++ ) { - for ( var j = 0; j < 10; j++ ) { - if ( j % 2 == 0 ) - locObj.push( [ i, j ] ) - else - locObj.push( { x : i, y : j } ) - } - } - - // 
Add docs with all these locations - for( var i = 0; i < 300; i++ ){ - t.insert( { loc : locObj } ) - } - - if (index) { - t.ensureIndex( { loc : "2d" } ) - } - - // Pull them back - for ( var i = 0; i < 10; i++ ) { - for ( var j = 0; j < 10; j++ ) { - assert.eq(300, t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5 ], - [i + 0.5,j + 0.5]]}}}) - .count()) - } - } -} - -test(true) -test(false) diff --git a/jstests/geo_array2.js b/jstests/geo_array2.js deleted file mode 100644 index 0e8d57dd855..00000000000 --- a/jstests/geo_array2.js +++ /dev/null @@ -1,163 +0,0 @@ -// Check the semantics of near calls with multiple locations - -t = db.geoarray2 -t.drop(); - -var numObjs = 10; -var numLocs = 100; - -// Test the semantics of near / nearSphere / etc. queries with multiple keys per object - -for( var i = -1; i < 2; i++ ){ - for(var j = -1; j < 2; j++ ){ - - locObj = [] - - if( i != 0 || j != 0 ) - locObj.push( { x : i * 50 + Random.rand(), - y : j * 50 + Random.rand() } ) - locObj.push( { x : Random.rand(), - y : Random.rand() } ) - locObj.push( { x : Random.rand(), - y : Random.rand() } ) - - t.insert({ name : "" + i + "" + j , loc : locObj , type : "A" }) - t.insert({ name : "" + i + "" + j , loc : locObj , type : "B" }) - } -} - -t.ensureIndex({ loc : "2d" , type : 1 }) - -assert.isnull( db.getLastError() ) - -print( "Starting testing phase... ") - -for( var t = 0; t < 2; t++ ){ - -var type = t == 0 ? "A" : "B" - -for( var i = -1; i < 2; i++ ){ - for(var j = -1; j < 2; j++ ){ - - var center = [ i * 50 , j * 50 ] - var count = i == 0 && j == 0 ? 
9 : 1 - var objCount = 1 - - // Do near check - - var nearResults = db.runCommand( { geoNear : "geoarray2" , - near : center , - num : count, - query : { type : type } } ).results - //printjson( nearResults ) - - var objsFound = {} - var lastResult = 0; - for( var k = 0; k < nearResults.length; k++ ){ - - // All distances should be small, for the # of results - assert.gt( 1.5 , nearResults[k].dis ) - // Distances should be increasing - assert.lte( lastResult, nearResults[k].dis ) - // Objs should be of the right type - assert.eq( type, nearResults[k].obj.type ) - - lastResult = nearResults[k].dis - - var objKey = "" + nearResults[k].obj._id - - if( objKey in objsFound ) objsFound[ objKey ]++ - else objsFound[ objKey ] = 1 - - } - - // Make sure we found the right objects each time - // Note: Multiple objects could be found for diff distances. - for( var q in objsFound ){ - assert.eq( objCount , objsFound[q] ) - } - - - // Do nearSphere check - - // Earth Radius - var eRad = 6371 - - nearResults = db.geoarray2.find( { loc : { $nearSphere : center , $maxDistance : 500 /* km */ / eRad }, type : type } ).toArray() - - assert.eq( nearResults.length , count ) - - objsFound = {} - lastResult = 0; - for( var k = 0; k < nearResults.length; k++ ){ - var objKey = "" + nearResults[k]._id - if( objKey in objsFound ) objsFound[ objKey ]++ - else objsFound[ objKey ] = 1 - - } - - // Make sure we found the right objects each time - for( var q in objsFound ){ - assert.eq( objCount , objsFound[q] ) - } - - - - // Within results do not return duplicate documents - - var count = i == 0 && j == 0 ? 9 : 1 - var objCount = i == 0 && j == 0 ? 
1 : 1 - - // Do within check - objsFound = {} - - var box = [ [center[0] - 1, center[1] - 1] , [center[0] + 1, center[1] + 1] ] - - //printjson( box ) - - var withinResults = db.geoarray2.find({ loc : { $within : { $box : box } } , type : type }).toArray() - - assert.eq( withinResults.length , count ) - - for( var k = 0; k < withinResults.length; k++ ){ - var objKey = "" + withinResults[k]._id - if( objKey in objsFound ) objsFound[ objKey ]++ - else objsFound[ objKey ] = 1 - } - - //printjson( objsFound ) - - // Make sure we found the right objects each time - for( var q in objsFound ){ - assert.eq( objCount , objsFound[q] ) - } - - - // Do within check (circle) - objsFound = {} - - withinResults = db.geoarray2.find({ loc : { $within : { $center : [ center, 1.5 ] } } , type : type }).toArray() - - assert.eq( withinResults.length , count ) - - for( var k = 0; k < withinResults.length; k++ ){ - var objKey = "" + withinResults[k]._id - if( objKey in objsFound ) objsFound[ objKey ]++ - else objsFound[ objKey ] = 1 - } - - // Make sure we found the right objects each time - for( var q in objsFound ){ - assert.eq( objCount , objsFound[q] ) - } - - - - } -} - -} - - - - diff --git a/jstests/geo_borders.js b/jstests/geo_borders.js deleted file mode 100644 index 953850ad7f3..00000000000 --- a/jstests/geo_borders.js +++ /dev/null @@ -1,168 +0,0 @@ -t = db.borders -t.drop() - -epsilon = 0.0001; - -// For these tests, *required* that step ends exactly on max -min = -1 -max = 1 -step = 1 -numItems = 0; - -for ( var x = min; x <= max; x += step ) { - for ( var y = min; y <= max; y += step ) { - t.insert( { loc : { x : x, y : y } } ) - numItems++; - } -} - -overallMin = -1 -overallMax = 1 - -// Create a point index slightly smaller than the points we have -t.ensureIndex( { loc : "2d" }, { max : overallMax - epsilon / 2, min : overallMin + epsilon / 2 } ) -assert( db.getLastError() ) - -// Create a point index only slightly bigger than the points we have -t.ensureIndex( { loc : 
"2d" }, { max : overallMax + epsilon, min : overallMin - epsilon } ) -assert.isnull( db.getLastError() ) - -// ************ -// Box Tests -// ************ - -// If the bounds are bigger than the box itself, just clip at the borders -assert.eq( numItems, t.find( - { loc : { $within : { $box : [ - [ overallMin - 2 * epsilon, overallMin - 2 * epsilon ], - [ overallMax + 2 * epsilon, overallMax + 2 * epsilon ] ] } } } ).count() ); - -// Check this works also for bounds where only a single dimension is off-bounds -assert.eq( numItems - 5, t.find( - { loc : { $within : { $box : [ - [ overallMin - 2 * epsilon, overallMin - 0.5 * epsilon ], - [ overallMax - epsilon, overallMax - epsilon ] ] } } } ).count() ); - -// Make sure we can get at least close to the bounds of the index -assert.eq( numItems, t.find( - { loc : { $within : { $box : [ - [ overallMin - epsilon / 2, overallMin - epsilon / 2 ], - [ overallMax + epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() ); - -// Make sure we can get at least close to the bounds of the index -assert.eq( numItems, t.find( - { loc : { $within : { $box : [ - [ overallMax + epsilon / 2, overallMax + epsilon / 2 ], - [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() ); - -// Check that swapping min/max has good behavior -assert.eq( numItems, t.find( - { loc : { $within : { $box : [ - [ overallMax + epsilon / 2, overallMax + epsilon / 2 ], - [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() ); - -assert.eq( numItems, t.find( - { loc : { $within : { $box : [ - [ overallMax + epsilon / 2, overallMin - epsilon / 2 ], - [ overallMin - epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() ); - -// ************** -// Circle tests -// ************** - -center = ( overallMax + overallMin ) / 2 -center = [ center, center ] -radius = overallMax - -offCenter = [ center[0] + radius, center[1] + radius ] -onBounds = [ offCenter[0] + epsilon, offCenter[1] + epsilon ] -offBounds = [ onBounds[0] + 
epsilon, onBounds[1] + epsilon ] -onBoundsNeg = [ -onBounds[0], -onBounds[1] ] - -// Make sure we can get all points when radius is exactly at full bounds -assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + epsilon ] } } } ).count() ); - -// Make sure we can get points when radius is over full bounds -assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + 2 * epsilon ] } } } ).count() ); - -// Make sure we can get points when radius is over full bounds, off-centered -assert.lt( 0, t.find( { loc : { $within : { $center : [ offCenter, radius + 2 * epsilon ] } } } ).count() ); - -// Make sure we get correct corner point when center is in bounds -// (x bounds wrap, so could get other corner) -cornerPt = t.findOne( { loc : { $within : { $center : [ offCenter, step / 2 ] } } } ); -assert.eq( cornerPt.loc.y, overallMax ) - -// Make sure we get correct corner point when center is on bounds -// NOTE: Only valid points on MIN bounds -cornerPt = t - .findOne( { loc : { $within : { $center : [ onBoundsNeg, Math.sqrt( 2 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); -assert.eq( cornerPt.loc.y, overallMin ) - -// Make sure we can't get corner point when center is over bounds -try { - t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); - assert( false ) -} catch (e) { -} - -// Make sure we can't get corner point when center is on max bounds -try { - t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } ); - assert( false ) -} catch (e) { -} - -// *********** -// Near tests -// *********** - -// Make sure we can get all nearby points to point in range -assert.eq( overallMax, t.find( { loc : { $near : offCenter } } ).next().loc.y ); - -// Make sure we can get all nearby points to point on boundary -assert.eq( overallMin, t.find( { loc : { $near : onBoundsNeg } } ).next().loc.y ); - -// Make sure we can't get all nearby 
points to point over boundary -try { - t.findOne( { loc : { $near : offBounds } } ) - assert( false ) -} catch (e) { -} -// Make sure we can't get all nearby points to point on max boundary -try { - t.findOne( { loc : { $near : onBoundsNeg } } ) - assert( false ) -} catch (e) { -} - -// Make sure we can get all nearby points within one step (4 points in top -// corner) -assert.eq( 4, t.find( { loc : { $near : offCenter, $maxDistance : step * 1.9 } } ).count() ); - -// ************** -// Command Tests -// ************** -// Make sure we can get all nearby points to point in range -assert.eq( overallMax, db.runCommand( { geoNear : "borders", near : offCenter } ).results[0].obj.loc.y ); - -// Make sure we can get all nearby points to point on boundary -assert.eq( overallMin, db.runCommand( { geoNear : "borders", near : onBoundsNeg } ).results[0].obj.loc.y ); - -// Make sure we can't get all nearby points to point over boundary -try { - db.runCommand( { geoNear : "borders", near : offBounds } ).results.length - assert( false ) -} catch (e) { -} - -// Make sure we can't get all nearby points to point on max boundary -try { - db.runCommand( { geoNear : "borders", near : onBounds } ).results.length - assert( false ) -} catch (e) { -} - -// Make sure we can get all nearby points within one step (4 points in top -// corner) -assert.eq( 4, db.runCommand( { geoNear : "borders", near : offCenter, maxDistance : step * 1.5 } ).results.length ); diff --git a/jstests/geo_box1.js b/jstests/geo_box1.js deleted file mode 100644 index 5ef335158e1..00000000000 --- a/jstests/geo_box1.js +++ /dev/null @@ -1,43 +0,0 @@ - -t = db.geo_box1; -t.drop(); - -num = 0; -for ( x=0; x<=20; x++ ){ - for ( y=0; y<=20; y++ ){ - o = { _id : num++ , loc : [ x , y ] } - t.save( o ) - } -} - -t.ensureIndex( { loc : "2d" } ); - -searches = [ - [ [ 1 , 2 ] , [ 4 , 5 ] ] , - [ [ 1 , 1 ] , [ 2 , 2 ] ] , - [ [ 0 , 2 ] , [ 4 , 5 ] ] , - [ [ 1 , 1 ] , [ 2 , 8 ] ] , -]; - - -for ( i=0; i> ' + - '(' + 
covering[1][0] + ',' + covering[1][1] + ')'); -// Compare covering against $box coordinates. -// min X -assert.lte(covering[0][0], 4); -// min Y -assert.lte(covering[0][1], 4); -// max X -assert.gte(covering[1][0], 6); -// max Y -assert.gte(covering[1][1], 6); diff --git a/jstests/geo_box3.js b/jstests/geo_box3.js deleted file mode 100644 index 8941f637518..00000000000 --- a/jstests/geo_box3.js +++ /dev/null @@ -1,36 +0,0 @@ -// How to construct a test to stress the flaw in SERVER-994: -// construct an index, think up a bounding box inside the index that -// doesn't include the center of the index, and put a point inside the -// bounding box. - -// This is the bug reported in SERVER-994. -t=db.geo_box3; -t.drop(); -t.insert({ point : { x : -15000000, y : 10000000 } }); -t.ensureIndex( { point : "2d" } , { min : -21000000 , max : 21000000 } ); -var c=t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]} } }); -assert.eq(1, c.count(), "A1"); - -// Same thing, modulo 1000000. -t=db.geo_box3; -t.drop(); -t.insert({ point : { x : -15, y : 10 } }); -t.ensureIndex( { point : "2d" } , { min : -21 , max : 21 } ); -var c=t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]} } }); -assert.eq(1, c.count(), "B1"); - -// Two more examples, one where the index is centered at the origin, -// one not. 
-t=db.geo_box3; -t.drop(); -t.insert({ point : { x : 1.0 , y : 1.0 } }); -t.ensureIndex( { point : "2d" } , { min : -2 , max : 2 } ); -var c=t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]} } }); -assert.eq(1, c.count(), "C1"); - -t=db.geo_box3; -t.drop(); -t.insert({ point : { x : 3.9 , y : 3.9 } }); -t.ensureIndex( { point : "2d" } , { min : 0 , max : 4 } ); -var c=t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]} } }); -assert.eq(1, c.count(), "D1"); diff --git a/jstests/geo_center_sphere1.js b/jstests/geo_center_sphere1.js deleted file mode 100644 index 9f5eaec8764..00000000000 --- a/jstests/geo_center_sphere1.js +++ /dev/null @@ -1,96 +0,0 @@ -t = db.geo_center_sphere1; - -function test(index) { - t.drop(); - skip = 3 // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4) - - searches = [ - // x , y rad - [ [ 5 , 0 ] , 0.05 ] , // ~200 miles - [ [ 135 , 0 ] , 0.05 ] , - - [ [ 5 , 70 ] , 0.05 ] , - [ [ 135 , 70 ] , 0.05 ] , - [ [ 5 , 85 ] , 0.05 ] , - - [ [ 20 , 0 ] , 0.25 ] , // ~1000 miles - [ [ 20 , -45 ] , 0.25 ] , - [ [ -20 , 60 ] , 0.25 ] , - [ [ -20 , -70 ] , 0.25 ] , - ]; - correct = searches.map( function(z){ return []; } ); - - num = 0; - - for ( x=-179; x<=179; x += skip ){ - for ( y=-89; y<=89; y += skip ){ - o = { _id : num++ , loc : [ x , y ] } - t.save( o ) - for ( i=0; i= distance ) minNewDistance = newDistance - } - - //print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is " - // + minNewDistance + " vs " + radius ) - - assert.lte( minNewDistance, radius ) - assert.gte( minNewDistance, distance ) - distance = minNewDistance - - } - - // geoNear - var results = db.runCommand( { - geoNear : "sphere", near : startPoint, maxDistance : radius, num : 2 * pointsIn, spherical : true } ).results - - /* - printjson( results ); - - for ( var j = 0; j < results[0].obj.loc.length; j++ ) { - var newDistance = Geo.sphereDistance( startPoint, results[0].obj.loc[j] ) - if( newDistance 
<= radius ) print( results[0].obj.loc[j] + " : " + newDistance ) - } - */ - - assert.eq( docsIn, results.length ) - - var distance = 0; - for ( var i = 0; i < results.length; i++ ) { - var retDistance = results[i].dis - - // print( "Dist from : " + results[i].loc + " to " + startPoint + " is " - // + retDistance + " vs " + radius ) - - var distInObj = false - for ( var j = 0; j < results[i].obj.loc.length && distInObj == false; j++ ) { - var newDistance = Geo.sphereDistance( startPoint, results[i].obj.loc[j] ) - distInObj = ( newDistance >= retDistance - 0.0001 && newDistance <= retDistance + 0.0001 ) - } - - assert( distInObj ) - assert.lte( retDistance, radius ) - assert.gte( retDistance, distance ) - distance = retDistance - } - - //break; -} - - diff --git a/jstests/geo_circle1.js b/jstests/geo_circle1.js deleted file mode 100644 index 852b60d186b..00000000000 --- a/jstests/geo_circle1.js +++ /dev/null @@ -1,43 +0,0 @@ - -t = db.geo_circle1; -t.drop(); - -searches = [ - [ [ 5 , 5 ] , 3 ] , - [ [ 5 , 5 ] , 1 ] , - [ [ 5 , 5 ] , 5 ] , - [ [ 0 , 5 ] , 5 ] , -]; -correct = searches.map( function(z){ return []; } ); - -num = 0; - -for ( x=0; x<=20; x++ ){ - for ( y=0; y<=20; y++ ){ - o = { _id : num++ , loc : [ x , y ] } - t.save( o ) - for ( i=0; i queries[i].maxDistance ) - continue; - if ( queries[i].search.z != n % 5 ) - continue; - answers[i].results.push( { _id : n , loc : [ x , y ]} ) - answers[i].totalDistance += d; - } - - n++; - } -} - -t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } ); - -for ( i=0; i queries[i].maxDistance ) - continue; - if ( queries[i].search.z != n % 10 && - queries[i].search.z != ( n + 5 ) % 10 ) - continue; - answers[i].results.push( { _id : n , loc : [ x , y ] } ) - answers[i].totalDistance += d; - } - - n++; - } -} - -t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } ); - -for ( i=0; i1 from us but <1.5 -// These points are (-+1, -+1) -resNear = db.runCommand({geoNear : t.getName(), near: 
[0, 0], query: {$and: [{geo: {$within: {$center: [[0, 0], 1.5]}}}, - {geo: {$not: {$within: {$center: [[0,0], 1]}}}}]}}) -assert.eq(resNear.results.length, 4) diff --git a/jstests/geo_oob_sphere.js b/jstests/geo_oob_sphere.js deleted file mode 100644 index f2c76457af9..00000000000 --- a/jstests/geo_oob_sphere.js +++ /dev/null @@ -1,42 +0,0 @@ -// -// Ensures spherical queries report invalid latitude values in points and center positions -// - -t = db.geooobsphere -t.drop(); - -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 89 } }) -t.insert({ loc : { x : 30, y : 91 } }) - -t.ensureIndex({ loc : "2d" }) -assert.isnull( db.getLastError() ) - -assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 91 ], $maxDistance : 0.25 } }).count() } ); -var err = db.getLastError() -assert( err != null ) -printjson( err ) - -assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 89 ], $maxDistance : 0.25 } }).count() } ); -var err = db.getLastError() -assert( err != null ) -printjson( err ) - -assert.throws( function() { t.find({ loc : { $within : { $centerSphere : [[ -180, -91 ], 0.25] } } }).count() } ); -var err = db.getLastError() -assert( err != null ) -printjson( err ) - -db.runCommand({ geoNear : "geooobsphere", near : [179, -91], maxDistance : 0.25, spherical : true }) -var err = db.getLastError() -assert( err != null ) -printjson( err ) - -db.runCommand({ geoNear : "geooobsphere", near : [30, 89], maxDistance : 0.25, spherical : true }) -var err = db.getLastError() -assert( err != null ) -printjson( err ) \ No newline at end of file diff --git a/jstests/geo_or.js b/jstests/geo_or.js deleted file mode 100644 index fd9b7234a21..00000000000 --- a/jstests/geo_or.js +++ /dev/null @@ -1,62 +0,0 @@ -// multiple geo clauses with $or - -t = db.geoor; - -t.drop(); - -var p = 
[-71.34895, 42.46037]; -var q = [1.48736, 42.55327]; - -t.save({loc: p}); -t.save({loc: q}); - -var indexname = "2dsphere"; - -t.ensureIndex({loc: indexname}) - -assert.eq(1, t.find({loc: p}).itcount(), indexname); - -// $or supports at most one $near clause -assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(), - 'geo query not supported by $or. index type: ' + indexname); -assert.throws(function() { - assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}, - {loc: {$nearSphere: q}}]}).itcount(), - 'geo query not supported by $or. index type: ' + indexname); -}, null, '$or with multiple $near clauses'); - -// the following tests should match the points in the collection - -assert.eq(2, t.find({$or: [ - {loc: {$geoWithin: {$centerSphere: [p, 10]}}}, - {loc: {$geoWithin: {$centerSphere: [p, 10]}}} - ]}).itcount(), - 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname); -assert.eq(2, t.find({$or: [ - {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}}, - {loc: {$geoIntersects: {$geometry: {type: 'LineString', - coordinates: [[0,0], [1,1]]}}}} - ]}).itcount(), - 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' + indexname); -assert.eq(2, t.find({$or: [ - {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}}, - {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}} - ]}).itcount(), - 'multiple $geoIntersects Point clauses not supported by $or. index type: ' + indexname); -assert.eq(2, t.find({$or: [ - {loc: {$geoIntersects: {$geometry: {type: 'Polygon', - coordinates: [[[0, 0], p, q, [0, 0]]]}}}}, - {loc: {$geoIntersects: {$geometry: - {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}}}} - ]}).itcount(), - 'multiple $geoIntersects Polygon clauses not supported by $or. 
index type: ' + indexname); - -t.dropIndexes(); - -var indexname = "2d"; - -t.ensureIndex({loc: indexname}) - -assert.eq(2, t.find({$or: [{loc: {$geoWithin: {$centerSphere: [p, 10]}}}, - {loc: {$geoWithin: {$centerSphere: [p, 10]}}}]}).itcount(), - 'multiple $geoWithin clauses not supported by $or. index type: ' + indexname); diff --git a/jstests/geo_poly_edge.js b/jstests/geo_poly_edge.js deleted file mode 100644 index 31a0849e67d..00000000000 --- a/jstests/geo_poly_edge.js +++ /dev/null @@ -1,22 +0,0 @@ -// -// Tests polygon edge cases -// - -var coll = db.getCollection( 'jstests_geo_poly_edge' ) -coll.drop(); - -coll.ensureIndex({ loc : "2d" }) - -coll.insert({ loc : [10, 10] }) -coll.insert({ loc : [10, -10] }) - -assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, -10 ]] } } }).itcount(), 2 ) - -assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, 10 ]] } } }).itcount(), 1 ) - - -coll.insert({ loc : [179, 0] }) -coll.insert({ loc : [0, 179] }) - -assert.eq( coll.find({ loc : { $within : { $polygon : [[0, 0], [1000, 0], [1000, 1000], [0, 1000]] } } }).itcount(), 3 ) - diff --git a/jstests/geo_poly_line.js b/jstests/geo_poly_line.js deleted file mode 100644 index aca77b6ab0a..00000000000 --- a/jstests/geo_poly_line.js +++ /dev/null @@ -1,17 +0,0 @@ -// Test that weird polygons work SERVER-3725 - -t = db.geo_polygon5; -t.drop(); - -t.insert({loc:[0,0]}) -t.insert({loc:[1,0]}) -t.insert({loc:[2,0]}) -t.insert({loc:[3,0]}) -t.insert({loc:[4,0]}) - -t.ensureIndex( { loc : "2d" } ); - -printjson( t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).toArray() ) - -assert.eq( 5, t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).itcount() ) - diff --git a/jstests/geo_polygon1.js b/jstests/geo_polygon1.js deleted file mode 100644 index 4b7427a4da2..00000000000 --- a/jstests/geo_polygon1.js +++ /dev/null @@ -1,74 +0,0 @@ -// -// Tests for N-dimensional polygon querying 
-// - -t = db.geo_polygon1; -t.drop(); - -num = 0; -for ( x=1; x < 9; x++ ){ - for ( y= 1; y < 9; y++ ){ - o = { _id : num++ , loc : [ x , y ] }; - t.save( o ); - } -} - -t.ensureIndex( { loc : "2d" } ); - -triangle = [[0,0], [1,1], [0,2]]; - -// Look at only a small slice of the data within a triangle -assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" ); - -boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; - -assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" ); - -//Make sure we can add object-based polygons -assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() ) - -// Look in a box much bigger than the one we have data in -boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; -assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" ); - -t.drop(); - -pacman = [ - [0,2], [0,4], [2,6], [4,6], // Head - [6,4], [4,3], [6,2], // Mouth - [4,0], [2,0] // Bottom - ]; - -t.save({loc: [1,3] }); // Add a point that's in -t.ensureIndex( { loc : "2d" } ); -assert.isnull( db.getLastError() ) - -assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" ); - -t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening -t.save({ loc : [3, 7] }) // Add a point above the center of the head -t.save({ loc : [3,-1] }) // Add a point below the center of the bottom - -assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" ); - -// Make sure we can't add bad polygons -okay = true -try{ - t.find( { loc : { $within : { $polygon : [1, 2] } } } ).toArray() - okay = false -} -catch(e){} -assert(okay) -try{ - t.find( { loc : { $within : { $polygon : [[1, 2]] } } } ).toArray() - okay = false -} -catch(e){} -assert(okay) -try{ - t.find( { loc 
: { $within : { $polygon : [[1, 2], [2, 3]] } } } ).toArray() - okay = false -} -catch(e){} -assert(okay) - diff --git a/jstests/geo_polygon1_noindex.js b/jstests/geo_polygon1_noindex.js deleted file mode 100644 index 2a94bbbfd09..00000000000 --- a/jstests/geo_polygon1_noindex.js +++ /dev/null @@ -1,47 +0,0 @@ -// SERVER-7343: allow $within without a geo index. - -t = db.geo_polygon1_noindex; -t.drop(); - -num = 0; -for ( x=1; x < 9; x++ ){ - for ( y= 1; y < 9; y++ ){ - o = { _id : num++ , loc : [ x , y ] }; - t.save( o ); - } -} - -triangle = [[0,0], [1,1], [0,2]]; - -// Look at only a small slice of the data within a triangle -assert.eq( 1 , t.find({ loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" ); - -boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; - -assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" ); - -//Make sure we can add object-based polygons -assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() ) - -// Look in a box much bigger than the one we have data in -boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; -assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" ); - -t.drop(); - -pacman = [ - [0,2], [0,4], [2,6], [4,6], // Head - [6,4], [4,3], [6,2], // Mouth - [4,0], [2,0] // Bottom - ]; - -t.save({loc: [1,3] }); // Add a point that's in -assert.isnull( db.getLastError() ) - -assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" ); - -t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening -t.save({ loc : [3, 7] }) // Add a point above the center of the head -t.save({ loc : [3,-1] }) // Add a point below the center of the bottom - -assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" ); diff --git 
a/jstests/geo_polygon2.js b/jstests/geo_polygon2.js deleted file mode 100644 index 617801bfc7b..00000000000 --- a/jstests/geo_polygon2.js +++ /dev/null @@ -1,266 +0,0 @@ -// -// More tests for N-dimensional polygon querying -// - -// Create a polygon of some shape (no holes) -// using turtle graphics. Basically, will look like a very contorted octopus (quad-pus?) shape. -// There are no holes, but some edges will probably touch. - -var numTests = 10 - -for ( var test = 0; test < numTests; test++ ) { - - Random.srand( 1337 + test ); - - var numTurtles = 4; - var gridSize = [ 40, 40 ]; - var turtleSteps = 500; - var bounds = [ Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001 ] - var rotation = Math.PI * Random.rand(); - var bits = Math.floor( Random.rand() * 32 ); - - printjson( { test : test, rotation : rotation, bits : bits }) - - var rotatePoint = function( x, y ) { - - if( y == undefined ){ - y = x[1] - x = x[0] - } - - xp = x * Math.cos( rotation ) - y * Math.sin( rotation ) - yp = y * Math.cos( rotation ) + x * Math.sin( rotation ) - - var scaleX = (bounds[1] - bounds[0]) / 360 - var scaleY = (bounds[1] - bounds[0]) / 360 - - x *= scaleX - y *= scaleY - - return [xp, yp] - - } - - - var grid = [] - for ( var i = 0; i < gridSize[0]; i++ ) { - grid.push( new Array( gridSize[1] ) ) - } - - grid.toString = function() { - - var gridStr = ""; - for ( var j = grid[0].length - 1; j >= -1; j-- ) { - for ( var i = 0; i < grid.length; i++ ) { - if ( i == 0 ) - gridStr += ( j == -1 ? " " : ( j % 10) ) + ": " - if ( j != -1 ) - gridStr += "[" + ( grid[i][j] != undefined ? grid[i][j] : " " ) + "]" - else - gridStr += " " + ( i % 10 ) + " " - } - gridStr += "\n" - } - - return gridStr; - } - - var turtles = [] - for ( var i = 0; i < numTurtles; i++ ) { - - var up = ( i % 2 == 0 ) ? i - 1 : 0; - var left = ( i % 2 == 1 ) ? 
( i - 1 ) - 1 : 0; - - turtles[i] = [ - [ Math.floor( gridSize[0] / 2 ), Math.floor( gridSize[1] / 2 ) ], - [ Math.floor( gridSize[0] / 2 ) + left, Math.floor( gridSize[1] / 2 ) + up ] ]; - - grid[turtles[i][1][0]][turtles[i][1][1]] = i - - } - - grid[Math.floor( gridSize[0] / 2 )][Math.floor( gridSize[1] / 2 )] = "S" - - // print( grid.toString() ) - - var pickDirections = function() { - - var up = Math.floor( Random.rand() * 3 ) - if ( up == 2 ) - up = -1 - - if ( up == 0 ) { - var left = Math.floor( Random.rand() * 3 ) - if ( left == 2 ) - left = -1 - } else - left = 0 - - if ( Random.rand() < 0.5 ) { - var swap = left - left = up - up = swap - } - - return [ left, up ] - } - - for ( var s = 0; s < turtleSteps; s++ ) { - - for ( var t = 0; t < numTurtles; t++ ) { - - var dirs = pickDirections() - var up = dirs[0] - var left = dirs[1] - - var lastTurtle = turtles[t][turtles[t].length - 1] - var nextTurtle = [ lastTurtle[0] + left, lastTurtle[1] + up ] - - if ( nextTurtle[0] >= gridSize[0] || nextTurtle[1] >= gridSize[1] || nextTurtle[0] < 0 || nextTurtle[1] < 0 ) - continue; - - if ( grid[nextTurtle[0]][nextTurtle[1]] == undefined ) { - turtles[t].push( nextTurtle ) - grid[nextTurtle[0]][nextTurtle[1]] = t; - } - - } - } - - // print( grid.toString() ) - - turtlePaths = [] - for ( var t = 0; t < numTurtles; t++ ) { - - turtlePath = [] - - var nextSeg = function(currTurtle, prevTurtle) { - - var pathX = currTurtle[0] - - if ( currTurtle[1] < prevTurtle[1] ) { - pathX = currTurtle[0] + 1 - pathY = prevTurtle[1] - } else if ( currTurtle[1] > prevTurtle[1] ) { - pathX = currTurtle[0] - pathY = currTurtle[1] - } else if ( currTurtle[0] < prevTurtle[0] ) { - pathX = prevTurtle[0] - pathY = currTurtle[1] - } else if ( currTurtle[0] > prevTurtle[0] ) { - pathX = currTurtle[0] - pathY = currTurtle[1] + 1 - } - - // print( " Prev : " + prevTurtle + " Curr : " + currTurtle + " path - // : " - // + [pathX, pathY]); - - return [ pathX, pathY ] - } - - for ( var s = 1; s < 
turtles[t].length; s++ ) { - - currTurtle = turtles[t][s] - prevTurtle = turtles[t][s - 1] - - turtlePath.push( nextSeg( currTurtle, prevTurtle ) ) - - } - - for ( var s = turtles[t].length - 2; s >= 0; s-- ) { - - currTurtle = turtles[t][s] - prevTurtle = turtles[t][s + 1] - - turtlePath.push( nextSeg( currTurtle, prevTurtle ) ) - - } - - // printjson( turtlePath ) - - // End of the line is not inside our polygon. - var lastTurtle = turtles[t][turtles[t].length - 1] - grid[lastTurtle[0]][lastTurtle[1]] = undefined - - fixedTurtlePath = [] - for ( var s = 1; s < turtlePath.length; s++ ) { - - if ( turtlePath[s - 1][0] == turtlePath[s][0] && turtlePath[s - 1][1] == turtlePath[s][1] ) - continue; - - var up = turtlePath[s][1] - turtlePath[s - 1][1] - var right = turtlePath[s][0] - turtlePath[s - 1][0] - var addPoint = ( up != 0 && right != 0 ) - - if ( addPoint && up != right ) { - fixedTurtlePath.push( [ turtlePath[s][0], turtlePath[s - 1][1] ] ) - } else if ( addPoint ) { - fixedTurtlePath.push( [ turtlePath[s - 1][0], turtlePath[s][1] ] ) - } - - fixedTurtlePath.push( turtlePath[s] ) - - } - - // printjson( fixedTurtlePath ) - - turtlePaths.push( fixedTurtlePath ) - - } - - // Uncomment to print polygon shape - // print( grid.toString() ) - - var polygon = [] - for ( var t = 0; t < turtlePaths.length; t++ ) { - for ( var s = 0; s < turtlePaths[t].length; s++ ) { - polygon.push( rotatePoint( turtlePaths[t][s] ) ) - } - } - - // Uncomment to print out polygon - // printjson( polygon ) - - t = db.polytest2 - t.drop() - - // Test single and multi-location documents - var pointsIn = 0 - var pointsOut = 0 - var allPointsIn = [] - var allPointsOut = [] - - for ( var j = grid[0].length - 1; j >= 0; j-- ) { - for ( var i = 0; i < grid.length; i++ ) { - - var point = rotatePoint( [ i + 0.5, j + 0.5 ] ) - - t.insert( { loc : point } ) - if ( grid[i][j] != undefined ){ - allPointsIn.push( point ) - pointsIn++ - } - else{ - allPointsOut.push( point ) - pointsOut++ - } - } - } 
- - t.ensureIndex( { loc : "2d" }, { bits : 1 + bits, max : bounds[1], min : bounds[0] } ) - assert.isnull( db.getLastError() ) - - t.insert( { loc : allPointsIn } ) - t.insert( { loc : allPointsOut } ) - allPoints = allPointsIn.concat( allPointsOut ) - t.insert( { loc : allPoints } ) - - print( "Points : " ) - printjson( { pointsIn : pointsIn, pointsOut : pointsOut } ) - //print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() ) - - assert.eq( gridSize[0] * gridSize[1] + 3, t.find().count() ) - assert.eq( 2 + pointsIn, t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() ); - -} diff --git a/jstests/geo_polygon3.js b/jstests/geo_polygon3.js deleted file mode 100644 index b144bfbc589..00000000000 --- a/jstests/geo_polygon3.js +++ /dev/null @@ -1,54 +0,0 @@ -// -// Tests for polygon querying with varying levels of accuracy -// - -var numTests = 31; - -for( var n = 0; n < numTests; n++ ){ - - t = db.geo_polygon3; - t.drop(); - - num = 0; - for ( x=1; x < 9; x++ ){ - for ( y= 1; y < 9; y++ ){ - o = { _id : num++ , loc : [ x , y ] }; - t.save( o ); - } - } - - t.ensureIndex( { loc : "2d" }, { bits : 2 + n } ); - - triangle = [[0,0], [1,1], [0,2]]; - - // Look at only a small slice of the data within a triangle - assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).itcount() , "Triangle Test" ); - - - boxBounds = [ [0,0], [0,10], [10,10], [10,0] ]; - - assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Bounding Box Test" ); - - // Look in a box much bigger than the one we have data in - boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]]; - assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Big Bounding Box Test" ); - - t.drop(); - - pacman = [ - [0,2], [0,4], [2,6], [4,6], // Head - [6,4], [4,3], [6,2], // Mouth - [4,0], [2,0] // Bottom - ]; - - t.save({loc: [1,3] }); // Add a point that's in - t.ensureIndex( { loc : 
"2d" }, { bits : 2 + t } ); - - assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman single point" ); - - t.save({ loc : [5, 3] }) // Add a point that's out right in the mouth opening - t.save({ loc : [3, 7] }) // Add a point above the center of the head - t.save({ loc : [3,-1] }) // Add a point below the center of the bottom - - assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman double point" ); -} diff --git a/jstests/geo_queryoptimizer.js b/jstests/geo_queryoptimizer.js deleted file mode 100644 index 7a438bce8fb..00000000000 --- a/jstests/geo_queryoptimizer.js +++ /dev/null @@ -1,27 +0,0 @@ - -t = db.geo_qo1; -t.drop() - -t.ensureIndex({loc:"2d"}) - -t.insert({'issue':0}) -t.insert({'issue':1}) -t.insert({'issue':2}) -t.insert({'issue':2, 'loc':[30.12,-118]}) -t.insert({'issue':1, 'loc':[30.12,-118]}) -t.insert({'issue':0, 'loc':[30.12,-118]}) - -assert.eq( 6 , t.find().itcount() , "A1" ) - -assert.eq( 2 , t.find({'issue':0}).itcount() , "A2" ) - -assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "A3" ) - -assert.eq( 2 , t.find({'issue':0}).itcount() , "B1" ) - -assert.eq( 6 , t.find().itcount() , "B2" ) - -assert.eq( 2 , t.find({'issue':0}).itcount() , "B3" ) - -assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "B4" ) - diff --git a/jstests/geo_regex0.js b/jstests/geo_regex0.js deleted file mode 100644 index 79042b9074e..00000000000 --- a/jstests/geo_regex0.js +++ /dev/null @@ -1,18 +0,0 @@ -// From SERVER-2247 -// Tests to make sure regex works with geo indices - -t = db.regex0 -t.drop() - -t.ensureIndex( { point : '2d', words : 1 } ) -t.insert( { point : [ 1, 1 ], words : [ 'foo', 'bar' ] } ) - -regex = { words : /^f/ } -geo = { point : { $near : [ 1, 1 ] } } -both = { point : { $near : [ 1, 1 ] }, words : /^f/ } - -assert.eq(1, t.find( regex ).count() ) -assert.eq(1, t.find( geo ).count() ) -assert.eq(1, t.find( both ).count() ) - - diff 
--git a/jstests/geo_s2cursorlimitskip.js b/jstests/geo_s2cursorlimitskip.js deleted file mode 100644 index a4eaf74afbc..00000000000 --- a/jstests/geo_s2cursorlimitskip.js +++ /dev/null @@ -1,69 +0,0 @@ -// Test various cursor behaviors -var t = db.geo_s2getmmm -t.drop(); -t.ensureIndex({geo: "2dsphere"}); - -Random.setRandomSeed(); -var random = Random.rand; - -/* - * To test that getmore is working within 2dsphere index. - * We insert a bunch of points, get a cursor, and fetch some - * of the points. Then we insert a bunch more points, and - * finally fetch a bunch more. - * If the final fetches work successfully, then getmore should - * be working - */ -function sign() { return random() > 0.5 ? 1 : -1; } -function insertRandomPoints(num, minDist, maxDist){ - for(var i = 0; i < num; i++){ - var lat = sign() * (minDist + random() * (maxDist - minDist)); - var lng = sign() * (minDist + random() * (maxDist - minDist)); - var point = { geo: { type: "Point", coordinates: [lng, lat] } }; - t.insert(point); - assert(!db.getLastError()); - } -} - -var initialPointCount = 200 -var smallBit = 10 -var secondPointCount = 100 - -// Insert points between 0.01 and 1.0 away. -insertRandomPoints(initialPointCount, 0.01, 1.0); - -var cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).batchSize(4); -assert.eq(cursor.count(), initialPointCount); - -for(var j = 0; j < smallBit; j++){ - assert(cursor.hasNext()); - cursor.next(); -} -// We looked at (initialPointCount - smallBit) points, should be more. -assert(cursor.hasNext()) - -// Insert points outside of the shell we've tested thus far -insertRandomPoints(secondPointCount, 2.01, 3.0); -assert.eq(cursor.count(), initialPointCount + secondPointCount) - -for(var k = 0; k < initialPointCount + secondPointCount - smallBit; k++){ - assert(cursor.hasNext()) - var tmpPoint = cursor.next(); -} -// Shouldn't be any more points to look at now. 
-assert(!cursor.hasNext()) - -var someLimit = 23; -// Make sure limit does something. -cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit) -// Count doesn't work here -- ignores limit/skip, so we use itcount. -assert.eq(cursor.itcount(), someLimit) -// Make sure skip works by skipping some stuff ourselves. -var someSkip = 3; -cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit + someSkip) -for (var i = 0; i < someSkip; ++i) { cursor.next(); } -var cursor2 = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).skip(someSkip).limit(someLimit) -while (cursor.hasNext()) { - assert(cursor2.hasNext()); - assert.eq(cursor.next(), cursor2.next()); -} diff --git a/jstests/geo_s2dedupnear.js b/jstests/geo_s2dedupnear.js deleted file mode 100644 index ac31e082891..00000000000 --- a/jstests/geo_s2dedupnear.js +++ /dev/null @@ -1,11 +0,0 @@ -// Make sure that we don't return several of the same result due to faulty -// assumptions about the btree cursor. That is, don't return duplicate results. 
-t = db.geo_s2dedupnear -t.drop() - -t.ensureIndex( { geo : "2dsphere" } ) -var x = { "type" : "Polygon", - "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]} -t.insert({geo: x}) -res = t.find({geo: {$geoNear: {"type" : "Point", "coordinates" : [31, 41]}}}) -assert.eq(res.itcount(), 1) diff --git a/jstests/geo_s2descindex.js b/jstests/geo_s2descindex.js deleted file mode 100644 index 39d153a6e55..00000000000 --- a/jstests/geo_s2descindex.js +++ /dev/null @@ -1,64 +0,0 @@ -// -// Tests 2dsphere with descending fields, ensures correct lookup -// - -var coll = db.getCollection("twodspheredesc"); - -var descriptors = [["field1", -1], ["field2", -1], ["coordinates", "2dsphere"]] -var docA = {field1 : "a", field2 : 1, coordinates : [-118.2400013, 34.073893]} -var docB = {field1 : "b", field2 : 1, coordinates : [-118.2400012, 34.073894]} - -// Try both regular and near index cursors -var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893], - 0.44915760491198753]}}}; -var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0, 0]}}}; - -// -// The idea here is we try "2dsphere" indexes in combination with descending -// other fields in various -// positions and ensure that we return correct results. 
-// - -for ( var t = 0; t < descriptors.length; t++) { - - var descriptor = {}; - for ( var i = 0; i < descriptors.length; i++) { - descriptor[descriptors[i][0]] = descriptors[i][1]; - } - - jsTest.log("Trying 2dsphere index with descriptor " + tojson(descriptor)); - - coll.drop(); - coll.ensureIndex(descriptor); - - coll.insert(docA); - coll.insert(docB); - - assert.eq(1, coll.count(Object.merge(query, {field1 : "a"}))); - assert.eq(1, coll.count(Object.merge(query, {field1 : "b"}))); - assert.eq(2, coll.count(Object.merge(query, {field2 : 1}))); - assert.eq(0, coll.count(Object.merge(query, {field2 : 0}))); - - var firstEls = descriptors.splice(1); - descriptors = firstEls.concat(descriptors); -} - -// -// Data taken from previously-hanging result -// - -jsTest.log("Trying case found in wild..."); - -coll.drop(); -coll.ensureIndex({coordinates : "2dsphere", field : -1}); -coll.insert({coordinates : [-118.240013, 34.073893]}); -var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893], - 0.44915760491198753]}}, - field : 1}; - -assert.eq(null, coll.findOne(query)); -coll.remove({}) -coll.insert({coordinates : [-118.240013, 34.073893], field : 1}); -assert.neq(null, coll.findOne(query)); - -jsTest.log("Success!"); diff --git a/jstests/geo_s2disjoint_holes.js b/jstests/geo_s2disjoint_holes.js deleted file mode 100644 index cd8f3f4d58f..00000000000 --- a/jstests/geo_s2disjoint_holes.js +++ /dev/null @@ -1,94 +0,0 @@ -// -// We should prohibit polygons with holes not bounded by their exterior shells. -// -// From spec: -// -// "For Polygons with multiple rings, the first must be the exterior ring and -// any others must be interior rings or holes." -// http://geojson.org/geojson-spec.html#polygon -// - -var t = db.geo_s2disjoint_holes, - coordinates = [ - // One square. - [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]], - // Another disjoint square. 
- [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]] - ], - poly = { - type: 'Polygon', - coordinates: coordinates - }, - multiPoly = { - type: 'MultiPolygon', - // Multi-polygon's coordinates are wrapped in one more array. - coordinates: [coordinates] - }; - -t.drop(); - -jsTest.log("We're going to print some error messages, don't be alarmed."); - -// -// Can't query with a polygon or multi-polygon that has a non-contained hole. -// -print(assert.throws( - function() { - t.findOne({geo: {$geoWithin: {$geometry: poly}}}); - }, - [], - "parsing a polygon with non-overlapping holes.")); - -print(assert.throws( - function() { - t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}}); - }, - [], - "parsing a multi-polygon with non-overlapping holes.")); - -// -// Can't insert a bad polygon or a bad multi-polygon with a 2dsphere index. -// -t.createIndex({p: '2dsphere'}); -t.insert({p: poly}); -var error = t.getDB().getLastError(); -printjson(error); -assert(error); - -t.insert({p: multiPoly}); -error = t.getDB().getLastError(); -printjson(error); -assert(error); - -// -// Can't create a 2dsphere index when the collection contains a bad polygon or -// bad multi-polygon. -// -t.drop(); -t.insert({p: poly}); -t.createIndex({p: '2dsphere'}); -error = t.getDB().getLastError(); -printjson(error); -assert(error); -assert.eq(1, t.getIndexes().length); - -t.drop(); -t.insert({p: multiPoly}); -t.createIndex({p: '2dsphere'}); -error = t.getDB().getLastError(); -printjson(error); -assert(error); -assert.eq(1, t.getIndexes().length); - -// -// But with no index we can insert bad polygons and bad multi-polygons. 
-// -t.drop(); -t.insert({p: poly}); -assert.eq(null, t.getDB().getLastError()); -t.insert({p: multiPoly}); -assert.eq(null, t.getDB().getLastError()); - -t.drop(); - -jsTest.log("Success.") diff --git a/jstests/geo_s2dupe_points.js b/jstests/geo_s2dupe_points.js deleted file mode 100644 index 74eb09fa63a..00000000000 --- a/jstests/geo_s2dupe_points.js +++ /dev/null @@ -1,72 +0,0 @@ -// See: SERVER-9240, SERVER-9401. -// s2 rejects shapes with duplicate adjacent points as invalid, but they are -// valid in GeoJSON. We store the duplicates, but internally remove them -// before indexing or querying. -t = db.geo_s2dupe_points -t.drop() -t.ensureIndex({geo: "2dsphere"}) - -function testDuplicates(shapeName, shapeWithDupes, shapeWithoutDupes) { - // insert a doc with dupes - t.insert(shapeWithDupes) - assert(!db.getLastError(), db.getLastError()); - - // duplicates are preserved when the document is fetched by _id - assert.eq(shapeWithDupes, t.findOne({_id: shapeName})); - assert.neq(shapeWithoutDupes, t.findOne({_id: shapeName}).geo); - - // can query with $geoIntersects inserted doc using both the duplicated and de-duplicated docs - assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithDupes.geo } } } ).itcount(), 1); - assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithoutDupes } } } ).itcount(), 1); - - // direct document equality in queries is preserved - assert.eq(t.find({ geo: shapeWithoutDupes} ).itcount(), 0); - assert.eq(t.find({ geo: shapeWithDupes.geo } ).itcount(), 1); -} - -// LineString -var lineWithDupes = { _id: "line", geo: { type: "LineString", - coordinates: [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6] ] - } -}; -var lineWithoutDupes = { type: "LineString", coordinates: [ [40,5], [41,6] ] }; - -// Polygon -var polygonWithDupes = { _id: "poly", geo: { type: "Polygon", - coordinates: [ - [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ], - [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], 
[-2.0, -2.0] ] - ] } -}; -var polygonWithoutDupes = { type: "Polygon", - coordinates: [ - [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ], - [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0] ] - ] -}; - -// MultiPolygon -var multiPolygonWithDupes = { _id: "multi", geo: { type: "MultiPolygon", coordinates: [ - [ - [ [102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ] - ], - [ - [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], - [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.8, 0.8], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] - ] - ] -} }; -var multiPolygonWithoutDupes = { type: "MultiPolygon", coordinates: [ - [ - [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ] - ], - [ - [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], - [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] - ] - ] -}; - -testDuplicates("line", lineWithDupes, lineWithoutDupes); -testDuplicates("poly", polygonWithDupes, polygonWithoutDupes); -testDuplicates("multi", multiPolygonWithDupes, multiPolygonWithoutDupes); diff --git a/jstests/geo_s2edgecases.js b/jstests/geo_s2edgecases.js deleted file mode 100755 index bf46baba744..00000000000 --- a/jstests/geo_s2edgecases.js +++ /dev/null @@ -1,40 +0,0 @@ -t = db.geo_s2edgecases -t.drop() - -roundworldpoint = { "type" : "Point", "coordinates": [ 180, 0 ] } - -// Opposite the equator -roundworld = { "type" : "Polygon", - "coordinates" : [ [ [179,1], [-179,1], [-179,-1], [179,-1], [179,1]]]} -t.insert({geo : roundworld}) - -roundworld2 = { "type" : "Polygon", - "coordinates" : [ [ [179,1], [179,-1], [-179,-1], [-179,1], [179,1]]]} -t.insert({geo : roundworld2}) - -// North pole -santapoint = { "type" : "Point", "coordinates": [ 180, 90 ] } -santa = { "type" : "Polygon", - "coordinates" : [ [ [179,89], [179,90], [-179,90], [-179,89], [179,89]]]} -t.insert({geo : santa}) -santa2 = 
{ "type" : "Polygon", - "coordinates" : [ [ [179,89], [-179,89], [-179,90], [179,90], [179,89]]]} -t.insert({geo : santa2}) - -// South pole -penguinpoint = { "type" : "Point", "coordinates": [ 0, -90 ] } -penguin1 = { "type" : "Polygon", - "coordinates" : [ [ [0,-89], [0,-90], [179,-90], [179,-89], [0,-89]]]} -t.insert({geo : penguin1}) -penguin2 = { "type" : "Polygon", - "coordinates" : [ [ [0,-89], [179,-89], [179,-90], [0,-90], [0,-89]]]} -t.insert({geo : penguin2}) - -t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ) - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : roundworldpoint} } }); -assert.eq(res.count(), 2); -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : santapoint} } }); -assert.eq(res.count(), 2); -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : penguinpoint} } }); -assert.eq(res.count(), 2); diff --git a/jstests/geo_s2exact.js b/jstests/geo_s2exact.js deleted file mode 100644 index a7cf9627765..00000000000 --- a/jstests/geo_s2exact.js +++ /dev/null @@ -1,21 +0,0 @@ -// Queries on exact geometry should return the exact geometry. -t = db.geo_s2exact -t.drop() - -function test(geometry) { - t.insert({geo: geometry}) - assert.eq(1, t.find({geo: geometry}).itcount(), geometry) - t.ensureIndex({geo: "2dsphere"}) - assert.eq(1, t.find({geo: geometry}).itcount(), geometry) - t.dropIndex({geo: "2dsphere"}) -} - -pointA = { "type" : "Point", "coordinates": [ 40, 5 ] } -test(pointA) - -someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]} -test(someline) - -somepoly = { "type" : "Polygon", - "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} -test(somepoly) diff --git a/jstests/geo_s2holesameasshell.js b/jstests/geo_s2holesameasshell.js deleted file mode 100644 index c3a127305ff..00000000000 --- a/jstests/geo_s2holesameasshell.js +++ /dev/null @@ -1,46 +0,0 @@ -// If polygons have holes, the holes cannot be equal to the entire geometry. 
-var t = db.geo_s2holessameasshell -t.drop(); -t.ensureIndex({geo: "2dsphere"}); - -var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]}; -var edgePoint = {"type": "Point", "coordinates": [0, 0.5]}; -var cornerPoint = {"type": "Point", "coordinates": [0, 0]}; - -// Various "edge" cases. None of them should be returned by the non-polygon -// polygon below. -t.insert({geo : centerPoint}); -t.insert({geo : edgePoint}); -t.insert({geo : cornerPoint}); - -// This generates an empty covering. -var polygonWithFullHole = { "type" : "Polygon", "coordinates": [ - [[0,0], [0,1], [1, 1], [1, 0], [0, 0]], - [[0,0], [0,1], [1, 1], [1, 0], [0, 0]] - ] -}; - -// No keys for insert should error. -t.insert({geo: polygonWithFullHole}) -assert(db.getLastError()) - -// No covering to search over should give an empty result set. -assert.throws(function() { - return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count()}) - -// Similar polygon to the one above, but is covered by two holes instead of -// one. -var polygonWithTwoHolesCoveringWholeArea = {"type" : "Polygon", "coordinates": [ - [[0,0], [0,1], [1, 1], [1, 0], [0, 0]], - [[0,0], [0,0.5], [1, 0.5], [1, 0], [0, 0]], - [[0,0.5], [0,1], [1, 1], [1, 0.5], [0, 0.5]] - ] -}; - -// No keys for insert should error. -t.insert({geo: polygonWithTwoHolesCoveringWholeArea}); -assert(db.getLastError()); - -// No covering to search over should give an empty result set. -assert.throws(function() { - return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count()}) diff --git a/jstests/geo_s2index.js b/jstests/geo_s2index.js deleted file mode 100755 index cabcea72d19..00000000000 --- a/jstests/geo_s2index.js +++ /dev/null @@ -1,107 +0,0 @@ -t = db.geo_s2index -t.drop() - -// We internally drop adjacent duplicate points in lines. 
-someline = { "type" : "LineString", "coordinates": [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6]]} -t.insert( {geo : someline , nonGeo: "someline"}) -t.ensureIndex({geo: "2dsphere"}) -foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40,5]}}}}).next(); -assert.eq(foo.geo, someline); -t.dropIndex({geo: "2dsphere"}) - -pointA = { "type" : "Point", "coordinates": [ 40, 5 ] } -t.insert( {geo : pointA , nonGeo: "pointA"}) - -pointD = { "type" : "Point", "coordinates": [ 41.001, 6.001 ] } -t.insert( {geo : pointD , nonGeo: "pointD"}) - -pointB = { "type" : "Point", "coordinates": [ 41, 6 ] } -t.insert( {geo : pointB , nonGeo: "pointB"}) - -pointC = { "type" : "Point", "coordinates": [ 41, 6 ] } -t.insert( {geo : pointC} ) - -// Add a point within the polygon but not on the border. Don't want to be on -// the path of the polyline. -pointE = { "type" : "Point", "coordinates": [ 40.6, 5.4 ] } -t.insert( {geo : pointE} ) - -// Make sure we can index this without error. -t.insert({nonGeo: "noGeoField!"}) - -somepoly = { "type" : "Polygon", - "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} -t.insert( {geo : somepoly, nonGeo: "somepoly" }) - -t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ) -// We have a point without any geo data. Don't error. 
-assert(!db.getLastError()) - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } }); -assert.eq(res.itcount(), 3); - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } }); -assert.eq(res.itcount(), 4); - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } }); -assert.eq(res.itcount(), 1); - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } }) -assert.eq(res.itcount(), 5); - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 6); - -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 6); - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1) -assert.eq(res.itcount(), 1); - -res = t.find({ "nonGeo": "pointA", - "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); - -// Don't crash mongod if we give it bad input. -t.drop() -t.ensureIndex({loc: "2dsphere", x:1}) -t.save({loc: [0,0]}) -assert.throws(function() { return t.count({loc: {$foo:[0,0]}}) }) -assert.throws(function() { return t.find({ "nonGeo": "pointA", - "geo" : { "$geoIntersects" : { "$geometry" : somepoly}, - "$near": {"$geometry" : somepoly }}}).count()}) - -// If we specify a datum, it has to be valid (WGS84). -t.drop() -t.ensureIndex({loc: "2dsphere"}) -t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'EPSG:2000'}}}}) -assert(db.getLastError()); -assert.eq(0, t.find().itcount()) -t.insert({loc: {type:'Point', coordinates: [40, 5]}}) -assert(!db.getLastError()); -t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'EPSG:4326'}}}}) -assert(!db.getLastError()); -t.insert({loc: {type:'Point', coordinates: [40, 5], crs:{ type: 'name', properties:{name:'urn:ogc:def:crs:OGC:1.3:CRS84'}}}}) -assert(!db.getLastError()); - -// We can pass level parameters and we verify that they're valid. 
-// 0 <= coarsestIndexedLevel <= finestIndexedLevel <= 30. -t.drop(); -t.save({loc: [0,0]}) -t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 17, coarsestIndexedLevel: 5}) -assert(!db.getLastError()); - -t.drop(); -t.save({loc: [0,0]}) -t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 31, coarsestIndexedLevel: 5}) -assert(db.getLastError()); - -t.drop(); -t.save({loc: [0,0]}) -t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: 0}) -assert(!db.getLastError()); - -t.drop(); -t.save({loc: [0,0]}) -t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: -1}) -assert(db.getLastError()); diff --git a/jstests/geo_s2indexoldformat.js b/jstests/geo_s2indexoldformat.js deleted file mode 100755 index 6af593a817c..00000000000 --- a/jstests/geo_s2indexoldformat.js +++ /dev/null @@ -1,28 +0,0 @@ -// Make sure that the 2dsphere index can deal with non-GeoJSON points. -// 2dsphere does not accept legacy shapes, only legacy points. -t = db.geo_s2indexoldformat -t.drop() - -t.insert( {geo : [40, 5], nonGeo: ["pointA"]}) -t.insert( {geo : [41.001, 6.001], nonGeo: ["pointD"]}) -t.insert( {geo : [41, 6], nonGeo: ["pointB"]}) -t.insert( {geo : [41, 6]} ) -t.insert( {geo : {x:40.6, y:5.4}} ) - -t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } ) - -res = t.find({ "geo" : { "$geoIntersects" : { "$geometry": {x:40, y:5}}}}) -assert.eq(res.count(), 1); - -res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [41,6]}}}) -assert.eq(res.count(), 2); - -// We don't support legacy polygons in 2dsphere. 
-t.insert( {geo : [[40,5],[40,6],[41,6],[41,5]], nonGeo: ["somepoly"] }) -assert(db.getLastError()); - -t.insert( {geo : {a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}}) -assert(db.getLastError()); - -res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [[40,5],[40,6],[41,6],[41,5]]}}}) -assert(db.getLastError()); diff --git a/jstests/geo_s2indexversion1.js b/jstests/geo_s2indexversion1.js deleted file mode 100644 index 8524faeddbd..00000000000 --- a/jstests/geo_s2indexversion1.js +++ /dev/null @@ -1,150 +0,0 @@ -// Tests 2dsphere index option "2dsphereIndexVersion". Verifies that GeoJSON objects that are new -// in version 2 are not allowed in version 1. - -var coll = db.getCollection("geo_s2indexversion1"); -coll.drop(); - -// -// Index build should fail for invalid values of "2dsphereIndexVersion". -// - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": -1}); -assert.gleError(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 0}); -assert.gleError(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 3}); -assert.gleError(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": Infinity}); -assert.gleError(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": "foo"}); -assert.gleError(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": {a: 1}}); -assert.gleError(db); -coll.drop(); - -// -// Index build should succeed for valid values of "2dsphereIndexVersion". 
-// - -coll.ensureIndex({geo: "2dsphere"}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(1)}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(1)}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberInt(2)}); -assert.gleSuccess(db); -coll.drop(); - -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": NumberLong(2)}); -assert.gleSuccess(db); -coll.drop(); - -// -// {2dsphereIndexVersion: 2} should be the default for new indexes. -// - -coll.ensureIndex({geo: "2dsphere"}); -assert.gleSuccess(db); -var specObj = coll.getDB().system.indexes.findOne({ns: coll.getFullName(), name: "geo_2dsphere"}); -assert.eq(2, specObj["2dsphereIndexVersion"]); -coll.drop(); - -// -// Test compatibility of various GeoJSON objects with both 2dsphere index versions. 
-// - -var pointDoc = {geo: {type: "Point", coordinates: [40, 5]}}; -var lineStringDoc = {geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}}; -var polygonDoc = {geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}}; -var multiPointDoc = {geo: {type: "MultiPoint", - coordinates: [[-73.9580, 40.8003], [-73.9498, 40.7968], - [-73.9737, 40.7648], [-73.9814, 40.7681]]}}; -var multiLineStringDoc = {geo: {type: "MultiLineString", - coordinates: [[[-73.96943, 40.78519], [-73.96082, 40.78095]], - [[-73.96415, 40.79229], [-73.95544, 40.78854]], - [[-73.97162, 40.78205], [-73.96374, 40.77715]], - [[-73.97880, 40.77247], [-73.97036, 40.76811]]]}}; -var multiPolygonDoc = {geo: {type: "MultiPolygon", - coordinates: [[[[-73.958, 40.8003], [-73.9498, 40.7968], - [-73.9737, 40.7648], [-73.9814, 40.7681], - [-73.958, 40.8003]]], - [[[-73.958, 40.8003], [-73.9498, 40.7968], - [-73.9737, 40.7648], [-73.958, 40.8003]]]]}}; -var geometryCollectionDoc = {geo: {type: "GeometryCollection", - geometries: [{type: "MultiPoint", - coordinates: [[-73.9580, 40.8003], - [-73.9498, 40.7968], - [-73.9737, 40.7648], - [-73.9814, 40.7681]]}, - {type: "MultiLineString", - coordinates: [[[-73.96943, 40.78519], - [-73.96082, 40.78095]], - [[-73.96415, 40.79229], - [-73.95544, 40.78854]], - [[-73.97162, 40.78205], - [-73.96374, 40.77715]], - [[-73.97880, 40.77247], - [-73.97036, 40.76811]]]}]}}; - -// {2dsphereIndexVersion: 2} indexes allow all supported GeoJSON objects. 
-coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2}); -assert.gleSuccess(db); -coll.insert(pointDoc); -assert.gleSuccess(db); -coll.insert(lineStringDoc); -assert.gleSuccess(db); -coll.insert(polygonDoc); -assert.gleSuccess(db); -coll.insert(multiPointDoc); -assert.gleSuccess(db); -coll.insert(multiLineStringDoc); -assert.gleSuccess(db); -coll.insert(multiPolygonDoc); -assert.gleSuccess(db); -coll.insert(geometryCollectionDoc); -assert.gleSuccess(db); -coll.drop(); - -// {2dsphereIndexVersion: 1} indexes allow only Point, LineString, and Polygon. -coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 1}); -assert.gleSuccess(db); -coll.insert(pointDoc); -assert.gleSuccess(db); -coll.insert(lineStringDoc); -assert.gleSuccess(db); -coll.insert(polygonDoc); -assert.gleSuccess(db); -coll.insert(multiPointDoc); -assert.gleError(db); -coll.insert(multiLineStringDoc); -assert.gleError(db); -coll.insert(multiPolygonDoc); -assert.gleError(db); -coll.insert(geometryCollectionDoc); -assert.gleError(db); -coll.drop(); diff --git a/jstests/geo_s2intersection.js b/jstests/geo_s2intersection.js deleted file mode 100644 index 42abacca98d..00000000000 --- a/jstests/geo_s2intersection.js +++ /dev/null @@ -1,141 +0,0 @@ -var t = db.geo_s2intersectinglines -t.drop() -t.ensureIndex( { geo : "2dsphere" } ); - -/* All the tests in this file are generally confirming intersections based upon - * these three geo objects. - */ -var canonLine = { - name: 'canonLine', - geo: { - type: "LineString", - coordinates: [[0.0, 0.0], [1.0, 0.0]] - } -}; - -var canonPoint = { - name: 'canonPoint', - geo: { - type: "Point", - coordinates: [10.0, 10.0] - } -}; - -var canonPoly = { - name: 'canonPoly', - geo: { - type: "Polygon", - coordinates: [ - [[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]] - ] - } -}; - -t.insert(canonLine); -t.insert(canonPoint); -t.insert(canonPoly); - - -//Case 1: Basic sanity intersection. 
-var testLine = {type: "LineString", - coordinates: [[0.5, 0.5], [0.5, -0.5]]}; - -var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonLine'); - - -//Case 2: Basic Polygon intersection. -// we expect that the canonLine should intersect with this polygon. -var testPoly = {type: "Polygon", - coordinates: [ - [[0.4, -0.1],[0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]] - ]} - -result = t.find({geo: {$geoIntersects: {$geometry: testPoly}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonLine'); - - -//Case 3: Intersects the vertex of a line. -// When a line intersects the vertex of a line, we expect this to -// count as a geoIntersection. -testLine = {type: "LineString", - coordinates: [[0.0, 0.5], [0.0, -0.5]]}; - -result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonLine'); - -// Case 4: Sanity no intersection. -// This line just misses the canonLine in the negative direction. This -// should not count as a geoIntersection. -testLine = {type: "LineString", - coordinates: [[-0.1, 0.5], [-0.1, -0.5]]}; - -result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 0); - - -// Case 5: Overlapping line - only partially overlaps. -// Undefined behaviour: does intersect -testLine = {type: "LineString", - coordinates: [[-0.5, 0.0], [0.5, 0.0]]}; - -var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonLine'); - - -// Case 6: Contained line - this line is fully contained by the canonLine -// Undefined behaviour: doesn't intersect. -testLine = {type: "LineString", - coordinates: [[0.1, 0.0], [0.9, 0.0]]}; - -result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 0); - -// Case 7: Identical line in the identical position. 
-// Undefined behaviour: does intersect. -testLine = {type: "LineString", - coordinates: [[0.0, 0.0], [1.0, 0.0]]}; - -result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonLine'); - -// Case 8: Point intersection - we search with a line that intersects -// with the canonPoint. -testLine = {type: "LineString", - coordinates: [[10.0, 11.0], [10.0, 9.0]]}; - -result = t.find({geo: {$geoIntersects: {$geometry: testLine}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonPoint'); - -// Case 9: Point point intersection -// as above but with an identical point to the canonPoint. We expect an -// intersection here. -testPoint = {type: "Point", - coordinates: [10.0, 10.0]} - -result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonPoint'); - - -//Case 10: Sanity point non-intersection. -var testPoint = {type: "Point", - coordinates: [12.0, 12.0]} - -result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); -assert.eq(result.count(), 0); - -// Case 11: Point polygon intersection -// verify that a point inside a polygon $geoIntersects. -testPoint = {type: "Point", - coordinates: [50.5, 50.5]} - -result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}}); -assert.eq(result.count(), 1); -assert.eq(result[0]['name'], 'canonPoly'); diff --git a/jstests/geo_s2largewithin.js b/jstests/geo_s2largewithin.js deleted file mode 100644 index 2327f1fb02d..00000000000 --- a/jstests/geo_s2largewithin.js +++ /dev/null @@ -1,45 +0,0 @@ -// If our $within is enormous, create a coarse covering for the search so it -// doesn't take forever. 
-t = db.geo_s2largewithin -t.drop() -t.ensureIndex( { geo : "2dsphere" } ); - -testPoint = { - name: "origin", - geo: { - type: "Point", - coordinates: [0.0, 0.0] - } -}; - -testHorizLine = { - name: "horiz", - geo: { - type: "LineString", - coordinates: [[-2.0, 10.0], [2.0, 10.0]] - } -}; - -testVertLine = { - name: "vert", - geo: { - type: "LineString", - coordinates: [[10.0, -2.0], [10.0, 2.0]] - } -}; - -t.insert(testPoint); -t.insert(testHorizLine); -t.insert(testVertLine); - -//Test a poly that runs horizontally along the equator. - -longPoly = {type: "Polygon", - coordinates: [ - [[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]] - ]}; - -result = t.find({geo: {$geoWithin: {$geometry: longPoly}}}); -assert.eq(result.itcount(), 1); -result = t.find({geo: {$geoWithin: {$geometry: longPoly}}}); -assert.eq("origin", result[0].name) diff --git a/jstests/geo_s2meridian.js b/jstests/geo_s2meridian.js deleted file mode 100644 index 0d5b4b20e6d..00000000000 --- a/jstests/geo_s2meridian.js +++ /dev/null @@ -1,109 +0,0 @@ -t = db.geo_s2meridian; -t.drop(); -t.ensureIndex({geo: "2dsphere"}); - -/* - * Test 1: check that intersection works on the meridian. We insert a line - * that crosses the meridian, and then run a geoIntersect with a line - * that runs along the meridian. - */ - -meridianCrossingLine = { - geo: { - type: "LineString", - coordinates: [ - [-178.0, 10.0], - [178.0, 10.0]] - } -}; - -t.insert(meridianCrossingLine); -assert(! db.getLastError()); - -lineAlongMeridian = { - type: "LineString", - coordinates: [ - [180.0, 11.0], - [180.0, 9.0] - ] -} - -result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}}); -assert.eq(result.itcount(), 1); - -t.drop(); -t.ensureIndex({geo: "2dsphere"}); -/* - * Test 2: check that within work across the meridian. We insert points - * on the meridian, and immediately on either side, and confirm that a poly - * covering all of them returns them all. 
- */ -pointOnNegativeSideOfMeridian = { - geo: { - type: "Point", - coordinates: [-179.0, 1.0] - } -}; -pointOnMeridian = { - geo: { - type: "Point", - coordinates: [180.0, 1.0] - } -}; -pointOnPositiveSideOfMeridian = { - geo: { - type: "Point", - coordinates: [179.0, 1.0] - } -}; - -t.insert(pointOnMeridian); -t.insert(pointOnNegativeSideOfMeridian); -t.insert(pointOnPositiveSideOfMeridian); - -meridianCrossingPoly = { - type: "Polygon", - coordinates: [ - [[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]] - ] -}; - -result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}}); -assert.eq(result.itcount(), 3); - -t.drop(); -t.ensureIndex({geo: "2dsphere"}); -/* - * Test 3: Check that near works around the meridian. Insert two points, one - * closer, but across the meridian, and confirm they both come back, and - * that the order is correct. - */ -pointOnNegativeSideOfMerid = { - name: "closer", - geo: { - type: "Point", - coordinates: [-179.0, 0.0] - } -}; - -pointOnPositiveSideOfMerid = { - name: "farther", - geo: { - type: "Point", - coordinates: [176.0, 0.0] - } -}; - -t.insert(pointOnNegativeSideOfMerid); -t.insert(pointOnPositiveSideOfMerid); - -pointOnPositiveSideOfMeridian = { - type: "Point", - coordinates: [179.0, 0.0] -}; - -result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}}); -assert.eq(result.itcount(), 2); -result = t.find({geo: {$geoNear: pointOnPositiveSideOfMeridian}}); -assert.eq(result[0].name, "closer"); -assert.eq(result[1].name, "farther"); diff --git a/jstests/geo_s2multi.js b/jstests/geo_s2multi.js deleted file mode 100644 index b40eef5543e..00000000000 --- a/jstests/geo_s2multi.js +++ /dev/null @@ -1,50 +0,0 @@ -t = db.geo_s2index -t.drop() - -t.ensureIndex({geo: "2dsphere"}) - -// Let's try the examples in the GeoJSON spec. 
-multiPointA = { "type": "MultiPoint", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] } -t.insert({geo: multiPointA}); -assert(!db.getLastError()); - -multiLineStringA = { "type": "MultiLineString", "coordinates": [ [ [100.0, 0.0], [101.0, 1.0] ], - [ [102.0, 2.0], [103.0, 3.0] ]]} -t.insert({geo: multiLineStringA}); -assert(!db.getLastError()); - -multiPolygonA = { "type": "MultiPolygon", "coordinates": [ - [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]], - [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], - [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]]} -t.insert({geo: multiPolygonA}) -assert(!db.getLastError()); - -assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: - {"type": "Point", "coordinates": [100,0]}}}}).itcount()); -assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: - {"type": "Point", "coordinates": [101.0,1.0]}}}}).itcount()); - -// Inside the hole in multiPolygonA -assert.eq(0, t.find({geo: {$geoIntersects: {$geometry: - {"type": "Point", "coordinates": [100.21,0.21]}}}}).itcount()); - -// One point inside the hole, one out. -assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: - {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21]]}}}}).itcount()); -assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: - {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21],[101,1]]}}}}).itcount()); -// Polygon contains itself and the multipoint. 
-assert.eq(2, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount()); - -partialPolygonA = { "type": "Polygon", "coordinates": - [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ] }; -t.insert({geo: partialPolygonA}); -assert(!db.getLastError()); -// Polygon contains itself, the partial poly, and the multipoint -assert.eq(3, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount()); - -assert.eq(1, t.find({geo: {$geoWithin: {$geometry: partialPolygonA}}}).itcount()); - -// Itself, the multi poly, the multipoint... -assert.eq(3, t.find({geo: {$geoIntersects: {$geometry: partialPolygonA}}}).itcount()); diff --git a/jstests/geo_s2near.js b/jstests/geo_s2near.js deleted file mode 100644 index 136e821b4b8..00000000000 --- a/jstests/geo_s2near.js +++ /dev/null @@ -1,84 +0,0 @@ -// Test 2dsphere near search, called via find and geoNear. -t = db.geo_s2near -t.drop(); - -// Make sure that geoNear gives us back loc -goldenPoint = {type: "Point", coordinates: [ 31.0, 41.0]} -t.insert({geo: goldenPoint}) -t.ensureIndex({ geo : "2dsphere" }) -resNear = db.runCommand({geoNear : t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true}) -assert.eq(resNear.results[0].loc, goldenPoint) - -// FYI: -// One degree of long @ 0 is 111km or so. -// One degree of lat @ 0 is 110km or so. -lat = 0 -lng = 0 -points = 10 -for (var x = -points; x < points; x += 1) { - for (var y = -points; y < points; y += 1) { - t.insert({geo : { "type" : "Point", "coordinates" : [lng + x/1000.0, lat + y/1000.0]}}) - } -} - -origin = { "type" : "Point", "coordinates": [ lng, lat ] } - -t.ensureIndex({ geo : "2dsphere" }) - -// Near only works when the query is a point. 
-someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]} -somepoly = { "type" : "Polygon", - "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} -assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : someline } } }).count()}) -assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : somepoly } } }).count()}) -assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: someline, spherical:true }).results.length}) -assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: somepoly, spherical:true }).results.length}) - -// Do some basic near searches. -res = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 2000} } }).limit(10) -resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, maxDistance: Math.PI, spherical: true}) -assert.eq(res.itcount(), resNear.results.length, 10) - -res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10) -resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, spherical: true}) -assert.eq(res.itcount(), resNear.results.length, 10) - -// Find all the points! -res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000) -resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true}) -assert.eq(resNear.results.length, res.itcount(), (2 * points) * (2 * points)) - -// longitude goes -180 to 180 -// latitude goes -90 to 90 -// Let's put in some perverse (polar) data and make sure we get it back. -// Points go long, lat. 
-t.insert({geo: { "type" : "Point", "coordinates" : [-180, -90]}}) -t.insert({geo: { "type" : "Point", "coordinates" : [180, -90]}}) -t.insert({geo: { "type" : "Point", "coordinates" : [180, 90]}}) -t.insert({geo: { "type" : "Point", "coordinates" : [-180, 90]}}) -res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000) -resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true}) -assert.eq(res.itcount(), resNear.results.length, (2 * points) * (2 * points) + 4) - -function testRadAndDegreesOK(distance) { - // Distance for old style points is radians. - resRadians = t.find({geo: {$nearSphere: [0,0], $maxDistance: (distance / (6378.1 * 1000))}}) - // Distance for new style points is meters. - resMeters = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: distance} } }) - // And we should get the same # of results no matter what. - assert.eq(resRadians.itcount(), resMeters.itcount()) - - // Also, geoNear should behave the same way. - resGNMeters = db.runCommand({geoNear : t.getName(), near: origin, maxDistance: distance, spherical: true}) - resGNRadians = db.runCommand({geoNear : t.getName(), near: [0,0], maxDistance: (distance / (6378.1 * 1000)), spherical: true}) - assert.eq(resGNRadians.results.length, resGNMeters.results.length) - for (var i = 0; i < resGNRadians.length; ++i) { - // Radius of earth * radians = distance in meters. 
- assert.close(resGNRadians.results[i].dis * 6378.1 * 1000, resGNMeters.results[i].dis) - } -} - -testRadAndDegreesOK(1); -testRadAndDegreesOK(10) -testRadAndDegreesOK(50) -testRadAndDegreesOK(10000) diff --git a/jstests/geo_s2nearComplex.js b/jstests/geo_s2nearComplex.js deleted file mode 100644 index 16a24d6db24..00000000000 --- a/jstests/geo_s2nearComplex.js +++ /dev/null @@ -1,269 +0,0 @@ -var t = db.get_s2nearcomplex -t.drop() -t.ensureIndex({geo: "2dsphere"}) - -/* Short names for math operations */ -Random.setRandomSeed(); -var random = Random.rand; -var PI = Math.PI; -var asin = Math.asin; -var sin = Math.sin; -var cos = Math.cos; -var atan2 = Math.atan2 - - -var originGeo = {type: "Point", coordinates: [20.0, 20.0]}; -// Center point for all tests. -var origin = { - name: "origin", - geo: originGeo -} - - -/* - * Convenience function for checking that coordinates match. threshold let's you - * specify how accurate equals should be. - */ -function coordinateEqual(first, second, threshold){ - threshold = threshold || 0.001 - first = first['geo']['coordinates'] - second = second['geo']['coordinates'] - if(Math.abs(first[0] - second[0]) <= threshold){ - if(Math.abs(first[1] - second[1]) <= threshold){ - return true; - } - } - return false; -} - -/* - * Creates `count` random and uniformly distributed points centered around `origin` - * no points will be closer to origin than minDist, and no points will be further - * than maxDist. Points will be inserted into the global `t` collection, and will - * be returned. 
- * based on this algorithm: http://williams.best.vwh.net/avform.htm#LL - */ -function uniformPoints(origin, count, minDist, maxDist){ - var i; - var lng = origin['geo']['coordinates'][0]; - var lat = origin['geo']['coordinates'][1]; - var distances = []; - var points = []; - for(i=0; i < count; i++){ - distances.push((random() * (maxDist - minDist)) + minDist); - } - distances.sort(); - while(points.length < count){ - var angle = random() * 2 * PI; - var distance = distances[points.length]; - var pointLat = asin((sin(lat) * cos(distance)) + (cos(lat) * sin(distance) * cos(angle))); - var pointDLng = atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat)); - var pointLng = ((lng - pointDLng + PI) % 2*PI) - PI; - - // Latitude must be [-90, 90] - var newLat = lat + pointLat; - if (newLat > 90) newLat -= 180; - if (newLat < -90) newLat += 180; - - // Longitude must be [-180, 180] - var newLng = lng + pointLng; - if (newLng > 180) newLng -= 360; - if (newLng < -180) newLng += 360; - - var newPoint = { - geo: { - type: "Point", - //coordinates: [lng + pointLng, lat + pointLat] - coordinates: [newLng, newLat] - } - }; - - points.push(newPoint); - } - for(i=0; i < points.length; i++){ - t.insert(points[i]); - assert(!db.getLastError()); - } - return points; -} - -/* - * Creates a random uniform field as above, excepting for `numberOfHoles` gaps that - * have `sizeOfHoles` points missing centered around a random point. - */ -function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles){ - var points = uniformPoints(origin, count, minDist, maxDist); - var i; - for(i=0; i smallQuery[1]); - -// Let's just index one field. 
-var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere"}, - {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}}); -print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]); -// assert(smallQuery[0] > smallQuery[1]); - -// And the other one. -var smallQuery = timeWithoutAndWithAnIndex({from: "2dsphere"}, - {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}}); -print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]); -// assert(smallQuery[0] > smallQuery[1]); diff --git a/jstests/geo_s2validindex.js b/jstests/geo_s2validindex.js deleted file mode 100644 index fee00d8d208..00000000000 --- a/jstests/geo_s2validindex.js +++ /dev/null @@ -1,26 +0,0 @@ -// -// Tests valid cases for creation of 2dsphere index -// - -var coll = db.getCollection("twodspherevalid"); - -// Valid index -coll.drop(); -assert.eq(undefined, coll.ensureIndex({geo : "2dsphere", other : 1})); - -// Valid index -coll.drop(); -assert.eq(undefined, coll.ensureIndex({geo : "2dsphere", other : 1, geo2 : "2dsphere"})); - -// Invalid index, using hash with 2dsphere -coll.drop(); -assert.neq(undefined, coll.ensureIndex({geo : "2dsphere", other : "hash"}).err); - -// Invalid index, using 2d with 2dsphere -coll.drop(); -assert.neq(undefined, coll.ensureIndex({geo : "2dsphere", other : "2d"}).err); - -jsTest.log("Success!"); - -// Ensure the empty collection is gone, so that small_oplog passes. -coll.drop(); diff --git a/jstests/geo_s2within.js b/jstests/geo_s2within.js deleted file mode 100644 index 87fd32a7676..00000000000 --- a/jstests/geo_s2within.js +++ /dev/null @@ -1,36 +0,0 @@ -// Test some cases that might be iffy with $within, mostly related to polygon w/holes. 
-t = db.geo_s2within -t.drop() -t.ensureIndex({geo: "2dsphere"}) - -somepoly = { "type" : "Polygon", - "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]} - -t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.2]]}}) -// This is only partially contained within the polygon. -t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}}) - -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); - -t.drop() -t.ensureIndex({geo: "2dsphere"}) -somepoly = { "type" : "Polygon", - "coordinates" : [ [ [40,5], [40,8], [43,8], [43,5], [40,5]], - [ [41,6], [42,6], [42,7], [41,7], [41,6]]]} - -t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }}) -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); -// In the hole. Shouldn't find it. -t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }}) -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); -// Also in the hole. -t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}}) -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); -// Half-hole, half-not. Shouldn't be $within. 
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}}) -res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } }) -assert.eq(res.itcount(), 1); diff --git a/jstests/geo_small_large.js b/jstests/geo_small_large.js deleted file mode 100644 index aff4743fc71..00000000000 --- a/jstests/geo_small_large.js +++ /dev/null @@ -1,151 +0,0 @@ -// SERVER-2386, general geo-indexing using very large and very small bounds - -load( "jstests/libs/geo_near_random.js" ); - -// Do some random tests (for near queries) with very large and small ranges - -var test = new GeoNearRandomTest( "geo_small_large" ); - -bounds = { min : -Math.pow( 2, 34 ), max : Math.pow( 2, 34 ) }; - -test.insertPts( 50, bounds ); - -printjson( db["geo_small_large"].find().limit( 10 ).toArray() ) - -test.testPt( [ 0, 0 ] ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); - -test = new GeoNearRandomTest( "geo_small_large" ); - -bounds = { min : -Math.pow( 2, -34 ), max : Math.pow( 2, -34 ) }; - -test.insertPts( 50, bounds ); - -printjson( db["geo_small_large"].find().limit( 10 ).toArray() ) - -test.testPt( [ 0, 0 ] ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); -test.testPt( test.mkPt( undefined, bounds ) ); - - -// Check that our box and circle queries also work -var scales = [ Math.pow( 2, 40 ), Math.pow( 2, -40 ), Math.pow(2, 2), Math.pow(3, -15), Math.pow(3, 15) ] - -for ( var i = 0; i < scales.length; i++ ) { - - scale = scales[i]; - - var eps = Math.pow( 2, -7 ) * scale; - var radius = 5 * scale; - var max = 10 * scale; - var min = -max; - var range = max - min; - var bits = 2 + Math.random() * 30 - - var t = db["geo_small_large"] - t.drop(); - t.ensureIndex( { p : "2d" }, { min : min, max : max, bits : bits }) - - var 
outPoints = 0; - var inPoints = 0; - - printjson({ eps : eps, radius : radius, max : max, min : min, range : range, bits : bits }) - - // Put a point slightly inside and outside our range - for ( var j = 0; j < 2; j++ ) { - var currRad = ( j % 2 == 0 ? radius + eps : radius - eps ); - t.insert( { p : { x : currRad, y : 0 } } ); - print( db.getLastError() ) - } - - printjson( t.find().toArray() ); - - assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1, "Incorrect center points found!" ) - assert.eq( t.count( { p : { $within : { $box : [ [ -radius, -radius ], [ radius, radius ] ] } } } ), 1, - "Incorrect box points found!" ) - - shouldFind = [] - randoms = [] - - for ( var j = 0; j < 2; j++ ) { - - var randX = Math.random(); // randoms[j].randX - var randY = Math.random(); // randoms[j].randY - - randoms.push({ randX : randX, randY : randY }) - - var x = randX * ( range - eps ) + eps + min; - var y = randY * ( range - eps ) + eps + min; - - t.insert( { p : [ x, y ] } ); - - if ( x * x + y * y > radius * radius ){ - // print( "out point "); - // printjson({ x : x, y : y }) - outPoints++ - } - else{ - // print( "in point "); - // printjson({ x : x, y : y }) - inPoints++ - shouldFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) }) - } - } - - /* - function printDiff( didFind, shouldFind ){ - - for( var i = 0; i < shouldFind.length; i++ ){ - var beenFound = false; - for( var j = 0; j < didFind.length && !beenFound ; j++ ){ - beenFound = shouldFind[i].x == didFind[j].x && - shouldFind[i].y == didFind[j].y - } - - if( !beenFound ){ - print( "Could not find: " ) - shouldFind[i].inRadius = ( radius - shouldFind[i].radius >= 0 ) - printjson( shouldFind[i] ) - } - } - } - - print( "Finding random pts... ") - var found = t.find( { p : { $within : { $center : [[0, 0], radius ] } } } ).toArray() - var didFind = [] - for( var f = 0; f < found.length; f++ ){ - //printjson( found[f] ) - var x = found[f].p.x != undefined ? 
found[f].p.x : found[f].p[0] - var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1] - didFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) }) - } - - print( "Did not find but should: ") - printDiff( didFind, shouldFind ) - print( "Found but should not have: ") - printDiff( shouldFind, didFind ) - */ - - assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1 + inPoints, - "Incorrect random center points found!\n" + tojson( randoms ) ) - - print("Found " + inPoints + " points in and " + outPoints + " points out."); - - var found = t.find( { p : { $near : [0, 0], $maxDistance : radius } } ).toArray() - var dist = 0; - for( var f = 0; f < found.length; f++ ){ - var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0] - var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1] - print( "Dist: x : " + x + " y : " + y + " dist : " + Math.sqrt( x * x + y * y) + " radius : " + radius ) - } - - assert.eq( t.count( { p : { $near : [0, 0], $maxDistance : radius } } ), 1 + inPoints, - "Incorrect random center points found near!\n" + tojson( randoms ) ) - -} - diff --git a/jstests/geo_sort1.js b/jstests/geo_sort1.js deleted file mode 100644 index 67de80e65c7..00000000000 --- a/jstests/geo_sort1.js +++ /dev/null @@ -1,22 +0,0 @@ - -t = db.geo_sort1 -t.drop(); - -for ( x=0; x<10; x++ ){ - for ( y=0; y<10; y++ ){ - t.insert( { loc : [ x , y ] , foo : x * x * y } ); - } -} - -t.ensureIndex( { loc : "2d" , foo : 1 } ) - -q = t.find( { loc : { $near : [ 5 , 5 ] } , foo : { $gt : 20 } } ) -m = function(z){ return z.foo; } - -a = q.clone().map( m ); -b = q.clone().sort( { foo : 1 } ).map( m ); - -assert.neq( a , b , "A" ); -a.sort(); -b.sort(); -assert.eq( a , b , "B" ); diff --git a/jstests/geo_uniqueDocs.js b/jstests/geo_uniqueDocs.js deleted file mode 100644 index 61f1a40522d..00000000000 --- a/jstests/geo_uniqueDocs.js +++ /dev/null @@ -1,40 +0,0 @@ -// Test uniqueDocs option for $within and geoNear queries SERVER-3139 
-// SERVER-12120 uniqueDocs is deprecated. Server always returns unique documents. - -collName = 'geo_uniqueDocs_test' -t = db.geo_uniqueDocs_test -t.drop() - -t.save( { locs : [ [0,2], [3,4]] } ) -t.save( { locs : [ [6,8], [10,10] ] } ) - -t.ensureIndex( { locs : '2d' } ) - -// geoNear tests -// uniqueDocs option is ignored. -assert.eq(2, db.runCommand({geoNear:collName, near:[0,0]}).results.length) -assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:false}).results.length) -assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:true}).results.length) -results = db.runCommand({geoNear:collName, near:[0,0], num:2}).results -assert.eq(2, results.length) -assert.close(2, results[0].dis) -assert.close(10, results[1].dis) -results = db.runCommand({geoNear:collName, near:[0,0], num:2, uniqueDocs:true}).results -assert.eq(2, results.length) -assert.close(2, results[0].dis) -assert.close(10, results[1].dis) - -// $within tests - -assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]]}}}).itcount()) -assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : true}}}).itcount()) -assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : false}}}).itcount()) - -assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : true}}}).itcount()) -assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : false}}}).itcount()) - -assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : true}}}).itcount()) -assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : false}}}).itcount()) - -assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : true}}}).itcount()) -assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : false}}}).itcount()) diff --git a/jstests/geo_uniqueDocs2.js b/jstests/geo_uniqueDocs2.js deleted file mode 100644 index f9b95113f78..00000000000 --- 
a/jstests/geo_uniqueDocs2.js +++ /dev/null @@ -1,80 +0,0 @@ -// Additional checks for geo uniqueDocs and includeLocs SERVER-3139. -// SERVER-12120 uniqueDocs is deprecated. -// Server always returns results with implied uniqueDocs=true - -collName = 'jstests_geo_uniqueDocs2'; -t = db[collName]; -t.drop(); - -t.save( {loc:[[20,30],[40,50]]} ); -t.ensureIndex( {loc:'2d'} ); - -// Check exact matches of different locations. -assert.eq( 1, t.count( { loc : [20,30] } ) ); -assert.eq( 1, t.count( { loc : [40,50] } ) ); - -// Check behavior for $near, where $uniqueDocs mode is unavailable. -assert.eq( [t.findOne()], t.find( { loc: { $near: [50,50] } } ).toArray() ); - -// Check correct number of matches for $within / $uniqueDocs. -// uniqueDocs ignored - does not affect results. -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40] } } } ) ); -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : true } } } ) ); -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ) ); - -// For $within / $uniqueDocs, limit applies to docs. -assert.eq( 1, t.find( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ).limit(1).itcount() ); - -// Now check a circle only containing one of the locs. -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10] } } } ) ); -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : true } } } ) ); -assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : false } } } ) ); - -// Check number and character of results with geoNear / uniqueDocs / includeLocs. 
-notUniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : false } ); -uniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : false } ); -notUniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ); -uniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : true } ); - -// Check that only unique docs are returned. -assert.eq( 1, notUniqueNotInclude.results.length ); -assert.eq( 1, uniqueNotInclude.results.length ); -assert.eq( 1, notUniqueInclude.results.length ); -assert.eq( 1, uniqueInclude.results.length ); - -// Check that locs are included. -assert( !notUniqueNotInclude.results[0].loc ); -assert( !uniqueNotInclude.results[0].loc ); -assert( notUniqueInclude.results[0].loc ); -assert( uniqueInclude.results[0].loc ); - -// For geoNear / uniqueDocs, 'num' limit seems to apply to locs. -assert.eq( 1, db.runCommand( { geoNear : collName , near : [50,50], num : 1, uniqueDocs : false, includeLocs : false } ).results.length ); - -// Check locs returned in includeLocs mode. -t.remove({}); -objLocs = [{x:20,y:30,z:['loc1','loca']},{x:40,y:50,z:['loc2','locb']}]; -t.save( {loc:objLocs} ); -results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results; -assert.contains( results[0].loc, objLocs ); - -// Check locs returned in includeLocs mode, where locs are arrays. -t.remove({}); -arrLocs = [[20,30],[40,50]]; -t.save( {loc:arrLocs} ); -results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results; -// The original loc arrays are returned as objects. -expectedLocs = arrLocs - -assert.contains( results[0].loc, expectedLocs ); - -// Test a large number of locations in the array. 
-t.drop(); -arr = []; -for( i = 0; i < 10000; ++i ) { - arr.push( [10,10] ); -} -arr.push( [100,100] ); -t.save( {loc:arr} ); -t.ensureIndex( {loc:'2d'} ); -assert.eq( 1, t.count( { loc : { $within : { $center : [[99, 99], 5] } } } ) ); diff --git a/jstests/geo_update.js b/jstests/geo_update.js deleted file mode 100644 index dd4b28c8374..00000000000 --- a/jstests/geo_update.js +++ /dev/null @@ -1,37 +0,0 @@ -// Tests geo queries w/ update & upsert -// from SERVER-3428 - -var coll = db.testGeoUpdate -coll.drop() - -coll.ensureIndex({ loc : "2d" }) - -// Test normal update -print( "Updating..." ) - -coll.insert({ loc : [1.0, 2.0] }) - -coll.update({ loc : { $near : [1.0, 2.0] } }, - { x : true, loc : [1.0, 2.0] }) - -// Test upsert -print( "Upserting..." ) - -coll.update({ loc : { $within : { $center : [[10, 20], 1] } } }, - { x : true }, - true) - -coll.update({ loc : { $near : [10.0, 20.0], $maxDistance : 1 } }, - { x : true }, - true) - - -coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } }, - { $set : { loc : [100, 100] }, $push : { people : "chris" } }, - true) - -coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } }, - { $set : { loc : [100, 100] }, $push : { people : "john" } }, - true) - -assert.eq( 4, coll.find().itcount() ) diff --git a/jstests/geo_update1.js b/jstests/geo_update1.js deleted file mode 100644 index 68a8de668b3..00000000000 --- a/jstests/geo_update1.js +++ /dev/null @@ -1,38 +0,0 @@ - -t = db.geo_update1 -t.drop() - -for(var x = 0; x < 10; x++ ) { - for(var y = 0; y < 10; y++ ) { - t.insert({"loc": [x, y] , x : x , y : y , z : 1 }); - } -} - -t.ensureIndex( { loc : "2d" } ) - -function p(){ - print( "--------------" ); - for ( var y=0; y<10; y++ ){ - var c = t.find( { y : y } ).sort( { x : 1 } ) - var s = ""; - while ( c.hasNext() ) - s += c.next().z + " "; - print( s ) - } - print( "--------------" ); -} - -p() - -t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true); 
-assert.isnull( db.getLastError() , "B1" ) -p() - -t.update({}, {'$inc' : { 'z' : 1}}, false, true); -assert.isnull( db.getLastError() , "B2" ) -p() - - -t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true); -assert.isnull( db.getLastError() , "B3" ) -p() diff --git a/jstests/geo_update2.js b/jstests/geo_update2.js deleted file mode 100644 index 2308b2c7899..00000000000 --- a/jstests/geo_update2.js +++ /dev/null @@ -1,40 +0,0 @@ - -t = db.geo_update2 -t.drop() - -for(var x = 0; x < 10; x++ ) { - for(var y = 0; y < 10; y++ ) { - t.insert({"loc": [x, y] , x : x , y : y }); - } -} - -t.ensureIndex( { loc : "2d" } ) - -function p(){ - print( "--------------" ); - for ( var y=0; y<10; y++ ){ - var c = t.find( { y : y } ).sort( { x : 1 } ) - var s = ""; - while ( c.hasNext() ) - s += c.next().z + " "; - print( s ) - } - print( "--------------" ); -} - -p() - - -t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true); -assert.isnull( db.getLastError() , "B1" ) -p() - -t.update({}, {'$inc' : { 'z' : 1}}, false, true); -assert.isnull( db.getLastError() , "B2" ) -p() - - -t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}}, {'$inc' : { 'z' : 1}}, false, true); -assert.isnull( db.getLastError() , "B3" ) -p() - diff --git a/jstests/geo_update_btree.js b/jstests/geo_update_btree.js deleted file mode 100644 index 38d9692faeb..00000000000 --- a/jstests/geo_update_btree.js +++ /dev/null @@ -1,25 +0,0 @@ -// Tests whether the geospatial search is stable under btree updates - -var coll = db.getCollection( "jstests_geo_update_btree" ) -coll.drop() - -coll.ensureIndex( { loc : '2d' } ) - -for ( i = 0; i < 10000; i++ ) { - coll.insert( { loc : [ Random.rand() * 180, Random.rand() * 180 ], v : '' } ); -} - -var big = new Array( 3000 ).toString() - -for ( i = 0; i < 1000; i++ ) { - coll.update( - { loc : { $within : { $center : [ [ Random.rand() * 180, Random.rand() * 180 ], Random.rand() * 50 ] } } }, 
- { $set : { v : big } }, false, true ) - - if (testingReplication) - db.getLastError(2); - else - db.getLastError(); - - if( i % 10 == 0 ) print( i ); -} diff --git a/jstests/geo_update_btree2.js b/jstests/geo_update_btree2.js deleted file mode 100644 index d99970c73e0..00000000000 --- a/jstests/geo_update_btree2.js +++ /dev/null @@ -1,71 +0,0 @@ -// Tests whether the geospatial search is stable under btree updates -// -// Tests the implementation of the 2d search, not the behavior we promise. MongoDB currently -// promises no isolation, so there is no guarantee that we get the results we expect in this file. - -// The old query system, if it saw a 2d query, would never consider a collscan. -// -// The new query system can answer the queries in this file with a collscan and ranks -// the collscan against the indexed result. -// -// In order to expose the specific NON GUARANTEED isolation behavior this file tests -// we disable table scans to ensure that the new query system only looks at the 2d -// scan. -assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) ); - -var status = function( msg ){ - print( "\n\n###\n" + msg + "\n###\n\n" ) -} - -var coll = db.getCollection( "jstests_geo_update_btree2" ) -coll.drop() - -coll.ensureIndex( { loc : '2d' } ) - -status( "Inserting points..." ) - -var numPoints = 10 -for ( i = 0; i < numPoints; i++ ) { - coll.insert( { _id : i, loc : [ Random.rand() * 180, Random.rand() * 180 ], i : i % 2 } ); -} - -status( "Starting long query..." ) - -var query = coll.find({ loc : { $within : { $box : [[-180, -180], [180, 180]] } } }).batchSize( 2 ) -var firstValues = [ query.next()._id, query.next()._id ] -printjson( firstValues ) - -status( "Removing points not returned by query..." 
) - -var allQuery = coll.find() -var removeIds = [] -while( allQuery.hasNext() ){ - var id = allQuery.next()._id - if( firstValues.indexOf( id ) < 0 ){ - removeIds.push( id ) - } -} - -var updateIds = [] -for( var i = 0, max = removeIds.length / 2; i < max; i++ ) updateIds.push( removeIds.pop() ) - -printjson( removeIds ) -coll.remove({ _id : { $in : removeIds } }) - -status( "Updating points returned by query..." ) -printjson(updateIds); - -var big = new Array( 3000 ).toString() -for( var i = 0; i < updateIds.length; i++ ) - coll.update({ _id : updateIds[i] }, { $set : { data : big } }) - -status( "Counting final points..." ) - -// It's not defined whether or not we return documents that are modified during a query. We -// shouldn't crash, but it's not defined how many results we get back. This test is modifying every -// doc not returned by the query, and since we currently handle the invalidation by removing them, -// we won't return them. But we shouldn't crash. -// assert.eq( ( numPoints - 2 ) / 2, query.itcount() ) -query.itcount(); - -assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false} ) ); diff --git a/jstests/geo_update_dedup.js b/jstests/geo_update_dedup.js deleted file mode 100644 index 8ec08b82ea0..00000000000 --- a/jstests/geo_update_dedup.js +++ /dev/null @@ -1,60 +0,0 @@ -// Test that updates with geo queries which match -// the same document multiple times only apply -// the update once - -var t = db.jstests_geo_update_dedup;; - -// 2d index with $near -t.drop(); -t.ensureIndex({locs: "2d"}); -t.save({locs: [[49.999,49.999], [50.0,50.0], [50.001,50.001]]}); - -var q = {locs: {$near: [50.0, 50.0]}}; -assert.eq(1, t.find(q).itcount(), 'duplicates returned from query'); - -t.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true); -assert.eq(1, db.getLastErrorObj().n); -assert.eq(1, t.findOne().touchCount); - -t.drop(); -t.ensureIndex({locs: "2d"}); -t.save({locs: [{x:49.999,y:49.999}, {x:50.0,y:50.0}, 
{x:50.001,y:50.001}]}); -t.update({locs: {$near: {x:50.0, y:50.0}}}, {$inc: {touchCount: 1}}); -assert.eq(1, db.getLastErrorObj().n); -assert.eq(1, t.findOne().touchCount); - -// 2d index with $within -t.drop(); -t.ensureIndex({loc: "2d"}); -t.save({loc: [[0, 0], [1, 1]]}); - -t.update({loc: {$within: {$center: [[0, 0], 2]}}}, {$inc: {touchCount: 1}}, false, true); -assert.eq(1, db.getLastErrorObj().n); -assert.eq(1, t.findOne().touchCount); - -// 2dsphere index with $geoNear -t.drop(); -t.ensureIndex({geo: "2dsphere"}); -var x = { "type" : "Polygon", - "coordinates" : [[[49.999,49.999], [50.0,50.0], [50.001,50.001], [49.999,49.999]]]} -t.save({geo: x}) - -t.update({geo: {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}}, - {$inc: {touchCount: 1}}, false, true); -assert.eq(1, db.getLastErrorObj().n); -assert.eq(1, t.findOne().touchCount); - -t.drop(); -var locdata = [ - {geo: {type: "Point", coordinates: [49.999,49.999]}}, - {geo: {type: "Point", coordinates: [50.000,50.000]}}, - {geo: {type: "Point", coordinates: [50.001,50.001]}} -]; -t.save({locdata: locdata, count: 0}) -t.ensureIndex({"locdata.geo": "2dsphere"}); - -t.update({"locdata.geo": {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}}, - {$inc: {touchCount: 1}}, false, true); -assert.eq(1, db.getLastErrorObj().n); -assert.eq(1, t.findOne().touchCount); - diff --git a/jstests/geo_withinquery.js b/jstests/geo_withinquery.js deleted file mode 100644 index 11701d34c62..00000000000 --- a/jstests/geo_withinquery.js +++ /dev/null @@ -1,15 +0,0 @@ -// SERVER-7343: allow $within without a geo index. 
-t = db.geo_withinquery; -t.drop(); - -num = 0; -for ( x=0; x<=20; x++ ){ - for ( y=0; y<=20; y++ ){ - o = { _id : num++ , loc : [ x , y ] } - t.save( o ) - } -} - -assert.eq(21 * 21 - 1, t.find({ $and: [ {loc: {$ne:[0,0]}}, - {loc: {$within: {$box: [[0,0], [100,100]]}}}, - ]}).itcount(), "UHOH!") diff --git a/jstests/geoa.js b/jstests/geoa.js deleted file mode 100644 index 3081f6c5c2e..00000000000 --- a/jstests/geoa.js +++ /dev/null @@ -1,12 +0,0 @@ - -t = db.geoa -t.drop(); - -t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } ) -t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } ) -t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } ) - -t.ensureIndex( { "a.loc" : "2d" } ); - -cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } ); -assert.eq( 2 , cur.next()._id , "A1" ); diff --git a/jstests/geob.js b/jstests/geob.js deleted file mode 100644 index 0dcc2658ba2..00000000000 --- a/jstests/geob.js +++ /dev/null @@ -1,35 +0,0 @@ -var t = db.geob; -t.drop(); - -var a = {p: [0, 0]}; -var b = {p: [1, 0]}; -var c = {p: [3, 4]}; -var d = {p: [0, 6]}; - -t.save(a); -t.save(b); -t.save(c); -t.save(d); -t.ensureIndex({p: "2d"}); - -var res = t.runCommand("geoNear", {near: [0,0]}); -assert.close(3, res.stats.avgDistance, "A"); - -assert.close(0, res.results[0].dis, "B1"); -assert.eq(a._id, res.results[0].obj._id, "B2"); - -assert.close(1, res.results[1].dis, "C1"); -assert.eq(b._id, res.results[1].obj._id, "C2"); - -assert.close(5, res.results[2].dis, "D1"); -assert.eq(c._id, res.results[2].obj._id, "D2"); - -assert.close(6, res.results[3].dis, "E1"); -assert.eq(d._id, res.results[3].obj._id, "E2"); - -res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2}); -assert.close(6, res.stats.avgDistance, "F"); -assert.close(0, res.results[0].dis, "G"); -assert.close(2, res.results[1].dis, "H"); -assert.close(10, res.results[2].dis, "I"); -assert.close(12, res.results[3].dis, "J"); diff --git a/jstests/geoc.js b/jstests/geoc.js deleted file mode 100644 index 8b0178095e8..00000000000 --- 
a/jstests/geoc.js +++ /dev/null @@ -1,24 +0,0 @@ - -t = db.geoc; -t.drop() - -N = 1000; - -for (var i=0; i - -contains = function(arr,obj) { - var i = arr.length; - while (i--) { - if (arr[i] === obj) { - return true; - } - } - return false; -} - -var resp = db.adminCommand({getLog:"*"}) -assert( resp.ok == 1, "error executing getLog command" ); -assert( resp.names, "no names field" ); -assert( resp.names.length > 0, "names array is empty" ); -assert( contains(resp.names,"global") , "missing global category" ); -assert( !contains(resp.names,"butty") , "missing butty category" ); - -resp = db.adminCommand({getLog:"global"}) -assert( resp.ok == 1, "error executing getLog command" ); -assert( resp.log, "no log field" ); -assert( resp.log.length > 0 , "no log lines" ); diff --git a/jstests/getlog2.js b/jstests/getlog2.js deleted file mode 100644 index 2712f96fc3e..00000000000 --- a/jstests/getlog2.js +++ /dev/null @@ -1,47 +0,0 @@ -// tests getlog as well as slow querying logging - -glcol = db.getLogTest2; -glcol.drop() - -contains = function(arr, func) { - var i = arr.length; - while (i--) { - if (func(arr[i])) { - return true; - } - } - return false; -} - -// test doesn't work when talking to mongos -if(db.isMaster().msg != "isdbgrid") { - // run a slow query - glcol.save({ "SENTINEL": 1 }); - glcol.findOne({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }); - - // run a slow update - glcol.update({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }, { "x": "x" }); - - var resp = db.adminCommand({getLog:"global"}); - assert( resp.ok == 1, "error executing getLog command" ); - assert( resp.log, "no log field" ); - assert( resp.log.length > 0 , "no log lines" ); - - // ensure that slow query is logged in detail - assert( contains(resp.log, function(v) { - print(v); - return v.indexOf(" query ") != -1 && v.indexOf("query:") != -1 && - v.indexOf("nscanned:") != -1 && - v.indexOf("nscannedObjects:") != -1 && - v.indexOf("SENTINEL") != 
-1; - }) ); - - // same, but for update - assert( contains(resp.log, function(v) { - print(v); - return v.indexOf(" update ") != -1 && v.indexOf("query:") != -1 && - v.indexOf("nscanned:") != -1 && - v.indexOf("nscannedObjects:") != -1 && - v.indexOf("SENTINEL") != -1; - }) ); -} diff --git a/jstests/group1.js b/jstests/group1.js deleted file mode 100644 index c4147c0d89a..00000000000 --- a/jstests/group1.js +++ /dev/null @@ -1,64 +0,0 @@ -t = db.group1; -t.drop(); - -t.save( { n : 1 , a : 1 } ); -t.save( { n : 2 , a : 1 } ); -t.save( { n : 3 , a : 2 } ); -t.save( { n : 4 , a : 2 } ); -t.save( { n : 5 , a : 2 } ); - -var p = { key : { a : true } , - reduce : function(obj,prev) { prev.count++; }, - initial: { count: 0 } - }; - -res = t.group( p ); - -assert( res.length == 2 , "A" ); -assert( res[0].a == 1 , "B" ); -assert( res[0].count == 2 , "C" ); -assert( res[1].a == 2 , "D" ); -assert( res[1].count == 3 , "E" ); - -assert.eq( res , t.groupcmd( p ) , "ZZ" ); - -ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } ); -assert.eq( 1 , ret.length , "ZZ 2" ); -assert.eq( 5 , ret[0].count , "ZZ 3" ); - -ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n } , initial : { sum : 0 } } ); -assert.eq( 1 , ret.length , "ZZ 4" ); -assert.eq( 15 , ret[0].sum , "ZZ 5" ); - -t.drop(); - -t.save( { "a" : 2 } ); -t.save( { "b" : 5 } ); -t.save( { "a" : 1 } ); -t.save( { "a" : 2 } ); - -c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } }; - -assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" ); - - -t.drop(); - -t.save( { name : { first : "a" , last : "A" } } ); -t.save( { name : { first : "b" , last : "B" } } ); -t.save( { name : { first : "a" , last : "A" } } ); - - -p = { key : { 'name.first' : true } , - reduce : function(obj,prev) { prev.count++; }, - initial: { count: 0 } - }; - -res = t.group( p ); -assert.eq( 2 , res.length , "Z1" ); -assert.eq( "a" , res[0]['name.first'] , "Z2" ) 
-assert.eq( "b" , res[1]['name.first'] , "Z3" ) -assert.eq( 2 , res[0].count , "Z4" ) -assert.eq( 1 , res[1].count , "Z5" ) - - diff --git a/jstests/group2.js b/jstests/group2.js deleted file mode 100644 index a8e6653470a..00000000000 --- a/jstests/group2.js +++ /dev/null @@ -1,38 +0,0 @@ -t = db.group2; -t.drop(); - -t.save({a: 2}); -t.save({b: 5}); -t.save({a: 1}); - -cmd = { key: {a: 1}, - initial: {count: 0}, - reduce: function(obj, prev) { - prev.count++; - } - }; - -result = t.group(cmd); - -assert.eq(3, result.length, "A"); -assert.eq(null, result[1].a, "C"); -assert("a" in result[1], "D"); -assert.eq(1, result[2].a, "E"); - -assert.eq(1, result[0].count, "F"); -assert.eq(1, result[1].count, "G"); -assert.eq(1, result[2].count, "H"); - - -delete cmd.key -cmd["$keyf"] = function(x){ return { a : x.a }; }; -result2 = t.group( cmd ); - -assert.eq( result , result2, "check result2" ); - - -delete cmd.$keyf -cmd["keyf"] = function(x){ return { a : x.a }; }; -result3 = t.group( cmd ); - -assert.eq( result , result3, "check result3" ); diff --git a/jstests/group3.js b/jstests/group3.js deleted file mode 100644 index d113b9d570f..00000000000 --- a/jstests/group3.js +++ /dev/null @@ -1,43 +0,0 @@ -t = db.group3; -t.drop(); - -t.save({a: 1}); -t.save({a: 2}); -t.save({a: 3}); -t.save({a: 4}); - - -cmd = { initial: {count: 0, sum: 0}, - reduce: function(obj, prev) { - prev.count++; - prev.sum += obj.a; - }, - finalize: function(obj) { - if (obj.count){ - obj.avg = obj.sum / obj.count; - }else{ - obj.avg = 0; - } - }, - }; - -result1 = t.group(cmd); - -assert.eq(1, result1.length, "test1"); -assert.eq(10, result1[0].sum, "test1"); -assert.eq(4, result1[0].count, "test1"); -assert.eq(2.5, result1[0].avg, "test1"); - - -cmd['finalize'] = function(obj) { - if (obj.count){ - return obj.sum / obj.count; - }else{ - return 0; - } -}; - -result2 = t.group(cmd); - -assert.eq(1, result2.length, "test2"); -assert.eq(2.5, result2[0], "test2"); diff --git a/jstests/group4.js 
b/jstests/group4.js deleted file mode 100644 index e75c0d1ae2c..00000000000 --- a/jstests/group4.js +++ /dev/null @@ -1,45 +0,0 @@ - -t = db.group4 -t.drop(); - -function test( c , n ){ - var x = {}; - c.forEach( - function(z){ - assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) ); - } - ); -} - -t.insert({name:'bob',foo:1}) -t.insert({name:'bob',foo:2}) -t.insert({name:'alice',foo:1}) -t.insert({name:'alice',foo:3}) -t.insert({name:'fred',foo:3}) -t.insert({name:'fred',foo:4}) - -x = t.group( - { - key: {foo:1}, - initial: {count:0,values:[]}, - reduce: function (obj, prev){ - prev.count++ - prev.values.push(obj.name) - } - } -); -test( x , "A" ); - -x = t.group( - { - key: {foo:1}, - initial: {count:0}, - reduce: function (obj, prev){ - if (!prev.values) {prev.values = [];} - prev.count++; - prev.values.push(obj.name); - } - } -); -test( x , "B" ); - diff --git a/jstests/group5.js b/jstests/group5.js deleted file mode 100644 index 3534fe5f030..00000000000 --- a/jstests/group5.js +++ /dev/null @@ -1,38 +0,0 @@ - -t = db.group5; -t.drop(); - -// each group has groupnum+1 5 users -for ( var group=0; group<10; group++ ){ - for ( var i=0; i<5+group; i++ ){ - t.save( { group : "group" + group , user : i } ) - } -} - -function c( group ){ - return t.group( - { - key : { group : 1 } , - q : { group : "group" + group } , - initial : { users : {} }, - reduce : function(obj,prev){ - prev.users[obj.user] = true; // add this user to the hash - }, - finalize : function(x){ - var count = 0; - for (var key in x.users){ - count++; - } - - //replace user obj with count - //count add new field and keep users - x.users = count; - return x; - } - })[0]; // returns array -} - -assert.eq( "group0" , c(0).group , "g0" ); -assert.eq( 5 , c(0).users , "g0 a" ); -assert.eq( "group5" , c(5).group , "g5" ); -assert.eq( 10 , c(5).users , "g5 a" ); diff --git a/jstests/group6.js b/jstests/group6.js deleted file mode 100644 index b77a37a5d11..00000000000 --- a/jstests/group6.js +++ 
/dev/null @@ -1,32 +0,0 @@ -t = db.jstests_group6; -t.drop(); - -for( i = 1; i <= 10; ++i ) { - t.save( {i:new NumberLong( i ),y:1} ); -} - -assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); - -t.drop(); -for( i = 1; i <= 10; ++i ) { - if ( i % 2 == 0 ) { - t.save( {i:new NumberLong( i ),y:1} ); - } else { - t.save( {i:i,y:1} ); - } -} - -assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); - -t.drop(); -for( i = 1; i <= 10; ++i ) { - if ( i % 2 == 1 ) { - t.save( {i:new NumberLong( i ),y:1} ); - } else { - t.save( {i:i,y:1} ); - } -} - -assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" ); - -assert.eq.automsg( "NumberLong(10)", "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count" ); \ No newline at end of file diff --git a/jstests/group7.js b/jstests/group7.js deleted file mode 100644 index f18a84055f4..00000000000 --- a/jstests/group7.js +++ /dev/null @@ -1,45 +0,0 @@ -// Test yielding group command SERVER-1395 - -t = db.jstests_group7; -t.drop(); - -function checkForYield( docs, updates ) { - t.drop(); - a = 0; - for( var i = 0; i < docs; ++i ) { - t.save( {a:a} ); - } - db.getLastError(); - - // Iteratively update all a values atomically. - p = startParallelShell( 'for( a = 0; a < ' + updates + '; ++a ) { db.jstests_group7.update( {$atomic:true}, {$set:{a:a}}, false, true ); db.getLastError(); }' ); - - for( var i = 0; i < updates; ++i ) { - print("running group " + i + " of " + updates); - ret = t.group({key:{a:1},reduce:function(){},initial:{}}); - // Check if group sees more than one a value, indicating that it yielded. 
- if ( ret.length > 1 ) { - p(); - return true; - } - printjson( ret ); - } - - p(); - return false; -} - -var yielded = false; -var docs = 1500; -var updates = 50; -for( var j = 1; j <= 6; ++j ) { - print("Iteration " + j + " docs = " + docs + " updates = " + updates); - if ( checkForYield( docs, updates ) ) { - yielded = true; - break; - } - // Increase docs and updates to encourage yielding. - docs *= 2; - updates *= 2; -} -assert( yielded ); diff --git a/jstests/group_empty.js b/jstests/group_empty.js deleted file mode 100644 index 62a734ed0f8..00000000000 --- a/jstests/group_empty.js +++ /dev/null @@ -1,8 +0,0 @@ - -t = db.group_empty; -t.drop(); - -res1 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}}); -t.ensureIndex( { x : 1 } ); -res2 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}}); -assert.eq( res1, res2 ); diff --git a/jstests/grow_hash_table.js b/jstests/grow_hash_table.js deleted file mode 100644 index 3e148b7240f..00000000000 --- a/jstests/grow_hash_table.js +++ /dev/null @@ -1,45 +0,0 @@ -// This test creates a large projection, which causes a set of field names to -// be stored in a StringMap (based on UnorderedFastKeyTable). The hash table -// starts with 20 slots, but must be grown repeatedly to hold the complete set -// of fields. This test verifies that we can grow the hash table repeatedly -// with no failures. -// -// Related to SERVER-9824. - -var testDB = db.getSiblingDB('grow_hash_table'); - -var doTest = function(count) { - print('Testing with count of ' + count); - testDB.dropDatabase(); - var id = { data: 1 }; - var doc = { _id: id }; - var projection = { }; - - // Create a document and a projection with fields r1, r2, r3 ... 
- for (var i = 1; i <= count; ++i) { - var r = 'r' + i; - doc[r] = i; - projection[r] = 1; - } - - // Store the document - testDB.collection.insert(doc); - var errorObj = testDB.getLastErrorObj(); - assert(errorObj.err == null, - 'Failed to insert document, getLastErrorObj = ' + tojsononeline(errorObj)); - - // Try to read the document using a large projection - try { - var findCount = testDB.collection.find({ _id: id }, projection).itcount(); - assert(findCount == 1, - 'Failed to find single stored document, find().itcount() == ' + findCount); - } - catch (e) { - testDB.dropDatabase(); - doassert('Test FAILED! Caught exception ' + tojsononeline(e)); - } - testDB.dropDatabase(); - jsTest.log('Test PASSED'); -} - -doTest(10000); diff --git a/jstests/hashindex1.js b/jstests/hashindex1.js deleted file mode 100644 index 34bd6dc0725..00000000000 --- a/jstests/hashindex1.js +++ /dev/null @@ -1,94 +0,0 @@ -var t = db.hashindex1; -t.drop() - -//test non-single field hashed indexes don't get created (maybe change later) -var badspec = {a : "hashed" , b : 1}; -t.ensureIndex( badspec ); -assert.eq( t.getIndexes().length , 1 , "only _id index should be created"); - -//test unique index not created (maybe change later) -var goodspec = {a : "hashed"}; -t.ensureIndex( goodspec , {"unique" : true}); -assert.eq( t.getIndexes().length , 1 , "unique index got created."); - -//now test that non-unique index does get created -t.ensureIndex(goodspec); -assert.eq( t.getIndexes().length , 2 , "hashed index didn't get created"); - -//test basic inserts -for(i=0; i < 10; i++ ){ - t.insert( {a:i } ); -} -assert.eq( t.find().count() , 10 , "basic insert didn't work"); -assert.eq( t.find().hint(goodspec).toArray().length , 10 , "basic insert didn't work"); -assert.eq( t.find({a : 3}).hint({_id : 1}).toArray()[0]._id , - t.find({a : 3}).hint(goodspec).toArray()[0]._id , - "hashindex lookup didn't work" ); - - -//make sure things with the same hash are not both returned -t.insert( {a: 3.1} ); 
-assert.eq( t.find().count() , 11 , "additional insert didn't work"); -assert.eq( t.find({a : 3.1}).hint(goodspec).toArray().length , 1); -assert.eq( t.find({a : 3}).hint(goodspec).toArray().length , 1); -//test right obj is found -assert.eq( t.find({a : 3.1}).hint(goodspec).toArray()[0].a , 3.1); - -//test that hashed cursor is used when it should be -var cursorname = "BtreeCursor a_hashed"; -assert.eq( t.find({a : 1}).explain().cursor , - cursorname , - "not using hashed cursor"); - -// SERVER-12222 -//printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() ) -//assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor , -// cursorname , -// "not using hashed cursor"); -assert.neq( t.find({c : 1}).explain().cursor , - cursorname , - "using irrelevant hashed cursor"); - -printjson( t.find({a : {$in : [1,2]}}).explain() ) -// Hash index used with a $in set membership predicate. -assert.eq( t.find({a : {$in : [1,2]}}).explain()["cursor"], - "BtreeCursor a_hashed", - "not using hashed cursor"); - -// Hash index used with a singleton $and predicate conjunction. -assert.eq( t.find({$and : [{a : 1}]}).explain()["cursor"], - "BtreeCursor a_hashed", - "not using hashed cursor"); - -// Hash index used with a non singleton $and predicate conjunction. 
-assert.eq( t.find({$and : [{a : {$in : [1,2]}},{a : {$gt : 1}}]}).explain()["cursor"], - "BtreeCursor a_hashed", - "not using hashed cursor"); - -//test creation of index based on hash of _id index -var goodspec2 = {'_id' : "hashed"}; -t.ensureIndex( goodspec2 ); -assert.eq( t.getIndexes().length , 3 , "_id index didn't get created"); - -var newid = t.findOne()["_id"]; -assert.eq( t.find( {_id : newid} ).hint( {_id : 1} ).toArray()[0]._id , - t.find( {_id : newid} ).hint( goodspec2 ).toArray()[0]._id, - "using hashed index and different index returns different docs"); - - -//test creation of sparse hashed index -var sparseindex = {b : "hashed"}; -t.ensureIndex( sparseindex , {"sparse" : true}); -assert.eq( t.getIndexes().length , 4 , "sparse index didn't get created"); - -//test sparse index has smaller total items on after inserts -for(i=0; i < 10; i++ ){ - t.insert( {b : i} ); -} -var totalb = t.find().hint(sparseindex).toArray().length; -assert.eq( totalb , 10 , "sparse index has wrong total"); - -var total = t.find().hint({"_id" : 1}).toArray().length; -var totala = t.find().hint(goodspec).toArray().length; -assert.eq(total , totala , "non-sparse index has wrong total"); -assert.lt(totalb , totala , "sparse index should have smaller total"); diff --git a/jstests/hashtest1.js b/jstests/hashtest1.js deleted file mode 100644 index 981a0c36877..00000000000 --- a/jstests/hashtest1.js +++ /dev/null @@ -1,78 +0,0 @@ -//hashtest1.js -//Simple tests to check hashing of various types -//make sure that different numeric types hash to same thing, and other sanity checks - -var hash = function( v , seed ){ - if (seed) - return db.runCommand({"_hashBSONElement" : v , "seed" : seed})["out"]; - else - return db.runCommand({"_hashBSONElement" : v})["out"]; -}; - -var oidHash = hash( ObjectId() ); -var oidHash2 = hash( ObjectId() ); -var oidHash3 = hash( ObjectId() ); -assert(! friendlyEqual( oidHash, oidHash2) , "ObjectIDs should hash to different things"); -assert(! 
friendlyEqual( oidHash, oidHash3) , "ObjectIDs should hash to different things"); -assert(! friendlyEqual( oidHash2, oidHash3) , "ObjectIDs should hash to different things"); - -var intHash = hash( NumberInt(3) ); -var doubHash = hash( 3 ); -var doubHash2 = hash( 3.0 ); -var longHash = hash( NumberLong(3) ); -var fracHash = hash( NumberInt(3.5) ); -assert.eq( intHash , doubHash ); -assert.eq( intHash , doubHash2 ); -assert.eq( intHash , longHash ); -assert.eq( intHash , fracHash ); - -var trueHash = hash( true ); -var falseHash = hash( false ); -assert(! friendlyEqual( trueHash, falseHash) , "true and false should hash to different things"); - -var nullHash = hash( null ); -assert(! friendlyEqual( falseHash , nullHash ) , "false and null should hash to different things"); - -var dateHash = hash( new Date() ); -sleep(1); -var isodateHash = hash( ISODate() ); -assert(! friendlyEqual( dateHash, isodateHash) , "different dates should hash to different things"); - -var stringHash = hash( "3" ); -assert(! friendlyEqual( intHash , stringHash ), "3 and \"3\" should hash to different things"); - -var regExpHash = hash( RegExp("3") ); -assert(! friendlyEqual( stringHash , regExpHash) , "\"3\" and RegExp(3) should hash to different things"); - -var intHash4 = hash( 4 ); -assert(! friendlyEqual( intHash , intHash4 ), "3 and 4 should hash to different things"); - -var intHashSeeded = hash( 4 , 3 ); -assert(! friendlyEqual(intHash4 , intHashSeeded ), "different seeds should make different hashes"); - -var minkeyHash = hash( MinKey ); -var maxkeyHash = hash( MaxKey ); -assert(! friendlyEqual(minkeyHash , maxkeyHash ), "minkey and maxkey should hash to different things"); - -var arrayHash = hash( [0,1.0,NumberLong(2)] ); -var arrayHash2 = hash( [0,NumberInt(1),2] ); -assert.eq( arrayHash , arrayHash2 , "didn't squash numeric types in array"); - -var objectHash = hash( {"0":0, "1" : NumberInt(1), "2" : 2} ); -assert(! 
friendlyEqual(objectHash , arrayHash2) , "arrays and sub-objects should hash to different things"); - -var c = hash( {a : {}, b : 1} ); -var d = hash( {a : {b : 1}} ); -assert(! friendlyEqual( c , d ) , "hashing doesn't group sub-docs and fields correctly"); - -var e = hash( {a : 3 , b : [NumberLong(3), {c : NumberInt(3)}]} ); -var f = hash( {a : NumberLong(3) , b : [NumberInt(3), {c : 3.0}]} ); -assert.eq( e , f , "recursive number squashing doesn't work"); - -var nanHash = hash( 0/0 ); -var zeroHash = hash( 0 ); -assert.eq( nanHash , zeroHash , "NaN and Zero should hash to the same thing"); - - -//should also test that CodeWScope hashes correctly -//but waiting for SERVER-3391 (CodeWScope support in shell) \ No newline at end of file diff --git a/jstests/hint1.js b/jstests/hint1.js deleted file mode 100644 index b5a580f2b93..00000000000 --- a/jstests/hint1.js +++ /dev/null @@ -1,16 +0,0 @@ - -p = db.jstests_hint1; -p.drop(); - -p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } ); -p.ensureIndex( { ts: 1 } ); - -e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: "alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain(); -assert.eq(e.indexBounds.ts[0][0].getTime(), new Date(1234119308272).getTime(), "A"); - -//printjson(e); - -assert.eq( /*just below min date is bool true*/true, e.indexBounds.ts[0][1], "B"); - -assert.eq(1, p.find({ live: true, ts: { $lt: new Date(1234119308272) }, cls: "entry", verticals: "alleyinsider" }).sort({ ts: -1 }).hint({ ts: 1 }).count()); - diff --git a/jstests/hostinfo.js b/jstests/hostinfo.js deleted file mode 100644 index 16c3810b2c4..00000000000 --- a/jstests/hostinfo.js +++ /dev/null @@ -1,33 +0,0 @@ -// SERVER-4615: Ensure hostInfo() command returns expected results on each platform - -assert.commandWorked( db.hostInfo() ); -var hostinfo = db.hostInfo(); - -// test for os-specific fields -if (hostinfo.os.type == "Windows") { - assert.neq( 
hostinfo.os.name, "" || null, "Missing Windows os name" ); - assert.neq( hostinfo.os.version, "" || null, "Missing Windows version" ); - -} else if (hostinfo.os.type == "Linux") { - assert.neq( hostinfo.os.name, "" || null, "Missing Linux os/distro name" ); - assert.neq( hostinfo.os.version, "" || null, "Missing Lindows version" ); - -} else if (hostinfo.os.type == "Darwin") { - assert.neq( hostinfo.os.name, "" || null, "Missing Darwin os name" ); - assert.neq( hostinfo.os.version, "" || null, "Missing Darwin version" ); - -} else if (hostinfo.os.type == "BSD") { - assert.neq( hostinfo.os.name, "" || null, "Missing FreeBSD os name" ); - assert.neq( hostinfo.os.version, "" || null, "Missing FreeBSD version" ); -} - -// comment out this block for systems which have not implemented hostinfo. -if (hostinfo.os.type != "") { - assert.neq( hostinfo.system.hostname, "" || null, "Missing Hostname" ); - assert.neq( hostinfo.system.currentTime, "" || null, "Missing Current Time" ); - assert.neq( hostinfo.system.cpuAddrSize, "" || null || 0, "Missing CPU Address Size" ); - assert.neq( hostinfo.system.memSizeMB, "" || null, "Missing Memory Size" ); - assert.neq( hostinfo.system.numCores, "" || null || 0, "Missing Number of Cores" ); - assert.neq( hostinfo.system.cpuArch, "" || null, "Missing CPU Architecture" ); - assert.neq( hostinfo.system.numaEnabled, "" || null, "Missing NUMA flag" ); -} diff --git a/jstests/id1.js b/jstests/id1.js deleted file mode 100644 index 9236340e4ec..00000000000 --- a/jstests/id1.js +++ /dev/null @@ -1,16 +0,0 @@ - -t = db.id1 -t.drop(); - -t.save( { _id : { a : 1 , b : 2 } , x : "a" } ); -t.save( { _id : { a : 1 , b : 2 } , x : "b" } ); -t.save( { _id : { a : 3 , b : 2 } , x : "c" } ); -t.save( { _id : { a : 4 , b : 2 } , x : "d" } ); -t.save( { _id : { a : 4 , b : 2 } , x : "e" } ); -t.save( { _id : { a : 2 , b : 2 } , x : "f" } ); - -assert.eq( 4 , t.find().count() , "A" ); -assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x ); 
-assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x ); -assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x ); -assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x ); diff --git a/jstests/idhack.js b/jstests/idhack.js deleted file mode 100644 index 21409645489..00000000000 --- a/jstests/idhack.js +++ /dev/null @@ -1,43 +0,0 @@ - -t = db.idhack -t.drop() - - -t.insert( { _id : { x : 1 } , z : 1 } ) -t.insert( { _id : { x : 2 } , z : 2 } ) -t.insert( { _id : { x : 3 } , z : 3 } ) -t.insert( { _id : 1 , z : 4 } ) -t.insert( { _id : 2 , z : 5 } ) -t.insert( { _id : 3 , z : 6 } ) - -assert.eq( 2 , t.findOne( { _id : { x : 2 } } ).z , "A1" ) -assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).count() , "A2" ) -assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).itcount() , "A3" ) - -t.update( { _id : { x : 2 } } , { $set : { z : 7 } } ) -assert.eq( 7 , t.findOne( { _id : { x : 2 } } ).z , "B1" ) - -t.update( { _id : { $gte : 2 } } , { $set : { z : 8 } } , false , true ) -assert.eq( 4 , t.findOne( { _id : 1 } ).z , "C1" ) -assert.eq( 8 , t.findOne( { _id : 2 } ).z , "C2" ) -assert.eq( 8 , t.findOne( { _id : 3 } ).z , "C3" ) - -// explain output should show that the ID hack was applied. -var query = { _id : { x : 2 } }; -var explain = t.find( query ).explain( true ); -print( "explain for " + tojson( query , "" , true ) + " = " + tojson( explain ) ); -assert.eq( 1 , explain.n , "D1" ); -assert.eq( 1 , explain.nscanned , "D2" ); -assert.neq( undefined , explain.cursor , "D3" ); -assert.neq( "" , explain.cursor , "D4" ); -assert.neq( undefined , explain.indexBounds , "D5" ); -assert.neq( {} , explain.indexBounds , "D6" ); - -// ID hack cannot be used with hint(). 
-var query = { _id : { x : 2 } }; -var explain = t.find( query ).explain(); -t.ensureIndex( { _id : 1 , a : 1 } ); -var hintExplain = t.find( query ).hint( { _id : 1 , a : 1 } ).explain(); -print( "explain for hinted query = " + tojson( hintExplain ) ); -assert.neq( explain.cursor, hintExplain.cursor, "E1" ); - diff --git a/jstests/in.js b/jstests/in.js deleted file mode 100644 index da1313692e1..00000000000 --- a/jstests/in.js +++ /dev/null @@ -1,24 +0,0 @@ - -t = db.in1; -t.drop(); - -t.save( { a : 1 } ); -t.save( { a : 2 } ); - -// $in must take an array as argument: SERVER-7445 -assert.throws( function() { return t.find( { a : { $in : { x : 1 } } } ).itcount(); } ); -assert.throws( function() { return t.find( { a : { $in : 1 } } ).itcount(); } ); - -assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" ); -assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" ); -assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" ); - -t.ensureIndex( { a : 1 } ); - -assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" ); -assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" ); -assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" ); - -assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" ); - -assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "H" ); diff --git a/jstests/in2.js b/jstests/in2.js deleted file mode 100644 index 66b90daa25a..00000000000 --- a/jstests/in2.js +++ /dev/null @@ -1,33 +0,0 @@ - -t = db.in2; - -function go( name , index ){ - - t.drop(); - - t.save( { a : 1 , b : 1 } ); - t.save( { a : 1 , b : 2 } ); - t.save( { a : 1 , b : 3 } ); - - t.save( { a : 1 , b : 1 } ); - t.save( { a : 2 , b : 2 } ); - t.save( { a : 3 , b : 3 } ); - - t.save( { a : 1 , b : 1 } ); - t.save( { a : 2 , b : 1 } ); - t.save( { a : 3 , b : 1 } ); - - if ( index ) - t.ensureIndex( index ); - - assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" ); - - 
assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" ); -} - -go( "no index" ); -go( "index on a" , { a : 1 } ); -go( "index on b" , { b : 1 } ); -go( "index on a&b" , { a : 1 , b : 1 } ); - - diff --git a/jstests/in3.js b/jstests/in3.js deleted file mode 100644 index b0a8bb7b81f..00000000000 --- a/jstests/in3.js +++ /dev/null @@ -1,11 +0,0 @@ -t = db.jstests_in3; - -t.drop(); -t.ensureIndex( {i:1} ); -assert.eq( {i:[[3,3]]}, t.find( {i:{$in:[3]}} ).explain().indexBounds , "A1" ); -assert.eq( {i:[[3,3],[6,6]]}, t.find( {i:{$in:[3,6]}} ).explain().indexBounds , "A2" ); - -for ( var i=0; i<20; i++ ) - t.insert( { i : i } ); - -assert.eq( 3 , t.find( {i:{$in:[3,6]}} ).explain().nscanned , "B1" ) diff --git a/jstests/in4.js b/jstests/in4.js deleted file mode 100644 index 3e3dca29528..00000000000 --- a/jstests/in4.js +++ /dev/null @@ -1,42 +0,0 @@ -t = db.jstests_in4; - -function checkRanges( a, b ) { - assert.eq( a, b ); -} - -t.drop(); -t.ensureIndex( {a:1,b:1} ); -checkRanges( {a:[[2,2]],b:[[3,3]]}, t.find( {a:2,b:3} ).explain().indexBounds ); -checkRanges( {a:[[2,2],[3,3]],b:[[4,4]]}, t.find( {a:{$in:[2,3]},b:4} ).explain().indexBounds ); -checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds ); -checkRanges( {a:[[2,2],[3,3]],b:[[4,4],[5,5]]}, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().indexBounds ); - -checkRanges( {a:[[2,2],[3,3]],b:[[4,10]]}, t.find( {a:{$in:[2,3]},b:{$gt:4,$lt:10}} ).explain().indexBounds ); - -t.save( {a:1,b:1} ); -t.save( {a:2,b:4.5} ); -t.save( {a:2,b:4} ); -assert.eq( 2, t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).hint( {a:1,b:1} ).explain().nscanned ); -assert.eq( 2, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).a ); -assert.eq( 4, t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).b ); - -t.drop(); -t.ensureIndex( {a:1,b:1,c:1} ); -checkRanges( {a:[[2,2]],b:[[3,3],[4,4]],c:[[5,5]]}, t.find( {a:2,b:{$in:[3,4]},c:5} ).explain().indexBounds ); - -t.save( 
{a:2,b:3,c:5} ); -t.save( {a:2,b:3,c:4} ); -assert.eq( 1, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned ); -t.remove({}); -t.save( {a:2,b:4,c:5} ); -t.save( {a:2,b:4,c:4} ); -assert.eq( 2, t.find( {a:2,b:{$in:[3,4]},c:5} ).hint( {a:1,b:1,c:1} ).explain().nscanned ); - -t.drop(); -t.ensureIndex( {a:1,b:-1} ); -ib = t.find( {a:2,b:{$in:[3,4]}} ).explain().indexBounds; -checkRanges( {a:[[2,2]],b:[[4,4],[3,3]]}, ib ); -assert( ib.b[ 0 ][ 0 ] > ib.b[ 1 ][ 0 ] ); -ib = t.find( {a:2,b:{$in:[3,4]}} ).sort( {a:-1,b:1} ).explain().indexBounds; -checkRanges( {a:[[2,2]],b:[[3,3],[4,4]]}, ib ); -assert( ib.b[ 0 ][ 0 ] < ib.b[ 1 ][ 0 ] ); diff --git a/jstests/in5.js b/jstests/in5.js deleted file mode 100644 index 435c8864004..00000000000 --- a/jstests/in5.js +++ /dev/null @@ -1,56 +0,0 @@ - -t = db.in5 - -function go( fn ){ - t.drop(); - o = {}; - o[fn] = { a : 1 , b : 2 }; - t.insert( o ); - - x = {}; - x[fn] = { a : 1 , b : 2 }; - assert.eq( 1 , t.find( x ).itcount() , "A1 - " + fn ); - - - y = {}; - y[fn] = { $in : [ { a : 1 , b : 2 } ] } - assert.eq( 1 , t.find( y ).itcount() , "A2 - " + fn ); - - - z = {}; - z[fn+".a"] = 1; - z[fn+".b"] = { $in : [ 2 ] } - assert.eq( 1 , t.find( z ).itcount() , "A3 - " + fn ); // SERVER-1366 - - - i = {} - i[fn] = 1 - t.ensureIndex( i ) - - assert.eq( 1 , t.find( x ).itcount() , "B1 - " + fn ); - assert.eq( 1 , t.find( y ).itcount() , "B2 - " + fn ); - assert.eq( 1 , t.find( z ).itcount() , "B3 - " + fn ); // SERVER-1366 - - t.dropIndex( i ) - - assert.eq( 1 , t.getIndexes().length , "T2" ); - - i = {} - i[fn + ".a" ] = 1; - t.ensureIndex( i ) - assert.eq( 2 , t.getIndexes().length , "T3" ); - - assert.eq( 1 , t.find( x ).itcount() , "C1 - " + fn ); - assert.eq( 1 , t.find( y ).itcount() , "C2 - " + fn ); - assert.eq( 1 , t.find( z ).itcount() , "C3 - " + fn ); // SERVER-1366 - - t.dropIndex( i ) - - -} - -go( "x" ); -go( "_id" ) - - - diff --git a/jstests/in6.js b/jstests/in6.js deleted file mode 100644 index 
f114d93442a..00000000000 --- a/jstests/in6.js +++ /dev/null @@ -1,13 +0,0 @@ -t = db.jstests_in6; -t.drop(); - -t.save( {} ); - -function doTest() { - assert.eq.automsg( "1", "t.count( {i:null} )" ); - assert.eq.automsg( "1", "t.count( {i:{$in:[null]}} )" ); -} - -doTest(); -t.ensureIndex( {i:1} ); -doTest(); diff --git a/jstests/in8.js b/jstests/in8.js deleted file mode 100644 index 5e7e587629f..00000000000 --- a/jstests/in8.js +++ /dev/null @@ -1,23 +0,0 @@ -// SERVER-2829 Test arrays matching themselves within a $in expression. - -t = db.jstests_in8; -t.drop(); - -t.save( {key: [1]} ); -t.save( {key: ['1']} ); -t.save( {key: [[2]]} ); - -function doTest() { - assert.eq( 1, t.count( {key:[1]} ) ); - assert.eq( 1, t.count( {key:{$in:[[1]]}} ) ); - assert.eq( 1, t.count( {key:{$in:[[1]],$ne:[2]}} ) ); - assert.eq( 1, t.count( {key:{$in:[['1']],$type:2}} ) ); - assert.eq( 1, t.count( {key:['1']} ) ); - assert.eq( 1, t.count( {key:{$in:[['1']]}} ) ); - assert.eq( 1, t.count( {key:[2]} ) ); - assert.eq( 1, t.count( {key:{$in:[[2]]}} ) ); -} - -doTest(); -t.ensureIndex( {key:1} ); -doTest(); diff --git a/jstests/in9.js b/jstests/in9.js deleted file mode 100644 index cbe28e2e2df..00000000000 --- a/jstests/in9.js +++ /dev/null @@ -1,35 +0,0 @@ -// SERVER-2343 Test $in empty array matching. 
- -t = db.jstests_in9; -t.drop(); - -function someData() { - t.remove({}); - t.save( {key: []} ); -} - -function moreData() { - someData(); - t.save( {key: [1]} ); - t.save( {key: ['1']} ); - t.save( {key: null} ); - t.save( {} ); -} - -function check() { - assert.eq( 1, t.count( {key:[]} ) ); - assert.eq( 1, t.count( {key:{$in:[[]]}} ) ); -} - -function doTest() { - someData(); - check(); - moreData(); - check(); -} - -doTest(); - -// SERVER-1943 not fixed yet -t.ensureIndex( {key:1} ); -doTest(); diff --git a/jstests/ina.js b/jstests/ina.js deleted file mode 100644 index cf614ab994d..00000000000 --- a/jstests/ina.js +++ /dev/null @@ -1,15 +0,0 @@ -// Uassert when $elemMatch is attempted within $in SERVER-3545 - -t = db.jstests_ina; -t.drop(); -t.save( {} ); - -assert.throws( function() { t.find( {a:{$in:[{$elemMatch:{b:1}}]}} ).itcount(); } ); -assert.throws( function() { t.find( {a:{$not:{$in:[{$elemMatch:{b:1}}]}}} ).itcount(); } ); - -assert.throws( function() { t.find( {a:{$nin:[{$elemMatch:{b:1}}]}} ).itcount(); } ); -assert.throws( function() { t.find( {a:{$not:{$nin:[{$elemMatch:{b:1}}]}}} ).itcount(); } ); - -// NOTE Above we don't check cases like {b:2,$elemMatch:{b:3,4}} - generally -// we assume that the first key is $elemMatch if any key is, and validating -// every key is expensive in some cases. \ No newline at end of file diff --git a/jstests/inb.js b/jstests/inb.js deleted file mode 100644 index 34ec843d36c..00000000000 --- a/jstests/inb.js +++ /dev/null @@ -1,19 +0,0 @@ -// Test $in regular expressions with overlapping index bounds. 
SERVER-4677 - -t = db.jstests_inb; -t.drop(); - -function checkBoundsAndResults( query ) { - assert.eq( [ 'a', 'b' ], t.find( query ).explain().indexBounds.x[0] ); - assert.eq( 4, t.count( query ) ); - assert.eq( 4, t.find( query ).itcount() ); -} - -t.ensureIndex( {x:1} ); -t.save( {x:'aa'} ); -t.save( {x:'ab'} ); -t.save( {x:'ac'} ); -t.save( {x:'ad'} ); - -checkBoundsAndResults( {x:{$in:[/^a/,/^ab/]}} ); -checkBoundsAndResults( {x:{$in:[/^ab/,/^a/]}} ); diff --git a/jstests/inc-SERVER-7446.js b/jstests/inc-SERVER-7446.js deleted file mode 100644 index 73cdef3dbd5..00000000000 --- a/jstests/inc-SERVER-7446.js +++ /dev/null @@ -1,43 +0,0 @@ -var c = db.incSERVER7446 - -// A 32 bit overflow spills to 64 bits -c.drop(); -c.save( { a: NumberInt( "2147483647" ) } ); -c.update( {}, { $inc:{ a:NumberInt( 1 ) } } ); -var gle = db.getLastErrorObj(); -assert.eq(1, gle.n, "Object not inserted"); -var res = c.findOne(); -assert.eq(NumberLong, res.a.constructor, - "NumberInt incremented beyond std::numeric_limits::max() not NumberLong"); -assert.eq(NumberLong("2147483648"), res.a, - "NumberInt incremented beyond std::numeric_limits::max() has wrong value"); - -// A 32 bit underflow spills to 64 bits -c.drop(); -c.save( { a: NumberInt( "-2147483648" ) } ); -c.update( {}, { $inc:{ a:NumberInt( -1 ) } } ); -gle = db.getLastErrorObj(); -assert.eq(1, gle.n, "Object not inserted"); -res = c.findOne(); -assert.eq(NumberLong, res.a.constructor, - "NumberInt decremented beyond std::numeric_limits::min() not NumberLong"); -assert.eq(NumberLong("-2147483649"), res.a, - "NumberInt decremented beyond std::numeric_limits::min() has wrong value"); - -// A 64 bit overflow is an error -c.drop(); -c.save( { a: NumberLong( "9223372036854775807" ) } ); -c.update( {}, { $inc:{ a:NumberInt( 1 ) } } ); -gle = db.getLastErrorObj(); -assert.eq(0, gle.n, - "Did not fail to increment a NumberLong past std::numeric_limits::max()"); - -// A 64 bit underflow is an error -c.drop(); -c.save( { a: 
NumberLong( "-9223372036854775808" ) } ); -c.update( {}, { $inc:{ a:NumberInt( -1 ) } } ); -gle = db.getLastErrorObj(); -assert.eq(0, gle.n, - "Did not fail to decrement a NumberLong past std::numeric_limits::min()"); - -c.drop() diff --git a/jstests/inc1.js b/jstests/inc1.js deleted file mode 100644 index 027f307a476..00000000000 --- a/jstests/inc1.js +++ /dev/null @@ -1,32 +0,0 @@ - -t = db.inc1; -t.drop(); - -function test( num , name ){ - assert.eq( 1 , t.count() , name + " count" ); - assert.eq( num , t.findOne().x , name + " value" ); -} - -t.save( { _id : 1 , x : 1 } ); -test( 1 , "A" ); - -t.update( { _id : 1 } , { $inc : { x : 1 } } ); -test( 2 , "B" ); - -t.update( { _id : 1 } , { $inc : { x : 1 } } ); -test( 3 , "C" ); - -t.update( { _id : 2 } , { $inc : { x : 1 } } ); -test( 3 , "D" ); - -t.update( { _id : 1 } , { $inc : { x : 2 } } ); -test( 5 , "E" ); - -t.update( { _id : 1 } , { $inc : { x : -1 } } ); -test( 4 , "F" ); - -t.ensureIndex( { x : 1 } ); - -t.update( { _id : 1 } , { $inc : { x : 1 } } ); -test( 5 , "G" ); - diff --git a/jstests/inc2.js b/jstests/inc2.js deleted file mode 100644 index 75a8e65a384..00000000000 --- a/jstests/inc2.js +++ /dev/null @@ -1,22 +0,0 @@ - -t = db.inc2 -t.drop(); - -t.save( { _id : 1 , x : 1 } ); -t.save( { _id : 2 , x : 2 } ); -t.save( { _id : 3 , x : 3 } ); - -function order(){ - return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } ); -} - -assert.eq( "1,2,3" , order() , "A" ); - -t.update( { _id : 1 } , { $inc : { x : 4 } } ); -assert.eq( "2,3,1" , order() , "B" ); - -t.ensureIndex( { x : 1 } ); -assert.eq( "2,3,1" , order() , "C" ); - -t.update( { _id : 3 } , { $inc : { x : 4 } } ); -assert.eq( "2,1,3" , order() , "D" ); diff --git a/jstests/inc3.js b/jstests/inc3.js deleted file mode 100644 index baeeb198cf4..00000000000 --- a/jstests/inc3.js +++ /dev/null @@ -1,16 +0,0 @@ - -t = db.inc3; - -t.drop(); -t.save( { _id : 1 , z : 1 , a : 1 } ); -t.update( {} , { $inc : { z : 1 , a : 1 } } ); 
-t.update( {} , { $inc : { a : 1 , z : 1 } } ); -assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" ) - - -t.drop(); -t.save( { _id : 1 , a : 1 , z : 1 } ); -t.update( {} , { $inc : { z : 1 , a : 1 } } ); -t.update( {} , { $inc : { a : 1 , z : 1 } } ); -assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" ) - diff --git a/jstests/index1.js b/jstests/index1.js deleted file mode 100644 index 64bbfa8732b..00000000000 --- a/jstests/index1.js +++ /dev/null @@ -1,24 +0,0 @@ - -t = db.embeddedIndexTest; - -t.remove( {} ); - -o = { name : "foo" , z : { a : 17 , b : 4} }; -t.save( o ); - -assert( t.findOne().z.a == 17 ); -assert( t.findOne( { z : { a : 17 } } ) == null); - -t.ensureIndex( { "z.a" : 1 } ); - -assert( t.findOne().z.a == 17 ); -assert( t.findOne( { z : { a : 17 } } ) == null); - -o = { name : "bar" , z : { a : 18 } }; -t.save( o ); - -assert.eq.automsg( "2", "t.find().length()" ); -assert.eq.automsg( "2", "t.find().sort( { 'z.a' : 1 } ).length()" ); -assert.eq.automsg( "2", "t.find().sort( { 'z.a' : -1 } ).length()" ); - -assert(t.validate().valid); diff --git a/jstests/index10.js b/jstests/index10.js deleted file mode 100644 index 92f5927097d..00000000000 --- a/jstests/index10.js +++ /dev/null @@ -1,32 +0,0 @@ -// unique index, drop dups - -t = db.jstests_index10; -t.drop(); - -t.save( {i:1} ); -t.save( {i:2} ); -t.save( {i:1} ); -t.save( {i:3} ); -t.save( {i:1} ); - -t.ensureIndex( {i:1} ); -assert.eq( 5, t.count() ); -t.dropIndexes(); -t.ensureIndex( {i:1}, true ); -err = db.getLastErrorObj(); -assert( err.err , "err.err" ); -assert.eq( 11000, err.code ); -assert( 1 == db.system.indexes.count( {ns:"test.jstests_index10" } ), "only id index" ); -// t.dropIndexes(); - -ts = t.totalIndexSize(); -t.ensureIndex( {i:1}, [ true, true ] ); -ts2 = t.totalIndexSize(); - -assert.eq( ts * 2, ts2, "totalIndexSize fail" ); - -assert.eq( 3, t.count() ); -assert.eq( 1, t.count( {i:1} ) ); - -t.ensureIndex( {j:1}, [ true, true ] ); -assert.eq( 1, t.count() ); 
diff --git a/jstests/index13.js b/jstests/index13.js deleted file mode 100644 index 7e317d90d94..00000000000 --- a/jstests/index13.js +++ /dev/null @@ -1,147 +0,0 @@ -// Top level match fields within an $elemMatch clause may constrain multiple subfields from a -// compound multikey index. SERVER-3104 -// -// Given a multikey index { 'a.b':1, 'a.c':1 } and query { 'a.b':3, 'a.c':3 } only the index field -// 'a.b' is constrained to the range [3, 3], while the index field 'a.c' is just constrained -// to be within minkey and maxkey. This implementation ensures that the document -// { a:[ { b:3 }, { c:3 } ] }, which generates index keys { 'a.b':3, 'a.c':null } and -// { 'a.b':null and 'a.c':3 } will be retrieved for the query. (See SERVER-958 for more -// information.) -// -// If the query is instead { a:{ $elemMatch:{ b:3, c:3 } } } then the document -// { a:[ { b:3 }, { c:3 } ] } does not match. Until SERVER-3104 was implemented, the index -// constraints would be [3,3] on the 'a.b' field and [minkey,maxkey] on the 'a.c' field, the same as -// for the non $elemMatch query in the previous paragraph. With the SERVER-3104 implementation, -// constraints on two fields within a $elemMatch parent can both be applied to an index. Due to the -// SERVER-3104 implementation, the index constraints become [3,3] on the 'a.b' field _and_ [3,3] on -// the 'a.c' field. - -t = db.jstests_index13; -t.drop(); - -function assertConsistentResults( query ) { - assert.eq( t.find( query ).hint( { $natural:1 } ).sort( { _id:1 } ).toArray(), - t.find( query ).hint( index ).sort( { _id:1 } ).toArray() ); -} - -function assertResults( query ) { - explain = t.find( query ).hint( index ).explain(); - // printjson( explain ); // debug - assertConsistentResults( query ); -} - -// Cases with single dotted index fied names. 
-index = { 'a.b':1, 'a.c':1 }; -t.ensureIndex( index ); -t.save( { a:[ { b:1 }, { c:1 } ] } ); -t.save( { a:[ { b:1, c:1 } ] } ); -assert.eq( 2, t.count() ); -// Without $elemMatch. -assertResults( { 'a.b':1, 'a.c':1 } ); -// With $elemMatch. -assertResults( { a:{ $elemMatch:{ b:1, c:1 } } } ); - -// Without shared $elemMatch. -assertResults( { 'a.b':1, a:{ $elemMatch:{ c:1 } } } ); -// Two different $elemMatch expressions. -assertResults( { $and:[ { a:{ $elemMatch:{ b:1 } } }, - { a:{ $elemMatch:{ c:1 } } } ] } ); - - -// Cases relating to parse order and inclusion of intersected ranges. -assertResults( { 'a.b':1, a:{ $elemMatch:{ b:{ $gt:0 }, c:1 } } } ); -assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':1 } ); -assertResults( { 'a.c':1, a:{ $elemMatch:{ b:1, c:1 } } } ); -assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':{ $gt:0 } } ); - -// Cases with $elemMatch on multiple fields. -t.remove({}); -index = { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 }; -t.ensureIndex( index ); -t.insert( { a:[ { b:1 }, { c:1 } ], d: { e:1, f:1 } } ); -t.insert( { a:[ { b:1, c:1 } ], d: { e:1, f:1 } } ); -t.insert( { a:{ b:1, c:1 }, d:[ { e:1, f:1 } ] } ); -t.insert( { a:{ b:1, c:1 }, d:[ { e:1 }, { f:1 } ] } ); - -assert.eq( 4, t.count() ); - -// Without $elemMatch. -assertResults( { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 } ); -// With $elemMatch. -assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd': { $elemMatch:{ e:1, f:1 } } } ); -assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd.e': 1, 'd.f' : 1 } ); -assertResults( { 'a.b': 1, 'a.c' : 1, 'd': { $elemMatch:{ e:1, f:1 } } } ); - - -// Cases with nested $elemMatch. -t.remove({}) -index = { 'a.b.c':1, 'a.b.d' :1 }; -t.ensureIndex( index ); -t.insert( { a:[ { b: [ { c : 1, d : 1 } ] } ] } ) ; -t.insert( { a:[ { b: [ { c : 1 } , { d : 1 } ] } ] } ) ; -assert.eq( 2, t.count() ); -// Without $elemMatch. -assertResults( { 'a.b.c':1, 'a.b.d':1 } ); -// With $elemMatch. 
-assertResults( { "a" : { $elemMatch : { "b" : { $elemMatch : { c : 1, d : 1 } } } } } ); - -// Cases with double dotted index field names. -t.drop(); -index = { 'a.b.x':1, 'a.b.y':1 }; -t.ensureIndex( index ); -t.save( { a:{ b:{ x:1, y:1 } } } ); -t.save( { a:[ { b:{ x:1 } }, { b:{ y:1 } } ] } ); -t.save( { a:[ { b:[ { x:1 }, { y:1 } ] } ] } ); -t.save( { a:[ { b:[ { x:1, y:1 } ] } ] } ); -assert.eq( 4, t.count() ); - -// No $elemMatch. -assertResults( { 'a.b.x':1, 'a.b.y':1 } ); -// $elemMatch with dotted children. -assertResults( { a:{ $elemMatch:{ 'b.x':1, 'b.y':1 } } } ); -// $elemMatch with undotted children. -assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); - -// Cases where a field is indexed along with its children. -t.dropIndexes(); -index = { 'a':1, 'a.b.x':1, 'a.b.y':1 }; -t.ensureIndex( index ); - -// With $ne. -assertResults( { a:{ $ne:4 }, 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); - -// No constraint on a prior parent field. -assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } ); - -// Cases with double dotted index field names branching to different fields at each dot. -t.drop(); -index = { 'a.b.c':1, 'a.e.f':1, 'a.b.d':1, 'a.e.g':1 } -t.ensureIndex( index ); -t.save( { a:{ b:{ c:1, d:1 }, e:{ f:1, g:1 } } } ); -t.save( { a:[ { b:{ c:1 }, e:{ f:1 } }, { b:{ d:1 }, e:{ g:1 } } ] } ); -t.save( { a:[ { b:{ c:1 } }, { e:{ f:1 } }, { b:{ d:1 } }, { e:{ g:1 } } ] } ); -t.save( { a:[ { b:[ { c:1 }, { d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } ); -t.save( { a:[ { b:[ { c:[ 1 ] }, { d:[ 1 ] } ] }, { e:[ { f:[ 1 ] }, { g:[ 1 ] } ] } ] } ); -t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } ); -t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1, g:1 } ] } ] } ); -assert.eq( 7, t.count() ); - -// Constraint on a prior cousin field. -assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, - 'a.e':{ $elemMatch:{ f:1, g:1 } } } ); - -// Different constraint on a prior cousin field. 
-assertResults( { 'a.b':{ $elemMatch:{ d:1 } }, - 'a.e':{ $elemMatch:{ f:1, g:1 } } } ); - - -// Cases with double dotted index field names branching to different fields at each dot, and the -// same field name strings after the second dot. -t.drop(); -index = { 'a.b.c':1, 'a.e.c':1, 'a.b.d':1, 'a.e.d':1 } -t.ensureIndex( index ); -t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { c:1, d:1 } ] } ] } ); -assert.eq( 1, t.count() ); - -// Constraint on a prior cousin field with the same field names. -assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, 'a.e':{ $elemMatch:{ c:1, d:1 } } } ); diff --git a/jstests/index2.js b/jstests/index2.js deleted file mode 100644 index b54abcaa792..00000000000 --- a/jstests/index2.js +++ /dev/null @@ -1,40 +0,0 @@ -/* test indexing where the key is an embedded object. - */ - -t = db.embeddedIndexTest2; - -t.drop(); -assert( t.findOne() == null ); - -o = { name : "foo" , z : { a : 17 } }; -p = { name : "foo" , z : { a : 17 } }; -q = { name : "barrr" , z : { a : 18 } }; -r = { name : "barrr" , z : { k : "zzz", L:[1,2] } }; - -t.save( o ); - -assert( t.findOne().z.a == 17 ); - -t.save( p ); -t.save( q ); - -assert( t.findOne({z:{a:17}}).z.a==17 ); -assert( t.find({z:{a:17}}).length() == 2 ); -assert( t.find({z:{a:18}}).length() == 1 ); - -t.save( r ); - -assert( t.findOne({z:{a:17}}).z.a==17 ); -assert( t.find({z:{a:17}}).length() == 2 ); -assert( t.find({z:{a:18}}).length() == 1 ); - -t.ensureIndex( { z : 1 } ); - -assert( t.findOne({z:{a:17}}).z.a==17 ); -assert( t.find({z:{a:17}}).length() == 2 ); -assert( t.find({z:{a:18}}).length() == 1 ); - -assert( t.find().sort( { z : 1 } ).length() == 4 ); -assert( t.find().sort( { z : -1 } ).length() == 4 ); - -assert(t.validate().valid); diff --git a/jstests/index3.js b/jstests/index3.js deleted file mode 100644 index 80139460cb4..00000000000 --- a/jstests/index3.js +++ /dev/null @@ -1,16 +0,0 @@ - - -t = db.index3; -t.drop(); - -assert( t.getIndexes().length == 0 ); - -t.ensureIndex( { name : 1 
} ); - -t.save( { name : "a" } ); - -t.ensureIndex( { name : 1 } ); - -assert( t.getIndexes().length == 2 ); - -assert(t.validate().valid); diff --git a/jstests/index4.js b/jstests/index4.js deleted file mode 100644 index 9dd731c83ee..00000000000 --- a/jstests/index4.js +++ /dev/null @@ -1,33 +0,0 @@ -// index4.js - - -t = db.index4; -t.drop(); - -t.save( { name : "alleyinsider" , - instances : [ - { pool : "prod1" } , - { pool : "dev1" } - ] - } ); - -t.save( { name : "clusterstock" , - instances : [ - { pool : "dev1" } - ] - } ); - - -// this should fail, not allowed -- we confirm that. -t.ensureIndex( { instances : { pool : 1 } } ); -assert.eq( 0, db.system.indexes.find( {ns:"test.index4",name:{$ne:"_id_"}} ).count(), "no indexes should be here yet"); - -t.ensureIndex( { "instances.pool" : 1 } ); - -sleep( 10 ); - -a = t.find( { instances : { pool : "prod1" } } ); -assert( a.length() == 1, "len1" ); -assert( a[0].name == "alleyinsider", "alley" ); - -assert(t.validate().valid, "valid" ); diff --git a/jstests/index5.js b/jstests/index5.js deleted file mode 100644 index 841ac12ed45..00000000000 --- a/jstests/index5.js +++ /dev/null @@ -1,24 +0,0 @@ -// index5.js - test reverse direction index - -function validate() { - assert.eq( 2, t.find().count() ); - f = t.find().sort( { a: 1 } ); - assert.eq( 2, t.count() ); - assert.eq( 1, f[ 0 ].a ); - assert.eq( 2, f[ 1 ].a ); - r = t.find().sort( { a: -1 } ); - assert.eq( 2, r.count() ); - assert.eq( 2, r[ 0 ].a ); - assert.eq( 1, r[ 1 ].a ); -} - -t = db.index5; -t.drop(); - -t.save( { a: 1 } ); -t.save( { a: 2 } ); - -validate(); - -t.ensureIndex( { a: -1 } ); -validate(); diff --git a/jstests/index6.js b/jstests/index6.js deleted file mode 100644 index 8dbd8f74fcf..00000000000 --- a/jstests/index6.js +++ /dev/null @@ -1,8 +0,0 @@ -// index6.js Test indexes on array subelements. 
- -r = db.ed.db.index6; -r.drop(); - -r.save( { comments : [ { name : "eliot", foo : 1 } ] } ); -r.ensureIndex( { "comments.name": 1 } ); -assert( r.findOne( { "comments.name": "eliot" } ) ); diff --git a/jstests/index7.js b/jstests/index7.js deleted file mode 100644 index 9e3a6c66d11..00000000000 --- a/jstests/index7.js +++ /dev/null @@ -1,67 +0,0 @@ -// index7.js Test that we use an index when and only when we expect to. - -function index( q ) { - assert( q.explain().cursor.match( /^BtreeCursor/ ) , "index assert" ); -} - -function noIndex( q ) { - assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" ); -} - -function start( k, q, rev) { - var exp = q.explain().indexBounds; - var s = {a:exp.a[rev?1:0][0],b:exp.b[0][0]}; - assert.eq( k.a, s.a ); - assert.eq( k.b, s.b ); -} -function end( k, q, rev) { - var exp = q.explain().indexBounds - var e = {a:exp.a[rev?1:0][1],b:exp.b[0][1]}; - assert.eq( k.a, e.a ); - assert.eq( k.b, e.b ); -} -function both( k, q ) { - start( k, q ); - end( k, q ); -} - -f = db.ed_db_index7; -f.drop(); - -f.save( { a : 5 } ) -f.ensureIndex( { a: 1 } ); -index( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { a: 1 } ) ); -noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) ); -f.drop(); - -f.ensureIndex( { a: 1, b: 1 } ); -assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] ); -assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] ); -assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][0] ); -assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.a[0][1] ); -assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c ); -assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds.c ); - -start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) ); -start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { 
a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); -start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true ); -start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) ); -end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) ); -end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); -end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true ); -end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) ); - -start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) ); -end( { a: "{", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) ); - -start( { a: "az", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) ); -end( { a: "a{", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) ); - -both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { a: 1, b: 1 } ) ); - -both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).hint( { a: 1, b: 1 } ) ); -both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) ); - -f.drop(); -f.ensureIndex( { b: 1, a: 1 } ); -both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { b: 1, a: 1 } ) ); diff --git a/jstests/index8.js b/jstests/index8.js deleted file mode 100644 index 719ad2dd2cb..00000000000 --- a/jstests/index8.js +++ /dev/null @@ -1,62 +0,0 @@ -// Test key uniqueness - -t = db.jstests_index8; -t.drop(); - -t.ensureIndex( { a: 1 } ); -t.ensureIndex( { b: 1 }, true ); -t.ensureIndex( { c: 1 }, [ false, "cIndex" ] ); - -checkIndexes = function( num ) { -// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() ); - indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } ).toArray(); - var start = 0; - if ( indexes[0].name == "_id_" ) - start = 1; - assert( !indexes[ start ].unique , "A" + num ); 
- assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) ); - assert( !indexes[ start + 2 ].unique , "C" + num ); - assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num ); -} - -checkIndexes( 1 ); - -t.reIndex(); -checkIndexes( 2 ); - -t.save( { a: 2, b: 1 } ); -t.save( { a: 2 } ); -assert.eq( 2, t.find().count() ); - -t.save( { b: 4 } ); -t.save( { b: 4 } ); -assert.eq( 3, t.find().count() ); -assert.eq( 3, t.find().hint( {c:1} ).toArray().length ); -assert.eq( 3, t.find().hint( {b:1} ).toArray().length ); -assert.eq( 3, t.find().hint( {a:1} ).toArray().length ); - -t.drop(); -t.ensureIndex( { a: 1, b: -1 }, true ); -t.save( { a: 2, b: 3 } ); -t.save( { a: 2, b: 3 } ); -t.save( { a: 2, b: 4 } ); -t.save( { a: 1, b: 3 } ); -assert.eq( 3, t.find().count() ); - -t.drop(); -t.ensureIndex( { a: 1 }, true ); -t.save( { a: [ 2, 3 ] } ); -t.save( { a: 2 } ); -assert.eq( 1, t.find().count() ); - -t.drop(); -t.ensureIndex( { a: 1 }, true ); -t.save( { a: 2 } ); -t.save( { a: [ 1, 2, 3 ] } ); -t.save( { a: [ 3, 2, 1 ] } ); -assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length ); -assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length ); - -assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" ); -assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" ); - diff --git a/jstests/index9.js b/jstests/index9.js deleted file mode 100644 index 04b900949ec..00000000000 --- a/jstests/index9.js +++ /dev/null @@ -1,25 +0,0 @@ -t = db.jstests_index9; - -t.drop(); -db.createCollection( "jstests_index9" ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index with default collection" ); -t.drop(); -db.createCollection( "jstests_index9", {autoIndexId: true} ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 1 index if autoIndexId: true" ); - -t.drop(); 
-db.createCollection( "jstests_index9", {autoIndexId:false} ); -assert.eq( 0, db.system.indexes.count( {ns: "test.jstests_index9"} ), "There should be 0 index if autoIndexId: false" ); -t.createIndex( { _id:1 } ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); -t.createIndex( { _id:1 } ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); - -t.drop(); -t.createIndex( { _id:1 } ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); - -t.drop(); -t.save( {a:1} ); -t.createIndex( { _id:1 } ); -assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) ); diff --git a/jstests/indexOtherNamespace.js b/jstests/indexOtherNamespace.js deleted file mode 100644 index 5bb7355ddb6..00000000000 --- a/jstests/indexOtherNamespace.js +++ /dev/null @@ -1,21 +0,0 @@ -// SERVER-8814: Test that only the system.indexes namespace can be used to build indexes. - -function assertGLENotOK(status) { - assert(status.ok && status.err !== null, - "Expected not-OK status object; found " + tojson(status)); -} - -var otherDB = db.getSiblingDB("indexOtherNS"); -otherDB.dropDatabase(); - -otherDB.foo.insert({a:1}) -assert.eq(1, otherDB.system.indexes.count()); -assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor); - -otherDB.randomNS.system.indexes.insert({ns:"indexOtherNS.foo", key:{a:1}, name:"a_1"}); -assertGLENotOK(otherDB.getLastErrorObj()); -// Assert that index didn't actually get built -assert.eq(1, otherDB.system.indexes.count()); -assert.eq(null, otherDB.system.namespaces.findOne({name : "indexOtherNS.foo.$a_1"})); -assert.eq("BasicCursor", otherDB.foo.find({a:1}).explain().cursor); -otherDB.dropDatabase(); diff --git a/jstests/indexStatsCommand.js b/jstests/indexStatsCommand.js deleted file mode 100644 index 9c055e37e26..00000000000 --- a/jstests/indexStatsCommand.js +++ /dev/null @@ -1,88 +0,0 @@ -db.jstests_commands.drop(); -db.createCollection("jstests_commands"); - -t = db.jstests_commands; - 
-for (var i = 0; i < 3000; ++i) { - t.insert({i: i, d: i % 13}); -} - -function textWithIndexVersion(version) { - var indexName = 'test_d_' + version; - t.ensureIndex({d: 1}, {v: version, name: indexName}); - - var result = t.indexStats({index: indexName}); - if (result["bad cmd"]) { - print("storageDetails command not available: skipping"); - return; - } - - assert.commandWorked(result); - - assert(result.index === indexName); - assert(result.isIdIndex === false); - assert(isObject(result.keyPattern)); - assert.neq(result.keyPattern, null); - assert(isString(result.storageNs)); - assert(isNumber(result.bucketBodyBytes)); - assert.eq(result.depth, 1); - assert(isObject(result.overall)); - assert.neq(result.overall, null); - - function checkStats(data) { - assert(data.count instanceof NumberLong); - assert(isNumber(data.mean)); - assert(isNumber(data.stddev)); - assert(isNumber(data.min)); - assert(isNumber(data.max)); - } - - function checkAreaStats(data) { - assert(isNumber(data.numBuckets)); - - assert(isObject(data.keyCount)); - assert.neq(data.keyCount, null); - checkStats(data.keyCount); - - assert(isObject(data.usedKeyCount)); - assert.neq(data.usedKeyCount, null); - checkStats(data.usedKeyCount); - - assert(isObject(data.bsonRatio)); - assert.neq(data.bsonRatio, null); - checkStats(data.bsonRatio); - - assert(isObject(data.keyNodeRatio)); - assert.neq(data.keyNodeRatio, null); - checkStats(data.keyNodeRatio); - - assert(isObject(data.fillRatio)); - assert.neq(data.fillRatio, null); - checkStats(data.fillRatio); - } - - assert(isObject(result.overall)); - checkAreaStats(result.overall); - - assert(result.perLevel instanceof Array); - for (var i = 0; i < result.perLevel.length; ++i) { - assert(isObject(result.perLevel[i])); - checkAreaStats(result.perLevel[i]); - } - - result = t.indexStats(); - assert.commandFailed(result); - assert(result.errmsg.match(/index name is required/)); - - result = t.indexStats({index: "nonexistent"}) - 
assert.commandFailed(result); - assert(result.errmsg.match(/index does not exist/)); - - result = t.indexStats({index: "_id_", expandNodes: ['string']}) - assert.commandFailed(result); - assert(result.errmsg.match(/expandNodes.*numbers/)); - - t.dropIndex(indexName); -} - -[0, 1].map(textWithIndexVersion); diff --git a/jstests/index_arr1.js b/jstests/index_arr1.js deleted file mode 100644 index d35cb80a83f..00000000000 --- a/jstests/index_arr1.js +++ /dev/null @@ -1,23 +0,0 @@ - -t = db.index_arr1 -t.drop() - -t.insert( { _id : 1 , a : 5 , b : [ { x : 1 } ] } ) -t.insert( { _id : 2 , a : 5 , b : [] } ) -t.insert( { _id : 3 , a : 5 } ) - -assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A1" ) - -t.ensureIndex( { a : 1 , "b.x" : 1 } ) - -//t.find().sort( { a : 1 } )._addSpecial( "$returnKey" , 1 ).forEach( printjson ) -//t.find( { a : 5 } ).forEach( printjson ) - -assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A2" ); // SERVER-1082 - - -assert.eq( 2 , t.getIndexes().length , "B1" ) -t.insert( { _id : 4 , a : 5 , b : [] } ) -t.ensureIndex( { a : 1 , "b.a" : 1 , "b.c" : 1 } ) -assert.eq( 3 , t.getIndexes().length , "B2" ) - diff --git a/jstests/index_arr2.js b/jstests/index_arr2.js deleted file mode 100644 index 321bed8ad03..00000000000 --- a/jstests/index_arr2.js +++ /dev/null @@ -1,51 +0,0 @@ -NUM = 20; -M = 5; - -t = db.jstests_arr2; - -function test( withIndex ){ - t.drop(); - - // insert a bunch of items to force queries to use the index. - newObject = { - _id : 1, - a : [ - { b : { c : 1 } } - ] - } - - now = (new Date()).getTime() / 1000; - for (created = now - NUM; created <= now; created++ ) { - newObject['created'] = created; - t.insert(newObject); - newObject['_id'] ++; - } - - // change the last M items. 
- query = { - 'created' : { '$gte' : now - M } - } - - Z = t.find( query ).count(); - - if ( withIndex ){ - //t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } ) - //t.ensureIndex( { created : -1 } ) - t.ensureIndex( { 'a.b.c' : 1 } , { name : "x" } ) - } - - t.update(query, { '$set' : { "a.0.b.c" : 0 } } , false , true ) - assert.eq( Z , db.getLastErrorObj().n , "num updated withIndex:" + withIndex ); - - // now see how many were actually updated. - query['a.b.c'] = 0; - - count = t.count(query); - - assert.eq( Z , count , "count after withIndex:" + withIndex ); -} - -test( false ) -test( true ); - - diff --git a/jstests/index_big1.js b/jstests/index_big1.js deleted file mode 100644 index 3e53692a2f6..00000000000 --- a/jstests/index_big1.js +++ /dev/null @@ -1,36 +0,0 @@ -// check where "key to big" happens - -t = db.index_big1; - -N = 3200; -t.drop(); - -var s = ""; - -t.ensureIndex( { a : 1 , x : 1 } ) - -for ( i=0; i= 0; i--) { - t.insert({ _id: i, k: keys[i] }); - } - } -} - -var expect = null; - -function check() { - assert(t.validate().valid); - assert.eq( 5, t.count() ); - - var c = t.find({ k: /^a/ }).count(); - assert.eq( 5, c ); -} - -function runTest( order ) { - t.drop(); - t.ensureIndex({ k: 1 }); - doInsert( order ); - check(); // check incremental addition - - t.reIndex(); - check(); // check bottom up - - t.drop(); - doInsert( order ); - assert.eq( 1, t.getIndexes().length ); - t.ensureIndex({ k: 1 }); - assert.eq( 1, t.getIndexes().length ); - - t.drop(); - doInsert( order ); - assert.eq( 1, t.getIndexes().length ); - t.ensureIndex({ k: 1 }, { background: true }); - assert.eq( 1, t.getIndexes().length ); -} - -runTest( 1 ); -runTest( 2 ); diff --git a/jstests/index_bigkeys_update.js b/jstests/index_bigkeys_update.js deleted file mode 100644 index dd428b5fd4b..00000000000 --- a/jstests/index_bigkeys_update.js +++ /dev/null @@ -1,20 +0,0 @@ - -bigString = ""; -while ( bigString.length < 16000 ) - bigString += "."; - -t = db.index_bigkeys_update; 
-t.drop(); - -t.insert( { _id : 0, x : "asd" } ); -t.ensureIndex( { x : 1 } ); - -assert.eq( 1, t.count() ); - -t.update( {} , { $set : { x : bigString } } ); -err = db.getLastErrorObj(); -assert( err.err, err ); - -assert.eq( 1, t.count() ); -assert.eq( "asd", t.findOne().x ); // make sure doc is the old version -assert.eq( "asd", t.findOne( { _id : 0 } ).x ); // make sure doc is the old version diff --git a/jstests/index_bounds_number_edge_cases.js b/jstests/index_bounds_number_edge_cases.js deleted file mode 100644 index 0ab482028ed..00000000000 --- a/jstests/index_bounds_number_edge_cases.js +++ /dev/null @@ -1,50 +0,0 @@ -// end-to-end tests on index bounds for numerical values -// should handle numerical extremes -// such as Number.MAX_VALUE and Infinity - -t = db.indexboundsnumberedgecases; - -t.drop(); - -t.ensureIndex({a: 1}); - -t.save({a: -Infinity}); -t.save({a: -Number.MAX_VALUE}); -t.save({a: 1}); -t.save({a: Number.MAX_VALUE}); -t.save({a: Infinity}); - -// index bounds generated by query planner are -// validated in unit tests - -// lte - -assert.eq(1, t.find({a: {$lte: -Infinity}}).itcount()); -assert.eq(2, t.find({a: {$lte: -Number.MAX_VALUE}}).itcount()); -assert.eq(3, t.find({a: {$lte: 1}}).itcount()); -assert.eq(4, t.find({a: {$lte: Number.MAX_VALUE}}).itcount()); -assert.eq(5, t.find({a: {$lte: Infinity}}).itcount()); - -// lt - -assert.eq(0, t.find({a: {$lt: -Infinity}}).itcount()); -assert.eq(1, t.find({a: {$lt: -Number.MAX_VALUE}}).itcount()); -assert.eq(2, t.find({a: {$lt: 1}}).itcount()); -assert.eq(3, t.find({a: {$lt: Number.MAX_VALUE}}).itcount()); -assert.eq(4, t.find({a: {$lt: Infinity}}).itcount()); - -// gt - -assert.eq(0, t.find({a: {$gt: Infinity}}).itcount()); -assert.eq(1, t.find({a: {$gt: Number.MAX_VALUE}}).itcount()); -assert.eq(2, t.find({a: {$gt: 1}}).itcount()); -assert.eq(3, t.find({a: {$gt: -Number.MAX_VALUE}}).itcount()); -assert.eq(4, t.find({a: {$gt: -Infinity}}).itcount()); - -// gte - -assert.eq(1, t.find({a: {$gte: 
Infinity}}).itcount()); -assert.eq(2, t.find({a: {$gte: Number.MAX_VALUE}}).itcount()); -assert.eq(3, t.find({a: {$gte: 1}}).itcount()); -assert.eq(4, t.find({a: {$gte: -Number.MAX_VALUE}}).itcount()); -assert.eq(5, t.find({a: {$gte: -Infinity}}).itcount()); diff --git a/jstests/index_check1.js b/jstests/index_check1.js deleted file mode 100644 index 7113dff0877..00000000000 --- a/jstests/index_check1.js +++ /dev/null @@ -1,31 +0,0 @@ - -db.somecollection.drop(); - -assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 1); - -db.somecollection.save({a:1}); - -assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 2); - -db.somecollection.ensureIndex({a:1}); - -var z = db.system.namespaces.find({name:/somecollection/}).length(); -assert( z >= 1 , 3 ); - -if( z == 1 ) - print("warning: z==1, should only happen with alternate storage engines"); - -db.somecollection.drop(); - -assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 4); - -db.somecollection.save({a:1}); - -assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 5); - -db.somecollection.ensureIndex({a:1}); - -var x = db.system.namespaces.find({name:/somecollection/}).length(); -assert( x == 2 || x == z, 6); - -assert(db.somecollection.validate().valid, 7); diff --git a/jstests/index_check2.js b/jstests/index_check2.js deleted file mode 100644 index eed3b8e42b7..00000000000 --- a/jstests/index_check2.js +++ /dev/null @@ -1,41 +0,0 @@ - -t = db.index_check2; -t.drop(); - -for ( var i=0; i<1000; i++ ){ - var a = []; - for ( var j=1; j<5; j++ ){ - a.push( "tag" + ( i * j % 50 )); - } - t.save( { num : i , tags : a } ); -} - -q1 = { tags : "tag6" }; -q2 = { tags : "tag12" }; -q3 = { tags : { $all : [ "tag6" , "tag12" ] } } - -assert.eq( 120 , t.find( q1 ).itcount() , "q1 a"); -assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" ); -assert.eq( 60 , t.find( q3 ).itcount() , "q3 a"); - -t.ensureIndex( { tags : 1 } ); - -assert.eq( 120 , t.find( 
q1 ).itcount() , "q1 a"); -assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" ); -assert.eq( 60 , t.find( q3 ).itcount() , "q3 a"); - -assert.eq( "BtreeCursor tags_1" , t.find( q1 ).explain().cursor , "e1" ); -assert.eq( "BtreeCursor tags_1" , t.find( q2 ).explain().cursor , "e2" ); -assert.eq( "BtreeCursor tags_1" , t.find( q3 ).explain().cursor , "e3" ); - -scanned1 = t.find(q1).explain().nscanned; -scanned2 = t.find(q2).explain().nscanned; -scanned3 = t.find(q3).explain().nscanned; - -//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 ); - -// $all should just iterate either of the words -assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" ); - -exp3 = t.find( q3 ).explain(); -assert.eq( exp3.indexBounds.tags[0][0], exp3.indexBounds.tags[0][1], "$all range not a single key" ); diff --git a/jstests/index_check3.js b/jstests/index_check3.js deleted file mode 100644 index 55515aff3f5..00000000000 --- a/jstests/index_check3.js +++ /dev/null @@ -1,63 +0,0 @@ - - -t = db.index_check3; -t.drop(); - - - -t.save( { a : 1 } ); -t.save( { a : 2 } ); -t.save( { a : 3 } ); -t.save( { a : "z" } ); - -assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" ); -assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" ); - -t.ensureIndex( { a : 1 } ); - -assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" ); -assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" ); - -t.drop(); - -for ( var i=0; i<100; i++ ){ - var o = { i : i }; - if ( i % 2 == 0 ) - o.foo = i; - t.save( o ); -} - -t.ensureIndex( { foo : 1 } ); - -//printjson( t.find( { foo : { $lt : 50 } } ).explain() ); -assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" ); -//printjson( t.find( { foo : { $gt : 50 } } ).explain() ); -assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" ); - - -t.drop(); -t.save( {i:'a'} ); -for( var i=0; i < 10; ++i ) { - t.save( {} ); -} - 
-t.ensureIndex( { i : 1 } ); - -//printjson( t.find( { i : { $lte : 'a' } } ).explain() ); -assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" ); -//printjson( t.find( { i : { $gte : 'a' } } ).explain() ); -// bug SERVER-99 -assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" ); -assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).count() , "gte a" ); -assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" ); -assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" ); -assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" ); - -t.save( { i : "b" } ); - -assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" ); -assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" ); -assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" ); -assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" ); -assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" ); -assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" ); diff --git a/jstests/index_check5.js b/jstests/index_check5.js deleted file mode 100644 index eabb929749f..00000000000 --- a/jstests/index_check5.js +++ /dev/null @@ -1,17 +0,0 @@ - -t = db.index_check5 -t.drop(); - -t.save( { "name" : "Player1" , - "scores" : [{"level" : 1 , "score" : 100}, - {"level" : 2 , "score" : 50}], - "total" : 150 } ); -t.save( { "name" : "Player2" , - "total" : 90 , - "scores" : [ {"level" : 1 , "score" : 90}, - {"level" : 2 , "score" : 0} ] - } ); - -assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" ); -t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } ); -assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" ); diff --git a/jstests/index_check6.js b/jstests/index_check6.js 
deleted file mode 100644 index be395fb3d2e..00000000000 --- a/jstests/index_check6.js +++ /dev/null @@ -1,82 +0,0 @@ - -t = db.index_check6; -t.drop(); - -t.ensureIndex( { age : 1 , rating : 1 } ); - -for ( var age=10; age<50; age++ ){ - for ( var rating=0; rating<10; rating++ ){ - t.save( { age : age , rating : rating } ); - } -} - -assert.eq( 10 , t.find( { age : 30 } ).explain().nscanned , "A" ); -assert.eq( 20 , t.find( { age : { $gte : 29 , $lte : 30 } } ).explain().nscanned , "B" ); -assert.eq( 18 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,9] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C1" ); -assert.eq( 23 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C2" ); -assert.eq( 28 , t.find( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [1,8] } } ).hint( {age:1,rating:1} ).explain().nscanned , "C3" ); - -assert.eq( 4 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : 5 } ).hint( {age:1,rating:1} ).explain().nscanned , "C" ); // SERVER-371 -assert.eq( 6 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } } ).hint( {age:1,rating:1} ).explain().nscanned , "D" ); // SERVER-371 - -assert.eq.automsg( "2", "t.find( { age:30, rating:{ $gte:4, $lte:5} } ).explain().nscanned" ); - -t.drop(); - -for ( var a=1; a<10; a++ ){ - for ( var b=0; b<10; b++ ){ - for ( var c=0; c<10; c++ ) { - t.save( { a:a, b:b, c:c } ); - } - } -} - -function doQuery( count, query, sort, index ) { - var nscanned = t.find( query ).hint( index ).sort( sort ).explain().nscanned; - assert(Math.abs(count - nscanned) <= 2); -} - -function doTest( sort, index ) { - doQuery( 1, { a:5, b:5, c:5 }, sort, index ); - doQuery( 2, { a:5, b:5, c:{$gte:5,$lte:6} }, sort, index ); - doQuery( 1, { a:5, b:5, c:{$gte:5.5,$lte:6} }, sort, index ); - doQuery( 1, { a:5, b:5, c:{$gte:5,$lte:5.5} }, sort, index ); - doQuery( 3, { a:5, b:5, c:{$gte:5,$lte:7} }, sort, index ); - doQuery( 4, { 
a:5, b:{$gte:5,$lte:6}, c:5 }, sort, index ); - if ( sort.b > 0 ) { - doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index ); - doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index ); - } else { - doQuery( 2, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index ); - doQuery( 2, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index ); - } - doQuery( 7, { a:5, b:{$gte:5,$lte:7}, c:5 }, sort, index ); - doQuery( 4, { a:{$gte:5,$lte:6}, b:5, c:5 }, sort, index ); - if ( sort.a > 0 ) { - doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index ); - doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index ); - doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); - } else { - doQuery( 2, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index ); - doQuery( 2, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index ); - doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); - } - doQuery( 7, { a:{$gte:5,$lte:7}, b:5, c:5 }, sort, index ); - doQuery( 6, { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index ); - doQuery( 6, { a:5, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index ); - doQuery( 10, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:5 }, sort, index ); - doQuery( 14, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index ); -} - -for ( var a = -1; a <= 1; a += 2 ) { - for( var b = -1; b <= 1; b += 2 ) { - for( var c = -1; c <= 1; c += 2 ) { - t.dropIndexes(); - var spec = {a:a,b:b,c:c}; - t.ensureIndex( spec ); - doTest( spec, spec ); - doTest( {a:-a,b:-b,c:-c}, spec ); - } - } -} - diff --git a/jstests/index_check7.js b/jstests/index_check7.js deleted file mode 100644 index 1d0aaebba35..00000000000 --- a/jstests/index_check7.js +++ /dev/null @@ -1,15 +0,0 @@ - -t = db.index_check7 -t.drop() - -for ( var i=0; i<100; i++ ) - t.save( { x : i } ) - -t.ensureIndex( { x : 1 } ) -assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" ) - -t.ensureIndex( { x : -1 } ) -assert.eq( 1 , t.find( { x : 27 } 
).explain().nscanned , "B" ) - -assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" ); - diff --git a/jstests/index_check8.js b/jstests/index_check8.js deleted file mode 100644 index 1964ecbe7fc..00000000000 --- a/jstests/index_check8.js +++ /dev/null @@ -1,21 +0,0 @@ - -t = db.index_check8 -t.drop(); - -t.insert( { a : 1 , b : 1 , c : 1 , d : 1 , e : 1 } ) -t.ensureIndex( { a : 1 , b : 1 , c : 1 } ) -t.ensureIndex({ a: 1, b: 1, d: 1, e: 1 }) - -// this block could be added to many tests in theory... -if ((new Date()) % 10 == 0) { - var coll = t.toString().substring(db.toString().length + 1); - print("compacting " + coll + " before continuing testing"); - // don't check return code - false for mongos - print("ok: " + db.runCommand({ compact: coll, dev: true })); -} - -x = t.find( { a : 1 , b : 1 , d : 1 } ).sort( { e : 1 } ).explain() -assert( ! x.scanAndOrder , "A : " + tojson( x ) ) - -x = t.find( { a : 1 , b : 1 , c : 1 , d : 1 } ).sort( { e : 1 } ).explain() -//assert( ! 
x.scanAndOrder , "B : " + tojson( x ) ) diff --git a/jstests/index_diag.js b/jstests/index_diag.js deleted file mode 100644 index 21840682e7f..00000000000 --- a/jstests/index_diag.js +++ /dev/null @@ -1,50 +0,0 @@ - -t = db.index_diag -t.drop(); - -t.ensureIndex( { x : 1 } ); - -all = [] -ids = [] -xs = [] - -function r( a ){ - var n = [] - for ( var x=a.length-1; x>=0; x-- ) - n.push( a[x] ); - return n; -} - -for ( i=1; i<4; i++ ){ - o = { _id : i , x : -i } - t.insert( o ); - all.push( o ); - ids.push( { _id : i } ); - xs.push( { x : -i } ); -} - -assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" ); -assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" ); - -assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" ); -assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" ); - -assert.eq( ids , t.find().sort( { _id : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B1" ) -assert.eq( r( ids ) , t.find().sort( { _id : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B2" ) -assert.eq( xs , t.find().sort( { x : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B3" ) -assert.eq( r( xs ) , t.find().sort( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" ) - -assert.eq( r( xs ) , t.find().hint( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" ) - -// SERVER-4981 -t.ensureIndex( { _id : 1 , x : 1 } ); -assert.eq( all , - t.find().hint( { _id : 1 , x : 1 } )._addSpecial( "$returnKey" , true ).toArray() - ) -assert.eq( r( all ) , - t.find().hint( { _id : 1 , x : 1 } ).sort( { x : 1 } ) - ._addSpecial( "$returnKey" , true ).toArray() - ) - -assert.eq( [ {} , {} , {} ], - t.find().hint( { $natural : 1 } )._addSpecial( "$returnKey" , true ).toArray() ) diff --git a/jstests/index_elemmatch1.js b/jstests/index_elemmatch1.js deleted file mode 100644 index 9170ce66018..00000000000 --- a/jstests/index_elemmatch1.js +++ /dev/null @@ -1,41 +0,0 @@ - -t = db.index_elemmatch1 -t.drop() - -x = 0 
-y = 0 -for ( a=0; a<100; a++ ){ - for ( b=0; b<100; b++ ){ - t.insert( { a : a , b : b % 10 , arr : [ { x : x++ % 10 , y : y++ % 10 } ] } ) - } -} - -t.ensureIndex( { a : 1 , b : 1 } ) -t.ensureIndex( { "arr.x" : 1 , a : 1 } ) - -assert.eq( 100 , t.find( { a : 55 } ).itcount() , "A1" ); -assert.eq( 10 , t.find( { a : 55 , b : 7 } ).itcount() , "A2" ); - -q = { a : 55 , b : { $in : [ 1 , 5 , 8 ] } } -assert.eq( 30 , t.find( q ).itcount() , "A3" ) - -q.arr = { $elemMatch : { x : 5 , y : 5 } } -assert.eq( 10 , t.find( q ).itcount() , "A4" ) - -function nscannedForCursor( explain, cursor ) { - plans = explain.allPlans; - for( i in plans ) { - if ( plans[ i ].cursor == cursor ) { - return plans[ i ].nscanned; - } - } - return -1; -} - -assert.eq( t.find(q).itcount(), - nscannedForCursor( t.find(q).explain(true), 'BtreeCursor arr.x_1_a_1' ), "A5" ); - -printjson(t.find(q).explain()); -print("Num results:"); -assert.eq(10, t.find(q).itcount()); -printjson(t.find(q).itcount()); diff --git a/jstests/index_filter_commands.js b/jstests/index_filter_commands.js deleted file mode 100644 index cec2437fff0..00000000000 --- a/jstests/index_filter_commands.js +++ /dev/null @@ -1,167 +0,0 @@ -/** - * Index Filter commands - * - * Commands: - * - planCacheListFilters - * Displays index filters for all query shapes in a collection. - * - * - planCacheClearFilters - * Clears index filter for a single query shape or, - * if the query shape is omitted, all filters for the collection. - * - * - planCacheSetFilter - * Sets index filter for a query shape. Overrides existing filter. - * - * Not a lot of data access in this test suite. Hint commands - * manage a non-persistent mapping in the server of - * query shape to list of index specs. - * - * Only time we might need to execute a query is to check the plan - * cache state. We would do this with the planCacheListPlans command - * on the same query shape with the index filters. 
- * - */ - -var t = db.jstests_index_filter_commands; - -t.drop(); - -t.save({a: 1}); - -// Add 2 indexes. -// 1st index is more efficient. -// 2nd and 3rd indexes will be used to test index filters. -var indexA1 = {a: 1}; -var indexA1B1 = {a: 1, b: 1}; -var indexA1C1 = {a: 1, c: 1}; -t.ensureIndex(indexA1); -t.ensureIndex(indexA1B1); -t.ensureIndex(indexA1C1); - -var queryA1 = {a: 1}; -var projectionA1 = {_id: 0, a: 1}; -var sortA1 = {a: -1}; - -// -// Tests for planCacheListFilters, planCacheClearFilters, planCacheSetFilter -// - -// Utility function to list index filters. -function getFilters() { - var res = t.runCommand('planCacheListFilters'); - print('planCacheListFilters() = ' + tojson(res)); - assert.commandWorked(res, 'planCacheListFilters failed'); - assert(res.hasOwnProperty('filters'), 'filters missing from planCacheListFilters result'); - return res.filters; - -} - -// Check if key is in plan cache. -function planCacheContains(shape) { - var res = t.runCommand('planCacheListPlans', shape); - return res.ok; -} - -// Utility function to list plans for a query. -function getPlans(shape) { - var res = t.runCommand('planCacheListPlans', shape); - assert.commandWorked(res, 'planCacheListPlans(' + tojson(shape, '', true) + ' failed'); - assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' + - tojson(shape, '', true) + ') result'); - return res.plans; -} - -// It is an error to retrieve index filters on a non-existent collection. -var missingCollection = db.jstests_index_filter_commands_missing; -missingCollection.drop(); -assert.commandFailed(missingCollection.runCommand('planCacheListFilters')); - -// Retrieve index filters from an empty test collection. -var filters = getFilters(); -assert.eq(0, filters.length, 'unexpected number of index filters in planCacheListFilters result'); - -// Check details of winning plan in plan cache before setting index filter. 
-assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count'); -var shape = {query: queryA1, sort: sortA1, projection: projectionA1}; -var planBeforeSetFilter = getPlans(shape)[0]; -print('Winning plan (before setting index filters) = ' + tojson(planBeforeSetFilter)); -// Check filterSet field in plan details -assert.eq(false, planBeforeSetFilter.filterSet, 'missing or invalid filterSet field in plan details'); - -// Add index filters for simple query. -assert.commandWorked(t.runCommand('planCacheSetFilter', - {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]})); -filters = getFilters(); -assert.eq(1, filters.length, 'no change in query settings after successfully setting index filters'); -assert.eq(queryA1, filters[0].query, 'unexpected query in filters'); -assert.eq(sortA1, filters[0].sort, 'unexpected sort in filters'); -assert.eq(projectionA1, filters[0].projection, 'unexpected projection in filters'); -assert.eq(2, filters[0].indexes.length, 'unexpected number of indexes in filters'); -assert.eq(indexA1B1, filters[0].indexes[0], 'unexpected first index'); -assert.eq(indexA1C1, filters[0].indexes[1], 'unexpected first index'); - -// Plans for query shape should be removed after setting index filter. -assert(!planCacheContains(shape), 'plan cache for query shape not flushed after updating filter'); - -// Check details of winning plan in plan cache after setting filter and re-executing query. -assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count'); -planAfterSetFilter = getPlans(shape)[0]; -print('Winning plan (after setting index filter) = ' + tojson(planAfterSetFilter)); -// Check filterSet field in plan details -assert.eq(true, planAfterSetFilter.filterSet, 'missing or invalid filterSet field in plan details'); - -// Execute query with cursor.hint(). Check that user-provided hint is overridden. 
-// Applying the index filters will remove the user requested index from the list -// of indexes provided to the planner. -// If the planner still tries to use the user hint, we will get a 'bad hint' error. -t.find(queryA1, projectionA1).sort(sortA1).hint(indexA1).itcount(); - -// Clear filters -assert.commandWorked(t.runCommand('planCacheClearFilters')); -filters = getFilters(); -assert.eq(0, filters.length, 'filters not cleared after successful planCacheClearFilters command'); - -// Plans should be removed after clearing filters -assert(!planCacheContains(shape), 'plan cache for query shape not flushed after clearing filters'); - -print('Plan details before setting filter = ' + tojson(planBeforeSetFilter.details, '', true)); -print('Plan details after setting filter = ' + tojson(planAfterSetFilter.details, '', true)); - -// -// explain.filterSet -// cursor.explain() should indicate if index filter has been applied. -// The following 3 runners should always provide a value for 'filterSet': -// - SingleSolutionRunner -// - MultiPlanRunner -// - CachedPlanRuner -// - -// No filter set. - -t.getPlanCache().clear(); -// SingleSolutionRunner -assert.eq(false, t.find({z: 1}).explain().filterSet, - 'missing or invalid filterSet field in SingleSolutionRunner explain'); -// MultiPlanRunner -assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, - 'missing or invalid filterSet field in MultiPlanRunner explain'); -// CachedPlanRunner -assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, - 'missing or invalid filterSet field in CachedPlanRunner explain'); - -// Add index filter. -assert.commandWorked(t.runCommand('planCacheSetFilter', - {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]})); -// Index filter with non-existent index key pattern to force use of single solution runner. 
-assert.commandWorked(t.runCommand('planCacheSetFilter', {query: {z: 1}, indexes: [{z: 1}]})); - -t.getPlanCache().clear(); -// SingleSolutionRunner -assert.eq(true, t.find({z: 1}).explain().filterSet, - 'missing or invalid filterSet field in SingleSolutionRunner explain'); -// MultiPlanRunner -assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, - 'missing or invalid filterSet field in MultiPlanRunner explain'); -// CachedPlanRunner -assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1).explain().filterSet, - 'missing or invalid filterSet field in CachedPlanRunner explain'); diff --git a/jstests/index_many.js b/jstests/index_many.js deleted file mode 100644 index 46705a20470..00000000000 --- a/jstests/index_many.js +++ /dev/null @@ -1,51 +0,0 @@ -/* test using lots of indexes on one collection */ - -t = db.many; - -function f() { - - t.drop(); - db.many2.drop(); - - t.save({ x: 9, y : 99 }); - t.save({ x: 19, y : 99 }); - - x = 2; - while (x < 70) { - patt = {}; - patt[x] = 1; - if (x == 20) - patt = { x: 1 }; - if (x == 64) - patt = { y: 1 }; - t.ensureIndex(patt); - x++; - } - - // print( tojson(db.getLastErrorObj()) ); - assert(db.getLastError(), "should have got an error 'too many indexes'"); - - // 40 is the limit currently - lim = t.getIndexes().length; - if (lim != 64) { - print("# of indexes should be 64 but is : " + lim); - return; - } - assert(lim == 64, "not 64 indexes"); - - assert(t.find({ x: 9 }).length() == 1, "b"); - assert(t.find({ x: 9 }).explain().cursor.match(/Btree/), "not using index?"); - - assert(t.find({ y: 99 }).length() == 2, "y idx"); - assert(t.find({ y: 99 }).explain().cursor.match(/Btree/), "not using y index?"); - - /* check that renamecollection remaps all the indexes right */ - assert(t.renameCollection("many2").ok, "rename failed"); - assert(t.find({ x: 9 }).length() == 0, "many2a"); - assert(db.many2.find({ x: 9 }).length() == 1, "many2b"); - assert(t.find({ y: 99 }).length() == 0, "many2c"); - 
assert(db.many2.find({ y: 99 }).length() == 2, "many2d"); - -} - -f(); diff --git a/jstests/index_many2.js b/jstests/index_many2.js deleted file mode 100644 index f113b8b87ed..00000000000 --- a/jstests/index_many2.js +++ /dev/null @@ -1,31 +0,0 @@ - -t = db.index_many2; -t.drop() - -t.save( { x : 1 } ) - -assert.eq( 1 , t.getIndexKeys().length , "A1" ) - -function make( n ){ - var x = {} - x["x"+n] = 1; - return x; -} - -for ( i=1; i<1000; i++ ){ - t.ensureIndex( make(i) ); -} - -assert.eq( 64 , t.getIndexKeys().length , "A2" ) - - -num = t.getIndexKeys().length - -t.dropIndex( make(num-1) ) -assert.eq( num - 1 , t.getIndexKeys().length , "B0" ) - -t.ensureIndex( { z : 1 } ) -assert.eq( num , t.getIndexKeys().length , "B1" ) - -t.dropIndex( "*" ); -assert.eq( 1 , t.getIndexKeys().length , "C1" ) diff --git a/jstests/index_sparse1.js b/jstests/index_sparse1.js deleted file mode 100644 index eab3c7fec95..00000000000 --- a/jstests/index_sparse1.js +++ /dev/null @@ -1,46 +0,0 @@ - -t = db.index_sparse1; -t.drop(); - -t.insert( { _id : 1 , x : 1 } ) -t.insert( { _id : 2 , x : 2 } ) -t.insert( { _id : 3 , x : 2 } ) -t.insert( { _id : 4 } ) -t.insert( { _id : 5 } ) - -assert.eq( 5 , t.count() , "A1" ) -assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "A2" ) - -t.ensureIndex( { x : 1 } ) -assert.eq( 2 , t.getIndexes().length , "B1" ) -assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "B2" ) -t.dropIndex( { x : 1 } ) -assert.eq( 1 , t.getIndexes().length , "B3" ) - -t.ensureIndex( { x : 1 } , { sparse : 1 } ) -assert.eq( 2 , t.getIndexes().length , "C1" ) -assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "C2" ) -t.dropIndex( { x : 1 } ) -assert.eq( 1 , t.getIndexes().length , "C3" ) - -// -- sparse & unique - -t.remove( { _id : 2 } ) - -// test that we can't create a unique index without sparse -t.ensureIndex( { x : 1 } , { unique : 1 } ) -assert( db.getLastError() , "D1" ) -assert.eq( 1 , t.getIndexes().length , "D2" ) - - -t.ensureIndex( { x : 1 } , { unique 
: 1 , sparse : 1 } ) -assert.eq( 2 , t.getIndexes().length , "E1" ) -t.dropIndex( { x : 1 } ) -assert.eq( 1 , t.getIndexes().length , "E3" ) - - -t.insert( { _id : 2 , x : 2 } ) -t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } ) -assert.eq( 1 , t.getIndexes().length , "F1" ) - - diff --git a/jstests/index_sparse2.js b/jstests/index_sparse2.js deleted file mode 100644 index 56a59db3711..00000000000 --- a/jstests/index_sparse2.js +++ /dev/null @@ -1,23 +0,0 @@ -t = db.index_sparse2; -t.drop(); - -t.insert( { _id : 1 , x : 1 , y : 1 } ) -t.insert( { _id : 2 , x : 2 } ) -t.insert( { _id : 3 } ) - -t.ensureIndex( { x : 1 , y : 1 } ) -assert.eq( 2 , t.getIndexes().length , "A1" ) -assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "A2 count()" ) -assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "A2 itcount()" ) -t.dropIndex( { x : 1 , y : 1 } ) -assert.eq( 1 , t.getIndexes().length , "A3" ) - -t.ensureIndex( { x : 1 , y : 1 } , { sparse : 1 } ) -assert.eq( 2 , t.getIndexes().length , "B1" ) -assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "B2 count()" ) -assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "B2 itcount()" ) -t.dropIndex( { x : 1 , y : 1 } ) -assert.eq( 1 , t.getIndexes().length , "B3" ) - - - diff --git a/jstests/indexa.js b/jstests/indexa.js deleted file mode 100644 index 7602183adb2..00000000000 --- a/jstests/indexa.js +++ /dev/null @@ -1,22 +0,0 @@ -// unique index constraint test for updates -// case where object doesn't grow tested here - -t = db.indexa; -t.drop(); - -t.ensureIndex( { x:1 }, true ); - -t.insert( { 'x':'A' } ); -t.insert( { 'x':'B' } ); -t.insert( { 'x':'A' } ); - -assert.eq( 2 , t.count() , "indexa 1" ); - -t.update( {x:'B'}, { x:'A' } ); - -a = t.find().toArray(); -u = Array.unique( a.map( function(z){ return z.x } ) ); -assert.eq( 2 , t.count() , "indexa 2" ); - -assert( a.length == u.length , "unique index update is broken" ); - diff --git a/jstests/indexapi.js 
b/jstests/indexapi.js deleted file mode 100644 index 7bc5d45acd3..00000000000 --- a/jstests/indexapi.js +++ /dev/null @@ -1,40 +0,0 @@ - -t = db.indexapi; -t.drop(); - -key = { x : 1 }; - -c = { ns : t._fullName , key : key , name : t._genIndexName( key ) }; -assert.eq( c , t._indexSpec( { x : 1 } ) , "A" ); - -c.name = "bob"; -assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" ); - -c.name = t._genIndexName( key ); -assert.eq( c , t._indexSpec( { x : 1 } ) , "C" ); - -c.unique = true; -assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" ); -assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" ); -assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" ); - -c.dropDups = true; -assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" ); -assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "F" ); - -t.ensureIndex( { x : 1 } , { unique : true } ); -idx = t.getIndexes(); -assert.eq( 2 , idx.length , "M1" ); -assert.eq( key , idx[1].key , "M2" ); -assert( idx[1].unique , "M3" ); - -t.drop(); -t.ensureIndex( { x : 1 } , { unique : 1 } ); -idx = t.getIndexes(); -assert.eq( 2 , idx.length , "M1" ); -assert.eq( key , idx[1].key , "M2" ); -assert( idx[1].unique , "M3" ); -//printjson( idx ); - -db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } ); -assert( db.getLastError() != null , "Z1" ); diff --git a/jstests/indexb.js b/jstests/indexb.js deleted file mode 100644 index d7d2e8c9f05..00000000000 --- a/jstests/indexb.js +++ /dev/null @@ -1,29 +0,0 @@ -// unique index test for a case where the object grows -// and must move - -// see indexa.js for the test case for an update with dup id check -// when it doesn't move - - -t = db.indexb; -t.drop(); -t.ensureIndex({a:1},true); - -t.insert({a:1}); - -x = { a : 2 }; -t.save(x); - -{ - - assert( t.count() == 2, "count wrong B"); - - x.a = 1; - x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - t.save(x); // should fail, not 
unique. - - assert( t.count() == 2,"count wrong" ); - assert( t.find({a:1}).count() == 1,"bfail1" ); - assert( t.find({a:2}).count() == 1,"bfail2" ); - -} diff --git a/jstests/indexbindata.js b/jstests/indexbindata.js deleted file mode 100755 index e69de29bb2d..00000000000 diff --git a/jstests/indexc.js b/jstests/indexc.js deleted file mode 100644 index b099e2d2823..00000000000 --- a/jstests/indexc.js +++ /dev/null @@ -1,20 +0,0 @@ - -t = db.indexc; -t.drop(); - -for ( var i=1; i<100; i++ ){ - var d = new Date( ( new Date() ).getTime() + i ); - t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } ); - if ( i == 51 ) - mid = d; -} - -assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" ); -assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" ); - -t.ensureIndex( { ts : 1 , cats : 1 } ); -t.ensureIndex( { cats : 1 } ); - -// multi-key bug was firing here (related to getsetdup()): -assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" ); -assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" ); diff --git a/jstests/indexd.js b/jstests/indexd.js deleted file mode 100644 index 33246ad9812..00000000000 --- a/jstests/indexd.js +++ /dev/null @@ -1,10 +0,0 @@ - -t = db.indexd; -t.drop(); - -t.save( { a : 1 } ); -t.ensureIndex( { a : 1 } ); -assert.throws( function(){ db.indexd.$_id_.drop(); } ); -assert( t.drop() ); - -//db.indexd.$_id_.remove({}); diff --git a/jstests/indexe.js b/jstests/indexe.js deleted file mode 100644 index 213f7c74cf0..00000000000 --- a/jstests/indexe.js +++ /dev/null @@ -1,22 +0,0 @@ - -t = db.indexe; -t.drop(); - -num = 100000; - -for ( i=0; i4 is worse than >5 -// assert.eq( 5, t.find( {a:{$gt:4,$gte:5}} ).explain().indexBounds.a[ 0 ][ 0 ] ); - -printjson(t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain()) - -// SERVER-12281: We should know that in[1,2] is better than in[1,2,3]. 
-// assert.eq( [[1,1],[2,2]], t.find( {a:{$in:[1,2,3]},$or:[{a:{$in:[1,2]}}]} ).explain().indexBounds.a ); diff --git a/jstests/indexr.js b/jstests/indexr.js deleted file mode 100644 index c3eecd045c8..00000000000 --- a/jstests/indexr.js +++ /dev/null @@ -1,44 +0,0 @@ -// Check multikey index cases with parallel nested fields SERVER-958. - -t = db.jstests_indexr; -t.drop(); - -// Check without indexes. -t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } ); -assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); - -// Check with single key indexes. -t.remove({}); -t.ensureIndex( {'a.b':1,'a.c':1} ); -t.ensureIndex( {a:1,'a.c':1} ); -assert.eq( 0, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 0, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); -assert.eq( 4, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); - -t.save( { a: { b: 3, c: 3 } } ); -assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 1, t.count( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 4, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); -assert.eq( 4, t.find( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'][0][1] ); - -// Check with multikey indexes. -t.remove({}); -t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } ); - -assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) ); -assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) ); -assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] ); -assert.eq( [[{$minElement:1},{$maxElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).explain().indexBounds['a.c'] ); - -// Check reverse direction. 
-assert.eq( 1, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).itcount() ); -assert.eq( 1, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).itcount() ); - -assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).explain().indexBounds['a.c'] ); -assert.eq( [[{$maxElement:1},{$minElement:1}]], t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).explain().indexBounds['a.c'] ); - -// Check second field is constrained if first is not. -assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {'a.b':1,'a.c':1} ).itcount() ); -assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {a:1,'a.c':1} ).itcount() ); diff --git a/jstests/indexs.js b/jstests/indexs.js deleted file mode 100644 index 609f912affe..00000000000 --- a/jstests/indexs.js +++ /dev/null @@ -1,21 +0,0 @@ -// Test index key generation issue with parent and nested fields in same index and array containing subobject SERVER-3005. - -t = db.jstests_indexs; - -t.drop(); -t.ensureIndex( {a:1} ); -t.save( { a: [ { b: 3 } ] } ); -assert.eq( 1, t.count( { a:{ b:3 } } ) ); - -t.drop(); -t.ensureIndex( {a:1,'a.b':1} ); -t.save( { a: { b: 3 } } ); -assert.eq( 1, t.count( { a:{ b:3 } } ) ); -ib = t.find( { a:{ b:3 } } ).explain().indexBounds; - -t.drop(); -t.ensureIndex( {a:1,'a.b':1} ); -t.save( { a: [ { b: 3 } ] } ); -assert.eq( ib, t.find( { a:{ b:3 } } ).explain().indexBounds ); -assert.eq( 1, t.find( { a:{ b:3 } } ).explain().nscanned ); -assert.eq( 1, t.count( { a:{ b:3 } } ) ); diff --git a/jstests/indext.js b/jstests/indext.js deleted file mode 100644 index e418dc2e959..00000000000 --- a/jstests/indext.js +++ /dev/null @@ -1,21 +0,0 @@ -// Sparse indexes with arrays SERVER-3216 - -t = db.jstests_indext; -t.drop(); - -t.ensureIndex( {'a.b':1}, {sparse:true} ); -t.save( {a:[]} ); -t.save( {a:1} ); -assert.eq( 0, t.find().hint( {'a.b':1} ).itcount() ); -assert.eq( 0, t.find().hint( {'a.b':1} ).explain().nscanned ); - 
-t.ensureIndex( {'a.b':1,'a.c':1}, {sparse:true} ); -t.save( {a:[]} ); -t.save( {a:1} ); -assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).itcount() ); -assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned ); - -t.save( {a:[{b:1}]} ); -t.save( {a:1} ); -assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).itcount() ); -assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).explain().nscanned ); diff --git a/jstests/indexu.js b/jstests/indexu.js deleted file mode 100644 index de0d9831dab..00000000000 --- a/jstests/indexu.js +++ /dev/null @@ -1,137 +0,0 @@ -// Test index key generation with duplicate values addressed by array index and -// object field. SERVER-2902 - -t = db.jstests_indexu; -t.drop(); - -var dupDoc = {a:[{'0':1}]}; // There are two 'a.0' fields in this doc. -var dupDoc2 = {a:[{'1':1},'c']}; -var noDupDoc = {a:[{'1':1}]}; - -// Test that we can't index dupDoc. -t.save( dupDoc ); -assert( !db.getLastError() ); -t.ensureIndex( {'a.0':1} ); -assert( db.getLastError() ); - -t.remove({}); -t.ensureIndex( {'a.0':1} ); -assert( !db.getLastError() ); -t.save( dupDoc ); -assert( db.getLastError() ); - -// Test that we can't index dupDoc2. -t.drop(); -t.save( dupDoc2 ); -assert( !db.getLastError() ); -t.ensureIndex( {'a.1':1} ); -assert( db.getLastError() ); - -t.remove({}); -t.ensureIndex( {'a.1':1} ); -assert( !db.getLastError() ); -t.save( dupDoc2 ); -assert( db.getLastError() ); - -// Test that we can index dupDoc with a different index. -t.drop(); -t.ensureIndex( {'a.b':1} ); -t.save( dupDoc ); -assert( !db.getLastError() ); - -// Test number field starting with hyphen. -t.drop(); -t.ensureIndex( {'a.-1':1} ); -t.save( {a:[{'-1':1}]} ); -assert( !db.getLastError() ); - -// Test number field starting with zero. 
-t.drop(); -t.ensureIndex( {'a.00':1} ); -t.save( {a:[{'00':1}]} ); -assert( !db.getLastError() ); - -// Test multiple array indexes -t.drop(); -t.ensureIndex( {'a.0':1,'a.1':1} ); -t.save( {a:[{'1':1}]} ); -assert( !db.getLastError() ); -t.save( {a:[{'1':1},4]} ); -assert( db.getLastError() ); - -// Test that we can index noDupDoc. -t.drop(); -t.save( noDupDoc ); -t.ensureIndex( {'a.0':1} ); -assert( !db.getLastError() ); -t.ensureIndex( {'a.1':1} ); -assert( !db.getLastError() ); - -t.drop(); -t.ensureIndex( {'a.0':1} ); -t.ensureIndex( {'a.1':1} ); -t.save( noDupDoc ); -assert( !db.getLastError() ); - -// Test that we can query noDupDoc. -assert.eq( 1, t.find( {'a.1':1} ).hint( {'a.1':1} ).itcount() ); -assert.eq( 1, t.find( {'a.1':1} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {'a.0':1} ).itcount() ); -assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {$natural:1} ).itcount() ); - -// Check multiple nested array fields. -t.drop(); -t.save( {a:[[1]]} ); -t.ensureIndex( {'a.0.0':1} ); -assert( !db.getLastError() ); -assert.eq( 1, t.find( {'a.0.0':1} ).hint( {$natural:1} ).itcount() ); -assert.eq( 1, t.find( {'a.0.0':1} ).hint( {'a.0.0':1} ).itcount() ); - -// Check where there is a duplicate for a partially addressed field but not for a fully addressed field. -t.drop(); -t.save( {a:[[1],{'0':1}]} ); -t.ensureIndex( {'a.0.0':1} ); -assert( db.getLastError() ); - -// Check where there is a duplicate for a fully addressed field. -t.drop(); -t.save( {a:[[1],{'0':[1]}]} ); -assert( !db.getLastError() ); -t.ensureIndex( {'a.0.0':1} ); -assert( db.getLastError() ); - -// Two ways of addressing parse to an array. -t.drop(); -t.save( {a:[{'0':1}]} ); -t.ensureIndex( {'a.0.0':1} ); -assert( db.getLastError() ); - -// Test several key depths - with same arrays being found. 
-t.drop(); -t.save( {a:[{'0':[{'0':1}]}]} ); -t.ensureIndex( {'a.0.0.0.0.0.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a.0.0.0.0.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a.0.0.0.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a.0.0.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a.0.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a.0':1} ); -assert( db.getLastError() ); -t.ensureIndex( {'a':1} ); -assert( !db.getLastError() ); - -// Two prefixes extract docs, but one terminates extraction before array. -t.drop(); -t.save( {a:[{'0':{'c':[]}}]} ); -t.ensureIndex( {'a.0.c':1} ); -assert( db.getLastError() ); - -t.drop(); -t.save( {a:[[{'b':1}]]} ); -assert.eq( 1, t.find( {'a.0.b':1} ).itcount() ); -t.ensureIndex( {'a.0.b':1} ); -assert.eq( 1, t.find( {'a.0.b':1} ).itcount() ); diff --git a/jstests/indexv.js b/jstests/indexv.js deleted file mode 100644 index 334ec432d74..00000000000 --- a/jstests/indexv.js +++ /dev/null @@ -1,18 +0,0 @@ -// Check null key generation. - -t = db.jstests_indexv; -t.drop(); - -t.ensureIndex( {'a.b':1} ); - -t.save( {a:[{},{b:1}]} ); -var e = t.find( {'a.b':null} ).explain(); -assert.eq( 1, e.n ); -assert.eq( 1, e.nscanned ); - -t.drop(); -t.ensureIndex( {'a.b.c':1} ); -t.save( {a:[{b:[]},{b:{c:1}}]} ); -var e = t.find( {'a.b.c':null} ).explain(); -assert.eq( 0, e.n ); -assert.eq( 1, e.nscanned ); diff --git a/jstests/indexw.js b/jstests/indexw.js deleted file mode 100644 index bd7c75b8b08..00000000000 --- a/jstests/indexw.js +++ /dev/null @@ -1,15 +0,0 @@ -// Check that v0 keys are generated for v0 indexes SERVER-3375 - -t = db.jstests_indexw; -t.drop(); - -t.save( {a:[]} ); -assert.eq( 1, t.count( {a:[]} ) ); -t.ensureIndex( {a:1} ); -assert.eq( 1, t.count( {a:[]} ) ); -t.dropIndexes(); - -// The count result is incorrect - just checking here that v0 key generation is used. -t.ensureIndex( {a:1}, {v:0} ); -// QUERY_MIGRATION: WE GET THIS RIGHT...BY CHANCE? 
-// assert.eq( 0, t.count( {a:[]} ) ); diff --git a/jstests/insert1.js b/jstests/insert1.js deleted file mode 100644 index 7e6b73b6566..00000000000 --- a/jstests/insert1.js +++ /dev/null @@ -1,44 +0,0 @@ -t = db.insert1; -t.drop(); - -o = {a:1}; -t.insert(o); -id = t._lastID -assert.eq(o, {a:1}, "input unchanged 1"); -assert.eq(typeof(id), "object", "1"); -assert.eq(id.constructor, ObjectId, "1"); -assert.eq(t.findOne({_id:id}).a, 1, "find by id 1"); -assert.eq(t.findOne({a:1})._id, id , "find by val 1"); - -o = {a:2, _id:new ObjectId()}; -id1 = o._id -t.insert(o); -id2 = t._lastID -assert.eq(id1, id2, "ids match 2"); -assert.eq(o, {a:2, _id:id1}, "input unchanged 2"); -assert.eq(typeof(id2), "object", "2"); -assert.eq(id2.constructor, ObjectId, "2"); -assert.eq(t.findOne({_id:id1}).a, 2, "find by id 2"); -assert.eq(t.findOne({a:2})._id, id1 , "find by val 2"); - -o = {a:3, _id:"asdf"}; -id1 = o._id -t.insert(o); -id2 = t._lastID -assert.eq(id1, id2, "ids match 3"); -assert.eq(o, {a:3, _id:id1}, "input unchanged 3"); -assert.eq(typeof(id2), "string", "3"); -assert.eq(t.findOne({_id:id1}).a, 3, "find by id 3"); -assert.eq(t.findOne({a:3})._id, id1 , "find by val 3"); - -o = {a:4, _id:null}; -id1 = o._id -t.insert(o); -id2 = t._lastID -assert.eq(id1, id2, "ids match 4"); -assert.eq(o, {a:4, _id:id1}, "input unchanged 4"); -assert.eq(t.findOne({_id:id1}).a, 4, "find by id 4"); -assert.eq(t.findOne({a:4})._id, id1 , "find by val 4"); - -var stats = db.runCommand({ collstats: "insert1" }); -assert(stats.paddingFactor == 1.0); diff --git a/jstests/insert2.js b/jstests/insert2.js deleted file mode 100644 index 9480efeac4d..00000000000 --- a/jstests/insert2.js +++ /dev/null @@ -1,8 +0,0 @@ - -t = db.insert2 -t.drop() - -assert.isnull( t.findOne() , "A" ) -t.insert( { z : 1 , $inc : { x : 1 } } , 0, true ); -assert.isnull( t.findOne() , "B" ) - diff --git a/jstests/insert_id_undefined.js b/jstests/insert_id_undefined.js deleted file mode 100644 index 
945640a815b..00000000000 --- a/jstests/insert_id_undefined.js +++ /dev/null @@ -1,6 +0,0 @@ -// ensure a document with _id undefined cannot be saved -t = db.insert_id_undefined; -t.drop(); -t.insert({_id:undefined}); -db.getLastError(); -assert.eq(t.count(), 0); diff --git a/jstests/insert_illegal_doc.js b/jstests/insert_illegal_doc.js deleted file mode 100644 index 2b4d326e9ce..00000000000 --- a/jstests/insert_illegal_doc.js +++ /dev/null @@ -1,22 +0,0 @@ -// SERVER-12185: Do not allow insertion or update of docs which will fail the -// "parallel indexing of arrays" test -var coll = db.insert_illegal_doc; -coll.drop(); -coll.ensureIndex({a: 1, b: 1}); - -// test upsert -coll.update({}, {_id: 1, a: [1, 2, 3], b: [4, 5, 6]}, true); -assert.gleErrorCode(db, 10088); -assert.eq(0, coll.find().itcount(), "should not be a doc"); - -// test insert -coll.insert({_id: 1, a: [1, 2, 3], b: [4, 5, 6]}); -assert.gleErrorCode(db, 10088); -assert.eq(0, coll.find().itcount(), "should not be a doc"); - -// test update -coll.insert({_id: 1}); -assert.gleSuccess(db, "insert failed"); -coll.update({_id: 1}, {$set : { a : [1, 2, 3], b: [4, 5, 6]}}); -assert.gleErrorCode(db, 10088); -assert.eq(undefined, coll.findOne().a, "update should have failed"); diff --git a/jstests/insert_long_index_key.js b/jstests/insert_long_index_key.js deleted file mode 100644 index 6379c36fb4a..00000000000 --- a/jstests/insert_long_index_key.js +++ /dev/null @@ -1,10 +0,0 @@ -t = db.insert_long_index_key; -t.drop(); - -var s = new Array(2000).toString(); -t.ensureIndex( { x : 1 } ); - -t.insert({ x: 1 }); -t.insert({ x: s }); - -assert.eq( 1, t.count() ); diff --git a/jstests/ismaster.js b/jstests/ismaster.js deleted file mode 100644 index 0c385b02d7c..00000000000 --- a/jstests/ismaster.js +++ /dev/null @@ -1,28 +0,0 @@ -var res = db.isMaster(); -// check that the fields that should be there are there and have proper values -assert( res.maxBsonObjectSize && - isNumber(res.maxBsonObjectSize) && - 
res.maxBsonObjectSize > 0, "maxBsonObjectSize possibly missing:" + tojson(res)); -assert( res.maxMessageSizeBytes && - isNumber(res.maxMessageSizeBytes) && - res.maxBsonObjectSize > 0, "maxMessageSizeBytes possibly missing:" + tojson(res)); -assert( res.maxWriteBatchSize && - isNumber(res.maxWriteBatchSize) && - res.maxWriteBatchSize > 0, "maxWriteBatchSize possibly missing:" + tojson(res)); -assert(res.ismaster, "ismaster missing or false:" + tojson(res)); -assert(res.localTime, "localTime possibly missing:" + tojson(res)); -var unwantedFields = ["setName", "setVersion", "secondary", "hosts", "passives", "arbiters", - "primary", "aribterOnly", "passive", "slaveDelay", "hidden", "tags", - "buildIndexes", "me"]; -// check that the fields that shouldn't be there are not there -var badFields = []; -for (field in res) { - if (!res.hasOwnProperty(field)){ - continue; - } - if (Array.contains(unwantedFields, field)) { - badFields.push(field); - } -} -assert(badFields.length === 0, "\nthe result:\n" + tojson(res) - + "\ncontained fields it shouldn't have: " + badFields); diff --git a/jstests/slowNightly/sharding_jscore_passthrough.js b/jstests/slowNightly/sharding_jscore_passthrough.js index e6f36d74ae2..0d164c6e92c 100644 --- a/jstests/slowNightly/sharding_jscore_passthrough.js +++ b/jstests/slowNightly/sharding_jscore_passthrough.js @@ -1,7 +1,10 @@ var myShardingTest = new ShardingTest("sharding_passthrough", 2, 0, 1); myShardingTest.adminCommand({ enablesharding : "test" }); + var db = myShardingTest.getDB("test"); db.getMongo().forceWriteMode("commands"); +_useWriteCommandsDefault = function() { return true; }; // for tests launching parallel shells. 
+ var res = db.adminCommand({ setParameter: 1, useClusterWriteCommands: true }); var files = listFiles("jstests/core"); diff --git a/src/mongo/shell/assert.js b/src/mongo/shell/assert.js index ac483bc8bb5..25bc108febd 100644 --- a/src/mongo/shell/assert.js +++ b/src/mongo/shell/assert.js @@ -347,10 +347,10 @@ assert.writeOK = function(res, msg) { if (!res) errMsg = "missing first argument, no response to check" - else if (!res.getWriteError) + else if (!res.getWriteConcernError) // not BulkWriteResult/SingleWriteResult. assert.gleOK(res, msg) else { - if (res.getWriteError()) { + if (res.hasWriteErrors()) { errMsg = "write failed with errors: " + tojson(res) } else if (res.getWriteConcernError()) { errMsg = "write concern failed with errors: " + tojson(res) @@ -395,7 +395,7 @@ assert.writeError = function(res, msg) { if (!res.err) errMsg = "no error" + tojson(res); } else { - if (!(res.getWriteError() || res.getWriteConcernError())) + if (!(res.hasWriteErrors() || res.getWriteConcernError())) errMsg = "no write errors : " + tojson(res); } if (errMsg != "" && msg) diff --git a/src/mongo/shell/collection.js b/src/mongo/shell/collection.js index 32ed762a8a8..b1920e33a6e 100644 --- a/src/mongo/shell/collection.js +++ b/src/mongo/shell/collection.js @@ -506,7 +506,7 @@ DBCollection.prototype.createIndex = function( keys , options ){ DBCollection.prototype.ensureIndex = function( keys , options ){ var result = this.createIndex(keys, options); - if ( this._mongo.useWriteCommands() ) { + if ( this.getMongo().writeMode() != "legacy" ) { return result; } -- cgit v1.2.1