Diffstat (limited to 'jstests/core')
-rw-r--r--jstests/core/all.js70
-rw-r--r--jstests/core/all2.js96
-rw-r--r--jstests/core/all3.js30
-rw-r--r--jstests/core/all4.js34
-rw-r--r--jstests/core/all5.js30
-rw-r--r--jstests/core/and.js84
-rw-r--r--jstests/core/and2.js12
-rw-r--r--jstests/core/and3.js92
-rw-r--r--jstests/core/andor.js148
-rw-r--r--jstests/core/apitest_db.js95
-rw-r--r--jstests/core/apitest_dbcollection.js194
-rw-r--r--jstests/core/apply_ops1.js225
-rw-r--r--jstests/core/apply_ops2.js74
-rw-r--r--jstests/core/apply_ops_dups.js29
-rw-r--r--jstests/core/array1.js16
-rw-r--r--jstests/core/array3.js11
-rw-r--r--jstests/core/array4.js18
-rw-r--r--jstests/core/array_match1.js34
-rw-r--r--jstests/core/array_match2.js18
-rw-r--r--jstests/core/array_match3.js10
-rw-r--r--jstests/core/array_match4.js4
-rw-r--r--jstests/core/arrayfind1.js43
-rw-r--r--jstests/core/arrayfind2.js35
-rw-r--r--jstests/core/arrayfind3.js17
-rw-r--r--jstests/core/arrayfind4.js22
-rw-r--r--jstests/core/arrayfind5.js27
-rw-r--r--jstests/core/arrayfind6.js14
-rw-r--r--jstests/core/arrayfind7.js52
-rw-r--r--jstests/core/arrayfind8.js163
-rw-r--r--jstests/core/arrayfind9.js30
-rw-r--r--jstests/core/arrayfinda.js20
-rw-r--r--jstests/core/arrayfindb.js12
-rw-r--r--jstests/core/auth1.js46
-rw-r--r--jstests/core/auth2.js8
-rw-r--r--jstests/core/auth_copydb.js18
-rw-r--r--jstests/core/autoid.js16
-rw-r--r--jstests/core/bad_index_plugin.js6
-rw-r--r--jstests/core/basic1.js22
-rw-r--r--jstests/core/basic2.js18
-rw-r--r--jstests/core/basic3.js28
-rw-r--r--jstests/core/basic4.js16
-rw-r--r--jstests/core/basic5.js7
-rw-r--r--jstests/core/basic6.js4
-rw-r--r--jstests/core/basic7.js11
-rw-r--r--jstests/core/basic8.js8
-rw-r--r--jstests/core/basic9.js12
-rw-r--r--jstests/core/basica.js18
-rw-r--r--jstests/core/basicb.js5
-rw-r--r--jstests/core/batch_size.js13
-rw-r--r--jstests/core/batch_write_command_delete.js174
-rw-r--r--jstests/core/batch_write_command_insert.js169
-rw-r--r--jstests/core/batch_write_command_update.js222
-rw-r--r--jstests/core/batch_write_command_wc.js6
-rw-r--r--jstests/core/bench_test1.js46
-rw-r--r--jstests/core/bench_test2.js54
-rw-r--r--jstests/core/bench_test3.js32
-rw-r--r--jstests/core/big_object1.js56
-rw-r--r--jstests/core/binData.js16
-rw-r--r--jstests/core/bindata_indexonly.js24
-rw-r--r--jstests/core/bittest.js15
-rw-r--r--jstests/core/bulk_api_ordered.js80
-rw-r--r--jstests/core/bulk_api_unordered.js80
-rw-r--r--jstests/core/bulk_insert.js20
-rw-r--r--jstests/core/bulk_insert_capped.js12
-rw-r--r--jstests/core/bulk_legacy_enforce_gle.js87
-rw-r--r--jstests/core/bypass_doc_validation.js80
-rw-r--r--jstests/core/capped.js13
-rw-r--r--jstests/core/capped1.js9
-rw-r--r--jstests/core/capped5.js49
-rw-r--r--jstests/core/capped6.js33
-rw-r--r--jstests/core/capped9.js29
-rw-r--r--jstests/core/capped_convertToCapped1.js28
-rw-r--r--jstests/core/capped_empty.js24
-rw-r--r--jstests/core/capped_max1.js22
-rw-r--r--jstests/core/capped_update.js15
-rw-r--r--jstests/core/cappeda.js25
-rw-r--r--jstests/core/check_shard_index.js175
-rw-r--r--jstests/core/cleanup_orphaned.js2
-rw-r--r--jstests/core/clone_as_capped_nonexistant.js16
-rw-r--r--jstests/core/collection_info_cache_race.js11
-rw-r--r--jstests/core/collection_truncate.js6
-rw-r--r--jstests/core/collmod.js98
-rw-r--r--jstests/core/compact_keeps_indexes.js22
-rw-r--r--jstests/core/compare_timestamps.js1
-rw-r--r--jstests/core/connection_status.js7
-rw-r--r--jstests/core/connection_string_validation.js80
-rw-r--r--jstests/core/constructors.js214
-rw-r--r--jstests/core/copydb.js5
-rw-r--r--jstests/core/count.js33
-rw-r--r--jstests/core/count10.js56
-rw-r--r--jstests/core/count11.js16
-rw-r--r--jstests/core/count2.js34
-rw-r--r--jstests/core/count3.js25
-rw-r--r--jstests/core/count4.js18
-rw-r--r--jstests/core/count5.js36
-rw-r--r--jstests/core/count6.js78
-rw-r--r--jstests/core/count7.js30
-rw-r--r--jstests/core/count9.js30
-rw-r--r--jstests/core/count_plan_summary.js5
-rw-r--r--jstests/core/counta.js27
-rw-r--r--jstests/core/countb.js12
-rw-r--r--jstests/core/countc.js137
-rw-r--r--jstests/core/coveredIndex1.js56
-rw-r--r--jstests/core/coveredIndex2.js28
-rw-r--r--jstests/core/coveredIndex3.js79
-rw-r--r--jstests/core/coveredIndex4.js35
-rw-r--r--jstests/core/covered_index_compound_1.js55
-rw-r--r--jstests/core/covered_index_negative_1.js64
-rw-r--r--jstests/core/covered_index_simple_1.js59
-rw-r--r--jstests/core/covered_index_simple_2.js46
-rw-r--r--jstests/core/covered_index_simple_3.js62
-rw-r--r--jstests/core/covered_index_simple_id.js42
-rw-r--r--jstests/core/covered_index_sort_1.js42
-rw-r--r--jstests/core/covered_index_sort_2.js17
-rw-r--r--jstests/core/covered_index_sort_3.js18
-rw-r--r--jstests/core/create_collection_fail_cleanup.js9
-rw-r--r--jstests/core/create_indexes.js124
-rw-r--r--jstests/core/crud_api.js724
-rw-r--r--jstests/core/currentop.js34
-rw-r--r--jstests/core/currentop_predicate.js14
-rw-r--r--jstests/core/cursor1.js16
-rw-r--r--jstests/core/cursor2.js18
-rw-r--r--jstests/core/cursor3.js41
-rw-r--r--jstests/core/cursor4.js74
-rw-r--r--jstests/core/cursor5.js58
-rw-r--r--jstests/core/cursor6.js92
-rw-r--r--jstests/core/cursor7.js87
-rw-r--r--jstests/core/cursora.js51
-rw-r--r--jstests/core/cursorb.js11
-rw-r--r--jstests/core/datasize2.js48
-rw-r--r--jstests/core/date1.js18
-rw-r--r--jstests/core/date2.js12
-rw-r--r--jstests/core/date3.js34
-rw-r--r--jstests/core/db.js9
-rw-r--r--jstests/core/dbadmin.js56
-rw-r--r--jstests/core/dbcase.js24
-rw-r--r--jstests/core/dbcase2.js9
-rw-r--r--jstests/core/dbhash.js65
-rw-r--r--jstests/core/dbhash2.js26
-rw-r--r--jstests/core/dbref1.js6
-rw-r--r--jstests/core/dbref2.js16
-rw-r--r--jstests/core/dbref3.js6
-rw-r--r--jstests/core/delx.js35
-rw-r--r--jstests/core/depth_limit.js17
-rw-r--r--jstests/core/distinct1.js57
-rw-r--r--jstests/core/distinct2.js13
-rw-r--r--jstests/core/distinct3.js36
-rw-r--r--jstests/core/distinct4.js31
-rw-r--r--jstests/core/distinct_array1.js101
-rw-r--r--jstests/core/distinct_index1.js81
-rw-r--r--jstests/core/distinct_index2.js38
-rw-r--r--jstests/core/distinct_speed1.js21
-rw-r--r--jstests/core/doc_validation.js10
-rw-r--r--jstests/core/doc_validation_invalid_validators.js57
-rw-r--r--jstests/core/doc_validation_options.js56
-rw-r--r--jstests/core/drop.js11
-rw-r--r--jstests/core/drop2.js58
-rw-r--r--jstests/core/drop3.js18
-rw-r--r--jstests/core/drop_index.js22
-rw-r--r--jstests/core/dropdb.js14
-rw-r--r--jstests/core/dropdb_race.js14
-rw-r--r--jstests/core/elemMatchProjection.js432
-rw-r--r--jstests/core/error2.js27
-rw-r--r--jstests/core/error5.js10
-rw-r--r--jstests/core/eval0.js24
-rw-r--r--jstests/core/eval1.js18
-rw-r--r--jstests/core/eval2.js42
-rw-r--r--jstests/core/eval3.js34
-rw-r--r--jstests/core/eval4.js25
-rw-r--r--jstests/core/eval5.js27
-rw-r--r--jstests/core/eval6.js16
-rw-r--r--jstests/core/eval7.js6
-rw-r--r--jstests/core/eval8.js19
-rw-r--r--jstests/core/eval9.js27
-rw-r--r--jstests/core/eval_mr.js12
-rw-r--r--jstests/core/eval_nolock.js20
-rw-r--r--jstests/core/evala.js7
-rw-r--r--jstests/core/evalb.js31
-rw-r--r--jstests/core/evalc.js15
-rw-r--r--jstests/core/evald.js78
-rw-r--r--jstests/core/evale.js10
-rw-r--r--jstests/core/evalg.js15
-rw-r--r--jstests/core/exists.js83
-rw-r--r--jstests/core/exists2.js19
-rw-r--r--jstests/core/exists3.js22
-rw-r--r--jstests/core/exists4.js53
-rw-r--r--jstests/core/exists5.js44
-rw-r--r--jstests/core/exists6.js32
-rw-r--r--jstests/core/exists7.js16
-rw-r--r--jstests/core/exists8.js98
-rw-r--r--jstests/core/exists9.js40
-rw-r--r--jstests/core/existsa.js100
-rw-r--r--jstests/core/existsb.js50
-rw-r--r--jstests/core/explain1.js30
-rw-r--r--jstests/core/explain2.js18
-rw-r--r--jstests/core/explain3.js16
-rw-r--r--jstests/core/explain4.js13
-rw-r--r--jstests/core/explain5.js28
-rw-r--r--jstests/core/explain6.js35
-rw-r--r--jstests/core/explain_batch_size.js10
-rw-r--r--jstests/core/explain_count.js34
-rw-r--r--jstests/core/explain_delete.js37
-rw-r--r--jstests/core/explain_distinct.js17
-rw-r--r--jstests/core/explain_execution_error.js42
-rw-r--r--jstests/core/explain_find.js19
-rw-r--r--jstests/core/explain_find_and_modify.js273
-rw-r--r--jstests/core/explain_missing_collection.js2
-rw-r--r--jstests/core/explain_missing_database.js2
-rw-r--r--jstests/core/explain_multi_plan.js15
-rw-r--r--jstests/core/explain_shell_helpers.js10
-rw-r--r--jstests/core/explain_upsert.js20
-rw-r--r--jstests/core/filemd5.js13
-rw-r--r--jstests/core/find1.js49
-rw-r--r--jstests/core/find2.js16
-rw-r--r--jstests/core/find3.js8
-rw-r--r--jstests/core/find4.js42
-rw-r--r--jstests/core/find5.js60
-rw-r--r--jstests/core/find6.js44
-rw-r--r--jstests/core/find7.js12
-rw-r--r--jstests/core/find8.js20
-rw-r--r--jstests/core/find9.js24
-rw-r--r--jstests/core/find_and_modify.js62
-rw-r--r--jstests/core/find_and_modify2.js18
-rw-r--r--jstests/core/find_and_modify3.js42
-rw-r--r--jstests/core/find_and_modify4.js50
-rw-r--r--jstests/core/find_and_modify_concurrent_update.js9
-rw-r--r--jstests/core/find_and_modify_empty_coll.js4
-rw-r--r--jstests/core/find_and_modify_empty_update.js4
-rw-r--r--jstests/core/find_and_modify_server6226.js5
-rw-r--r--jstests/core/find_and_modify_server6254.js9
-rw-r--r--jstests/core/find_and_modify_server6582.js17
-rw-r--r--jstests/core/find_and_modify_server6588.js39
-rw-r--r--jstests/core/find_and_modify_server6659.js6
-rw-r--r--jstests/core/find_and_modify_server6865.js286
-rw-r--r--jstests/core/find_and_modify_server6909.js29
-rw-r--r--jstests/core/find_and_modify_server6993.js12
-rw-r--r--jstests/core/find_and_modify_server7660.js17
-rw-r--r--jstests/core/find_and_modify_where.js7
-rw-r--r--jstests/core/find_dedup.js17
-rw-r--r--jstests/core/find_getmore_bsonsize.js5
-rw-r--r--jstests/core/find_getmore_cmd.js21
-rw-r--r--jstests/core/find_size.js4
-rw-r--r--jstests/core/finda.js94
-rw-r--r--jstests/core/fm1.js14
-rw-r--r--jstests/core/fm2.js7
-rw-r--r--jstests/core/fm3.js33
-rw-r--r--jstests/core/fm4.js16
-rw-r--r--jstests/core/fsync.js154
-rw-r--r--jstests/core/fts1.js22
-rw-r--r--jstests/core/fts2.js22
-rw-r--r--jstests/core/fts3.js21
-rw-r--r--jstests/core/fts4.js21
-rw-r--r--jstests/core/fts5.js20
-rw-r--r--jstests/core/fts_blog.js30
-rw-r--r--jstests/core/fts_blogwild.js60
-rw-r--r--jstests/core/fts_casesensitive.js4
-rw-r--r--jstests/core/fts_diacritic_and_caseinsensitive.js2
-rw-r--r--jstests/core/fts_diacritic_and_casesensitive.js94
-rw-r--r--jstests/core/fts_diacriticsensitive.js20
-rw-r--r--jstests/core/fts_explain.js2
-rw-r--r--jstests/core/fts_index.js40
-rw-r--r--jstests/core/fts_index2.js4
-rw-r--r--jstests/core/fts_index_version1.js3
-rw-r--r--jstests/core/fts_index_version2.js5
-rw-r--r--jstests/core/fts_mix.js199
-rw-r--r--jstests/core/fts_partition1.js25
-rw-r--r--jstests/core/fts_partition_no_multikey.js10
-rw-r--r--jstests/core/fts_phrase.js38
-rw-r--r--jstests/core/fts_proj.js24
-rw-r--r--jstests/core/fts_projection.js48
-rw-r--r--jstests/core/fts_querylang.js30
-rw-r--r--jstests/core/fts_score_sort.js12
-rw-r--r--jstests/core/fts_spanish.js29
-rw-r--r--jstests/core/geo1.js39
-rw-r--r--jstests/core/geo10.js15
-rw-r--r--jstests/core/geo2.js44
-rw-r--r--jstests/core/geo3.js111
-rw-r--r--jstests/core/geo5.js23
-rw-r--r--jstests/core/geo6.js29
-rw-r--r--jstests/core/geo7.js22
-rw-r--r--jstests/core/geo9.js36
-rw-r--r--jstests/core/geo_2d_with_geojson_point.js10
-rw-r--r--jstests/core/geo_allowedcomparisons.js54
-rw-r--r--jstests/core/geo_array0.js24
-rw-r--r--jstests/core/geo_array1.js32
-rw-r--r--jstests/core/geo_array2.js280
-rw-r--r--jstests/core/geo_big_polygon.js157
-rw-r--r--jstests/core/geo_big_polygon2.js1116
-rw-r--r--jstests/core/geo_big_polygon3.js286
-rw-r--r--jstests/core/geo_borders.js164
-rw-r--r--jstests/core/geo_box1.js56
-rw-r--r--jstests/core/geo_box1_noindex.js46
-rw-r--r--jstests/core/geo_box2.js18
-rw-r--r--jstests/core/geo_box3.js32
-rw-r--r--jstests/core/geo_center_sphere1.js103
-rw-r--r--jstests/core/geo_center_sphere2.js119
-rw-r--r--jstests/core/geo_circle1.js63
-rw-r--r--jstests/core/geo_circle1_noindex.js38
-rw-r--r--jstests/core/geo_circle2.js39
-rw-r--r--jstests/core/geo_circle2a.js41
-rw-r--r--jstests/core/geo_circle3.js29
-rw-r--r--jstests/core/geo_circle4.js38
-rw-r--r--jstests/core/geo_circle5.js20
-rw-r--r--jstests/core/geo_distinct.js117
-rw-r--r--jstests/core/geo_exactfetch.js4
-rw-r--r--jstests/core/geo_fiddly_box.js53
-rw-r--r--jstests/core/geo_fiddly_box2.js41
-rw-r--r--jstests/core/geo_group.js46
-rw-r--r--jstests/core/geo_haystack1.js68
-rw-r--r--jstests/core/geo_haystack2.js69
-rw-r--r--jstests/core/geo_haystack3.js38
-rw-r--r--jstests/core/geo_invalid_2d_params.js10
-rw-r--r--jstests/core/geo_invalid_polygon.js17
-rw-r--r--jstests/core/geo_mapreduce.js54
-rw-r--r--jstests/core/geo_mapreduce2.js39
-rw-r--r--jstests/core/geo_max.js69
-rw-r--r--jstests/core/geo_mindistance.js188
-rw-r--r--jstests/core/geo_mindistance_boundaries.js124
-rw-r--r--jstests/core/geo_multikey0.js29
-rw-r--r--jstests/core/geo_multikey1.js13
-rw-r--r--jstests/core/geo_multinest0.js60
-rw-r--r--jstests/core/geo_multinest1.js37
-rw-r--r--jstests/core/geo_near_random1.js18
-rw-r--r--jstests/core/geo_near_random2.js19
-rw-r--r--jstests/core/geo_nearwithin.js26
-rw-r--r--jstests/core/geo_oob_sphere.js37
-rw-r--r--jstests/core/geo_operator_crs.js38
-rw-r--r--jstests/core/geo_or.js86
-rw-r--r--jstests/core/geo_poly_edge.js22
-rw-r--r--jstests/core/geo_poly_line.js17
-rw-r--r--jstests/core/geo_polygon1.js81
-rw-r--r--jstests/core/geo_polygon1_noindex.js66
-rw-r--r--jstests/core/geo_polygon2.js178
-rw-r--r--jstests/core/geo_polygon3.js104
-rw-r--r--jstests/core/geo_queryoptimizer.js29
-rw-r--r--jstests/core/geo_regex0.js25
-rw-r--r--jstests/core/geo_s2cursorlimitskip.js30
-rw-r--r--jstests/core/geo_s2dedupnear.js10
-rw-r--r--jstests/core/geo_s2descindex.js47
-rw-r--r--jstests/core/geo_s2disjoint_holes.js40
-rw-r--r--jstests/core/geo_s2dupe_points.js81
-rw-r--r--[-rwxr-xr-x]jstests/core/geo_s2edgecases.js71
-rw-r--r--jstests/core/geo_s2exact.js16
-rw-r--r--jstests/core/geo_s2explain.js54
-rw-r--r--jstests/core/geo_s2holesameasshell.js45
-rw-r--r--[-rwxr-xr-x]jstests/core/geo_s2index.js163
-rw-r--r--[-rwxr-xr-x]jstests/core/geo_s2indexoldformat.js24
-rw-r--r--jstests/core/geo_s2indexversion1.js108
-rw-r--r--jstests/core/geo_s2intersection.js111
-rw-r--r--jstests/core/geo_s2largewithin.js27
-rw-r--r--jstests/core/geo_s2meridian.js46
-rw-r--r--jstests/core/geo_s2multi.js70
-rw-r--r--jstests/core/geo_s2near.js88
-rw-r--r--jstests/core/geo_s2nearComplex.js109
-rw-r--r--jstests/core/geo_s2near_equator_opposite.js14
-rw-r--r--jstests/core/geo_s2nearcorrect.js12
-rw-r--r--jstests/core/geo_s2nearwithin.js51
-rw-r--r--jstests/core/geo_s2nongeoarray.js12
-rw-r--r--[-rwxr-xr-x]jstests/core/geo_s2nonstring.js26
-rw-r--r--jstests/core/geo_s2nopoints.js7
-rw-r--r--jstests/core/geo_s2oddshapes.js94
-rw-r--r--jstests/core/geo_s2ordering.js13
-rw-r--r--jstests/core/geo_s2overlappingpolys.js195
-rw-r--r--[-rwxr-xr-x]jstests/core/geo_s2polywithholes.js71
-rw-r--r--jstests/core/geo_s2selfintersectingpoly.js9
-rw-r--r--jstests/core/geo_s2sparse.js34
-rw-r--r--jstests/core/geo_s2twofields.js48
-rw-r--r--jstests/core/geo_s2validindex.js8
-rw-r--r--jstests/core/geo_s2within.js38
-rw-r--r--jstests/core/geo_small_large.js122
-rw-r--r--jstests/core/geo_sort1.js22
-rw-r--r--jstests/core/geo_uniqueDocs.js38
-rw-r--r--jstests/core/geo_uniqueDocs2.js93
-rw-r--r--jstests/core/geo_update.js38
-rw-r--r--jstests/core/geo_update1.js40
-rw-r--r--jstests/core/geo_update2.js43
-rw-r--r--jstests/core/geo_update_btree.js32
-rw-r--r--jstests/core/geo_update_btree2.js51
-rw-r--r--jstests/core/geo_update_dedup.js35
-rw-r--r--jstests/core/geo_validate.js96
-rw-r--r--jstests/core/geo_withinquery.js18
-rw-r--r--jstests/core/geoa.js12
-rw-r--r--jstests/core/geob.js20
-rw-r--r--jstests/core/geoc.js33
-rw-r--r--jstests/core/geod.js20
-rw-r--r--jstests/core/geoe.js37
-rw-r--r--jstests/core/geof.js18
-rw-r--r--jstests/core/geonear_cmd_input_validation.js67
-rw-r--r--jstests/core/getlog1.js26
-rw-r--r--jstests/core/getlog2.js68
-rw-r--r--jstests/core/getmore_invalidation.js14
-rw-r--r--jstests/core/group1.js185
-rw-r--r--jstests/core/group2.js26
-rw-r--r--jstests/core/group3.js33
-rw-r--r--jstests/core/group4.js63
-rw-r--r--jstests/core/group5.js53
-rw-r--r--jstests/core/group6.js36
-rw-r--r--jstests/core/group7.js26
-rw-r--r--jstests/core/group8.js22
-rw-r--r--jstests/core/group_empty.js9
-rw-r--r--jstests/core/grow_hash_table.js15
-rw-r--r--jstests/core/hashindex1.js131
-rw-r--r--jstests/core/hashtest1.js129
-rw-r--r--jstests/core/hint1.js15
-rw-r--r--jstests/core/hostinfo.js34
-rw-r--r--jstests/core/id1.js22
-rw-r--r--jstests/core/idhack.js95
-rw-r--r--jstests/core/in.js30
-rw-r--r--jstests/core/in2.js49
-rw-r--r--jstests/core/in3.js32
-rw-r--r--jstests/core/in4.js24
-rw-r--r--jstests/core/in5.js70
-rw-r--r--jstests/core/in6.js8
-rw-r--r--jstests/core/in7.js18
-rw-r--r--jstests/core/in8.js20
-rw-r--r--jstests/core/inc-SERVER-7446.js38
-rw-r--r--jstests/core/inc1.js37
-rw-r--r--jstests/core/inc2.js26
-rw-r--r--jstests/core/inc3.js18
-rw-r--r--jstests/core/index1.js32
-rw-r--r--jstests/core/index13.js172
-rw-r--r--jstests/core/index2.js76
-rw-r--r--jstests/core/index3.js10
-rw-r--r--jstests/core/index4.js31
-rw-r--r--jstests/core/index5.js24
-rw-r--r--jstests/core/index6.js6
-rw-r--r--jstests/core/index8.js81
-rw-r--r--jstests/core/index9.js30
-rw-r--r--jstests/core/indexOtherNamespace.js10
-rw-r--r--jstests/core/index_arr1.js20
-rw-r--r--jstests/core/index_arr2.js52
-rw-r--r--jstests/core/index_big1.js31
-rw-r--r--[-rwxr-xr-x]jstests/core/index_bigkeys.js49
-rw-r--r--jstests/core/index_bigkeys_nofail.js42
-rw-r--r--jstests/core/index_bigkeys_update.js16
-rw-r--r--jstests/core/index_bigkeys_validation.js2
-rw-r--r--jstests/core/index_check2.js44
-rw-r--r--jstests/core/index_check3.js87
-rw-r--r--jstests/core/index_check5.js25
-rw-r--r--jstests/core/index_check6.js127
-rw-r--r--jstests/core/index_check7.js15
-rw-r--r--jstests/core/index_create_too_many.js13
-rw-r--r--jstests/core/index_create_with_nul_in_name.js8
-rw-r--r--jstests/core/index_diag.js49
-rw-r--r--jstests/core/index_dropdups_ignore.js10
-rw-r--r--jstests/core/index_elemmatch1.js39
-rw-r--r--jstests/core/index_filter_commands.js96
-rw-r--r--jstests/core/index_many.js26
-rw-r--r--jstests/core/index_many2.js27
-rw-r--r--jstests/core/index_partial_create_drop.js23
-rw-r--r--jstests/core/index_partial_read_ops.js12
-rw-r--r--jstests/core/index_partial_write_ops.js9
-rw-r--r--jstests/core/index_plugins.js28
-rw-r--r--jstests/core/index_sparse1.js66
-rw-r--r--jstests/core/index_sparse2.js33
-rw-r--r--jstests/core/index_stats.js50
-rw-r--r--jstests/core/indexa.js23
-rw-r--r--jstests/core/indexapi.js50
-rw-r--r--jstests/core/indexb.js27
-rw-r--r--jstests/core/indexc.js20
-rw-r--r--jstests/core/indexd.js12
-rw-r--r--jstests/core/indexe.js20
-rw-r--r--jstests/core/indexes_on_indexes.js10
-rw-r--r--jstests/core/indexf.js14
-rw-r--r--jstests/core/indexg.js10
-rw-r--r--jstests/core/indexj.js64
-rw-r--r--jstests/core/indexl.js32
-rw-r--r--jstests/core/indexm.js19
-rw-r--r--jstests/core/indexn.js24
-rw-r--r--jstests/core/indexp.js22
-rw-r--r--jstests/core/indexr.js34
-rw-r--r--jstests/core/indexs.js21
-rw-r--r--jstests/core/indext.js22
-rw-r--r--jstests/core/indexu.js117
-rw-r--r--jstests/core/indexv.js20
-rw-r--r--jstests/core/insert1.js25
-rw-r--r--jstests/core/insert2.js8
-rw-r--r--jstests/core/insert_id_undefined.js4
-rw-r--r--jstests/core/insert_illegal_doc.js2
-rw-r--r--jstests/core/insert_long_index_key.js8
-rw-r--r--jstests/core/invalid_db_name.js11
-rw-r--r--jstests/core/ismaster.js39
-rw-r--r--jstests/core/js1.js22
-rw-r--r--jstests/core/js2.js30
-rw-r--r--jstests/core/js3.js132
-rw-r--r--jstests/core/js4.js81
-rw-r--r--jstests/core/js5.js10
-rw-r--r--jstests/core/js7.js6
-rw-r--r--jstests/core/js8.js37
-rw-r--r--jstests/core/js9.js31
-rw-r--r--jstests/core/json1.js61
-rw-r--r--jstests/core/kill_cursors.js60
-rw-r--r--jstests/core/killop.js49
-rw-r--r--jstests/core/list_collections1.js173
-rw-r--r--jstests/core/list_collections_filter.js18
-rw-r--r--jstests/core/list_indexes.js21
-rw-r--r--jstests/core/list_indexes_invalid.js32
-rw-r--r--jstests/core/loadserverscripts.js36
-rw-r--r--jstests/core/loglong.js24
-rw-r--r--jstests/core/logprocessdetails.js12
-rw-r--r--jstests/core/long_index_rename.js16
-rw-r--r--jstests/core/map1.js34
-rw-r--r--jstests/core/max_doc_size.js54
-rw-r--r--jstests/core/max_time_ms.js365
-rw-r--r--jstests/core/maxscan.js16
-rw-r--r--jstests/core/minmax.js76
-rw-r--r--jstests/core/minmax_edge.js144
-rw-r--r--jstests/core/mod1.js38
-rw-r--r--jstests/core/mr1.js227
-rw-r--r--jstests/core/mr2.js90
-rw-r--r--jstests/core/mr3.js73
-rw-r--r--jstests/core/mr4.js49
-rw-r--r--jstests/core/mr5.js65
-rw-r--r--jstests/core/mr_bigobject.js33
-rw-r--r--jstests/core/mr_bigobject_replace.js27
-rw-r--r--jstests/core/mr_comments.js42
-rw-r--r--jstests/core/mr_errorhandling.js43
-rw-r--r--jstests/core/mr_index.js42
-rw-r--r--jstests/core/mr_index2.js25
-rw-r--r--jstests/core/mr_index3.js107
-rw-r--r--jstests/core/mr_killop.js202
-rw-r--r--jstests/core/mr_merge.js59
-rw-r--r--jstests/core/mr_merge2.js48
-rw-r--r--jstests/core/mr_mutable_properties.js42
-rw-r--r--jstests/core/mr_optim.js30
-rw-r--r--jstests/core/mr_outreduce.js51
-rw-r--r--jstests/core/mr_outreduce2.js29
-rw-r--r--jstests/core/mr_replaceIntoDB.js46
-rw-r--r--jstests/core/mr_sort.js51
-rw-r--r--jstests/core/mr_stored.js82
-rw-r--r--jstests/core/mr_undef.js24
-rw-r--r--jstests/core/multi.js26
-rw-r--r--jstests/core/multi2.js30
-rw-r--r--jstests/core/multikey_geonear.js8
-rw-r--r--jstests/core/ne1.js12
-rw-r--r--jstests/core/ne2.js18
-rw-r--r--jstests/core/ne3.js22
-rw-r--r--jstests/core/nestedarr1.js27
-rw-r--r--jstests/core/nestedobj1.js26
-rw-r--r--jstests/core/nin.js104
-rw-r--r--jstests/core/nin2.js64
-rw-r--r--jstests/core/no_db_created.js11
-rw-r--r--jstests/core/not1.js23
-rw-r--r--jstests/core/not2.js120
-rw-r--r--jstests/core/not3.js8
-rw-r--r--jstests/core/notablescan.js42
-rw-r--r--jstests/core/ns_length.js11
-rw-r--r--jstests/core/null.js26
-rw-r--r--jstests/core/null2.js54
-rw-r--r--jstests/core/null_field_name.js12
-rw-r--r--jstests/core/numberint.js116
-rw-r--r--jstests/core/numberlong.js171
-rw-r--r--jstests/core/numberlong2.js15
-rw-r--r--jstests/core/numberlong3.js24
-rw-r--r--jstests/core/numberlong4.js24
-rw-r--r--jstests/core/objid1.js20
-rw-r--r--jstests/core/objid2.js6
-rw-r--r--jstests/core/objid3.js7
-rw-r--r--jstests/core/objid4.js23
-rw-r--r--jstests/core/objid5.js20
-rw-r--r--jstests/core/objid7.js15
-rw-r--r--jstests/core/opcounters_active.js45
-rw-r--r--jstests/core/opcounters_write_cmd.js74
-rw-r--r--jstests/core/or1.js75
-rw-r--r--jstests/core/or2.js79
-rw-r--r--jstests/core/or3.js86
-rw-r--r--jstests/core/or4.js116
-rw-r--r--jstests/core/or5.js96
-rw-r--r--jstests/core/or7.js40
-rw-r--r--jstests/core/or8.js28
-rw-r--r--jstests/core/or9.js48
-rw-r--r--jstests/core/or_inexact.js120
-rw-r--r--jstests/core/ora.js10
-rw-r--r--jstests/core/orb.js15
-rw-r--r--jstests/core/orc.js52
-rw-r--r--jstests/core/ord.js24
-rw-r--r--jstests/core/ore.js10
-rw-r--r--jstests/core/orf.js20
-rw-r--r--jstests/core/org.js16
-rw-r--r--jstests/core/orh.js14
-rw-r--r--jstests/core/orj.js228
-rw-r--r--jstests/core/ork.js22
-rw-r--r--jstests/core/oro.js23
-rw-r--r--jstests/core/orp.js41
-rw-r--r--jstests/core/plan_cache_clear.js2
-rw-r--r--jstests/core/plan_cache_list_plans.js24
-rw-r--r--jstests/core/plan_cache_list_shapes.js9
-rw-r--r--jstests/core/plan_cache_shell_helpers.js100
-rw-r--r--jstests/core/pop_server_13516.js6
-rw-r--r--jstests/core/profile1.js17
-rw-r--r--jstests/core/profile2.js6
-rw-r--r--jstests/core/profile3.js45
-rw-r--r--jstests/core/profile4.js47
-rw-r--r--jstests/core/profile5.js3
-rw-r--r--jstests/core/profile_no_such_db.js64
-rw-r--r--jstests/core/proj_key1.js15
-rw-r--r--jstests/core/pull.js36
-rw-r--r--jstests/core/pull2.js42
-rw-r--r--jstests/core/pull_or.js21
-rw-r--r--jstests/core/pull_remove1.js18
-rw-r--r--jstests/core/pullall.js34
-rw-r--r--jstests/core/pullall2.js33
-rw-r--r--jstests/core/push.js75
-rw-r--r--jstests/core/push2.js12
-rw-r--r--jstests/core/push_sort.js77
-rw-r--r--jstests/core/pushall.js26
-rw-r--r--jstests/core/query1.js28
-rw-r--r--jstests/core/queryoptimizer3.js40
-rw-r--r--jstests/core/queryoptimizer6.js6
-rw-r--r--jstests/core/queryoptimizera.js60
-rw-r--r--jstests/core/read_after_optime.js20
-rw-r--r--jstests/core/recursion.js24
-rw-r--r--jstests/core/ref.js23
-rw-r--r--jstests/core/ref2.js21
-rw-r--r--jstests/core/ref3.js15
-rw-r--r--jstests/core/ref4.js20
-rw-r--r--jstests/core/regex.js26
-rw-r--r--jstests/core/regex2.js83
-rw-r--r--jstests/core/regex3.js55
-rw-r--r--jstests/core/regex4.js29
-rw-r--r--jstests/core/regex5.js40
-rw-r--r--jstests/core/regex6.js78
-rw-r--r--jstests/core/regex7.js36
-rw-r--r--jstests/core/regex8.js22
-rw-r--r--jstests/core/regex9.js12
-rw-r--r--jstests/core/regex_embed1.js29
-rw-r--r--jstests/core/regex_limit.js15
-rw-r--r--jstests/core/regex_not_id.js6
-rw-r--r--jstests/core/regex_options.js8
-rw-r--r--jstests/core/regex_util.js43
-rw-r--r--jstests/core/regexa.js16
-rw-r--r--jstests/core/regexb.js11
-rw-r--r--jstests/core/regexc.js6
-rw-r--r--jstests/core/remove.js25
-rw-r--r--jstests/core/remove2.js38
-rw-r--r--jstests/core/remove3.js18
-rw-r--r--jstests/core/remove4.js12
-rw-r--r--jstests/core/remove6.js39
-rw-r--r--jstests/core/remove7.js39
-rw-r--r--jstests/core/remove8.js20
-rw-r--r--jstests/core/remove9.js13
-rw-r--r--jstests/core/remove_justone.js18
-rw-r--r--jstests/core/remove_undefined.js57
-rw-r--r--jstests/core/removea.js24
-rw-r--r--jstests/core/removeb.js35
-rw-r--r--jstests/core/removec.js22
-rw-r--r--jstests/core/rename.js67
-rw-r--r--jstests/core/rename2.js16
-rw-r--r--jstests/core/rename3.js24
-rw-r--r--jstests/core/rename4.js187
-rw-r--r--jstests/core/rename5.js46
-rw-r--r--jstests/core/rename6.js24
-rw-r--r--jstests/core/rename7.js62
-rw-r--r--jstests/core/rename8.js5
-rw-r--r--jstests/core/rename_stayTemp.js25
-rw-r--r--jstests/core/repair_database.js17
-rw-r--r--jstests/core/repair_server12955.js8
-rw-r--r--jstests/core/return_key.js22
-rw-r--r--jstests/core/role_management_helpers.js222
-rw-r--r--jstests/core/run_program1.js12
-rw-r--r--jstests/core/server1470.js22
-rw-r--r--jstests/core/server14753.js8
-rw-r--r--jstests/core/server5346.js18
-rw-r--r--jstests/core/server7756.js11
-rw-r--r--jstests/core/server9385.js14
-rw-r--r--jstests/core/server9547.js2
-rw-r--r--jstests/core/set1.js8
-rw-r--r--jstests/core/set2.js21
-rw-r--r--jstests/core/set3.js9
-rw-r--r--jstests/core/set4.js24
-rw-r--r--jstests/core/set5.js12
-rw-r--r--jstests/core/set6.js24
-rw-r--r--jstests/core/set7.js72
-rw-r--r--jstests/core/set_param1.js172
-rw-r--r--jstests/core/shell1.js7
-rw-r--r--jstests/core/shell_writeconcern.js36
-rw-r--r--jstests/core/shellkillop.js75
-rw-r--r--jstests/core/shelltypes.js51
-rw-r--r--jstests/core/show_record_id.js26
-rw-r--r--jstests/core/skip1.js28
-rw-r--r--jstests/core/slice1.js82
-rw-r--r--jstests/core/snapshot_queries.js2
-rw-r--r--jstests/core/sort1.js67
-rw-r--r--jstests/core/sort10.js26
-rw-r--r--jstests/core/sort2.js34
-rw-r--r--jstests/core/sort3.js21
-rw-r--r--jstests/core/sort4.js49
-rw-r--r--jstests/core/sort5.js32
-rw-r--r--jstests/core/sort6.js40
-rw-r--r--jstests/core/sort7.js28
-rw-r--r--jstests/core/sort8.js36
-rw-r--r--jstests/core/sort9.js34
-rw-r--r--jstests/core/sort_numeric.js36
-rw-r--r--jstests/core/sortb.js24
-rw-r--r--jstests/core/sortc.js34
-rw-r--r--jstests/core/sortd.js67
-rw-r--r--jstests/core/sortf.js16
-rw-r--r--jstests/core/sortg.js52
-rw-r--r--jstests/core/sorth.js162
-rw-r--r--jstests/core/sorti.js28
-rw-r--r--jstests/core/sortj.js14
-rw-r--r--jstests/core/sortk.js164
-rw-r--r--jstests/core/sortl.js24
-rw-r--r--jstests/core/splitvector.js261
-rw-r--r--jstests/core/stages_and_hash.js42
-rw-r--r--jstests/core/stages_and_sorted.js57
-rw-r--r--jstests/core/stages_collection_scan.js18
-rw-r--r--jstests/core/stages_delete.js12
-rw-r--r--jstests/core/stages_fetch.js36
-rw-r--r--jstests/core/stages_ixscan.js96
-rw-r--r--jstests/core/stages_limit_skip.js23
-rw-r--r--jstests/core/stages_mergesort.js38
-rw-r--r--jstests/core/stages_or.js36
-rw-r--r--jstests/core/stages_sort.js32
-rw-r--r--jstests/core/stages_text.js6
-rw-r--r--jstests/core/startup_log.js141
-rw-r--r--jstests/core/storageDetailsCommand.js1
-rw-r--r--jstests/core/storefunc.js93
-rw-r--r--jstests/core/string_with_nul_bytes.js4
-rw-r--r--jstests/core/sub1.js13
-rw-r--r--jstests/core/system_profile.js32
-rw-r--r--jstests/core/tailable_skip_limit.js4
-rw-r--r--jstests/core/temp_cleanup.js20
-rw-r--r--jstests/core/test_command_line_test_helpers.js8
-rw-r--r--jstests/core/testminmax.js31
-rw-r--r--jstests/core/top.js56
-rw-r--r--jstests/core/ts1.js39
-rw-r--r--jstests/core/type1.js29
-rw-r--r--jstests/core/type2.js16
-rw-r--r--jstests/core/type3.js52
-rw-r--r--jstests/core/type4.js14
-rw-r--r--jstests/core/type5.js8
-rw-r--r--jstests/core/type6.js12
-rw-r--r--jstests/core/type7.js2
-rw-r--r--jstests/core/type8.js2
-rw-r--r--jstests/core/uniqueness.js57
-rw-r--r--jstests/core/unset.js25
-rw-r--r--jstests/core/unset2.js32
-rw-r--r--jstests/core/update2.js20
-rw-r--r--jstests/core/update3.js30
-rw-r--r--jstests/core/update5.js56
-rw-r--r--jstests/core/update6.js55
-rw-r--r--jstests/core/update7.js167
-rw-r--r--jstests/core/update8.js10
-rw-r--r--jstests/core/update9.js21
-rw-r--r--jstests/core/update_addToSet.js77
-rw-r--r--jstests/core/update_addToSet2.js12
-rw-r--r--jstests/core/update_addToSet3.js19
-rw-r--r--jstests/core/update_arraymatch1.js21
-rw-r--r--jstests/core/update_arraymatch2.js22
-rw-r--r--jstests/core/update_arraymatch3.js19
-rw-r--r--jstests/core/update_arraymatch4.js21
-rw-r--r--jstests/core/update_arraymatch5.js21
-rw-r--r--jstests/core/update_arraymatch6.js10
-rw-r--r--jstests/core/update_arraymatch7.js8
-rw-r--r--jstests/core/update_arraymatch8.js184
-rw-r--r--jstests/core/update_bit_examples.js6
-rw-r--r--jstests/core/update_blank1.js13
-rw-r--r--jstests/core/update_currentdate_examples.js6
-rw-r--r--jstests/core/update_dbref.js20
-rw-r--r--jstests/core/update_find_and_modify_id.js7
-rw-r--r--jstests/core/update_invalid1.js4
-rw-r--r--jstests/core/update_min_max_examples.js56
-rw-r--r--jstests/core/update_mul_examples.js10
-rw-r--r--jstests/core/update_multi3.js27
-rw-r--r--jstests/core/update_multi4.js18
-rw-r--r--jstests/core/update_multi5.js14
-rw-r--r--jstests/core/update_multi6.js9
-rw-r--r--jstests/core/update_replace.js14
-rw-r--r--jstests/core/update_server-12848.js12
-rw-r--r--jstests/core/update_setOnInsert.js32
-rw-r--r--jstests/core/updatea.js63
-rw-r--r--jstests/core/updateb.js10
-rw-r--r--jstests/core/updatec.js12
-rw-r--r--jstests/core/updated.js30
-rw-r--r--jstests/core/updatee.js57
-rw-r--r--jstests/core/updatef.js17
-rw-r--r--jstests/core/updateg.js16
-rw-r--r--jstests/core/updateh.js66
-rw-r--r--jstests/core/updatei.js60
-rw-r--r--jstests/core/updatej.js10
-rw-r--r--jstests/core/updatek.js13
-rw-r--r--jstests/core/updatel.js45
-rw-r--r--jstests/core/updatem.js14
-rw-r--r--jstests/core/upsert_and.js18
-rw-r--r--jstests/core/upsert_fields.js231
-rw-r--r--jstests/core/upsert_shell.js60
-rw-r--r--[-rwxr-xr-x]jstests/core/useindexonobjgtlt.js14
-rw-r--r--jstests/core/user_management_helpers.js171
-rw-r--r--jstests/core/validate_cmd_ns.js11
-rw-r--r--jstests/core/validate_pseudocommand_ns.js20
-rw-r--r--jstests/core/validate_user_documents.js37
-rw-r--r--jstests/core/verify_update_mods.js68
-rw-r--r--jstests/core/where1.js62
-rw-r--r--jstests/core/where2.js12
-rw-r--r--jstests/core/where3.js20
-rw-r--r--jstests/core/where4.js40
-rw-r--r--jstests/core/write_result.js53
798 files changed, 18894 insertions, 17155 deletions
diff --git a/jstests/core/all.js b/jstests/core/all.js
index d4ddd6c6ee4..221cf1daeda 100644
--- a/jstests/core/all.js
+++ b/jstests/core/all.js
@@ -2,46 +2,44 @@ t = db.jstests_all;
t.drop();
doTest = function() {
-
- t.save( { a:[ 1,2,3 ] } );
- t.save( { a:[ 1,2,4 ] } );
- t.save( { a:[ 1,8,5 ] } );
- t.save( { a:[ 1,8,6 ] } );
- t.save( { a:[ 1,9,7 ] } );
- t.save( { a : [] } );
- t.save( {} );
-
- assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() );
- assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() );
-
- t.save( { a: [ 2, 2 ] } );
- assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() );
-
- t.save( { a: [ [ 2 ] ] } );
- assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() );
-
- t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() );
- assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() );
-
- t.save( { a: { b: [ 20, 30 ] } } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() );
-
-
- assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" );
- assert.eq( 0 , t.find( { a : { $all : [19] } } ).count() , "E2" );
- assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" );
+ t.save({a: [1, 2, 3]});
+ t.save({a: [1, 2, 4]});
+ t.save({a: [1, 8, 5]});
+ t.save({a: [1, 8, 6]});
+ t.save({a: [1, 9, 7]});
+ t.save({a: []});
+ t.save({});
+
+ assert.eq(5, t.find({a: {$all: [1]}}).count());
+ assert.eq(2, t.find({a: {$all: [1, 2]}}).count());
+ assert.eq(2, t.find({a: {$all: [1, 8]}}).count());
+ assert.eq(1, t.find({a: {$all: [1, 3]}}).count());
+ assert.eq(2, t.find({a: {$all: [2]}}).count());
+ assert.eq(1, t.find({a: {$all: [2, 3]}}).count());
+ assert.eq(2, t.find({a: {$all: [2, 1]}}).count());
+
+ t.save({a: [2, 2]});
+ assert.eq(3, t.find({a: {$all: [2, 2]}}).count());
+
+ t.save({a: [[2]]});
+ assert.eq(3, t.find({a: {$all: [2]}}).count());
+
+ t.save({a: [{b: [10, 11]}, 11]});
+ assert.eq(1, t.find({'a.b': {$all: [10]}}).count());
+ assert.eq(1, t.find({a: {$all: [11]}}).count());
+
+ t.save({a: {b: [20, 30]}});
+ assert.eq(1, t.find({'a.b': {$all: [20]}}).count());
+ assert.eq(1, t.find({'a.b': {$all: [20, 30]}}).count());
+
+ assert.eq(5, t.find({a: {$all: [1]}}).count(), "E1");
+ assert.eq(0, t.find({a: {$all: [19]}}).count(), "E2");
+ assert.eq(0, t.find({a: {$all: []}}).count(), "E3");
};
doTest();
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
doTest();
diff --git a/jstests/core/all2.js b/jstests/core/all2.js
index 6beb346775d..4842460c4db 100644
--- a/jstests/core/all2.js
+++ b/jstests/core/all2.js
@@ -2,85 +2,83 @@
t = db.all2;
t.drop();
-t.save( { a : [ { x : 1 } , { x : 2 } ] } );
-t.save( { a : [ { x : 2 } , { x : 3 } ] } );
-t.save( { a : [ { x : 3 } , { x : 4 } ] } );
+t.save({a: [{x: 1}, {x: 2}]});
+t.save({a: [{x: 2}, {x: 3}]});
+t.save({a: [{x: 3}, {x: 4}]});
state = "no index";
-function check( n , q , e ){
- assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state );
- assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount" + state );
+function check(n, q, e) {
+ assert.eq(n, t.find(q).count(), tojson(q) + " " + e + " count " + state);
+ assert.eq(n, t.find(q).itcount(), tojson(q) + " " + e + " itcount" + state);
}
-check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+check(1, {"a.x": {$in: [1]}}, "A");
+check(2, {"a.x": {$in: [2]}}, "B");
-check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+check(2, {"a.x": {$in: [1, 2]}}, "C");
+check(3, {"a.x": {$in: [2, 3]}}, "D");
+check(3, {"a.x": {$in: [1, 3]}}, "E");
-check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+check(1, {"a.x": {$all: [1, 2]}}, "F");
+check(1, {"a.x": {$all: [2, 3]}}, "G");
+check(0, {"a.x": {$all: [1, 3]}}, "H");
-t.ensureIndex( { "a.x" : 1 } );
+t.ensureIndex({"a.x": 1});
state = "index";
-check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+check(1, {"a.x": {$in: [1]}}, "A");
+check(2, {"a.x": {$in: [2]}}, "B");
-check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+check(2, {"a.x": {$in: [1, 2]}}, "C");
+check(3, {"a.x": {$in: [2, 3]}}, "D");
+check(3, {"a.x": {$in: [1, 3]}}, "E");
-check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+check(1, {"a.x": {$all: [1, 2]}}, "F");
+check(1, {"a.x": {$all: [2, 3]}}, "G");
+check(0, {"a.x": {$all: [1, 3]}}, "H");
// --- more
t.drop();
-t.save( { a : [ 1 , 2 ] } );
-t.save( { a : [ 2 , 3 ] } );
-t.save( { a : [ 3 , 4 ] } );
+t.save({a: [1, 2]});
+t.save({a: [2, 3]});
+t.save({a: [3, 4]});
state = "more no index";
-check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+check(1, {"a": {$in: [1]}}, "A");
+check(2, {"a": {$in: [2]}}, "B");
-check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
+check(2, {"a": {$in: [1, 2]}}, "C");
+check(3, {"a": {$in: [2, 3]}}, "D");
+check(3, {"a": {$in: [1, 3]}}, "E");
-check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+check(1, {"a": {$all: [1, 2]}}, "F");
+check(1, {"a": {$all: [2, 3]}}, "G");
+check(0, {"a": {$all: [1, 3]}}, "H");
-t.ensureIndex( { "a" : 1 } );
+t.ensureIndex({"a": 1});
state = "more index";
-check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
-check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+check(1, {"a": {$in: [1]}}, "A");
+check(2, {"a": {$in: [2]}}, "B");
-check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
-check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
-check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
-
-check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
-check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
-check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+check(2, {"a": {$in: [1, 2]}}, "C");
+check(3, {"a": {$in: [2, 3]}}, "D");
+check(3, {"a": {$in: [1, 3]}}, "E");
+check(1, {"a": {$all: [1, 2]}}, "F");
+check(1, {"a": {$all: [2, 3]}}, "G");
+check(0, {"a": {$all: [1, 3]}}, "H");
// more 2
state = "more 2";
t.drop();
-t.save( { name : [ "harry","jack","tom" ] } );
-check( 0 , { name : { $all : ["harry","john"] } } , "A" );
-t.ensureIndex( { name : 1 } );
-check( 0 , { name : { $all : ["harry","john"] } } , "B" );
-
+t.save({name: ["harry", "jack", "tom"]});
+check(0, {name: {$all: ["harry", "john"]}}, "A");
+t.ensureIndex({name: 1});
+check(0, {name: {$all: ["harry", "john"]}}, "B");
diff --git a/jstests/core/all3.js b/jstests/core/all3.js
index b7a05321bbf..ae1a9460089 100644
--- a/jstests/core/all3.js
+++ b/jstests/core/all3.js
@@ -5,24 +5,24 @@ t.drop();
t.save({});
-assert.eq( 1, t.count( {foo:{$in:[null]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[null]}} ) );
-assert.eq( 0, t.count( {foo:{$not:{$all:[null]}}} ) );
-assert.eq( 0, t.count( {foo:{$not:{$in:[null]}}} ) );
+assert.eq(1, t.count({foo: {$in: [null]}}));
+assert.eq(1, t.count({foo: {$all: [null]}}));
+assert.eq(0, t.count({foo: {$not: {$all: [null]}}}));
+assert.eq(0, t.count({foo: {$not: {$in: [null]}}}));
t.remove({});
-t.save({foo:1});
-assert.eq( 0, t.count( {foo:{$in:[null]}} ) );
-assert.eq( 0, t.count( {foo:{$all:[null]}} ) );
-assert.eq( 1, t.count( {foo:{$not:{$in:[null]}}} ) );
-assert.eq( 1, t.count( {foo:{$not:{$all:[null]}}} ) );
+t.save({foo: 1});
+assert.eq(0, t.count({foo: {$in: [null]}}));
+assert.eq(0, t.count({foo: {$all: [null]}}));
+assert.eq(1, t.count({foo: {$not: {$in: [null]}}}));
+assert.eq(1, t.count({foo: {$not: {$all: [null]}}}));
t.remove({});
-t.save( {foo:[0,1]} );
-assert.eq( 1, t.count( {foo:{$in:[[0,1]]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[[0,1]]}} ) );
+t.save({foo: [0, 1]});
+assert.eq(1, t.count({foo: {$in: [[0, 1]]}}));
+assert.eq(1, t.count({foo: {$all: [[0, 1]]}}));
t.remove({});
-t.save( {foo:[]} );
-assert.eq( 1, t.count( {foo:{$in:[[]]}} ) );
-assert.eq( 1, t.count( {foo:{$all:[[]]}} ) );
+t.save({foo: []});
+assert.eq(1, t.count({foo: {$in: [[]]}}));
+assert.eq(1, t.count({foo: {$all: [[]]}}));
diff --git a/jstests/core/all4.js b/jstests/core/all4.js
index 109795754bc..eb979289496 100644
--- a/jstests/core/all4.js
+++ b/jstests/core/all4.js
@@ -3,26 +3,26 @@
t = db.jstests_all4;
t.drop();
-function checkQuery( query, val ) {
- assert.eq( val, t.count(query) );
- assert.eq( val, t.find(query).itcount() );
+function checkQuery(query, val) {
+ assert.eq(val, t.count(query));
+ assert.eq(val, t.find(query).itcount());
}
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 0 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+checkQuery({a: {$all: []}}, 0);
+checkQuery({a: {$all: [1]}}, 0);
+checkQuery({a: {$all: [{$elemMatch: {b: 1}}]}}, 0);
t.save({});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 0 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+checkQuery({a: {$all: []}}, 0);
+checkQuery({a: {$all: [1]}}, 0);
+checkQuery({a: {$all: [{$elemMatch: {b: 1}}]}}, 0);
-t.save({a:1});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 1 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 0 );
+t.save({a: 1});
+checkQuery({a: {$all: []}}, 0);
+checkQuery({a: {$all: [1]}}, 1);
+checkQuery({a: {$all: [{$elemMatch: {b: 1}}]}}, 0);
-t.save({a:[{b:1}]});
-checkQuery( {a:{$all:[]}}, 0 );
-checkQuery( {a:{$all:[1]}}, 1 );
-checkQuery( {a:{$all:[{$elemMatch:{b:1}}]}}, 1 );
+t.save({a: [{b: 1}]});
+checkQuery({a: {$all: []}}, 0);
+checkQuery({a: {$all: [1]}}, 1);
+checkQuery({a: {$all: [{$elemMatch: {b: 1}}]}}, 1);
diff --git a/jstests/core/all5.js b/jstests/core/all5.js
index a5d9e312292..a5faaa1767f 100644
--- a/jstests/core/all5.js
+++ b/jstests/core/all5.js
@@ -3,26 +3,26 @@
t = db.jstests_all5;
t.drop();
-function checkMatch( doc ) {
+function checkMatch(doc) {
t.drop();
- t.save( doc );
- assert.eq( 1, t.count( {a:{$elemMatch:{b:null}}} ) );
- assert.eq( 1, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
+ t.save(doc);
+ assert.eq(1, t.count({a: {$elemMatch: {b: null}}}));
+ assert.eq(1, t.count({a: {$all: [{$elemMatch: {b: null}}]}}));
}
-function checkNoMatch( doc ) {
+function checkNoMatch(doc) {
t.drop();
- t.save( doc );
- assert.eq( 0, t.count( {a:{$all:[{$elemMatch:{b:null}}]}} ) );
+ t.save(doc);
+ assert.eq(0, t.count({a: {$all: [{$elemMatch: {b: null}}]}}));
}
-checkNoMatch( {} );
-checkNoMatch( {a:1} );
+checkNoMatch({});
+checkNoMatch({a: 1});
-checkNoMatch( {a:[]} );
-checkNoMatch( {a:[1]} );
+checkNoMatch({a: []});
+checkNoMatch({a: [1]});
-checkMatch( {a:[{}]} );
-checkMatch( {a:[{c:1}]} );
-checkMatch( {a:[{b:null}]} );
-checkNoMatch( {a:[{b:1}]}, 0 );
+checkMatch({a: [{}]});
+checkMatch({a: [{c: 1}]});
+checkMatch({a: [{b: null}]});
+checkNoMatch({a: [{b: 1}]}, 0);
diff --git a/jstests/core/and.js b/jstests/core/and.js
index ea2fec4554e..a29d95e84e8 100644
--- a/jstests/core/and.js
+++ b/jstests/core/and.js
@@ -3,72 +3,78 @@
t = db.jstests_and;
t.drop();
-t.save( {a:[1,2]} );
-t.save( {a:'foo'} );
+t.save({a: [1, 2]});
+t.save({a: 'foo'});
function check() {
// $and must be an array
- assert.throws( function() { t.find( {$and:4} ).toArray(); } );
+ assert.throws(function() {
+ t.find({$and: 4}).toArray();
+ });
// $and array must not be empty
- assert.throws( function() { t.find( {$and:[]} ).toArray(); } );
+ assert.throws(function() {
+ t.find({$and: []}).toArray();
+ });
// $and elements must be objects
- assert.throws( function() { t.find( {$and:[4]} ).toArray(); } );
+ assert.throws(function() {
+ t.find({$and: [4]}).toArray();
+ });
// Check equality matching
- assert.eq( 1, t.count( {$and:[{a:1}]} ) );
- assert.eq( 1, t.count( {$and:[{a:1},{a:2}]} ) );
- assert.eq( 0, t.count( {$and:[{a:1},{a:3}]} ) );
- assert.eq( 0, t.count( {$and:[{a:1},{a:2},{a:3}]} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}]} ) );
- assert.eq( 0, t.count( {$and:[{a:'foo'},{a:'g'}]} ) );
+ assert.eq(1, t.count({$and: [{a: 1}]}));
+ assert.eq(1, t.count({$and: [{a: 1}, {a: 2}]}));
+ assert.eq(0, t.count({$and: [{a: 1}, {a: 3}]}));
+ assert.eq(0, t.count({$and: [{a: 1}, {a: 2}, {a: 3}]}));
+ assert.eq(1, t.count({$and: [{a: 'foo'}]}));
+ assert.eq(0, t.count({$and: [{a: 'foo'}, {a: 'g'}]}));
// Check $and with other fields
- assert.eq( 1, t.count( {a:2,$and:[{a:1}]} ) );
- assert.eq( 0, t.count( {a:0,$and:[{a:1}]} ) );
- assert.eq( 0, t.count( {a:2,$and:[{a:0}]} ) );
- assert.eq( 1, t.count( {a:1,$and:[{a:1}]} ) );
+ assert.eq(1, t.count({a: 2, $and: [{a: 1}]}));
+ assert.eq(0, t.count({a: 0, $and: [{a: 1}]}));
+ assert.eq(0, t.count({a: 2, $and: [{a: 0}]}));
+ assert.eq(1, t.count({a: 1, $and: [{a: 1}]}));
// Check recursive $and
- assert.eq( 1, t.count( {a:2,$and:[{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {a:0,$and:[{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {a:2,$and:[{$and:[{a:0}]}]} ) );
- assert.eq( 1, t.count( {a:1,$and:[{$and:[{a:1}]}]} ) );
+ assert.eq(1, t.count({a: 2, $and: [{$and: [{a: 1}]}]}));
+ assert.eq(0, t.count({a: 0, $and: [{$and: [{a: 1}]}]}));
+ assert.eq(0, t.count({a: 2, $and: [{$and: [{a: 0}]}]}));
+ assert.eq(1, t.count({a: 1, $and: [{$and: [{a: 1}]}]}));
- assert.eq( 1, t.count( {$and:[{a:2},{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {$and:[{a:0},{$and:[{a:1}]}]} ) );
- assert.eq( 0, t.count( {$and:[{a:2},{$and:[{a:0}]}]} ) );
- assert.eq( 1, t.count( {$and:[{a:1},{$and:[{a:1}]}]} ) );
+ assert.eq(1, t.count({$and: [{a: 2}, {$and: [{a: 1}]}]}));
+ assert.eq(0, t.count({$and: [{a: 0}, {$and: [{a: 1}]}]}));
+ assert.eq(0, t.count({$and: [{a: 2}, {$and: [{a: 0}]}]}));
+ assert.eq(1, t.count({$and: [{a: 1}, {$and: [{a: 1}]}]}));
// Some of these cases were more important with an alternative $and syntax
// that was rejected, but they're still valid checks.
// Check simple regex
- assert.eq( 1, t.count( {$and:[{a:/foo/}]} ) );
+ assert.eq(1, t.count({$and: [{a: /foo/}]}));
// Check multiple regexes
- assert.eq( 1, t.count( {$and:[{a:/foo/},{a:/^f/},{a:/o/}]} ) );
- assert.eq( 0, t.count( {$and:[{a:/foo/},{a:/^g/}]} ) );
- assert.eq( 1, t.count( {$and:[{a:/^f/},{a:'foo'}]} ) );
+ assert.eq(1, t.count({$and: [{a: /foo/}, {a: /^f/}, {a: /o/}]}));
+ assert.eq(0, t.count({$and: [{a: /foo/}, {a: /^g/}]}));
+ assert.eq(1, t.count({$and: [{a: /^f/}, {a: 'foo'}]}));
// Check regex flags
- assert.eq( 0, t.count( {$and:[{a:/^F/},{a:'foo'}]} ) );
- assert.eq( 1, t.count( {$and:[{a:/^F/i},{a:'foo'}]} ) );
+ assert.eq(0, t.count({$and: [{a: /^F/}, {a: 'foo'}]}));
+ assert.eq(1, t.count({$and: [{a: /^F/i}, {a: 'foo'}]}));
// Check operator
- assert.eq( 1, t.count( {$and:[{a:{$gt:0}}]} ) );
+ assert.eq(1, t.count({$and: [{a: {$gt: 0}}]}));
// Check where
- assert.eq( 1, t.count( {a:'foo',$where:'this.a=="foo"'} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
- assert.eq( 1, t.count( {$and:[{a:'foo'}],$where:'this.a=="foo"'} ) );
+ assert.eq(1, t.count({a: 'foo', $where: 'this.a=="foo"'}));
+ assert.eq(1, t.count({$and: [{a: 'foo'}], $where: 'this.a=="foo"'}));
+ assert.eq(1, t.count({$and: [{a: 'foo'}], $where: 'this.a=="foo"'}));
// Nested where ok
- assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}]}) );
- assert.eq( 1, t.count({$and:[{a:'foo'},{$where:'this.a=="foo"'}]}) );
- assert.eq( 1, t.count({$and:[{$where:'this.a=="foo"'}],$where:'this.a=="foo"'}) );
+ assert.eq(1, t.count({$and: [{$where: 'this.a=="foo"'}]}));
+ assert.eq(1, t.count({$and: [{a: 'foo'}, {$where: 'this.a=="foo"'}]}));
+ assert.eq(1, t.count({$and: [{$where: 'this.a=="foo"'}], $where: 'this.a=="foo"'}));
}
check();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
check();
-assert.eq( 1, t.find({a:1,$and:[{a:2}]}).itcount() );
-assert.eq( 1, t.find({$and:[{a:1},{a:2}]}).itcount() );
+assert.eq(1, t.find({a: 1, $and: [{a: 2}]}).itcount());
+assert.eq(1, t.find({$and: [{a: 1}, {a: 2}]}).itcount());
diff --git a/jstests/core/and2.js b/jstests/core/and2.js
index 6e7214eb7f3..f3b014c13ad 100644
--- a/jstests/core/and2.js
+++ b/jstests/core/and2.js
@@ -3,11 +3,11 @@
t = db.jstests_and2;
t.drop();
-t.save( {a:[1,2]} );
-t.update( {a:1}, {$set:{'a.$':5}} );
-assert.eq( [5,2], t.findOne().a );
+t.save({a: [1, 2]});
+t.update({a: 1}, {$set: {'a.$': 5}});
+assert.eq([5, 2], t.findOne().a);
t.drop();
-t.save( {a:[1,2]} );
-t.update( {$and:[{a:1}]}, {$set:{'a.$':5}} );
-assert.eq( [5,2], t.findOne().a );
+t.save({a: [1, 2]});
+t.update({$and: [{a: 1}]}, {$set: {'a.$': 5}});
+assert.eq([5, 2], t.findOne().a);
diff --git a/jstests/core/and3.js b/jstests/core/and3.js
index a0a779937b1..4f6d6bd28fd 100644
--- a/jstests/core/and3.js
+++ b/jstests/core/and3.js
@@ -3,53 +3,53 @@
t = db.jstests_and3;
t.drop();
-t.save( {a:1} );
-t.save( {a:'foo'} );
+t.save({a: 1});
+t.save({a: 'foo'});
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-function checkScanMatch( query, docsExamined, n ) {
- var e = t.find( query ).hint( {a:1} ).explain( "executionStats" );
- assert.eq( docsExamined, e.executionStats.totalDocsExamined );
- assert.eq( n, e.executionStats.nReturned );
+function checkScanMatch(query, docsExamined, n) {
+ var e = t.find(query).hint({a: 1}).explain("executionStats");
+ assert.eq(docsExamined, e.executionStats.totalDocsExamined);
+ assert.eq(n, e.executionStats.nReturned);
}
-checkScanMatch( {a:/o/}, 1, 1 );
-checkScanMatch( {a:/a/}, 0, 0 );
-checkScanMatch( {a:{$not:/o/}}, 2, 1 );
-checkScanMatch( {a:{$not:/a/}}, 2, 2 );
-
-checkScanMatch( {$and:[{a:/o/}]}, 1, 1 );
-checkScanMatch( {$and:[{a:/a/}]}, 0, 0 );
-checkScanMatch( {$and:[{a:{$not:/o/}}]}, 2, 1 );
-checkScanMatch( {$and:[{a:{$not:/a/}}]}, 2, 2 );
-checkScanMatch( {$and:[{a:/o/},{a:{$not:/o/}}]}, 1, 0 );
-checkScanMatch( {$and:[{a:/o/},{a:{$not:/a/}}]}, 1, 1 );
-checkScanMatch( {$or:[{a:/o/}]}, 1, 1 );
-checkScanMatch( {$or:[{a:/a/}]}, 0, 0 );
-checkScanMatch( {$nor:[{a:/o/}]}, 2, 1 );
-checkScanMatch( {$nor:[{a:/a/}]}, 2, 2 );
-
-checkScanMatch( {$and:[{$and:[{a:/o/}]}]}, 1, 1 );
-checkScanMatch( {$and:[{$and:[{a:/a/}]}]}, 0, 0 );
-checkScanMatch( {$and:[{$and:[{a:{$not:/o/}}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$and:[{a:{$not:/a/}}]}]}, 2, 2 );
-checkScanMatch( {$and:[{$or:[{a:/o/}]}]}, 1, 1 );
-checkScanMatch( {$and:[{$or:[{a:/a/}]}]}, 0, 0 );
-checkScanMatch( {$or:[{a:{$not:/o/}}]}, 2, 1 );
-checkScanMatch( {$and:[{$or:[{a:{$not:/o/}}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$or:[{a:{$not:/a/}}]}]}, 2, 2 );
-checkScanMatch( {$and:[{$nor:[{a:/o/}]}]}, 2, 1 );
-checkScanMatch( {$and:[{$nor:[{a:/a/}]}]}, 2, 2 );
-
-checkScanMatch( {$where:'this.a==1'}, 2, 1 );
-checkScanMatch( {$and:[{$where:'this.a==1'}]}, 2, 1 );
-
-checkScanMatch( {a:1,$where:'this.a==1'}, 1, 1 );
-checkScanMatch( {a:1,$and:[{$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {$and:[{a:1},{$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {$and:[{a:1,$where:'this.a==1'}]}, 1, 1 );
-checkScanMatch( {a:1,$and:[{a:1},{a:1,$where:'this.a==1'}]}, 1, 1 );
-
-assert.eq( 0, t.find({a:1,$and:[{a:2}]}).itcount() );
-assert.eq( 0, t.find({$and:[{a:1},{a:2}]}).itcount() );
+checkScanMatch({a: /o/}, 1, 1);
+checkScanMatch({a: /a/}, 0, 0);
+checkScanMatch({a: {$not: /o/}}, 2, 1);
+checkScanMatch({a: {$not: /a/}}, 2, 2);
+
+checkScanMatch({$and: [{a: /o/}]}, 1, 1);
+checkScanMatch({$and: [{a: /a/}]}, 0, 0);
+checkScanMatch({$and: [{a: {$not: /o/}}]}, 2, 1);
+checkScanMatch({$and: [{a: {$not: /a/}}]}, 2, 2);
+checkScanMatch({$and: [{a: /o/}, {a: {$not: /o/}}]}, 1, 0);
+checkScanMatch({$and: [{a: /o/}, {a: {$not: /a/}}]}, 1, 1);
+checkScanMatch({$or: [{a: /o/}]}, 1, 1);
+checkScanMatch({$or: [{a: /a/}]}, 0, 0);
+checkScanMatch({$nor: [{a: /o/}]}, 2, 1);
+checkScanMatch({$nor: [{a: /a/}]}, 2, 2);
+
+checkScanMatch({$and: [{$and: [{a: /o/}]}]}, 1, 1);
+checkScanMatch({$and: [{$and: [{a: /a/}]}]}, 0, 0);
+checkScanMatch({$and: [{$and: [{a: {$not: /o/}}]}]}, 2, 1);
+checkScanMatch({$and: [{$and: [{a: {$not: /a/}}]}]}, 2, 2);
+checkScanMatch({$and: [{$or: [{a: /o/}]}]}, 1, 1);
+checkScanMatch({$and: [{$or: [{a: /a/}]}]}, 0, 0);
+checkScanMatch({$or: [{a: {$not: /o/}}]}, 2, 1);
+checkScanMatch({$and: [{$or: [{a: {$not: /o/}}]}]}, 2, 1);
+checkScanMatch({$and: [{$or: [{a: {$not: /a/}}]}]}, 2, 2);
+checkScanMatch({$and: [{$nor: [{a: /o/}]}]}, 2, 1);
+checkScanMatch({$and: [{$nor: [{a: /a/}]}]}, 2, 2);
+
+checkScanMatch({$where: 'this.a==1'}, 2, 1);
+checkScanMatch({$and: [{$where: 'this.a==1'}]}, 2, 1);
+
+checkScanMatch({a: 1, $where: 'this.a==1'}, 1, 1);
+checkScanMatch({a: 1, $and: [{$where: 'this.a==1'}]}, 1, 1);
+checkScanMatch({$and: [{a: 1}, {$where: 'this.a==1'}]}, 1, 1);
+checkScanMatch({$and: [{a: 1, $where: 'this.a==1'}]}, 1, 1);
+checkScanMatch({a: 1, $and: [{a: 1}, {a: 1, $where: 'this.a==1'}]}, 1, 1);
+
+assert.eq(0, t.find({a: 1, $and: [{a: 2}]}).itcount());
+assert.eq(0, t.find({$and: [{a: 1}, {a: 2}]}).itcount());
diff --git a/jstests/core/andor.js b/jstests/core/andor.js
index 73327acde9b..c574ab261a4 100644
--- a/jstests/core/andor.js
+++ b/jstests/core/andor.js
@@ -4,96 +4,96 @@ t = db.jstests_andor;
t.drop();
// not ok
-function ok( q ) {
- assert.eq( 1, t.find( q ).itcount() );
+function ok(q) {
+ assert.eq(1, t.find(q).itcount());
}
-t.save( {a:1} );
+t.save({a: 1});
test = function() {
-
- ok( {a:1} );
-
- ok( {$and:[{a:1}]} );
- ok( {$or:[{a:1}]} );
-
- ok( {$and:[{$and:[{a:1}]}]} );
- ok( {$or:[{$or:[{a:1}]}]} );
-
- ok( {$and:[{$or:[{a:1}]}]} );
- ok( {$or:[{$and:[{a:1}]}]} );
-
- ok( {$and:[{$and:[{$or:[{a:1}]}]}]} );
- ok( {$and:[{$or:[{$and:[{a:1}]}]}]} );
- ok( {$or:[{$and:[{$and:[{a:1}]}]}]} );
-
- ok( {$or:[{$and:[{$or:[{a:1}]}]}]} );
-
+
+ ok({a: 1});
+
+ ok({$and: [{a: 1}]});
+ ok({$or: [{a: 1}]});
+
+ ok({$and: [{$and: [{a: 1}]}]});
+ ok({$or: [{$or: [{a: 1}]}]});
+
+ ok({$and: [{$or: [{a: 1}]}]});
+ ok({$or: [{$and: [{a: 1}]}]});
+
+ ok({$and: [{$and: [{$or: [{a: 1}]}]}]});
+ ok({$and: [{$or: [{$and: [{a: 1}]}]}]});
+ ok({$or: [{$and: [{$and: [{a: 1}]}]}]});
+
+ ok({$or: [{$and: [{$or: [{a: 1}]}]}]});
+
// now test $nor
-
- ok( {$and:[{a:1}]} );
- ok( {$nor:[{a:2}]} );
-
- ok( {$and:[{$and:[{a:1}]}]} );
- ok( {$nor:[{$nor:[{a:1}]}]} );
-
- ok( {$and:[{$nor:[{a:2}]}]} );
- ok( {$nor:[{$and:[{a:2}]}]} );
-
- ok( {$and:[{$and:[{$nor:[{a:2}]}]}]} );
- ok( {$and:[{$nor:[{$and:[{a:2}]}]}]} );
- ok( {$nor:[{$and:[{$and:[{a:2}]}]}]} );
-
- ok( {$nor:[{$and:[{$nor:[{a:1}]}]}]} );
-
+
+ ok({$and: [{a: 1}]});
+ ok({$nor: [{a: 2}]});
+
+ ok({$and: [{$and: [{a: 1}]}]});
+ ok({$nor: [{$nor: [{a: 1}]}]});
+
+ ok({$and: [{$nor: [{a: 2}]}]});
+ ok({$nor: [{$and: [{a: 2}]}]});
+
+ ok({$and: [{$and: [{$nor: [{a: 2}]}]}]});
+ ok({$and: [{$nor: [{$and: [{a: 2}]}]}]});
+ ok({$nor: [{$and: [{$and: [{a: 2}]}]}]});
+
+ ok({$nor: [{$and: [{$nor: [{a: 1}]}]}]});
+
};
test();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
test();
// Test an inequality base match.
test = function() {
-
- ok( {a:{$ne:2}} );
-
- ok( {$and:[{a:{$ne:2}}]} );
- ok( {$or:[{a:{$ne:2}}]} );
-
- ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
- ok( {$or:[{$or:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$or:[{a:{$ne:2}}]}]} );
- ok( {$or:[{$and:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
- ok( {$and:[{$or:[{$and:[{a:{$ne:2}}]}]}]} );
- ok( {$or:[{$and:[{$and:[{a:{$ne:2}}]}]}]} );
-
- ok( {$or:[{$and:[{$or:[{a:{$ne:2}}]}]}]} );
-
+
+ ok({a: {$ne: 2}});
+
+ ok({$and: [{a: {$ne: 2}}]});
+ ok({$or: [{a: {$ne: 2}}]});
+
+ ok({$and: [{$and: [{a: {$ne: 2}}]}]});
+ ok({$or: [{$or: [{a: {$ne: 2}}]}]});
+
+ ok({$and: [{$or: [{a: {$ne: 2}}]}]});
+ ok({$or: [{$and: [{a: {$ne: 2}}]}]});
+
+ ok({$and: [{$and: [{$or: [{a: {$ne: 2}}]}]}]});
+ ok({$and: [{$or: [{$and: [{a: {$ne: 2}}]}]}]});
+ ok({$or: [{$and: [{$and: [{a: {$ne: 2}}]}]}]});
+
+ ok({$or: [{$and: [{$or: [{a: {$ne: 2}}]}]}]});
+
// now test $nor
-
- ok( {$and:[{a:{$ne:2}}]} );
- ok( {$nor:[{a:{$ne:1}}]} );
-
- ok( {$and:[{$and:[{a:{$ne:2}}]}]} );
- ok( {$nor:[{$nor:[{a:{$ne:2}}]}]} );
-
- ok( {$and:[{$nor:[{a:{$ne:1}}]}]} );
- ok( {$nor:[{$and:[{a:{$ne:1}}]}]} );
-
- ok( {$and:[{$and:[{$nor:[{a:{$ne:1}}]}]}]} );
- ok( {$and:[{$nor:[{$and:[{a:{$ne:1}}]}]}]} );
- ok( {$nor:[{$and:[{$and:[{a:{$ne:1}}]}]}]} );
-
- ok( {$nor:[{$and:[{$nor:[{a:{$ne:2}}]}]}]} );
-
+
+ ok({$and: [{a: {$ne: 2}}]});
+ ok({$nor: [{a: {$ne: 1}}]});
+
+ ok({$and: [{$and: [{a: {$ne: 2}}]}]});
+ ok({$nor: [{$nor: [{a: {$ne: 2}}]}]});
+
+ ok({$and: [{$nor: [{a: {$ne: 1}}]}]});
+ ok({$nor: [{$and: [{a: {$ne: 1}}]}]});
+
+ ok({$and: [{$and: [{$nor: [{a: {$ne: 1}}]}]}]});
+ ok({$and: [{$nor: [{$and: [{a: {$ne: 1}}]}]}]});
+ ok({$nor: [{$and: [{$and: [{a: {$ne: 1}}]}]}]});
+
+ ok({$nor: [{$and: [{$nor: [{a: {$ne: 2}}]}]}]});
+
};
t.drop();
-t.save( {a:1} );
+t.save({a: 1});
test();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
test();
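The andor.js hunks above are whitespace-only reformatting; the assertions themselves rest on the fact that wrapping a predicate in a single-clause $and, $or, or negated $nor does not change which documents match. A minimal shell sketch of that equivalence, using an illustrative collection name that is not part of this patch:
// Assumes a collection holding exactly one document {a: 1}.
var demo = db.andor_equivalence_demo;
demo.drop();
demo.save({a: 1});
assert.eq(1, demo.find({a: 1}).itcount());            // plain predicate
assert.eq(1, demo.find({$and: [{a: 1}]}).itcount());  // single-clause $and is a no-op wrapper
assert.eq(1, demo.find({$or: [{a: 1}]}).itcount());   // likewise for $or
assert.eq(1, demo.find({$nor: [{a: 2}]}).itcount());  // $nor matches when no clause matches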
diff --git a/jstests/core/apitest_db.js b/jstests/core/apitest_db.js
index f2271649016..688baa8dd4f 100644
--- a/jstests/core/apitest_db.js
+++ b/jstests/core/apitest_db.js
@@ -2,40 +2,44 @@
* Tests for the db object enhancement
*/
-assert( "test" == db, "wrong database currently not test" );
+assert("test" == db, "wrong database currently not test");
-dd = function( x ){
- //print( x );
+dd = function(x) {
+ // print( x );
};
-dd( "a" );
+dd("a");
-
-dd( "b" );
+dd("b");
/*
* be sure the public collection API is complete
*/
-assert(db.createCollection , "createCollection" );
-assert(db.getProfilingLevel , "getProfilingLevel" );
-assert(db.setProfilingLevel , "setProfilingLevel" );
-assert(db.dbEval , "dbEval" );
-assert(db.group , "group" );
+assert(db.createCollection, "createCollection");
+assert(db.getProfilingLevel, "getProfilingLevel");
+assert(db.setProfilingLevel, "setProfilingLevel");
+assert(db.dbEval, "dbEval");
+assert(db.group, "group");
-dd( "c" );
+dd("c");
/*
* test createCollection
*/
-db.getCollection( "test" ).drop();
-db.getCollectionNames().forEach( function(x) { assert(x != "test"); });
+db.getCollection("test").drop();
+db.getCollectionNames().forEach(function(x) {
+ assert(x != "test");
+});
-dd( "d" );
+dd("d");
db.createCollection("test");
var found = false;
-db.getCollectionNames().forEach( function(x) { if (x == "test") found = true; });
+db.getCollectionNames().forEach(function(x) {
+ if (x == "test")
+ found = true;
+});
assert(found, "found test.test in collection infos");
// storageEngine in collection options must:
@@ -62,37 +66,41 @@ assert.commandWorked(db.createCollection('test', {storageEngine: validStorageEng
var collectionInfos = db.getCollectionInfos({name: 'test'});
assert.eq(1, collectionInfos.length, "'test' collection not created");
assert.eq('test', collectionInfos[0].name, "'test' collection not created");
-assert.docEq(validStorageEngineOptions, collectionInfos[0].options.storageEngine,
+assert.docEq(validStorageEngineOptions,
+ collectionInfos[0].options.storageEngine,
'storage engine options not found in listCommands result');
// The indexOptionDefaults must be a document that contains only a storageEngine field.
db.idxOptions.drop();
assert.commandFailed(db.createCollection('idxOptions', {indexOptionDefaults: 'not a document'}));
-assert.commandFailed(db.createCollection('idxOptions', {
- indexOptionDefaults: {unknownOption: true}
-}), 'created a collection with an unknown option to indexOptionDefaults');
+assert.commandFailed(db.createCollection('idxOptions',
+ {indexOptionDefaults: {unknownOption: true}}),
+ 'created a collection with an unknown option to indexOptionDefaults');
assert.commandWorked(db.createCollection('idxOptions', {indexOptionDefaults: {}}),
'should have been able to specify an empty object for indexOptionDefaults');
assert(db.idxOptions.drop());
-assert.commandWorked(db.createCollection('idxOptions', {
- indexOptionDefaults: {storageEngine: {}}
-}), 'should have been able to configure zero storage engines');
+assert.commandWorked(db.createCollection('idxOptions', {indexOptionDefaults: {storageEngine: {}}}),
+ 'should have been able to configure zero storage engines');
assert(db.idxOptions.drop());
// The storageEngine subdocument must configure only registered storage engines.
-assert.commandFailed(db.createCollection('idxOptions', {
- indexOptionDefaults: {storageEngine: {unknownStorageEngine: {}}}
-}), 'configured an unregistered storage engine');
+assert.commandFailed(
+ db.createCollection('idxOptions',
+ {indexOptionDefaults: {storageEngine: {unknownStorageEngine: {}}}}),
+ 'configured an unregistered storage engine');
// The storageEngine subdocument must contain valid storage engine options.
-assert.commandFailed(db.createCollection('idxOptions', {
- indexOptionDefaults: {storageEngine: invalidStorageEngineOptions}
-}), 'configured a storage engine with invalid options');
+assert.commandFailed(
+ db.createCollection('idxOptions',
+ {indexOptionDefaults: {storageEngine: invalidStorageEngineOptions}}),
+ 'configured a storage engine with invalid options');
// Tests that a non-active storage engine can be configured so long as it is registered.
if (db.serverBuildInfo().bits === 64) {
// wiredTiger is not a registered storage engine on 32-bit systems.
- var indexOptions = {storageEngine: {}};
+ var indexOptions = {
+ storageEngine: {}
+ };
if (storageEngineName === 'wiredTiger') {
indexOptions.storageEngine.mmapv1 = {};
} else {
@@ -104,9 +112,8 @@ if (db.serverBuildInfo().bits === 64) {
}
// Tests that the indexOptionDefaults are retrievable from the collection options.
-assert.commandWorked(db.createCollection('idxOptions', {
- indexOptionDefaults: {storageEngine: validStorageEngineOptions}
-}));
+assert.commandWorked(db.createCollection(
+ 'idxOptions', {indexOptionDefaults: {storageEngine: validStorageEngineOptions}}));
var collectionInfos = db.getCollectionInfos({name: 'idxOptions'});
assert.eq(1, collectionInfos.length, "'idxOptions' collection not created");
@@ -114,12 +121,12 @@ assert.docEq({storageEngine: validStorageEngineOptions},
collectionInfos[0].options.indexOptionDefaults,
'indexOptionDefaults were not applied: ' + tojson(collectionInfos));
-dd( "e" );
+dd("e");
/*
* profile level
- */
-
+ */
+
db.setProfilingLevel(0);
assert(db.getProfilingLevel() == 0, "prof level 0");
@@ -132,22 +139,18 @@ assert(db.getProfilingLevel() == 2, "p2");
db.setProfilingLevel(0);
assert(db.getProfilingLevel() == 0, "prof level 0");
-dd( "f" );
+dd("f");
asserted = false;
try {
db.setProfilingLevel(10);
assert(false);
-}
-catch (e) {
+} catch (e) {
asserted = true;
assert(e.dbSetProfilingException);
}
-assert( asserted, "should have asserted" );
-
-dd( "g" );
-
-
+assert(asserted, "should have asserted");
-assert.eq( "foo" , db.getSisterDB( "foo" ).getName() );
-assert.eq( "foo" , db.getSiblingDB( "foo" ).getName() );
+dd("g");
+assert.eq("foo", db.getSisterDB("foo").getName());
+assert.eq("foo", db.getSiblingDB("foo").getName());
diff --git a/jstests/core/apitest_dbcollection.js b/jstests/core/apitest_dbcollection.js
index d60778363b8..d542ad6d7e1 100644
--- a/jstests/core/apitest_dbcollection.js
+++ b/jstests/core/apitest_dbcollection.js
@@ -2,47 +2,45 @@
* Tests for the db collection
*/
-
-
/*
* test drop
*/
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).find().length(), "1");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").find().length(), "1");
-db.getCollection( "test_db" ).save({a:1});
-assert.eq(1, db.getCollection( "test_db" ).find().length(), "2");
+db.getCollection("test_db").save({a: 1});
+assert.eq(1, db.getCollection("test_db").find().length(), "2");
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).find().length(), "3");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").find().length(), "3");
/*
* test count
*/
-assert.eq(0, db.getCollection( "test_db" ).count(), "4");
-db.getCollection( "test_db" ).save({a:1});
-assert.eq(1, db.getCollection( "test_db" ).count(), "5");
+assert.eq(0, db.getCollection("test_db").count(), "4");
+db.getCollection("test_db").save({a: 1});
+assert.eq(1, db.getCollection("test_db").count(), "5");
for (i = 0; i < 100; i++) {
- db.getCollection( "test_db" ).save({a:1});
+ db.getCollection("test_db").save({a: 1});
}
-assert.eq(101, db.getCollection( "test_db" ).count(), "6");
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).count(), "7");
+assert.eq(101, db.getCollection("test_db").count(), "6");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").count(), "7");
- /*
- * test validate
- */
+/*
+ * test validate
+ */
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).count(), "8");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").count(), "8");
for (i = 0; i < 100; i++) {
- db.getCollection( "test_db" ).save({a:1});
+ db.getCollection("test_db").save({a: 1});
}
(function() {
- var validateResult = assert.commandWorked(db.getCollection( "test_db" ).validate());
+ var validateResult = assert.commandWorked(db.getCollection("test_db").validate());
// Extract validation results from mongos output if running in a sharded context.
if (jsTest.isMongos(db.getMongo())) {
// Sample mongos format:
@@ -73,7 +71,8 @@ for (i = 0; i < 100; i++) {
validateResult = result;
}
- assert.eq('test.test_db', validateResult.ns,
+ assert.eq('test.test_db',
+ validateResult.ns,
'incorrect namespace in db.collection.validate() result: ' + tojson(validateResult));
assert(validateResult.valid, 'collection validation failed');
assert.eq(100, validateResult.nrecords, 11);
@@ -83,82 +82,83 @@ for (i = 0; i < 100; i++) {
* test deleteIndex, deleteIndexes
*/
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).count(), "12");
-db.getCollection( "test_db" ).dropIndexes();
-assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "13");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").count(), "12");
+db.getCollection("test_db").dropIndexes();
+assert.eq(0, db.getCollection("test_db").getIndexes().length, "13");
-db.getCollection( "test_db" ).save({a:10});
-assert.eq(1, db.getCollection( "test_db" ).getIndexes().length, "14");
+db.getCollection("test_db").save({a: 10});
+assert.eq(1, db.getCollection("test_db").getIndexes().length, "14");
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).save({a:10});
+db.getCollection("test_db").ensureIndex({a: 1});
+db.getCollection("test_db").save({a: 10});
-print( tojson( db.getCollection( "test_db" ).getIndexes() ) );
-assert.eq(2, db.getCollection( "test_db" ).getIndexes().length, "15");
+print(tojson(db.getCollection("test_db").getIndexes()));
+assert.eq(2, db.getCollection("test_db").getIndexes().length, "15");
-db.getCollection( "test_db" ).dropIndex({a:1});
-assert.eq(1, db.getCollection( "test_db" ).getIndexes().length, "16");
+db.getCollection("test_db").dropIndex({a: 1});
+assert.eq(1, db.getCollection("test_db").getIndexes().length, "16");
-db.getCollection( "test_db" ).save({a:10});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).save({a:10});
+db.getCollection("test_db").save({a: 10});
+db.getCollection("test_db").ensureIndex({a: 1});
+db.getCollection("test_db").save({a: 10});
-assert.eq(2, db.getCollection( "test_db" ).getIndexes().length, "17");
+assert.eq(2, db.getCollection("test_db").getIndexes().length, "17");
-db.getCollection( "test_db" ).dropIndex("a_1");
-assert.eq(1, db.getCollection( "test_db" ).getIndexes().length, "18");
+db.getCollection("test_db").dropIndex("a_1");
+assert.eq(1, db.getCollection("test_db").getIndexes().length, "18");
-db.getCollection( "test_db" ).save({a:10, b:11});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).ensureIndex({b:1});
-db.getCollection( "test_db" ).save({a:10, b:12});
+db.getCollection("test_db").save({a: 10, b: 11});
+db.getCollection("test_db").ensureIndex({a: 1});
+db.getCollection("test_db").ensureIndex({b: 1});
+db.getCollection("test_db").save({a: 10, b: 12});
-assert.eq(3, db.getCollection( "test_db" ).getIndexes().length, "19");
+assert.eq(3, db.getCollection("test_db").getIndexes().length, "19");
-db.getCollection( "test_db" ).dropIndex({b:1});
-assert.eq(2, db.getCollection( "test_db" ).getIndexes().length, "20");
-db.getCollection( "test_db" ).dropIndex({a:1});
-assert.eq(1, db.getCollection( "test_db" ).getIndexes().length, "21");
+db.getCollection("test_db").dropIndex({b: 1});
+assert.eq(2, db.getCollection("test_db").getIndexes().length, "20");
+db.getCollection("test_db").dropIndex({a: 1});
+assert.eq(1, db.getCollection("test_db").getIndexes().length, "21");
-db.getCollection( "test_db" ).save({a:10, b:11});
-db.getCollection( "test_db" ).ensureIndex({a:1});
-db.getCollection( "test_db" ).ensureIndex({b:1});
-db.getCollection( "test_db" ).save({a:10, b:12});
+db.getCollection("test_db").save({a: 10, b: 11});
+db.getCollection("test_db").ensureIndex({a: 1});
+db.getCollection("test_db").ensureIndex({b: 1});
+db.getCollection("test_db").save({a: 10, b: 12});
-assert.eq(3, db.getCollection( "test_db" ).getIndexes().length, "22");
+assert.eq(3, db.getCollection("test_db").getIndexes().length, "22");
-db.getCollection( "test_db" ).dropIndexes();
-assert.eq(1, db.getCollection( "test_db" ).getIndexes().length, "23");
+db.getCollection("test_db").dropIndexes();
+assert.eq(1, db.getCollection("test_db").getIndexes().length, "23");
-db.getCollection( "test_db" ).find();
+db.getCollection("test_db").find();
-db.getCollection( "test_db" ).drop();
-assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "24");
+db.getCollection("test_db").drop();
+assert.eq(0, db.getCollection("test_db").getIndexes().length, "24");
/*
* stats()
*/
- (function() {
+(function() {
var t = db.apttest_dbcollection;
// Non-existent collection.
t.drop();
- assert.commandFailed(t.stats(),
- 'db.collection.stats() should fail on non-existent collection');
+ assert.commandFailed(t.stats(), 'db.collection.stats() should fail on non-existent collection');
// scale - passed to stats() as sole numerical argument or part of an options object.
t.drop();
- assert.commandWorked(db.createCollection(t.getName(), {capped: true, size: 10*1024*1024}));
- var collectionStats = assert.commandWorked(t.stats(1024*1024));
- assert.eq(10, collectionStats.maxSize,
+ assert.commandWorked(db.createCollection(t.getName(), {capped: true, size: 10 * 1024 * 1024}));
+ var collectionStats = assert.commandWorked(t.stats(1024 * 1024));
+ assert.eq(10,
+ collectionStats.maxSize,
'db.collection.stats(scale) - capped collection size scaled incorrectly: ' +
- tojson(collectionStats));
- var collectionStats = assert.commandWorked(t.stats({scale: 1024*1024}));
- assert.eq(10, collectionStats.maxSize,
+ tojson(collectionStats));
+ var collectionStats = assert.commandWorked(t.stats({scale: 1024 * 1024}));
+ assert.eq(10,
+ collectionStats.maxSize,
'db.collection.stats({scale: N}) - capped collection size scaled incorrectly: ' +
- tojson(collectionStats));
+ tojson(collectionStats));
// indexDetails - If true, includes 'indexDetails' field in results. Default: false.
t.drop();
@@ -167,47 +167,54 @@ assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "24");
collectionStats = assert.commandWorked(t.stats());
assert(!collectionStats.hasOwnProperty('indexDetails'),
'unexpected indexDetails found in db.collection.stats() result: ' +
- tojson(collectionStats));
+ tojson(collectionStats));
collectionStats = assert.commandWorked(t.stats({indexDetails: false}));
assert(!collectionStats.hasOwnProperty('indexDetails'),
'unexpected indexDetails found in db.collection.stats({indexDetails: true}) result: ' +
- tojson(collectionStats));
+ tojson(collectionStats));
collectionStats = assert.commandWorked(t.stats({indexDetails: true}));
assert(collectionStats.hasOwnProperty('indexDetails'),
'indexDetails missing from db.collection.stats({indexDetails: true}) result: ' +
- tojson(collectionStats));
+ tojson(collectionStats));
// Returns index name.
function getIndexName(indexKey) {
var indexes = t.getIndexes().filter(function(doc) {
return friendlyEqual(doc.key, indexKey);
});
- assert.eq(1, indexes.length, tojson(indexKey) + ' not found in getIndexes() result: ' +
- tojson(t.getIndexes()));
+ assert.eq(
+ 1,
+ indexes.length,
+ tojson(indexKey) + ' not found in getIndexes() result: ' + tojson(t.getIndexes()));
return indexes[0].name;
}
function checkIndexDetails(options, indexName) {
var collectionStats = assert.commandWorked(t.stats(options));
assert(collectionStats.hasOwnProperty('indexDetails'),
- 'indexDetails missing from ' + 'db.collection.stats(' + tojson(options) +
- ') result: ' + tojson(collectionStats));
+ 'indexDetails missing from ' +
+ 'db.collection.stats(' + tojson(options) + ') result: ' +
+ tojson(collectionStats));
// Currently, indexDetails is only supported with WiredTiger.
var storageEngine = jsTest.options().storageEngine;
if (storageEngine && storageEngine !== 'wiredTiger') {
return;
}
- assert.eq(1, Object.keys(collectionStats.indexDetails).length,
+ assert.eq(1,
+ Object.keys(collectionStats.indexDetails).length,
'indexDetails must have exactly one entry');
assert(collectionStats.indexDetails[indexName],
indexName + ' missing from indexDetails: ' + tojson(collectionStats.indexDetails));
- assert.neq(0, Object.keys(collectionStats.indexDetails[indexName]).length,
+ assert.neq(0,
+ Object.keys(collectionStats.indexDetails[indexName]).length,
indexName + ' exists in indexDetails but contains no information: ' +
- tojson(collectionStats));
+ tojson(collectionStats));
}
// indexDetailsKey - show indexDetails results for this index key only.
- var indexKey = {a: 1};
+ var indexKey = {
+ a: 1
+ };
var indexName = getIndexName(indexKey);
checkIndexDetails({indexDetails: true, indexDetailsKey: indexKey}, indexName);
@@ -218,12 +225,13 @@ assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "24");
var error = assert.throws(function() {
t.stats({indexDetails: true, indexDetailsKey: indexKey, indexDetailsName: indexName});
}, null, 'indexDetailsKey and indexDetailsName cannot be used at the same time');
- assert.eq(Error, error.constructor,
+ assert.eq(Error,
+ error.constructor,
'db.collection.stats() failed when both indexDetailsKey and indexDetailsName ' +
- 'are used but with incorrect error type');
+ 'are used but with incorrect error type');
t.drop();
- }());
+}());
/*
* test db.collection.totalSize()
@@ -236,19 +244,24 @@ assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "24");
t.drop();
var failedStats = assert.commandFailed(t.stats());
assert.eq(failedStats.storageSize, t.storageSize());
- assert.eq(undefined, t.storageSize(),
+ assert.eq(undefined,
+ t.storageSize(),
'db.collection.storageSize() on empty collection should return undefined');
assert.eq(failedStats.totalIndexSize, t.totalIndexSize());
- assert.eq(undefined, t.totalIndexSize(),
+ assert.eq(undefined,
+ t.totalIndexSize(),
'db.collection.totalIndexSize() on empty collection should return undefined');
- assert.eq(undefined, t.totalSize(),
+ assert.eq(undefined,
+ t.totalSize(),
'db.collection.totalSize() on empty collection should return undefined');
t.save({a: 1});
var stats = assert.commandWorked(t.stats());
- assert.neq(undefined, t.storageSize(),
+ assert.neq(undefined,
+ t.storageSize(),
'db.collection.storageSize() cannot be undefined on a non-empty collection');
- assert.neq(undefined, t.totalIndexSize(),
+ assert.neq(undefined,
+ t.totalIndexSize(),
'db.collection.totalIndexSize() cannot be undefined on a non-empty collection');
if (db.isMaster().msg !== 'isdbgrid' && db.serverStatus().storageEngine.name === 'mmapv1') {
@@ -256,7 +269,8 @@ assert.eq(0, db.getCollection( "test_db" ).getIndexes().length, "24");
// collection.
assert.eq(stats.storageSize, t.storageSize());
assert.eq(stats.totalIndexSize, t.totalIndexSize());
- assert.eq(t.storageSize() + t.totalIndexSize(), t.totalSize(),
+ assert.eq(t.storageSize() + t.totalIndexSize(),
+ t.totalSize(),
'incorrect db.collection.totalSize() on a non-empty collection');
}
diff --git a/jstests/core/apply_ops1.js b/jstests/core/apply_ops1.js
index 6129c202d59..8a19caa9f23 100644
--- a/jstests/core/apply_ops1.js
+++ b/jstests/core/apply_ops1.js
@@ -57,9 +57,8 @@
// Empty 'ns' field value in operation type other than 'n'.
assert.commandFailed(
- db.adminCommand({applyOps: [{op: 'c', ns: ''}]}),
- 'applyOps should fail on non-"n" operation type with empty "ns" field value'
- );
+ db.adminCommand({applyOps: [{op: 'c', ns: ''}]}),
+ 'applyOps should fail on non-"n" operation type with empty "ns" field value');
// Missing 'o' field value in an operation of type 'i' on 'system.indexes' collection.
assert.commandFailedWithCode(
@@ -75,76 +74,100 @@
// Missing 'ns' field in index spec.
assert.commandFailedWithCode(
- db.adminCommand({applyOps: [{op: 'i', ns: db.getName() + '.system.indexes', o: {
- key: {a: 1},
- name: 'a_1',
- }}]}),
+ db.adminCommand({
+ applyOps: [{
+ op: 'i',
+ ns: db.getName() + '.system.indexes',
+ o: {
+ key: {a: 1},
+ name: 'a_1',
+ }
+ }]
+ }),
ErrorCodes.NoSuchKey,
'applyOps should fail on system.indexes insert operation with missing index namespace');
// Non-string 'ns' field in index spec.
assert.commandFailedWithCode(
- db.adminCommand({applyOps: [{op: 'i', ns: db.getName() + '.system.indexes', o: {
- ns: 12345,
- key: {a: 1},
- name: 'a_1',
- }}]}),
+ db.adminCommand({
+ applyOps: [{
+ op: 'i',
+ ns: db.getName() + '.system.indexes',
+ o: {
+ ns: 12345,
+ key: {a: 1},
+ name: 'a_1',
+ }
+ }]
+ }),
ErrorCodes.TypeMismatch,
'applyOps should fail on system.indexes insert operation with non-string index namespace');
// Invalid 'ns' field in index spec.
assert.commandFailedWithCode(
- db.adminCommand({applyOps: [{op: 'i', ns: db.getName() + '.system.indexes', o: {
- ns: 'invalid_namespace',
- key: {a: 1},
- name: 'a_1',
- }}]}),
+ db.adminCommand({
+ applyOps: [{
+ op: 'i',
+ ns: db.getName() + '.system.indexes',
+ o: {
+ ns: 'invalid_namespace',
+ key: {a: 1},
+ name: 'a_1',
+ }
+ }]
+ }),
ErrorCodes.InvalidNamespace,
'applyOps should fail on system.indexes insert operation with invalid index namespace');
// Inconsistent database name in index spec namespace.
assert.commandFailedWithCode(
- db.adminCommand({applyOps: [{op: 'i', ns: db.getName() + '.system.indexes', o: {
- ns: 'baddbprefix' + t.getFullName(),
- key: {a: 1},
- name: 'a_1',
- }}]}),
+ db.adminCommand({
+ applyOps: [{
+ op: 'i',
+ ns: db.getName() + '.system.indexes',
+ o: {
+ ns: 'baddbprefix' + t.getFullName(),
+ key: {a: 1},
+ name: 'a_1',
+ }
+ }]
+ }),
ErrorCodes.InvalidNamespace,
'applyOps should fail on system.indexes insert operation with index namespace containing ' +
- 'inconsistent database name');
+ 'inconsistent database name');
// Valid 'ns' field value in unknown operation type 'x'.
assert.commandFailed(
- db.adminCommand({applyOps: [{op: 'x', ns: t.getFullName()}]}),
- 'applyOps should fail on unknown operation type "x" with valid "ns" value'
- );
-
- assert.eq(0, t.find().count() , "Non-zero amount of documents in collection to start");
- assert.commandFailed(db.adminCommand(
- {applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]}
- ),
+ db.adminCommand({applyOps: [{op: 'x', ns: t.getFullName()}]}),
+ 'applyOps should fail on unknown operation type "x" with valid "ns" value');
+
+ assert.eq(0, t.find().count(), "Non-zero amount of documents in collection to start");
+ assert.commandFailed(
+ db.adminCommand({applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]}),
"Applying an insert operation on a non-existent collection should fail");
assert.commandWorked(db.createCollection(t.getName()));
- var a = db.adminCommand(
- {applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]}
- );
- assert.eq(1, t.find().count() , "Valid insert failed");
+ var a = db.adminCommand({applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]});
+ assert.eq(1, t.find().count(), "Valid insert failed");
assert.eq(true, a.results[0], "Bad result value for valid insert");
- a = assert.commandWorked(db.adminCommand(
- {applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]}
- ));
- assert.eq(1, t.find().count() , "Duplicate insert failed");
+ a = assert.commandWorked(
+ db.adminCommand({applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 17}}]}));
+ assert.eq(1, t.find().count(), "Duplicate insert failed");
assert.eq(true, a.results[0], "Bad result value for duplicate insert");
- var o = {_id: 5, x: 17};
- assert.eq(o , t.findOne() , "Mismatching document inserted.");
+ var o = {
+ _id: 5,
+ x: 17
+ };
+ assert.eq(o, t.findOne(), "Mismatching document inserted.");
- var res = db.runCommand({applyOps: [
- {op: "u", ns: t.getFullName(), o2: { _id : 5 }, o: {$inc: {x: 1}}},
- {op: "u", ns: t.getFullName(), o2: { _id : 5 }, o: {$inc: {x: 1}}}
- ]});
+ var res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}
+ ]
+ });
o.x++;
o.x++;
@@ -154,14 +177,14 @@
assert.eq(true, res.results[0], "Bad result value for valid update");
assert.eq(true, res.results[1], "Bad result value for valid update");
- //preCondition fully matches
- res = db.runCommand({applyOps:
- [
- {op: "u", ns: t.getFullName(), o2: {_id : 5}, o: {$inc: {x :1}}},
- {op: "u", ns: t.getFullName(), o2: {_id : 5}, o: {$inc: {x :1}}}
- ],
- preCondition: [{ns : t.getFullName(), q: {_id: 5}, res:{x: 19}}]
- });
+ // preCondition fully matches
+ res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}
+ ],
+ preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
+ });
o.x++;
o.x++;
@@ -171,68 +194,78 @@
assert.eq(true, res.results[0], "Bad result value for valid update");
assert.eq(true, res.results[1], "Bad result value for valid update");
- //preCondition doesn't match ns
- res = db.runCommand({applyOps:
- [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}
- ],
- preCondition: [{ns: "foo.otherName", q: {_id: 5}, res: {x: 21}}]
- });
+ // preCondition doesn't match ns
+ res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}
+ ],
+ preCondition: [{ns: "foo.otherName", q: {_id: 5}, res: {x: 21}}]
+ });
assert.eq(o, t.findOne(), "preCondition didn't match, but ops were still applied");
- //preCondition doesn't match query
- res = db.runCommand({applyOps:
- [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x : 1}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x : 1}}}
- ],
- preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
- });
+ // preCondition doesn't match query
+ res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}
+ ],
+ preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
+ });
assert.eq(o, t.findOne(), "preCondition didn't match, but ops were still applied");
- res = db.runCommand({applyOps:
- [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x : 1}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 6}, o: {$inc: {x : 1}}}
- ]
- });
+ res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 6}, o: {$inc: {x: 1}}}
+ ]
+ });
assert.eq(true, res.results[0], "Valid update failed");
assert.eq(true, res.results[1], "Valid update failed");
// Foreground index build.
res = assert.commandWorked(db.adminCommand({
- applyOps: [{"op": "i", "ns": db.getName() + ".system.indexes", "o": {
- ns: t.getFullName(),
- key: {a: 1},
- name: "a_1",
- }
- }]}));
+ applyOps: [{
+ "op": "i",
+ "ns": db.getName() + ".system.indexes",
+ "o": {
+ ns: t.getFullName(),
+ key: {a: 1},
+ name: "a_1",
+ }
+ }]
+ }));
assert.eq(1, res.applied, "Incorrect number of operations applied");
assert.eq(true, res.results[0], "Foreground index creation failed");
res = t.getIndexes();
- assert.eq(
- 1,
- res.filter(function(element, index, array) {return element.name == 'a_1';}).length,
- 'Foreground index not found in listIndexes result: ' + tojson(res));
+ assert.eq(1,
+ res.filter(function(element, index, array) {
+ return element.name == 'a_1';
+ }).length,
+ 'Foreground index not found in listIndexes result: ' + tojson(res));
// Background indexes are created in the foreground when processed by applyOps.
res = assert.commandWorked(db.adminCommand({
- applyOps: [{"op": "i", "ns": db.getName() + ".system.indexes", "o": {
- ns: t.getFullName(),
- key: {b: 1},
- name: "b_1",
- background: true,
- }
- }]}));
+ applyOps: [{
+ "op": "i",
+ "ns": db.getName() + ".system.indexes",
+ "o": {
+ ns: t.getFullName(),
+ key: {b: 1},
+ name: "b_1",
+ background: true,
+ }
+ }]
+ }));
assert.eq(1, res.applied, "Incorrect number of operations applied");
assert.eq(true, res.results[0], "Background index creation failed");
res = t.getIndexes();
- assert.eq(
- 1,
- res.filter(function(element, index, array) {return element.name == 'b_1';}).length,
- 'Background index not found in listIndexes result: ' + tojson(res));
+ assert.eq(1,
+ res.filter(function(element, index, array) {
+ return element.name == 'b_1';
+ }).length,
+ 'Background index not found in listIndexes result: ' + tojson(res));
})();
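The apply_ops1.js hunks above only re-wrap the command documents across lines; the shape of an applyOps call guarded by a preCondition is unchanged. A minimal sketch, assuming a collection t whose current document is {_id: 5, x: 19} (the names mirror the test but are illustrative here):
// The listed ops are applied only if every preCondition query returns the stated result.
var res = db.runCommand({
    applyOps: [{op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$inc: {x: 1}}}],
    preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
});
assert.eq(true, res.results[0]);  // applied because the precondition held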
diff --git a/jstests/core/apply_ops2.js b/jstests/core/apply_ops2.js
index 1a5923c3465..bf804214846 100644
--- a/jstests/core/apply_ops2.js
+++ b/jstests/core/apply_ops2.js
@@ -1,70 +1,56 @@
-//Test applyops upsert flag SERVER-7452
+// Test applyOps upsert flag SERVER-7452
var t = db.apply_ops2;
t.drop();
assert.eq(0, t.find().count(), "test collection not empty");
-t.insert({_id:1, x:"init"});
+t.insert({_id: 1, x: "init"});
-//alwaysUpsert = true
+// alwaysUpsert = true
print("Testing applyOps with alwaysUpsert = true");
-var res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2 : { _id: 1 },
- o: { $set: { x: "upsert=true existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 2 },
- o: { $set : { x: "upsert=true non-existing" }}
- }], alwaysUpsert: true });
+var res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=true existing"}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 2}, o: {$set: {x: "upsert=true non-existing"}}}
+ ],
+ alwaysUpsert: true
+});
assert.eq(true, res.results[0], "upsert = true, existing doc update failed");
assert.eq(true, res.results[1], "upsert = true, nonexisting doc not upserted");
assert.eq(2, t.find().count(), "2 docs expected after upsert");
-//alwaysUpsert = false
+// alwaysUpsert = false
print("Testing applyOps with alwaysUpsert = false");
-res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 1 },
- o: { $set : { x: "upsert=false existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 3 },
- o: { $set: { x: "upsert=false non-existing" }}
- }], alwaysUpsert: false });
+res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=false existing"}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 3}, o: {$set: {x: "upsert=false non-existing"}}}
+ ],
+ alwaysUpsert: false
+});
assert.eq(true, res.results[0], "upsert = false, existing doc update failed");
assert.eq(false, res.results[1], "upsert = false, nonexisting doc upserted");
assert.eq(2, t.find().count(), "2 docs expected after upsert failure");
-//alwaysUpsert not specified, should default to true
+// alwaysUpsert not specified, should default to true
print("Testing applyOps with default alwaysUpsert");
-res = db.runCommand({ applyOps: [
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 1 },
- o: { $set: { x: "upsert=default existing" }}
- },
- {
- op: "u",
- ns: t.getFullName(),
- o2: { _id: 4 },
- o: { $set: { x: "upsert=defaults non-existing" }}
- }]});
+res = db.runCommand({
+ applyOps: [
+ {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=default existing"}}},
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 4},
+ o: {$set: {x: "upsert=defaults non-existing"}}
+ }
+ ]
+});
assert.eq(true, res.results[0], "default upsert, existing doc update failed");
assert.eq(true, res.results[1], "default upsert, nonexisting doc not upserted");
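As the apply_ops2.js assertions above show, alwaysUpsert defaults to true; setting it to false makes an update against a missing _id report false instead of inserting. A minimal sketch, again assuming the test's collection t holds {_id: 1} but no {_id: 3}:
var res = db.runCommand({
    applyOps: [{op: "u", ns: t.getFullName(), o2: {_id: 3}, o: {$set: {x: "missing"}}}],
    alwaysUpsert: false
});
assert.eq(false, res.results[0]);   // not upserted, so the op reports failure
assert.eq(null, t.findOne({_id: 3}));  // and no document was created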
diff --git a/jstests/core/apply_ops_dups.js b/jstests/core/apply_ops_dups.js
index 9d8dfb8dc0f..bdca02a605c 100644
--- a/jstests/core/apply_ops_dups.js
+++ b/jstests/core/apply_ops_dups.js
@@ -4,23 +4,28 @@
t.drop();
// Check that duplicate _id fields don't cause an error
- assert.writeOK(t.insert({_id:0, x:1}));
- assert.commandWorked(t.createIndex({x:1}, {unique:true}));
- var a = assert.commandWorked(db.adminCommand(
- {applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: -1}},
- {"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 0}}]}
- ));
+ assert.writeOK(t.insert({_id: 0, x: 1}));
+ assert.commandWorked(t.createIndex({x: 1}, {unique: true}));
+ var a = assert.commandWorked(db.adminCommand({
+ applyOps: [
+ {"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: -1}},
+ {"op": "i", "ns": t.getFullName(), "o": {_id: 5, x: 0}}
+ ]
+ }));
printjson(a);
printjson(t.find().toArray());
- assert.eq(2, t.find().count() , "Invalid insert worked");
+ assert.eq(2, t.find().count(), "Invalid insert worked");
assert.eq(true, a.results[0], "Valid insert was rejected");
    assert.eq(true, a.results[1], "Insert should not have failed (but should be ignored)");
printjson(t.find().toArray());
// Check that dups on non-id cause errors
- var a = assert.commandFailedWithCode(db.adminCommand(
- {applyOps: [{"op": "i", "ns": t.getFullName(), "o": {_id: 1, x: 0}},
- {"op": "i", "ns": t.getFullName(), "o": {_id: 2, x: 1}}]}
- ), 11000 /*DuplicateKey*/);
- assert.eq(2, t.find().count() , "Invalid insert worked");
+ var a = assert.commandFailedWithCode(db.adminCommand({
+ applyOps: [
+ {"op": "i", "ns": t.getFullName(), "o": {_id: 1, x: 0}},
+ {"op": "i", "ns": t.getFullName(), "o": {_id: 2, x: 1}}
+ ]
+ }),
+ 11000 /*DuplicateKey*/);
+ assert.eq(2, t.find().count(), "Invalid insert worked");
})();
diff --git a/jstests/core/array1.js b/jstests/core/array1.js
index 3a27feb028a..8d6be81ca1f 100644
--- a/jstests/core/array1.js
+++ b/jstests/core/array1.js
@@ -1,14 +1,16 @@
t = db.array1;
t.drop();
-x = { a : [ 1 , 2 ] };
+x = {
+ a: [1, 2]
+};
-t.save( { a : [ [1,2] ] } );
-assert.eq( 1 , t.find( x ).count() , "A" );
+t.save({a: [[1, 2]]});
+assert.eq(1, t.find(x).count(), "A");
-t.save( x );
+t.save(x);
delete x._id;
-assert.eq( 2 , t.find( x ).count() , "B" );
+assert.eq(2, t.find(x).count(), "B");
-t.ensureIndex( { a : 1 } );
-assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146
+t.ensureIndex({a: 1});
+assert.eq(2, t.find(x).count(), "C"); // TODO SERVER-146
diff --git a/jstests/core/array3.js b/jstests/core/array3.js
index 8b024bd3a0c..42acdfb6d3e 100644
--- a/jstests/core/array3.js
+++ b/jstests/core/array3.js
@@ -1,8 +1,7 @@
-assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" );
-assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" );
-
-arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ];
-assert.eq( 5 , Array.avg( arr ) , "C" );
-assert.eq( 2 , Array.stdDev( arr ) , "D" );
+assert.eq(5, Array.sum([1, 4]), "A");
+assert.eq(2.5, Array.avg([1, 4]), "B");
+arr = [2, 4, 4, 4, 5, 5, 7, 9];
+assert.eq(5, Array.avg(arr), "C");
+assert.eq(2, Array.stdDev(arr), "D");
diff --git a/jstests/core/array4.js b/jstests/core/array4.js
index 1053e160f11..c6fe1599880 100644
--- a/jstests/core/array4.js
+++ b/jstests/core/array4.js
@@ -3,9 +3,11 @@ t = db.array4;
t.drop();
t.insert({"a": ["1", "2", "3"]});
-t.insert({"a" : ["2", "1"]});
+t.insert({"a": ["2", "1"]});
-var x = {'a.0' : /1/};
+var x = {
+ 'a.0': /1/
+};
assert.eq(t.count(x), 1);
@@ -14,17 +16,19 @@ assert.eq(t.findOne(x).a[1], 2);
t.drop();
-t.insert({"a" : {"0" : "1"}});
-t.insert({"a" : ["2", "1"]});
+t.insert({"a": {"0": "1"}});
+t.insert({"a": ["2", "1"]});
assert.eq(t.count(x), 1);
assert.eq(t.findOne(x).a[0], 1);
t.drop();
-t.insert({"a" : ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]});
-t.insert({"a" : ["2", "1"]});
+t.insert({"a": ["0", "1", "2", "3", "4", "5", "6", "1", "1", "1", "2", "3", "2", "1"]});
+t.insert({"a": ["2", "1"]});
-x = {"a.12" : /2/};
+x = {
+ "a.12": /2/
+};
assert.eq(t.count(x), 1);
assert.eq(t.findOne(x).a[0], 0);
diff --git a/jstests/core/array_match1.js b/jstests/core/array_match1.js
index 194ebcb85c3..9923677b8df 100644
--- a/jstests/core/array_match1.js
+++ b/jstests/core/array_match1.js
@@ -2,30 +2,30 @@
t = db.array_match1;
t.drop();
-t.insert( { _id : 1 , a : [ 5 , 5 ] } );
-t.insert( { _id : 2 , a : [ 6 , 6 ] } );
-t.insert( { _id : 3 , a : [ 5 , 5 ] } );
+t.insert({_id: 1, a: [5, 5]});
+t.insert({_id: 2, a: [6, 6]});
+t.insert({_id: 3, a: [5, 5]});
-function test( f , m ){
+function test(f, m) {
var q = {};
- q[f] = [5,5];
- assert.eq( 2 , t.find( q ).itcount() , m + "1" );
+ q[f] = [5, 5];
+ assert.eq(2, t.find(q).itcount(), m + "1");
- q[f] = [6,6];
- assert.eq( 1 , t.find( q ).itcount() , m + "2" );
+ q[f] = [6, 6];
+ assert.eq(1, t.find(q).itcount(), m + "2");
}
-test( "a" , "A" );
-t.ensureIndex( { a : 1 } );
-test( "a" , "B" );
+test("a", "A");
+t.ensureIndex({a: 1});
+test("a", "B");
t.drop();
-t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } );
-t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } );
-t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } );
+t.insert({_id: 1, a: {b: [5, 5]}});
+t.insert({_id: 2, a: {b: [6, 6]}});
+t.insert({_id: 3, a: {b: [5, 5]}});
-test( "a.b" , "C" );
-t.ensureIndex( { a : 1 } );
-test( "a.b" , "D" );
+test("a.b", "C");
+t.ensureIndex({a: 1});
+test("a.b", "D");
diff --git a/jstests/core/array_match2.js b/jstests/core/array_match2.js
index d254b0a3fdd..44cbfe33941 100644
--- a/jstests/core/array_match2.js
+++ b/jstests/core/array_match2.js
@@ -2,19 +2,19 @@
t = db.jstests_array_match2;
t.drop();
-t.save( {a:[{1:4},5]} );
+t.save({a: [{1: 4}, 5]});
// When the array index is the last field, both of these match types work.
-assert.eq( 1, t.count( {'a.1':4} ) );
-assert.eq( 1, t.count( {'a.1':5} ) );
+assert.eq(1, t.count({'a.1': 4}));
+assert.eq(1, t.count({'a.1': 5}));
t.remove({});
// When the array index is not the last field, only one of the match types works.
-t.save( {a:[{1:{foo:4}},{foo:5}]} );
-assert.eq( 1, t.count( {'a.1.foo':4} ) );
-assert.eq( 1, t.count( {'a.1.foo':5} ) );
+t.save({a: [{1: {foo: 4}}, {foo: 5}]});
+assert.eq(1, t.count({'a.1.foo': 4}));
+assert.eq(1, t.count({'a.1.foo': 5}));
// Same issue with the $exists operator
t.remove({});
-t.save( {a:[{1:{foo:4}},{}]} );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.foo':{$exists:true}} ) );
+t.save({a: [{1: {foo: 4}}, {}]});
+assert.eq(1, t.count({'a.1': {$exists: true}}));
+assert.eq(1, t.count({'a.1.foo': {$exists: true}}));
diff --git a/jstests/core/array_match3.js b/jstests/core/array_match3.js
index c8653430770..837341afc8a 100644
--- a/jstests/core/array_match3.js
+++ b/jstests/core/array_match3.js
@@ -4,10 +4,10 @@ t = db.jstests_array_match3;
t.drop();
// Test matching numerically referenced array element.
-t.save( {a:{'0':5}} );
-t.save( {a:[5]} );
-assert.eq( 2, t.count( {'a.0':5} ) );
+t.save({a: {'0': 5}});
+t.save({a: [5]});
+assert.eq(2, t.count({'a.0': 5}));
// Test with index.
-t.ensureIndex( {'a.0':1} );
-assert.eq( 2, t.count( {'a.0':5} ) );
+t.ensureIndex({'a.0': 1});
+assert.eq(2, t.count({'a.0': 5}));
diff --git a/jstests/core/array_match4.js b/jstests/core/array_match4.js
index b4cdec5143a..4956fc1d8b2 100644
--- a/jstests/core/array_match4.js
+++ b/jstests/core/array_match4.js
@@ -3,7 +3,9 @@ var t = db.array_match4;
t.drop();
t.save({a: [1, 2]});
-var query_gte = {a: {$gte: [1, 2]}};
+var query_gte = {
+ a: {$gte: [1, 2]}
+};
//
// without index
diff --git a/jstests/core/arrayfind1.js b/jstests/core/arrayfind1.js
index 5a9f2227806..bd8d47b845e 100644
--- a/jstests/core/arrayfind1.js
+++ b/jstests/core/arrayfind1.js
@@ -2,32 +2,31 @@
t = db.arrayfind1;
t.drop();
-t.save( { a : [ { x : 1 } ] } );
-t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } );
-t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } );
+t.save({a: [{x: 1}]});
+t.save({a: [{x: 1, y: 2, z: 1}]});
+t.save({a: [{x: 1, y: 1, z: 3}]});
-function test( exptected , q , name ){
- assert.eq( exptected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" );
- assert.eq( exptected , t.find( q ).count() , name + " " + tojson( q ) + " count" );
+function test(expected, q, name) {
+    assert.eq(expected, t.find(q).itcount(), name + " " + tojson(q) + " itcount");
+    assert.eq(expected, t.find(q).count(), name + " " + tojson(q) + " count");
}
-test( 3 , {} , "A1" );
-test( 1 , { "a.y" : 2 } , "A2" );
-test( 1 , { "a" : { x : 1 } } , "A3" );
-test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377
+test(3, {}, "A1");
+test(1, {"a.y": 2}, "A2");
+test(1, {"a": {x: 1}}, "A3");
+test(3, {"a": {$elemMatch: {x: 1}}}, "A4"); // SERVER-377
+t.save({a: [{x: 2}]});
+t.save({a: [{x: 3}]});
+t.save({a: [{x: 4}]});
-t.save( { a : [ { x : 2 } ] } );
-t.save( { a : [ { x : 3 } ] } );
-t.save( { a : [ { x : 4 } ] } );
+assert.eq(1, t.find({a: {$elemMatch: {x: 2}}}).count(), "B1");
+assert.eq(2, t.find({a: {$elemMatch: {x: {$gt: 2}}}}).count(), "B2");
-assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" );
-assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" );
+t.ensureIndex({"a.x": 1});
+assert.eq(1, t.find({a: {$elemMatch: {x: 2}}}).count(), "D1");
+assert.eq(3, t.find({"a.x": 1}).count(), "D2.1");
+assert.eq(3, t.find({"a.x": {$gt: 1}}).count(), "D2.2");
+assert.eq(2, t.find({a: {$elemMatch: {x: {$gt: 2}}}}).count(), "D3");
-t.ensureIndex( { "a.x" : 1 } );
-assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" );
-assert.eq( 3, t.find( { "a.x" : 1 } ).count() , "D2.1" );
-assert.eq( 3, t.find( { "a.x" : { $gt : 1 } } ).count() , "D2.2" );
-assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" );
-
-assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" );
+assert.eq(2, t.find({a: {$ne: 2, $elemMatch: {x: {$gt: 2}}}}).count(), "E1");
diff --git a/jstests/core/arrayfind2.js b/jstests/core/arrayfind2.js
index b292b13bf52..60eaa27f0d7 100644
--- a/jstests/core/arrayfind2.js
+++ b/jstests/core/arrayfind2.js
@@ -2,22 +2,27 @@
t = db.arrayfind2;
t.drop();
-function go( prefix ){
- assert.eq( 3 , t.count() , prefix + " A1" );
- assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" );
- assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" );
- assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } ,
- { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" );
-
- assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ); } );
- assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ); } );
+function go(prefix) {
+ assert.eq(3, t.count(), prefix + " A1");
+ assert.eq(3, t.find({a: {$elemMatch: {x: {$gt: 4}}}}).count(), prefix + " A2");
+ assert.eq(1, t.find({a: {$elemMatch: {x: {$lt: 2}}}}).count(), prefix + " A3");
+ assert.eq(
+ 1,
+ t.find({a: {$all: [{$elemMatch: {x: {$lt: 4}}}, {$elemMatch: {x: {$gt: 5}}}]}}).count(),
+ prefix + " A4");
+ assert.throws(function() {
+ return t.findOne({a: {$all: [1, {$elemMatch: {x: 3}}]}});
+ });
+ assert.throws(function() {
+ return t.findOne({a: {$all: [/a/, {$elemMatch: {x: 3}}]}});
+ });
}
-t.save( { a : [ { x : 1 } , { x : 5 } ] } );
-t.save( { a : [ { x : 3 } , { x : 5 } ] } );
-t.save( { a : [ { x : 3 } , { x : 6 } ] } );
+t.save({a: [{x: 1}, {x: 5}]});
+t.save({a: [{x: 3}, {x: 5}]});
+t.save({a: [{x: 3}, {x: 6}]});
-go( "no index" );
-t.ensureIndex( { a : 1 } );
-go( "index(a)" );
+go("no index");
+t.ensureIndex({a: 1});
+go("index(a)");
diff --git a/jstests/core/arrayfind3.js b/jstests/core/arrayfind3.js
index 395b428ac1f..07fbc3670d5 100644
--- a/jstests/core/arrayfind3.js
+++ b/jstests/core/arrayfind3.js
@@ -2,15 +2,14 @@
t = db.arrayfind3;
t.drop();
-t.save({a:[1,2]});
-t.save({a:[1, 2, 6]});
-t.save({a:[1, 4, 6]});
+t.save({a: [1, 2]});
+t.save({a: [1, 2, 6]});
+t.save({a: [1, 4, 6]});
+assert.eq(2, t.find({a: {$gte: 3, $lte: 5}}).itcount(), "A1");
+assert.eq(1, t.find({a: {$elemMatch: {$gte: 3, $lte: 5}}}).itcount(), "A2");
-assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "A1" );
-assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "A2" );
+t.ensureIndex({a: 1});
-t.ensureIndex( { a : 1 } );
-
-assert.eq( 2 , t.find( {a:{$gte:3, $lte: 5}} ).itcount() , "B1" );
-assert.eq( 1 , t.find( {a:{$elemMatch:{$gte:3, $lte: 5}}} ).itcount() , "B2" );
+assert.eq(2, t.find({a: {$gte: 3, $lte: 5}}).itcount(), "B1");
+assert.eq(1, t.find({a: {$elemMatch: {$gte: 3, $lte: 5}}}).itcount(), "B2");
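The arrayfind3.js assertions above hinge on the difference between a plain range predicate and $elemMatch over an array: without $elemMatch, $gte and $lte may be satisfied by different array elements, while $elemMatch requires a single element to satisfy both. A small sketch (collection name is illustrative):
var demo = db.elemmatch_range_demo;
demo.drop();
demo.save({a: [1, 2, 6]});
demo.save({a: [1, 4, 6]});
// Plain range: 6 satisfies $gte and 1 satisfies $lte, so both documents match.
assert.eq(2, demo.find({a: {$gte: 3, $lte: 5}}).itcount());
// $elemMatch: only {a: [1, 4, 6]} has a single element (4) inside [3, 5].
assert.eq(1, demo.find({a: {$elemMatch: {$gte: 3, $lte: 5}}}).itcount());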
diff --git a/jstests/core/arrayfind4.js b/jstests/core/arrayfind4.js
index 17b02c8886b..a43a914b930 100644
--- a/jstests/core/arrayfind4.js
+++ b/jstests/core/arrayfind4.js
@@ -3,20 +3,20 @@
t = db.jstests_arrayfind4;
t.drop();
-t.save( {a:[]} );
-t.ensureIndex( {a:1} );
+t.save({a: []});
+t.ensureIndex({a: 1});
-assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
+assert.eq(1, t.find({a: []}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({a: []}).hint({a: 1}).itcount());
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
+assert.eq(1, t.find({a: {$in: [[]]}}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({a: {$in: [[]]}}).hint({a: 1}).itcount());
t.remove({});
-t.save( {a:[[]]} );
+t.save({a: [[]]});
-assert.eq( 1, t.find( {a:[]} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:[]} ).hint( {a:1} ).itcount() );
+assert.eq(1, t.find({a: []}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({a: []}).hint({a: 1}).itcount());
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$in:[[]]}} ).hint( {a:1} ).itcount() );
+assert.eq(1, t.find({a: {$in: [[]]}}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({a: {$in: [[]]}}).hint({a: 1}).itcount());
diff --git a/jstests/core/arrayfind5.js b/jstests/core/arrayfind5.js
index 9ff6e2b8a5f..3fba886b83a 100644
--- a/jstests/core/arrayfind5.js
+++ b/jstests/core/arrayfind5.js
@@ -3,21 +3,22 @@
t = db.jstests_arrayfind5;
t.drop();
-function check( nullElemMatch ) {
- assert.eq( 1, t.find( {'a.b':1} ).itcount() );
- assert.eq( 1, t.find( {a:{$elemMatch:{b:1}}} ).itcount() );
- assert.eq( nullElemMatch ? 1 : 0 , t.find( {'a.b':null} ).itcount() );
- assert.eq( nullElemMatch ? 1 : 0, t.find( {a:{$elemMatch:{b:null}}} ).itcount() ); // see SERVER-3377
+function check(nullElemMatch) {
+ assert.eq(1, t.find({'a.b': 1}).itcount());
+ assert.eq(1, t.find({a: {$elemMatch: {b: 1}}}).itcount());
+ assert.eq(nullElemMatch ? 1 : 0, t.find({'a.b': null}).itcount());
+ assert.eq(nullElemMatch ? 1 : 0,
+ t.find({a: {$elemMatch: {b: null}}}).itcount()); // see SERVER-3377
}
-t.save( {a:[{},{b:1}]} );
-check( true );
-t.ensureIndex( {'a.b':1} );
-check( true );
+t.save({a: [{}, {b: 1}]});
+check(true);
+t.ensureIndex({'a.b': 1});
+check(true);
t.drop();
-t.save( {a:[5,{b:1}]} );
-check( false );
-t.ensureIndex( {'a.b':1} );
-check( false );
+t.save({a: [5, {b: 1}]});
+check(false);
+t.ensureIndex({'a.b': 1});
+check(false);
diff --git a/jstests/core/arrayfind6.js b/jstests/core/arrayfind6.js
index 9b54d5b2c07..f01271b6673 100644
--- a/jstests/core/arrayfind6.js
+++ b/jstests/core/arrayfind6.js
@@ -3,19 +3,19 @@
t = db.jstests_arrayfind6;
t.drop();
-t.save( { a:[ { b:1, c:2 } ] } );
+t.save({a: [{b: 1, c: 2}]});
function checkElemMatchMatches() {
- assert.eq( 1, t.count( { a:{ $elemMatch:{ b:1, c:2 } } } ) );
- assert.eq( 0, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:2 } } } } ) );
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:1, c:3 } } } } ) );
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:1 }, c:3 } } } } ) );
+ assert.eq(1, t.count({a: {$elemMatch: {b: 1, c: 2}}}));
+ assert.eq(0, t.count({a: {$not: {$elemMatch: {b: 1, c: 2}}}}));
+ assert.eq(1, t.count({a: {$not: {$elemMatch: {b: 1, c: 3}}}}));
+ assert.eq(1, t.count({a: {$not: {$elemMatch: {b: {$ne: 1}, c: 3}}}}));
// Index bounds must be determined for $not:$elemMatch, not $not:$ne. In this case if index
// bounds are determined for $not:$ne, the a.b index will be constrained to the interval [2,2]
// and the saved document will not be matched as it should.
- assert.eq( 1, t.count( { a:{ $not:{ $elemMatch:{ b:{ $ne:2 }, c:3 } } } } ) );
+ assert.eq(1, t.count({a: {$not: {$elemMatch: {b: {$ne: 2}, c: 3}}}}));
}
checkElemMatchMatches();
-t.ensureIndex( { 'a.b':1 } );
+t.ensureIndex({'a.b': 1});
checkElemMatchMatches();
diff --git a/jstests/core/arrayfind7.js b/jstests/core/arrayfind7.js
index 7c44de1dc1d..f0dc2e2caa8 100644
--- a/jstests/core/arrayfind7.js
+++ b/jstests/core/arrayfind7.js
@@ -3,50 +3,50 @@
t = db.jstests_arrayfind7;
t.drop();
-t.save( { a:[ { b:[ { c:1, d:2 } ] } ] } );
+t.save({a: [{b: [{c: 1, d: 2}]}]});
function checkElemMatchMatches() {
- assert.eq( 1, t.count( { a:{ $elemMatch:{ b:{ $elemMatch:{ c:1, d:2 } } } } } ) );
+ assert.eq(1, t.count({a: {$elemMatch: {b: {$elemMatch: {c: 1, d: 2}}}}}));
}
// The document is matched using nested $elemMatch expressions, with and without an index.
checkElemMatchMatches();
-t.ensureIndex( { 'a.b.c':1 } );
+t.ensureIndex({'a.b.c': 1});
checkElemMatchMatches();
-function checkElemMatch( index, document, query ) {
+function checkElemMatch(index, document, query) {
// The document is matched without an index, and with single and multi key indexes.
t.drop();
- t.save( document );
- assert.eq( 1, t.count( query ) );
- t.ensureIndex( index );
- assert.eq( 1, t.count( query ) );
- t.save( { a:{ b:{ c:[ 10, 11 ] } } } ); // Make the index multikey.
- assert.eq( 1, t.count( query ) );
+ t.save(document);
+ assert.eq(1, t.count(query));
+ t.ensureIndex(index);
+ assert.eq(1, t.count(query));
+ t.save({a: {b: {c: [10, 11]}}}); // Make the index multikey.
+ assert.eq(1, t.count(query));
}
// Two constraints within a nested $elemMatch expression.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ] } ] },
- { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $lte:1 } } } } } });
+checkElemMatch({'a.b.c': 1},
+ {a: [{b: [{c: 1}]}]},
+ {a: {$elemMatch: {b: {$elemMatch: {c: {$gte: 1, $lte: 1}}}}}});
// Two constraints within a nested $elemMatch expression, one of which contains the other.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:2 } ] } ] },
- { a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1, $in:[2] } } } } } });
+checkElemMatch({'a.b.c': 1},
+ {a: [{b: [{c: 2}]}]},
+ {a: {$elemMatch: {b: {$elemMatch: {c: {$gte: 1, $in: [2]}}}}}});
// Two nested $elemMatch expressions.
-checkElemMatch( { 'a.d.e':1, 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ], d:[ { e:1 } ] } ] },
- { a:{ $elemMatch:{ d:{ $elemMatch:{ e:{ $lte:1 } } },
- b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
+checkElemMatch(
+ {'a.d.e': 1, 'a.b.c': 1},
+ {a: [{b: [{c: 1}], d: [{e: 1}]}]},
+ {a: {$elemMatch: {d: {$elemMatch: {e: {$lte: 1}}}, b: {$elemMatch: {c: {$gte: 1}}}}}});
// A non $elemMatch expression and a nested $elemMatch expression.
-checkElemMatch( { 'a.x':1, 'a.b.c':1 },
- { a:[ { b:[ { c:1 } ], x:1 } ] },
- { 'a.x':1, a:{ $elemMatch:{ b:{ $elemMatch:{ c:{ $gte:1 } } } } } });
+checkElemMatch({'a.x': 1, 'a.b.c': 1},
+ {a: [{b: [{c: 1}], x: 1}]},
+ {'a.x': 1, a: {$elemMatch: {b: {$elemMatch: {c: {$gte: 1}}}}}});
// $elemMatch is applied directly to a top level field.
-checkElemMatch( { 'a.b.c':1 },
- { a:[ { b:[ { c:[ 1 ] } ] } ] },
- { a:{ $elemMatch:{ 'b.c':{ $elemMatch:{ $gte:1, $lte:1 } } } } });
+checkElemMatch({'a.b.c': 1},
+ {a: [{b: [{c: [1]}]}]},
+ {a: {$elemMatch: {'b.c': {$elemMatch: {$gte: 1, $lte: 1}}}}});
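The arrayfind7.js queries above pair an outer $elemMatch on a with an inner $elemMatch on b, so one element of the outer array must contain a nested element satisfying all inner constraints. A short sketch of that shape (collection name is illustrative):
var demo = db.nested_elemmatch_demo;
demo.drop();
demo.save({a: [{b: [{c: 1, d: 2}]}]});
// Matches: the single a-element has a b-element with both c: 1 and d: 2.
assert.eq(1, demo.count({a: {$elemMatch: {b: {$elemMatch: {c: 1, d: 2}}}}}));
// No b-element carries d: 3, so this does not match.
assert.eq(0, demo.count({a: {$elemMatch: {b: {$elemMatch: {c: 1, d: 3}}}}}));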
diff --git a/jstests/core/arrayfind8.js b/jstests/core/arrayfind8.js
index e74093d9457..d322229a298 100644
--- a/jstests/core/arrayfind8.js
+++ b/jstests/core/arrayfind8.js
@@ -6,34 +6,34 @@ var debuggingEnabled = false;
t = db.jstests_arrayfind8;
t.drop();
-function debug( x ) {
- if ( debuggingEnabled ) {
- printjson( x );
+function debug(x) {
+ if (debuggingEnabled) {
+ printjson(x);
}
}
/** Set index state for the test. */
-function setIndexKey( key ) {
+function setIndexKey(key) {
indexKey = key;
indexSpec = {};
- indexSpec[ key ] = 1;
+ indexSpec[key] = 1;
}
-setIndexKey( 'a' );
+setIndexKey('a');
/** Check that the query results match the documents in the 'expected' array. */
-function assertResults( expected, query, context ) {
- debug( query );
- assert.eq( expected.length, t.count( query ), 'unexpected count in ' + context );
- results = t.find( query ).toArray();
- for( i in results ) {
+function assertResults(expected, query, context) {
+ debug(query);
+ assert.eq(expected.length, t.count(query), 'unexpected count in ' + context);
+ results = t.find(query).toArray();
+ for (i in results) {
found = false;
- for( j in expected ) {
- if ( friendlyEqual( expected[ j ], results[ i ].a ) ) {
+ for (j in expected) {
+ if (friendlyEqual(expected[j], results[i].a)) {
found = true;
}
}
- assert( found, 'unexpected result ' + results[ i ] + ' in ' + context );
+ assert(found, 'unexpected result ' + results[i] + ' in ' + context);
}
}
@@ -43,23 +43,22 @@ function assertResults( expected, query, context ) {
* @param elemMatch - document matched by elemMatchQuery but not standardQuery
 * @param nonElemMatch - document matched by standardQuery but not elemMatchQuery
*/
-function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context ) {
-
- function mayPush( arr, elt ) {
- if ( elt ) {
- arr.push( elt );
+function checkMatch(bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, context) {
+ function mayPush(arr, elt) {
+ if (elt) {
+ arr.push(elt);
}
}
expectedStandardQueryResults = [];
- mayPush( expectedStandardQueryResults, bothMatch );
- mayPush( expectedStandardQueryResults, nonElemMatch );
- assertResults( expectedStandardQueryResults, standardQuery, context + ' standard query' );
+ mayPush(expectedStandardQueryResults, bothMatch);
+ mayPush(expectedStandardQueryResults, nonElemMatch);
+ assertResults(expectedStandardQueryResults, standardQuery, context + ' standard query');
expectedElemMatchQueryResults = [];
- mayPush( expectedElemMatchQueryResults, bothMatch );
- mayPush( expectedElemMatchQueryResults, elemMatch );
- assertResults( expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query' );
+ mayPush(expectedElemMatchQueryResults, bothMatch);
+ mayPush(expectedElemMatchQueryResults, elemMatch);
+ assertResults(expectedElemMatchQueryResults, elemMatchQuery, context + ' elemMatch query');
}
/**
@@ -71,100 +70,100 @@ function checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatc
 * @param nonElemMatch - document matched by standardQuery but not elemMatchQuery
* @param additionalConstraints - additional query parameters not generated from @param subQuery
*/
-function checkQuery( subQuery, bothMatch, elemMatch, nonElemMatch,
- additionalConstraints ) {
+function checkQuery(subQuery, bothMatch, elemMatch, nonElemMatch, additionalConstraints) {
t.drop();
additionalConstraints = additionalConstraints || {};
-
+
// Construct standard and elemMatch queries from subQuery.
- firstSubQueryKey = Object.keySet( subQuery )[ 0 ];
- if ( firstSubQueryKey[ 0 ] == '$' ) {
- standardQuery = { $and:[ { a:subQuery }, additionalConstraints ] };
- }
- else {
+ firstSubQueryKey = Object.keySet(subQuery)[0];
+ if (firstSubQueryKey[0] == '$') {
+ standardQuery = {
+ $and: [{a: subQuery}, additionalConstraints]
+ };
+ } else {
// If the subQuery contains a field rather than operators, append to the 'a' field.
modifiedSubQuery = {};
- modifiedSubQuery[ 'a.' + firstSubQueryKey ] = subQuery[ firstSubQueryKey ];
- standardQuery = { $and:[ modifiedSubQuery, additionalConstraints ] };
+ modifiedSubQuery['a.' + firstSubQueryKey] = subQuery[firstSubQueryKey];
+ standardQuery = {
+ $and: [modifiedSubQuery, additionalConstraints]
+ };
}
- elemMatchQuery = { $and:[ { a:{ $elemMatch:subQuery } }, additionalConstraints ] };
- debug( elemMatchQuery );
-
- function maySave( aValue ) {
- if ( aValue ) {
- debug( { a:aValue } );
- t.save( { a:aValue } );
+ elemMatchQuery = {
+ $and: [{a: {$elemMatch: subQuery}}, additionalConstraints]
+ };
+ debug(elemMatchQuery);
+
+ function maySave(aValue) {
+ if (aValue) {
+ debug({a: aValue});
+ t.save({a: aValue});
}
}
// Save all documents and check matching without indexes.
- maySave( bothMatch );
- maySave( elemMatch );
- maySave( nonElemMatch );
+ maySave(bothMatch);
+ maySave(elemMatch);
+ maySave(nonElemMatch);
- checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed' );
+ checkMatch(bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'unindexed');
// Check matching and index bounds for a single key index.
t.drop();
- maySave( bothMatch );
- maySave( elemMatch );
+ maySave(bothMatch);
+ maySave(elemMatch);
// The nonElemMatch document is not tested here, as it will often make the index multikey.
- t.ensureIndex( indexSpec );
- checkMatch( bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index' );
+ t.ensureIndex(indexSpec);
+ checkMatch(bothMatch, elemMatch, null, standardQuery, elemMatchQuery, 'single key index');
// Check matching and index bounds for a multikey index.
// Now the nonElemMatch document is tested.
- maySave( nonElemMatch );
+ maySave(nonElemMatch);
// Force the index to be multikey.
- t.save( { a:[ -1, -2 ] } );
- t.save( { a:{ b:[ -1, -2 ] } } );
- checkMatch( bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery,
- 'multikey index' );
+ t.save({a: [-1, -2]});
+ t.save({a: {b: [-1, -2]}});
+ checkMatch(bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatchQuery, 'multikey index');
}
maxNumber = Infinity;
// Basic test.
-checkQuery( { $gt:4 }, [ 5 ] );
+checkQuery({$gt: 4}, [5]);
// Multiple constraints within a $elemMatch clause.
-checkQuery( { $gt:4, $lt:6 }, [ 5 ], null, [ 3, 7 ] );
-checkQuery( { $gt:4, $not:{ $gte:6 } }, [ 5 ] );
-checkQuery( { $gt:4, $not:{ $ne:6 } }, [ 6 ] );
-checkQuery( { $gte:5, $lte:5 }, [ 5 ], null, [ 4, 6 ] );
-checkQuery( { $in:[ 4, 6 ], $gt:5 }, [ 6 ], null, [ 4, 7 ] );
-checkQuery( { $regex:'^a' }, [ 'a' ] );
+checkQuery({$gt: 4, $lt: 6}, [5], null, [3, 7]);
+checkQuery({$gt: 4, $not: {$gte: 6}}, [5]);
+checkQuery({$gt: 4, $not: {$ne: 6}}, [6]);
+checkQuery({$gte: 5, $lte: 5}, [5], null, [4, 6]);
+checkQuery({$in: [4, 6], $gt: 5}, [6], null, [4, 7]);
+checkQuery({$regex: '^a'}, ['a']);
// Some constraints within a $elemMatch clause and other constraints outside of it.
-checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $lt:6 } } );
-checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $lte:5 } } );
-checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $gt:5 } } );
+checkQuery({$gt: 4}, [5], null, null, {a: {$lt: 6}});
+checkQuery({$gte: 5}, [5], null, null, {a: {$lte: 5}});
+checkQuery({$in: [4, 6]}, [6], null, null, {a: {$gt: 5}});
// Constraints in different $elemMatch clauses.
-checkQuery( { $gt:4 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
-checkQuery( { $gt:4 }, [ 3, 7 ], null, null, { a:{ $elemMatch:{ $lt:6 } } } );
-checkQuery( { $gte:5 }, [ 5 ], null, null, { a:{ $elemMatch:{ $lte:5 } } } );
-checkQuery( { $in:[ 4, 6 ] }, [ 6 ], null, null, { a:{ $elemMatch:{ $gt:5 } } } );
+checkQuery({$gt: 4}, [5], null, null, {a: {$elemMatch: {$lt: 6}}});
+checkQuery({$gt: 4}, [3, 7], null, null, {a: {$elemMatch: {$lt: 6}}});
+checkQuery({$gte: 5}, [5], null, null, {a: {$elemMatch: {$lte: 5}}});
+checkQuery({$in: [4, 6]}, [6], null, null, {a: {$elemMatch: {$gt: 5}}});
// TODO SERVER-1264
-if ( 0 ) {
-checkQuery( { $elemMatch:{ $in:[ 5 ] } }, null, [[ 5 ]], [ 5 ], null );
+if (0) {
+ checkQuery({$elemMatch: {$in: [5]}}, null, [[5]], [5], null);
}
-setIndexKey( 'a.b' );
-checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:1 } ]],
- [ { b:1 } ], null );
-checkQuery( { $elemMatch:{ b:{ $gte:1, $lte:1 } } }, null, [[ { b:[ 0, 2 ] } ]],
- [ { b:[ 0, 2 ] } ], null );
+setIndexKey('a.b');
+checkQuery({$elemMatch: {b: {$gte: 1, $lte: 1}}}, null, [[{b: 1}]], [{b: 1}], null);
+checkQuery({$elemMatch: {b: {$gte: 1, $lte: 1}}}, null, [[{b: [0, 2]}]], [{b: [0, 2]}], null);
// Constraints for a top level (SERVER-1264 style) $elemMatch nested within a non top level
// $elemMatch.
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:1 } } }, [ { b:[ 1 ] } ] );
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 1 ] } ] );
+checkQuery({b: {$elemMatch: {$gte: 1, $lte: 1}}}, [{b: [1]}]);
+checkQuery({b: {$elemMatch: {$gte: 1, $lte: 4}}}, [{b: [1]}]);
-checkQuery( { b:{ $elemMatch:{ $gte:1, $lte:4 } } }, [ { b:[ 2 ] } ], null,
- null, { 'a.b':{ $in:[ 2, 5 ] } } );
-checkQuery( { b:{ $elemMatch:{ $in:[ 1, 2 ] }, $in:[ 2, 3 ] } },
- [ { b:[ 2 ] } ], null, [ { b:[ 1 ] }, { b:[ 3 ] } ], null );
+checkQuery({b: {$elemMatch: {$gte: 1, $lte: 4}}}, [{b: [2]}], null, null, {'a.b': {$in: [2, 5]}});
+checkQuery(
+ {b: {$elemMatch: {$in: [1, 2]}, $in: [2, 3]}}, [{b: [2]}], null, [{b: [1]}, {b: [3]}], null);
diff --git a/jstests/core/arrayfind9.js b/jstests/core/arrayfind9.js
index 4ee14c56580..98396701dce 100644
--- a/jstests/core/arrayfind9.js
+++ b/jstests/core/arrayfind9.js
@@ -4,31 +4,31 @@ t = db.jstests_arrayfind9;
t.drop();
// Top level field $elemMatch:$not matching
-t.save( { a:[ 1 ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $not:{ $ne:1 } } } } ) );
+t.save({a: [1]});
+assert.eq(1, t.count({a: {$elemMatch: {$not: {$ne: 1}}}}));
// Top level field object $elemMatch matching.
t.drop();
-t.save( { a:[ {} ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $gte:{} } } } ) );
+t.save({a: [{}]});
+assert.eq(1, t.count({a: {$elemMatch: {$gte: {}}}}));
// Top level field array $elemMatch matching.
t.drop();
-t.save( { a:[ [] ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ $in:[ [] ] } } } ) );
+t.save({a: [[]]});
+assert.eq(1, t.count({a: {$elemMatch: {$in: [[]]}}}));
// Matching by array index.
t.drop();
-t.save( { a:[ [ 'x' ] ] } );
-assert.eq( 1, t.count( { a:{ $elemMatch:{ '0':'x' } } } ) );
+t.save({a: [['x']]});
+assert.eq(1, t.count({a: {$elemMatch: {'0': 'x'}}}));
// Matching multiple values of a nested array.
t.drop();
-t.save( { a:[ { b:[ 0, 2 ] } ] } );
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { 'a.b':1 } );
-plans = [ { $natural:1 }, { a:1 }, { 'a.b':1 } ];
-for( i in plans ) {
- p = plans[ i ];
- assert.eq( 1, t.find( { a:{ $elemMatch:{ b:{ $gte:1, $lte:1 } } } } ).hint( p ).itcount() );
+t.save({a: [{b: [0, 2]}]});
+t.ensureIndex({a: 1});
+t.ensureIndex({'a.b': 1});
+plans = [{$natural: 1}, {a: 1}, {'a.b': 1}];
+for (i in plans) {
+ p = plans[i];
+ assert.eq(1, t.find({a: {$elemMatch: {b: {$gte: 1, $lte: 1}}}}).hint(p).itcount());
}
diff --git a/jstests/core/arrayfinda.js b/jstests/core/arrayfinda.js
index 179d3985580..f2939d0c1ba 100644
--- a/jstests/core/arrayfinda.js
+++ b/jstests/core/arrayfinda.js
@@ -4,18 +4,18 @@ t = db.jstests_arrayfinda;
t.drop();
// $elemMatch only matches elements within arrays (a descriptive, not a normative test).
-t.save( { a:[ { b:1 } ] } );
-t.save( { a:{ b:1 } } );
+t.save({a: [{b: 1}]});
+t.save({a: {b: 1}});
-function assertExpectedMatch( cursor ) {
- assert.eq( [ { b:1 } ], cursor.next().a );
- assert( !cursor.hasNext() );
+function assertExpectedMatch(cursor) {
+ assert.eq([{b: 1}], cursor.next().a);
+ assert(!cursor.hasNext());
}
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ) );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ) );
+assertExpectedMatch(t.find({a: {$elemMatch: {b: {$gte: 1}}}}));
+assertExpectedMatch(t.find({a: {$elemMatch: {b: 1}}}));
// $elemMatch is not used to perform key matching. SERVER-6001
-t.ensureIndex( { a:1 } );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:{ $gte:1 } } } } ).hint( { a:1 } ) );
-assertExpectedMatch( t.find( { a:{ $elemMatch:{ b:1 } } } ).hint( { a:1 } ) );
+t.ensureIndex({a: 1});
+assertExpectedMatch(t.find({a: {$elemMatch: {b: {$gte: 1}}}}).hint({a: 1}));
+assertExpectedMatch(t.find({a: {$elemMatch: {b: 1}}}).hint({a: 1}));
diff --git a/jstests/core/arrayfindb.js b/jstests/core/arrayfindb.js
index ad1a86be142..483e1e46134 100644
--- a/jstests/core/arrayfindb.js
+++ b/jstests/core/arrayfindb.js
@@ -5,11 +5,13 @@ t.drop();
// Case #1: Ensure correct matching for $elemMatch with an embedded $and (SERVER-13664).
t.save({a: [{b: 1, c: 25}, {a: 3, b: 59}]});
-assert.eq(0, t.find({a: {$elemMatch: {b: {$gte: 2, $lt: 4}, c: 25}}}).itcount(),
+assert.eq(0,
+ t.find({a: {$elemMatch: {b: {$gte: 2, $lt: 4}, c: 25}}}).itcount(),
"Case #1: wrong number of results returned -- unindexed");
t.ensureIndex({"a.b": 1, "a.c": 1});
-assert.eq(0, t.find({a: {$elemMatch: {b: {$gte: 2, $lt: 4}, c: 25}}}).itcount(),
+assert.eq(0,
+ t.find({a: {$elemMatch: {b: {$gte: 2, $lt: 4}, c: 25}}}).itcount(),
"Case #1: wrong number of results returned -- indexed");
// Case #2: Ensure correct matching for $elemMatch with an embedded $or.
@@ -17,10 +19,12 @@ t.drop();
t.save({a: [{b: 1}, {c: 1}]});
t.save({a: [{b: 2}, {c: 1}]});
t.save({a: [{b: 1}, {c: 2}]});
-assert.eq(2, t.find({a: {$elemMatch: {$or: [{b: 2}, {c: 2}]}}}).itcount(),
+assert.eq(2,
+ t.find({a: {$elemMatch: {$or: [{b: 2}, {c: 2}]}}}).itcount(),
"Case #2: wrong number of results returned -- unindexed");
t.ensureIndex({"a.b": 1});
t.ensureIndex({"a.c": 1});
-assert.eq(2, t.find({a: {$elemMatch: {$or: [{b: 2}, {c: 2}]}}}).itcount(),
+assert.eq(2,
+ t.find({a: {$elemMatch: {$or: [{b: 2}, {c: 2}]}}}).itcount(),
"Case #2: wrong number of results returned -- indexed");
diff --git a/jstests/core/auth1.js b/jstests/core/auth1.js
index 268f55f2af8..46d61f7d4b0 100644
--- a/jstests/core/auth1.js
+++ b/jstests/core/auth1.js
@@ -2,53 +2,51 @@ var mydb = db.getSiblingDB('auth1_db');
mydb.dropAllUsers();
pass = "a" + Math.random();
-//print( "password [" + pass + "]" );
+// print( "password [" + pass + "]" );
-mydb.createUser({user: "eliot" ,pwd: pass, roles: jsTest.basicUserRoles});
+mydb.createUser({user: "eliot", pwd: pass, roles: jsTest.basicUserRoles});
-assert( mydb.auth( "eliot" , pass ) , "auth failed" );
-assert( ! mydb.auth( "eliot" , pass + "a" ) , "auth should have failed" );
+assert(mydb.auth("eliot", pass), "auth failed");
+assert(!mydb.auth("eliot", pass + "a"), "auth should have failed");
pass2 = "b" + Math.random();
mydb.changeUserPassword("eliot", pass2);
-assert( ! mydb.auth( "eliot" , pass ) , "failed to change password failed" );
-assert( mydb.auth( "eliot" , pass2 ) , "new password didn't take" );
+assert(!mydb.auth("eliot", pass), "failed to change password failed");
+assert(mydb.auth("eliot", pass2), "new password didn't take");
-assert( mydb.auth( "eliot" , pass2 ) , "what?" );
-mydb.dropUser( "eliot" );
-assert( ! mydb.auth( "eliot" , pass2 ) , "didn't drop user" );
+assert(mydb.auth("eliot", pass2), "what?");
+mydb.dropUser("eliot");
+assert(!mydb.auth("eliot", pass2), "didn't drop user");
-
-var a = mydb.getMongo().getDB( "admin" );
+var a = mydb.getMongo().getDB("admin");
a.dropAllUsers();
pass = "c" + Math.random();
a.createUser({user: "super", pwd: pass, roles: jsTest.adminUserRoles});
-assert( a.auth( "super" , pass ) , "auth failed" );
-assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" );
+assert(a.auth("super", pass), "auth failed");
+assert(!a.auth("super", pass + "a"), "auth should have failed");
mydb.dropAllUsers();
pass = "a" + Math.random();
-mydb.createUser({user: "eliot" , pwd: pass, roles: jsTest.basicUserRoles});
+mydb.createUser({user: "eliot", pwd: pass, roles: jsTest.basicUserRoles});
-assert.commandFailed( mydb.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) );
+assert.commandFailed(mydb.runCommand({authenticate: 1, user: "eliot", nonce: "foo", key: "bar"}));
// check sanity check SERVER-3003
var before = a.system.users.count({db: mydb.getName()});
-assert.throws( function(){
- mydb.createUser({ user: "" , pwd: "abc", roles: jsTest.basicUserRoles});
-} , null , "C1" );
-assert.throws( function(){
- mydb.createUser({ user: "abc" , pwd: "", roles: jsTest.basicUserRoles});
-} , null , "C2" );
-
+assert.throws(function() {
+ mydb.createUser({user: "", pwd: "abc", roles: jsTest.basicUserRoles});
+}, null, "C1");
+assert.throws(function() {
+ mydb.createUser({user: "abc", pwd: "", roles: jsTest.basicUserRoles});
+}, null, "C2");
var after = a.system.users.count({db: mydb.getName()});
-assert( before > 0 , "C3" );
-assert.eq( before , after , "C4" );
+assert(before > 0, "C3");
+assert.eq(before, after, "C4");
// Clean up after ourselves so other tests using authentication don't get messed up.
mydb.dropAllUsers();
diff --git a/jstests/core/auth2.js b/jstests/core/auth2.js
index 2c4cff6796b..c9e6d29dac9 100644
--- a/jstests/core/auth2.js
+++ b/jstests/core/auth2.js
@@ -1,9 +1,9 @@
// just make sure logout doesn't break anything
// SERVER-724
-db.runCommand({logout : 1});
-x = db.runCommand({logout : 1});
-assert.eq( 1 , x.ok , "A" );
+db.runCommand({logout: 1});
+x = db.runCommand({logout: 1});
+assert.eq(1, x.ok, "A");
x = db.logout();
-assert.eq( 1 , x.ok , "B" );
+assert.eq(1, x.ok, "B");
diff --git a/jstests/core/auth_copydb.js b/jstests/core/auth_copydb.js
index f04cd0b0d29..ae0e6888c01 100644
--- a/jstests/core/auth_copydb.js
+++ b/jstests/core/auth_copydb.js
@@ -1,19 +1,19 @@
-a = db.getSisterDB( "copydb2-test-a" );
-b = db.getSisterDB( "copydb2-test-b" );
+a = db.getSisterDB("copydb2-test-a");
+b = db.getSisterDB("copydb2-test-b");
a.dropDatabase();
b.dropDatabase();
a.dropAllUsers();
b.dropAllUsers();
-a.foo.save( { a : 1 } );
+a.foo.save({a: 1});
-a.createUser({user: "chevy" , pwd: "chase", roles: jsTest.basicUserRoles});
+a.createUser({user: "chevy", pwd: "chase", roles: jsTest.basicUserRoles});
-assert.eq( 1 , a.foo.count() , "A" );
-assert.eq( 0 , b.foo.count() , "B" );
+assert.eq(1, a.foo.count(), "A");
+assert.eq(0, b.foo.count(), "B");
// SERVER-727
-a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" );
-assert.eq( 1 , a.foo.count() , "C" );
-assert.eq( 1 , b.foo.count() , "D" );
+a.copyDatabase(a._name, b._name, "", "chevy", "chase");
+assert.eq(1, a.foo.count(), "C");
+assert.eq(1, b.foo.count(), "D");
diff --git a/jstests/core/autoid.js b/jstests/core/autoid.js
index 6c8062fd093..f4707e5fe65 100644
--- a/jstests/core/autoid.js
+++ b/jstests/core/autoid.js
@@ -1,11 +1,11 @@
f = db.jstests_autoid;
f.drop();
-f.save( {z:1} );
-a = f.findOne( {z:1} );
-f.update( {z:1}, {z:2} );
-b = f.findOne( {z:2} );
-assert.eq( a._id.str, b._id.str );
-c = f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} );
-c = f.findOne( {} );
-assert.eq( a._id.str, c._id.str );
+f.save({z: 1});
+a = f.findOne({z: 1});
+f.update({z: 1}, {z: 2});
+b = f.findOne({z: 2});
+assert.eq(a._id.str, b._id.str);
+c = f.update({z: 2}, {z: "abcdefgabcdefgabcdefg"});
+c = f.findOne({});
+assert.eq(a._id.str, c._id.str);
diff --git a/jstests/core/bad_index_plugin.js b/jstests/core/bad_index_plugin.js
index c22bba5e0cc..7ecfe76c198 100644
--- a/jstests/core/bad_index_plugin.js
+++ b/jstests/core/bad_index_plugin.js
@@ -1,11 +1,11 @@
// SERVER-5826 ensure you can't build an index with a non-existent plugin
t = db.bad_index_plugin;
-assert.commandWorked(t.ensureIndex({ good: 1 }));
-assert.eq(t.getIndexes().length, 2); // good + _id
+assert.commandWorked(t.ensureIndex({good: 1}));
+assert.eq(t.getIndexes().length, 2); // good + _id
var err = t.ensureIndex({bad: 'bad'});
assert.commandFailed(err);
assert(err.code >= 0);
-assert.eq(t.getIndexes().length, 2); // good + _id (no bad)
+assert.eq(t.getIndexes().length, 2); // good + _id (no bad)
diff --git a/jstests/core/basic1.js b/jstests/core/basic1.js
index e5fa577f0b2..f4ca8a283d9 100644
--- a/jstests/core/basic1.js
+++ b/jstests/core/basic1.js
@@ -1,21 +1,23 @@
-t = db.getCollection( "basic1" );
+t = db.getCollection("basic1");
t.drop();
-o = { a : 1 };
-t.save( o );
+o = {
+ a: 1
+};
+t.save(o);
-assert.eq( 1 , t.findOne().a , "first" );
-assert( o._id , "now had id" );
-assert( o._id.str , "id not a real id" );
+assert.eq(1, t.findOne().a, "first");
+assert(o._id, "now had id");
+assert(o._id.str, "id not a real id");
o.a = 2;
-t.save( o );
+t.save(o);
-assert.eq( 2 , t.findOne().a , "second" );
+assert.eq(2, t.findOne().a, "second");
assert(t.validate().valid);
-// not a very good test of currentOp, but tests that it at least
+// not a very good test of currentOp, but tests that it at least
// is sort of there:
-assert( db.currentOp().inprog != null );
+assert(db.currentOp().inprog != null);
diff --git a/jstests/core/basic2.js b/jstests/core/basic2.js
index aaa3de4366e..50b3db323ce 100644
--- a/jstests/core/basic2.js
+++ b/jstests/core/basic2.js
@@ -1,16 +1,18 @@
-t = db.getCollection( "basic2" );
+t = db.getCollection("basic2");
t.drop();
-o = { n : 2 };
-t.save( o );
+o = {
+ n: 2
+};
+t.save(o);
-assert.eq( 1 , t.find().count() );
+assert.eq(1, t.find().count());
-assert.eq( 2 , t.find( o._id ).toArray()[0].n );
-assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n );
+assert.eq(2, t.find(o._id).toArray()[0].n);
+assert.eq(2, t.find(o._id, {n: 1}).toArray()[0].n);
-t.remove( o._id );
-assert.eq( 0 , t.find().count() );
+t.remove(o._id);
+assert.eq(0, t.find().count());
assert(t.validate().valid);
diff --git a/jstests/core/basic3.js b/jstests/core/basic3.js
index 61f68047d30..ec0b48ec0cf 100644
--- a/jstests/core/basic3.js
+++ b/jstests/core/basic3.js
@@ -1,8 +1,8 @@
// Tests that "." cannot be in field names
-t = db.getCollection( "foo_basic3" );
+t = db.getCollection("foo_basic3");
t.drop();
-//more diagnostics on bad save, if exception fails
+// more diagnostics on bad save, if exception fails
doBadSave = function(param) {
print("doing save with " + tojson(param));
var res = t.save(param);
@@ -10,7 +10,7 @@ doBadSave = function(param) {
printjson(res);
};
-//more diagnostics on bad save, if exception fails
+// more diagnostics on bad save, if exception fails
doBadUpdate = function(query, update) {
print("doing update with " + tojson(query) + " " + tojson(update));
var res = t.update(query, update);
@@ -18,28 +18,24 @@ doBadUpdate = function(query, update) {
printjson(res);
};
-assert.throws(doBadSave, [{"a.b":5}], ". in names aren't allowed doesn't work");
+assert.throws(doBadSave, [{"a.b": 5}], ". in names aren't allowed doesn't work");
-assert.throws(doBadSave,
- [{ "x" : { "a.b" : 5 } }],
- ". in embedded names aren't allowed doesn't work");
+assert.throws(doBadSave, [{"x": {"a.b": 5}}], ". in embedded names aren't allowed doesn't work");
// following tests make sure update keys are checked
-t.save({"a": 0,"b": 1});
+t.save({"a": 0, "b": 1});
-assert.throws(doBadUpdate, [{a:0}, { "b.b" : 1 }],
- "must deny '.' in key of update");
+assert.throws(doBadUpdate, [{a: 0}, {"b.b": 1}], "must deny '.' in key of update");
// upsert with embedded doc
-assert.throws(doBadUpdate, [{a:10}, { c: {"b.b" : 1 }}],
- "must deny embedded '.' in key of update");
+assert.throws(doBadUpdate, [{a: 10}, {c: {"b.b": 1}}], "must deny embedded '.' in key of update");
// if it is a modifier, it should still go through
-t.update({"a": 0}, {$set: { "c.c": 1}});
-t.update({"a": 0}, {$inc: { "c.c": 1}});
+t.update({"a": 0}, {$set: {"c.c": 1}});
+t.update({"a": 0}, {$inc: {"c.c": 1}});
// edge cases
-assert.throws(doBadUpdate, [{a:0}, { "":{"b.b" : 1} }],
+assert.throws(doBadUpdate,
+ [{a: 0}, {"": {"b.b": 1}}],
"must deny '' embedded '.' in key of update");
t.update({"a": 0}, {});
-
diff --git a/jstests/core/basic4.js b/jstests/core/basic4.js
index 0cf7a261e63..4b2cf6f96be 100644
--- a/jstests/core/basic4.js
+++ b/jstests/core/basic4.js
@@ -1,12 +1,12 @@
-t = db.getCollection( "basic4" );
+t = db.getCollection("basic4");
t.drop();
-t.save( { a : 1 , b : 1.0 } );
+t.save({a: 1, b: 1.0});
-assert( t.findOne() );
-assert( t.findOne( { a : 1 } ) );
-assert( t.findOne( { a : 1.0 } ) );
-assert( t.findOne( { b : 1 } ) );
-assert( t.findOne( { b : 1.0 } ) );
+assert(t.findOne());
+assert(t.findOne({a: 1}));
+assert(t.findOne({a: 1.0}));
+assert(t.findOne({b: 1}));
+assert(t.findOne({b: 1.0}));
-assert( ! t.findOne( { b : 2.0 } ) );
+assert(!t.findOne({b: 2.0}));
diff --git a/jstests/core/basic5.js b/jstests/core/basic5.js
index bfa40fb8f5e..7ec41ef7872 100644
--- a/jstests/core/basic5.js
+++ b/jstests/core/basic5.js
@@ -1,6 +1,5 @@
-t = db.getCollection( "basic5" );
+t = db.getCollection("basic5");
t.drop();
-t.save( { a : 1 , b : [ 1 , 2 , 3 ] } );
-assert.eq( 3 , t.findOne().b.length );
-
+t.save({a: 1, b: [1, 2, 3]});
+assert.eq(3, t.findOne().b.length);
diff --git a/jstests/core/basic6.js b/jstests/core/basic6.js
index e0cd6f1586e..c5919bfb158 100644
--- a/jstests/core/basic6.js
+++ b/jstests/core/basic6.js
@@ -4,5 +4,5 @@ t = db.basic6;
t.findOne();
t.a.findOne();
-assert.eq( "test.basic6" , t.toString() );
-assert.eq( "test.basic6.a" , t.a.toString() );
+assert.eq("test.basic6", t.toString());
+assert.eq("test.basic6.a", t.a.toString());
diff --git a/jstests/core/basic7.js b/jstests/core/basic7.js
index 4ae7d6902b3..bfe82ccda17 100644
--- a/jstests/core/basic7.js
+++ b/jstests/core/basic7.js
@@ -2,10 +2,9 @@
t = db.basic7;
t.drop();
-t.save( { a : 1 } );
-t.ensureIndex( { a : 1 } );
-
-assert.eq( t.find().toArray()[0].a , 1 );
-assert.eq( t.find().arrayAccess(0).a , 1 );
-assert.eq( t.find()[0].a , 1 );
+t.save({a: 1});
+t.ensureIndex({a: 1});
+assert.eq(t.find().toArray()[0].a, 1);
+assert.eq(t.find().arrayAccess(0).a, 1);
+assert.eq(t.find()[0].a, 1);
diff --git a/jstests/core/basic8.js b/jstests/core/basic8.js
index 513da0d15d1..d9b158487cc 100644
--- a/jstests/core/basic8.js
+++ b/jstests/core/basic8.js
@@ -2,10 +2,10 @@
t = db.basic8;
t.drop();
-t.save( { a : 1 } );
+t.save({a: 1});
o = t.findOne();
o.b = 2;
-t.save( o );
+t.save(o);
-assert.eq( 1 , t.find().count() , "A" );
-assert.eq( 2 , t.findOne().b , "B" );
+assert.eq(1, t.find().count(), "A");
+assert.eq(2, t.findOne().b, "B");
diff --git a/jstests/core/basic9.js b/jstests/core/basic9.js
index 3078fcad2bc..bebaeb54740 100644
--- a/jstests/core/basic9.js
+++ b/jstests/core/basic9.js
@@ -1,5 +1,5 @@
// Tests that $<prefix> field names are not allowed, but you can use a $ anywhere else.
-t = db.getCollection( "foo_basic9" );
+t = db.getCollection("foo_basic9");
t.drop();
// more diagnostics on bad save, if exception fails
@@ -10,10 +10,10 @@ doBadSave = function(param) {
print('Should have errored out: ' + tojson(res));
};
-t.save({foo$foo:5});
-t.save({foo$:5});
+t.save({foo$foo: 5});
+t.save({foo$: 5});
-assert.throws(doBadSave, [{$foo:5}], "key names aren't allowed to start with $ doesn't work");
-assert.throws(doBadSave,
- [{x:{$foo:5}}],
+assert.throws(doBadSave, [{$foo: 5}], "key names aren't allowed to start with $ doesn't work");
+assert.throws(doBadSave,
+ [{x: {$foo: 5}}],
"embedded key names aren't allowed to start with $ doesn't work");
diff --git a/jstests/core/basica.js b/jstests/core/basica.js
index 0cc364beb42..1fe8b7c5de4 100644
--- a/jstests/core/basica.js
+++ b/jstests/core/basica.js
@@ -1,10 +1,9 @@
t = db.basica;
-
t.drop();
-t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+t.save({a: 1, b: [{x: 2, y: 2}, {x: 3, y: 3}]});
x = t.findOne();
x.b["0"].x = 4;
@@ -14,20 +13,19 @@ x.b[0]["asd"] = 11;
x.a = 2;
x.z = 11;
-tojson( x );
-t.save( x );
-assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" );
+tojson(x);
+t.save(x);
+assert.eq(tojson(x), tojson(t.findOne()), "FIRST");
// -----
t.drop();
-t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+t.save({a: 1, b: [{x: 2, y: 2}, {x: 3, y: 3}]});
x = t.findOne();
x.b["0"].z = 4;
-//printjson( x );
-t.save( x );
-assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" );
-
+// printjson( x );
+t.save(x);
+assert.eq(tojson(x), tojson(t.findOne()), "SECOND");
diff --git a/jstests/core/basicb.js b/jstests/core/basicb.js
index 95eb60151af..65531d706a1 100644
--- a/jstests/core/basicb.js
+++ b/jstests/core/basicb.js
@@ -2,5 +2,6 @@
t = db.basicb;
t.drop();
-assert.throws( function() { t.insert( { '$a' : 5 } ); });
-
+assert.throws(function() {
+ t.insert({'$a': 5});
+});
diff --git a/jstests/core/batch_size.js b/jstests/core/batch_size.js
index dce9cda4451..eca41e412ba 100644
--- a/jstests/core/batch_size.js
+++ b/jstests/core/batch_size.js
@@ -81,21 +81,26 @@ assert.lte(explain.executionStats.totalKeysExamined, 60, 'S');
assert.lte(explain.executionStats.totalDocsExamined, 60, 'T');
assert.eq(explain.executionStats.nReturned, 6, 'U');
-
// -------
-
// During plan ranking, we treat ntoreturn as a limit. This prevents us from buffering
// too much data in a blocking sort stage during plan ranking.
t.drop();
// Generate big string to use in the object - 1MB+ String
var bigStr = "ABCDEFGHIJKLMNBOPQRSTUVWXYZ012345687890";
-while (bigStr.length < 1000000) { bigStr = bigStr + "::" + bigStr; }
+while (bigStr.length < 1000000) {
+ bigStr = bigStr + "::" + bigStr;
+}
// Insert enough documents to exceed the 32 MB in-memory sort limit.
for (var i = 0; i < 40; i++) {
- var doc = {x: 1, y: 1, z: i, big: bigStr};
+ var doc = {
+ x: 1,
+ y: 1,
+ z: i,
+ big: bigStr
+ };
t.insert(doc);
}
diff --git a/jstests/core/batch_write_command_delete.js b/jstests/core/batch_write_command_delete.js
index eb08179e463..2aefcea6a7f 100644
--- a/jstests/core/batch_write_command_delete.js
+++ b/jstests/core/batch_write_command_delete.js
@@ -2,7 +2,7 @@
// Ensures that mongod respects the batch write protocols for delete
//
-var coll = db.getCollection( "batch_write_delete" );
+var coll = db.getCollection("batch_write_delete");
coll.drop();
assert(coll.getDB().getMongo().useWriteCommands(), "test is not running with write commands");
@@ -13,18 +13,13 @@ var batch;
var maxWriteBatchSize = 1000;
-function resultOK( result ) {
- return result.ok &&
- !( 'code' in result ) &&
- !( 'errmsg' in result ) &&
- !( 'errInfo' in result ) &&
- !( 'writeErrors' in result );
+function resultOK(result) {
+ return result.ok && !('code' in result) && !('errmsg' in result) && !('errInfo' in result) &&
+ !('writeErrors' in result);
}
-function resultNOK( result ) {
- return !result.ok &&
- typeof( result.code ) == 'number' &&
- typeof( result.errmsg ) == 'string';
+function resultNOK(result) {
+ return !result.ok && typeof(result.code) == 'number' && typeof(result.errmsg) == 'string';
}
// EACH TEST BELOW SHOULD BE SELF-CONTAINED, FOR EASIER DEBUGGING
@@ -32,8 +27,10 @@ function resultNOK( result ) {
//
// NO DOCS, illegal command
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName()};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName()
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
assert.eq(1, coll.count());
@@ -41,9 +38,11 @@ assert.eq(1, coll.count());
//
// Single document remove, default write concern specified
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {a:1}, limit: 1}]};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 1}]
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -52,10 +51,12 @@ assert.eq(0, coll.count());
//
// Single document delete, w:0 write concern specified
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {a: 1}, limit: 1}],
- writeConcern:{w:0}};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 1}],
+ writeConcern: {w: 0}
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(0, coll.count());
@@ -67,11 +68,13 @@ for (var field in result) {
//
// Single document remove, w:1 write concern specified, ordered:true
coll.remove({});
-coll.insert([{a:1}, {a:1}]);
-request = { delete: coll.getName(),
- deletes: [{q: {a: 1}, limit: 1}],
- writeConcern:{w:1},
- ordered: false};
+coll.insert([{a: 1}, {a: 1}]);
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 1}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -80,11 +83,13 @@ assert.eq(1, coll.count());
//
// Multiple document remove, w:1 write concern specified, ordered:true, default top
coll.remove({});
-coll.insert([{a:1}, {a:1}]);
-request = { delete: coll.getName(),
- deletes: [{q: {a: 1}, limit: 0}],
- writeConcern:{w:1},
- ordered: false};
+coll.insert([{a: 1}, {a: 1}]);
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 0}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(2, result.n);
@@ -93,11 +98,13 @@ assert.eq(0, coll.count());
//
// Multiple document remove, w:1 write concern specified, ordered:true, top:0
coll.remove({});
-coll.insert([{a:1}, {a:1}]);
-request = { delete: coll.getName(),
- deletes: [{q: {a: 1}, limit: 0}],
- writeConcern:{w:1},
- ordered: false};
+coll.insert([{a: 1}, {a: 1}]);
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 0}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(2, result.n);
@@ -108,13 +115,15 @@ assert.eq(0, coll.count());
coll.remove({});
batch = [];
for (var i = 0; i < maxWriteBatchSize; ++i) {
- coll.insert({a:i});
- batch.push({q:{a:i}, limit: 0});
+ coll.insert({a: i});
+ batch.push({q: {a: i}, limit: 0});
}
-request = { delete: coll.getName(),
- deletes: batch,
- writeConcern:{w:1},
- ordered: false};
+request = {
+ delete: coll.getName(),
+ deletes: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(batch.length, result.n);
@@ -125,13 +134,15 @@ assert.eq(0, coll.count());
coll.remove({});
batch = [];
for (var i = 0; i < maxWriteBatchSize + 1; ++i) {
- coll.insert({a:i});
- batch.push({q:{a:i}, limit: 0});
+ coll.insert({a: i});
+ batch.push({q: {a: i}, limit: 0});
}
-request = { delete: coll.getName(),
- deletes: batch,
- writeConcern:{w:1},
- ordered: false};
+request = {
+ delete: coll.getName(),
+ deletes: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
assert.eq(batch.length, coll.count());
@@ -139,13 +150,13 @@ assert.eq(batch.length, coll.count());
//
// Cause remove error using ordered:true
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {a:1}, limit: 0},
- {q: {$set: {a: 1}}, limit: 0},
- {q: {$set: {a: 1}}, limit: 0}],
- writeConcern:{w:1},
- ordered: true};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 0}, {q: {$set: {a: 1}}, limit: 0}, {q: {$set: {a: 1}}, limit: 0}],
+ writeConcern: {w: 1},
+ ordered: true
+};
result = coll.runCommand(request);
assert.commandWorked(result);
assert.eq(1, result.n);
@@ -160,13 +171,13 @@ assert.eq(0, coll.count());
//
// Cause remove error using ordered:false
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {$set: {a: 1}}, limit: 0},
- {q: {$set: {a: 1}}, limit: 0},
- {q: {a:1}, limit: 0}],
- writeConcern:{w:1},
- ordered: false};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {$set: {a: 1}}, limit: 0}, {q: {$set: {a: 1}}, limit: 0}, {q: {a: 1}, limit: 0}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert.commandWorked(result);
assert.eq(1, result.n);
@@ -184,13 +195,13 @@ assert.eq(0, coll.count());
//
// Cause remove error using ordered:false and w:0
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {$set: {a: 1}}, limit: 0},
- {q: {$set: {a: 1}}, limit: 0},
- {q: {a:1}, limit: 0}],
- writeConcern:{w:0},
- ordered: false};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {$set: {a: 1}}, limit: 0}, {q: {$set: {a: 1}}, limit: 0}, {q: {a: 1}, limit: 0}],
+ writeConcern: {w: 0},
+ ordered: false
+};
result = coll.runCommand(request);
assert.commandWorked(result);
assert.eq(0, coll.count());
@@ -202,13 +213,14 @@ for (var field in result) {
//
// Cause remove error using ordered:true and w:0
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {$set: {a: 1}}, limit: 0},
- {q: {$set: {a: 1}}, limit: 0},
- {q: {a:1}, limit:(1)}],
- writeConcern:{w:0},
- ordered: true};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes:
+ [{q: {$set: {a: 1}}, limit: 0}, {q: {$set: {a: 1}}, limit: 0}, {q: {a: 1}, limit: (1)}],
+ writeConcern: {w: 0},
+ ordered: true
+};
result = coll.runCommand(request);
assert.commandWorked(result);
assert.eq(1, coll.count());
@@ -220,10 +232,12 @@ for (var field in result) {
//
// When limit is not 0 and 1
coll.remove({});
-coll.insert({a:1});
-request = { delete: coll.getName(),
- deletes: [{q: {a: 1}, limit: 2}],
- writeConcern:{w:0},
- ordered: false};
+coll.insert({a: 1});
+request = {
+ delete: coll.getName(),
+ deletes: [{q: {a: 1}, limit: 2}],
+ writeConcern: {w: 0},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
diff --git a/jstests/core/batch_write_command_insert.js b/jstests/core/batch_write_command_insert.js
index f0b2ed24985..6b42cf08ebf 100644
--- a/jstests/core/batch_write_command_insert.js
+++ b/jstests/core/batch_write_command_insert.js
@@ -2,7 +2,7 @@
// Ensures that mongod respects the batch write protocol for inserts
//
-var coll = db.getCollection( "batch_write_insert" );
+var coll = db.getCollection("batch_write_insert");
coll.drop();
assert(coll.getDB().getMongo().useWriteCommands(), "test is not running with write commands");
@@ -13,18 +13,13 @@ var batch;
var maxWriteBatchSize = 1000;
-function resultOK( result ) {
- return result.ok &&
- !( 'code' in result ) &&
- !( 'errmsg' in result ) &&
- !( 'errInfo' in result ) &&
- !( 'writeErrors' in result );
+function resultOK(result) {
+ return result.ok && !('code' in result) && !('errmsg' in result) && !('errInfo' in result) &&
+ !('writeErrors' in result);
}
-function resultNOK( result ) {
- return !result.ok &&
- typeof( result.code ) == 'number' &&
- typeof( result.errmsg ) == 'string';
+function resultNOK(result) {
+ return !result.ok && typeof(result.code) == 'number' && typeof(result.errmsg) == 'string';
}
// EACH TEST BELOW SHOULD BE SELF-CONTAINED, FOR EASIER DEBUGGING
@@ -32,14 +27,19 @@ function resultNOK( result ) {
//
// NO DOCS, illegal command
coll.remove({});
-request = { insert: coll.getName() };
+request = {
+ insert: coll.getName()
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
//
// Single document insert, no write concern specified
coll.remove({});
-request = { insert: coll.getName(), documents: [{ a: 1 }]};
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}]
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -48,7 +48,11 @@ assert.eq(coll.count(), 1);
//
// Single document insert, w:0 write concern specified, missing ordered
coll.remove({});
-request = { insert: coll.getName(), documents: [{ a: 1 }], writeConcern: { w: 0 }};
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}],
+ writeConcern: {w: 0}
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(coll.count(), 1);
@@ -60,7 +64,12 @@ for (var field in result) {
//
// Single document insert, w:1 write concern specified, ordered:true
coll.remove({});
-request = { insert: coll.getName(), documents: [{ a: 1 }], writeConcern: { w: 1 }, ordered: true };
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}],
+ writeConcern: {w: 1},
+ ordered: true
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -69,7 +78,12 @@ assert.eq(coll.count(), 1);
//
// Single document insert, w:1 write concern specified, ordered:false
coll.remove({});
-request = { insert: coll.getName(), documents: [{ a: 1 }], writeConcern: { w: 1 }, ordered: false };
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -80,8 +94,8 @@ assert.eq(coll.count(), 1);
coll.remove({});
request = {
insert: coll.getName(),
- documents: [{ $set: { a: 1 }}],
- writeConcern: { w: 1 },
+ documents: [{$set: {a: 1}}],
+ writeConcern: {w: 1},
ordered: false
};
result = coll.runCommand(request);
@@ -96,8 +110,8 @@ assert.eq(coll.count(), 0);
coll.remove({});
request = {
insert: coll.getName(),
- documents: [{ o: { $set: { a: 1 }}}],
- writeConcern: { w: 1 },
+ documents: [{o: {$set: {a: 1}}}],
+ writeConcern: {w: 1},
ordered: false
};
result = coll.runCommand(request);
@@ -112,7 +126,12 @@ batch = [];
for (var i = 0; i < maxWriteBatchSize; ++i) {
batch.push({});
}
-request = { insert: coll.getName(), documents: batch, writeConcern: { w: 1 }, ordered: false };
+request = {
+ insert: coll.getName(),
+ documents: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(batch.length, result.n);
@@ -125,7 +144,12 @@ batch = [];
for (var i = 0; i < maxWriteBatchSize + 1; ++i) {
batch.push({});
}
-request = { insert : coll.getName(), documents: batch, writeConcern: { w: 1 }, ordered: false };
+request = {
+ insert: coll.getName(),
+ documents: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
assert.eq(coll.count(), 0);
@@ -133,7 +157,10 @@ assert.eq(coll.count(), 0);
//
// Batch of size zero should fail to insert
coll.remove({});
-request = { insert: coll.getName(), documents: [] };
+request = {
+ insert: coll.getName(),
+ documents: []
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
@@ -141,13 +168,16 @@ assert(resultNOK(result), tojson(result));
//
// Unique index tests
coll.remove({});
-coll.ensureIndex({a : 1}, {unique : true});
+coll.ensureIndex({a: 1}, {unique: true});
//
// Should fail single insert due to duplicate key
coll.remove({});
-coll.insert({a:1});
-request = { insert: coll.getName(), documents: [{ a: 1 }] };
+coll.insert({a: 1});
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(1, result.writeErrors.length);
@@ -159,9 +189,9 @@ assert.eq(coll.count(), 1);
coll.remove({});
request = {
insert: coll.getName(),
- documents: [{ a: 1 }, { a: 1 }, { a: 1 }],
- writeConcern: { w: 1 },
- ordered:false
+ documents: [{a: 1}, {a: 1}, {a: 1}],
+ writeConcern: {w: 1},
+ ordered: false
};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
@@ -184,8 +214,8 @@ assert.eq(coll.count(), 1);
coll.remove({});
request = {
insert: coll.getName(),
- documents: [{ a: 1 }, { a: 1 }, { a: 1 }],
- writeConcern: { w: 1 },
+ documents: [{a: 1}, {a: 1}, {a: 1}],
+ writeConcern: {w: 1},
ordered: true
};
result = coll.runCommand(request);
@@ -202,12 +232,15 @@ assert.eq(coll.count(), 1);
//
// Ensure _id is the first field in all documents
coll.remove({});
-request = { insert: coll.getName(), documents : [{ a: 1 }, { a: 2, _id: 2 }]};
+request = {
+ insert: coll.getName(),
+ documents: [{a: 1}, {a: 2, _id: 2}]
+};
result = coll.runCommand(request);
assert.eq(2, coll.count());
coll.find().forEach(function(doc) {
var firstKey = null;
- for ( var key in doc) {
+ for (var key in doc) {
firstKey = key;
break;
}
@@ -221,8 +254,10 @@ coll.find().forEach(function(doc) {
//
// Successful index creation
coll.drop();
-request = { insert: "system.indexes",
- documents: [{ ns: coll.toString(), key: { x: 1 }, name: "x_1" }]};
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString(), key: {x: 1}, name: "x_1"}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(1, result.n);
@@ -231,10 +266,11 @@ assert.eq(coll.getIndexes().length, 2);
//
// Duplicate index insertion gives n = 0
coll.drop();
-coll.ensureIndex({x : 1}, {unique : true});
-request = { insert: "system.indexes",
- documents : [{ ns: coll.toString(),
- key: { x: 1 }, name: "x_1", unique: true}]};
+coll.ensureIndex({x: 1}, {unique: true});
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString(), key: {x: 1}, name: "x_1", unique: true}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(0, result.n, 'duplicate index insertion should give n = 0: ' + tojson(result));
@@ -244,9 +280,10 @@ assert.eq(coll.getIndexes().length, 2);
//
// Invalid index insertion with mismatched collection db
coll.drop();
-request = { insert: "system.indexes",
- documents: [{ ns: "invalid." + coll.getName(),
- key: { x: 1 }, name: "x_1", unique: true }]};
+request = {
+ insert: "system.indexes",
+ documents: [{ns: "invalid." + coll.getName(), key: {x: 1}, name: "x_1", unique: true}]
+};
result = coll.runCommand(request);
assert(!result.ok, tojson(result));
assert.eq(coll.getIndexes().length, 0);
@@ -254,7 +291,10 @@ assert.eq(coll.getIndexes().length, 0);
//
// Empty index insertion
coll.drop();
-request = { insert: "system.indexes", documents : [{}] };
+request = {
+ insert: "system.indexes",
+ documents: [{}]
+};
result = coll.runCommand(request);
assert(!result.ok, tojson(result));
assert.eq(coll.getIndexes().length, 0);
@@ -262,7 +302,10 @@ assert.eq(coll.getIndexes().length, 0);
//
// Invalid index desc
coll.drop();
-request = { insert: "system.indexes", documents: [{ ns: coll.toString() }] };
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString()}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(0, result.n);
@@ -272,8 +315,10 @@ assert.eq(coll.getIndexes().length, 1);
//
// Invalid index desc
coll.drop();
-request = { insert: "system.indexes",
- documents: [{ ns: coll.toString(), key: { x: 1 }}] };
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString(), key: {x: 1}}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(0, result.n);
@@ -283,8 +328,10 @@ assert.eq(coll.getIndexes().length, 1);
//
// Invalid index desc
coll.drop();
-request = { insert: "system.indexes",
- documents: [{ ns: coll.toString(), name: "x_1" }]};
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString(), name: "x_1"}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(0, result.n);
@@ -294,9 +341,13 @@ assert.eq(coll.getIndexes().length, 1);
//
// Cannot insert more than one index at a time through the batch writes
coll.drop();
-request = { insert: "system.indexes",
- documents: [{ ns: coll.toString(), key: { x: 1 }, name: "x_1" },
- { ns: coll.toString(), key: { y: 1 }, name: "y_1" }]};
+request = {
+ insert: "system.indexes",
+ documents: [
+ {ns: coll.toString(), key: {x: 1}, name: "x_1"},
+ {ns: coll.toString(), key: {y: 1}, name: "y_1"}
+ ]
+};
result = coll.runCommand(request);
assert(!result.ok, tojson(result));
assert.eq(coll.getIndexes().length, 0);
@@ -308,12 +359,12 @@ coll.insert({_id: 50}); // Create a document to force a duplicate key exception
var bulk = coll.initializeOrderedBulkOp();
for (i = 1; i < 100; i++) {
- bulk.insert( { _id: i } );
+ bulk.insert({_id: i});
}
try {
bulk.execute();
assert(false, "should have failed due to duplicate key");
-} catch(err) {
+} catch (err) {
assert(coll.count() == 50, "Unexpected number inserted by bulk write: " + coll.count());
}
@@ -322,14 +373,12 @@ try {
// Note: due to SERVER-13304 this test is at the end of this file, and we don't drop
// the collection afterwards.
coll.drop();
-coll.insert({ x : 1 });
-request = { insert: "system.indexes",
- documents: [{ ns: coll.toString(),
- key: { x: 1 },
- name: "x_1",
- background : true }]};
+coll.insert({x: 1});
+request = {
+ insert: "system.indexes",
+ documents: [{ns: coll.toString(), key: {x: 1}, name: "x_1", background: true}]
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(1, result.n);
assert.eq(coll.getIndexes().length, 2);
-
diff --git a/jstests/core/batch_write_command_update.js b/jstests/core/batch_write_command_update.js
index 00964131efe..2d9d2d699b2 100644
--- a/jstests/core/batch_write_command_update.js
+++ b/jstests/core/batch_write_command_update.js
@@ -2,7 +2,7 @@
// Ensures that mongod respects the batch write protocols for updates
//
-var coll = db.getCollection( "batch_write_update" );
+var coll = db.getCollection("batch_write_update");
coll.drop();
assert(coll.getDB().getMongo().useWriteCommands(), "test is not running with write commands");
@@ -13,18 +13,13 @@ var batch;
var maxWriteBatchSize = 1000;
-function resultOK( result ) {
- return result.ok &&
- !( 'code' in result ) &&
- !( 'errmsg' in result ) &&
- !( 'errInfo' in result ) &&
- !( 'writeErrors' in result );
+function resultOK(result) {
+ return result.ok && !('code' in result) && !('errmsg' in result) && !('errInfo' in result) &&
+ !('writeErrors' in result);
}
-function resultNOK( result ) {
- return !result.ok &&
- typeof( result.code ) == 'number' &&
- typeof( result.errmsg ) == 'string';
+function resultNOK(result) {
+ return !result.ok && typeof(result.code) == 'number' && typeof(result.errmsg) == 'string';
}
// EACH TEST BELOW SHOULD BE SELF-CONTAINED, FOR EASIER DEBUGGING
@@ -32,19 +27,23 @@ function resultNOK( result ) {
//
// NO DOCS, illegal command
coll.remove({});
-request = { update: coll.getName() };
+request = {
+ update: coll.getName()
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
//
// Single document upsert, no write concern specified
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { a: 1 }}, upsert: true }]};
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}]
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
-assert( 'upserted' in result );
+assert('upserted' in result);
assert.eq(1, result.upserted.length);
assert.eq(0, result.upserted[0].index);
@@ -56,13 +55,15 @@ assert.eq(0, result.nModified, "missing/wrong nModified");
//
// Single document upsert, write concern specified, no ordered specified
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { a: 1 }}, upsert: true }],
- writeConcern: { w: 1 }};
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}],
+ writeConcern: {w: 1}
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
-assert( 'upserted' in result );
+assert('upserted' in result);
assert.eq(1, result.upserted.length);
assert.eq(0, result.upserted[0].index);
@@ -74,14 +75,16 @@ assert.eq(0, result.nModified, "missing/wrong nModified");
//
// Single document upsert, write concern specified, ordered = true
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { a: 1 }}, upsert: true }],
- writeConcern: { w: 1 },
- ordered: true };
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}],
+ writeConcern: {w: 1},
+ ordered: true
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
-assert( 'upserted' in result );
+assert('upserted' in result);
assert.eq(1, result.upserted.length);
assert.eq(0, result.upserted[0].index);
@@ -93,13 +96,15 @@ assert.eq(0, result.nModified, "missing/wrong nModified");
//
// Single document upsert, write concern 0 specified, ordered = true
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { a: 1 }}, upsert: true }],
- writeConcern: { w: 0 },
- ordered: true };
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}],
+ writeConcern: {w: 0},
+ ordered: true
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
-assert.eq(1, coll.count({ }));
+assert.eq(1, coll.count({}));
for (var field in result) {
assert.eq('ok', field, 'unexpected field found in result: ' + field);
@@ -108,11 +113,15 @@ for (var field in result) {
//
// Two document upsert, write concern 0 specified, ordered = true
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { a: 2 }, u: { $set: { a: 1 }}, upsert: true },
- { q: { a: 2 }, u: { $set: { a: 2 }}, upsert: true }],
- writeConcern: { w: 0 },
- ordered: true };
+request = {
+ update: coll.getName(),
+ updates: [
+ {q: {a: 2}, u: {$set: {a: 1}}, upsert: true},
+ {q: {a: 2}, u: {$set: {a: 2}}, upsert: true}
+ ],
+ writeConcern: {w: 0},
+ ordered: true
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(2, coll.count());
@@ -124,10 +133,12 @@ for (var field in result) {
//
// Single document update
coll.remove({});
-coll.insert({a:1});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { c: 1 }}}],
- writeConcern: { w: 1 }};
+coll.insert({a: 1});
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {c: 1}}}],
+ writeConcern: {w: 1}
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(1, result.n);
@@ -138,12 +149,16 @@ assert.eq(1, result.nModified, "missing/wrong nModified");
//
// Multi document update/upsert
coll.remove({});
-coll.insert({b:1});
-request = { update: coll.getName(),
- updates: [{ q: { b: 1 }, u: { $set: { b: 1, a: 1 }}, upsert: true },
- { q: { b: 2 }, u: { $set: { b: 2, a: 1 }}, upsert: true }],
- writeConcern: { w: 1 },
- ordered: false };
+coll.insert({b: 1});
+request = {
+ update: coll.getName(),
+ updates: [
+ {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true},
+ {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true}
+ ],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(2, result.n);
@@ -157,86 +172,97 @@ assert.eq(2, coll.count());
//
// Multiple document update
coll.remove({});
-coll.insert({a:1});
-coll.insert({a:1});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { c: 2 }}, multi: true }],
- writeConcern: { w: 1 },
- ordered: false };
+coll.insert({a: 1});
+coll.insert({a: 1});
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {c: 2}}, multi: true}],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(2, result.n);
assert.eq(2, result.nModified, "missing/wrong nModified");
-assert.eq(2, coll.find({a:1, c:2}).count());
+assert.eq(2, coll.find({a: 1, c: 2}).count());
assert.eq(2, coll.count());
//
-//Multiple document update, some no-ops
+// Multiple document update, some no-ops
coll.remove({});
-coll.insert({a:1, c:2});
-coll.insert({a:1});
-request = { update: coll.getName(),
- updates: [{ q: { a: 1 }, u: { $set: { c: 2 }}, multi: true }],
- writeConcern: { w: 1 },
- ordered: false };
-printjson( result = coll.runCommand(request) );
+coll.insert({a: 1, c: 2});
+coll.insert({a: 1});
+request = {
+ update: coll.getName(),
+ updates: [{q: {a: 1}, u: {$set: {c: 2}}, multi: true}],
+ writeConcern: {w: 1},
+ ordered: false
+};
+printjson(result = coll.runCommand(request));
assert(resultOK(result), tojson(result));
assert.eq(2, result.n);
assert.eq(1, result.nModified, "missing/wrong nModified");
-assert.eq(2, coll.find({a:1, c:2}).count());
+assert.eq(2, coll.find({a: 1, c: 2}).count());
assert.eq(2, coll.count());
//
// Large batch under the size threshold should update successfully
coll.remove({});
-coll.insert({a:0});
+coll.insert({a: 0});
batch = [];
for (var i = 0; i < maxWriteBatchSize; ++i) {
- batch.push({q:{}, u: {$inc: {a:1}}});
+ batch.push({q: {}, u: {$inc: {a: 1}}});
}
-request = { update: coll.getName(),
- updates: batch,
- writeConcern: { w: 1 },
- ordered: false };
+request = {
+ update: coll.getName(),
+ updates: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultOK(result), tojson(result));
assert.eq(batch.length, result.n);
assert.eq(batch.length, result.nModified, "missing/wrong nModified");
-assert.eq(1, coll.find({a:batch.length}).count());
+assert.eq(1, coll.find({a: batch.length}).count());
assert.eq(1, coll.count());
//
// Large batch above the size threshold should fail to update
coll.remove({});
-coll.insert({a:0});
+coll.insert({a: 0});
batch = [];
for (var i = 0; i < maxWriteBatchSize + 1; ++i) {
- batch.push({q:{}, u: {$inc: {a:1}}});
+ batch.push({q: {}, u: {$inc: {a: 1}}});
}
-request = { update: coll.getName(),
- updates: batch,
- writeConcern: { w: 1 },
- ordered: false };
+request = {
+ update: coll.getName(),
+ updates: batch,
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(resultNOK(result), tojson(result));
-assert.eq(1, coll.find({a:0}).count());
+assert.eq(1, coll.find({a: 0}).count());
assert.eq(1, coll.count());
-
//
//
// Unique index tests
coll.remove({});
-coll.ensureIndex({a : 1}, {unique : true});
+coll.ensureIndex({a: 1}, {unique: true});
//
// Upsert fail due to duplicate key index, w:0, ordered:true
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { b: 1 }, u: { $set: { b: 1, a: 1 }}, upsert: true },
- { q: { b: 2 }, u: { $set: { b: 2, a: 1 }}, upsert: true }],
- writeConcern: { w: 0 },
- ordered: true };
+request = {
+ update: coll.getName(),
+ updates: [
+ {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true},
+ {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true}
+ ],
+ writeConcern: {w: 0},
+ ordered: true
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(1, coll.count());
@@ -248,12 +274,16 @@ for (var field in result) {
//
// Upsert fail due to duplicate key index, w:1, ordered:true
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { b: 1 }, u: { $set: { b: 1, a: 1 }}, upsert: true },
- { q: { b: 3 }, u: { $set: { b: 3, a: 2 }}, upsert: true },
- { q: { b: 2 }, u: { $set: { b: 2, a: 1 }}, upsert: true }],
- writeConcern: { w: 1 },
- ordered: true };
+request = {
+ update: coll.getName(),
+ updates: [
+ {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true},
+ {q: {b: 3}, u: {$set: {b: 3, a: 2}}, upsert: true},
+ {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true}
+ ],
+ writeConcern: {w: 1},
+ ordered: true
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(2, result.n);
@@ -274,13 +304,17 @@ assert.eq(1, coll.count({_id: result.upserted[1]._id}));
//
// Upsert fail due to duplicate key index, w:1, ordered:false
coll.remove({});
-request = { update: coll.getName(),
- updates: [{ q: { b: 1 }, u: { $set: { b: 1, a: 1 }}, upsert: true },
- { q: { b: 2 }, u: { $set: { b: 2, a: 1 }}, upsert: true },
- { q: { b: 2 }, u: { $set: { b: 2, a: 1 }}, upsert: true },
- { q: { b: 3 }, u: { $set: { b: 3, a: 3 }}, upsert: true }],
- writeConcern: { w: 1 },
- ordered: false };
+request = {
+ update: coll.getName(),
+ updates: [
+ {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true},
+ {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true},
+ {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true},
+ {q: {b: 3}, u: {$set: {b: 3, a: 3}}, upsert: true}
+ ],
+ writeConcern: {w: 1},
+ ordered: false
+};
result = coll.runCommand(request);
assert(result.ok, tojson(result));
assert.eq(2, result.n);
diff --git a/jstests/core/batch_write_command_wc.js b/jstests/core/batch_write_command_wc.js
index bad79e5d605..07a6bf96f9d 100644
--- a/jstests/core/batch_write_command_wc.js
+++ b/jstests/core/batch_write_command_wc.js
@@ -2,15 +2,13 @@
// Ensures that the server respects the batch write command WriteConcern, and behaves correctly
//
-var coll = db.getCollection( "batch_write_wc" );
+var coll = db.getCollection("batch_write_wc");
coll.drop();
assert(coll.getDB().getMongo().useWriteCommands(), "test is not running with write commands");
// Basic validation of WriteConcern
// -- {}, versus {w:0}/{w:1} +opt wTimeout
-// -- j:1, fsync:1,
+// -- j:1, fsync:1,
// -- replication: w:N (>1), w:String, wTimeout
// -- randomField:true, etc
-
-
diff --git a/jstests/core/bench_test1.js b/jstests/core/bench_test1.js
index bbc38af2ba5..2242cf229ec 100644
--- a/jstests/core/bench_test1.js
+++ b/jstests/core/bench_test1.js
@@ -2,36 +2,40 @@
t = db.bench_test1;
t.drop();
-t.insert( { _id : 1 , x : 1 } );
-t.insert( { _id : 2 , x : 1 } );
+t.insert({_id: 1, x: 1});
+t.insert({_id: 2, x: 1});
ops = [
- { op : "findOne" , ns : t.getFullName() , query : { _id : 1 } } ,
- { op : "update" , ns : t.getFullName() , query : { _id : 1 } , update : { $inc : { x : 1 } } }
+ {op: "findOne", ns: t.getFullName(), query: {_id: 1}},
+ {op: "update", ns: t.getFullName(), query: {_id: 1}, update: {$inc: {x: 1}}}
];
seconds = 2;
-benchArgs = { ops : ops , parallel : 2 , seconds : seconds , host : db.getMongo().host };
+benchArgs = {
+ ops: ops,
+ parallel: 2,
+ seconds: seconds,
+ host: db.getMongo().host
+};
if (jsTest.options().auth) {
benchArgs['db'] = 'admin';
benchArgs['username'] = jsTest.options().adminUser;
benchArgs['password'] = jsTest.options().adminPassword;
}
-res = benchRun( benchArgs );
-
-assert.lte( seconds * res.update , t.findOne( { _id : 1 } ).x * 1.5 , "A1" );
-
-
-assert.eq( 1 , t.getIndexes().length , "B1" );
-benchArgs['ops']=[ { op : "createIndex" , ns : t.getFullName() , key : { x : 1 } } ];
-benchArgs['parallel']=1;
-benchArgs['seconds']=1;
-benchRun( benchArgs );
-assert.eq( 2 , t.getIndexes().length , "B2" );
-benchArgs['ops']=[ { op : "dropIndex" , ns : t.getFullName() , key : { x : 1 } } ];
-benchRun( benchArgs );
-assert.soon( function(){ return t.getIndexes().length == 1; } );
-
-
+res = benchRun(benchArgs);
+
+assert.lte(seconds * res.update, t.findOne({_id: 1}).x * 1.5, "A1");
+
+assert.eq(1, t.getIndexes().length, "B1");
+benchArgs['ops'] = [{op: "createIndex", ns: t.getFullName(), key: {x: 1}}];
+benchArgs['parallel'] = 1;
+benchArgs['seconds'] = 1;
+benchRun(benchArgs);
+assert.eq(2, t.getIndexes().length, "B2");
+benchArgs['ops'] = [{op: "dropIndex", ns: t.getFullName(), key: {x: 1}}];
+benchRun(benchArgs);
+assert.soon(function() {
+ return t.getIndexes().length == 1;
+});
diff --git a/jstests/core/bench_test2.js b/jstests/core/bench_test2.js
index c2e3881632c..072686348e4 100644
--- a/jstests/core/bench_test2.js
+++ b/jstests/core/bench_test2.js
@@ -2,16 +2,20 @@
t = db.bench_test2;
t.drop();
-for ( i=0; i<100; i++ )
- t.insert( { _id : i , x : 0 } );
-
-benchArgs = { ops : [ { ns : t.getFullName() ,
- op : "update" ,
- query : { _id : { "#RAND_INT" : [ 0 , 100 ] } } ,
- update : { $inc : { x : 1 } } } ] ,
- parallel : 2 ,
- seconds : 1 ,
- host : db.getMongo().host };
+for (i = 0; i < 100; i++)
+ t.insert({_id: i, x: 0});
+
+benchArgs = {
+ ops: [{
+ ns: t.getFullName(),
+ op: "update",
+ query: {_id: {"#RAND_INT": [0, 100]}},
+ update: {$inc: {x: 1}}
+ }],
+ parallel: 2,
+ seconds: 1,
+ host: db.getMongo().host
+};
if (jsTest.options().auth) {
benchArgs['db'] = 'admin';
@@ -19,29 +23,25 @@ if (jsTest.options().auth) {
benchArgs['password'] = jsTest.options().adminPassword;
}
-res = benchRun( benchArgs );
-printjson( res );
+res = benchRun(benchArgs);
+printjson(res);
sumsq = 0;
sum = 0;
min = 1000;
max = 0;
-t.find().forEach(
- function(z){
- sum += z.x;
- sumsq += Math.pow( ( res.update / 100 ) - z.x , 2 );
- min = Math.min( z.x , min );
- max = Math.max( z.x , max );
- }
-);
+t.find().forEach(function(z) {
+ sum += z.x;
+ sumsq += Math.pow((res.update / 100) - z.x, 2);
+ min = Math.min(z.x, min);
+ max = Math.max(z.x, max);
+});
avg = sum / 100;
-std = Math.sqrt( sumsq / 100 );
-
-print( "Avg: " + avg );
-print( "Std: " + std );
-print( "Min: " + min );
-print( "Max: " + max );
-
+std = Math.sqrt(sumsq / 100);
+print("Avg: " + avg);
+print("Std: " + std);
+print("Min: " + min);
+print("Max: " + max);
diff --git a/jstests/core/bench_test3.js b/jstests/core/bench_test3.js
index 2e130662829..24e230cc16d 100644
--- a/jstests/core/bench_test3.js
+++ b/jstests/core/bench_test3.js
@@ -1,15 +1,18 @@
t = db.bench_test3;
t.drop();
-
-benchArgs = { ops : [ { ns : t.getFullName() ,
- op : "update" ,
- upsert : true ,
- query : { _id : { "#RAND_INT" : [ 0 , 5 , 4 ] } } ,
- update : { $inc : { x : 1 } } } ] ,
- parallel : 2 ,
- seconds : 5 ,
- host : db.getMongo().host };
+benchArgs = {
+ ops: [{
+ ns: t.getFullName(),
+ op: "update",
+ upsert: true,
+ query: {_id: {"#RAND_INT": [0, 5, 4]}},
+ update: {$inc: {x: 1}}
+ }],
+ parallel: 2,
+ seconds: 5,
+ host: db.getMongo().host
+};
if (jsTest.options().auth) {
benchArgs['db'] = 'admin';
@@ -17,11 +20,14 @@ if (jsTest.options().auth) {
benchArgs['password'] = jsTest.options().adminPassword;
}
-res = benchRun( benchArgs );
-printjson( res );
+res = benchRun(benchArgs);
+printjson(res);
var keys = [];
var totals = {};
-db.bench_test3.find().sort( { _id : 1 } ).forEach( function(z){ keys.push( z._id ); totals[z._id] = z.x; } );
+db.bench_test3.find().sort({_id: 1}).forEach(function(z) {
+ keys.push(z._id);
+ totals[z._id] = z.x;
+});
printjson(totals);
-assert.eq( [ 0 , 4 , 8 , 12 , 16 ] , keys );
+assert.eq([0, 4, 8, 12, 16], keys);
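The three bench_test scripts rely on the shell's built-in benchRun load generator and its "#RAND_INT" template, whose optional third element scales the random value, which is why bench_test3 only ever produces the _ids 0, 4, 8, 12 and 16. A minimal standalone sketch of that pattern, assuming a mongo shell session (the collection name and timings below are illustrative, not part of the commit):

// Illustrative benchRun usage modelled on the tests above.
var c = db.bench_sketch;  // hypothetical collection, not from the commit
c.drop();
for (var i = 0; i < 100; i++)
    c.insert({_id: i, x: 0});
var res = benchRun({
    ops: [{
        op: "update",
        ns: c.getFullName(),
        // Substitutes a random integer in [0, 100); a third array element
        // would multiply the drawn value, as bench_test3 demonstrates.
        query: {_id: {"#RAND_INT": [0, 100]}},
        update: {$inc: {x: 1}}
    }],
    parallel: 2,
    seconds: 1,
    host: db.getMongo().host
});
printjson(res);  // per-op throughput counters, e.g. res.update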
diff --git a/jstests/core/big_object1.js b/jstests/core/big_object1.js
index 82ecf025799..017fcdc9756 100644
--- a/jstests/core/big_object1.js
+++ b/jstests/core/big_object1.js
@@ -2,54 +2,54 @@
t = db.big_object1;
t.drop();
-if ( db.adminCommand( "buildinfo" ).bits == 64 ){
-
+if (db.adminCommand("buildinfo").bits == 64) {
var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
var s = large;
- while ( s.length < 850 * 1024 ){
+ while (s.length < 850 * 1024) {
s += large;
}
x = 0;
- while ( true ){
+ while (true) {
var result;
- n = { _id : x , a : [] };
- for ( i=0; i<14+x; i++ )
- n.a.push( s );
+ n = {
+ _id: x,
+ a: []
+ };
+ for (i = 0; i < 14 + x; i++)
+ n.a.push(s);
try {
- result = t.insert( n );
+ result = t.insert(n);
o = n;
- }
- catch ( e ){
+ } catch (e) {
break;
}
-
- if ( result.hasWriteError() )
+
+ if (result.hasWriteError())
break;
x++;
}
-
- printjson( t.stats(1024*1024) );
-
- assert.lt( 15 * 1024 * 1024 , Object.bsonsize( o ) , "A1" );
- assert.gt( 17 * 1024 * 1024 , Object.bsonsize( o ) , "A2" );
-
- assert.eq( x , t.count() , "A3" );
-
- for ( i=0; i<x; i++ ){
- o = t.findOne( { _id : i } );
+
+ printjson(t.stats(1024 * 1024));
+
+ assert.lt(15 * 1024 * 1024, Object.bsonsize(o), "A1");
+ assert.gt(17 * 1024 * 1024, Object.bsonsize(o), "A2");
+
+ assert.eq(x, t.count(), "A3");
+
+ for (i = 0; i < x; i++) {
+ o = t.findOne({_id: i});
try {
// test large mongo -> js conversion
var a = o.a;
- } catch(e) {
+ } catch (e) {
assert(false, "Caught exception trying to insert during iteration " + i + ": " + e);
}
- assert( o , "B" + i );
+ assert(o, "B" + i);
}
-
+
t.drop();
-}
-else {
- print( "skipping big_object1 b/c not 64-bit" );
+} else {
+ print("skipping big_object1 b/c not 64-bit");
}
print("SUCCESS");
diff --git a/jstests/core/binData.js b/jstests/core/binData.js
index 3f037650e05..521815bec95 100644
--- a/jstests/core/binData.js
+++ b/jstests/core/binData.js
@@ -5,10 +5,16 @@ assert.eq(x.base64(), "OEJTfmD8twzaj/LPKLIVkA==", "bad base64");
assert.eq(x.type, 3, "bad type");
assert.eq(x.length(), 16, "bad length");
-x = new BinData(0, "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=");
-assert.eq(x.hex(), "4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e", "bad hex");
-assert.eq(x.base64(), "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=", "bad base64");
+x = new BinData(
+ 0,
+ "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=");
+assert.eq(
+ x.hex(),
+ "4d616e2069732064697374696e677569736865642c206e6f74206f6e6c792062792068697320726561736f6e2c2062757420627920746869732073696e67756c61722070617373696f6e2066726f6d206f7468657220616e696d616c732c2077686963682069732061206c757374206f6620746865206d696e642c20746861742062792061207065727365766572616e6365206f662064656c6967687420696e2074686520636f6e74696e75656420616e6420696e6465666174696761626c652067656e65726174696f6e206f66206b6e6f776c656467652c2065786365656473207468652073686f727420766568656d656e6365206f6620616e79206361726e616c20706c6561737572652e",
+ "bad hex");
+assert.eq(
+ x.base64(),
+ "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=",
+ "bad base64");
assert.eq(x.type, 0, "bad type");
assert.eq(x.length(), 269, "bad length");
-
-
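The binData.js assertions exercise the shell's BinData(subtype, base64) wrapper and its hex(), base64(), length() and type accessors. A small sketch with an illustrative payload ("Zm9vYmFy" is base64 for "foobar"):

// Subtype 0 is generic binary data.
var bin = new BinData(0, "Zm9vYmFy");
assert.eq(0, bin.type);
assert.eq(6, bin.length());             // decoded byte length of "foobar"
assert.eq("666f6f626172", bin.hex());   // hex of the decoded bytes
assert.eq("Zm9vYmFy", bin.base64());    // round-trips the original string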
diff --git a/jstests/core/bindata_indexonly.js b/jstests/core/bindata_indexonly.js
index ece4a1c82eb..a8a2d281560 100644
--- a/jstests/core/bindata_indexonly.js
+++ b/jstests/core/bindata_indexonly.js
@@ -18,14 +18,16 @@
assert.commandWorked(coll.createIndex({_id: 1, a: 1}));
function testIndexOnlyBinData(blob) {
- var explain = coll.find({$and: [{_id: {$lte: BinData(0, blob)}},
- {_id: {$gte: BinData(0, blob)}}]}, {_id: 1, a: 1})
- .hint({_id: 1, a: 1})
- .explain("executionStats");
+ var explain =
+ coll.find({$and: [{_id: {$lte: BinData(0, blob)}}, {_id: {$gte: BinData(0, blob)}}]},
+ {_id: 1, a: 1})
+ .hint({_id: 1, a: 1})
+ .explain("executionStats");
assert(isIndexOnly(explain.queryPlanner.winningPlan),
"indexonly.BinData(0, " + blob + ") - must be index-only");
- assert.eq(1, explain.executionStats.nReturned,
+ assert.eq(1,
+ explain.executionStats.nReturned,
"EXACTone.BinData(0, " + blob + ") - should only return one in unique set");
}
@@ -40,28 +42,32 @@
.hint({_id: 1, a: 1})
.explain("executionStats");
assert(isIndexOnly(explain), "indexonly.$lt.1 - must be index-only");
- assert.eq(0, explain.executionStats.nReturned,
+ assert.eq(0,
+ explain.executionStats.nReturned,
"correctcount.$lt.1 - not returning correct documents");
explain = coll.find({_id: {$gt: BinData(0, "////////////////////////////")}}, {_id: 1, a: 1})
.hint({_id: 1, a: 1})
.explain("executionStats");
assert(isIndexOnly(explain), "indexonly.$gt.2 - must be index-only");
- assert.eq(0, explain.executionStats.nReturned,
+ assert.eq(0,
+ explain.executionStats.nReturned,
"correctcount.$gt.2 - not returning correct documents");
explain = coll.find({_id: {$lte: BinData(0, "AQAAAAEBAAVlbl9VSwAAAAAAAAhv")}}, {_id: 1, a: 1})
.hint({_id: 1, a: 1})
.explain("executionStats");
assert(isIndexOnly(explain), "indexonly.$lte.3 - must be index-only");
- assert.eq(2, explain.executionStats.nReturned,
+ assert.eq(2,
+ explain.executionStats.nReturned,
"correctcount.$lte.3 - not returning correct documents");
explain = coll.find({_id: {$gte: BinData(0, "AQAAAAEBAAVlbl9VSwAAAAAAAAhz")}}, {_id: 1, a: 1})
.hint({_id: 1, a: 1})
.explain("executionStats");
assert(isIndexOnly(explain), "indexonly.$gte.3 - must be index-only");
- assert.eq(2, explain.executionStats.nReturned,
+ assert.eq(2,
+ explain.executionStats.nReturned,
"correctcount.$gte.3 - not returning correct documents");
coll.drop();
diff --git a/jstests/core/bittest.js b/jstests/core/bittest.js
index ebd44734952..45559d8f505 100644
--- a/jstests/core/bittest.js
+++ b/jstests/core/bittest.js
@@ -12,7 +12,8 @@
var explain = coll.find(query).explain("executionStats");
assert(isCollscan(explain.queryPlanner.winningPlan),
"expected bit test query plan to be COLLSCAN");
- assert.eq(count, explain.executionStats.nReturned,
+ assert.eq(count,
+ explain.executionStats.nReturned,
"bit test query not returning correct documents");
}
@@ -95,7 +96,7 @@
// Tests with array of bit positions.
var allPositions = [];
- for (var i = 0; i < 64; i ++) {
+ for (var i = 0; i < 64; i++) {
allPositions.push(i);
}
assertQueryCorrect({a: {$bitsAllSet: []}}, 3);
@@ -143,8 +144,14 @@
assertQueryCorrect({a: {$bitsAnyClear: BinData(0, "////////////////////////////")}}, 3);
// Tests with multiple predicates.
- assertQueryCorrect({a: {$bitsAllSet: BinData(0, "AANgAAAAAAAAAAAAAAAAAAAAAAAA"),
- $bitsAllClear: BinData(0, "//yf////////////////////////")}}, 1);
+ assertQueryCorrect(
+ {
+ a: {
+ $bitsAllSet: BinData(0, "AANgAAAAAAAAAAAAAAAAAAAAAAAA"),
+ $bitsAllClear: BinData(0, "//yf////////////////////////")
+ }
+ },
+ 1);
coll.drop();
})();
\ No newline at end of file
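bittest.js exercises the $bitsAllSet, $bitsAllClear, $bitsAnySet and $bitsAnyClear operators, which accept either an array of bit positions or a BinData mask. A brief sketch of the position-array form, assuming a mongo shell session and an illustrative collection:

var c = db.bittest_sketch;  // hypothetical collection name
c.drop();
c.insert({a: 54});          // 54 = 0b110110, so bits 1, 2, 4 and 5 are set
// All of the listed positions are set, so the document matches...
assert.eq(1, c.find({a: {$bitsAllSet: [1, 2, 4, 5]}}).itcount());
// ...and bit 0 is clear, so $bitsAnyClear over [0, 1] matches as well.
assert.eq(1, c.find({a: {$bitsAnyClear: [0, 1]}}).itcount());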
diff --git a/jstests/core/bulk_api_ordered.js b/jstests/core/bulk_api_ordered.js
index 87ecd66a1b5..a880cdb630e 100644
--- a/jstests/core/bulk_api_ordered.js
+++ b/jstests/core/bulk_api_ordered.js
@@ -20,32 +20,36 @@ var executeTests = function() {
*/
var bulkOp = coll.initializeOrderedBulkOp();
- assert.throws( function(){ bulkOp.find();} );
- assert.throws( function(){ bulkOp.insert({$key: 1});} );
+ assert.throws(function() {
+ bulkOp.find();
+ });
+ assert.throws(function() {
+ bulkOp.insert({$key: 1});
+ });
/**
* Single successful ordered bulk operation
*/
var bulkOp = coll.initializeOrderedBulkOp();
- bulkOp.insert({a:1});
- bulkOp.find({a:1}).updateOne({$set: {b:1}});
+ bulkOp.insert({a: 1});
+ bulkOp.find({a: 1}).updateOne({$set: {b: 1}});
// no-op, should increment nMatched but not nModified
- bulkOp.find({a:1}).updateOne({$set: {b:1}});
- bulkOp.find({a:2}).upsert().updateOne({$set: {b:2}});
- bulkOp.insert({a:3});
- bulkOp.find({a:3}).update({$set: {b:1}});
- bulkOp.find({a:3}).upsert().update({$set: {b:2}});
- bulkOp.find({a:10}).upsert().update({$set: {b:2}});
- bulkOp.find({a:2}).replaceOne({a:11});
- bulkOp.find({a:11}).removeOne();
- bulkOp.find({a:3}).remove({a:3});
+ bulkOp.find({a: 1}).updateOne({$set: {b: 1}});
+ bulkOp.find({a: 2}).upsert().updateOne({$set: {b: 2}});
+ bulkOp.insert({a: 3});
+ bulkOp.find({a: 3}).update({$set: {b: 1}});
+ bulkOp.find({a: 3}).upsert().update({$set: {b: 2}});
+ bulkOp.find({a: 10}).upsert().update({$set: {b: 2}});
+ bulkOp.find({a: 2}).replaceOne({a: 11});
+ bulkOp.find({a: 11}).removeOne();
+ bulkOp.find({a: 3}).remove({a: 3});
var result = bulkOp.execute();
assert.eq(2, result.nInserted);
assert.eq(2, result.nUpserted);
assert.eq(5, result.nMatched);
// only check nModified if write commands are enabled
- if ( coll.getMongo().writeMode() == "commands" ) {
- assert.eq(4, result.nModified);
+ if (coll.getMongo().writeMode() == "commands") {
+ assert.eq(4, result.nModified);
}
assert.eq(2, result.nRemoved);
var upserts = result.getUpsertedIds();
@@ -58,29 +62,35 @@ var executeTests = function() {
assert.eq(2, coll.find({}).itcount(), "find should return two documents");
// illegal to try to convert a multi-op batch into a SingleWriteResult
- assert.throws(function() { result.toSingleResult(); } );
+ assert.throws(function() {
+ result.toSingleResult();
+ });
// attempt to re-run bulk operation
- assert.throws(function() { bulkOp.execute(); } );
+ assert.throws(function() {
+ bulkOp.execute();
+ });
// Test SingleWriteResult
var singleBatch = coll.initializeOrderedBulkOp();
- singleBatch.find({a:4}).upsert().updateOne({$set: {b:1}});
+ singleBatch.find({a: 4}).upsert().updateOne({$set: {b: 1}});
var singleResult = singleBatch.execute().toSingleResult();
assert(singleResult.getUpsertedId() != null);
// Create unique index
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
/**
* Single error ordered bulk operation
*/
var bulkOp = coll.initializeOrderedBulkOp();
- bulkOp.insert({b:1, a:1});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.insert({b:3, a:2});
- var result = assert.throws( function() { bulkOp.execute(); } );
+ bulkOp.insert({b: 1, a: 1});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.insert({b: 3, a: 2});
+ var result = assert.throws(function() {
+ bulkOp.execute();
+ });
assert(result instanceof BulkWriteError);
assert(result instanceof Error);
// Basic properties check
@@ -107,19 +117,21 @@ var executeTests = function() {
// Create unique index
coll.dropIndexes();
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
/**
* Multiple error ordered bulk operation
*/
var bulkOp = coll.initializeOrderedBulkOp();
- bulkOp.insert({b:1, a:1});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.find({b:3}).upsert().updateOne({$set: {a:2}});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.insert({b:4, a:3});
- bulkOp.insert({b:5, a:1});
- var result = assert.throws( function() { bulkOp.execute(); } );
+ bulkOp.insert({b: 1, a: 1});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.find({b: 3}).upsert().updateOne({$set: {a: 2}});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.insert({b: 4, a: 3});
+ bulkOp.insert({b: 5, a: 1});
+ var result = assert.throws(function() {
+ bulkOp.execute();
+ });
// Basic properties check
assert.eq(1, result.nInserted);
@@ -139,17 +151,17 @@ var executeTests = function() {
// Create unique index
coll.dropIndexes();
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
};
-var buildVersion = parseInt(db.runCommand({buildInfo:1}).versionArray.slice(0, 3).join(""), 10);
+var buildVersion = parseInt(db.runCommand({buildInfo: 1}).versionArray.slice(0, 3).join(""), 10);
// Save the existing useWriteCommands function
var _useWriteCommands = coll.getMongo().useWriteCommands;
//
// Only execute write command tests if we have > 2.5.5 otherwise
// execute the down converted version
-if(buildVersion >= 255) {
+if (buildVersion >= 255) {
// Force the use of useWriteCommands
coll._mongo.useWriteCommands = function() {
return true;
diff --git a/jstests/core/bulk_api_unordered.js b/jstests/core/bulk_api_unordered.js
index 0323dabd10b..6720e644e47 100644
--- a/jstests/core/bulk_api_unordered.js
+++ b/jstests/core/bulk_api_unordered.js
@@ -22,32 +22,36 @@ var executeTests = function() {
*/
var bulkOp = coll.initializeUnorderedBulkOp();
- assert.throws( function(){ bulkOp.find();} );
- assert.throws( function(){ bulkOp.insert({$key: 1});} );
+ assert.throws(function() {
+ bulkOp.find();
+ });
+ assert.throws(function() {
+ bulkOp.insert({$key: 1});
+ });
/**
* Single successful unordered bulk operation
*/
var bulkOp = coll.initializeUnorderedBulkOp();
- bulkOp.insert({a:1});
- bulkOp.find({a:1}).updateOne({$set: {b:1}});
+ bulkOp.insert({a: 1});
+ bulkOp.find({a: 1}).updateOne({$set: {b: 1}});
// no-op, should increment nMatched but not nModified
- bulkOp.find({a:1}).updateOne({$set: {b:1}});
- bulkOp.find({a:2}).upsert().updateOne({$set: {b:2}});
- bulkOp.insert({a:3});
- bulkOp.find({a:3}).update({$set: {b:1}});
- bulkOp.find({a:3}).upsert().update({$set: {b:2}});
- bulkOp.find({a:10}).upsert().update({$set: {b:2}});
- bulkOp.find({a:2}).replaceOne({a:11});
- bulkOp.find({a:11}).removeOne();
- bulkOp.find({a:3}).remove({a:3});
+ bulkOp.find({a: 1}).updateOne({$set: {b: 1}});
+ bulkOp.find({a: 2}).upsert().updateOne({$set: {b: 2}});
+ bulkOp.insert({a: 3});
+ bulkOp.find({a: 3}).update({$set: {b: 1}});
+ bulkOp.find({a: 3}).upsert().update({$set: {b: 2}});
+ bulkOp.find({a: 10}).upsert().update({$set: {b: 2}});
+ bulkOp.find({a: 2}).replaceOne({a: 11});
+ bulkOp.find({a: 11}).removeOne();
+ bulkOp.find({a: 3}).remove({a: 3});
var result = bulkOp.execute();
assert.eq(2, result.nInserted);
assert.eq(2, result.nUpserted);
assert.eq(5, result.nMatched);
// only check nModified if write commands are enabled
- if ( coll.getMongo().writeMode() == "commands" ) {
- assert.eq(4, result.nModified);
+ if (coll.getMongo().writeMode() == "commands") {
+ assert.eq(4, result.nModified);
}
assert.eq(2, result.nRemoved);
assert.eq(false, result.hasWriteErrors());
@@ -62,29 +66,35 @@ var executeTests = function() {
assert.eq(2, coll.find({}).itcount(), "find should return two documents");
// illegal to try to convert a multi-op batch into a SingleWriteResult
- assert.throws(function() { result.toSingleResult(); } );
+ assert.throws(function() {
+ result.toSingleResult();
+ });
// attempt to re-run bulk
- assert.throws(function() { bulkOp.execute(); } );
+ assert.throws(function() {
+ bulkOp.execute();
+ });
// Test SingleWriteResult
var singleBatch = coll.initializeUnorderedBulkOp();
- singleBatch.find({a:4}).upsert().updateOne({$set: {b:1}});
+ singleBatch.find({a: 4}).upsert().updateOne({$set: {b: 1}});
var singleResult = singleBatch.execute().toSingleResult();
assert(singleResult.getUpsertedId() != null);
// Create unique index
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
/**
* Single error unordered bulk operation
*/
var bulkOp = coll.initializeUnorderedBulkOp();
- bulkOp.insert({b:1, a:1});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.insert({b:3, a:2});
- var result = assert.throws( function() { bulkOp.execute(); } );
+ bulkOp.insert({b: 1, a: 1});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.insert({b: 3, a: 2});
+ var result = assert.throws(function() {
+ bulkOp.execute();
+ });
// Basic properties check
assert.eq(2, result.nInserted);
@@ -106,19 +116,21 @@ var executeTests = function() {
// Create unique index
coll.dropIndexes();
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
/**
* Multiple error unordered bulk operation
*/
var bulkOp = coll.initializeUnorderedBulkOp();
- bulkOp.insert({b:1, a:1});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.find({b:3}).upsert().updateOne({$set: {a:2}});
- bulkOp.find({b:2}).upsert().updateOne({$set: {a:1}});
- bulkOp.insert({b:4, a:3});
- bulkOp.insert({b:5, a:1});
- var result = assert.throws( function() { bulkOp.execute(); } );
+ bulkOp.insert({b: 1, a: 1});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.find({b: 3}).upsert().updateOne({$set: {a: 2}});
+ bulkOp.find({b: 2}).upsert().updateOne({$set: {a: 1}});
+ bulkOp.insert({b: 4, a: 3});
+ bulkOp.insert({b: 5, a: 1});
+ var result = assert.throws(function() {
+ bulkOp.execute();
+ });
// Basic properties check
assert.eq(2, result.nInserted);
@@ -154,17 +166,17 @@ var executeTests = function() {
// Create unique index
coll.dropIndexes();
coll.remove({});
- coll.ensureIndex({a : 1}, {unique : true});
+ coll.ensureIndex({a: 1}, {unique: true});
};
-var buildVersion = parseInt(db.runCommand({buildInfo:1}).versionArray.slice(0, 3).join(""), 10);
+var buildVersion = parseInt(db.runCommand({buildInfo: 1}).versionArray.slice(0, 3).join(""), 10);
// Save the existing useWriteCommands function
var _useWriteCommands = coll.getMongo().useWriteCommands;
//
// Only execute write command tests if we have > 2.5.5 otherwise
// execute the down converted version
-if(buildVersion >= 255) {
+if (buildVersion >= 255) {
// Force the use of useWriteCommands
coll._mongo.useWriteCommands = function() {
return true;
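bulk_api_ordered.js and bulk_api_unordered.js drive the same fluent bulk API; the difference under test is whether execution stops at the first write error (ordered) or keeps going (unordered). A minimal sketch of the flow, with an illustrative collection name:

var c = db.bulk_sketch;                  // hypothetical collection, not from the commit
c.drop();
var bulk = c.initializeOrderedBulkOp();  // or initializeUnorderedBulkOp()
bulk.insert({a: 1});
bulk.find({a: 2}).upsert().updateOne({$set: {b: 2}});
bulk.find({a: 1}).removeOne();
var result = bulk.execute();             // throws BulkWriteError if any write fails
printjson({inserted: result.nInserted, upserted: result.nUpserted, removed: result.nRemoved});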
diff --git a/jstests/core/bulk_insert.js b/jstests/core/bulk_insert.js
index a946cba8ddb..157b24aabba 100644
--- a/jstests/core/bulk_insert.js
+++ b/jstests/core/bulk_insert.js
@@ -4,21 +4,21 @@ var coll = db.bulkInsertTest;
coll.drop();
var seed = new Date().getTime();
-Random.srand( seed );
+Random.srand(seed);
print("Seed for randomized test is " + seed);
-var bulkSize = Math.floor( Random.rand() * 200 ) + 1;
-var numInserts = Math.floor( Random.rand() * 300 ) + 1;
+var bulkSize = Math.floor(Random.rand() * 200) + 1;
+var numInserts = Math.floor(Random.rand() * 300) + 1;
-print( "Inserting " + numInserts + " bulks of " + bulkSize + " documents." );
+print("Inserting " + numInserts + " bulks of " + bulkSize + " documents.");
-for( var i = 0; i < numInserts; i++ ){
+for (var i = 0; i < numInserts; i++) {
var bulk = [];
- for( var j = 0; j < bulkSize; j++ ){
- bulk.push({ hi : "there", i : i, j : j });
+ for (var j = 0; j < bulkSize; j++) {
+ bulk.push({hi: "there", i: i, j: j});
}
-
- coll.insert( bulk );
+
+ coll.insert(bulk);
}
-assert.eq( coll.count(), bulkSize * numInserts );
+assert.eq(coll.count(), bulkSize * numInserts);
diff --git a/jstests/core/bulk_insert_capped.js b/jstests/core/bulk_insert_capped.js
index 70edf98ca4e..129c393dbfb 100644
--- a/jstests/core/bulk_insert_capped.js
+++ b/jstests/core/bulk_insert_capped.js
@@ -5,19 +5,19 @@
var t = db.capped_multi_insert;
t.drop();
- db.createCollection(t.getName(), {capped: true, size: 16*1024, max: 1});
+ db.createCollection(t.getName(), {capped: true, size: 16 * 1024, max: 1});
- t.insert([{_id:1}, {_id:2}]);
+ t.insert([{_id: 1}, {_id: 2}]);
assert.gleSuccess(db);
// Ensure the collection is valid.
var res = t.validate(true);
assert(res.valid, tojson(res));
-
+
// Ensure that various ways of iterating the collection only return one document.
- assert.eq(t.find().itcount(), 1); // Table scan.
- assert.eq(t.find({}, {_id: 1}).hint({_id: 1}).itcount(), 1); // Index only (covered).
- assert.eq(t.find().hint({_id: 1}).itcount(), 1); // Index scan with fetch.
+ assert.eq(t.find().itcount(), 1); // Table scan.
+ assert.eq(t.find({}, {_id: 1}).hint({_id: 1}).itcount(), 1); // Index only (covered).
+ assert.eq(t.find().hint({_id: 1}).itcount(), 1); // Index scan with fetch.
// Ensure that the second document is the one that is kept.
assert.eq(t.findOne(), {_id: 2});
diff --git a/jstests/core/bulk_legacy_enforce_gle.js b/jstests/core/bulk_legacy_enforce_gle.js
index 4efc280ab37..2e8e076e070 100644
--- a/jstests/core/bulk_legacy_enforce_gle.js
+++ b/jstests/core/bulk_legacy_enforce_gle.js
@@ -9,92 +9,103 @@ var coll = db.bulk_legacy_enforce_gle;
// batch of size 1 no error case.
coll.drop();
var bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-assert( bulk.execute() instanceof BulkWriteResult );
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+assert(bulk.execute() instanceof BulkWriteResult);
-var gle = db.runCommand({ getLastError: 1 });
+var gle = db.runCommand({getLastError: 1});
assert(gle.ok, tojson(gle));
assert.eq(1, gle.n);
// batch of size 1 should not call resetError even when it errors out.
coll.drop();
-coll.insert({ _id: 1 });
+coll.insert({_id: 1});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+assert.throws(function() {
+ bulk.execute();
+});
-gle = db.runCommand({ getLastError: 1 });
+gle = db.runCommand({getLastError: 1});
assert(gle.ok, tojson(gle));
assert.neq(null, gle.err);
// batch with all error except last should not call resetError.
coll.drop();
-coll.insert({ _id: 1 });
+coll.insert({_id: 1});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 0 });
-var res = assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 0});
+var res = assert.throws(function() {
+ bulk.execute();
+});
assert.eq(2, res.getWriteErrors().length);
-gle = db.runCommand({ getLastError: 1 });
+gle = db.runCommand({getLastError: 1});
assert(gle.ok, tojson(gle));
assert.eq(1, gle.n);
// batch with error at middle should not call resetError.
coll.drop();
-coll.insert({ _id: 1 });
+coll.insert({_id: 1});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 0 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 2 });
-var res = assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 0});
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 2});
+var res = assert.throws(function() {
+ bulk.execute();
+});
assert.eq(1, res.getWriteErrors().length);
-gle = db.runCommand({ getLastError: 1 });
+gle = db.runCommand({getLastError: 1});
assert(gle.ok, tojson(gle));
// mongos sends the bulk as one while the shell sends the write individually
assert.gte(gle.n, 1);
// batch with error at last should call resetError.
coll.drop();
-coll.insert({ _id: 2 });
+coll.insert({_id: 2});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 0 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 2 });
-res = assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 0});
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 2});
+res = assert.throws(function() {
+ bulk.execute();
+});
assert.eq(1, res.getWriteErrors().length);
-gle = db.runCommand({ getLastError: 1 });
+gle = db.runCommand({getLastError: 1});
assert(gle.ok, tojson(gle));
assert.eq(0, gle.n);
// batch with error at last should not call resetError if { w: 1 }
coll.drop();
-coll.insert({ _id: 2 });
+coll.insert({_id: 2});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 0 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 2 });
-res = assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 0});
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 2});
+res = assert.throws(function() {
+ bulk.execute();
+});
assert.eq(1, res.getWriteErrors().length);
-gle = db.runCommand({ getLastError: 1, w: 1 });
+gle = db.runCommand({getLastError: 1, w: 1});
assert(gle.ok, tojson(gle));
assert.neq(null, gle.err);
// batch with error at last should not call resetError if { w: 0 }
coll.drop();
-coll.insert({ _id: 2 });
+coll.insert({_id: 2});
bulk = coll.initializeUnorderedBulkOp();
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 0 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 1 });
-bulk.find({ none: 1 }).upsert().updateOne({ _id: 2 });
-res = assert.throws( function() { bulk.execute(); } );
+bulk.find({none: 1}).upsert().updateOne({_id: 0});
+bulk.find({none: 1}).upsert().updateOne({_id: 1});
+bulk.find({none: 1}).upsert().updateOne({_id: 2});
+res = assert.throws(function() {
+ bulk.execute();
+});
assert.eq(1, res.getWriteErrors().length);
-gle = db.runCommand({ getLastError: 1, w: 0 });
+gle = db.runCommand({getLastError: 1, w: 0});
assert(gle.ok, tojson(gle));
assert.neq(null, gle.err);
-
diff --git a/jstests/core/bypass_doc_validation.js b/jstests/core/bypass_doc_validation.js
index 1dfe9b4d158..79a2eb7d4a2 100644
--- a/jstests/core/bypass_doc_validation.js
+++ b/jstests/core/bypass_doc_validation.js
@@ -21,8 +21,7 @@
var op = [{ts: Timestamp(0, 0), h: 1, v: 2, op: 'i', ns: coll.getFullName(), o: {_id: 9}}];
// SERVER-21345: applyOps is returning UnknownError instead of DocumentValidationFailure
assert.commandFailedWithCode(
- myDb.runCommand({applyOps: op, bypassDocumentValidation: false}), 8
- );
+ myDb.runCommand({applyOps: op, bypassDocumentValidation: false}), 8);
assert.eq(0, coll.count({_id: 9}));
assert.commandWorked(myDb.runCommand({applyOps: op, bypassDocumentValidation: true}));
assert.eq(1, coll.count({_id: 9}));
@@ -35,12 +34,11 @@
assert.commandWorked(myDb.createCollection(outputCollName, {validator: {a: {$exists: true}}}));
// Test the aggregate shell helper.
- var pipeline = [
- {$match: {_id: 1}},
- {$project: {aggregation: {$add: [1]}}},
- {$out: outputCollName}
- ];
- assert.throws(function() { coll.aggregate(pipeline, {bypassDocumentValidation: false}); });
+ var pipeline =
+ [{$match: {_id: 1}}, {$project: {aggregation: {$add: [1]}}}, {$out: outputCollName}];
+ assert.throws(function() {
+ coll.aggregate(pipeline, {bypassDocumentValidation: false});
+ });
assert.eq(0, outputColl.count({aggregation: 1}));
coll.aggregate(pipeline, {bypassDocumentValidation: true});
assert.eq(1, outputColl.count({aggregation: 1}));
@@ -48,15 +46,14 @@
// Test the copyDb command.
var copyDbName = dbName + '_copy';
myDb.getSiblingDB(copyDbName).dropDatabase();
- assert.commandFailedWithCode(db.adminCommand({
- copydb: 1, fromdb: dbName, todb: copyDbName, bypassDocumentValidation: false}),
- docValidationErrorCode
- );
+ assert.commandFailedWithCode(
+ db.adminCommand(
+ {copydb: 1, fromdb: dbName, todb: copyDbName, bypassDocumentValidation: false}),
+ docValidationErrorCode);
assert.eq(0, db.getSiblingDB(copyDbName)[collName].count());
myDb.getSiblingDB(copyDbName).dropDatabase();
- assert.commandWorked(db.adminCommand({
- copydb: 1, fromdb: dbName, todb: copyDbName, bypassDocumentValidation: true
- }));
+ assert.commandWorked(db.adminCommand(
+ {copydb: 1, fromdb: dbName, todb: copyDbName, bypassDocumentValidation: true}));
assert.eq(coll.count(), db.getSiblingDB(copyDbName)[collName].count());
// Test the findAndModify shell helper.
@@ -68,40 +65,57 @@
assert.eq(1, coll.count({findAndModify: 1}));
// Test the map/reduce command.
- var map = function() {emit(1, 1);};
- var reduce = function(k, vs) {return 'mapReduce';};
- assert.commandFailedWithCode(coll.runCommand({mapReduce: collName,
- map: map, reduce: reduce, out: {replace: outputCollName}, bypassDocumentValidation: false}),
- docValidationErrorCode
- );
+ var map = function() {
+ emit(1, 1);
+ };
+ var reduce = function(k, vs) {
+ return 'mapReduce';
+ };
+ assert.commandFailedWithCode(coll.runCommand({
+ mapReduce: collName,
+ map: map,
+ reduce: reduce,
+ out: {replace: outputCollName},
+ bypassDocumentValidation: false
+ }),
+ docValidationErrorCode);
assert.eq(0, outputColl.count({value: 'mapReduce'}));
- var res = coll.runCommand({mapReduce: collName,
- map: map, reduce: reduce, out: {replace: outputCollName}, bypassDocumentValidation: true});
+ var res = coll.runCommand({
+ mapReduce: collName,
+ map: map,
+ reduce: reduce,
+ out: {replace: outputCollName},
+ bypassDocumentValidation: true
+ });
assert.commandWorked(res);
assert.eq(1, outputColl.count({value: 'mapReduce'}));
// Test the insert command. Includes a test for a doc with no _id (SERVER-20859).
res = myDb.runCommand({insert: collName, documents: [{}], bypassDocumentValidation: false});
assert.eq(res.writeErrors[0].code, docValidationErrorCode, tojson(res));
- res = myDb.runCommand({insert: collName, documents: [{}, {_id: 6}],
- bypassDocumentValidation: false
- });
+ res = myDb.runCommand(
+ {insert: collName, documents: [{}, {_id: 6}], bypassDocumentValidation: false});
assert.eq(0, coll.count({_id: 6}));
assert.eq(res.writeErrors[0].code, docValidationErrorCode, tojson(res));
- res = myDb.runCommand({insert: collName, documents: [{}, {_id: 6}],
- bypassDocumentValidation: true
- });
+ res = myDb.runCommand(
+ {insert: collName, documents: [{}, {_id: 6}], bypassDocumentValidation: true});
assert.commandWorked(res);
assert.eq(null, res.writeErrors);
assert.eq(1, coll.count({_id: 6}));
// Test the update command.
- res = myDb.runCommand({update: collName,
- updates: [{q: {}, u: {$set: {update: 1}}}], bypassDocumentValidation: false});
+ res = myDb.runCommand({
+ update: collName,
+ updates: [{q: {}, u: {$set: {update: 1}}}],
+ bypassDocumentValidation: false
+ });
assert.eq(res.writeErrors[0].code, docValidationErrorCode, tojson(res));
assert.eq(0, coll.count({update: 1}));
- res = myDb.runCommand({update: collName,
- updates: [{q: {}, u: {$set: {update: 1}}}], bypassDocumentValidation: true});
+ res = myDb.runCommand({
+ update: collName,
+ updates: [{q: {}, u: {$set: {update: 1}}}],
+ bypassDocumentValidation: true
+ });
assert.commandWorked(res);
assert.eq(null, res.writeErrors);
assert.eq(1, coll.count({update: 1}));
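bypass_doc_validation.js repeats one mechanic across many commands: a collection created with a validator rejects non-conforming writes unless the command sets bypassDocumentValidation: true. A condensed sketch, with an illustrative database name and validator:

var myDb = db.getSiblingDB("bypass_sketch");  // hypothetical database name
myDb.dropDatabase();
assert.commandWorked(myDb.createCollection("c", {validator: {a: {$exists: true}}}));
// Fails validation: the document has no "a" field.
var res = myDb.runCommand({insert: "c", documents: [{}], bypassDocumentValidation: false});
assert.neq(null, res.writeErrors);
// The same document is accepted once validation is bypassed.
assert.commandWorked(
    myDb.runCommand({insert: "c", documents: [{}], bypassDocumentValidation: true}));
assert.eq(1, myDb.c.count());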
diff --git a/jstests/core/capped.js b/jstests/core/capped.js
index 72eddb8de2f..0b2945bba04 100644
--- a/jstests/core/capped.js
+++ b/jstests/core/capped.js
@@ -1,12 +1,11 @@
db.jstests_capped.drop();
-db.createCollection("jstests_capped", {capped:true, size:30000});
+db.createCollection("jstests_capped", {capped: true, size: 30000});
t = db.jstests_capped;
-assert.eq( 1, t.getIndexes().length, "expected a count of one index for new capped collection" );
+assert.eq(1, t.getIndexes().length, "expected a count of one index for new capped collection");
+t.save({x: 1});
+t.save({x: 2});
-t.save({x:1});
-t.save({x:2});
-
-assert( t.find().sort({$natural:1})[0].x == 1 , "expected obj.x==1");
-assert( t.find().sort({$natural:-1})[0].x == 2, "expected obj.x == 2");
+assert(t.find().sort({$natural: 1})[0].x == 1, "expected obj.x==1");
+assert(t.find().sort({$natural: -1})[0].x == 2, "expected obj.x == 2");
diff --git a/jstests/core/capped1.js b/jstests/core/capped1.js
index 96099d7a9fd..923ee3aa668 100644
--- a/jstests/core/capped1.js
+++ b/jstests/core/capped1.js
@@ -2,10 +2,9 @@
t = db.capped1;
t.drop();
-db.createCollection("capped1" , {capped:true, size:1024 });
+db.createCollection("capped1", {capped: true, size: 1024});
v = t.validate();
-assert( v.valid , "A : " + tojson( v ) ); // SERVER-485
-
-t.save( { x : 1 } );
-assert( t.validate().valid , "B" );
+assert(v.valid, "A : " + tojson(v)); // SERVER-485
+t.save({x: 1});
+assert(t.validate().valid, "B");
diff --git a/jstests/core/capped5.js b/jstests/core/capped5.js
index 8a40c78ae98..33d78c5e17f 100644
--- a/jstests/core/capped5.js
+++ b/jstests/core/capped5.js
@@ -4,37 +4,36 @@ tn = "capped5";
t = db[tn];
t.drop();
+db.createCollection(tn, {capped: true, size: 1024 * 1024 * 1});
+t.insert({_id: 5, x: 11, z: 52});
+assert.eq(1, t.getIndexKeys().length, "A0"); // now we assume _id index even on capped coll
+assert.eq(52, t.findOne({x: 11}).z, "A1");
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 , z : 52 } );
-assert.eq( 1 , t.getIndexKeys().length , "A0" ); //now we assume _id index even on capped coll
-assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" );
+t.ensureIndex({_id: 1});
+t.ensureIndex({x: 1});
-t.ensureIndex( { _id : 1 } );
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" );
-assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" );
+assert.eq(52, t.findOne({x: 11}).z, "B1");
+assert.eq(52, t.findOne({_id: 5}).z, "B2");
t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 5 , x : 12 } );
-assert.eq( 1, t.getIndexes().length ); //now we assume _id index
-assert.eq( 1, t.find().toArray().length ); //_id index unique, so second insert fails
+db.createCollection(tn, {capped: true, size: 1024 * 1024 * 1});
+t.insert({_id: 5, x: 11});
+t.insert({_id: 5, x: 12});
+assert.eq(1, t.getIndexes().length); // now we assume _id index
+assert.eq(1, t.find().toArray().length); //_id index unique, so second insert fails
t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 6 , x : 12 } );
-t.ensureIndex( { x:1 }, {unique:true} );
-assert.eq( 2, t.getIndexes().length ); //now we assume _id index
-assert.eq( 2, t.find().hint( {x:1} ).toArray().length );
+db.createCollection(tn, {capped: true, size: 1024 * 1024 * 1});
+t.insert({_id: 5, x: 11});
+t.insert({_id: 6, x: 12});
+t.ensureIndex({x: 1}, {unique: true});
+assert.eq(2, t.getIndexes().length); // now we assume _id index
+assert.eq(2, t.find().hint({x: 1}).toArray().length);
// SERVER-525 (closed) unique indexes in capped collection
t.drop();
-db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
-t.ensureIndex( { _id:1 } ); // note we assume will be automatically unique because it is _id
-t.insert( { _id : 5 , x : 11 } );
-t.insert( { _id : 5 , x : 12 } );
-assert.eq( 1, t.find().toArray().length );
+db.createCollection(tn, {capped: true, size: 1024 * 1024 * 1});
+t.ensureIndex({_id: 1}); // note we assume will be automatically unique because it is _id
+t.insert({_id: 5, x: 11});
+t.insert({_id: 5, x: 12});
+assert.eq(1, t.find().toArray().length);
diff --git a/jstests/core/capped6.js b/jstests/core/capped6.js
index e643c77760d..d7b8a60985a 100644
--- a/jstests/core/capped6.js
+++ b/jstests/core/capped6.js
@@ -12,19 +12,19 @@
* check is performed in both forward and reverse directions.
*/
function checkOrder(i, valueArray) {
- res = coll.find().sort( { $natural: -1 } );
- assert( res.hasNext(), "A" );
+ res = coll.find().sort({$natural: -1});
+ assert(res.hasNext(), "A");
var j = i;
- while(res.hasNext()) {
- assert.eq( valueArray[j--].a, res.next().a, "B" );
+ while (res.hasNext()) {
+ assert.eq(valueArray[j--].a, res.next().a, "B");
}
- res = coll.find().sort( { $natural: 1 } );
- assert( res.hasNext(), "C" );
- while( res.hasNext() ) {
- assert.eq( valueArray[++j].a, res.next().a, "D" );
+ res = coll.find().sort({$natural: 1});
+ assert(res.hasNext(), "C");
+ while (res.hasNext()) {
+ assert.eq(valueArray[++j].a, res.next().a, "D");
}
- assert.eq( j, i, "E" );
+ assert.eq(j, i, "E");
}
/*
@@ -32,13 +32,15 @@
*/
function prepareCollection(shouldReverse) {
coll.drop();
- db._dbCommand({create: "capped6", capped: true, size: 1000, $nExtents: 11,
- autoIndexId: false});
+ db._dbCommand(
+ {create: "capped6", capped: true, size: 1000, $nExtents: 11, autoIndexId: false});
var valueArray = new Array(maxDocuments);
var c = "";
- for( i = 0; i < maxDocuments; ++i, c += "-" ) {
+ for (i = 0; i < maxDocuments; ++i, c += "-") {
// The a values are strings of increasing length.
- valueArray[i] = {a: c};
+ valueArray[i] = {
+ a: c
+ };
}
if (shouldReverse) {
valueArray.reverse();
@@ -52,11 +54,10 @@
* 'maxDocuments' number of documents since it is a capped collection.
* 2. Remove all but one documents via one or more "captrunc" requests.
* 3. For each subsequent call to this function, keep track of the removed documents using
- * 'valueArrayIndexes' and re-insert the removed documents each time this function is
+ * 'valueArrayIndexes' and re-insert the removed documents each time this function is
* called.
*/
function runCapTrunc(valueArray, valueArrayCurIndex, n, inc) {
-
// If n <= 0, no documents are removed by captrunc.
assert.gt(n, 0);
assert.gte(valueArray.length, maxDocuments);
@@ -73,7 +74,7 @@
var iterations = Math.floor((count - 1) / (n + inc));
for (i = 0; i < iterations; ++i) {
- assert.commandWorked(db.runCommand({captrunc:"capped6", n:n, inc:inc}));
+ assert.commandWorked(db.runCommand({captrunc: "capped6", n: n, inc: inc}));
count -= (n + inc);
valueArrayCurIndex -= (n + inc);
checkOrder(valueArrayCurIndex, valueArray);
diff --git a/jstests/core/capped9.js b/jstests/core/capped9.js
index 50ebb64744f..ae54839c9d1 100644
--- a/jstests/core/capped9.js
+++ b/jstests/core/capped9.js
@@ -2,26 +2,21 @@
t = db.capped9;
t.drop();
-db.createCollection("capped9" , {capped:true, size:1024*50 });
+db.createCollection("capped9", {capped: true, size: 1024 * 50});
-t.insert( { _id : 1 , x : 2 , y : 3 } );
-
-assert.eq( 1 , t.find( { x : 2 } ).itcount() , "A1" );
-assert.eq( 1 , t.find( { y : 3 } ).itcount() , "A2" );
-//assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064
-
-t.update( { _id : 1 } , { $set : { y : 4 } } );
-//assert( db.getLastError() , "B1" ); // SERVER-3064
-//assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064
-
-t.ensureIndex( { _id : 1 } );
-
-assert.eq( 1 , t.find( { _id : 1 } ).itcount() , "D1" );
-
-assert.writeOK( t.update( { _id: 1 }, { $set: { y: 4 } } ));
-assert.eq( 4 , t.findOne().y , "D2" );
+t.insert({_id: 1, x: 2, y: 3});
+assert.eq(1, t.find({x: 2}).itcount(), "A1");
+assert.eq(1, t.find({y: 3}).itcount(), "A2");
+// assert.throws( function(){ t.find( { _id : 1 } ).itcount(); } , [] , "A3" ); // SERVER-3064
+t.update({_id: 1}, {$set: {y: 4}});
+// assert( db.getLastError() , "B1" ); // SERVER-3064
+// assert.eq( 3 , t.findOne().y , "B2" ); // SERVER-3064
+t.ensureIndex({_id: 1});
+assert.eq(1, t.find({_id: 1}).itcount(), "D1");
+assert.writeOK(t.update({_id: 1}, {$set: {y: 4}}));
+assert.eq(4, t.findOne().y, "D2");
diff --git a/jstests/core/capped_convertToCapped1.js b/jstests/core/capped_convertToCapped1.js
index 4ee9ff2785e..269a33f89a5 100644
--- a/jstests/core/capped_convertToCapped1.js
+++ b/jstests/core/capped_convertToCapped1.js
@@ -8,26 +8,22 @@ dest.drop();
N = 1000;
-for( i = 0; i < N; ++i ) {
- source.save( {i:i} );
+for (i = 0; i < N; ++i) {
+ source.save({i: i});
}
-assert.eq( N, source.count() );
+assert.eq(N, source.count());
// should all fit
-res = db.runCommand( { cloneCollectionAsCapped:source.getName(),
- toCollection:dest.getName(),
- size:100000 } );
-assert.commandWorked( res );
-assert.eq( source.count(), dest.count() );
-assert.eq( N, source.count() ); // didn't delete source
+res = db.runCommand(
+ {cloneCollectionAsCapped: source.getName(), toCollection: dest.getName(), size: 100000});
+assert.commandWorked(res);
+assert.eq(source.count(), dest.count());
+assert.eq(N, source.count()); // didn't delete source
dest.drop();
// should NOT all fit
-assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:source.getName(),
- toCollection:dest.getName(),
- size:1000 } ) );
-
-
-assert.eq( N, source.count() ); // didn't delete source
-assert.gt( source.count(), dest.count() );
+assert.commandWorked(db.runCommand(
+ {cloneCollectionAsCapped: source.getName(), toCollection: dest.getName(), size: 1000}));
+assert.eq(N, source.count()); // didn't delete source
+assert.gt(source.count(), dest.count());
diff --git a/jstests/core/capped_empty.js b/jstests/core/capped_empty.js
index b922dca0d46..63272a4e546 100644
--- a/jstests/core/capped_empty.js
+++ b/jstests/core/capped_empty.js
@@ -2,21 +2,21 @@
t = db.capped_empty;
t.drop();
-db.createCollection( t.getName() , { capped : true , size : 100 } );
+db.createCollection(t.getName(), {capped: true, size: 100});
-t.insert( { x : 1 } );
-t.insert( { x : 2 } );
-t.insert( { x : 3 } );
-t.ensureIndex( { x : 1 } );
+t.insert({x: 1});
+t.insert({x: 2});
+t.insert({x: 3});
+t.ensureIndex({x: 1});
-assert.eq( 3 , t.count() );
+assert.eq(3, t.count());
-t.runCommand( "emptycapped" );
+t.runCommand("emptycapped");
-assert.eq( 0 , t.count() );
+assert.eq(0, t.count());
-t.insert( { x : 1 } );
-t.insert( { x : 2 } );
-t.insert( { x : 3 } );
+t.insert({x: 1});
+t.insert({x: 2});
+t.insert({x: 3});
-assert.eq( 3 , t.count() );
+assert.eq(3, t.count());
diff --git a/jstests/core/capped_max1.js b/jstests/core/capped_max1.js
index 7811489773b..e4fbda62233 100644
--- a/jstests/core/capped_max1.js
+++ b/jstests/core/capped_max1.js
@@ -5,24 +5,26 @@ t.drop();
var max = 10;
var maxSize = 64 * 1024;
-db.createCollection( t.getName() , {capped: true, size: maxSize, max: max } );
-assert.eq( max, t.stats().max );
-assert.eq( maxSize, t.stats().maxSize );
-assert.eq( Math.floor(maxSize/1000), t.stats(1000).maxSize );
+db.createCollection(t.getName(), {capped: true, size: maxSize, max: max});
+assert.eq(max, t.stats().max);
+assert.eq(maxSize, t.stats().maxSize);
+assert.eq(Math.floor(maxSize / 1000), t.stats(1000).maxSize);
-for ( var i=0; i < max * 2; i++ ) {
- t.insert( { x : i } );
+for (var i = 0; i < max * 2; i++) {
+ t.insert({x: i});
}
-assert.eq( max, t.count() );
+assert.eq(max, t.count());
// Test invalidation of cursors
var cursor = t.find().batchSize(4);
assert(cursor.hasNext());
var myX = cursor.next();
-for ( var j = 0; j < max * 2; j++ ) {
- t.insert( { x : j+i } );
+for (var j = 0; j < max * 2; j++) {
+ t.insert({x: j + i});
}
// Cursor should now be dead.
-assert.throws(function () { cursor.toArray(); });
+assert.throws(function() {
+ cursor.toArray();
+});
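The capped_* tests in this commit all depend on the same capped-collection behaviour: insertion (natural) order is preserved, the oldest documents are evicted once the size or max bound is hit, and open cursors over evicted documents become invalid. A tiny sketch of the eviction part, with illustrative bounds:

var c = db.capped_sketch;  // hypothetical collection name
c.drop();
db.createCollection(c.getName(), {capped: true, size: 4096, max: 3});
for (var i = 0; i < 5; i++)
    c.insert({_id: i});
assert.eq(3, c.count());                            // only the newest 3 documents survive
assert.eq(2, c.find().sort({$natural: 1})[0]._id);  // oldest remaining document
assert.eq(4, c.find().sort({$natural: -1})[0]._id); // most recent insert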
diff --git a/jstests/core/capped_update.js b/jstests/core/capped_update.js
index 293cada4f8e..3ae434a8334 100644
--- a/jstests/core/capped_update.js
+++ b/jstests/core/capped_update.js
@@ -7,24 +7,23 @@
'use strict';
var t = db.cannot_change_capped_size;
t.drop();
- assert.commandWorked(db.createCollection(t.getName(), {capped: true,
- size: 1024,
- autoIndexId:false}));
+ assert.commandWorked(
+ db.createCollection(t.getName(), {capped: true, size: 1024, autoIndexId: false}));
assert.eq(0, t.getIndexes().length, "the capped collection has indexes");
for (var j = 1; j <= 10; j++) {
assert.writeOK(t.insert({_id: j, s: "Hello, World!"}));
}
- assert.writeOK(t.update({_id: 3}, {s: "Hello, Mongo!"})); // Mongo is same length as World
+ assert.writeOK(t.update({_id: 3}, {s: "Hello, Mongo!"})); // Mongo is same length as World
assert.writeError(t.update({_id: 3}, {$set: {s: "Hello!"}}));
assert.writeError(t.update({_id: 10}, {}));
assert.writeError(t.update({_id: 10}, {s: "Hello, World!!!"}));
- assert.commandWorked(t.getDB().runCommand({godinsert:t.getName(), obj:{a:2}}));
- var doc = t.findOne({a:2});
+ assert.commandWorked(t.getDB().runCommand({godinsert: t.getName(), obj: {a: 2}}));
+ var doc = t.findOne({a: 2});
assert.eq(undefined, doc["_id"], "now has _id after godinsert");
- assert.writeOK(t.update({a:2}, {$inc:{a:1}}));
- doc = t.findOne({a:3});
+ assert.writeOK(t.update({a: 2}, {$inc: {a: 1}}));
+ doc = t.findOne({a: 3});
assert.eq(undefined, doc["_id"], "now has _id after update");
})();
diff --git a/jstests/core/cappeda.js b/jstests/core/cappeda.js
index 79df5f33aa6..f5c56a44e89 100644
--- a/jstests/core/cappeda.js
+++ b/jstests/core/cappeda.js
@@ -2,31 +2,30 @@
t = db.scan_capped_id;
t.drop();
-x = t.runCommand( "create" , { capped : true , size : 10000 } );
-assert( x.ok );
+x = t.runCommand("create", {capped: true, size: 10000});
+assert(x.ok);
-for ( i=0; i<100; i++ )
- t.insert( { _id : i , x : 1 } );
+for (i = 0; i < 100; i++)
+ t.insert({_id: i, x: 1});
function q() {
- return t.findOne( { _id : 5 } );
+ return t.findOne({_id: 5});
}
function u() {
- var res = t.update( { _id : 5 } , { $set : { x : 2 } } );
- if ( res.hasWriteError() )
+ var res = t.update({_id: 5}, {$set: {x: 2}});
+ if (res.hasWriteError())
throw res;
}
-
// SERVER-3064
-//assert.throws( q , [] , "A1" );
-//assert.throws( u , [] , "B1" );
+// assert.throws( q , [] , "A1" );
+// assert.throws( u , [] , "B1" );
-t.ensureIndex( { _id : 1 } );
+t.ensureIndex({_id: 1});
-assert.eq( 1 , q().x );
+assert.eq(1, q().x);
q();
u();
-assert.eq( 2 , q().x );
+assert.eq(2, q().x);
diff --git a/jstests/core/check_shard_index.js b/jstests/core/check_shard_index.js
index a862715e5e6..6551699c65c 100644
--- a/jstests/core/check_shard_index.js
+++ b/jstests/core/check_shard_index.js
@@ -5,137 +5,138 @@
f = db.jstests_shardingindex;
f.drop();
-
// -------------------------
// Case 1: all entries filled or empty should make a valid index
//
f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "1. initial count should be zero" );
-
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok, "1a" );
+f.ensureIndex({x: 1, y: 1});
+assert.eq(0, f.count(), "1. initial count should be zero");
-f.save( { x: 1 , y : 1 } );
-assert.eq( 1 , f.count() , "1. count after initial insert should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "1b" );
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(true, res.ok, "1a");
+f.save({x: 1, y: 1});
+assert.eq(1, f.count(), "1. count after initial insert should be 1");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(true, res.ok, "1b");
// -------------------------
// Case 2: entry with null values would make an index unsuitable
//
f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "2. initial count should be zero" );
+f.ensureIndex({x: 1, y: 1});
+assert.eq(0, f.count(), "2. initial count should be zero");
-f.save( { x: 1 , y : 1 } );
-f.save( { x: null , y : 1 } );
+f.save({x: 1, y: 1});
+f.save({x: null, y: 1});
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "2a " + tojson(res) );
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(true, res.ok, "2a " + tojson(res));
-f.save( { y: 2 } );
-assert.eq( 3 , f.count() , "2. count after initial insert should be 3" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "2b " + tojson(res) );
+f.save({y: 2});
+assert.eq(3, f.count(), "2. count after initial insert should be 3");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "2b " + tojson(res));
// Check _id index
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {_id:1} });
-assert.eq( true , res.ok , "2c " + tojson(res) );
-assert( res.idskip , "2d " + tojson(res) );
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {_id: 1}});
+assert.eq(true, res.ok, "2c " + tojson(res));
+assert(res.idskip, "2d " + tojson(res));
// -------------------------
// Case 3: entry with array values would make an index unsuitable
//
f.drop();
-f.ensureIndex( { x: 1 , y: 1 } );
-assert.eq( 0 , f.count() , "3. initial count should be zero" );
+f.ensureIndex({x: 1, y: 1});
+assert.eq(0, f.count(), "3. initial count should be zero");
-f.save( { x: 1 , y : 1 } );
-f.save( { x: [1, 2] , y : 2 } );
+f.save({x: 1, y: 1});
+f.save({x: [1, 2], y: 2});
-assert.eq( 2 , f.count() , "3. count after initial insert should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "3a " + tojson(res) );
+assert.eq(2, f.count(), "3. count after initial insert should be 2");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "3a " + tojson(res));
-f.remove( { y : 2 } );
+f.remove({y: 2});
f.reIndex();
-assert.eq( 1 , f.count() , "3. count after removing array value should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "3b " + tojson(res) );
+assert.eq(1, f.count(), "3. count after removing array value should be 1");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(true, res.ok, "3b " + tojson(res));
-f.save( { x : 2, y : [1, 2] } );
+f.save({x: 2, y: [1, 2]});
-assert.eq( 2 , f.count() , "3. count after adding array value should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "3c " + tojson(res) );
+assert.eq(2, f.count(), "3. count after adding array value should be 2");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "3c " + tojson(res));
// -------------------------
// Case 4: Handles prefix shard key indexes.
//
f.drop();
-f.ensureIndex( { x: 1 , y: 1, z: 1 } );
-assert.eq( 0 , f.count() , "4. initial count should be zero" );
-
-f.save( { x: 1 , y : 1, z : 1 } );
+f.ensureIndex({x: 1, y: 1, z: 1});
+assert.eq(0, f.count(), "4. initial count should be zero");
-assert.eq( 1 , f.count() , "4. count after initial insert should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( true , res.ok , "4a " + tojson(res) );
+f.save({x: 1, y: 1, z: 1});
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( true , res.ok , "4b " + tojson(res) );
+assert.eq(1, f.count(), "4. count after initial insert should be 1");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1}});
+assert.eq(true, res.ok, "4a " + tojson(res));
-f.save( { x: [1, 2] , y : 2, z : 2 } );
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(true, res.ok, "4b " + tojson(res));
-assert.eq( 2 , f.count() , "4. count after adding array value should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4c " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4d " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4e " + tojson(res) );
+f.save({x: [1, 2], y: 2, z: 2});
+assert.eq(2, f.count(), "4. count after adding array value should be 2");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1}});
+assert.eq(false, res.ok, "4c " + tojson(res));
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "4d " + tojson(res));
+res = db.runCommand(
+ {checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1, z: 1}});
+assert.eq(false, res.ok, "4e " + tojson(res));
-f.remove( { y : 2 } );
+f.remove({y: 2});
f.reIndex();
-assert.eq( 1 , f.count() , "4. count after removing array value should be 1" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( true , res.ok , "4f " + tojson(res) );
-
-f.save( { x : 3, y : [1, 2], z : 3 } );
-
-assert.eq( 2 , f.count() , "4. count after adding array value on second key should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4g " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4h " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4i " + tojson(res) );
-
-f.remove( { x : 3 } );
-f.reIndex(); // Necessary so that the index is no longer marked as multikey
-
-assert.eq( 1 , f.count() , "4. count after removing array value should be 1 again" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( true , res.ok , "4e " + tojson(res) );
-
-f.save( { x : 4, y : 4, z : [1, 2] } );
-
-assert.eq( 2 , f.count() , "4. count after adding array value on third key should be 2" );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1} });
-assert.eq( false , res.ok , "4c " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1} });
-assert.eq( false , res.ok , "4d " + tojson(res) );
-res = db.runCommand( { checkShardingIndex: "test.jstests_shardingindex" , keyPattern: {x:1, y:1, z:1} });
-assert.eq( false , res.ok , "4e " + tojson(res) );
-
+assert.eq(1, f.count(), "4. count after removing array value should be 1");
+res = db.runCommand(
+ {checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1, z: 1}});
+assert.eq(true, res.ok, "4f " + tojson(res));
+
+f.save({x: 3, y: [1, 2], z: 3});
+
+assert.eq(2, f.count(), "4. count after adding array value on second key should be 2");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1}});
+assert.eq(false, res.ok, "4g " + tojson(res));
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "4h " + tojson(res));
+res = db.runCommand(
+ {checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1, z: 1}});
+assert.eq(false, res.ok, "4i " + tojson(res));
+
+f.remove({x: 3});
+f.reIndex(); // Necessary so that the index is no longer marked as multikey
+
+assert.eq(1, f.count(), "4. count after removing array value should be 1 again");
+res = db.runCommand(
+ {checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1, z: 1}});
+assert.eq(true, res.ok, "4e " + tojson(res));
+
+f.save({x: 4, y: 4, z: [1, 2]});
+
+assert.eq(2, f.count(), "4. count after adding array value on third key should be 2");
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1}});
+assert.eq(false, res.ok, "4c " + tojson(res));
+res = db.runCommand({checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1}});
+assert.eq(false, res.ok, "4d " + tojson(res));
+res = db.runCommand(
+ {checkShardingIndex: "test.jstests_shardingindex", keyPattern: {x: 1, y: 1, z: 1}});
+assert.eq(false, res.ok, "4e " + tojson(res));
print("PASSED");
diff --git a/jstests/core/cleanup_orphaned.js b/jstests/core/cleanup_orphaned.js
index a0444a9f90c..2ece4316fa2 100644
--- a/jstests/core/cleanup_orphaned.js
+++ b/jstests/core/cleanup_orphaned.js
@@ -1,3 +1,3 @@
// Test that cleanupOrphaned cannot be run on stand alone mongod.
-var res = db.adminCommand({ cleanupOrphaned: 'unsharded.coll' });
+var res = db.adminCommand({cleanupOrphaned: 'unsharded.coll'});
assert(!res.ok, tojson(res));
diff --git a/jstests/core/clone_as_capped_nonexistant.js b/jstests/core/clone_as_capped_nonexistant.js
index e6f925d2a13..f9e68ee9648 100644
--- a/jstests/core/clone_as_capped_nonexistant.js
+++ b/jstests/core/clone_as_capped_nonexistant.js
@@ -8,23 +8,19 @@
testDb.dropDatabase();
// Database does not exist here
- var res = testDb.runCommand({cloneCollectionAsCapped: 'foo',
- toCollection: 'bar',
- size: 1024});
+ var res = testDb.runCommand({cloneCollectionAsCapped: 'foo', toCollection: 'bar', size: 1024});
assert.eq(res.ok, 0, "cloning a nonexistent collection to capped should not have worked");
var isSharded = (db.isMaster().msg == "isdbgrid");
- assert.eq(res.errmsg,
- isSharded ? "no such cmd: cloneCollectionAsCapped"
- : "database " + dbname + " not found",
- "converting a nonexistent to capped failed but for the wrong reason");
+ assert.eq(
+ res.errmsg,
+ isSharded ? "no such cmd: cloneCollectionAsCapped" : "database " + dbname + " not found",
+ "converting a nonexistent to capped failed but for the wrong reason");
// Database exists, but collection doesn't
testDb.coll.insert({});
- var res = testDb.runCommand({cloneCollectionAsCapped: 'foo',
- toCollection: 'bar',
- size: 1024});
+ var res = testDb.runCommand({cloneCollectionAsCapped: 'foo', toCollection: 'bar', size: 1024});
assert.eq(res.ok, 0, "cloning a nonexistent collection to capped should not have worked");
assert.eq(res.errmsg,
isSharded ? "no such cmd: cloneCollectionAsCapped"
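
For contrast with the two failure cases above, the command's happy path on a non-sharded deployment looks roughly like this (database and collection names are illustrative):

    var cdb = db.getSiblingDB('clone_capped_sketch');
    cdb.dropDatabase();
    assert.writeOK(cdb.src.insert({a: 1}));
    assert.commandWorked(
        cdb.runCommand({cloneCollectionAsCapped: 'src', toCollection: 'dst', size: 4096}));
    assert(cdb.dst.isCapped(), "destination should be capped");
    assert.eq(1, cdb.dst.count());
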
diff --git a/jstests/core/collection_info_cache_race.js b/jstests/core/collection_info_cache_race.js
index e1b06d52e46..d57fc3340db 100644
--- a/jstests/core/collection_info_cache_race.js
+++ b/jstests/core/collection_info_cache_race.js
@@ -3,12 +3,11 @@
// Create collection without an index, then try to save a doc.
var coll = db.collection_info_cache_race;
coll.drop();
-assert.commandWorked(db.createCollection(coll.getName(), {autoIndexId:false}));
+assert.commandWorked(db.createCollection(coll.getName(), {autoIndexId: false}));
// Fails when SERVER-16502 was not fixed, due to invariant
-assert.writeOK(coll.save({_id:false}, {writeConcern:{w:1}}));
-
+assert.writeOK(coll.save({_id: false}, {writeConcern: {w: 1}}));
coll.drop();
-assert.commandWorked(db.createCollection(coll.getName(), {autoIndexId:false}));
-assert.eq(null,coll.findOne());
-assert.writeOK(coll.save({_id:false}, {writeConcern:{w:1}}));
+assert.commandWorked(db.createCollection(coll.getName(), {autoIndexId: false}));
+assert.eq(null, coll.findOne());
+assert.writeOK(coll.save({_id: false}, {writeConcern: {w: 1}}));
diff --git a/jstests/core/collection_truncate.js b/jstests/core/collection_truncate.js
index 1581762f30a..08de3e8c1ed 100644
--- a/jstests/core/collection_truncate.js
+++ b/jstests/core/collection_truncate.js
@@ -5,7 +5,7 @@ t.drop();
function truncate() {
// Until SERVER-15274 is implemented, this is the only way to truncate a collection.
- assert.commandWorked(t.runCommand('emptycapped')); // works on non-capped as well.
+ assert.commandWorked(t.runCommand('emptycapped')); // works on non-capped as well.
}
function assertEmpty() {
@@ -27,14 +27,14 @@ function assertEmpty() {
}
// Single record case.
-t.insert({a:1});
+t.insert({a: 1});
truncate();
assertEmpty();
// Multi-extent case.
var initialStorageSize = t.stats().storageSize;
while (t.stats().storageSize == initialStorageSize) {
- t.insert({a:1});
+ t.insert({a: 1});
}
truncate();
assertEmpty();
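
The emptycapped command used by truncate() above is a test-only command (available here because the jstest suites run with test commands enabled); its usual target is a capped collection, e.g.:

    var cap = db.truncate_sketch;
    cap.drop();
    assert.commandWorked(db.createCollection(cap.getName(), {capped: true, size: 4096}));
    assert.writeOK(cap.insert({a: 1}));
    assert.commandWorked(cap.runCommand('emptycapped'));
    assert.eq(0, cap.count());
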
diff --git a/jstests/core/collmod.js b/jstests/core/collmod.js
index f901c8a3e6f..e91a7bd8484 100644
--- a/jstests/core/collmod.js
+++ b/jstests/core/collmod.js
@@ -1,94 +1,94 @@
// Basic js tests for the collMod command.
// Test setting the usePowerOf2Sizes flag, and modifying TTL indexes.
-function debug( x ) {
- //printjson( x );
+function debug(x) {
+ // printjson( x );
}
var coll = "collModTest";
-var t = db.getCollection( coll );
+var t = db.getCollection(coll);
t.drop();
var isMongos = ("isdbgrid" == db.runCommand("ismaster").msg);
-db.createCollection( coll );
+db.createCollection(coll);
-function findTTL( key, expireAfterSeconds ) {
+function findTTL(key, expireAfterSeconds) {
var all = t.getIndexes();
- all = all.filter( function(z) {
- return z.expireAfterSeconds == expireAfterSeconds &&
- friendlyEqual( z.key, key ); } );
+ all = all.filter(function(z) {
+ return z.expireAfterSeconds == expireAfterSeconds && friendlyEqual(z.key, key);
+ });
return all.length == 1;
}
function findCollectionInfo() {
var all = db.getCollectionInfos();
- all = all.filter( function(z) { return z.name == t.getName(); } );
+ all = all.filter(function(z) {
+ return z.name == t.getName();
+ });
assert.eq(all.length, 1);
return all[0];
}
// ensure we fail with gibberish options
-assert.commandFailed(t.runCommand('collmod', {NotARealOption:1}));
+assert.commandFailed(t.runCommand('collmod', {NotARealOption: 1}));
// add a TTL index
-t.ensureIndex( {a : 1}, { "expireAfterSeconds": 50 } );
-assert( findTTL( { a : 1 }, 50 ), "TTL index not added" );
+t.ensureIndex({a: 1}, {"expireAfterSeconds": 50});
+assert(findTTL({a: 1}, 50), "TTL index not added");
// try to modify it with a bad key pattern
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : "bad" , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 0 , res.ok , "mod shouldn't work with bad keypattern");
+var res =
+ db.runCommand({"collMod": coll, "index": {"keyPattern": "bad", "expireAfterSeconds": 100}});
+debug(res);
+assert.eq(0, res.ok, "mod shouldn't work with bad keypattern");
// try to modify it without expireAfterSeconds field
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1} } } );
-debug( res );
-assert.eq( 0 , res.ok , "TTL mod shouldn't work without expireAfterSeconds");
+var res = db.runCommand({"collMod": coll, "index": {"keyPattern": {a: 1}}});
+debug(res);
+assert.eq(0, res.ok, "TTL mod shouldn't work without expireAfterSeconds");
// try to modify it with a non-numeric expireAfterSeconds field
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : "100" } } );
-debug( res );
-assert.eq( 0 , res.ok , "TTL mod shouldn't work with non-numeric expireAfterSeconds");
+var res =
+ db.runCommand({"collMod": coll, "index": {"keyPattern": {a: 1}, "expireAfterSeconds": "100"}});
+debug(res);
+assert.eq(0, res.ok, "TTL mod shouldn't work with non-numeric expireAfterSeconds");
// this time modifying should finally work
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1}, "expireAfterSeconds" : 100 } } );
-debug( res );
-assert( findTTL( {a:1}, 100 ), "TTL index not modified" );
+var res =
+ db.runCommand({"collMod": coll, "index": {"keyPattern": {a: 1}, "expireAfterSeconds": 100}});
+debug(res);
+assert(findTTL({a: 1}, 100), "TTL index not modified");
// try to modify a faulty TTL index with a non-numeric expireAfterSeconds field
-t.dropIndex( {a : 1 } );
-t.ensureIndex( {a : 1} , { "expireAfterSeconds": "50" } );
-var res = db.runCommand( { "collMod" : coll,
- "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert.eq( 0, res.ok, "shouldn't be able to modify faulty index spec" );
+t.dropIndex({a: 1});
+t.ensureIndex({a: 1}, {"expireAfterSeconds": "50"});
+var res =
+ db.runCommand({"collMod": coll, "index": {"keyPattern": {a: 1}, "expireAfterSeconds": 100}});
+debug(res);
+assert.eq(0, res.ok, "shouldn't be able to modify faulty index spec");
// try with new index, this time set both expireAfterSeconds and the usePowerOf2Sizes flag
-t.dropIndex( {a : 1 } );
-t.ensureIndex( {a : 1} , { "expireAfterSeconds": 50 } );
-var res = db.runCommand( { "collMod" : coll ,
- "usePowerOf2Sizes" : true,
- "index" : { "keyPattern" : {a : 1} , "expireAfterSeconds" : 100 } } );
-debug( res );
-assert( findTTL( {a:1}, 100), "TTL index should be 100 now" );
+t.dropIndex({a: 1});
+t.ensureIndex({a: 1}, {"expireAfterSeconds": 50});
+var res = db.runCommand({
+ "collMod": coll,
+ "usePowerOf2Sizes": true,
+ "index": {"keyPattern": {a: 1}, "expireAfterSeconds": 100}
+});
+debug(res);
+assert(findTTL({a: 1}, 100), "TTL index should be 100 now");
// Clear usePowerOf2Sizes and enable noPadding. Make sure collection options.flags is updated.
-var res = db.runCommand( { "collMod" : coll ,
- "usePowerOf2Sizes" : false,
- "noPadding" : true} );
-debug( res );
+var res = db.runCommand({"collMod": coll, "usePowerOf2Sizes": false, "noPadding": true});
+debug(res);
assert.commandWorked(res);
var info = findCollectionInfo();
-assert.eq(info.options.flags, 2, tojson(info)); // 2 is CollectionOptions::Flag_NoPadding
+assert.eq(info.options.flags, 2, tojson(info)); // 2 is CollectionOptions::Flag_NoPadding
// Clear noPadding and check results object and options.flags.
-var res = db.runCommand( { "collMod" : coll ,
- "noPadding" : false} );
-debug( res );
+var res = db.runCommand({"collMod": coll, "noPadding": false});
+debug(res);
assert.commandWorked(res);
if (!isMongos) {
// don't check this for sharding passthrough since mongos has a different output format.
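
Taken in isolation, the successful TTL modification exercised above has this shape (collection and field names are illustrative):

    var ttl = db.collmod_sketch;
    ttl.drop();
    assert.commandWorked(db.createCollection(ttl.getName()));
    ttl.ensureIndex({createdAt: 1}, {expireAfterSeconds: 3600});
    assert.commandWorked(db.runCommand(
        {collMod: ttl.getName(), index: {keyPattern: {createdAt: 1}, expireAfterSeconds: 60}}));
    assert(ttl.getIndexes().some(function(ix) {
        return friendlyEqual(ix.key, {createdAt: 1}) && ix.expireAfterSeconds == 60;
    }), "expireAfterSeconds should now be 60");
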
diff --git a/jstests/core/compact_keeps_indexes.js b/jstests/core/compact_keeps_indexes.js
index 68f52c126f5..f2da7597cdf 100644
--- a/jstests/core/compact_keeps_indexes.js
+++ b/jstests/core/compact_keeps_indexes.js
@@ -8,22 +8,22 @@
var coll = db.compact_keeps_indexes;
coll.drop();
- coll.insert({_id:1, x:1});
- coll.ensureIndex({x:1});
+ coll.insert({_id: 1, x: 1});
+ coll.ensureIndex({x: 1});
assert.eq(coll.getIndexes().length, 2);
// force:true is for replset passthroughs
- var res = coll.runCommand('compact', {force:true});
+ var res = coll.runCommand('compact', {force: true});
// Some storage engines (for example, inMemoryExperiment) do not support the compact command.
- if (res.code == 115) { // CommandNotSupported
+ if (res.code == 115) { // CommandNotSupported
return;
}
assert.commandWorked(res);
assert.eq(coll.getIndexes().length, 2);
- assert.eq(coll.find({_id:1}).itcount(), 1);
- assert.eq(coll.find({x:1}).itcount(), 1);
+ assert.eq(coll.find({_id: 1}).itcount(), 1);
+ assert.eq(coll.find({x: 1}).itcount(), 1);
// Run compact repeatedly while simultaneously creating and dropping a collection in a
// different database.
@@ -31,19 +31,19 @@
// The test uses a single collection in the database test_compact_keeps_indexes_drop
// which triggers a series of slow resync operations in the slave as the collection is
// repeatedly created and dropped.
- var isMasterSlave = testingReplication &&
- !assert.commandWorked(db.isMaster()).hasOwnProperty('setName');
+ var isMasterSlave =
+ testingReplication && !assert.commandWorked(db.isMaster()).hasOwnProperty('setName');
if (!isMasterSlave) {
var dropCollectionShell = startParallelShell(function() {
var t = db.getSiblingDB('test_compact_keeps_indexes_drop').testcoll;
t.drop();
- for (var i=0; i<100; i++) {
+ for (var i = 0; i < 100; i++) {
t.save({a: 1});
t.drop();
}
});
- for (var i=0; i<10; i++) {
- coll.runCommand('compact');
+ for (var i = 0; i < 10; i++) {
+ coll.runCommand('compact');
}
dropCollectionShell();
}
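
The parallel-shell idiom above relies on startParallelShell returning a join function: calling it blocks until the spawned shell exits, which is why the test ends with dropCollectionShell(). A minimal sketch (database name is illustrative):

    var join = startParallelShell("db.getSiblingDB('parallel_sketch').c.insert({x: 1});");
    join();  // wait for the spawned shell to finish
    assert.eq(1, db.getSiblingDB('parallel_sketch').c.count());
    db.getSiblingDB('parallel_sketch').dropDatabase();
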
diff --git a/jstests/core/compare_timestamps.js b/jstests/core/compare_timestamps.js
index 2a7fcb3683c..2440fac3fe1 100644
--- a/jstests/core/compare_timestamps.js
+++ b/jstests/core/compare_timestamps.js
@@ -7,4 +7,3 @@
assert.writeOK(t.insert({a: new Timestamp(0, 0), b: "zero"}));
assert.eq(t.find().sort({a: 1}).limit(1).next().b, "zero", "timestamp must compare unsigned");
}());
-
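
The assertion above depends on Timestamp ordering treating the seconds field as unsigned, so a value with the high bit set still sorts after Timestamp(0, 0). Stated on its own (collection name is illustrative):

    var ts = db.ts_sketch;
    ts.drop();
    assert.writeOK(ts.insert({a: new Timestamp(0xffffffff, 3), b: "max"}));
    assert.writeOK(ts.insert({a: new Timestamp(0, 0), b: "zero"}));
    assert.eq("zero", ts.find().sort({a: 1}).limit(1).next().b);
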
diff --git a/jstests/core/connection_status.js b/jstests/core/connection_status.js
index bbae51b9eb0..29b8999ccc8 100644
--- a/jstests/core/connection_status.js
+++ b/jstests/core/connection_status.js
@@ -15,7 +15,7 @@ function test(userName) {
var users = output.authInfo.authenticatedUsers;
var matches = 0;
- for (var i=0; i < users.length; i++) {
+ for (var i = 0; i < users.length; i++) {
if (users[i].db != dbName)
continue;
@@ -28,7 +28,7 @@ function test(userName) {
var roles = output.authInfo.authenticatedUserRoles;
matches = 0;
- for (var i=0; i < roles.length; i++) {
+ for (var i = 0; i < roles.length; i++) {
if (roles[i].db != "admin")
continue;
@@ -48,7 +48,7 @@ function test(userName) {
var privileges = output.authInfo.authenticatedUserPrivileges;
matches = 0;
- for (var i=0; i < privileges.length; i++) {
+ for (var i = 0; i < privileges.length; i++) {
if (privileges[i].resource.anyResource) {
matches++;
}
@@ -65,4 +65,3 @@ function test(userName) {
test("someone");
test("someone else");
-
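
The loops above walk the output of the connectionStatus command; run directly, with showPrivileges set so that authenticatedUserPrivileges is populated, it looks like:

    var status = db.runCommand({connectionStatus: 1, showPrivileges: true});
    assert.commandWorked(status);
    printjson(status.authInfo.authenticatedUsers);
    printjson(status.authInfo.authenticatedUserRoles);
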
diff --git a/jstests/core/connection_string_validation.js b/jstests/core/connection_string_validation.js
index a682b27ea79..232650f230b 100644
--- a/jstests/core/connection_string_validation.js
+++ b/jstests/core/connection_string_validation.js
@@ -3,39 +3,36 @@
port = "27017";
-if ( db.getMongo().host.indexOf( ":" ) >= 0 ) {
- var idx = db.getMongo().host.indexOf( ":" );
- port = db.getMongo().host.substring( idx + 1 );
+if (db.getMongo().host.indexOf(":") >= 0) {
+ var idx = db.getMongo().host.indexOf(":");
+ port = db.getMongo().host.substring(idx + 1);
}
-var goodStrings = [
- "localhost:" + port + "/test",
- "127.0.0.1:" + port + "/test"
- ];
+var goodStrings = ["localhost:" + port + "/test", "127.0.0.1:" + port + "/test"];
var badStrings = [
- { s: undefined, r: /^Missing connection string$/ },
- { s: 7, r: /^Incorrect type/ },
- { s: null, r: /^Incorrect type/ },
- { s: "", r: /^Empty connection string$/ },
- { s: " ", r: /^Empty connection string$/ },
- { s: ":", r: /^Missing host name/ },
- { s: "/", r: /^Missing host name/ },
- { s: ":/", r: /^Missing host name/ },
- { s: ":/test", r: /^Missing host name/ },
- { s: ":" + port + "/", r: /^Missing host name/ },
- { s: ":" + port + "/test", r: /^Missing host name/ },
- { s: "/test", r: /^Missing host name/ },
- { s: "localhost:/test", r: /^Missing port number/ },
- { s: "127.0.0.1:/test", r: /^Missing port number/ },
- { s: "127.0.0.1:cat/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:1cat/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:123456/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:65536/test", r: /^Invalid port number/ },
- { s: "::1:65536/test", r: /^Invalid port number/ },
- { s: "127.0.0.1:" + port + "/", r: /^Missing database name/ },
- { s: "::1:" + port + "/", r: /^Missing database name/ }
- ];
+ {s: undefined, r: /^Missing connection string$/},
+ {s: 7, r: /^Incorrect type/},
+ {s: null, r: /^Incorrect type/},
+ {s: "", r: /^Empty connection string$/},
+ {s: " ", r: /^Empty connection string$/},
+ {s: ":", r: /^Missing host name/},
+ {s: "/", r: /^Missing host name/},
+ {s: ":/", r: /^Missing host name/},
+ {s: ":/test", r: /^Missing host name/},
+ {s: ":" + port + "/", r: /^Missing host name/},
+ {s: ":" + port + "/test", r: /^Missing host name/},
+ {s: "/test", r: /^Missing host name/},
+ {s: "localhost:/test", r: /^Missing port number/},
+ {s: "127.0.0.1:/test", r: /^Missing port number/},
+ {s: "127.0.0.1:cat/test", r: /^Invalid port number/},
+ {s: "127.0.0.1:1cat/test", r: /^Invalid port number/},
+ {s: "127.0.0.1:123456/test", r: /^Invalid port number/},
+ {s: "127.0.0.1:65536/test", r: /^Invalid port number/},
+ {s: "::1:65536/test", r: /^Invalid port number/},
+ {s: "127.0.0.1:" + port + "/", r: /^Missing database name/},
+ {s: "::1:" + port + "/", r: /^Missing database name/}
+];
function testGood(i, connectionString) {
print("\nTesting good connection string " + i + " (\"" + connectionString + "\") ...");
@@ -44,18 +41,17 @@ function testGood(i, connectionString) {
try {
var connectDB = connect(connectionString);
connectDB = null;
- }
- catch (e) {
+ } catch (e) {
gotException = true;
exception = e;
}
if (!gotException) {
- print("Good connection string " + i +
- " (\"" + connectionString + "\") correctly validated");
+ print("Good connection string " + i + " (\"" + connectionString +
+ "\") correctly validated");
return;
}
- var message = "FAILED to correctly validate goodString " + i +
- " (\"" + connectionString + "\"): exception was \"" + tojson(exception) + "\"";
+ var message = "FAILED to correctly validate goodString " + i + " (\"" + connectionString +
+ "\"): exception was \"" + tojson(exception) + "\"";
doassert(message);
}
@@ -67,8 +63,7 @@ function testBad(i, connectionString, errorRegex) {
try {
var connectDB = connect(connectionString);
connectDB = null;
- }
- catch (e) {
+ } catch (e) {
gotException = true;
exception = e;
if (errorRegex.test(e.message)) {
@@ -80,13 +75,12 @@ function testBad(i, connectionString, errorRegex) {
"\") correctly rejected:\n" + tojson(exception));
return;
}
- var message = "FAILED to generate correct exception for badString " + i +
- " (\"" + connectionString + "\"): ";
+ var message = "FAILED to generate correct exception for badString " + i + " (\"" +
+ connectionString + "\"): ";
if (gotException) {
- message += "exception was \"" + tojson(exception) +
- "\", it should have matched \"" + errorRegex.toString() + "\"";
- }
- else {
+ message += "exception was \"" + tojson(exception) + "\", it should have matched \"" +
+ errorRegex.toString() + "\"";
+ } else {
message += "no exception was thrown";
}
doassert(message);
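
Both helpers ultimately call connect(); the good-string case, stripped of the try/catch bookkeeping and reusing the port computed at the top of the file, is simply:

    var directDB = connect("localhost:" + port + "/test");
    assert.eq("test", directDB.getName());
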
diff --git a/jstests/core/constructors.js b/jstests/core/constructors.js
index 346b1ca0dfc..814766ee2c3 100644
--- a/jstests/core/constructors.js
+++ b/jstests/core/constructors.js
@@ -2,8 +2,8 @@
// Takes a list of constructors and returns a new list with an extra entry for each constructor with
// "new" prepended
-function addConstructorsWithNew (constructorList) {
- function prependNew (constructor) {
+function addConstructorsWithNew(constructorList) {
+ function prependNew(constructor) {
return "new " + constructor;
}
@@ -12,152 +12,143 @@ function addConstructorsWithNew (constructorList) {
// We use slice(0) here to make a copy of our lists
var validWithNew = valid.concat(valid.slice(0).map(prependNew));
var invalidWithNew = invalid.concat(invalid.slice(0).map(prependNew));
- return { "valid" : validWithNew, "invalid" : invalidWithNew };
+ return {
+ "valid": validWithNew,
+ "invalid": invalidWithNew
+ };
}
-function clientEvalConstructorTest (constructorList) {
+function clientEvalConstructorTest(constructorList) {
constructorList = addConstructorsWithNew(constructorList);
- constructorList.valid.forEach(function (constructor) {
+ constructorList.valid.forEach(function(constructor) {
try {
eval(constructor);
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in eval context: " + e);
+ } catch (e) {
+ throw("valid constructor: " + constructor + " failed in eval context: " + e);
}
});
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { eval(constructor); },
- [], "invalid constructor did not throw error in eval context: " + constructor);
+ constructorList.invalid.forEach(function(constructor) {
+ assert.throws(function() {
+ eval(constructor);
+ }, [], "invalid constructor did not throw error in eval context: " + constructor);
});
}
-function dbEvalConstructorTest (constructorList) {
+function dbEvalConstructorTest(constructorList) {
constructorList = addConstructorsWithNew(constructorList);
- constructorList.valid.forEach(function (constructor) {
+ constructorList.valid.forEach(function(constructor) {
try {
db.eval(constructor);
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in db.eval context: " + e);
+ } catch (e) {
+ throw("valid constructor: " + constructor + " failed in db.eval context: " + e);
}
});
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { db.eval(constructor); },
- [], "invalid constructor did not throw error in db.eval context: " + constructor);
+ constructorList.invalid.forEach(function(constructor) {
+ assert.throws(function() {
+ db.eval(constructor);
+ }, [], "invalid constructor did not throw error in db.eval context: " + constructor);
});
}
-function mapReduceConstructorTest (constructorList) {
+function mapReduceConstructorTest(constructorList) {
constructorList = addConstructorsWithNew(constructorList);
t = db.mr_constructors;
t.drop();
- t.save( { "partner" : 1, "visits" : 9 } );
- t.save( { "partner" : 2, "visits" : 9 } );
- t.save( { "partner" : 1, "visits" : 11 } );
- t.save( { "partner" : 1, "visits" : 30 } );
- t.save( { "partner" : 2, "visits" : 41 } );
- t.save( { "partner" : 2, "visits" : 41 } );
+ t.save({"partner": 1, "visits": 9});
+ t.save({"partner": 2, "visits": 9});
+ t.save({"partner": 1, "visits": 11});
+ t.save({"partner": 1, "visits": 30});
+ t.save({"partner": 2, "visits": 41});
+ t.save({"partner": 2, "visits": 41});
- constructorList.valid.forEach(function (constructor) {
+ constructorList.valid.forEach(function(constructor) {
try {
m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
- res = t.mapReduce( m , r , { out : "mr_constructors_out" , scope : { xx : 1 } } );
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in mapReduce context: " + e);
+ res = t.mapReduce(m, r, {out: "mr_constructors_out", scope: {xx: 1}});
+ } catch (e) {
+ throw("valid constructor: " + constructor + " failed in mapReduce context: " + e);
}
});
- constructorList.invalid.forEach(function (constructor) {
+ constructorList.invalid.forEach(function(constructor) {
m = eval("dummy = function(){ emit( \"test\" , " + constructor + " ) }");
r = eval("dummy = function( k , v ){ return { test : " + constructor + " } }");
- assert.throws(function () { res = t.mapReduce( m , r ,
- { out : "mr_constructors_out" , scope : { xx : 1 } } ); },
- [], "invalid constructor did not throw error in mapReduce context: " + constructor);
+ assert.throws(function() {
+ res = t.mapReduce(m, r, {out: "mr_constructors_out", scope: {xx: 1}});
+ }, [], "invalid constructor did not throw error in mapReduce context: " + constructor);
});
db.mr_constructors_out.drop();
t.drop();
}
-function whereConstructorTest (constructorList) {
+function whereConstructorTest(constructorList) {
constructorList = addConstructorsWithNew(constructorList);
t = db.where_constructors;
t.drop();
- assert.writeOK( t.insert({ x : 1 }));
+ assert.writeOK(t.insert({x: 1}));
- constructorList.valid.forEach(function (constructor) {
+ constructorList.valid.forEach(function(constructor) {
try {
- t.findOne({ $where : constructor });
- }
- catch (e) {
- throw ("valid constructor: " + constructor + " failed in $where query: " + e);
+ t.findOne({$where: constructor});
+ } catch (e) {
+ throw("valid constructor: " + constructor + " failed in $where query: " + e);
}
});
- constructorList.invalid.forEach(function (constructor) {
- assert.throws(function () { t.findOne({ $where : constructor }); },
- [], "invalid constructor did not throw error in $where query: " + constructor);
+ constructorList.invalid.forEach(function(constructor) {
+ assert.throws(function() {
+ t.findOne({$where: constructor});
+ }, [], "invalid constructor did not throw error in $where query: " + constructor);
});
}
var dbrefConstructors = {
- "valid" : [
- "DBRef(\"namespace\", 0)",
- "DBRef(\"namespace\", \"test\")",
- "DBRef(\"namespace\", \"test\", \"database\")",
- "DBRef(\"namespace\", ObjectId())",
- "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))",
- "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"), \"database\")",
- ],
- "invalid" : [
- "DBRef()",
- "DBRef(true, ObjectId())",
- "DBRef(true, ObjectId(), true)",
- "DBRef(\"namespace\")",
- "DBRef(\"namespace\", ObjectId(), true)",
- "DBRef(\"namespace\", ObjectId(), 123)",
- ]
+ "valid": [
+ "DBRef(\"namespace\", 0)",
+ "DBRef(\"namespace\", \"test\")",
+ "DBRef(\"namespace\", \"test\", \"database\")",
+ "DBRef(\"namespace\", ObjectId())",
+ "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"))",
+ "DBRef(\"namespace\", ObjectId(\"000000000000000000000000\"), \"database\")",
+ ],
+ "invalid": [
+ "DBRef()",
+ "DBRef(true, ObjectId())",
+ "DBRef(true, ObjectId(), true)",
+ "DBRef(\"namespace\")",
+ "DBRef(\"namespace\", ObjectId(), true)",
+ "DBRef(\"namespace\", ObjectId(), 123)",
+ ]
};
var dbpointerConstructors = {
- "valid" : [
- "DBPointer(\"namespace\", ObjectId())",
- "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))",
- ],
- "invalid" : [
- "DBPointer()",
- "DBPointer(true, ObjectId())",
- "DBPointer(\"namespace\", 0)",
- "DBPointer(\"namespace\", \"test\")",
- "DBPointer(\"namespace\")",
- "DBPointer(\"namespace\", ObjectId(), true)",
- ]
+ "valid": [
+ "DBPointer(\"namespace\", ObjectId())",
+ "DBPointer(\"namespace\", ObjectId(\"000000000000000000000000\"))",
+ ],
+ "invalid": [
+ "DBPointer()",
+ "DBPointer(true, ObjectId())",
+ "DBPointer(\"namespace\", 0)",
+ "DBPointer(\"namespace\", \"test\")",
+ "DBPointer(\"namespace\")",
+ "DBPointer(\"namespace\", ObjectId(), true)",
+ ]
};
-
var objectidConstructors = {
- "valid" : [
- 'ObjectId()',
- 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")',
- ],
- "invalid" : [
- 'ObjectId(5)',
- 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")',
- ]
+ "valid": ['ObjectId()', 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")', ],
+ "invalid": ['ObjectId(5)', 'ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")', ]
};
var timestampConstructors = {
- "valid" : [
- 'Timestamp()',
- 'Timestamp(0,0)',
- 'Timestamp(1.0,1.0)',
- ],
- "invalid" : [
+ "valid": ['Timestamp()', 'Timestamp(0,0)', 'Timestamp(1.0,1.0)', ],
+ "invalid": [
'Timestamp(0)',
'Timestamp(0,0,0)',
'Timestamp("test","test")',
@@ -166,14 +157,12 @@ var timestampConstructors = {
'Timestamp(true,true)',
'Timestamp(true,0)',
'Timestamp(0,true)',
- ]
+ ]
};
var bindataConstructors = {
- "valid" : [
- 'BinData(0,"test")',
- ],
- "invalid" : [
+ "valid": ['BinData(0,"test")', ],
+ "invalid": [
'BinData(0,"test", "test")',
'BinData()',
'BinData(-1, "")',
@@ -185,14 +174,12 @@ var bindataConstructors = {
'BinData(0, {})',
'BinData(0, [])',
'BinData(0, function () {})',
- ]
+ ]
};
var uuidConstructors = {
- "valid" : [
- 'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- ],
- "invalid" : [
+ "valid": ['UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', ],
+ "invalid": [
'UUID("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
'UUID()',
'UUID("aa")',
@@ -204,14 +191,12 @@ var uuidConstructors = {
'UUID({})',
'UUID([])',
'UUID(function () {})',
- ]
+ ]
};
var md5Constructors = {
- "valid" : [
- 'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- ],
- "invalid" : [
+ "valid": ['MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")', ],
+ "invalid": [
'MD5("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
'MD5()',
'MD5("aa")',
@@ -223,17 +208,17 @@ var md5Constructors = {
'MD5({})',
'MD5([])',
'MD5(function () {})',
- ]
+ ]
};
var hexdataConstructors = {
- "valid" : [
+ "valid": [
'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
'HexData(0, "")',
'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")',
- 'HexData(0, "000000000000000000000005")', // SERVER-9605
- ],
- "invalid" : [
+ 'HexData(0, "000000000000000000000005")', // SERVER-9605
+ ],
+ "invalid": [
'HexData(0, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0)',
'HexData()',
'HexData(0)',
@@ -249,19 +234,12 @@ var hexdataConstructors = {
'HexData(0, [])',
'HexData(0, function () {})',
'HexData(0, "invalidhex")',
- ]
+ ]
};
var dateConstructors = {
- "valid" : [
- 'Date()',
- 'Date(0)',
- 'Date(0,0)',
- 'Date(0,0,0)',
- 'Date("foo")',
- ],
- "invalid" : [
- ]
+ "valid": ['Date()', 'Date(0)', 'Date(0,0)', 'Date(0,0,0)', 'Date("foo")', ],
+ "invalid": []
};
clientEvalConstructorTest(dbrefConstructors);
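
Every list above encodes the same contract that the four harnesses (eval, db.eval, mapReduce, $where) check: each "valid" string must evaluate cleanly and each "invalid" string must throw. With one ObjectId pair as the example:

    assert.doesNotThrow(function() {
        eval('ObjectId("FFFFFFFFFFFFFFFFFFFFFFFF")');  // valid: 24 hex characters
    });
    assert.throws(function() {
        eval('ObjectId("FFFFFFFFFFFFFFFFFFFFFFFQ")');  // invalid: 'Q' is not a hex digit
    });
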
diff --git a/jstests/core/copydb.js b/jstests/core/copydb.js
index d653053fbb0..4494bcc4716 100644
--- a/jstests/core/copydb.js
+++ b/jstests/core/copydb.js
@@ -16,6 +16,5 @@ assert.eq(1, db2.foo.count(), "D");
assert.eq(db1.foo.getIndexes().length, db2.foo.getIndexes().length);
// Test command input validation.
-assert.commandFailed(db1.adminCommand({copydb: 1,
- fromdb: db1.getName(),
- todb: "copydb.invalid"})); // Name can't contain dot.
+assert.commandFailed(db1.adminCommand(
+ {copydb: 1, fromdb: db1.getName(), todb: "copydb.invalid"})); // Name can't contain dot.
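
The validation failure above sits alongside the successful copy earlier in the file; the command's basic shape is (database names are illustrative):

    var src = db.getSiblingDB("copydb_sketch_src");
    var dst = db.getSiblingDB("copydb_sketch_dst");
    src.dropDatabase();
    dst.dropDatabase();
    assert.writeOK(src.foo.insert({a: 1}));
    assert.commandWorked(src.adminCommand({copydb: 1, fromdb: src.getName(), todb: dst.getName()}));
    assert.eq(1, dst.foo.count());
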
diff --git a/jstests/core/count.js b/jstests/core/count.js
index 5502d7176c1..9ec6a424c34 100644
--- a/jstests/core/count.js
+++ b/jstests/core/count.js
@@ -1,25 +1,24 @@
t = db.jstests_count;
t.drop();
-t.save( { i: 1 } );
-t.save( { i: 2 } );
-assert.eq( 1, t.find( { i: 1 } ).count(), "A" );
-assert.eq( 1, t.count( { i: 1 } ) , "B" );
-assert.eq( 2, t.find().count() , "C" );
-assert.eq( 2, t.find( undefined ).count() , "D" );
-assert.eq( 2, t.find( null ).count() , "E" );
-assert.eq( 2, t.count() , "F" );
+t.save({i: 1});
+t.save({i: 2});
+assert.eq(1, t.find({i: 1}).count(), "A");
+assert.eq(1, t.count({i: 1}), "B");
+assert.eq(2, t.find().count(), "C");
+assert.eq(2, t.find(undefined).count(), "D");
+assert.eq(2, t.find(null).count(), "E");
+assert.eq(2, t.count(), "F");
t.drop();
-t.save( {a:true,b:false} );
-t.ensureIndex( {b:1,a:1} );
-assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" );
-assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" );
+t.save({a: true, b: false});
+t.ensureIndex({b: 1, a: 1});
+assert.eq(1, t.find({a: true, b: false}).count(), "G");
+assert.eq(1, t.find({b: false, a: true}).count(), "H");
t.drop();
-t.save( {a:true,b:false} );
-t.ensureIndex( {b:1,a:1,c:1} );
-
-assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" );
-assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" );
+t.save({a: true, b: false});
+t.ensureIndex({b: 1, a: 1, c: 1});
+assert.eq(1, t.find({a: true, b: false}).count(), "I");
+assert.eq(1, t.find({b: false, a: true}).count(), "J");
diff --git a/jstests/core/count10.js b/jstests/core/count10.js
index 24d61956e4d..2a1853c399a 100644
--- a/jstests/core/count10.js
+++ b/jstests/core/count10.js
@@ -3,40 +3,38 @@
t = db.count10;
t.drop();
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
+for (i = 0; i < 100; i++) {
+ t.save({x: i});
}
// Start a parallel shell which repeatedly checks for a count
// query using db.currentOp(). As soon as the op is found,
// kill it via db.killOp().
-s = startParallelShell(
- 'assert.soon(function() {' +
- ' current = db.currentOp({"ns": db.count10.getFullName(), ' +
- ' "query.count": db.count10.getName()}); ' +
-
- // Check that we found the count op. If not, return false so
- // that assert.soon will retry.
- ' assert("inprog" in current); ' +
- ' if (current.inprog.length === 0) { ' +
- ' jsTest.log("count10.js: did not find count op, retrying"); ' +
- ' printjson(current); ' +
- ' return false; ' +
- ' } ' +
- ' countOp = current.inprog[0]; ' +
- ' if (!countOp) { ' +
- ' jsTest.log("count10.js: did not find count op, retrying"); ' +
- ' printjson(current); ' +
- ' return false; ' +
- ' } ' +
-
- // Found the count op. Try to kill it.
- ' jsTest.log("count10.js: found count op:"); ' +
- ' printjson(current); ' +
- ' printjson(db.killOp(countOp.opid)); ' +
- ' return true; ' +
- '}, "count10.js: could not find count op after retrying, gave up");'
-);
+s = startParallelShell('assert.soon(function() {' +
+ ' current = db.currentOp({"ns": db.count10.getFullName(), ' +
+ ' "query.count": db.count10.getName()}); ' +
+
+ // Check that we found the count op. If not, return false so
+ // that assert.soon will retry.
+ ' assert("inprog" in current); ' +
+ ' if (current.inprog.length === 0) { ' +
+ ' jsTest.log("count10.js: did not find count op, retrying"); ' +
+ ' printjson(current); ' +
+ ' return false; ' +
+ ' } ' +
+ ' countOp = current.inprog[0]; ' +
+ ' if (!countOp) { ' +
+ ' jsTest.log("count10.js: did not find count op, retrying"); ' +
+ ' printjson(current); ' +
+ ' return false; ' +
+ ' } ' +
+
+ // Found the count op. Try to kill it.
+ ' jsTest.log("count10.js: found count op:"); ' +
+ ' printjson(current); ' +
+ ' printjson(db.killOp(countOp.opid)); ' +
+ ' return true; ' +
+ '}, "count10.js: could not find count op after retrying, gave up");');
function getKilledCount() {
try {
diff --git a/jstests/core/count11.js b/jstests/core/count11.js
index 14392b9d90c..4ce218bfc43 100644
--- a/jstests/core/count11.js
+++ b/jstests/core/count11.js
@@ -5,8 +5,12 @@ var t = db.count11;
t.drop();
-var validQuery = {a: 1};
-var invalidQuery = {a: {$invalid: 1}};
+var validQuery = {
+ a: 1
+};
+var invalidQuery = {
+ a: {$invalid: 1}
+};
// Query non-existing collection with empty query.
assert.eq(0, t.find().count());
@@ -16,11 +20,15 @@ assert.eq(0, t.find().itcount());
// Returns 0 on valid syntax query.
// Fails on invalid syntax query.
assert.eq(0, t.find(validQuery).count());
-assert.throws(function() { t.find(invalidQuery).count(); });
+assert.throws(function() {
+ t.find(invalidQuery).count();
+});
// Query existing collection.
// Returns 0 on valid syntax query.
// Fails on invalid syntax query.
assert.commandWorked(db.createCollection(t.getName()));
assert.eq(0, t.find(validQuery).count());
-assert.throws(function() { t.find(invalidQuery).count(); });
+assert.throws(function() {
+ t.find(invalidQuery).count();
+});
diff --git a/jstests/core/count2.js b/jstests/core/count2.js
index 29084306a2f..0999a27e9ff 100644
--- a/jstests/core/count2.js
+++ b/jstests/core/count2.js
@@ -1,28 +1,28 @@
t = db.count2;
t.drop();
-for ( var i=0; i<1000; i++ ){
- t.save( { num : i , m : i % 20 } );
+for (var i = 0; i < 1000; i++) {
+ t.save({num: i, m: i % 20});
}
-assert.eq( 1000 , t.count() , "A" );
-assert.eq( 1000 , t.find().count() , "B" );
-assert.eq( 1000 , t.find().toArray().length , "C" );
+assert.eq(1000, t.count(), "A");
+assert.eq(1000, t.find().count(), "B");
+assert.eq(1000, t.find().toArray().length, "C");
-assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" );
-assert.eq( 50 , t.find( { m : 5 } ).count() , "E" );
+assert.eq(50, t.find({m: 5}).toArray().length, "D");
+assert.eq(50, t.find({m: 5}).count(), "E");
-assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" );
-assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" );
-assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" );
+assert.eq(40, t.find({m: 5}).skip(10).toArray().length, "F");
+assert.eq(50, t.find({m: 5}).skip(10).count(), "G");
+assert.eq(40, t.find({m: 5}).skip(10).countReturn(), "H");
-assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" );
-assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" );
-assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" );
+assert.eq(20, t.find({m: 5}).skip(10).limit(20).toArray().length, "I");
+assert.eq(50, t.find({m: 5}).skip(10).limit(20).count(), "J");
+assert.eq(20, t.find({m: 5}).skip(10).limit(20).countReturn(), "K");
-assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" );
+assert.eq(5, t.find({m: 5}).skip(45).limit(20).countReturn(), "L");
// Negative skip values should return error
-var negSkipResult = db.runCommand({ count: 't', skip : -2 });
-assert( ! negSkipResult.ok , "negative skip value shouldn't work, n = " + negSkipResult.n );
-assert( negSkipResult.errmsg.length > 0 , "no error msg for negative skip" );
+var negSkipResult = db.runCommand({count: 't', skip: -2});
+assert(!negSkipResult.ok, "negative skip value shouldn't work, n = " + negSkipResult.n);
+assert(negSkipResult.errmsg.length > 0, "no error msg for negative skip");
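
The skip/limit assertions above hinge on a distinction worth keeping in mind: cursor.count() ignores skip and limit unless asked to apply them, whereas countReturn() (and count(true)) does apply them. Isolated, with an illustrative collection:

    var c2 = db.count_sketch;
    c2.drop();
    for (var i = 0; i < 10; i++) {
        c2.insert({i: i});
    }
    assert.eq(10, c2.find().skip(4).count());       // skip ignored by default
    assert.eq(6, c2.find().skip(4).count(true));    // skip applied
    assert.eq(6, c2.find().skip(4).countReturn());  // shell helper, same result here
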
diff --git a/jstests/core/count3.js b/jstests/core/count3.js
index a8c3ef5faad..d93df020f0b 100644
--- a/jstests/core/count3.js
+++ b/jstests/core/count3.js
@@ -3,24 +3,21 @@ t = db.count3;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 1 , b : 2 } );
+t.save({a: 1});
+t.save({a: 1, b: 2});
-assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" );
-assert.eq( 2 , t.find( { a : 1 } ).count() , "B" );
+assert.eq(2, t.find({a: 1}).itcount(), "A");
+assert.eq(2, t.find({a: 1}).count(), "B");
-assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" );
-assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" );
+assert.eq(2, t.find({a: 1}, {b: 1}).itcount(), "C");
+assert.eq(2, t.find({a: 1}, {b: 1}).count(), "D");
t.drop();
-t.save( { a : 1 } );
-
-assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" );
-assert.eq( 1 , t.find( { a : 1 } ).count() , "F" );
-
-assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" );
-assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" );
-
+t.save({a: 1});
+assert.eq(1, t.find({a: 1}).itcount(), "E");
+assert.eq(1, t.find({a: 1}).count(), "F");
+assert.eq(1, t.find({a: 1}, {b: 1}).itcount(), "G");
+assert.eq(1, t.find({a: 1}, {b: 1}).count(), "H");
diff --git a/jstests/core/count4.js b/jstests/core/count4.js
index 11a43afbb01..ac0b3d3491f 100644
--- a/jstests/core/count4.js
+++ b/jstests/core/count4.js
@@ -2,16 +2,18 @@
t = db.count4;
t.drop();
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
+for (i = 0; i < 100; i++) {
+ t.save({x: i});
}
-q = { x : { $gt : 25 , $lte : 75 } };
+q = {
+ x: {$gt: 25, $lte: 75}
+};
-assert.eq( 50 , t.find( q ).count() , "A" );
-assert.eq( 50 , t.find( q ).itcount() , "B" );
+assert.eq(50, t.find(q).count(), "A");
+assert.eq(50, t.find(q).itcount(), "B");
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-assert.eq( 50 , t.find( q ).count() , "C" );
-assert.eq( 50 , t.find( q ).itcount() , "D" );
+assert.eq(50, t.find(q).count(), "C");
+assert.eq(50, t.find(q).itcount(), "D");
diff --git a/jstests/core/count5.js b/jstests/core/count5.js
index 3d7cf04a27c..5a23fde5661 100644
--- a/jstests/core/count5.js
+++ b/jstests/core/count5.js
@@ -2,29 +2,31 @@
t = db.count5;
t.drop();
-for ( i=0; i<100; i++ ){
- t.save( { x : i } );
+for (i = 0; i < 100; i++) {
+ t.save({x: i});
}
-q = { x : { $gt : 25 , $lte : 75 } };
+q = {
+ x: {$gt: 25, $lte: 75}
+};
-assert.eq( 50 , t.find( q ).count() , "A" );
-assert.eq( 50 , t.find( q ).itcount() , "B" );
+assert.eq(50, t.find(q).count(), "A");
+assert.eq(50, t.find(q).itcount(), "B");
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-assert.eq( 50 , t.find( q ).count() , "C" );
-assert.eq( 50 , t.find( q ).itcount() , "D" );
+assert.eq(50, t.find(q).count(), "C");
+assert.eq(50, t.find(q).itcount(), "D");
-assert.eq( 50 , t.find( q ).limit(1).count() , "E" );
-assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" );
+assert.eq(50, t.find(q).limit(1).count(), "E");
+assert.eq(1, t.find(q).limit(1).itcount(), "F");
-assert.eq( 5 , t.find( q ).limit(5).size() , "G" );
-assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" );
-assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" );
+assert.eq(5, t.find(q).limit(5).size(), "G");
+assert.eq(5, t.find(q).skip(5).limit(5).size(), "H");
+assert.eq(2, t.find(q).skip(48).limit(5).size(), "I");
-assert.eq( 20 , t.find().limit(20).size() , "J" );
+assert.eq(20, t.find().limit(20).size(), "J");
-assert.eq( 0 , t.find().skip(120).size() , "K" );
-assert.eq( 1 , db.runCommand( { count: "count5" } )["ok"] , "L" );
-assert.eq( 1 , db.runCommand( { count: "count5", skip: 120 } )["ok"] , "M" );
+assert.eq(0, t.find().skip(120).size(), "K");
+assert.eq(1, db.runCommand({count: "count5"})["ok"], "L");
+assert.eq(1, db.runCommand({count: "count5", skip: 120})["ok"], "M");
diff --git a/jstests/core/count6.js b/jstests/core/count6.js
index 44c5fa33bc7..c1268f66047 100644
--- a/jstests/core/count6.js
+++ b/jstests/core/count6.js
@@ -2,60 +2,64 @@
t = db.jstests_count6;
-function checkCountForObject( obj ) {
+function checkCountForObject(obj) {
t.drop();
- t.ensureIndex( {b:1,a:1} );
-
- function checkCounts( query, expected ) {
- assert.eq( expected, t.count( query ) , "A1" );
- assert.eq( expected, t.find( query ).skip( 0 ).limit( 0 ).count( true ) , "A2" );
+ t.ensureIndex({b: 1, a: 1});
+
+ function checkCounts(query, expected) {
+ assert.eq(expected, t.count(query), "A1");
+ assert.eq(expected, t.find(query).skip(0).limit(0).count(true), "A2");
// Check proper counts with various skip and limit specs.
- for( var skip = 1; skip <= 2; ++skip ) {
- for( var limit = 1; limit <= 2; ++limit ) {
- assert.eq( Math.max( expected - skip, 0 ), t.find( query ).skip( skip ).count( true ) , "B1" );
- assert.eq( Math.min( expected, limit ), t.find( query ).limit( limit ).count( true ) , "B2" );
- assert.eq( Math.min( Math.max( expected - skip, 0 ), limit ), t.find( query ).skip( skip ).limit( limit ).count( true ) , "B4" );
+ for (var skip = 1; skip <= 2; ++skip) {
+ for (var limit = 1; limit <= 2; ++limit) {
+ assert.eq(Math.max(expected - skip, 0), t.find(query).skip(skip).count(true), "B1");
+ assert.eq(Math.min(expected, limit), t.find(query).limit(limit).count(true), "B2");
+ assert.eq(Math.min(Math.max(expected - skip, 0), limit),
+ t.find(query).skip(skip).limit(limit).count(true),
+ "B4");
// Check limit(x) = limit(-x)
- assert.eq( t.find( query ).limit( limit ).count( true ),
- t.find( query ).limit( -limit ).count( true ) , "C1" );
- assert.eq( t.find( query ).skip( skip ).limit( limit ).count( true ),
- t.find( query ).skip( skip ).limit( -limit ).count( true ) , "C2" );
+ assert.eq(t.find(query).limit(limit).count(true),
+ t.find(query).limit(-limit).count(true),
+ "C1");
+ assert.eq(t.find(query).skip(skip).limit(limit).count(true),
+ t.find(query).skip(skip).limit(-limit).count(true),
+ "C2");
}
}
// Check limit(0) has no effect
- assert.eq( expected, t.find( query ).limit( 0 ).count( true ) , "D1" );
- assert.eq( Math.max( expected - skip, 0 ),
- t.find( query ).skip( skip ).limit( 0 ).count( true ) , "D2" );
- assert.eq( expected, t.getDB().runCommand({ count: t.getName(),
- query: query, limit: 0 }).n , "D3" );
- assert.eq( Math.max( expected - skip, 0 ),
- t.getDB().runCommand({ count: t.getName(),
- query: query, limit: 0, skip: skip }).n , "D4" );
+ assert.eq(expected, t.find(query).limit(0).count(true), "D1");
+ assert.eq(
+ Math.max(expected - skip, 0), t.find(query).skip(skip).limit(0).count(true), "D2");
+ assert.eq(
+ expected, t.getDB().runCommand({count: t.getName(), query: query, limit: 0}).n, "D3");
+ assert.eq(Math.max(expected - skip, 0),
+ t.getDB().runCommand({count: t.getName(), query: query, limit: 0, skip: skip}).n,
+ "D4");
}
- for( var i = 0; i < 5; ++i ) {
- checkCounts( {a:obj.a,b:obj.b}, i );
- checkCounts( {b:obj.b,a:obj.a}, i );
- t.insert( obj );
+ for (var i = 0; i < 5; ++i) {
+ checkCounts({a: obj.a, b: obj.b}, i);
+ checkCounts({b: obj.b, a: obj.a}, i);
+ t.insert(obj);
}
- t.insert( {a:true,b:true} );
- t.insert( {a:true,b:1} );
- t.insert( {a:false,b:1} );
- t.insert( {a:false,b:true} );
- t.insert( {a:false,b:false} );
+ t.insert({a: true, b: true});
+ t.insert({a: true, b: 1});
+ t.insert({a: false, b: 1});
+ t.insert({a: false, b: true});
+ t.insert({a: false, b: false});
- checkCounts( {a:obj.a,b:obj.b}, i );
- checkCounts( {b:obj.b,a:obj.a}, i );
+ checkCounts({a: obj.a, b: obj.b}, i);
+ checkCounts({b: obj.b, a: obj.a}, i);
// Check with no query
- checkCounts( {}, 10 );
+ checkCounts({}, 10);
}
// Check fast count mode.
-checkCountForObject( {a:true,b:false} );
+checkCountForObject({a: true, b: false});
// Check normal count mode.
-checkCountForObject( {a:1,b:0} );
+checkCountForObject({a: 1, b: 0});
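
Whether a particular count runs in the "fast" (index-only) or "normal" (document-fetching) mode can be observed with an explained count; a sketch, assuming the explain() shell helper of this era and an illustrative collection name:

    var c6 = db.count6_sketch;
    c6.drop();
    c6.ensureIndex({b: 1, a: 1});
    c6.insert({a: true, b: false});
    printjson(c6.explain().count({a: true, b: false}).queryPlanner.winningPlan);
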
diff --git a/jstests/core/count7.js b/jstests/core/count7.js
index c2c1260d49b..8a3255d712a 100644
--- a/jstests/core/count7.js
+++ b/jstests/core/count7.js
@@ -3,23 +3,23 @@
t = db.jstests_count7;
t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:'algebra'} );
-t.save( {a:'apple'} );
-t.save( {a:'azores'} );
-t.save( {a:'bumper'} );
-t.save( {a:'supper'} );
-t.save( {a:'termite'} );
-t.save( {a:'zeppelin'} );
-t.save( {a:'ziggurat'} );
-t.save( {a:'zope'} );
+t.ensureIndex({a: 1});
+t.save({a: 'algebra'});
+t.save({a: 'apple'});
+t.save({a: 'azores'});
+t.save({a: 'bumper'});
+t.save({a: 'supper'});
+t.save({a: 'termite'});
+t.save({a: 'zeppelin'});
+t.save({a: 'ziggurat'});
+t.save({a: 'zope'});
-assert.eq( 5, t.count( {a:/p/} ) );
+assert.eq(5, t.count({a: /p/}));
t.remove({});
-t.save( {a:[1,2,3]} );
-t.save( {a:[1,2,3]} );
-t.save( {a:[1]} );
+t.save({a: [1, 2, 3]});
+t.save({a: [1, 2, 3]});
+t.save({a: [1]});
-assert.eq( 2, t.count( {a:{$gt:1}} ) );
+assert.eq(2, t.count({a: {$gt: 1}}));
diff --git a/jstests/core/count9.js b/jstests/core/count9.js
index 888ffe3b544..517322a9310 100644
--- a/jstests/core/count9.js
+++ b/jstests/core/count9.js
@@ -3,26 +3,26 @@
t = db.jstests_count9;
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-t.save( {a:['a','b','a']} );
-assert.eq( 1, t.count( {a:'a'} ) );
+t.save({a: ['a', 'b', 'a']});
+assert.eq(1, t.count({a: 'a'}));
-t.save( {a:['a','b','a']} );
-assert.eq( 2, t.count( {a:'a'} ) );
+t.save({a: ['a', 'b', 'a']});
+assert.eq(2, t.count({a: 'a'}));
t.drop();
-t.ensureIndex( {a:1,b:1} );
+t.ensureIndex({a: 1, b: 1});
-t.save( {a:['a','b','a'],b:'r'} );
-assert.eq( 1, t.count( {a:'a',b:'r'} ) );
-assert.eq( 1, t.count( {a:'a'} ) );
+t.save({a: ['a', 'b', 'a'], b: 'r'});
+assert.eq(1, t.count({a: 'a', b: 'r'}));
+assert.eq(1, t.count({a: 'a'}));
-t.save( {a:['a','b','a'],b:'r'} );
-assert.eq( 2, t.count( {a:'a',b:'r'} ) );
-assert.eq( 2, t.count( {a:'a'} ) );
+t.save({a: ['a', 'b', 'a'], b: 'r'});
+assert.eq(2, t.count({a: 'a', b: 'r'}));
+assert.eq(2, t.count({a: 'a'}));
t.drop();
-t.ensureIndex( {'a.b':1,'a.c':1} );
-t.save( {a:[{b:'b',c:'c'},{b:'b',c:'c'}]} );
-assert.eq( 1, t.count( {'a.b':'b','a.c':'c'} ) );
+t.ensureIndex({'a.b': 1, 'a.c': 1});
+t.save({a: [{b: 'b', c: 'c'}, {b: 'b', c: 'c'}]});
+assert.eq(1, t.count({'a.b': 'b', 'a.c': 'c'}));
diff --git a/jstests/core/count_plan_summary.js b/jstests/core/count_plan_summary.js
index a822c7666b5..48891d21e8e 100644
--- a/jstests/core/count_plan_summary.js
+++ b/jstests/core/count_plan_summary.js
@@ -10,9 +10,8 @@ for (var i = 0; i < 1000; i++) {
// Mock a long-running count operation by sleeping for each of
// the documents in the collection.
-var awaitShell = startParallelShell(
- "db.jstests_count_plan_summary.find({x: 1, $where: 'sleep(100)'}).count()"
-);
+var awaitShell =
+ startParallelShell("db.jstests_count_plan_summary.find({x: 1, $where: 'sleep(100)'}).count()");
// Find the count op in db.currentOp() and check for the plan summary.
assert.soon(function() {
diff --git a/jstests/core/counta.js b/jstests/core/counta.js
index eadec40f4a1..c65df230c72 100644
--- a/jstests/core/counta.js
+++ b/jstests/core/counta.js
@@ -3,17 +3,24 @@
t = db.jstests_counta;
t.drop();
-for( i = 0; i < 10; ++i ) {
- t.save( {a:i} );
+for (i = 0; i < 10; ++i) {
+ t.save({a: i});
}
-// f() is undefined, causing an assertion
-assert.throws(
- function(){
- t.count( { $where:function() { if ( this.a < 5 ) { return true; } else { f(); } } } );
- } );
+// f() is undefined, causing an assertion
+assert.throws(function() {
+ t.count({
+ $where: function() {
+ if (this.a < 5) {
+ return true;
+ } else {
+ f();
+ }
+ }
+ });
+});
// count must return error if collection name is absent
-res=db.runCommand("count");
-assert.eq(res.ok, 0); // must not be OK
-assert(res.code == 2); // should fail with errorcode("BadValue"), not an massert
+res = db.runCommand("count");
+assert.eq(res.ok, 0); // must not be OK
+assert(res.code == 2); // should fail with errorcode("BadValue"), not an massert
diff --git a/jstests/core/countb.js b/jstests/core/countb.js
index 8f7131a5a6c..a36378009c0 100644
--- a/jstests/core/countb.js
+++ b/jstests/core/countb.js
@@ -3,9 +3,9 @@
t = db.jstests_countb;
t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:['a','b']} );
-assert.eq( 0, t.find( {a:{$in:['a'],$gt:'b'}} ).count() );
-assert.eq( 0, t.find( {$and:[{a:'a'},{a:{$gt:'b'}}]} ).count() );
-assert.eq( 1, t.find( {$and:[{a:'a'},{$where:"this.a[1]=='b'"}]} ).count() );
-assert.eq( 0, t.find( {$and:[{a:'a'},{$where:"this.a[1]!='b'"}]} ).count() );
+t.ensureIndex({a: 1});
+t.save({a: ['a', 'b']});
+assert.eq(0, t.find({a: {$in: ['a'], $gt: 'b'}}).count());
+assert.eq(0, t.find({$and: [{a: 'a'}, {a: {$gt: 'b'}}]}).count());
+assert.eq(1, t.find({$and: [{a: 'a'}, {$where: "this.a[1]=='b'"}]}).count());
+assert.eq(0, t.find({$and: [{a: 'a'}, {$where: "this.a[1]!='b'"}]}).count());
diff --git a/jstests/core/countc.js b/jstests/core/countc.js
index 260dbb1f264..ea4aed54903 100644
--- a/jstests/core/countc.js
+++ b/jstests/core/countc.js
@@ -7,118 +7,109 @@
t = db.jstests_countc;
t.drop();
-
// Match a subset of inserted values within a $in operator.
t.drop();
-t.ensureIndex( { a:1 } );
+t.ensureIndex({a: 1});
// Save 'a' values 0, 0.5, 1.5, 2.5 ... 97.5, 98.5, 99.
-t.save( { a:0 } );
-t.save( { a:99 } );
-for( i = 0; i < 99; ++i ) {
- t.save( { a:( i + 0.5 ) } );
+t.save({a: 0});
+t.save({a: 99});
+for (i = 0; i < 99; ++i) {
+ t.save({a: (i + 0.5)});
}
// Query 'a' values $in 0, 1, 2, ..., 99.
vals = [];
-for( i = 0; i < 100; ++i ) {
- vals.push( i );
+for (i = 0; i < 100; ++i) {
+ vals.push(i);
}
// Only values 0 and 99 of the $in set are present in the collection, so the expected count is 2.
-assert.eq( 2, t.count( { a:{ $in:vals } } ) );
-
+assert.eq(2, t.count({a: {$in: vals}}));
// Match 'a' values within upper and lower limits.
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:[ 1, 2 ] } ); // Will match because 'a' is in range.
-t.save( { a:9 } ); // Will not match because 'a' is not in range.
+t.ensureIndex({a: 1});
+t.save({a: [1, 2]}); // Will match because 'a' is in range.
+t.save({a: 9}); // Will not match because 'a' is not in range.
// Only one document matches.
-assert.eq( 1, t.count( { a:{ $gt:0, $lt:5 } } ) );
-
+assert.eq(1, t.count({a: {$gt: 0, $lt: 5}}));
// Match two nested fields within an array.
t.drop();
-t.ensureIndex( { 'a.b':1, 'a.c':1 } );
-t.save( { a:[ { b:2, c:3 }, {} ] } );
+t.ensureIndex({'a.b': 1, 'a.c': 1});
+t.save({a: [{b: 2, c: 3}, {}]});
// The document does not match because its c value is 3.
-assert.eq( 0, t.count( { 'a.b':2, 'a.c':2 } ) );
-
+assert.eq(0, t.count({'a.b': 2, 'a.c': 2}));
// $gt:string only matches strings.
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:'a' } ); // Will match.
-t.save( { a:{} } ); // Will not match because {} is not a string.
+t.ensureIndex({a: 1});
+t.save({a: 'a'}); // Will match.
+t.save({a: {}}); // Will not match because {} is not a string.
// Only one document matches.
-assert.eq( 1, t.count( { a:{ $gte:'' } } ) );
-
+assert.eq(1, t.count({a: {$gte: ''}}));
// $lte:date only matches dates.
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:new Date( 1 ) } ); // Will match.
-t.save( { a:true } ); // Will not match because 'true' is not a date.
+t.ensureIndex({a: 1});
+t.save({a: new Date(1)}); // Will match.
+t.save({a: true}); // Will not match because 'true' is not a date.
// Only one document matches.
-assert.eq( 1, t.count( { a:{ $lte:new Date( 1 ) } } ) );
-
+assert.eq(1, t.count({a: {$lte: new Date(1)}}));
// Querying for 'undefined' triggers an error.
t.drop();
-t.ensureIndex( { a:1 } );
-assert.throws( function() { t.count( { a:undefined } ); } );
-
+t.ensureIndex({a: 1});
+assert.throws(function() {
+ t.count({a: undefined});
+});
// Count using a descending order index.
t.drop();
-t.ensureIndex( { a:-1 } );
-t.save( { a:1 } );
-t.save( { a:2 } );
-t.save( { a:3 } );
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:{ $lte:2 } } ) );
-assert.eq( 2, t.count( { a:{ $lt:3 } } ) );
-
+t.ensureIndex({a: -1});
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
+assert.eq(1, t.count({a: {$gt: 2}}));
+assert.eq(1, t.count({a: {$lt: 2}}));
+assert.eq(2, t.count({a: {$lte: 2}}));
+assert.eq(2, t.count({a: {$lt: 3}}));
// Count using a compound index.
t.drop();
-t.ensureIndex( { a:1, b:1 } );
-t.save( { a:1, b:2 } );
-t.save( { a:2, b:1 } );
-t.save( { a:2, b:3 } );
-t.save( { a:3, b:4 } );
-assert.eq( 1, t.count( { a:1 }));
-assert.eq( 2, t.count( { a:2 }));
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
-assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
-assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
-
+t.ensureIndex({a: 1, b: 1});
+t.save({a: 1, b: 2});
+t.save({a: 2, b: 1});
+t.save({a: 2, b: 3});
+t.save({a: 3, b: 4});
+assert.eq(1, t.count({a: 1}));
+assert.eq(2, t.count({a: 2}));
+assert.eq(1, t.count({a: {$gt: 2}}));
+assert.eq(1, t.count({a: {$lt: 2}}));
+assert.eq(2, t.count({a: 2, b: {$gt: 0}}));
+assert.eq(1, t.count({a: 2, b: {$lt: 3}}));
+assert.eq(1, t.count({a: 1, b: {$lt: 3}}));
// Count using a compound descending order index.
t.drop();
-t.ensureIndex( { a:1, b:-1 } );
-t.save( { a:1, b:2 } );
-t.save( { a:2, b:1 } );
-t.save( { a:2, b:3 } );
-t.save( { a:3, b:4 } );
-assert.eq( 1, t.count( { a:{ $gt:2 } } ) );
-assert.eq( 1, t.count( { a:{ $lt:2 } } ) );
-assert.eq( 2, t.count( { a:2, b:{ $gt:0 } } ) );
-assert.eq( 1, t.count( { a:2, b:{ $lt:3 } } ) );
-assert.eq( 1, t.count( { a:1, b:{ $lt:3 } } ) );
-
+t.ensureIndex({a: 1, b: -1});
+t.save({a: 1, b: 2});
+t.save({a: 2, b: 1});
+t.save({a: 2, b: 3});
+t.save({a: 3, b: 4});
+assert.eq(1, t.count({a: {$gt: 2}}));
+assert.eq(1, t.count({a: {$lt: 2}}));
+assert.eq(2, t.count({a: 2, b: {$gt: 0}}));
+assert.eq(1, t.count({a: 2, b: {$lt: 3}}));
+assert.eq(1, t.count({a: 1, b: {$lt: 3}}));
// Count with a multikey value.
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:[ 1, 2 ] } );
-assert.eq( 1, t.count( { a:{ $gt:0, $lte:2 } } ) );
-
+t.ensureIndex({a: 1});
+t.save({a: [1, 2]});
+assert.eq(1, t.count({a: {$gt: 0, $lte: 2}}));
// Count with a match constraint on an unindexed field.
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { a:1, b:1 } );
-t.save( { a:1, b:2 } );
-assert.eq( 1, t.count( { a:1, $where:'this.b == 1' } ) );
+t.ensureIndex({a: 1});
+t.save({a: 1, b: 1});
+t.save({a: 1, b: 2});
+assert.eq(1, t.count({a: 1, $where: 'this.b == 1'}));
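
The last case above pairs an indexed equality with a $where JavaScript predicate. A minimal sketch of that pattern, with an illustrative collection name: the index narrows the candidate documents and the $where expression is then evaluated against each remaining one.

var cnt = db.count_where_sketch;  // placeholder collection
cnt.drop();
cnt.ensureIndex({a: 1});
cnt.save({a: 1, b: 1});
cnt.save({a: 1, b: 2});
// Index scan on a:1 picks both documents; the JS predicate keeps only b == 1.
assert.eq(1, cnt.count({a: 1, $where: 'this.b == 1'}));
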
diff --git a/jstests/core/coveredIndex1.js b/jstests/core/coveredIndex1.js
index 434988b3b30..328c53ba0bc 100644
--- a/jstests/core/coveredIndex1.js
+++ b/jstests/core/coveredIndex1.js
@@ -11,78 +11,76 @@ t.save({fn: "john", ln: "smith"});
t.save({fn: "jack", ln: "black"});
t.save({fn: "bob", ln: "murray"});
t.save({fn: "aaa", ln: "bbb", obj: {a: 1, b: "blah"}});
-assert.eq( t.findOne({ln: "doe"}).fn, "john", "Cannot find right record" );
-assert.eq( t.count(), 6, "Not right length" );
+assert.eq(t.findOne({ln: "doe"}).fn, "john", "Cannot find right record");
+assert.eq(t.count(), 6, "Not right length");
// use simple index
t.ensureIndex({ln: 1});
-assert( !isIndexOnly(t.find({ln: "doe"}).explain().queryPlanner.winningPlan),
- "Find using covered index but all fields are returned");
-assert( !isIndexOnly(t.find({ln: "doe"}, {ln: 1}).explain().queryPlanner.winningPlan),
- "Find using covered index but _id is returned");
-assert( isIndexOnly(t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().queryPlanner.winningPlan),
- "Find is not using covered index");
+assert(!isIndexOnly(t.find({ln: "doe"}).explain().queryPlanner.winningPlan),
+ "Find using covered index but all fields are returned");
+assert(!isIndexOnly(t.find({ln: "doe"}, {ln: 1}).explain().queryPlanner.winningPlan),
+ "Find using covered index but _id is returned");
+assert(isIndexOnly(t.find({ln: "doe"}, {ln: 1, _id: 0}).explain().queryPlanner.winningPlan),
+ "Find is not using covered index");
// this time, without a query spec
// SERVER-2109
-//assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered index");
-assert( isIndexOnly(t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().queryPlanner.winningPlan),
- "Find is not using covered index");
+// assert.eq( t.find({}, {ln: 1, _id: 0}).explain().indexOnly, true, "Find is not using covered
+// index");
+assert(isIndexOnly(t.find({}, {ln: 1, _id: 0}).hint({ln: 1}).explain().queryPlanner.winningPlan),
+ "Find is not using covered index");
// use compound index
t.dropIndex({ln: 1});
t.ensureIndex({ln: 1, fn: 1});
// return 1 field
var plan = t.find({ln: "doe"}, {ln: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// return both fields, multiple docs returned
var plan = t.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// match 1 record using both fields
var plan = t.find({ln: "doe", fn: "john"}, {ln: 1, fn: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// change ordering
var plan = t.find({fn: "john", ln: "doe"}, {fn: 1, ln: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// ask from 2nd index key
var plan = t.find({fn: "john"}, {fn: 1, _id: 0}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Find is using covered index, but doesnt have 1st key");
+assert(!isIndexOnly(plan.queryPlanner.winningPlan),
+ "Find is using covered index, but doesnt have 1st key");
// repeat above but with _id field
t.dropIndex({ln: 1, fn: 1});
t.ensureIndex({_id: 1, ln: 1});
// return 1 field
var plan = t.find({_id: 123, ln: "doe"}, {_id: 1}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// match 1 record using both fields
var plan = t.find({_id: 123, ln: "doe"}, {ln: 1}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// change ordering
var plan = t.find({ln: "doe", _id: 123}, {ln: 1, _id: 1}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// ask from 2nd index key
var plan = t.find({ln: "doe"}, {ln: 1}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Find is using covered index, but doesnt have 1st key");
+assert(!isIndexOnly(plan.queryPlanner.winningPlan),
+ "Find is using covered index, but doesnt have 1st key");
// repeat above but with embedded obj
t.dropIndex({_id: 1, ln: 1});
t.ensureIndex({obj: 1});
var plan = t.find({"obj.a": 1}, {obj: 1}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Shouldnt use index when introspecting object");
+assert(!isIndexOnly(plan.queryPlanner.winningPlan), "Shouldnt use index when introspecting object");
var plan = t.find({obj: {a: 1, b: "blah"}}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan), "Index doesnt have all fields to cover");
+assert(!isIndexOnly(plan.queryPlanner.winningPlan), "Index doesnt have all fields to cover");
var plan = t.find({obj: {a: 1, b: "blah"}}, {obj: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// repeat above but with index on sub obj field
t.dropIndex({obj: 1});
t.ensureIndex({"obj.a": 1, "obj.b": 1});
var plan = t.find({"obj.a": 1}, {obj: 1}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Shouldnt use index when introspecting object");
+assert(!isIndexOnly(plan.queryPlanner.winningPlan), "Shouldnt use index when introspecting object");
assert(t.validate().valid);
-
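
The coveredIndex1.js assertions above all follow one pattern: run explain() and check the winning plan with the isIndexOnly() helper loaded from jstests/libs/analyze_plan.js. A minimal sketch of that pattern, with an illustrative collection name; a query is covered only when the projection excludes _id and touches nothing outside the index keys.

load("jstests/libs/analyze_plan.js");
var c = db.covered_sketch;  // placeholder collection
c.drop();
c.insert({ln: "doe", fn: "john"});
c.ensureIndex({ln: 1, fn: 1});

// Projection excludes _id and only names indexed fields, so the winning plan
// should be index-only (no FETCH stage).
var covered = c.find({ln: "doe"}, {ln: 1, fn: 1, _id: 0}).explain();
assert(isIndexOnly(covered.queryPlanner.winningPlan));

// Keeping the implicit _id in the projection forces a fetch, so the same query is not covered.
var notCovered = c.find({ln: "doe"}, {ln: 1, fn: 1}).explain();
assert(!isIndexOnly(notCovered.queryPlanner.winningPlan));
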
diff --git a/jstests/core/coveredIndex2.js b/jstests/core/coveredIndex2.js
index 0f26037bf43..f7b542008f3 100644
--- a/jstests/core/coveredIndex2.js
+++ b/jstests/core/coveredIndex2.js
@@ -6,23 +6,21 @@ load("jstests/libs/analyze_plan.js");
t.save({a: 1});
t.save({a: 2});
-assert.eq( t.findOne({a: 1}).a, 1, "Cannot find right record" );
-assert.eq( t.count(), 2, "Not right length" );
+assert.eq(t.findOne({a: 1}).a, 1, "Cannot find right record");
+assert.eq(t.count(), 2, "Not right length");
// use simple index
t.ensureIndex({a: 1});
-var plan = t.find({a:1}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Find using covered index but all fields are returned");
-var plan = t.find({a:1}, {a: 1}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Find using covered index but _id is returned");
-var plan = t.find({a:1}, {a: 1, _id: 0}).explain();
-assert( isIndexOnly(plan.queryPlanner.winningPlan),
- "Find is not using covered index");
+var plan = t.find({a: 1}).explain();
+assert(!isIndexOnly(plan.queryPlanner.winningPlan),
+ "Find using covered index but all fields are returned");
+var plan = t.find({a: 1}, {a: 1}).explain();
+assert(!isIndexOnly(plan.queryPlanner.winningPlan), "Find using covered index but _id is returned");
+var plan = t.find({a: 1}, {a: 1, _id: 0}).explain();
+assert(isIndexOnly(plan.queryPlanner.winningPlan), "Find is not using covered index");
// add multikey
-t.save({a:[3,4]});
-var plan = t.find({a:1}, {a: 1, _id: 0}).explain();
-assert( !isIndexOnly(plan.queryPlanner.winningPlan),
- "Find is using covered index even after multikey insert");
+t.save({a: [3, 4]});
+var plan = t.find({a: 1}, {a: 1, _id: 0}).explain();
+assert(!isIndexOnly(plan.queryPlanner.winningPlan),
+ "Find is using covered index even after multikey insert");
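
coveredIndex2.js turns on the multikey case. A sketch of the same behaviour under the same assumptions (analyze_plan.js loaded, placeholder collection name): once any document stores an array in the indexed field, the index is multikey and the projection can no longer be served from index keys alone.

load("jstests/libs/analyze_plan.js");
var mk = db.covered_multikey_sketch;  // placeholder collection
mk.drop();
mk.ensureIndex({a: 1});
mk.insert({a: 1});
assert(isIndexOnly(mk.find({a: 1}, {a: 1, _id: 0}).explain().queryPlanner.winningPlan));
mk.insert({a: [3, 4]});  // makes the {a: 1} index multikey
assert(!isIndexOnly(mk.find({a: 1}, {a: 1, _id: 0}).explain().queryPlanner.winningPlan));
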
diff --git a/jstests/core/coveredIndex3.js b/jstests/core/coveredIndex3.js
index 66180342605..4bfedda888b 100644
--- a/jstests/core/coveredIndex3.js
+++ b/jstests/core/coveredIndex3.js
@@ -1,54 +1,49 @@
// Check proper covered index handling when query and processGetMore yield.
// SERVER-4975
-if ( 0 ) { // SERVER-4975
-
-t = db.jstests_coveredIndex3;
-t2 = db.jstests_coveredIndex3_other;
-t.drop();
-t2.drop();
-
-function doTest( batchSize ) {
-
- // Insert an array, which will make the { a:1 } index multikey and should disable covered index
- // matching.
- p1 = startParallelShell(
- 'for( i = 0; i < 60; ++i ) { \
+if (0) { // SERVER-4975
+
+ t = db.jstests_coveredIndex3;
+ t2 = db.jstests_coveredIndex3_other;
+ t.drop();
+ t2.drop();
+
+ function doTest(batchSize) {
+        // Insert an array, which will make the { a:1 } index multikey and should disable
+        // covered index matching.
+        // Insert an array, which will make the { a:1 } index multikey and should disable
+        // covered index matching.
+ p1 = startParallelShell('for( i = 0; i < 60; ++i ) { \
db.jstests_coveredIndex3.save( { a:[ 2000, 2001 ] } ); \
sleep( 300 ); \
- }'
- );
+ }');
- // Frequent writes cause the find operation to yield.
- p2 = startParallelShell(
- 'for( i = 0; i < 1800; ++i ) { \
+ // Frequent writes cause the find operation to yield.
+ p2 = startParallelShell('for( i = 0; i < 1800; ++i ) { \
db.jstests_coveredIndex3_other.save( {} ); \
sleep( 10 ); \
- }'
- );
-
- for( i = 0; i < 30; ++i ) {
- t.drop();
- t.ensureIndex( { a:1 } );
-
- for( j = 0; j < 1000; ++j ) {
- t.save( { a:j } );
+ }');
+
+ for (i = 0; i < 30; ++i) {
+ t.drop();
+ t.ensureIndex({a: 1});
+
+ for (j = 0; j < 1000; ++j) {
+ t.save({a: j});
+ }
+
+ c = t.find({}, {_id: 0, a: 1}).hint({a: 1}).batchSize(batchSize);
+ while (c.hasNext()) {
+ o = c.next();
+ // If o contains a high numeric 'a' value, it must come from an array saved in p1.
+ assert(!(o.a > 1500), 'improper object returned ' + tojson(o));
+ }
}
-
- c = t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).batchSize( batchSize );
- while( c.hasNext() ) {
- o = c.next();
- // If o contains a high numeric 'a' value, it must come from an array saved in p1.
- assert( !( o.a > 1500 ), 'improper object returned ' + tojson( o ) );
- }
- }
- p1();
- p2();
-
-}
-
-doTest( 2000 ); // Test query.
-doTest( 500 ); // Try to test getMore - not clear if this will actually trigger the getMore issue.
+ p1();
+ p2();
+ }
+ doTest(2000); // Test query.
+ doTest(
+ 500); // Try to test getMore - not clear if this will actually trigger the getMore issue.
}
diff --git a/jstests/core/coveredIndex4.js b/jstests/core/coveredIndex4.js
index 136eba603cf..e7c6cc93a76 100644
--- a/jstests/core/coveredIndex4.js
+++ b/jstests/core/coveredIndex4.js
@@ -4,37 +4,36 @@
t = db.jstests_coveredIndex4;
t.drop();
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
orClause = [];
-for( i = 0; i < 200; ++i ) {
- if ( i % 2 == 0 ) {
- t.save( { a:i } );
- orClause.push( { a:i } );
- }
- else {
- t.save( { b:i } );
- orClause.push( { b:i } );
+for (i = 0; i < 200; ++i) {
+ if (i % 2 == 0) {
+ t.save({a: i});
+ orClause.push({a: i});
+ } else {
+ t.save({b: i});
+ orClause.push({b: i});
}
}
-c = t.find( { $or:orClause }, { _id:0, a:1 } );
+c = t.find({$or: orClause}, {_id: 0, a: 1});
// No odd values of a were saved, so we should not see any in the results.
-while( c.hasNext() ) {
+while (c.hasNext()) {
o = c.next();
- if ( o.a ) {
- assert.eq( 0, o.a % 2, 'unexpected result: ' + tojson( o ) );
+ if (o.a) {
+ assert.eq(0, o.a % 2, 'unexpected result: ' + tojson(o));
}
}
-c = t.find( { $or:orClause }, { _id:0, b:1 } );
+c = t.find({$or: orClause}, {_id: 0, b: 1});
// No even values of b were saved, so we should not see any in the results.
-while( c.hasNext() ) {
+while (c.hasNext()) {
o = c.next();
- if ( o.b ) {
- assert.eq( 1, o.b % 2, 'unexpected result: ' + tojson( o ) );
+ if (o.b) {
+ assert.eq(1, o.b % 2, 'unexpected result: ' + tojson(o));
}
}
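
For the $or test above, the "if (o.a)" guard matters because documents saved with only 'b' project to an empty object under {_id: 0, a: 1}. A small sketch of that behaviour; the collection name is illustrative.

var s = db.covered_or_sketch;  // placeholder collection
s.drop();
s.ensureIndex({a: 1});
s.ensureIndex({b: 1});
s.save({a: 2});
s.save({b: 3});
// The second document has no 'a', so it comes back as {} and the parity check is skipped.
s.find({$or: [{a: 2}, {b: 3}]}, {_id: 0, a: 1}).forEach(function(o) {
    if (o.a) {
        assert.eq(0, o.a % 2, 'unexpected result: ' + tojson(o));
    }
});
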
diff --git a/jstests/core/covered_index_compound_1.js b/jstests/core/covered_index_compound_1.js
index ffe01646ba7..45f17fd7d44 100644
--- a/jstests/core/covered_index_compound_1.js
+++ b/jstests/core/covered_index_compound_1.js
@@ -5,70 +5,77 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_compound_1");
coll.drop();
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)});
+for (i = 0; i < 100; i++) {
+ coll.insert({a: i, b: "strvar_" + (i % 13), c: NumberInt(i % 10)});
}
-coll.ensureIndex({a:1,b:-1,c:1});
+coll.ensureIndex({a: 1, b: -1, c: 1});
// Test equality - all indexed fields queried and projected
-var plan = coll.find({a:10, b:"strvar_10", c:0}, {a:1, b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 10, b: "strvar_10", c: 0}, {a: 1, b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
- "compound.1.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+ "compound.1.1 - indexOnly should be true on covered query");
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.1 - nscannedObjects should be 0 for covered query");
// Test query on subset of fields queried and project all
-var plan = coll.find({a:26, b:"strvar_0"}, {a:1, b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 26, b: "strvar_0"}, {a: 1, b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.2 - nscannedObjects should be 0 for covered query");
// Test query on all fields queried and project subset
-var plan = coll.find({a:38, b:"strvar_12", c: 8}, {b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 38, b: "strvar_12", c: 8}, {b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.3 - nscannedObjects should be 0 for covered query");
// Test no query
-var plan = coll.find({}, {b:1, c:1, _id:0}).hint({a:1, b:-1, c:1}).explain("executionStats");
+var plan = coll.find({}, {b: 1, c: 1, _id: 0}).hint({a: 1, b: -1, c: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.4 - nscannedObjects should be 0 for covered query");
// Test range query
-var plan = coll.find({a:{$gt:25,$lt:43}}, {b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: {$gt: 25, $lt: 43}}, {b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.5 - nscannedObjects should be 0 for covered query");
// Test in query
-var plan = coll.find({a:38, b:"strvar_12", c:{$in:[5,8]}}, {b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 38, b: "strvar_12", c: {$in: [5, 8]}}, {b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.6 - nscannedObjects should be 0 for covered query");
// Test no result
-var plan = coll.find({a:38, b:"strvar_12", c:55},{a:1, b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 38, b: "strvar_12", c: 55}, {a: 1, b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"compound.1.7 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"compound.1.7 - nscannedObjects should be 0 for covered query");
print('all tests passed');
diff --git a/jstests/core/covered_index_negative_1.js b/jstests/core/covered_index_negative_1.js
index 8afd03f2a58..37a9b4dc3bb 100644
--- a/jstests/core/covered_index_negative_1.js
+++ b/jstests/core/covered_index_negative_1.js
@@ -8,72 +8,84 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_negative_1");
coll.drop();
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10), d: i*10, e: [i, i%10],
- f:i});
+for (i = 0; i < 100; i++) {
+ coll.insert(
+ {a: i, b: "strvar_" + (i % 13), c: NumberInt(i % 10), d: i * 10, e: [i, i % 10], f: i});
}
-coll.ensureIndex({a:1,b:-1,c:1});
-coll.ensureIndex({e:1});
-coll.ensureIndex({d:1});
-coll.ensureIndex({f:"hashed"});
+coll.ensureIndex({a: 1, b: -1, c: 1});
+coll.ensureIndex({e: 1});
+coll.ensureIndex({d: 1});
+coll.ensureIndex({f: "hashed"});
// Test no projection
-var plan = coll.find({a:10, b:"strvar_10", c:0}).hint({a:1, b:-1, c:1}).explain("executionStats");
+var plan =
+ coll.find({a: 10, b: "strvar_10", c: 0}).hint({a: 1, b: -1, c: 1}).explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.1 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.1 - docs examined should not be 0 for a non covered query");
// Test projection and not excluding _id
-var plan = coll.find({a:10, b:"strvar_10", c:0},{a:1, b:1, c:1})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({a: 10, b: "strvar_10", c: 0}, {a: 1, b: 1, c: 1})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.2 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.2 - docs examined should not be 0 for a non covered query");
// Test projection of non-indexed field
-var plan = coll.find({d:100},{d:1, c:1, _id:0}).hint({d:1}).explain("executionStats");
+var plan = coll.find({d: 100}, {d: 1, c: 1, _id: 0}).hint({d: 1}).explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.3 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.3 - docs examined should not be 0 for a non covered query");
// Test query and projection on a multi-key index
-var plan = coll.find({e:99},{e:1, _id:0}).hint({e:1}).explain("executionStats");
+var plan = coll.find({e: 99}, {e: 1, _id: 0}).hint({e: 1}).explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.4 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.4 - docs examined should not be 0 for a non covered query");
// Commenting out negative.1.5 and 1.6 pending fix in SERVER-8650
// // Test projection and $natural sort
-// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, b:-1, c:1}).explain()
+// var plan = coll.find({a:{$gt:70}},{a:1, b:1, c:1, _id:0}).sort({$natural:1}).hint({a:1, b:-1,
+// c:1}).explain()
// // indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8561
-// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered query")
-// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non covered query")
+// assert.eq(true, plan.indexOnly, "negative.1.5 - indexOnly should be false on a non covered
+// query")
+// assert.neq(0, plan.nscannedObjects, "negative.1.5 - nscannedObjects should not be 0 for a non
+// covered query")
// // Test sort on non-indexed field
// var plan = coll.find({d:{$lt:1000}},{d:1, _id:0}).sort({c:1}).hint({d:1}).explain()
// //indexOnly should be false but is not due to bug https://jira.mongodb.org/browse/SERVER-8562
-// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered query")
-// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non covered query")
+// assert.eq(true, plan.indexOnly, "negative.1.6 - indexOnly should be false on a non covered
+// query")
+// assert.neq(0, plan.nscannedObjects, "negative.1.6 - nscannedObjects should not be 0 for a non
+// covered query")
// Test query on non-indexed field
-var plan = coll.find({d:{$lt:1000}},{a:1, b:1, c:1, _id:0})
- .hint({a:1, b:-1, c:1})
+var plan = coll.find({d: {$lt: 1000}}, {a: 1, b: 1, c: 1, _id: 0})
+ .hint({a: 1, b: -1, c: 1})
.explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.7 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.7 - docs examined should not be 0 for a non covered query");
// Test query on hashed indexed field
-var plan = coll.find({f:10},{f:1, _id:0}).hint({f:"hashed"}).explain("executionStats");
+var plan = coll.find({f: 10}, {f: 1, _id: 0}).hint({f: "hashed"}).explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan),
"negative.1.8 - indexOnly should be false on a non covered query");
-assert.neq(0, plan.executionStats.totalDocsExamined,
+assert.neq(0,
+ plan.executionStats.totalDocsExamined,
"negative.1.8 - nscannedObjects should not be 0 for a non covered query");
print('all tests passed');
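
The negative cases above boil down to: no projection, _id not excluded, a non-indexed or multikey field in the projection, or a hashed index. A condensed sketch of the hashed case, assuming isIndexOnly() from jstests/libs/analyze_plan.js and a placeholder collection name; a hashed index stores hashes rather than values, so the projection still has to fetch the document.

load("jstests/libs/analyze_plan.js");
var neg = db.covered_negative_sketch;  // placeholder collection
neg.drop();
neg.insert({f: 10});
neg.ensureIndex({f: "hashed"});
var plan = neg.find({f: 10}, {f: 1, _id: 0}).hint({f: "hashed"}).explain("executionStats");
assert(!isIndexOnly(plan.queryPlanner.winningPlan));
assert.neq(0, plan.executionStats.totalDocsExamined);
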
diff --git a/jstests/core/covered_index_simple_1.js b/jstests/core/covered_index_simple_1.js
index 25badb1601f..3827ef4acfc 100644
--- a/jstests/core/covered_index_simple_1.js
+++ b/jstests/core/covered_index_simple_1.js
@@ -5,67 +5,76 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_simple_1");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-for (i=0;i<5;i++) {
- coll.insert({bar:i});
+for (i = 0; i < 5; i++) {
+ coll.insert({bar: i});
}
-coll.insert({foo:"string"});
-coll.insert({foo:{bar:1}});
-coll.insert({foo:null});
-coll.ensureIndex({foo:1});
+coll.insert({foo: "string"});
+coll.insert({foo: {bar: 1}});
+coll.insert({foo: null});
+coll.ensureIndex({foo: 1});
// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: 1}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.1 - docs examined should be 0 for covered query");
// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: "string"}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.2 - docs examined should be 0 for covered query");
// Test equality with doc value
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: {bar: 1}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.3 - docs examined should be 0 for covered query");
// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.4 - docs examined should be 0 for covered query");
// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$gt: 2, $lt: 6}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.5 - docs examined should be 0 for covered query");
// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$in: [5, 8]}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.6 - docs examined should be 0 for covered query");
// Test no return
-var plan = coll.find({foo:"2"}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: "2"}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.1.7 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.1.7 - nscannedObjects should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
diff --git a/jstests/core/covered_index_simple_2.js b/jstests/core/covered_index_simple_2.js
index f666a9eb4ae..0c947849703 100644
--- a/jstests/core/covered_index_simple_2.js
+++ b/jstests/core/covered_index_simple_2.js
@@ -5,54 +5,62 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_simple_2");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-coll.insert({foo:"string"});
-coll.insert({foo:{bar:1}});
-coll.insert({foo:null});
-coll.ensureIndex({foo:1},{unique:true});
+coll.insert({foo: "string"});
+coll.insert({foo: {bar: 1}});
+coll.insert({foo: null});
+coll.ensureIndex({foo: 1}, {unique: true});
// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: 1}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.1 - docs examined should be 0 for covered query");
// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: "string"}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.2 - docs examined should be 0 for covered query");
// Test equality with int value on a dotted field
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: {bar: 1}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.3 - docs examined should be 0 for covered query");
// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.4 - docs examined should be 0 for covered query");
// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$gt: 2, $lt: 6}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.5 - docs examined should be 0 for covered query");
// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$in: [5, 8]}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.2.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.2.6 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
diff --git a/jstests/core/covered_index_simple_3.js b/jstests/core/covered_index_simple_3.js
index 5338f4bd782..e445396c4c8 100644
--- a/jstests/core/covered_index_simple_3.js
+++ b/jstests/core/covered_index_simple_3.js
@@ -5,73 +5,85 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_simple_3");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-for (i=0;i<5;i++) {
- coll.insert({bar:i});
+for (i = 0; i < 5; i++) {
+ coll.insert({bar: i});
}
-coll.insert({foo:"string"});
-coll.insert({foo:{bar:1}});
-coll.insert({foo:null});
-coll.ensureIndex({foo:1}, {sparse:true, unique:true});
+coll.insert({foo: "string"});
+coll.insert({foo: {bar: 1}});
+coll.insert({foo: null});
+coll.ensureIndex({foo: 1}, {sparse: true, unique: true});
// Test equality with int value
-var plan = coll.find({foo:1}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: 1}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.1 - docs examined should be 0 for covered query");
// Test equality with string value
-var plan = coll.find({foo:"string"}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: "string"}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.2 - docs examined should be 0 for covered query");
// Test equality with int value on a dotted field
-var plan = coll.find({foo:{bar:1}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({foo: {bar: 1}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.3 - docs examined should be 0 for covered query");
// Test no query
-var plan = coll.find({}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.4 - docs examined should be 0 for covered query");
// Test range query
-var plan = coll.find({foo:{$gt:2,$lt:6}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$gt: 2, $lt: 6}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.5 - docs examined should be 0 for covered query");
// Test in query
-var plan = coll.find({foo:{$in:[5,8]}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$in: [5, 8]}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.6 - docs examined should be 0 for covered query");
// Test $exists true
-var plan = coll.find({foo:{$exists:true}}, {foo:1, _id:0}).hint({foo:1}).explain("executionStats");
+var plan =
+ coll.find({foo: {$exists: true}}, {foo: 1, _id: 0}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.7 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.7 - docs examined should be 0 for covered query");
// Check that $nin can be covered.
coll.dropIndexes();
coll.ensureIndex({bar: 1});
-var plan = coll.find({bar:{$nin:[5,8]}}, {bar:1, _id:0}).hint({bar:1}).explain("executionStats");
+var plan =
+ coll.find({bar: {$nin: [5, 8]}}, {bar: 1, _id: 0}).hint({bar: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.3.8 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.3.8 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
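
covered_index_simple_3.js also checks that $nin can be covered. A minimal sketch of that case under the same assumptions (analyze_plan.js loaded, placeholder collection name): $nin is answered from the complement ranges of the index, so excluding _id keeps the query index-only.

load("jstests/libs/analyze_plan.js");
var nin = db.covered_nin_sketch;  // placeholder collection
nin.drop();
for (var i = 0; i < 5; i++) {
    nin.insert({bar: i});
}
nin.ensureIndex({bar: 1});
var plan =
    nin.find({bar: {$nin: [5, 8]}}, {bar: 1, _id: 0}).hint({bar: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan));
assert.eq(0, plan.executionStats.totalDocsExamined);
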
diff --git a/jstests/core/covered_index_simple_id.js b/jstests/core/covered_index_simple_id.js
index d6db2c3149f..c2550544abd 100644
--- a/jstests/core/covered_index_simple_id.js
+++ b/jstests/core/covered_index_simple_id.js
@@ -5,53 +5,59 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_simple_id");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({_id:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({_id: i});
}
-coll.insert({_id:"string"});
-coll.insert({_id:{bar:1}});
-coll.insert({_id:null});
+coll.insert({_id: "string"});
+coll.insert({_id: {bar: 1}});
+coll.insert({_id: null});
// Test equality with int value
-var plan = coll.find({_id:1}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({_id: 1}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.1 - docs examined should be 0 for covered query");
// Test equality with string value
-var plan = coll.find({_id:"string"}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({_id: "string"}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.2 - docs examined should be 0 for covered query");
// Test equality with int value on a dotted field
-var plan = coll.find({_id:{bar:1}}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({_id: {bar: 1}}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.3 - docs examined should be 0 for covered query");
// Test no query
-var plan = coll.find({}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.4 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.4 - docs examined should be 0 for covered query");
// Test range query
-var plan = coll.find({_id:{$gt:2,$lt:6}}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({_id: {$gt: 2, $lt: 6}}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.5 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.5 - docs examined should be 0 for covered query");
// Test in query
-var plan = coll.find({_id:{$in:[5,8]}}, {_id:1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({_id: {$in: [5, 8]}}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"simple.id.6 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"simple.id.6 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
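
The _id variant above needs no explicit exclusion: projecting only _id lets the default {_id: 1} index cover the query. A small sketch under the same assumptions; the hint pins the plan to the _id index, as in the test, and the collection name is a placeholder.

load("jstests/libs/analyze_plan.js");
var idColl = db.covered_id_sketch;  // placeholder collection
idColl.drop();
idColl.insert({_id: 1});
var plan = idColl.find({_id: 1}, {_id: 1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan));
assert.eq(0, plan.executionStats.totalDocsExamined);
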
diff --git a/jstests/core/covered_index_sort_1.js b/jstests/core/covered_index_sort_1.js
index 3ddd9e7b701..a5984a34f19 100644
--- a/jstests/core/covered_index_sort_1.js
+++ b/jstests/core/covered_index_sort_1.js
@@ -5,41 +5,45 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_sort_1");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-for (i=0;i<10;i++) {
- coll.insert({foo:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({foo: i});
}
-for (i=0;i<5;i++) {
- coll.insert({bar:i});
+for (i = 0; i < 5; i++) {
+ coll.insert({bar: i});
}
-coll.insert({foo:"1"});
-coll.insert({foo:{bar:1}});
-coll.insert({foo:null});
-coll.ensureIndex({foo:1});
+coll.insert({foo: "1"});
+coll.insert({foo: {bar: 1}});
+coll.insert({foo: null});
+coll.ensureIndex({foo: 1});
// Test no query and sort ascending
-var plan = coll.find({}, {foo:1, _id:0}).sort({foo:1}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({}, {foo: 1, _id: 0}).sort({foo: 1}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"sort.1.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"sort.1.1 - docs examined should be 0 for covered query");
// Test no query and sort descending
-var plan = coll.find({}, {foo:1, _id:0}).sort({foo:-1}).hint({foo:1}).explain("executionStats");
+var plan = coll.find({}, {foo: 1, _id: 0}).sort({foo: -1}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"sort.1.2 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"sort.1.2 - docs examined should be 0 for covered query");
// Test range query with sort
-var plan = coll.find({foo:{$gt:2}}, {foo:1, _id:0}).sort({foo:-1})
- .hint({foo:1})
- .explain("executionStats");
+var plan = coll.find({foo: {$gt: 2}}, {foo: 1, _id: 0})
+ .sort({foo: -1})
+ .hint({foo: 1})
+ .explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"sort.1.3 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"sort.1.3 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
diff --git a/jstests/core/covered_index_sort_2.js b/jstests/core/covered_index_sort_2.js
index 75a89ee7618..5ed3dc869c9 100644
--- a/jstests/core/covered_index_sort_2.js
+++ b/jstests/core/covered_index_sort_2.js
@@ -5,18 +5,19 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_sort_2");
coll.drop();
-for (i=0;i<10;i++) {
- coll.insert({_id:i});
+for (i = 0; i < 10; i++) {
+ coll.insert({_id: i});
}
-coll.insert({_id:"1"});
-coll.insert({_id:{bar:1}});
-coll.insert({_id:null});
+coll.insert({_id: "1"});
+coll.insert({_id: {bar: 1}});
+coll.insert({_id: null});
// Test no query
-var plan = coll.find({}, {_id:1}).sort({_id:-1}).hint({_id:1}).explain("executionStats");
+var plan = coll.find({}, {_id: 1}).sort({_id: -1}).hint({_id: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"sort.2.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"sort.2.1 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
diff --git a/jstests/core/covered_index_sort_3.js b/jstests/core/covered_index_sort_3.js
index 735b93877ba..1c98fb69be4 100644
--- a/jstests/core/covered_index_sort_3.js
+++ b/jstests/core/covered_index_sort_3.js
@@ -5,19 +5,21 @@ load("jstests/libs/analyze_plan.js");
var coll = db.getCollection("covered_sort_3");
coll.drop();
-for (i=0;i<100;i++) {
- coll.insert({a:i, b:"strvar_"+(i%13), c:NumberInt(i%10)});
+for (i = 0; i < 100; i++) {
+ coll.insert({a: i, b: "strvar_" + (i % 13), c: NumberInt(i % 10)});
}
coll.insert;
-coll.ensureIndex({a:1,b:-1,c:1});
+coll.ensureIndex({a: 1, b: -1, c: 1});
// Test no query, sort on all fields in index order
-var plan = coll.find({}, {b:1, c:1, _id:0}).sort({a:1,b:-1,c:1})
- .hint({a:1, b:-1, c:1})
- .explain("executionStats");
+var plan = coll.find({}, {b: 1, c: 1, _id: 0})
+ .sort({a: 1, b: -1, c: 1})
+ .hint({a: 1, b: -1, c: 1})
+ .explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan),
"sort.3.1 - indexOnly should be true on covered query");
-assert.eq(0, plan.executionStats.totalDocsExamined,
+assert.eq(0,
+ plan.executionStats.totalDocsExamined,
"sort.3.1 - docs examined should be 0 for covered query");
-print ('all tests pass');
+print('all tests pass');
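
The sort tests above rely on the sort spec being answerable from the index key pattern (or its reverse), so the index scan itself produces the order and the projection keeps the query covered. A minimal sketch, assuming analyze_plan.js and a placeholder collection name.

load("jstests/libs/analyze_plan.js");
var srt = db.covered_sort_sketch;  // placeholder collection
srt.drop();
for (var i = 0; i < 20; i++) {
    srt.insert({foo: i});
}
srt.ensureIndex({foo: 1});
// Descending sort is served by walking the {foo: 1} index backwards; no documents are fetched.
var plan =
    srt.find({}, {foo: 1, _id: 0}).sort({foo: -1}).hint({foo: 1}).explain("executionStats");
assert(isIndexOnly(plan.queryPlanner.winningPlan));
assert.eq(0, plan.executionStats.totalDocsExamined);
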
diff --git a/jstests/core/create_collection_fail_cleanup.js b/jstests/core/create_collection_fail_cleanup.js
index a1548d35105..1417a54496c 100644
--- a/jstests/core/create_collection_fail_cleanup.js
+++ b/jstests/core/create_collection_fail_cleanup.js
@@ -9,8 +9,7 @@ assert(dbTest.getCollectionNames().length == 0);
var res = dbTest.createCollection("broken", {capped: true, size: -1});
assert.eq(false, res.ok);
-dbTest.getCollectionNames().forEach(
- function(collName) {
- print(collName);
- assert(collName != 'broken');
- });
+dbTest.getCollectionNames().forEach(function(collName) {
+ print(collName);
+ assert(collName != 'broken');
+});
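
A compact restatement of the cleanup check above, with a throwaway database name as a stated assumption: a createCollection call that fails (here, a negative capped size) must not leave the collection behind.

var dbSketch = db.getSiblingDB('create_fail_cleanup_sketch');  // placeholder database
dbSketch.dropDatabase();
var res = dbSketch.createCollection('broken', {capped: true, size: -1});
assert.eq(false, res.ok);
// The failed create must not leave 'broken' in the collection catalog.
dbSketch.getCollectionNames().forEach(function(collName) {
    assert(collName != 'broken');
});
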
diff --git a/jstests/core/create_indexes.js b/jstests/core/create_indexes.js
index f86208ec3b0..3e069d7478e 100644
--- a/jstests/core/create_indexes.js
+++ b/jstests/core/create_indexes.js
@@ -4,7 +4,8 @@
var isMongos = ("isdbgrid" == db.runCommand("ismaster").msg);
var extractResult = function(obj) {
- if (!isMongos) return obj;
+ if (!isMongos)
+ return obj;
// Sample mongos format:
// {
@@ -36,95 +37,96 @@
// Database does not exist
var collDbNotExist = dbTest.create_indexes_no_db;
- var res = assert.commandWorked(collDbNotExist.runCommand(
- 'createIndexes',
- {indexes: [{key: {x: 1}, name: 'x_1'}]}));
- res = extractResult( res );
- assert( res.createdCollectionAutomatically );
- assert.eq( 1, res.numIndexesBefore );
- assert.eq( 2, res.numIndexesAfter );
+ var res = assert.commandWorked(
+ collDbNotExist.runCommand('createIndexes', {indexes: [{key: {x: 1}, name: 'x_1'}]}));
+ res = extractResult(res);
+ assert(res.createdCollectionAutomatically);
+ assert.eq(1, res.numIndexesBefore);
+ assert.eq(2, res.numIndexesAfter);
assert.isnull(res.note,
'createIndexes.note should not be present in results when adding a new index: ' +
- tojson(res));
+ tojson(res));
// Collection does not exist, but database does
var t = dbTest.create_indexes;
- var res = assert.commandWorked(t.runCommand('createIndexes',
- {indexes: [{key: {x: 1}, name: 'x_1'}]}));
- res = extractResult( res );
- assert( res.createdCollectionAutomatically );
- assert.eq( 1, res.numIndexesBefore );
- assert.eq( 2, res.numIndexesAfter );
+ var res = assert.commandWorked(
+ t.runCommand('createIndexes', {indexes: [{key: {x: 1}, name: 'x_1'}]}));
+ res = extractResult(res);
+ assert(res.createdCollectionAutomatically);
+ assert.eq(1, res.numIndexesBefore);
+ assert.eq(2, res.numIndexesAfter);
assert.isnull(res.note,
'createIndexes.note should not be present in results when adding a new index: ' +
- tojson(res));
+ tojson(res));
// Both database and collection exist
- res = assert.commandWorked(t.runCommand('createIndexes',
- {indexes: [{key: {x: 1}, name: 'x_1'}]}));
- res = extractResult( res );
+ res = assert.commandWorked(
+ t.runCommand('createIndexes', {indexes: [{key: {x: 1}, name: 'x_1'}]}));
+ res = extractResult(res);
assert(!res.createdCollectionAutomatically);
assert.eq(2, res.numIndexesBefore);
- assert.eq(2, res.numIndexesAfter,
+ assert.eq(2,
+ res.numIndexesAfter,
'numIndexesAfter missing from createIndexes result when adding a duplicate index: ' +
- tojson(res));
+ tojson(res));
assert(res.note,
'createIndexes.note should be present in results when adding a duplicate index: ' +
- tojson(res));
-
- res = t.runCommand( "createIndexes", { indexes : [ { key : { "x" : 1 }, name : "x_1" },
- { key : { "y" : 1 }, name : "y_1" } ] } );
- res = extractResult( res );
- assert( !res.createdCollectionAutomatically );
- assert.eq( 2, res.numIndexesBefore );
- assert.eq( 3, res.numIndexesAfter );
-
- res = assert.commandWorked(t.runCommand('createIndexes',
- {indexes: [{key: {a: 1}, name: 'a_1'}, {key: {b: 1}, name: 'b_1'}]}));
- res = extractResult( res );
- assert( !res.createdCollectionAutomatically );
- assert.eq( 3, res.numIndexesBefore );
- assert.eq( 5, res.numIndexesAfter );
+ tojson(res));
+
+ res = t.runCommand("createIndexes",
+ {indexes: [{key: {"x": 1}, name: "x_1"}, {key: {"y": 1}, name: "y_1"}]});
+ res = extractResult(res);
+ assert(!res.createdCollectionAutomatically);
+ assert.eq(2, res.numIndexesBefore);
+ assert.eq(3, res.numIndexesAfter);
+
+ res = assert.commandWorked(t.runCommand(
+ 'createIndexes', {indexes: [{key: {a: 1}, name: 'a_1'}, {key: {b: 1}, name: 'b_1'}]}));
+ res = extractResult(res);
+ assert(!res.createdCollectionAutomatically);
+ assert.eq(3, res.numIndexesBefore);
+ assert.eq(5, res.numIndexesAfter);
assert.isnull(res.note,
'createIndexes.note should not be present in results when adding new indexes: ' +
- tojson(res));
+ tojson(res));
- res = assert.commandWorked(t.runCommand('createIndexes',
- {indexes: [{key: {a: 1}, name: 'a_1'}, {key: {b: 1}, name: 'b_1'}]}));
+ res = assert.commandWorked(t.runCommand(
+ 'createIndexes', {indexes: [{key: {a: 1}, name: 'a_1'}, {key: {b: 1}, name: 'b_1'}]}));
- res = extractResult( res );
- assert.eq( 5, res.numIndexesBefore );
- assert.eq(5, res.numIndexesAfter,
+ res = extractResult(res);
+ assert.eq(5, res.numIndexesBefore);
+ assert.eq(5,
+ res.numIndexesAfter,
'numIndexesAfter missing from createIndexes result when adding duplicate indexes: ' +
- tojson(res));
+ tojson(res));
assert(res.note,
'createIndexes.note should be present in results when adding a duplicate index: ' +
- tojson(res));
+ tojson(res));
- res = t.runCommand( "createIndexes", { indexes : [ {} ] } );
- assert( !res.ok );
+ res = t.runCommand("createIndexes", {indexes: [{}]});
+ assert(!res.ok);
- res = t.runCommand( "createIndexes", { indexes : [ {} , { key : { m : 1 }, name : "asd" } ] } );
- assert( !res.ok );
+ res = t.runCommand("createIndexes", {indexes: [{}, {key: {m: 1}, name: "asd"}]});
+ assert(!res.ok);
- assert.eq( 5, t.getIndexes().length );
+ assert.eq(5, t.getIndexes().length);
- res = t.runCommand( "createIndexes",
- { indexes : [ { key : { "c" : 1 }, sparse : true, name : "c_1" } ] } );
- assert.eq( 6, t.getIndexes().length );
- assert.eq( 1, t.getIndexes().filter( function(z){ return z.sparse; } ).length );
+ res = t.runCommand("createIndexes", {indexes: [{key: {"c": 1}, sparse: true, name: "c_1"}]});
+ assert.eq(6, t.getIndexes().length);
+ assert.eq(1,
+ t.getIndexes().filter(function(z) {
+ return z.sparse;
+ }).length);
- res = t.runCommand( "createIndexes",
- { indexes : [ { key : { "x" : "foo" }, name : "x_1" } ] } );
- assert( !res.ok );
+ res = t.runCommand("createIndexes", {indexes: [{key: {"x": "foo"}, name: "x_1"}]});
+ assert(!res.ok);
- assert.eq( 6, t.getIndexes().length );
+ assert.eq(6, t.getIndexes().length);
- res = t.runCommand( "createIndexes",
- { indexes : [ { key : { "x" : 1 }, name : "" } ] } );
- assert( !res.ok );
+ res = t.runCommand("createIndexes", {indexes: [{key: {"x": 1}, name: ""}]});
+ assert(!res.ok);
- assert.eq( 6, t.getIndexes().length );
+ assert.eq(6, t.getIndexes().length);
// Test that v0 indexes cannot be created.
res = t.runCommand('createIndexes', {indexes: [{key: {d: 1}, name: 'd_1', v: 0}]});
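
For context on the assertions above, a hedged sketch of the createIndexes result fields the test inspects (numIndexesBefore/After, createdCollectionAutomatically, and the optional note on duplicates); the collection name is a placeholder and, as the test's extractResult() helper shows, on mongos the raw response arrives wrapped per shard.

var ci = db.create_indexes_sketch;  // placeholder collection
ci.drop();
var res = assert.commandWorked(
    ci.runCommand('createIndexes', {indexes: [{key: {x: 1}, name: 'x_1'}]}));
// A freshly created collection carries only the _id index, so the test expects
// numIndexesBefore: 1, numIndexesAfter: 2, and createdCollectionAutomatically: true.
// Re-running the identical command is a no-op: the counts stay at 2 and a 'note'
// field reports that the index already exists.
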
diff --git a/jstests/core/crud_api.js b/jstests/core/crud_api.js
index 0c248884350..f6cc77025c3 100644
--- a/jstests/core/crud_api.js
+++ b/jstests/core/crud_api.js
@@ -32,7 +32,9 @@
if (db.getMongo().writeMode() === 'commands') {
assert.docEq(first, second);
} else {
- var overrideModifiedCount = {modifiedCount: undefined};
+ var overrideModifiedCount = {
+ modifiedCount: undefined
+ };
assert.docEq(Object.merge(first, overrideModifiedCount),
Object.merge(second, overrideModifiedCount));
}
@@ -42,12 +44,12 @@
var deleteManyExecutor = createTestExecutor(coll, 'deleteMany', checkResultObject);
var deleteOneExecutor = createTestExecutor(coll, 'deleteOne', checkResultObject);
var bulkWriteExecutor = createTestExecutor(coll, 'bulkWrite', checkResultObject);
- var findOneAndDeleteExecutor = createTestExecutor(coll, 'findOneAndDelete',
- checkResultObject);
- var findOneAndReplaceExecutor = createTestExecutor(coll, 'findOneAndReplace',
- checkResultObject);
- var findOneAndUpdateExecutor = createTestExecutor(coll, 'findOneAndUpdate',
- checkResultObject);
+ var findOneAndDeleteExecutor =
+ createTestExecutor(coll, 'findOneAndDelete', checkResultObject);
+ var findOneAndReplaceExecutor =
+ createTestExecutor(coll, 'findOneAndReplace', checkResultObject);
+ var findOneAndUpdateExecutor =
+ createTestExecutor(coll, 'findOneAndUpdate', checkResultObject);
var insertManyExecutor = createTestExecutor(coll, 'insertMany', checkResultObject);
var insertOneExecutor = createTestExecutor(coll, 'insertOne', checkResultObject);
var replaceOneExecutor = createTestExecutor(coll, 'replaceOne', checkResultObject);
@@ -61,36 +63,53 @@
//
bulkWriteExecutor({
- insert: [{ _id: 1, c: 1 }, { _id: 2, c: 2 }, { _id: 3, c: 3 }],
- params: [[
- { insertOne: { document: {_id: 4, a: 1 } } }
- , { updateOne: { filter: {_id: 5, a:2}, update: {$set: {a:2}}, upsert:true } }
- , { updateMany: { filter: {_id: 6,a:3}, update: {$set: {a:3}}, upsert:true } }
- , { deleteOne: { filter: {c:1} } }
- , { insertOne: { document: {_id: 7, c: 2 } } }
- , { deleteMany: { filter: {c:2} } }
- , { replaceOne: { filter: {c:3}, replacement: {c:4}, upsert:true } }]],
- result: {
- acknowledged: true, insertedCount:2,
- matchedCount:1, deletedCount: 3,
- upsertedCount:2, insertedIds : {'0' : 4, '4' : 7 }, upsertedIds : { '1' : 5, '2' : 6 }
- },
- expected: [{ "_id" : 3, "c" : 4 }, { "_id" : 4, "a" : 1 }, { "_id" : 5, "a" : 2 }, { "_id" : 6, "a" : 3 }]
+ insert: [{_id: 1, c: 1}, {_id: 2, c: 2}, {_id: 3, c: 3}],
+ params: [[
+ {insertOne: {document: {_id: 4, a: 1}}},
+ {updateOne: {filter: {_id: 5, a: 2}, update: {$set: {a: 2}}, upsert: true}},
+ {updateMany: {filter: {_id: 6, a: 3}, update: {$set: {a: 3}}, upsert: true}},
+ {deleteOne: {filter: {c: 1}}},
+ {insertOne: {document: {_id: 7, c: 2}}},
+ {deleteMany: {filter: {c: 2}}},
+ {replaceOne: {filter: {c: 3}, replacement: {c: 4}, upsert: true}}
+ ]],
+ result: {
+ acknowledged: true,
+ insertedCount: 2,
+ matchedCount: 1,
+ deletedCount: 3,
+ upsertedCount: 2,
+ insertedIds: {'0': 4, '4': 7},
+ upsertedIds: {'1': 5, '2': 6}
+ },
+ expected:
+ [{"_id": 3, "c": 4}, {"_id": 4, "a": 1}, {"_id": 5, "a": 2}, {"_id": 6, "a": 3}]
});
bulkWriteExecutor({
- insert: [{ _id: 1, c: 1 }, { _id: 2, c: 2 }, { _id: 3, c: 3 }],
- params: [[
- { insertOne: { document: { _id: 4, a: 1 } } }
- , { updateOne: { filter: {_id: 5, a:2}, update: {$set: {a:2}}, upsert:true } }
- , { updateMany: { filter: {_id: 6, a:3}, update: {$set: {a:3}}, upsert:true } }
- , { deleteOne: { filter: {c:1} } }
- , { deleteMany: { filter: {c:2} } }
- , { replaceOne: { filter: {c:3}, replacement: {c:4}, upsert:true } }], { ordered: false }],
- result: {
- acknowledged: true, insertedCount:1, matchedCount:1, deletedCount:2, upsertedCount:2, insertedIds : {'0' : 4 }, upsertedIds : { '1' : 5, '2' : 6 }
- },
- expected: [{ "_id" : 3, "c" : 4 }, { "_id" : 4, "a" : 1 }, { "_id" : 5, "a" : 2 }, { "_id" : 6, "a" : 3 }]
+ insert: [{_id: 1, c: 1}, {_id: 2, c: 2}, {_id: 3, c: 3}],
+ params: [
+ [
+ {insertOne: {document: {_id: 4, a: 1}}},
+ {updateOne: {filter: {_id: 5, a: 2}, update: {$set: {a: 2}}, upsert: true}},
+ {updateMany: {filter: {_id: 6, a: 3}, update: {$set: {a: 3}}, upsert: true}},
+ {deleteOne: {filter: {c: 1}}},
+ {deleteMany: {filter: {c: 2}}},
+ {replaceOne: {filter: {c: 3}, replacement: {c: 4}, upsert: true}}
+ ],
+ {ordered: false}
+ ],
+ result: {
+ acknowledged: true,
+ insertedCount: 1,
+ matchedCount: 1,
+ deletedCount: 2,
+ upsertedCount: 2,
+ insertedIds: {'0': 4},
+ upsertedIds: {'1': 5, '2': 6}
+ },
+ expected:
+ [{"_id": 3, "c": 4}, {"_id": 4, "a": 1}, {"_id": 5, "a": 2}, {"_id": 6, "a": 3}]
});
// DeleteMany
@@ -98,30 +117,24 @@
// DeleteMany when many documents match
deleteManyExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- ],
- result: {acknowledged: true, deletedCount:2},
- expected: [{_id:1, x: 11}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}],
+ result: {acknowledged: true, deletedCount: 2},
+ expected: [{_id: 1, x: 11}]
});
// DeleteMany when no document matches
deleteManyExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- ],
- result: {acknowledged: true, deletedCount:0},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}],
+ result: {acknowledged: true, deletedCount: 0},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// DeleteMany when many documents match, no write concern
deleteManyExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }, { w : 0 }
- ],
- result: {acknowledged: false},
- expected: [{_id:1, x: 11}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}]
});
//
@@ -130,39 +143,31 @@
// DeleteOne when many documents match
deleteOneExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- ],
- result: {acknowledged: true, deletedCount:1},
- expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}],
+ result: {acknowledged: true, deletedCount: 1},
+ expected: [{_id: 1, x: 11}, {_id: 3, x: 33}]
});
// DeleteOne when one document matches
deleteOneExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- ],
- result: {acknowledged: true, deletedCount:1},
- expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 2}],
+ result: {acknowledged: true, deletedCount: 1},
+ expected: [{_id: 1, x: 11}, {_id: 3, x: 33}]
});
// DeleteOne when no documents match
deleteOneExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- ],
- result: {acknowledged: true, deletedCount:0},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}],
+ result: {acknowledged: true, deletedCount: 0},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// DeleteOne when many documents match, no write concern
deleteOneExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }, {w:0}
- ],
- result: {acknowledged: false},
- expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 3, x: 33}]
});
//
@@ -171,33 +176,24 @@
// FindOneAndDelete when one document matches
findOneAndDeleteExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 2 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:33},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 2}}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 33},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}]
});
// FindOneAndDelete when one document matches
findOneAndDeleteExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:22},
- expected: [{_id:1, x: 11}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 2}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 22},
+ expected: [{_id: 1, x: 11}, {_id: 3, x: 33}]
});
// FindOneAndDelete when no documents match
findOneAndDeleteExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
//
@@ -206,95 +202,81 @@
// FindOneAndReplace when many documents match returning the document before modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- , { x: 32 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:22},
- expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {x: 32}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 22},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 32}, {_id: 3, x: 33}]
});
// FindOneAndReplace when many documents match returning the document after modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- , { x: 32 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: {$gt: 1}},
+ {x: 32},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: {x:32},
- expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ result: {x: 32},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 32}, {_id: 3, x: 33}]
});
// FindOneAndReplace when one document matches returning the document before modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- , { x: 32 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:22},
- expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 2}, {x: 32}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 22},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 32}, {_id: 3, x: 33}]
});
// FindOneAndReplace when one document matches returning the document after modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- , { x: 32 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 2},
+ {x: 32},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: {x:32},
- expected: [{_id:1, x: 11}, {_id:2, x: 32}, {_id:3, x: 33}]
+ result: {x: 32},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 32}, {_id: 3, x: 33}]
});
// FindOneAndReplace when no documents match returning the document before modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { x: 44 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {x: 44}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
- // FindOneAndReplace when no documents match with upsert returning the document before modification
+ // FindOneAndReplace when no documents match with upsert returning the document before
+ // modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { x: 44 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, upsert:true }
- ],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x:44}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {x: 44}, {projection: {x: 1, _id: 0}, sort: {x: 1}, upsert: true}],
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 44}]
});
// FindOneAndReplace when no documents match returning the document after modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { x: 44 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 4},
+ {x: 44},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
- // FindOneAndReplace when no documents match with upsert returning the document after modification
+ // FindOneAndReplace when no documents match with upsert returning the document after
+ // modification
findOneAndReplaceExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { x: 44 }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true, upsert:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 4},
+ {x: 44},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true, upsert: true}
],
- result: {x:44},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 44}]
+ result: {x: 44},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 44}]
});
assert.throws(function() {
- coll.findOneAndReplace({a:1}, {$set:{b:1}});
+ coll.findOneAndReplace({a: 1}, {$set: {b: 1}});
});
//
@@ -303,99 +285,89 @@
// FindOneAndUpdate when many documents match returning the document before modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:22},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {$inc: {x: 1}}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 22},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
// FindOneAndUpdate when many documents match returning the document after modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: { $gt: 1 } }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument: true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: {$gt: 1}},
+ {$inc: {x: 1}},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: {x:23},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ result: {x: 23},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
// FindOneAndUpdate when one document matches returning the document before modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: {x:22},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 2}, {$inc: {x: 1}}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: {x: 22},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
// FindOneAndUpdate when one document matches returning the document after modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 2 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument: true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 2},
+ {$inc: {x: 1}},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: {x:23},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ result: {x: 23},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
// FindOneAndUpdate when no documents match returning the document before modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 } }
- ],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}, {projection: {x: 1, _id: 0}, sort: {x: 1}}],
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
- // FindOneAndUpdate when no documents match with upsert returning the document before modification
+ // FindOneAndUpdate when no documents match with upsert returning the document before
+ // modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, upsert:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 4},
+ {$inc: {x: 1}},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, upsert: true}
],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// FindOneAndUpdate when no documents match returning the document after modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 4},
+ {$inc: {x: 1}},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true}
],
- result: null,
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ result: null,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
- // FindOneAndUpdate when no documents match with upsert returning the document after modification
+ // FindOneAndUpdate when no documents match with upsert returning the document after
+ // modification
findOneAndUpdateExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [
- { _id: 4 }
- , { $inc: { x: 1 } }
- , { projection: { x: 1, _id: 0 }, sort: { x: 1 }, returnNewDocument:true, upsert:true }
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [
+ {_id: 4},
+ {$inc: {x: 1}},
+ {projection: {x: 1, _id: 0}, sort: {x: 1}, returnNewDocument: true, upsert: true}
],
- result: {x:1},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ result: {x: 1},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
assert.throws(function() {
- coll.findOneAndUpdate({a:1}, {});
+ coll.findOneAndUpdate({a: 1}, {});
});
assert.throws(function() {
- coll.findOneAndUpdate({a:1}, {b:1});
+ coll.findOneAndUpdate({a: 1}, {b: 1});
});
//
@@ -404,22 +376,17 @@
// InsertMany with non-existing documents
insertManyExecutor({
- insert: [{ _id:1, x:11 }],
- params: [
- [{_id: 2, x: 22}, {_id:3, x:33}]
- ],
- result: {acknowledged: true, insertedIds: [2, 3]},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}],
+ params: [[{_id: 2, x: 22}, {_id: 3, x: 33}]],
+ result: {acknowledged: true, insertedIds: [2, 3]},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// InsertMany with non-existing documents, no write concern
insertManyExecutor({
- insert: [{ _id:1, x:11 }],
- params: [
- [{_id: 2, x: 22}, {_id:3, x:33}]
- , {w:0}
- ],
- result: {acknowledged: false},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}],
+ params: [[{_id: 2, x: 22}, {_id: 3, x: 33}], {w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
//
@@ -428,21 +395,17 @@
// InsertOne with non-existing documents
insertOneExecutor({
- insert: [{ _id:1, x:11 }],
- params: [
- {_id: 2, x: 22}
- ],
- result: {acknowledged: true, insertedId: 2},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ insert: [{_id: 1, x: 11}],
+ params: [{_id: 2, x: 22}],
+ result: {acknowledged: true, insertedId: 2},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}]
});
// InsertOne with non-existing documents, no write concern
insertOneExecutor({
- insert: [{ _id:1, x:11 }],
- params: [
- {_id: 2, x: 22}, {w:0}
- ],
- result: {acknowledged: false},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}]
+ insert: [{_id: 1, x: 11}],
+ params: [{_id: 2, x: 22}, {w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}]
});
//
@@ -451,56 +414,56 @@
// ReplaceOne when many documents match
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: { $gt: 1 } }, { x: 111 }],
- result: {acknowledged:true, matchedCount:1, modifiedCount:1},
- expected: [{_id:1, x: 11}, {_id:2, x: 111}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {x: 111}],
+ result: {acknowledged: true, matchedCount: 1, modifiedCount: 1},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 111}, {_id: 3, x: 33}]
});
// ReplaceOne when one document matches
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 1 }, { _id: 1, x: 111 }],
- result: {acknowledged:true, matchedCount:1, modifiedCount:1},
- expected: [{_id:1, x: 111}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 1}, {_id: 1, x: 111}],
+ result: {acknowledged: true, matchedCount: 1, modifiedCount: 1},
+ expected: [{_id: 1, x: 111}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// ReplaceOne when no documents match
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { _id: 4, x: 1 }],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {_id: 4, x: 1}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// ReplaceOne with upsert when no documents match without an id specified
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { x: 1 }, {upsert:true}],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {x: 1}, {upsert: true}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0, upsertedId: 4},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// ReplaceOne with upsert when no documents match with an id specified
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true}],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {_id: 4, x: 1}, {upsert: true}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0, upsertedId: 4},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// ReplaceOne with upsert when no documents match with an id specified, no write concern
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true, w:0}],
- result: {acknowledged:false},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {_id: 4, x: 1}, {upsert: true, w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// ReplaceOne with upsert when no documents match with an id specified, no write concern
replaceOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { _id: 4, x: 1 }, {upsert:true, writeConcern:{w:0}}],
- result: {acknowledged:false},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {_id: 4, x: 1}, {upsert: true, writeConcern: {w: 0}}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
assert.throws(function() {
- coll.replaceOne({a:1}, {$set:{b:1}});
+ coll.replaceOne({a: 1}, {$set: {b: 1}});
});
//
@@ -509,46 +472,46 @@
// UpdateMany when many documents match
updateManyExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:2, modifiedCount:2},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 34}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 2, modifiedCount: 2},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 34}]
});
// UpdateMany when one document matches
updateManyExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 1 }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:1, modifiedCount:1},
- expected: [{_id:1, x: 12}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 1}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 1, modifiedCount: 1},
+ expected: [{_id: 1, x: 12}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// UpdateMany when no documents match
updateManyExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// UpdateMany with upsert when no documents match
updateManyExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { $inc: { x: 1 } }, { upsert: true }],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}, {upsert: true}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0, upsertedId: 4},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// UpdateMany with upsert when no documents match, no write concern
updateManyExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { $inc: { x: 1 } }, { upsert: true, w: 0 }],
- result: {acknowledged:false},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id:4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}, {upsert: true, w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
assert.throws(function() {
- coll.updateMany({a:1}, {});
+ coll.updateMany({a: 1}, {});
});
assert.throws(function() {
- coll.updateMany({a:1}, {b:1});
+ coll.updateMany({a: 1}, {b: 1});
});
//
@@ -557,47 +520,47 @@
// UpdateOne when many documents match
updateOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:1, modifiedCount:1},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 1, modifiedCount: 1},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
// UpdateOne when one document matches
updateOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 1 }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:1, modifiedCount:1},
- expected: [{_id:1, x: 12}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 1}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 1, modifiedCount: 1},
+ expected: [{_id: 1, x: 12}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// UpdateOne when no documents match
updateOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { $inc: { x: 1 } }],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// UpdateOne with upsert when no documents match
updateOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: 4 }, { $inc: { x: 1 } }, {upsert:true}],
- result: {acknowledged:true, matchedCount:0, modifiedCount:0, upsertedId: 4},
- expected: [{_id:1, x: 11}, {_id:2, x: 22}, {_id:3, x: 33}, {_id: 4, x: 1}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: 4}, {$inc: {x: 1}}, {upsert: true}],
+ result: {acknowledged: true, matchedCount: 0, modifiedCount: 0, upsertedId: 4},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}, {_id: 4, x: 1}]
});
// UpdateOne when many documents match, no write concern
updateOneExecutor({
- insert: [{ _id: 1, x: 11 }, { _id: 2, x: 22 }, { _id:3, x:33 }],
- params: [{ _id: { $gt: 1 } }, { $inc: { x: 1 } }, {w:0}],
- result: {acknowledged:false},
- expected: [{_id:1, x: 11}, {_id:2, x: 23}, {_id:3, x: 33}]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}, {$inc: {x: 1}}, {w: 0}],
+ result: {acknowledged: false},
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 23}, {_id: 3, x: 33}]
});
assert.throws(function() {
- coll.updateOne({a:1}, {});
+ coll.updateOne({a: 1}, {});
});
assert.throws(function() {
- coll.updateOne({a:1}, {b:1});
+ coll.updateOne({a: 1}, {b: 1});
});
//
@@ -606,45 +569,45 @@
// Simple count of all elements
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [{}],
- result: 3,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{}],
+ result: 3,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple count no arguments
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [],
- result: 3,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [],
+ result: 3,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple count filtered
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [{_id: {$gt: 1}}],
- result: 2,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{_id: {$gt: 1}}],
+ result: 2,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple count of all elements, applying limit
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [{}, {limit:1}],
- result: 1,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{}, {limit: 1}],
+ result: 1,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple count of all elements, applying skip
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [{}, {skip:1}],
- result: 2,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{}, {skip: 1}],
+ result: 2,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple count no arguments, applying hint
countExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: [{}, {hint: { "_id": 1}}],
- result: 3,
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: [{}, {hint: {"_id": 1}}],
+ result: 3,
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
//
@@ -653,31 +616,31 @@
// Simple distinct of field x no filter
distinctExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: ['x'],
- result: [11, 22, 33],
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: ['x'],
+ result: [11, 22, 33],
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple distinct of field x
distinctExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: ['x', {}],
- result: [11, 22, 33],
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: ['x', {}],
+ result: [11, 22, 33],
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple distinct of field x filtered
distinctExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: ['x', {x: { $gt: 11 }}],
- result: [22, 33],
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: ['x', {x: {$gt: 11}}],
+ result: [22, 33],
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
// Simple distinct of field x filtered with maxTimeMS
distinctExecutor({
- insert: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }],
- params: ['x', {x: { $gt: 11 }}, {maxTimeMS:100000}],
- result: [22, 33],
- expected: [{ _id: 1, x:11 }, { _id: 2, x:22 }, { _id: 3, x:33 }]
+ insert: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}],
+ params: ['x', {x: {$gt: 11}}, {maxTimeMS: 100000}],
+ result: [22, 33],
+ expected: [{_id: 1, x: 11}, {_id: 2, x: 22}, {_id: 3, x: 33}]
});
//
@@ -686,20 +649,21 @@
coll.deleteMany({});
// Insert all of them
- coll.insertMany([{a:0, b:0}, {a:1, b:1}]);
+ coll.insertMany([{a: 0, b: 0}, {a: 1, b: 1}]);
// Simple projection
- var result = coll.find({}).sort({a:1}).limit(1).skip(1).projection({_id:0, a:1}).toArray();
- assert.docEq(result, [{a:1}]);
+ var result =
+ coll.find({}).sort({a: 1}).limit(1).skip(1).projection({_id: 0, a: 1}).toArray();
+ assert.docEq(result, [{a: 1}]);
// Simple tailable cursor
- var cursor = coll.find({}).sort({a:1}).tailable();
+ var cursor = coll.find({}).sort({a: 1}).tailable();
assert.eq(34, (cursor._options & ~DBQuery.Option.slaveOk));
- var cursor = coll.find({}).sort({a:1}).tailable(false);
+ var cursor = coll.find({}).sort({a: 1}).tailable(false);
assert.eq(2, (cursor._options & ~DBQuery.Option.slaveOk));
// Check modifiers
- var cursor = coll.find({}).modifiers({$hint:'a_1'});
+ var cursor = coll.find({}).modifiers({$hint: 'a_1'});
assert.eq('a_1', cursor._query['$hint']);
// allowPartialResults
@@ -720,50 +684,48 @@
coll.deleteMany({});
// Insert all of them
- coll.insertMany([{a:0, b:0}, {a:1, b:1}]);
+ coll.insertMany([{a: 0, b: 0}, {a: 1, b: 1}]);
// Simple aggregation with useCursor
- var result = coll.aggregate([{$match: {}}], {useCursor:true}).toArray();
+ var result = coll.aggregate([{$match: {}}], {useCursor: true}).toArray();
assert.eq(2, result.length);
// Simple aggregation with batchSize
- var result = coll.aggregate([{$match: {}}], {batchSize:2}).toArray();
+ var result = coll.aggregate([{$match: {}}], {batchSize: 2}).toArray();
assert.eq(2, result.length);
// Drop collection
coll.drop();
- coll.ensureIndex({a:1}, {unique:true});
+ coll.ensureIndex({a: 1}, {unique: true});
// Should throw duplicate key error
assert.throws(function() {
- coll.insertMany([{a:0, b:0}, {a:0, b:1}]);
+ coll.insertMany([{a: 0, b: 0}, {a: 0, b: 1}]);
});
- assert(coll.findOne({a:0, b:0}) != null);
+ assert(coll.findOne({a: 0, b: 0}) != null);
assert.throws(function() {
- coll.insertOne({a:0, b:0});
+ coll.insertOne({a: 0, b: 0});
});
assert.throws(function() {
- coll.updateOne({b:2}, {$set: {a:0}}, {upsert:true});
+ coll.updateOne({b: 2}, {$set: {a: 0}}, {upsert: true});
});
assert.throws(function() {
- coll.updateMany({b:2}, {$set: {a:0}}, {upsert:true});
+ coll.updateMany({b: 2}, {$set: {a: 0}}, {upsert: true});
});
assert.throws(function() {
- coll.deleteOne({$invalidFieldName:{a:1}});
+ coll.deleteOne({$invalidFieldName: {a: 1}});
});
assert.throws(function() {
- coll.deleteMany({$set:{a:1}});
+ coll.deleteMany({$set: {a: 1}});
});
assert.throws(function() {
- coll.bulkWrite([
- { insertOne: { document: { _id: 4, a: 0 } } }
- ]);
+ coll.bulkWrite([{insertOne: {document: {_id: 4, a: 0}}}]);
});
};
diff --git a/jstests/core/currentop.js b/jstests/core/currentop.js
index 34e96696481..ef948d415be 100644
--- a/jstests/core/currentop.js
+++ b/jstests/core/currentop.js
@@ -4,20 +4,21 @@ print("BEGIN currentop.js");
t = db.jstests_currentop;
t.drop();
-for(i=0;i<100;i++) {
- t.save({ "num": i });
+for (i = 0; i < 100; i++) {
+ t.save({"num": i});
}
print("count:" + t.count());
function ops(q) {
- printjson( db.currentOp().inprog );
+ printjson(db.currentOp().inprog);
return db.currentOp(q).inprog;
}
print("start shell");
-// sleep for a second for each (of 100) documents; can be killed in between documents & test should complete before 100 seconds
+// sleep for a second for each (of 100) documents; can be killed in between documents & test should
+// complete before 100 seconds
s1 = startParallelShell("db.jstests_currentop.count( { '$where': function() { sleep(1000); } } )");
print("sleep");
@@ -33,25 +34,26 @@ print();
// need to wait for read to start
print("wait have some ops");
-assert.soon( function(){
- return ops( { "locks.Collection": "r", "ns": "test.jstests_currentop" } ).length +
- ops({ "locks.Collection": "R", "ns": "test.jstests_currentop" }).length >= 1;
+assert.soon(function() {
+ return ops({"locks.Collection": "r", "ns": "test.jstests_currentop"}).length +
+ ops({"locks.Collection": "R", "ns": "test.jstests_currentop"}).length >=
+ 1;
}, "have_some_ops");
print("ok");
-
-s2 = startParallelShell( "db.jstests_currentop.update({ '$where': function() { sleep(150); } }," +
- " { '$inc': {num: 1} }, false, true );" );
+
+s2 = startParallelShell("db.jstests_currentop.update({ '$where': function() { sleep(150); } }," +
+ " { '$inc': {num: 1} }, false, true );");
o = [];
function f() {
- o = ops({ "ns": "test.jstests_currentop" });
+ o = ops({"ns": "test.jstests_currentop"});
printjson(o);
- var writes = ops({ "locks.Collection": "w", "ns": "test.jstests_currentop" }).length;
+ var writes = ops({"locks.Collection": "w", "ns": "test.jstests_currentop"}).length;
- var readops = ops({ "locks.Collection": "r", "ns": "test.jstests_currentop" });
+ var readops = ops({"locks.Collection": "r", "ns": "test.jstests_currentop"});
print("readops:");
printjson(readops);
var reads = readops.length;
@@ -63,10 +65,10 @@ function f() {
print("go");
-assert.soon( f, "f" );
+assert.soon(f, "f");
// avoid waiting for the operations to complete (if soon succeeded)
-for(var i in o) {
+for (var i in o) {
db.killOp(o[i].opid);
}
@@ -77,4 +79,4 @@ s1({checkExitSuccess: false});
s2({checkExitSuccess: false});
// don't want to pass if timeout killed the js function
-assert( ( new Date() ) - start < 30000 );
+assert((new Date()) - start < 30000);
diff --git a/jstests/core/currentop_predicate.js b/jstests/core/currentop_predicate.js
index ef0dd854464..98df3c9756e 100644
--- a/jstests/core/currentop_predicate.js
+++ b/jstests/core/currentop_predicate.js
@@ -1,12 +1,22 @@
// Tests the use of a match predicate with the currentOp command.
(function() {
// Test a $where predicate that matches the currentOp operation we are running.
- var res = db.adminCommand("currentOp", {$where: function() { return true; }});
+ var res = db.adminCommand("currentOp",
+ {
+ $where: function() {
+ return true;
+ }
+ });
assert.commandWorked(res);
assert.gt(res.inprog.length, 0, tojson(res));
// Test a $where predicate that matches no operations.
- res = db.adminCommand("currentOp", {$where: function() { return false; }});
+ res = db.adminCommand("currentOp",
+ {
+ $where: function() {
+ return false;
+ }
+ });
assert.commandWorked(res);
assert.eq(res.inprog.length, 0, tojson(res));
})();
diff --git a/jstests/core/cursor1.js b/jstests/core/cursor1.js
index 65c62e64f91..c98bec5044c 100644
--- a/jstests/core/cursor1.js
+++ b/jstests/core/cursor1.js
@@ -3,18 +3,18 @@ t = db.cursor1;
t.drop();
big = "";
-while ( big.length < 50000 )
+while (big.length < 50000)
big += "asdasdasdasdsdsdadsasdasdasD";
-num = Math.ceil( 10000000 / big.length );
+num = Math.ceil(10000000 / big.length);
-for ( var i=0; i<num; i++ ){
- t.save( { num : i , str : big } );
+for (var i = 0; i < num; i++) {
+ t.save({num: i, str: big});
}
-assert.eq( num , t.find().count() );
-assert.eq( num , t.find().itcount() );
+assert.eq(num, t.find().count());
+assert.eq(num, t.find().itcount());
-assert.eq( num / 2 , t.find().limit(num/2).itcount() );
+assert.eq(num / 2, t.find().limit(num / 2).itcount());
-t.drop(); // save some space
+t.drop(); // save some space
diff --git a/jstests/core/cursor2.js b/jstests/core/cursor2.js
index 2389a6a5d74..cf496db2c3f 100644
--- a/jstests/core/cursor2.js
+++ b/jstests/core/cursor2.js
@@ -1,24 +1,24 @@
/**
- * test to see if the count returned from the cursor is the number of objects that would be returned
+ * test to see if the count returned from the cursor is the number of objects that would be
+ * returned
*
* BUG 884
*/
function testCursorCountVsArrLen(dbConn) {
-
var coll = dbConn.ed_db_cursor2_ccvsal;
coll.drop();
- coll.save({ a: 1, b : 1});
- coll.save({ a: 2, b : 1});
- coll.save({ a: 3});
+ coll.save({a: 1, b: 1});
+ coll.save({a: 2, b: 1});
+ coll.save({a: 3});
- var fromCount = coll.find({}, {b:1}).count();
- var fromArrLen = coll.find({}, {b:1}).toArray().length;
+ var fromCount = coll.find({}, {b: 1}).count();
+ var fromArrLen = coll.find({}, {b: 1}).toArray().length;
- assert(fromCount == fromArrLen, "count from cursor [" + fromCount + "] != count from arrlen [" + fromArrLen + "]");
+ assert(fromCount == fromArrLen,
+ "count from cursor [" + fromCount + "] != count from arrlen [" + fromArrLen + "]");
}
-
testCursorCountVsArrLen(db);
diff --git a/jstests/core/cursor3.js b/jstests/core/cursor3.js
index fb1d95afb18..cc602d523f0 100644
--- a/jstests/core/cursor3.js
+++ b/jstests/core/cursor3.js
@@ -3,33 +3,34 @@
testNum = 1;
-function checkResults( expected, cursor , testNum ) {
- assert.eq( expected.length, cursor.count() , "testNum: " + testNum + " A : " + tojson( cursor.toArray() ) + " " + tojson( cursor.explain() ) );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ], cursor[ i ][ "a" ] , "testNum: " + testNum + " B" );
+function checkResults(expected, cursor, testNum) {
+ assert.eq(expected.length,
+ cursor.count(),
+ "testNum: " + testNum + " A : " + tojson(cursor.toArray()) + " " +
+ tojson(cursor.explain()));
+ for (i = 0; i < expected.length; ++i) {
+ assert.eq(expected[i], cursor[i]["a"], "testNum: " + testNum + " B");
}
}
t = db.cursor3;
t.drop();
-t.save( { a: 0 } );
-t.save( { a: 1 } );
-t.save( { a: 2 } );
+t.save({a: 0});
+t.save({a: 1});
+t.save({a: 2});
-t.ensureIndex( { a: 1 } );
+t.ensureIndex({a: 1});
+checkResults([1], t.find({a: 1}).sort({a: 1}).hint({a: 1}), testNum++);
+checkResults([1], t.find({a: 1}).sort({a: -1}).hint({a: 1}), testNum++);
+checkResults([1, 2], t.find({a: {$gt: 0}}).sort({a: 1}).hint({a: 1}), testNum++);
+checkResults([2, 1], t.find({a: {$gt: 0}}).sort({a: -1}).hint({a: 1}), testNum++);
+checkResults([1, 2], t.find({a: {$gte: 1}}).sort({a: 1}).hint({a: 1}), testNum++);
+checkResults([2, 1], t.find({a: {$gte: 1}}).sort({a: -1}).hint({a: 1}), testNum++);
-checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ );
-
-checkResults( [ 1, 2 ], t.find( { a: { $gt: 0 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 2, 1 ], t.find( { a: { $gt: 0 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 1, 2 ], t.find( { a: { $gte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 2, 1 ], t.find( { a: { $gte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ );
-
-checkResults( [ 0, 1 ], t.find( { a: { $lt: 2 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 1, 0 ], t.find( { a: { $lt: 2 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 0, 1 ], t.find( { a: { $lte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ );
-checkResults( [ 1, 0 ], t.find( { a: { $lte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ );
+checkResults([0, 1], t.find({a: {$lt: 2}}).sort({a: 1}).hint({a: 1}), testNum++);
+checkResults([1, 0], t.find({a: {$lt: 2}}).sort({a: -1}).hint({a: 1}), testNum++);
+checkResults([0, 1], t.find({a: {$lte: 1}}).sort({a: 1}).hint({a: 1}), testNum++);
+checkResults([1, 0], t.find({a: {$lte: 1}}).sort({a: -1}).hint({a: 1}), testNum++);
diff --git a/jstests/core/cursor4.js b/jstests/core/cursor4.js
index b08a72f62e5..dd7875d0836 100644
--- a/jstests/core/cursor4.js
+++ b/jstests/core/cursor4.js
@@ -1,47 +1,53 @@
// Test inequality bounds with multi-field sorting
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a, cursor[ i ].a );
- assert.eq( expected[ i ].b, cursor[ i ].b );
+function checkResults(expected, cursor) {
+ assert.eq(expected.length, cursor.count());
+ for (i = 0; i < expected.length; ++i) {
+ assert.eq(expected[i].a, cursor[i].a);
+ assert.eq(expected[i].b, cursor[i].b);
}
}
-function testConstrainedFindMultiFieldSorting( db ) {
+function testConstrainedFindMultiFieldSorting(db) {
r = db.ed_db_cursor4_cfmfs;
r.drop();
- entries = [ { a: 0, b: 0 },
- { a: 0, b: 1 },
- { a: 1, b: 1 },
- { a: 1, b: 1 },
- { a: 2, b: 0 } ];
- for( i = 0; i < entries.length; ++i )
- r.save( entries[ i ] );
- r.ensureIndex( { a: 1, b: 1 } );
+ entries = [{a: 0, b: 0}, {a: 0, b: 1}, {a: 1, b: 1}, {a: 1, b: 1}, {a: 2, b: 0}];
+ for (i = 0; i < entries.length; ++i)
+ r.save(entries[i]);
+ r.ensureIndex({a: 1, b: 1});
reverseEntries = entries.slice();
reverseEntries.reverse();
- checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 2, 5 ), r.find( { a: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 0, 3 ), r.find( { a: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 0, 4 ), r.find( { a: { $lt: 2 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 5 ), r.find( { a: { $lt: 2 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 4, 5 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 2, 4 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( reverseEntries.slice( 0, 1 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 3 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( entries.slice( 0, 1 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( entries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-
- checkResults( reverseEntries.slice( 4, 5 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
- checkResults( reverseEntries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults(entries.slice(2, 4), r.find({a: 1, b: 1}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+ checkResults(entries.slice(2, 4), r.find({a: 1, b: 1}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+
+ checkResults(entries.slice(2, 5), r.find({a: {$gt: 0}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+ checkResults(reverseEntries.slice(0, 3),
+ r.find({a: {$gt: 0}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+ checkResults(entries.slice(0, 4), r.find({a: {$lt: 2}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+ checkResults(reverseEntries.slice(1, 5),
+ r.find({a: {$lt: 2}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+
+ checkResults(entries.slice(4, 5),
+ r.find({a: {$gt: 0}, b: {$lt: 1}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+ checkResults(entries.slice(2, 4),
+ r.find({a: {$gt: 0}, b: {$gt: 0}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+
+ checkResults(reverseEntries.slice(0, 1),
+ r.find({a: {$gt: 0}, b: {$lt: 1}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+ checkResults(reverseEntries.slice(1, 3),
+ r.find({a: {$gt: 0}, b: {$gt: 0}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+
+ checkResults(entries.slice(0, 1),
+ r.find({a: {$lt: 2}, b: {$lt: 1}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+ checkResults(entries.slice(1, 4),
+ r.find({a: {$lt: 2}, b: {$gt: 0}}).sort({a: 1, b: 1}).hint({a: 1, b: 1}));
+
+ checkResults(reverseEntries.slice(4, 5),
+ r.find({a: {$lt: 2}, b: {$lt: 1}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
+ checkResults(reverseEntries.slice(1, 4),
+ r.find({a: {$lt: 2}, b: {$gt: 0}}).sort({a: -1, b: -1}).hint({a: 1, b: 1}));
}
-testConstrainedFindMultiFieldSorting( db );
+testConstrainedFindMultiFieldSorting(db);
diff --git a/jstests/core/cursor5.js b/jstests/core/cursor5.js
index 6434d2b3887..64158eaccec 100644
--- a/jstests/core/cursor5.js
+++ b/jstests/core/cursor5.js
@@ -1,36 +1,46 @@
// Test bounds with subobject indexes.
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a.b, cursor[ i ].a.b );
- assert.eq( expected[ i ].a.c, cursor[ i ].a.c );
- assert.eq( expected[ i ].a.d, cursor[ i ].a.d );
- assert.eq( expected[ i ].e, cursor[ i ].e );
+function checkResults(expected, cursor) {
+ assert.eq(expected.length, cursor.count());
+ for (i = 0; i < expected.length; ++i) {
+ assert.eq(expected[i].a.b, cursor[i].a.b);
+ assert.eq(expected[i].a.c, cursor[i].a.c);
+ assert.eq(expected[i].a.d, cursor[i].a.d);
+ assert.eq(expected[i].e, cursor[i].e);
}
}
-function testBoundsWithSubobjectIndexes( db ) {
+function testBoundsWithSubobjectIndexes(db) {
r = db.ed_db_cursor5_bwsi;
r.drop();
- z = [ { a: { b: 1, c: 2, d: 3 }, e: 4 },
- { a: { b: 1, c: 2, d: 3 }, e: 5 },
- { a: { b: 1, c: 2, d: 4 }, e: 4 },
- { a: { b: 1, c: 2, d: 4 }, e: 5 },
- { a: { b: 2, c: 2, d: 3 }, e: 4 },
- { a: { b: 2, c: 2, d: 3 }, e: 5 } ];
- for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
- idx = { "a.d": 1, a: 1, e: -1 };
- rIdx = { "a.d": -1, a: -1, e: 1 };
- r.ensureIndex( idx );
+ z = [
+ {a: {b: 1, c: 2, d: 3}, e: 4},
+ {a: {b: 1, c: 2, d: 3}, e: 5},
+ {a: {b: 1, c: 2, d: 4}, e: 4},
+ {a: {b: 1, c: 2, d: 4}, e: 5},
+ {a: {b: 2, c: 2, d: 3}, e: 4},
+ {a: {b: 2, c: 2, d: 3}, e: 5}
+ ];
+ for (i = 0; i < z.length; ++i)
+ r.save(z[i]);
+ idx = {
+ "a.d": 1,
+ a: 1,
+ e: -1
+ };
+ rIdx = {
+ "a.d": -1,
+ a: -1,
+ e: 1
+ };
+ r.ensureIndex(idx);
- checkResults( [ z[ 0 ], z[ 4 ], z[ 2 ] ], r.find( { e: 4 } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 1 ], z[ 3 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( idx ).hint( idx ) );
+ checkResults([z[0], z[4], z[2]], r.find({e: 4}).sort(idx).hint(idx));
+ checkResults([z[1], z[3]], r.find({e: {$gt: 4}, "a.b": 1}).sort(idx).hint(idx));
- checkResults( [ z[ 2 ], z[ 4 ], z[ 0 ] ], r.find( { e: 4 } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 3 ], z[ 1 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( rIdx ).hint( idx ) );
+ checkResults([z[2], z[4], z[0]], r.find({e: 4}).sort(rIdx).hint(idx));
+ checkResults([z[3], z[1]], r.find({e: {$gt: 4}, "a.b": 1}).sort(rIdx).hint(idx));
}
-testBoundsWithSubobjectIndexes( db );
+testBoundsWithSubobjectIndexes(db);
diff --git a/jstests/core/cursor6.js b/jstests/core/cursor6.js
index bb0af64cfc9..f793d37bfe5 100644
--- a/jstests/core/cursor6.js
+++ b/jstests/core/cursor6.js
@@ -1,66 +1,68 @@
// Test different directions for compound indexes
-function eq( one, two ) {
- assert.eq( one.a, two.a );
- assert.eq( one.b, two.b );
+function eq(one, two) {
+ assert.eq(one.a, two.a);
+ assert.eq(one.b, two.b);
}
-function check( indexed ) {
+function check(indexed) {
var hint;
- if ( indexed ) {
- hint = { a: 1, b: -1 };
+ if (indexed) {
+ hint = {
+ a: 1,
+ b: -1
+ };
} else {
- hint = { $natural: 1 };
+ hint = {
+ $natural: 1
+ };
}
- f = r.find().sort( { a: 1, b: 1 } ).hint( hint );
- eq( z[ 0 ], f[ 0 ] );
- eq( z[ 1 ], f[ 1 ] );
- eq( z[ 2 ], f[ 2 ] );
- eq( z[ 3 ], f[ 3 ] );
+ f = r.find().sort({a: 1, b: 1}).hint(hint);
+ eq(z[0], f[0]);
+ eq(z[1], f[1]);
+ eq(z[2], f[2]);
+ eq(z[3], f[3]);
- f = r.find().sort( { a: 1, b: -1 } ).hint( hint );
- eq( z[ 1 ], f[ 0 ] );
- eq( z[ 0 ], f[ 1 ] );
- eq( z[ 3 ], f[ 2 ] );
- eq( z[ 2 ], f[ 3 ] );
+ f = r.find().sort({a: 1, b: -1}).hint(hint);
+ eq(z[1], f[0]);
+ eq(z[0], f[1]);
+ eq(z[3], f[2]);
+ eq(z[2], f[3]);
- f = r.find().sort( { a: -1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
- eq( z[ 0 ], f[ 2 ] );
- eq( z[ 1 ], f[ 3 ] );
+ f = r.find().sort({a: -1, b: 1}).hint(hint);
+ eq(z[2], f[0]);
+ eq(z[3], f[1]);
+ eq(z[0], f[2]);
+ eq(z[1], f[3]);
- f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint );
- eq( z[ 3 ], f[ 0 ] );
- eq( z[ 2 ], f[ 1 ] );
+ f = r.find({a: {$gte: 2}}).sort({a: 1, b: -1}).hint(hint);
+ eq(z[3], f[0]);
+ eq(z[2], f[1]);
- f = r.find( { a: { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
+ f = r.find({a: {$gte: 2}}).sort({a: -1, b: 1}).hint(hint);
+ eq(z[2], f[0]);
+ eq(z[3], f[1]);
- f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint );
- eq( z[ 2 ], f[ 0 ] );
- eq( z[ 3 ], f[ 1 ] );
+ f = r.find({a: {$gte: 2}}).sort({a: 1, b: 1}).hint(hint);
+ eq(z[2], f[0]);
+ eq(z[3], f[1]);
- f = r.find().sort( { a: -1, b: -1 } ).hint( hint );
- eq( z[ 3 ], f[ 0 ] );
- eq( z[ 2 ], f[ 1 ] );
- eq( z[ 1 ], f[ 2 ] );
- eq( z[ 0 ], f[ 3 ] );
+ f = r.find().sort({a: -1, b: -1}).hint(hint);
+ eq(z[3], f[0]);
+ eq(z[2], f[1]);
+ eq(z[1], f[2]);
+ eq(z[0], f[3]);
}
r = db.ed_db_cursor6;
r.drop();
-z = [ { a: 1, b: 1 },
- { a: 1, b: 2 },
- { a: 2, b: 1 },
- { a: 2, b: 2 } ];
-for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
+z = [{a: 1, b: 1}, {a: 1, b: 2}, {a: 2, b: 1}, {a: 2, b: 2}];
+for (i = 0; i < z.length; ++i)
+ r.save(z[i]);
-r.ensureIndex( { a: 1, b: -1 } );
+r.ensureIndex({a: 1, b: -1});
-check( false );
-check( true );
+check(false);
+check(true);
diff --git a/jstests/core/cursor7.js b/jstests/core/cursor7.js
index 336beafaa90..6e77a144ba3 100644
--- a/jstests/core/cursor7.js
+++ b/jstests/core/cursor7.js
@@ -1,40 +1,67 @@
// Test bounds with multiple inequalities and sorting.
-function checkResults( expected, cursor ) {
- assert.eq( expected.length, cursor.count() );
- for( i = 0; i < expected.length; ++i ) {
- assert.eq( expected[ i ].a, cursor[ i ].a );
- assert.eq( expected[ i ].b, cursor[ i ].b );
+function checkResults(expected, cursor) {
+ assert.eq(expected.length, cursor.count());
+ for (i = 0; i < expected.length; ++i) {
+ assert.eq(expected[i].a, cursor[i].a);
+ assert.eq(expected[i].b, cursor[i].b);
}
}
-function testMultipleInequalities( db ) {
+function testMultipleInequalities(db) {
r = db.ed_db_cursor_mi;
r.drop();
- z = [ { a: 1, b: 2 },
- { a: 3, b: 4 },
- { a: 5, b: 6 },
- { a: 7, b: 8 } ];
- for( i = 0; i < z.length; ++i )
- r.save( z[ i ] );
- idx = { a: 1, b: 1 };
- rIdx = { a: -1, b: -1 };
- r.ensureIndex( idx );
-
- checkResults( [ z[ 2 ], z[ 3 ] ], r.find( { a: { $gt: 3 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( idx ).hint( idx ) );
-
- checkResults( [ z[ 3 ], z[ 2 ] ], r.find( { a: { $gt: 3 } } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
- checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( rIdx ).hint( idx ) );
-
- checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( rIdx ).hint( idx ) );
-
- checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
- checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
+ z = [{a: 1, b: 2}, {a: 3, b: 4}, {a: 5, b: 6}, {a: 7, b: 8}];
+ for (i = 0; i < z.length; ++i)
+ r.save(z[i]);
+ idx = {
+ a: 1,
+ b: 1
+ };
+ rIdx = {
+ a: -1,
+ b: -1
+ };
+ r.ensureIndex(idx);
+
+ checkResults([z[2], z[3]], r.find({a: {$gt: 3}}).sort(idx).hint(idx));
+ checkResults([z[2]], r.find({a: {$gt: 3, $lt: 7}}).sort(idx).hint(idx));
+ checkResults([z[2]], r.find({a: {$gt: 3, $lt: 7, $lte: 5}}).sort(idx).hint(idx));
+
+ checkResults([z[3], z[2]], r.find({a: {$gt: 3}}).sort(rIdx).hint(idx));
+ checkResults([z[2]], r.find({a: {$gt: 3, $lt: 7}}).sort(rIdx).hint(idx));
+ checkResults([z[2]], r.find({a: {$gt: 3, $lt: 7, $lte: 5}}).sort(rIdx).hint(idx));
+
+ checkResults([z[1], z[2]],
+ r.find({
+ a: {$gt: 1, $lt: 7, $gte: 3, $lte: 5},
+ b: {$gt: 2, $lt: 8, $gte: 4, $lte: 6}
+ })
+ .sort(idx)
+ .hint(idx));
+ checkResults([z[2], z[1]],
+ r.find({
+ a: {$gt: 1, $lt: 7, $gte: 3, $lte: 5},
+ b: {$gt: 2, $lt: 8, $gte: 4, $lte: 6}
+ })
+ .sort(rIdx)
+ .hint(idx));
+
+ checkResults([z[1], z[2]],
+ r.find({
+ a: {$gte: 1, $lte: 7, $gt: 2, $lt: 6},
+ b: {$gte: 2, $lte: 8, $gt: 3, $lt: 7}
+ })
+ .sort(idx)
+ .hint(idx));
+ checkResults([z[2], z[1]],
+ r.find({
+ a: {$gte: 1, $lte: 7, $gt: 2, $lt: 6},
+ b: {$gte: 2, $lte: 8, $gt: 3, $lt: 7}
+ })
+ .sort(rIdx)
+ .hint(idx));
}
-testMultipleInequalities( db );
+testMultipleInequalities(db);
diff --git a/jstests/core/cursora.js b/jstests/core/cursora.js
index 0a69c4e5592..dfd9e28f281 100644
--- a/jstests/core/cursora.js
+++ b/jstests/core/cursora.js
@@ -1,21 +1,21 @@
t = db.cursora;
-function run( n , atomic ){
- if( !isNumber(n) ) {
- print("n:");
- printjson(n);
- assert(isNumber(n), "cursora.js isNumber");
+function run(n, atomic) {
+ if (!isNumber(n)) {
+ print("n:");
+ printjson(n);
+ assert(isNumber(n), "cursora.js isNumber");
}
t.drop();
var bulk = t.initializeUnorderedBulkOp();
- for ( i=0; i<n; i++ )
- bulk.insert( { _id : i } );
+ for (i = 0; i < n; i++)
+ bulk.insert({_id: i});
assert.writeOK(bulk.execute());
- print("cursora.js startParallelShell n:"+n+" atomic:"+atomic);
- join = startParallelShell( "sleep(50);" +
- "db.cursora.remove({" + ( atomic ? "$atomic:true" : "" ) + "});" );
+ print("cursora.js startParallelShell n:" + n + " atomic:" + atomic);
+ join = startParallelShell("sleep(50);" + "db.cursora.remove({" +
+ (atomic ? "$atomic:true" : "") + "});");
var start = null;
var ex = null;
@@ -23,14 +23,16 @@ function run( n , atomic ){
var end = null;
try {
start = new Date();
- num = t.find(function () {
+ num = t.find(function() {
num = 2;
- for (var x = 0; x < 1000; x++) num += 2;
+ for (var x = 0; x < 1000; x++)
+ num += 2;
return num > 0;
- }).sort({ _id: -1 }).itcount();
+ })
+ .sort({_id: -1})
+ .itcount();
end = new Date();
- }
- catch (e) {
+ } catch (e) {
print("cursora.js FAIL " + e);
join();
throw e;
@@ -38,15 +40,16 @@ function run( n , atomic ){
join();
- //print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) )
- assert.eq( 0 , t.count() , "after remove: " + tojson( ex ) );
- // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more reliable so cen re-enable assert
- if ( n == num )
- print( "cursora.js warning: shouldn't have counted all n: " + n + " num: " + num );
+ // print( "cursora.js num: " + num + " time:" + ( end.getTime() - start.getTime() ) )
+ assert.eq(0, t.count(), "after remove: " + tojson(ex));
+ // assert.lt( 0 , ex.nYields , "not enough yields : " + tojson( ex ) ); // TODO make this more
+    // reliable so we can re-enable the assert
+ if (n == num)
+ print("cursora.js warning: shouldn't have counted all n: " + n + " num: " + num);
}
-run( 1500 );
-run( 5000 );
-run( 1500 , true );
-run( 5000 , true );
+run(1500);
+run(5000);
+run(1500, true);
+run(5000, true);
print("cursora.js SUCCESS");
diff --git a/jstests/core/cursorb.js b/jstests/core/cursorb.js
index 70f49c50454..62c6db802dd 100644
--- a/jstests/core/cursorb.js
+++ b/jstests/core/cursorb.js
@@ -5,13 +5,14 @@ t = db.jstests_cursorb;
t.drop();
// Exhaust a client cursor in get more.
-for( i = 0; i < 200; ++i ) {
- t.save( { a:i } );
+for (i = 0; i < 200; ++i) {
+ t.save({a: i});
}
t.find().itcount();
// Check that the 'cursor not found in map -1' message is not printed. This message indicates an
// attempt to look up a cursor with an invalid id and should never appear in the log.
-log = db.adminCommand( { getLog:'global' } ).log;
-log.forEach( function( line ) { assert( !line.match( /cursor not found in map -1 / ),
- 'Cursor map lookup with id -1.' ); } );
+log = db.adminCommand({getLog: 'global'}).log;
+log.forEach(function(line) {
+ assert(!line.match(/cursor not found in map -1 /), 'Cursor map lookup with id -1.');
+});
diff --git a/jstests/core/datasize2.js b/jstests/core/datasize2.js
index d83894bc189..6cb5b9b10d9 100644
--- a/jstests/core/datasize2.js
+++ b/jstests/core/datasize2.js
@@ -4,30 +4,36 @@
//
(function() {
-"use strict";
+ "use strict";
-var coll = db.foo;
-var adminDB = db.getSiblingDB('admin');
-coll.drop();
+ var coll = db.foo;
+ var adminDB = db.getSiblingDB('admin');
+ coll.drop();
-var N = 1000;
-for (var i = 0; i < N; i++) {
- coll.insert({_id: i, s: "asdasdasdasdasdasdasd"});
-}
+ var N = 1000;
+ for (var i = 0; i < N; i++) {
+ coll.insert({_id: i, s: "asdasdasdasdasdasdasd"});
+ }
-var dataSizeCommand = { "dataSize": "test.foo",
- "keyPattern": { "_id" : 1 },
- "min": { "_id" : 0 },
- "max": { "_id" : N } };
+ var dataSizeCommand = {
+ "dataSize": "test.foo",
+ "keyPattern": {"_id": 1},
+ "min": {"_id": 0},
+ "max": {"_id": N}
+ };
-assert.eq(N, db.runCommand(dataSizeCommand).numObjects,
- "dataSize command on 'test.foo' failed when called on the 'test' DB.");
-assert.eq(N, adminDB.runCommand(dataSizeCommand).numObjects,
- "dataSize command on 'test.foo' failed when called on the 'admin' DB.");
+ assert.eq(N,
+ db.runCommand(dataSizeCommand).numObjects,
+ "dataSize command on 'test.foo' failed when called on the 'test' DB.");
+ assert.eq(N,
+ adminDB.runCommand(dataSizeCommand).numObjects,
+ "dataSize command on 'test.foo' failed when called on the 'admin' DB.");
-dataSizeCommand.maxObjects = 100;
-assert.eq(101, db.runCommand(dataSizeCommand).numObjects,
- "dataSize command with max number of objects set failed on 'test' DB");
-assert.eq(101, db.runCommand(dataSizeCommand).numObjects,
- "dataSize command with max number of objects set failed on 'admin' DB");
+ dataSizeCommand.maxObjects = 100;
+ assert.eq(101,
+ db.runCommand(dataSizeCommand).numObjects,
+ "dataSize command with max number of objects set failed on 'test' DB");
+ assert.eq(101,
+ db.runCommand(dataSizeCommand).numObjects,
+ "dataSize command with max number of objects set failed on 'admin' DB");
})();
diff --git a/jstests/core/date1.js b/jstests/core/date1.js
index 7e893ea1355..65449c662b5 100644
--- a/jstests/core/date1.js
+++ b/jstests/core/date1.js
@@ -1,17 +1,15 @@
t = db.date1;
-
-function go( d , msg ){
+function go(d, msg) {
t.drop();
- t.save({ a: 1, d: d });
-// printjson(d);
-// printjson(t.findOne().d);
- assert.eq( d , t.findOne().d , msg );
+ t.save({a: 1, d: d});
+ // printjson(d);
+ // printjson(t.findOne().d);
+ assert.eq(d, t.findOne().d, msg);
}
-go( new Date() , "A" );
-go( new Date( 1 ) , "B");
-go( new Date( 0 ) , "C (old spidermonkey lib fails this test)");
+go(new Date(), "A");
+go(new Date(1), "B");
+go(new Date(0), "C (old spidermonkey lib fails this test)");
go(new Date(-10), "neg");
-
diff --git a/jstests/core/date2.js b/jstests/core/date2.js
index 9e3af9b7555..2980f10bf7a 100644
--- a/jstests/core/date2.js
+++ b/jstests/core/date2.js
@@ -3,9 +3,11 @@
t = db.jstests_date2;
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-var obj = {a:new Timestamp(0, 1)}; // in old versions this was == to new Date(1)
-t.save( obj );
-assert.eq( 0, t.find( {a:{$gt:new Date(1)}} ).itcount() );
-assert.eq( 1, t.find(obj).itcount() );
+var obj = {
+ a: new Timestamp(0, 1)
+}; // in old versions this was == to new Date(1)
+t.save(obj);
+assert.eq(0, t.find({a: {$gt: new Date(1)}}).itcount());
+assert.eq(1, t.find(obj).itcount());
diff --git a/jstests/core/date3.js b/jstests/core/date3.js
index 8329cca0dfe..9f7204bb52a 100644
--- a/jstests/core/date3.js
+++ b/jstests/core/date3.js
@@ -7,25 +7,25 @@ d1 = new Date(-1000);
dz = new Date(0);
d2 = new Date(1000);
-t.save( {x: 3, d: dz} );
-t.save( {x: 2, d: d2} );
-t.save( {x: 1, d: d1} );
+t.save({x: 3, d: dz});
+t.save({x: 2, d: d2});
+t.save({x: 1, d: d1});
-function test () {
- var list = t.find( {d: {$lt: dz}} );
- assert.eq ( 1, list.size() );
- assert.eq ( 1, list[0].x );
- assert.eq ( d1, list[0].d );
- var list = t.find( {d: {$gt: dz}} );
- assert.eq ( 1, list.size() );
- assert.eq ( 2, list[0].x );
- var list = t.find().sort( {d:1} );
- assert.eq ( 3, list.size() );
- assert.eq ( 1, list[0].x );
- assert.eq ( 3, list[1].x );
- assert.eq ( 2, list[2].x );
+function test() {
+ var list = t.find({d: {$lt: dz}});
+ assert.eq(1, list.size());
+ assert.eq(1, list[0].x);
+ assert.eq(d1, list[0].d);
+ var list = t.find({d: {$gt: dz}});
+ assert.eq(1, list.size());
+ assert.eq(2, list[0].x);
+ var list = t.find().sort({d: 1});
+ assert.eq(3, list.size());
+ assert.eq(1, list[0].x);
+ assert.eq(3, list[1].x);
+ assert.eq(2, list[2].x);
}
test();
-t.ensureIndex( {d: 1} );
+t.ensureIndex({d: 1});
test();
diff --git a/jstests/core/db.js b/jstests/core/db.js
index 66a0bd73ede..cc698f483c7 100644
--- a/jstests/core/db.js
+++ b/jstests/core/db.js
@@ -1,11 +1,14 @@
function testInvalidDBNameThrowsExceptionWithConstructor() {
- assert.throws( function() { return new DB( null, "/\\" ); } );
+ assert.throws(function() {
+ return new DB(null, "/\\");
+ });
}
function testInvalidDBNameThrowsExceptionWithSibling() {
- assert.throws( function() { return db.getSiblingDB( "/\\" ); } );
+ assert.throws(function() {
+ return db.getSiblingDB("/\\");
+ });
}
testInvalidDBNameThrowsExceptionWithConstructor();
testInvalidDBNameThrowsExceptionWithSibling();
-
diff --git a/jstests/core/dbadmin.js b/jstests/core/dbadmin.js
index 94ae45d34c1..43af2df057e 100644
--- a/jstests/core/dbadmin.js
+++ b/jstests/core/dbadmin.js
@@ -1,33 +1,33 @@
load('jstests/aggregation/extras/utils.js');
(function() {
-'use strict';
-
-var t = db.dbadmin;
-t.save( { x : 1 } );
-t.save( { x : 1 } );
-
-var res = db._adminCommand( "listDatabases" );
-assert( res.databases && res.databases.length > 0 , "listDatabases 1 " + tojson(res) );
-
-var now = new Date();
-var x = db._adminCommand( "ismaster" );
-assert( x.ismaster , "ismaster failed: " + tojson( x ) );
-assert( x.localTime, "ismaster didn't include time: " + tojson(x));
-
-var localTimeSkew = x.localTime - now;
-if ( localTimeSkew >= 50 ) {
- print( "Warning: localTimeSkew " + localTimeSkew + " > 50ms." );
-}
-assert.lt( localTimeSkew, 500, "isMaster.localTime" );
-
-var before = db.runCommand( "serverStatus" );
-print(before.uptimeEstimate);
-sleep( 5000 );
-
-var after = db.runCommand( "serverStatus" );
-print(after.uptimeEstimate);
-assert.gte( after.uptimeEstimate, before.uptimeEstimate,
- "uptime estimate should be non-decreasing" );
+ 'use strict';
+
+ var t = db.dbadmin;
+ t.save({x: 1});
+ t.save({x: 1});
+
+ var res = db._adminCommand("listDatabases");
+ assert(res.databases && res.databases.length > 0, "listDatabases 1 " + tojson(res));
+
+ var now = new Date();
+ var x = db._adminCommand("ismaster");
+ assert(x.ismaster, "ismaster failed: " + tojson(x));
+ assert(x.localTime, "ismaster didn't include time: " + tojson(x));
+
+ var localTimeSkew = x.localTime - now;
+ if (localTimeSkew >= 50) {
+ print("Warning: localTimeSkew " + localTimeSkew + " > 50ms.");
+ }
+ assert.lt(localTimeSkew, 500, "isMaster.localTime");
+
+ var before = db.runCommand("serverStatus");
+ print(before.uptimeEstimate);
+ sleep(5000);
+
+ var after = db.runCommand("serverStatus");
+ print(after.uptimeEstimate);
+ assert.gte(
+ after.uptimeEstimate, before.uptimeEstimate, "uptime estimate should be non-decreasing");
})();
diff --git a/jstests/core/dbcase.js b/jstests/core/dbcase.js
index 5663046443f..033608a3f6a 100644
--- a/jstests/core/dbcase.js
+++ b/jstests/core/dbcase.js
@@ -1,26 +1,26 @@
// Check db name duplication constraint SERVER-2111
-a = db.getSisterDB( "dbcasetest_dbnamea" );
-b = db.getSisterDB( "dbcasetest_dbnameA" );
+a = db.getSisterDB("dbcasetest_dbnamea");
+b = db.getSisterDB("dbcasetest_dbnameA");
a.dropDatabase();
b.dropDatabase();
-assert.writeOK( a.foo.save( { x : 1 } ));
+assert.writeOK(a.foo.save({x: 1}));
-res = b.foo.save( { x : 1 } );
-assert.writeError( res );
+res = b.foo.save({x: 1});
+assert.writeError(res);
-assert.neq( -1, db.getMongo().getDBNames().indexOf( a.getName() ) );
-assert.eq( -1, db.getMongo().getDBNames().indexOf( b.getName() ) );
-printjson( db.getMongo().getDBs().databases );
+assert.neq(-1, db.getMongo().getDBNames().indexOf(a.getName()));
+assert.eq(-1, db.getMongo().getDBNames().indexOf(b.getName()));
+printjson(db.getMongo().getDBs().databases);
a.dropDatabase();
b.dropDatabase();
-ai = db.getMongo().getDBNames().indexOf( a.getName() );
-bi = db.getMongo().getDBNames().indexOf( b.getName() );
+ai = db.getMongo().getDBNames().indexOf(a.getName());
+bi = db.getMongo().getDBNames().indexOf(b.getName());
// One of these dbs may exist if there is a slave active, but they must
// not both exist.
-assert( ai == -1 || bi == -1 );
-printjson( db.getMongo().getDBs().databases );
+assert(ai == -1 || bi == -1);
+printjson(db.getMongo().getDBs().databases);
diff --git a/jstests/core/dbcase2.js b/jstests/core/dbcase2.js
index 37c037001d8..2c3517e5e63 100644
--- a/jstests/core/dbcase2.js
+++ b/jstests/core/dbcase2.js
@@ -1,7 +1,8 @@
-// SERVER-2111 Check that an in memory db name will block creation of a db with a similar but differently cased name.
+// SERVER-2111 Check that an in-memory db name will block creation of a db with a similar but
+// differently cased name.
-var dbLowerCase = db.getSisterDB( "dbcase2test_dbnamea" );
-var dbUpperCase = db.getSisterDB( "dbcase2test_dbnameA" );
+var dbLowerCase = db.getSisterDB("dbcase2test_dbnamea");
+var dbUpperCase = db.getSisterDB("dbcase2test_dbnameA");
var resultLower = dbLowerCase.c.insert({});
assert.eq(1, resultLower.nInserted);
@@ -10,4 +11,4 @@ var resultUpper = dbUpperCase.c.insert({});
assert.eq(0, resultUpper.nInserted);
assert.writeError(resultUpper);
-assert.eq( -1, db.getMongo().getDBNames().indexOf( "dbcase2test_dbnameA" ) );
+assert.eq(-1, db.getMongo().getDBNames().indexOf("dbcase2test_dbnameA"));
diff --git a/jstests/core/dbhash.js b/jstests/core/dbhash.js
index 7fea4b4d50c..cc202656fbe 100644
--- a/jstests/core/dbhash.js
+++ b/jstests/core/dbhash.js
@@ -6,53 +6,54 @@ a.drop();
b.drop();
// debug SERVER-761
-db.getCollectionNames().forEach( function( x ) {
- v = db[ x ].validate();
- if ( !v.valid ) {
- print( x );
- printjson( v );
- }
- } );
-
-function dbhash( mydb ) {
- var ret = mydb.runCommand( "dbhash" );
- assert.commandWorked( ret, "dbhash failure" );
+db.getCollectionNames().forEach(function(x) {
+ v = db[x].validate();
+ if (!v.valid) {
+ print(x);
+ printjson(v);
+ }
+});
+
+function dbhash(mydb) {
+ var ret = mydb.runCommand("dbhash");
+ assert.commandWorked(ret, "dbhash failure");
return ret;
}
-function gh( coll , mydb ){
- if ( ! mydb ) mydb = db;
- var x = dbhash( mydb ).collections[coll.getName()];
- if ( ! x )
+function gh(coll, mydb) {
+ if (!mydb)
+ mydb = db;
+ var x = dbhash(mydb).collections[coll.getName()];
+ if (!x)
return "";
return x;
}
-function dbh( mydb ){
- return dbhash( mydb ).md5;
+function dbh(mydb) {
+ return dbhash(mydb).md5;
}
-assert.eq( gh( a ) , gh( b ) , "A1" );
+assert.eq(gh(a), gh(b), "A1");
-a.insert( { _id : 5 } );
-assert.neq( gh( a ) , gh( b ) , "A2" );
+a.insert({_id: 5});
+assert.neq(gh(a), gh(b), "A2");
-b.insert( { _id : 5 } );
-assert.eq( gh( a ) , gh( b ) , "A3" );
+b.insert({_id: 5});
+assert.eq(gh(a), gh(b), "A3");
-dba = db.getSisterDB( "dbhasha" );
-dbb = db.getSisterDB( "dbhashb" );
+dba = db.getSisterDB("dbhasha");
+dbb = db.getSisterDB("dbhashb");
dba.dropDatabase();
dbb.dropDatabase();
-assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" );
-assert.eq( dbh( dba ) , dbh( dbb ) , "C1" );
+assert.eq(gh(dba.foo, dba), gh(dbb.foo, dbb), "B1");
+assert.eq(dbh(dba), dbh(dbb), "C1");
-dba.foo.insert( { _id : 5 } );
-assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" );
-assert.neq( dbh( dba ) , dbh( dbb ) , "C2" );
+dba.foo.insert({_id: 5});
+assert.neq(gh(dba.foo, dba), gh(dbb.foo, dbb), "B2");
+assert.neq(dbh(dba), dbh(dbb), "C2");
-dbb.foo.insert( { _id : 5 } );
-assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" );
-assert.eq( dbh( dba ) , dbh( dbb ) , "C3" );
+dbb.foo.insert({_id: 5});
+assert.eq(gh(dba.foo, dba), gh(dbb.foo, dbb), "B3");
+assert.eq(dbh(dba), dbh(dbb), "C3");
diff --git a/jstests/core/dbhash2.js b/jstests/core/dbhash2.js
index 74cc1fb8422..8e779ca7806 100644
--- a/jstests/core/dbhash2.js
+++ b/jstests/core/dbhash2.js
@@ -1,23 +1,21 @@
-mydb = db.getSisterDB( "config" );
+mydb = db.getSisterDB("config");
t = mydb.foo;
t.drop();
-t.insert( { x : 1 } );
-res1 = mydb.runCommand( "dbhash" );
-assert( res1.fromCache.indexOf( "config.foo" ) == -1 );
+t.insert({x: 1});
+res1 = mydb.runCommand("dbhash");
+assert(res1.fromCache.indexOf("config.foo") == -1);
-res2 = mydb.runCommand( "dbhash" );
-assert( res2.fromCache.indexOf( "config.foo" ) >= 0 );
-assert.eq( res1.collections.foo, res2.collections.foo );
+res2 = mydb.runCommand("dbhash");
+assert(res2.fromCache.indexOf("config.foo") >= 0);
+assert.eq(res1.collections.foo, res2.collections.foo);
-t.insert( { x : 2 } );
-res3 = mydb.runCommand( "dbhash" );
-assert( res3.fromCache.indexOf( "config.foo" ) < 0 );
-assert.neq( res1.collections.foo, res3.collections.foo );
+t.insert({x: 2});
+res3 = mydb.runCommand("dbhash");
+assert(res3.fromCache.indexOf("config.foo") < 0);
+assert.neq(res1.collections.foo, res3.collections.foo);
// Validate dbHash with an empty database does not trigger an fassert/invariant
-assert.commandFailed(db.runCommand( {"dbhash" : "" }));
-
-
+assert.commandFailed(db.runCommand({"dbhash": ""}));
diff --git a/jstests/core/dbref1.js b/jstests/core/dbref1.js
index 4a827662c1a..b5bb06f230d 100644
--- a/jstests/core/dbref1.js
+++ b/jstests/core/dbref1.js
@@ -5,6 +5,6 @@ b = db.dbref1b;
a.drop();
b.drop();
-a.save( { name : "eliot" } );
-b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } );
-assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
+a.save({name: "eliot"});
+b.save({num: 1, link: new DBPointer("dbref1a", a.findOne()._id)});
+assert.eq("eliot", b.findOne().link.fetch().name, "A");
diff --git a/jstests/core/dbref2.js b/jstests/core/dbref2.js
index d1b4870322d..9f3cb4e2ca4 100644
--- a/jstests/core/dbref2.js
+++ b/jstests/core/dbref2.js
@@ -7,14 +7,14 @@ a.drop();
b.drop();
c.drop();
-a.save( { name : "eliot" } );
-b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } );
-c.save( { num : 1 , links : [ new DBRef( "dbref2a" , a.findOne()._id ) ] } );
+a.save({name: "eliot"});
+b.save({num: 1, link: new DBRef("dbref2a", a.findOne()._id)});
+c.save({num: 1, links: [new DBRef("dbref2a", a.findOne()._id)]});
-assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
-assert.neq( "el" , b.findOne().link.fetch().name , "B" );
+assert.eq("eliot", b.findOne().link.fetch().name, "A");
+assert.neq("el", b.findOne().link.fetch().name, "B");
// $elemMatch value
-var doc = c.findOne( { links: { $elemMatch: { $ref : "dbref2a", $id : a.findOne()._id } } } );
-assert.eq( "eliot" , doc.links[0].fetch().name , "C" );
-assert.neq( "el" , doc.links[0].fetch().name , "D" );
+var doc = c.findOne({links: {$elemMatch: {$ref: "dbref2a", $id: a.findOne()._id}}});
+assert.eq("eliot", doc.links[0].fetch().name, "C");
+assert.neq("el", doc.links[0].fetch().name, "D");
diff --git a/jstests/core/dbref3.js b/jstests/core/dbref3.js
index 2f3ab8fa79c..5bf0470442d 100644
--- a/jstests/core/dbref3.js
+++ b/jstests/core/dbref3.js
@@ -9,11 +9,11 @@ t.drop();
// true cases
t.insert({sub: {$ref: "foo", $id: "bar"}, dbref: true});
t.insert({sub: {$ref: "foo", $id: "bar", $db: "baz"}, dbref: true});
-t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted
+t.insert({sub: {$ref: "foo", $id: "bar", db: "baz"}, dbref: true}); // out of spec but accepted
t.insert({sub: {$ref: "foo", $id: ObjectId()}, dbref: true});
t.insert({sub: {$ref: "foo", $id: 1}, dbref: true});
-t.insert({sub: {$ref: 123/*not a string*/, $id: "bar"}, dbref: false});
+t.insert({sub: {$ref: 123 /*not a string*/, $id: "bar"}, dbref: false});
t.insert({sub: {$id: "bar", $ref: "foo"}, dbref: false});
t.insert({sub: {$ref: "foo"}, dbref: false});
t.insert({sub: {$id: "foo"}, dbref: false});
@@ -42,4 +42,4 @@ assert.eq(1, distinctDBs.length);
t.insert({sub: {$ref: "foo", $id: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]}});
var k = t.findOne({'sub.$id': {$elemMatch: {x: 2}}}, {_id: 0, 'sub.$id.$': 1});
print('k = ' + tojson(k));
-assert.eq({sub: {$id: [{x: 2, y:2}]}}, k);
\ No newline at end of file
+assert.eq({sub: {$id: [{x: 2, y: 2}]}}, k);
\ No newline at end of file
diff --git a/jstests/core/delx.js b/jstests/core/delx.js
index c4e3ca263d6..004b54ee6ca 100644
--- a/jstests/core/delx.js
+++ b/jstests/core/delx.js
@@ -1,31 +1,34 @@
-a = db.getSisterDB("delxa" );
-b = db.getSisterDB("delxb" );
+a = db.getSisterDB("delxa");
+b = db.getSisterDB("delxb");
-function setup( mydb ){
+function setup(mydb) {
mydb.dropDatabase();
- for ( i=0; i<100; i++ ){
- mydb.foo.insert( { _id : i } );
+ for (i = 0; i < 100; i++) {
+ mydb.foo.insert({_id: i});
}
}
-setup( a );
-setup( b );
+setup(a);
+setup(b);
-assert.eq( 100 , a.foo.find().itcount() , "A1" );
-assert.eq( 100 , b.foo.find().itcount() , "A2" );
+assert.eq(100, a.foo.find().itcount(), "A1");
+assert.eq(100, b.foo.find().itcount(), "A2");
-x = a.foo.find().sort( { _id : 1 } ).batchSize( 60 );
-y = b.foo.find().sort( { _id : 1 } ).batchSize( 60 );
+x = a.foo.find().sort({_id: 1}).batchSize(60);
+y = b.foo.find().sort({_id: 1}).batchSize(60);
x.next();
y.next();
-a.foo.remove( { _id : { $gt : 50 } } );
+a.foo.remove({_id: {$gt: 50}});
-assert.eq( 51 , a.foo.find().itcount() , "B1" );
-assert.eq( 100 , b.foo.find().itcount() , "B2" );
+assert.eq(51, a.foo.find().itcount(), "B1");
+assert.eq(100, b.foo.find().itcount(), "B2");
xCount = x.itcount();
-assert( xCount == 59 || xCount == 99, "C1 : " + xCount ); // snapshot or not is ok
-assert.eq( 99 , y.itcount() , "C2" ); // this was asserting because ClientCursor byLoc doesn't take db into consideration
+assert(xCount == 59 || xCount == 99, "C1 : " + xCount); // snapshot or not is ok
+assert.eq(
+ 99,
+ y.itcount(),
+ "C2"); // this was asserting because ClientCursor byLoc doesn't take db into consideration
diff --git a/jstests/core/depth_limit.js b/jstests/core/depth_limit.js
index 3a8ef2460ca..ddb648b4586 100644
--- a/jstests/core/depth_limit.js
+++ b/jstests/core/depth_limit.js
@@ -3,17 +3,23 @@
function test() {
function assertTooBig(obj) {
// This used to crash rather than throwing an exception.
- assert.throws(function(){Object.bsonsize(obj);});
+ assert.throws(function() {
+ Object.bsonsize(obj);
+ });
}
function assertNotTooBig(obj) {
- assert.doesNotThrow(function(){Object.bsonsize(obj);});
+ assert.doesNotThrow(function() {
+ Object.bsonsize(obj);
+ });
}
function objWithDepth(depth) {
var out = 1;
while (depth--) {
- out = {o: out};
+ out = {
+ o: out
+ };
}
return out;
}
@@ -41,8 +47,7 @@ function test() {
assertNotTooBig(objWithDepth(objDepthLimit - 1));
assertTooBig(objWithDepth(objDepthLimit));
-
- var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object
+ var arrayDepthLimit = objDepthLimit - 1; // one lower due to wrapping object
assertNotTooBig({array: arrayWithDepth(arrayDepthLimit - 1)});
assertTooBig({array: arrayWithDepth(arrayDepthLimit)});
}
@@ -53,4 +58,4 @@ test();
// test on server
db.depth_limit.drop();
db.depth_limit.insert({});
-db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server
+db.depth_limit.find({$where: test}).itcount(); // itcount ensures that cursor is executed on server
diff --git a/jstests/core/distinct1.js b/jstests/core/distinct1.js
index 779f5a12ba1..893e1f6ba65 100644
--- a/jstests/core/distinct1.js
+++ b/jstests/core/distinct1.js
@@ -2,31 +2,30 @@
t = db.distinct1;
t.drop();
-assert.eq( 0 , t.distinct( "a" ).length , "test empty" );
+assert.eq(0, t.distinct("a").length, "test empty");
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 2 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 2});
+t.save({a: 2});
+t.save({a: 3});
+res = t.distinct("a");
+assert.eq("1,2,3", res.toString(), "A1");
-res = t.distinct( "a" );
-assert.eq( "1,2,3" , res.toString() , "A1" );
-
-assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" );
+assert.eq("1,2", t.distinct("a", {a: {$lt: 3}}), "A2");
t.drop();
-t.save( { a : { b : "a" } , c : 12 } );
-t.save( { a : { b : "b" } , c : 12 } );
-t.save( { a : { b : "c" } , c : 12 } );
-t.save( { a : { b : "c" } , c : 12 } );
+t.save({a: {b: "a"}, c: 12});
+t.save({a: {b: "b"}, c: 12});
+t.save({a: {b: "c"}, c: 12});
+t.save({a: {b: "c"}, c: 12});
-res = t.distinct( "a.b" );
-assert.eq( "a,b,c" , res.toString() , "B1" );
-printjson(t._distinct( "a.b" ).stats);
-assert.eq( "COLLSCAN" , t._distinct( "a.b" ).stats.planSummary , "B2" );
+res = t.distinct("a.b");
+assert.eq("a,b,c", res.toString(), "B1");
+printjson(t._distinct("a.b").stats);
+assert.eq("COLLSCAN", t._distinct("a.b").stats.planSummary, "B2");
t.drop();
@@ -34,21 +33,21 @@ t.save({_id: 1, a: 1});
t.save({_id: 2, a: 2});
// Test distinct with _id.
-res = t.distinct( "_id" );
-assert.eq( "1,2", res.toString(), "C1" );
-res = t.distinct( "a", {_id: 1} );
-assert.eq( "1", res.toString(), "C2" );
+res = t.distinct("_id");
+assert.eq("1,2", res.toString(), "C1");
+res = t.distinct("a", {_id: 1});
+assert.eq("1", res.toString(), "C2");
// Test distinct with db.runCommand
t.drop();
-t.save({a :1, b: 2});
-t.save({a :2, b: 2});
-t.save({a :2, b: 1});
-t.save({a :2, b: 2});
-t.save({a :3, b: 2});
-t.save({a :4, b: 1});
-t.save({a :4, b: 1});
+t.save({a: 1, b: 2});
+t.save({a: 2, b: 2});
+t.save({a: 2, b: 1});
+t.save({a: 2, b: 2});
+t.save({a: 3, b: 2});
+t.save({a: 4, b: 1});
+t.save({a: 4, b: 1});
res = db.runCommand({distinct: "distinct1", key: "a"});
assert.commandWorked(res);
diff --git a/jstests/core/distinct2.js b/jstests/core/distinct2.js
index 1517ec5ca3a..fc6ff7779b7 100644
--- a/jstests/core/distinct2.js
+++ b/jstests/core/distinct2.js
@@ -2,12 +2,11 @@
t = db.distinct2;
t.drop();
-t.save({a:null});
-assert.eq( 0 , t.distinct('a.b').length , "A" );
+t.save({a: null});
+assert.eq(0, t.distinct('a.b').length, "A");
t.drop();
-t.save( { a : 1 } );
-assert.eq( [1] , t.distinct( "a" ) , "B" );
-t.save( {} );
-assert.eq( [1] , t.distinct( "a" ) , "C" );
-
+t.save({a: 1});
+assert.eq([1], t.distinct("a"), "B");
+t.save({});
+assert.eq([1], t.distinct("a"), "C");
diff --git a/jstests/core/distinct3.js b/jstests/core/distinct3.js
index c82dc7e9043..6ab21599f97 100644
--- a/jstests/core/distinct3.js
+++ b/jstests/core/distinct3.js
@@ -3,33 +3,33 @@
t = db.jstests_distinct3;
t.drop();
-t.ensureIndex({a:1});
-t.ensureIndex({b:1});
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
var bulk = t.initializeUnorderedBulkOp();
-for( i = 0; i < 50; ++i ) {
- for( j = 0; j < 2; ++j ) {
- bulk.insert({a:i,c:i,d:j});
+for (i = 0; i < 50; ++i) {
+ for (j = 0; j < 2; ++j) {
+ bulk.insert({a: i, c: i, d: j});
}
}
-for( i = 0; i < 100; ++i ) {
- bulk.insert({b:i,c:i+50});
+for (i = 0; i < 100; ++i) {
+ bulk.insert({b: i, c: i + 50});
}
assert.writeOK(bulk.execute());
// Attempt to remove the last match for the {a:1} index scan while distinct is yielding.
-p = startParallelShell( 'for( i = 0; i < 100; ++i ) { ' +
- ' var bulk = db.jstests_distinct3.initializeUnorderedBulkOp();' +
- ' bulk.find( { a:49 } ).remove(); ' +
- ' for( j = 0; j < 20; ++j ) { ' +
- ' bulk.insert( { a:49, c:49, d:j } ); ' +
- ' } ' +
- ' assert.writeOK(bulk.execute()); ' +
- '} ' );
+p = startParallelShell('for( i = 0; i < 100; ++i ) { ' +
+ ' var bulk = db.jstests_distinct3.initializeUnorderedBulkOp();' +
+ ' bulk.find( { a:49 } ).remove(); ' +
+ ' for( j = 0; j < 20; ++j ) { ' +
+ ' bulk.insert( { a:49, c:49, d:j } ); ' +
+ ' } ' +
+ ' assert.writeOK(bulk.execute()); ' +
+ '} ');
-for( i = 0; i < 100; ++i ) {
- count = t.distinct( 'c', {$or:[{a:{$gte:0},d:0},{b:{$gte:0}}]} ).length;
- assert.gt( count, 100 );
+for (i = 0; i < 100; ++i) {
+ count = t.distinct('c', {$or: [{a: {$gte: 0}, d: 0}, {b: {$gte: 0}}]}).length;
+ assert.gt(count, 100);
}
p();
diff --git a/jstests/core/distinct4.js b/jstests/core/distinct4.js
index 1fa2763bf40..fae4b99e59c 100644
--- a/jstests/core/distinct4.js
+++ b/jstests/core/distinct4.js
@@ -6,37 +6,40 @@
var t = db.distinct4;
t.drop();
- t.save({a:null});
- t.save({a:1});
- t.save({a:1});
- t.save({a:2});
- t.save({a:3});
+ t.save({a: null});
+ t.save({a: 1});
+ t.save({a: 1});
+ t.save({a: 2});
+ t.save({a: 3});
- //first argument should be a string or error
+ // first argument should be a string or error
// from shell helper
- assert.throws( function () { t.distinct({a:1}); } );
+ assert.throws(function() {
+ t.distinct({a: 1});
+ });
// from command interface
assert.commandFailedWithCode(t.runCommand("distinct", {"key": {a: 1}}),
ErrorCodes.TypeMismatch);
-
- //second argument should be a document or error
+ // second argument should be a document or error
// from shell helper
- assert.throws( function () { t.distinct('a', '1'); } );
+ assert.throws(function() {
+ t.distinct('a', '1');
+ });
// from command interface
assert.commandFailedWithCode(t.runCommand("distinct", {"key": "a", "query": "a"}),
ErrorCodes.TypeMismatch);
-
-
// empty query clause should not cause error
// from shell helper
- var a = assert.doesNotThrow( function () { return t.distinct('a'); } );
+ var a = assert.doesNotThrow(function() {
+ return t.distinct('a');
+ });
// [ null, 1, 2, 3 ]
assert.eq(4, a.length, tojson(a));
assert.contains(null, a);
@@ -45,6 +48,6 @@
assert.contains(3, a);
// from command interface
- assert.commandWorked( t.runCommand( "distinct", { "key" : "a" } ) );
+ assert.commandWorked(t.runCommand("distinct", {"key": "a"}));
})();
diff --git a/jstests/core/distinct_array1.js b/jstests/core/distinct_array1.js
index d59e4351db2..679395a064b 100644
--- a/jstests/core/distinct_array1.js
+++ b/jstests/core/distinct_array1.js
@@ -1,91 +1,90 @@
t = db.distinct_array1;
t.drop();
-t.save( { a : [1,2,3] } );
-t.save( { a : [2,3,4] } );
-t.save( { a : [3,4,5] } );
-t.save( { a : 9 } );
-
+t.save({a: [1, 2, 3]});
+t.save({a: [2, 3, 4]});
+t.save({a: [3, 4, 5]});
+t.save({a: 9});
// Without index.
-res = t.distinct( "a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "A1" );
+res = t.distinct("a").sort();
+assert.eq("1,2,3,4,5,9", res.toString(), "A1");
// Array element 0 without index.
-res = t.distinct( "a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "A2" );
+res = t.distinct("a.0").sort();
+assert.eq("1,2,3", res.toString(), "A2");
// Array element 1 without index.
-res = t.distinct( "a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "A3" );
+res = t.distinct("a.1").sort();
+assert.eq("2,3,4", res.toString(), "A3");
// With index.
-t.ensureIndex( { a : 1 } );
-res = t.distinct( "a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "A4" );
+t.ensureIndex({a: 1});
+res = t.distinct("a").sort();
+assert.eq("1,2,3,4,5,9", res.toString(), "A4");
// Array element 0 with index.
-res = t.distinct( "a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "A5" );
+res = t.distinct("a.0").sort();
+assert.eq("1,2,3", res.toString(), "A5");
// Array element 1 with index.
-res = t.distinct( "a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "A6" );
+res = t.distinct("a.1").sort();
+assert.eq("2,3,4", res.toString(), "A6");
-//t.drop();
+// t.drop();
-t.save( { a : [{b:"a"}, {b:"d"}] , c : 12 } );
-t.save( { a : [{b:"b"}, {b:"d"}] , c : 12 } );
-t.save( { a : [{b:"c"}, {b:"e"}] , c : 12 } );
-t.save( { a : [{b:"c"}, {b:"f"}] , c : 12 } );
-t.save( { a : [] , c : 12 } );
-t.save( { a : { b : "z"} , c : 12 } );
+t.save({a: [{b: "a"}, {b: "d"}], c: 12});
+t.save({a: [{b: "b"}, {b: "d"}], c: 12});
+t.save({a: [{b: "c"}, {b: "e"}], c: 12});
+t.save({a: [{b: "c"}, {b: "f"}], c: 12});
+t.save({a: [], c: 12});
+t.save({a: {b: "z"}, c: 12});
// Without index.
-res = t.distinct( "a.b" ).sort();
-assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B1" );
+res = t.distinct("a.b").sort();
+assert.eq("a,b,c,d,e,f,z", res.toString(), "B1");
// Array element 0 without index
-res = t.distinct( "a.0.b" ).sort();
-assert.eq( "a,b,c" , res.toString() , "B2" );
+res = t.distinct("a.0.b").sort();
+assert.eq("a,b,c", res.toString(), "B2");
// Array element 1 without index
-res = t.distinct( "a.1.b" ).sort();
-assert.eq( "d,e,f" , res.toString() , "B3" );
+res = t.distinct("a.1.b").sort();
+assert.eq("d,e,f", res.toString(), "B3");
// With index.
-t.ensureIndex( { "a.b" : 1 } );
-res = t.distinct( "a.b" );
+t.ensureIndex({"a.b": 1});
+res = t.distinct("a.b");
res.sort();
-assert.eq( "a,b,c,d,e,f,z" , res.toString() , "B4" );
+assert.eq("a,b,c,d,e,f,z", res.toString(), "B4");
// _id as an document containing an array
-t.save( { _id : { a : [1,2,3] } } );
-t.save( { _id : { a : [2,3,4] } } );
-t.save( { _id : { a : [3,4,5] } } );
-t.save( { _id : { a : 9 } } );
+t.save({_id: {a: [1, 2, 3]}});
+t.save({_id: {a: [2, 3, 4]}});
+t.save({_id: {a: [3, 4, 5]}});
+t.save({_id: {a: 9}});
// Without index.
-res = t.distinct( "_id.a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "C1" );
+res = t.distinct("_id.a").sort();
+assert.eq("1,2,3,4,5,9", res.toString(), "C1");
// Array element 0 without index.
-res = t.distinct( "_id.a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "C2" );
+res = t.distinct("_id.a.0").sort();
+assert.eq("1,2,3", res.toString(), "C2");
// Array element 1 without index.
-res = t.distinct( "_id.a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "C3" );
+res = t.distinct("_id.a.1").sort();
+assert.eq("2,3,4", res.toString(), "C3");
// With index.
-t.ensureIndex( { "_id.a" : 1 } );
-res = t.distinct( "_id.a" ).sort();
-assert.eq( "1,2,3,4,5,9" , res.toString() , "C4" );
+t.ensureIndex({"_id.a": 1});
+res = t.distinct("_id.a").sort();
+assert.eq("1,2,3,4,5,9", res.toString(), "C4");
// Array element 0 with index.
-res = t.distinct( "_id.a.0" ).sort();
-assert.eq( "1,2,3" , res.toString() , "C5" );
+res = t.distinct("_id.a.0").sort();
+assert.eq("1,2,3", res.toString(), "C5");
// Array element 1 with index.
-res = t.distinct( "_id.a.1" ).sort();
-assert.eq( "2,3,4" , res.toString() , "C6" );
+res = t.distinct("_id.a.1").sort();
+assert.eq("2,3,4", res.toString(), "C6");
diff --git a/jstests/core/distinct_index1.js b/jstests/core/distinct_index1.js
index 959e46ebdaf..31faecd376e 100644
--- a/jstests/core/distinct_index1.js
+++ b/jstests/core/distinct_index1.js
@@ -2,74 +2,75 @@
t = db.distinct_index1;
t.drop();
-function r( x ){
- return Math.floor( Math.sqrt( x * 123123 ) ) % 10;
+function r(x) {
+ return Math.floor(Math.sqrt(x * 123123)) % 10;
}
-function d( k , q ){
- return t.runCommand( "distinct" , { key : k , query : q || {} } );
+function d(k, q) {
+ return t.runCommand("distinct", {key: k, query: q || {}});
}
-for ( i=0; i<1000; i++ ){
- o = { a : r(i*5) , b : r(i) };
- t.insert( o );
+for (i = 0; i < 1000; i++) {
+ o = {
+ a: r(i * 5),
+ b: r(i)
+ };
+ t.insert(o);
}
-x = d( "a" );
+x = d("a");
// Collection scan looks at all 1000 documents and gets 1000
// distinct values. Looks at 0 index keys.
-assert.eq( 1000 , x.stats.n , "AA1" );
-assert.eq( 0 , x.stats.nscanned , "AA2" );
-assert.eq( 1000 , x.stats.nscannedObjects , "AA3" );
+assert.eq(1000, x.stats.n, "AA1");
+assert.eq(0, x.stats.nscanned, "AA2");
+assert.eq(1000, x.stats.nscannedObjects, "AA3");
-x = d( "a" , { a : { $gt : 5 } } );
+x = d("a", {a: {$gt: 5}});
// Collection scan looks at all 1000 documents and gets 398
// distinct values which match the query. Looks at 0 index keys.
-assert.eq( 398 , x.stats.n , "AB1" );
-assert.eq( 0 , x.stats.nscanned , "AB2" );
-assert.eq( 1000 , x.stats.nscannedObjects , "AB3" );
+assert.eq(398, x.stats.n, "AB1");
+assert.eq(0, x.stats.nscanned, "AB2");
+assert.eq(1000, x.stats.nscannedObjects, "AB3");
-x = d( "b" , { a : { $gt : 5 } } );
+x = d("b", {a: {$gt: 5}});
// Collection scan looks at all 1000 documents and gets 398
// distinct values which match the query. Looks at 0 index keys.
-assert.eq( 398 , x.stats.n , "AC1" );
-assert.eq( 0 , x.stats.nscanned , "AC2" );
-assert.eq( 1000 , x.stats.nscannedObjects , "AC3" );
+assert.eq(398, x.stats.n, "AC1");
+assert.eq(0, x.stats.nscanned, "AC2");
+assert.eq(1000, x.stats.nscannedObjects, "AC3");
+t.ensureIndex({a: 1});
-
-t.ensureIndex( { a : 1 } );
-
-x = d( "a" );
+x = d("a");
// There are only 10 values. We use the fast distinct hack and only examine each value once.
-assert.eq( 10 , x.stats.n , "BA1" );
-assert.eq( 10 , x.stats.nscanned , "BA2" );
+assert.eq(10, x.stats.n, "BA1");
+assert.eq(10, x.stats.nscanned, "BA2");
-x = d( "a" , { a : { $gt : 5 } } );
+x = d("a", {a: {$gt: 5}});
// Only 4 values of a are >= 5 and we use the fast distinct hack.
-assert.eq(4, x.stats.n , "BB1" );
-assert.eq(4, x.stats.nscanned , "BB2" );
-assert.eq(0, x.stats.nscannedObjects , "BB3" );
+assert.eq(4, x.stats.n, "BB1");
+assert.eq(4, x.stats.nscanned, "BB2");
+assert.eq(0, x.stats.nscannedObjects, "BB3");
-x = d( "b" , { a : { $gt : 5 } } );
+x = d("b", {a: {$gt: 5}});
// We can't use the fast distinct hack here because we're distinct-ing over 'b'.
-assert.eq( 398 , x.stats.n , "BC1" );
-assert.eq( 398 , x.stats.nscanned , "BC2" );
-assert.eq( 398 , x.stats.nscannedObjects , "BC3" );
+assert.eq(398, x.stats.n, "BC1");
+assert.eq(398, x.stats.nscanned, "BC2");
+assert.eq(398, x.stats.nscannedObjects, "BC3");
// Check proper nscannedObjects count when using a query optimizer cursor.
t.dropIndexes();
-t.ensureIndex( { a : 1, b : 1 } );
-x = d( "b" , { a : { $gt : 5 }, b : { $gt : 5 } } );
+t.ensureIndex({a: 1, b: 1});
+x = d("b", {a: {$gt: 5}, b: {$gt: 5}});
printjson(x);
// 171 is the # of results we happen to scan when we don't use a distinct
// hack. When we use the distinct hack we scan 16, currently.
assert.lte(x.stats.n, 171);
-assert.eq(171, x.stats.nscannedObjects , "BD3" );
+assert.eq(171, x.stats.nscannedObjects, "BD3");
// Should use an index scan over the hashed index.
t.dropIndexes();
-t.ensureIndex( { a : "hashed" } );
-x = d( "a", { $or : [ { a : 3 }, { a : 5 } ] } );
-assert.eq( 188, x.stats.n, "DA1" );
-assert.eq( "IXSCAN { a: \"hashed\" }", x.stats.planSummary );
+t.ensureIndex({a: "hashed"});
+x = d("a", {$or: [{a: 3}, {a: 5}]});
+assert.eq(188, x.stats.n, "DA1");
+assert.eq("IXSCAN { a: \"hashed\" }", x.stats.planSummary);
diff --git a/jstests/core/distinct_index2.js b/jstests/core/distinct_index2.js
index 8899a048714..d1b72565102 100644
--- a/jstests/core/distinct_index2.js
+++ b/jstests/core/distinct_index2.js
@@ -1,41 +1,41 @@
t = db.distinct_index2;
t.drop();
-t.ensureIndex( { a : 1 , b : 1 } );
-t.ensureIndex( { c : 1 } );
+t.ensureIndex({a: 1, b: 1});
+t.ensureIndex({c: 1});
// Uniformly distributed dataset.
// If we use a randomly generated dataset, we might not
// generate all the distinct values in the range [0, 10).
-for ( var a=0; a<10; a++ ) {
- for ( var b=0; b<10; b++ ) {
- for ( var c=0; c<10; c++ ) {
- t.insert( { a : a , b : b , c : c } );
+for (var a = 0; a < 10; a++) {
+ for (var b = 0; b < 10; b++) {
+ for (var c = 0; c < 10; c++) {
+ t.insert({a: a, b: b, c: c});
}
}
}
correct = [];
-for ( i=0; i<10; i++ )
- correct.push( i );
+for (i = 0; i < 10; i++)
+ correct.push(i);
-function check( field ){
- res = t.distinct( field );
+function check(field) {
+ res = t.distinct(field);
res = res.sort();
- assert.eq( correct , res , "check: " + field );
+ assert.eq(correct, res, "check: " + field);
- if ( field != "a" ){
- res = t.distinct( field , { a : 1 } );
+ if (field != "a") {
+ res = t.distinct(field, {a: 1});
res = res.sort();
- assert.eq( correct , res , "check 2: " + field );
+ assert.eq(correct, res, "check 2: " + field);
}
}
-check( "a" );
-check( "b" );
-check( "c" );
+check("a");
+check("b");
+check("c");
// hashed index should produce same results.
t.dropIndexes();
-t.ensureIndex( { a : "hashed" } );
-check( "a" );
+t.ensureIndex({a: "hashed"});
+check("a");
diff --git a/jstests/core/distinct_speed1.js b/jstests/core/distinct_speed1.js
index c511de34087..9d7a114e418 100644
--- a/jstests/core/distinct_speed1.js
+++ b/jstests/core/distinct_speed1.js
@@ -2,25 +2,24 @@
t = db.distinct_speed1;
t.drop();
-for ( var i=0; i<10000; i++ ){
- t.save( { x : i % 10 } );
+for (var i = 0; i < 10000; i++) {
+ t.save({x: i % 10});
}
-assert.eq( 10 , t.distinct("x").length , "A1" );
+assert.eq(10, t.distinct("x").length, "A1");
-function fast(){
+function fast() {
t.find().explain("executionStats").executionStats.executionTimeMillis;
}
-function slow(){
+function slow() {
t.distinct("x");
}
-for ( i=0; i<3; i++ ){
- print( "it: " + Date.timeFunc( fast ) );
- print( "di: " + Date.timeFunc( slow ) );
+for (i = 0; i < 3; i++) {
+ print("it: " + Date.timeFunc(fast));
+ print("di: " + Date.timeFunc(slow));
}
-
-t.ensureIndex( { x : 1 } );
-t.distinct( "x" , { x : 5 } );
+t.ensureIndex({x: 1});
+t.distinct("x", {x: 5});
diff --git a/jstests/core/doc_validation.js b/jstests/core/doc_validation.js
index 1230f316b36..92bea28af35 100644
--- a/jstests/core/doc_validation.js
+++ b/jstests/core/doc_validation.js
@@ -34,14 +34,12 @@
// Drop will assert on failure.
coll.drop();
-
// Check that we can only update documents that pass validation.
// Set up valid and invalid docs then set validator.
assert.writeOK(coll.insert({_id: 'valid1', a: 1}));
assert.writeOK(coll.insert({_id: 'invalid2', b: 1}));
- assert.commandWorked(db.runCommand({"collMod": collName,
- "validator" : {a: {$exists: true}}}));
+ assert.commandWorked(db.runCommand({"collMod": collName, "validator": {a: {$exists: true}}}));
// Updates affecting fields not included in validator document
// on a conforming document.
@@ -65,19 +63,17 @@
coll.drop();
-
// Verify can't make a conforming doc fail validation,
// but can update non-conforming doc to pass validation.
assert.writeOK(coll.insert({_id: 'valid1', a: 1}));
assert.writeOK(coll.insert({_id: 'invalid2', b: 1}));
- assert.commandWorked(db.runCommand({"collMod": collName,
- "validator" : {a: {$exists: true}}}));
+ assert.commandWorked(db.runCommand({"collMod": collName, "validator": {a: {$exists: true}}}));
assertFailsValidation(coll.update({_id: 'valid1'}, {$unset: {a: 1}}));
assert.writeOK(coll.update({_id: 'invalid2'}, {$set: {a: 1}}));
// Modify collection to remove validator statement
- assert.commandWorked(db.runCommand({"collMod": collName, "validator" : {}}));
+ assert.commandWorked(db.runCommand({"collMod": collName, "validator": {}}));
// Verify no validation applied to updates.
assert.writeOK(coll.update({_id: 'valid1'}, {$set: {z: 1}}));
diff --git a/jstests/core/doc_validation_invalid_validators.js b/jstests/core/doc_validation_invalid_validators.js
index b77cc07fa3d..aeebae42820 100644
--- a/jstests/core/doc_validation_invalid_validators.js
+++ b/jstests/core/doc_validation_invalid_validators.js
@@ -12,41 +12,36 @@
assert.commandFailed(db.createCollection(collName, {validator: "assert"}));
// Check some disallowed match statements.
- assert.commandFailed(db.createCollection(collName, {validator:
- {$text: "bob"}}));
- assert.commandFailed(db.createCollection(collName, {validator:
- {$where: "this.a == this.b"}}));
- assert.commandFailed(db.createCollection(collName, {validator:
- {$near: {place:"holder"}}}));
- assert.commandFailed(db.createCollection(collName, {validator:
- {$geoNear: {place:"holder"}}}));
- assert.commandFailed(db.createCollection(collName, {validator:
- {$nearSphere: {place:"holder"}}}));
+ assert.commandFailed(db.createCollection(collName, {validator: {$text: "bob"}}));
+ assert.commandFailed(db.createCollection(collName, {validator: {$where: "this.a == this.b"}}));
+ assert.commandFailed(db.createCollection(collName, {validator: {$near: {place: "holder"}}}));
+ assert.commandFailed(db.createCollection(collName, {validator: {$geoNear: {place: "holder"}}}));
+ assert.commandFailed(
+ db.createCollection(collName, {validator: {$nearSphere: {place: "holder"}}}));
// Verify we fail on admin, local and config databases.
- assert.commandFailed(db.getSiblingDB("admin").createCollection(collName,
- {validator: {a: {$exists: true}}}));
+ assert.commandFailed(
+ db.getSiblingDB("admin").createCollection(collName, {validator: {a: {$exists: true}}}));
if (!db.runCommand("isdbgrid").isdbgrid) {
- assert.commandFailed(db.getSiblingDB("local").createCollection(collName,
- {validator: {a: {$exists: true}}}));
+ assert.commandFailed(db.getSiblingDB("local")
+ .createCollection(collName, {validator: {a: {$exists: true}}}));
}
- assert.commandFailed(db.getSiblingDB("config").createCollection(collName,
- {validator: {a: {$exists: true}}}));
+ assert.commandFailed(
+ db.getSiblingDB("config").createCollection(collName, {validator: {a: {$exists: true}}}));
// Create collection with document validator.
assert.commandWorked(db.createCollection(collName, {validator: {a: {$exists: true}}}));
// Verify some invalid match statements can't be passed to collMod.
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$text: "bob"}}));
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$where: "this.a == this.b"}}));
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$near: {place:"holder"}}}));
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$geoNear: {place:"holder"}}}));
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$nearSphere: {place:"holder"}}}));
+ assert.commandFailed(db.runCommand({"collMod": collName, "validator": {$text: "bob"}}));
+ assert.commandFailed(
+ db.runCommand({"collMod": collName, "validator": {$where: "this.a == this.b"}}));
+ assert.commandFailed(
+ db.runCommand({"collMod": collName, "validator": {$near: {place: "holder"}}}));
+ assert.commandFailed(
+ db.runCommand({"collMod": collName, "validator": {$geoNear: {place: "holder"}}}));
+ assert.commandFailed(
+ db.runCommand({"collMod": collName, "validator": {$nearSphere: {place: "holder"}}}));
coll.drop();
@@ -54,11 +49,9 @@
assert.commandWorked(db.createCollection(collName));
// Verify we can't add an invalid validator to a collection without a validator.
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$text: "bob"}}));
- assert.commandFailed(db.runCommand({"collMod": collName, "validator" :
- {$where: "this.a == this.b"}}));
- assert.commandWorked(db.runCommand({"collMod": collName, "validator" :
- {a: {$exists: true}}}));
+ assert.commandFailed(db.runCommand({"collMod": collName, "validator": {$text: "bob"}}));
+ assert.commandFailed(
+ db.runCommand({"collMod": collName, "validator": {$where: "this.a == this.b"}}));
+ assert.commandWorked(db.runCommand({"collMod": collName, "validator": {a: {$exists: true}}}));
coll.drop();
})();
diff --git a/jstests/core/doc_validation_options.js b/jstests/core/doc_validation_options.js
index cfc9263d376..8a96685e48f 100644
--- a/jstests/core/doc_validation_options.js
+++ b/jstests/core/doc_validation_options.js
@@ -9,54 +9,46 @@
var t = db.doc_validation_options;
t.drop();
-
- assert.commandWorked(db.createCollection(t.getName(),
- {validator: {a : 1}}));
- assertFailsValidation(t.insert({a:2}));
- t.insert({a:1});
+ assert.commandWorked(db.createCollection(t.getName(), {validator: {a: 1}}));
+
+ assertFailsValidation(t.insert({a: 2}));
+ t.insert({a: 1});
assert.eq(1, t.count());
// test default to strict
- assertFailsValidation(t.update({}, {$set: {a:2}}));
- assert.eq(1, t.find({a:1}).itcount());
+ assertFailsValidation(t.update({}, {$set: {a: 2}}));
+ assert.eq(1, t.find({a: 1}).itcount());
// check we can do a bad update in warn mode
- assert.commandWorked(t.runCommand("collMod",
- {validationAction: "warn"}));
- t.update({}, {$set: {a:2}});
- assert.eq(1, t.find({a:2}).itcount());
+ assert.commandWorked(t.runCommand("collMod", {validationAction: "warn"}));
+ t.update({}, {$set: {a: 2}});
+ assert.eq(1, t.find({a: 2}).itcount());
// TODO: check log for message?
// make sure persisted
var info = db.getCollectionInfos({name: t.getName()})[0];
assert.eq("warn", info.options.validationAction, tojson(info));
-
+
// check we can go back to enforce strict
- assert.commandWorked(t.runCommand("collMod",
- {validationAction: "error",
- validationLevel: "strict"}));
- assertFailsValidation(t.update({}, {$set: {a:3}}));
- assert.eq(1, t.find({a:2}).itcount());
-
+ assert.commandWorked(
+ t.runCommand("collMod", {validationAction: "error", validationLevel: "strict"}));
+ assertFailsValidation(t.update({}, {$set: {a: 3}}));
+ assert.eq(1, t.find({a: 2}).itcount());
+
// check bad -> bad is ok
- assert.commandWorked(t.runCommand("collMod",
- {validationLevel: "moderate"}));
- t.update({}, {$set: {a:3}});
- assert.eq(1, t.find({a:3}).itcount());
+ assert.commandWorked(t.runCommand("collMod", {validationLevel: "moderate"}));
+ t.update({}, {$set: {a: 3}});
+ assert.eq(1, t.find({a: 3}).itcount());
// test create
t.drop();
- assert.commandWorked(db.createCollection(t.getName(),
- {validator: {a : 1},
- validationAction: "warn"}));
-
- t.insert({a:2});
- t.insert({a:1});
+ assert.commandWorked(
+ db.createCollection(t.getName(), {validator: {a: 1}, validationAction: "warn"}));
+
+ t.insert({a: 2});
+ t.insert({a: 1});
assert.eq(2, t.count());
-
-})();
-
-
+})();
diff --git a/jstests/core/drop.js b/jstests/core/drop.js
index 67be6844813..22ed97ad805 100644
--- a/jstests/core/drop.js
+++ b/jstests/core/drop.js
@@ -3,20 +3,19 @@ var coll = db.jstests_drop;
coll.drop();
res = coll.runCommand("drop");
-assert( !res.ok, tojson( res ) );
-
+assert(!res.ok, tojson(res));
assert.eq(0, coll.getIndexes().length, "A");
coll.save({});
assert.eq(1, coll.getIndexes().length, "B");
-coll.ensureIndex({a : 1});
+coll.ensureIndex({a: 1});
assert.eq(2, coll.getIndexes().length, "C");
-assert.commandWorked(db.runCommand({drop : coll.getName()}));
+assert.commandWorked(db.runCommand({drop: coll.getName()}));
assert.eq(0, coll.getIndexes().length, "D");
-coll.ensureIndex({a : 1});
+coll.ensureIndex({a: 1});
assert.eq(2, coll.getIndexes().length, "E");
-assert.commandWorked(db.runCommand({deleteIndexes : coll.getName(), index : "*"}),
+assert.commandWorked(db.runCommand({deleteIndexes: coll.getName(), index: "*"}),
"delete indexes A");
assert.eq(1, coll.getIndexes().length, "G");
diff --git a/jstests/core/drop2.js b/jstests/core/drop2.js
index f0dee81b592..54f6b8b78ab 100644
--- a/jstests/core/drop2.js
+++ b/jstests/core/drop2.js
@@ -1,23 +1,23 @@
var coll = db.jstests_drop2;
coll.drop();
-function debug( x ) {
- printjson( x );
+function debug(x) {
+ printjson(x);
}
-coll.save( {} );
+coll.save({});
-function getOpId( drop ) {
+function getOpId(drop) {
var inProg = db.currentOp().inprog;
- debug( inProg );
- for ( var id in inProg ) {
- var op = inProg[ id ];
- if ( drop ) {
- if ( op.query && op.query.drop && op.query.drop == coll.getName() ) {
+ debug(inProg);
+ for (var id in inProg) {
+ var op = inProg[id];
+ if (drop) {
+ if (op.query && op.query.drop && op.query.drop == coll.getName()) {
return op.opid;
}
} else {
- if ( op.query && op.query.query && op.query.query.$where && op.ns == (coll + "") ) {
+ if (op.query && op.query.query && op.query.query.$where && op.ns == (coll + "")) {
return op.opid;
}
}
@@ -25,26 +25,28 @@ function getOpId( drop ) {
return null;
}
-var awaitCount = startParallelShell( "print(\"Count thread started\");"
- + "db.getMongo().getCollection(\""
- + (coll + "") + "\")"
- + ".count( { $where: function() {"
- + "while( 1 ) { sleep( 1 ); } } } );"
- + "print(\"Count thread terminating\");" );
+var awaitCount = startParallelShell(
+ "print(\"Count thread started\");" + "db.getMongo().getCollection(\"" + (coll + "") + "\")" +
+ ".count( { $where: function() {" + "while( 1 ) { sleep( 1 ); } } } );" +
+ "print(\"Count thread terminating\");");
countOpId = null;
-assert.soon( function() { countOpId = getOpId( false ); return countOpId; } );
-
-var awaitDrop = startParallelShell( "print(\"Drop thread started\");"
- + "print(\"drop result: \" + "
- + "db.getMongo().getCollection(\""
- + (coll + "") + "\")"
- + ".drop() );"
- + "print(\"Drop thread terminating\")" );
+assert.soon(function() {
+ countOpId = getOpId(false);
+ return countOpId;
+});
+
+var awaitDrop =
+ startParallelShell("print(\"Drop thread started\");" + "print(\"drop result: \" + " +
+ "db.getMongo().getCollection(\"" + (coll + "") + "\")" + ".drop() );" +
+ "print(\"Drop thread terminating\")");
dropOpId = null;
-assert.soon( function() { dropOpId = getOpId( true ); return dropOpId; } );
+assert.soon(function() {
+ dropOpId = getOpId(true);
+ return dropOpId;
+});
-db.killOp( dropOpId );
-db.killOp( countOpId );
+db.killOp(dropOpId);
+db.killOp(countOpId);
var exitCode = awaitCount({checkExitSuccess: false});
assert.neq(0, exitCode, "expected shell to exit abnormally due to JS execution being terminated");
@@ -52,4 +54,4 @@ assert.neq(0, exitCode, "expected shell to exit abnormally due to JS execution b
// The drop operation may or may not have been killed.
awaitDrop({checkExitSuccess: false});
-coll.drop(); // in SERVER-1818, this fails
+coll.drop(); // in SERVER-1818, this fails
diff --git a/jstests/core/drop3.js b/jstests/core/drop3.js
index 65fe072cf97..1215d218e4f 100644
--- a/jstests/core/drop3.js
+++ b/jstests/core/drop3.js
@@ -4,22 +4,22 @@ sub = t.sub;
t.drop();
sub.drop();
-
-for (var i = 0; i < 10; i++){
+for (var i = 0; i < 10; i++) {
t.insert({});
sub.insert({});
}
-var cursor = t.find().batchSize(2);
+var cursor = t.find().batchSize(2);
var subcursor = sub.find().batchSize(2);
cursor.next();
subcursor.next();
-assert.eq( cursor.objsLeftInBatch(), 1 );
-assert.eq( subcursor.objsLeftInBatch(), 1 );
-
-t.drop(); // should invalidate cursor, but not subcursor
+assert.eq(cursor.objsLeftInBatch(), 1);
+assert.eq(subcursor.objsLeftInBatch(), 1);
-assert.throws( function(){ cursor.itcount(); } ); // throws "cursor doesn't exist on server" error on getMore
-assert.eq( subcursor.itcount(), 9 ); //one already seen
+t.drop(); // should invalidate cursor, but not subcursor
+assert.throws(function() {
+ cursor.itcount();
+}); // throws "cursor doesn't exist on server" error on getMore
+assert.eq(subcursor.itcount(), 9); // one already seen
diff --git a/jstests/core/drop_index.js b/jstests/core/drop_index.js
index 938ac0d2547..f2e99baa8f8 100644
--- a/jstests/core/drop_index.js
+++ b/jstests/core/drop_index.js
@@ -2,19 +2,19 @@
t = db.dropIndex;
t.drop();
-t.insert( { _id : 1 , a : 2 , b : 3 } );
-assert.eq( 1 , t.getIndexes().length , "A1" );
+t.insert({_id: 1, a: 2, b: 3});
+assert.eq(1, t.getIndexes().length, "A1");
-t.ensureIndex( { a : 1 } );
-t.ensureIndex( { b : 1 } );
-assert.eq( 3 , t.getIndexes().length , "A2" );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+assert.eq(3, t.getIndexes().length, "A2");
-x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } );
-assert.eq( 2 , t.getIndexes().length , "B1 " + tojson(x) );
+x = db._dbCommand({dropIndexes: t.getName(), index: t._genIndexName({a: 1})});
+assert.eq(2, t.getIndexes().length, "B1 " + tojson(x));
-x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } );
-assert.eq( 1 , t.getIndexes().length , "B2" );
+x = db._dbCommand({dropIndexes: t.getName(), index: {b: 1}});
+assert.eq(1, t.getIndexes().length, "B2");
// ensure you can recreate indexes, even if you don't use dropIndex method
-t.ensureIndex({a:1});
-assert.eq(2 , t.getIndexes().length);
+t.ensureIndex({a: 1});
+assert.eq(2, t.getIndexes().length);
diff --git a/jstests/core/dropdb.js b/jstests/core/dropdb.js
index afe783338c4..1af56da34bf 100644
--- a/jstests/core/dropdb.js
+++ b/jstests/core/dropdb.js
@@ -4,18 +4,19 @@
m = db.getMongo();
baseName = "jstests_dropdb";
-ddb = db.getSisterDB( baseName );
+ddb = db.getSisterDB(baseName);
print("initial dbs: " + tojson(m.getDBNames()));
function check(shouldExist) {
var dbs = m.getDBNames();
- assert.eq(Array.contains(dbs, baseName), shouldExist,
- "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist."
- + " dbs: " + tojson(dbs) + "\n" + tojson( m.getDBs() ) );
+ assert.eq(Array.contains(dbs, baseName),
+ shouldExist,
+ "DB " + baseName + " should " + (shouldExist ? "" : "not ") + "exist." + " dbs: " +
+ tojson(dbs) + "\n" + tojson(m.getDBs()));
}
-ddb.c.save( {} );
+ddb.c.save({});
check(true);
var res = ddb.dropDatabase();
@@ -25,6 +26,7 @@ check(false);
var res = ddb.dropDatabase();
assert.commandWorked(res);
-assert.eq(res.dropped, undefined,
+assert.eq(res.dropped,
+ undefined,
"dropped field was populated even though nothing should have been dropped");
check(false);
diff --git a/jstests/core/dropdb_race.js b/jstests/core/dropdb_race.js
index f61b4ef52e2..b4666ecc3ad 100644
--- a/jstests/core/dropdb_race.js
+++ b/jstests/core/dropdb_race.js
@@ -5,7 +5,7 @@ baseName = "jstests_dur_droprace";
d = db.getSisterDB(baseName);
t = d.foo;
-assert(d.adminCommand({ setParameter: 1, syncdelay: 5 }).ok);
+assert(d.adminCommand({setParameter: 1, syncdelay: 5}).ok);
var s = 0;
@@ -17,9 +17,9 @@ for (var pass = 0; pass < 100; pass++) {
var options = ( pass % 4 == 0 )? { writeConcern: { fsync: true }} : undefined;
t.insert({}, options);
}
- t.insert({ x: 1 });
- t.insert({ x: 3 });
- t.ensureIndex({ x: 1 });
+ t.insert({x: 1});
+ t.insert({x: 3});
+ t.ensureIndex({x: 1});
sleep(s);
if (pass % 13 == 0)
t.drop();
@@ -29,13 +29,13 @@ for (var pass = 0; pass < 100; pass++) {
d.dropDatabase();
if (pass % 7 == 0)
- d.runCommand({getLastError:1,j:1});
+ d.runCommand({getLastError: 1, j: 1});
d.getLastError();
s = (s + 1) % 25;
- //print(pass);
+ // print(pass);
if ((new Date()) - start > 60000) {
- print("stopping early");
+ print("stopping early");
break;
}
}
diff --git a/jstests/core/elemMatchProjection.js b/jstests/core/elemMatchProjection.js
index 0ccfdfb161b..97d1be0f081 100644
--- a/jstests/core/elemMatchProjection.js
+++ b/jstests/core/elemMatchProjection.js
@@ -5,245 +5,255 @@ t.drop();
date1 = new Date();
// Insert various styles of arrays
-for ( i = 0; i < 100; i++ ) {
- t.insert({ group: 1, x: [ 1, 2, 3, 4, 5 ] });
- t.insert({ group: 2, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ] });
- t.insert({ group: 3, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
- y: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
- t.insert({ group: 3, x: [ { a: 1, b: 3 }, { a: -6, c: 3 } ] });
- t.insert({ group: 4, x: [ { a: 1, b: 4 }, { a: -6, c: 3 } ] });
- t.insert({ group: 5, x: [ new Date(), 5, 10, 'string', new ObjectId(), 123.456 ] });
- t.insert({ group: 6, x: [ { a: 'string', b: date1 },
- { a: new ObjectId(), b: 1.2345 },
- { a: 'string2', b: date1 } ] });
- t.insert({ group: 7, x: [ { y: [ 1, 2, 3, 4 ] } ] });
- t.insert({ group: 8, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
- t.insert({ group: 9, x: [ { y: [ { a: 1, b: 2 }, {a: 3, b: 4} ] },
- { z: [ { a: 1, b: 2 }, {a: 3, b: 4} ] } ] });
- t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
- y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
- t.insert({ group: 10, x: [ { a: 1, b: 2 }, {a: 3, b: 4} ],
- y: [ { c: 1, d: 2 }, {c: 3, d: 4} ] });
- t.insert({ group: 11, x: [ { a: 1, b: 2 }, { a: 2, c: 3 }, { a:1, d:5 } ],
- covered: [ { aa: 1, bb: 2 }, { aa: 2, cc: 3 }, { aa:1, dd:5 } ] });
- t.insert({ group: 12, x: { y : [ { a: 1, b: 1 }, { a: 1, b: 2} ] } } );
- t.insert({ group: 13, x: [ { a: 1, b: 1 }, {a: 1, b: 2 } ] } );
- t.insert({ group: 13, x: [ { a: 1, b: 2 }, {a: 1, b: 1 } ] } );
+for (i = 0; i < 100; i++) {
+ t.insert({group: 1, x: [1, 2, 3, 4, 5]});
+ t.insert({group: 2, x: [{a: 1, b: 2}, {a: 2, c: 3}, {a: 1, d: 5}]});
+ t.insert({
+ group: 3,
+ x: [{a: 1, b: 2}, {a: 2, c: 3}, {a: 1, d: 5}],
+ y: [{aa: 1, bb: 2}, {aa: 2, cc: 3}, {aa: 1, dd: 5}]
+ });
+ t.insert({group: 3, x: [{a: 1, b: 3}, {a: -6, c: 3}]});
+ t.insert({group: 4, x: [{a: 1, b: 4}, {a: -6, c: 3}]});
+ t.insert({group: 5, x: [new Date(), 5, 10, 'string', new ObjectId(), 123.456]});
+ t.insert({
+ group: 6,
+ x: [{a: 'string', b: date1}, {a: new ObjectId(), b: 1.2345}, {a: 'string2', b: date1}]
+ });
+ t.insert({group: 7, x: [{y: [1, 2, 3, 4]}]});
+ t.insert({group: 8, x: [{y: [{a: 1, b: 2}, {a: 3, b: 4}]}]});
+ t.insert({group: 9, x: [{y: [{a: 1, b: 2}, {a: 3, b: 4}]}, {z: [{a: 1, b: 2}, {a: 3, b: 4}]}]});
+ t.insert({group: 10, x: [{a: 1, b: 2}, {a: 3, b: 4}], y: [{c: 1, d: 2}, {c: 3, d: 4}]});
+ t.insert({group: 10, x: [{a: 1, b: 2}, {a: 3, b: 4}], y: [{c: 1, d: 2}, {c: 3, d: 4}]});
+ t.insert({
+ group: 11,
+ x: [{a: 1, b: 2}, {a: 2, c: 3}, {a: 1, d: 5}],
+ covered: [{aa: 1, bb: 2}, {aa: 2, cc: 3}, {aa: 1, dd: 5}]
+ });
+ t.insert({group: 12, x: {y: [{a: 1, b: 1}, {a: 1, b: 2}]}});
+ t.insert({group: 13, x: [{a: 1, b: 1}, {a: 1, b: 2}]});
+ t.insert({group: 13, x: [{a: 1, b: 2}, {a: 1, b: 1}]});
}
-t.ensureIndex({group:1, 'y.d':1}); // for regular index test (not sure if this is really adding anything useful)
-t.ensureIndex({group:1, covered:1}); // for covered index test
+t.ensureIndex({
+ group: 1,
+ 'y.d': 1
+}); // for regular index test (not sure if this is really adding anything useful)
+t.ensureIndex({group: 1, covered: 1}); // for covered index test
//
// SERVER-828: Positional operator ($) projection tests
//
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).toArray()[0].x.length,
- "single object match (array length match)" );
-
-assert.eq( 2,
- t.find( { group:3, 'x.a':1 }, { 'x.$':1 } ).toArray()[0].x[0].b,
- "single object match first" );
-
-assert.eq( undefined,
- t.find( { group:3, 'x.a':2 }, { _id:0, 'x.$':1 } ).toArray()[0]._id,
- "single object match with filtered _id" );
-
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { _id:1 } ).toArray()[0].x.length,
- "sorted single object match with filtered _id (array length match)" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':1 } ).toArray()[0].x.length,
- "single object match with elemMatch" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':1} } ).toArray()[0].x.length,
- "single object match with elemMatch and positive slice" );
-
-assert.eq( 1,
- t.find( { 'group':2, 'x': { '$elemMatch' : { 'a':1, 'b':2 } } }, { 'x.$':{'$slice':-1} } ).toArray()[0].x.length,
- "single object match with elemMatch and negative slice" );
-
-assert.eq( 1,
- t.find( { 'group':12, 'x.y.a':1 }, { 'x.y.$': 1 } ).toArray()[0].x.y.length,
- "single object match with two level dot notation" );
-
-assert.eq( 1,
- t.find( { group:3, 'x.a':2 }, { 'x.$':1 } ).sort( { x:1 } ).toArray()[0].x.length,
- "sorted object match (array length match)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:3, 'y.dd':5 }, { 'y.$':1 } ).toArray()[0].y[0],
- "single object match (value match)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).toArray();
- }, [], "throw on invalid projection (field mismatch)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':2 }, { 'y.$':1 } ).sort( { x:1 } ).toArray();
- }, [], "throw on invalid sorted projection (field mismatch)" );
-
-assert.throws( function() {x;
- t.find( { group:3, 'x.a':2 }, { 'x.$':1, group:0 } ).sort( { x:1 } ).toArray();
- }, [], "throw on invalid projection combination (include and exclude)" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':1, 'y.aa':1 }, { 'x.$':1, 'y.$':1 } ).toArray();
- }, [], "throw on multiple projections" );
-
-assert.throws( function() {
- t.find( { group:3}, { 'g.$':1 } ).toArray();
- }, [], "throw on invalid projection (non-array field)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).toArray()[0].covered[0],
- "single object match (covered index)" );
-
-assert.eq( { aa:1, dd:5 },
- t.find( { group:11, 'covered.dd':5 }, { 'covered.$':1 } ).sort( { covered:1 } ).toArray()[0].covered[0],
- "single object match (sorted covered index)" );
-
-assert.eq( 1,
- t.find( { group:10, 'y.d': 4 }, { 'y.$':1 } ).toArray()[0].y.length,
- "single object match (regular index" );
+assert.eq(1,
+ t.find({group: 3, 'x.a': 2}, {'x.$': 1}).toArray()[0].x.length,
+ "single object match (array length match)");
+
+assert.eq(2,
+ t.find({group: 3, 'x.a': 1}, {'x.$': 1}).toArray()[0].x[0].b,
+ "single object match first");
+
+assert.eq(undefined,
+ t.find({group: 3, 'x.a': 2}, {_id: 0, 'x.$': 1}).toArray()[0]._id,
+ "single object match with filtered _id");
+
+assert.eq(1,
+ t.find({group: 3, 'x.a': 2}, {'x.$': 1}).sort({_id: 1}).toArray()[0].x.length,
+ "sorted single object match with filtered _id (array length match)");
+
+assert.eq(
+ 1,
+ t.find({'group': 2, 'x': {'$elemMatch': {'a': 1, 'b': 2}}}, {'x.$': 1}).toArray()[0].x.length,
+ "single object match with elemMatch");
+
+assert.eq(1,
+ t.find({'group': 2, 'x': {'$elemMatch': {'a': 1, 'b': 2}}}, {'x.$': {'$slice': 1}})
+ .toArray()[0]
+ .x.length,
+ "single object match with elemMatch and positive slice");
+
+assert.eq(1,
+ t.find({'group': 2, 'x': {'$elemMatch': {'a': 1, 'b': 2}}}, {'x.$': {'$slice': -1}})
+ .toArray()[0]
+ .x.length,
+ "single object match with elemMatch and negative slice");
+
+assert.eq(1,
+ t.find({'group': 12, 'x.y.a': 1}, {'x.y.$': 1}).toArray()[0].x.y.length,
+ "single object match with two level dot notation");
+
+assert.eq(1,
+ t.find({group: 3, 'x.a': 2}, {'x.$': 1}).sort({x: 1}).toArray()[0].x.length,
+ "sorted object match (array length match)");
+
+assert.eq({aa: 1, dd: 5},
+ t.find({group: 3, 'y.dd': 5}, {'y.$': 1}).toArray()[0].y[0],
+ "single object match (value match)");
+
+assert.throws(function() {
+ t.find({group: 3, 'x.a': 2}, {'y.$': 1}).toArray();
+}, [], "throw on invalid projection (field mismatch)");
+
+assert.throws(function() {
+ t.find({group: 3, 'x.a': 2}, {'y.$': 1}).sort({x: 1}).toArray();
+}, [], "throw on invalid sorted projection (field mismatch)");
+
+assert.throws(function() {
+ x;
+ t.find({group: 3, 'x.a': 2}, {'x.$': 1, group: 0}).sort({x: 1}).toArray();
+}, [], "throw on invalid projection combination (include and exclude)");
+
+assert.throws(function() {
+ t.find({group: 3, 'x.a': 1, 'y.aa': 1}, {'x.$': 1, 'y.$': 1}).toArray();
+}, [], "throw on multiple projections");
+
+assert.throws(function() {
+ t.find({group: 3}, {'g.$': 1}).toArray();
+}, [], "throw on invalid projection (non-array field)");
+
+assert.eq({aa: 1, dd: 5},
+ t.find({group: 11, 'covered.dd': 5}, {'covered.$': 1}).toArray()[0].covered[0],
+ "single object match (covered index)");
+
+assert.eq({aa: 1, dd: 5},
+ t.find({group: 11, 'covered.dd': 5}, {'covered.$': 1})
+ .sort({covered: 1})
+ .toArray()[0]
+ .covered[0],
+ "single object match (sorted covered index)");
+
+assert.eq(1,
+ t.find({group: 10, 'y.d': 4}, {'y.$': 1}).toArray()[0].y.length,
+ "single object match (regular index");
if (false) {
+ assert.eq(2, // SERVER-1013: allow multiple positional operators
+ t.find({group: 3, 'y.bb': 2, 'x.d': 5}, {'y.$': 1, 'x.$': 1}).toArray()[0].y[0].bb,
+ "multi match, multi proj 1");
- assert.eq( 2, // SERVER-1013: allow multiple positional operators
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].y[0].bb,
- "multi match, multi proj 1" );
-
- assert.eq( 5, // SSERVER-1013: allow multiple positional operators
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1, 'x.$':1 } ).toArray()[0].x[0].d,
- "multi match, multi proj 2" );
-
- assert.eq( 2, // SERVER-1243: allow multiple results from same matcher
- t.find( { group:2, x: { $elemMatchAll: { a:1 } } }, { 'x.$':1 } ).toArray()[0].x.length,
- "multi element match, single proj" );
+    assert.eq(5,  // SERVER-1013: allow multiple positional operators
+ t.find({group: 3, 'y.bb': 2, 'x.d': 5}, {'y.$': 1, 'x.$': 1}).toArray()[0].x[0].d,
+ "multi match, multi proj 2");
- assert.eq( 2, // SERVER-1013: multiple array matches with one prositional operator
- t.find( { group:3, 'y.bb':2, 'x.d':5 }, { 'y.$':1 } ).toArray()[0].y[0].bb,
- "multi match, single proj 1" );
+ assert.eq(2, // SERVER-1243: allow multiple results from same matcher
+ t.find({group: 2, x: {$elemMatchAll: {a: 1}}}, {'x.$': 1}).toArray()[0].x.length,
+ "multi element match, single proj");
- assert.eq( 2, // SERVER-1013: multiple array matches with one positional operator
- t.find( { group:3, 'y.cc':3, 'x.b':2 }, { 'x.$':1 } ).toArray()[0].x[0].b,
- "multi match, single proj 2" );
+    assert.eq(2,  // SERVER-1013: multiple array matches with one positional operator
+ t.find({group: 3, 'y.bb': 2, 'x.d': 5}, {'y.$': 1}).toArray()[0].y[0].bb,
+ "multi match, single proj 1");
+ assert.eq(2, // SERVER-1013: multiple array matches with one positional operator
+ t.find({group: 3, 'y.cc': 3, 'x.b': 2}, {'x.$': 1}).toArray()[0].x[0].b,
+ "multi match, single proj 2");
}
//
// SERVER-2238: $elemMatch projections
//
-assert.eq( -6,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x[0].a,
- "single object match" );
-
-assert.eq( 1,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6 } } } ).toArray()[0].x.length,
- "filters non-matching array elements" );
-
-assert.eq( 1,
- t.find( { group:4 }, { x: { $elemMatch: { a:-6, c:3 } } } ).toArray()[0].x.length,
- "filters non-matching array elements with multiple elemMatch criteria" );
-
-assert.eq( 1,
- t.find( { group: 13 }, { 'x' : {'$elemMatch' : { a: {$gt: 0, $lt: 2} } } } ).toArray()[0].x.length,
- "filters non-matching array elements with multiple criteria for a single element in the array" );
-
-assert.eq( 3,
- t.find( { group:4 }, { x: { $elemMatch: { a:{ $lt:1 } } } } ).toArray()[0].x[0].c,
- "object operator match" );
-
-assert.eq( [ 4 ],
- t.find( { group:1 }, { x: { $elemMatch: { $in:[100, 4, -123] } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [ {a : 1, b : 2} ],
- t.find( { group:2 }, { x: { $elemMatch: { a: { $in:[1] } } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [1],
- t.find( { group:1 }, { x: { $elemMatch: { $nin:[4, 5, 6] } } } ).toArray()[0].x,
- "$nin number match" );
-
-// but this may become a user assertion, since a single element of an array can't match more than one value
-assert.eq( [ 1],
- t.find( { group:1 }, { x: { $elemMatch: { $all:[1] } } } ).toArray()[0].x,
- "$in number match" );
-
-assert.eq( [ { a: 'string', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a:'string' } } } ).toArray()[0].x,
- "mixed object match on string eq" );
-
-assert.eq( [ { a: 'string2', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a:/ring2/ } } } ).toArray()[0].x,
- "mixed object match on regexp" );
-
-assert.eq( [ { a: 'string', b: date1 } ],
- t.find( { group:6 }, { x: { $elemMatch: { a: { $type: 2 } } } } ).toArray()[0].x,
- "mixed object match on type" );
-
-assert.eq( [ { a : 2, c : 3} ],
- t.find( { group:2 }, { x: { $elemMatch: { a: { $ne: 1 } } } } ).toArray()[0].x,
- "mixed object match on ne" );
-
-assert.eq( [ {a : 1, d : 5} ],
- t.find( { group:3 }, { x: { $elemMatch: { d: { $exists: true } } } } ).toArray()[0].x,
- "mixed object match on exists" );
-
-assert.eq( [ {a : 2, c : 3} ],
- t.find( { group:3 }, { x: { $elemMatch: { a: { $mod : [2, 0 ] } } } } ).toArray()[0].x,
- "mixed object match on mod" );
-
-assert.eq( {"x" : [ { "a" : 1, "b" : 2 } ], "y" : [ { "c" : 3, "d" : 4 } ] },
- t.find( { group:10 }, { _id : 0,
- x: { $elemMatch: { a: 1 } },
- y: { $elemMatch: { c: 3 } } } ).toArray()[0],
- "multiple $elemMatch on unique fields 1" );
-
-assert.eq( {"x" : [ { "y" : [ { "a" : 1, "b" : 2 }, { "a" : 3, "b" : 4 } ] } ] },
- t.find( { group:8 },
- { _id : 0,
- x: { $elemMatch: { y: { $elemMatch : { a: 3 } } } } } ).toArray()[0],
- "nested $elemMatch" );
-
-assert.throws( function() {
- t.find( { group:3, 'x.a':1 },
- { 'x.$':1, y: { $elemMatch: { aa: 1 } } } ).toArray();
- }, [], "throw on positional operator with $elemMatch" );
+assert.eq(-6,
+ t.find({group: 4}, {x: {$elemMatch: {a: -6}}}).toArray()[0].x[0].a,
+ "single object match");
+
+assert.eq(1,
+ t.find({group: 4}, {x: {$elemMatch: {a: -6}}}).toArray()[0].x.length,
+ "filters non-matching array elements");
+
+assert.eq(1,
+ t.find({group: 4}, {x: {$elemMatch: {a: -6, c: 3}}}).toArray()[0].x.length,
+ "filters non-matching array elements with multiple elemMatch criteria");
+
+assert.eq(
+ 1,
+ t.find({group: 13}, {'x': {'$elemMatch': {a: {$gt: 0, $lt: 2}}}}).toArray()[0].x.length,
+ "filters non-matching array elements with multiple criteria for a single element in the array");
+
+assert.eq(3,
+ t.find({group: 4}, {x: {$elemMatch: {a: {$lt: 1}}}}).toArray()[0].x[0].c,
+ "object operator match");
+
+assert.eq([4],
+ t.find({group: 1}, {x: {$elemMatch: {$in: [100, 4, -123]}}}).toArray()[0].x,
+ "$in number match");
+
+assert.eq([{a: 1, b: 2}],
+ t.find({group: 2}, {x: {$elemMatch: {a: {$in: [1]}}}}).toArray()[0].x,
+ "$in number match");
+
+assert.eq([1],
+ t.find({group: 1}, {x: {$elemMatch: {$nin: [4, 5, 6]}}}).toArray()[0].x,
+ "$nin number match");
+
+// but this may become a user assertion, since a single element of an array can't match more than
+// one value
+assert.eq([1],
+ t.find({group: 1}, {x: {$elemMatch: {$all: [1]}}}).toArray()[0].x,
+ "$in number match");
+
+assert.eq([{a: 'string', b: date1}],
+ t.find({group: 6}, {x: {$elemMatch: {a: 'string'}}}).toArray()[0].x,
+ "mixed object match on string eq");
+
+assert.eq([{a: 'string2', b: date1}],
+ t.find({group: 6}, {x: {$elemMatch: {a: /ring2/}}}).toArray()[0].x,
+ "mixed object match on regexp");
+
+assert.eq([{a: 'string', b: date1}],
+ t.find({group: 6}, {x: {$elemMatch: {a: {$type: 2}}}}).toArray()[0].x,
+ "mixed object match on type");
+
+assert.eq([{a: 2, c: 3}],
+ t.find({group: 2}, {x: {$elemMatch: {a: {$ne: 1}}}}).toArray()[0].x,
+ "mixed object match on ne");
+
+assert.eq([{a: 1, d: 5}],
+ t.find({group: 3}, {x: {$elemMatch: {d: {$exists: true}}}}).toArray()[0].x,
+ "mixed object match on exists");
+
+assert.eq([{a: 2, c: 3}],
+ t.find({group: 3}, {x: {$elemMatch: {a: {$mod: [2, 0]}}}}).toArray()[0].x,
+ "mixed object match on mod");
+
+assert.eq({"x": [{"a": 1, "b": 2}], "y": [{"c": 3, "d": 4}]},
+ t.find({group: 10}, {_id: 0, x: {$elemMatch: {a: 1}}, y: {$elemMatch: {c: 3}}})
+ .toArray()[0],
+ "multiple $elemMatch on unique fields 1");
+
+assert.eq({"x": [{"y": [{"a": 1, "b": 2}, {"a": 3, "b": 4}]}]},
+ t.find({group: 8}, {_id: 0, x: {$elemMatch: {y: {$elemMatch: {a: 3}}}}}).toArray()[0],
+ "nested $elemMatch");
+
+assert.throws(function() {
+ t.find({group: 3, 'x.a': 1}, {'x.$': 1, y: {$elemMatch: {aa: 1}}}).toArray();
+}, [], "throw on positional operator with $elemMatch");
if (false) {
+ assert.eq(2, // SERVER-1243: handle multiple $elemMatch results
+ t.find({group: 4}, {x: {$elemMatchAll: {a: {$lte: 2}}}}).toArray()[0].x.length,
+ "multi object match");
- assert.eq( 2 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:4 }, { x: { $elemMatchAll: { a:{ $lte:2 } } } } ).toArray()[0].x.length,
- "multi object match" );
-
- assert.eq( 3 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:1 }, { x: { $elemMatchAll: { $in:[1, 2, 3] } } } ).toArray()[0].x.length,
- "$in number match" );
-
- assert.eq( 1 , // SERVER-1243: handle multiple $elemMatch results
- t.find( { group:5 }, { x: { $elemMatchAll: { $ne: 5 } } } ).toArray()[0].x.length,
- "single mixed type match 1" );
+ assert.eq(3, // SERVER-1243: handle multiple $elemMatch results
+ t.find({group: 1}, {x: {$elemMatchAll: {$in: [1, 2, 3]}}}).toArray()[0].x.length,
+ "$in number match");
- assert.eq( 1 , // SERVER-831: handle nested arrays
- t.find( { group:9 }, { 'x.y': { $elemMatch: { a: 1 } } } ).toArray()[0].x.length,
- "single dotted match" );
+ assert.eq(1, // SERVER-1243: handle multiple $elemMatch results
+ t.find({group: 5}, {x: {$elemMatchAll: {$ne: 5}}}).toArray()[0].x.length,
+ "single mixed type match 1");
+ assert.eq(1, // SERVER-831: handle nested arrays
+ t.find({group: 9}, {'x.y': {$elemMatch: {a: 1}}}).toArray()[0].x.length,
+ "single dotted match");
}
//
// Batch/getMore tests
//
// test positional operator across multiple batches
-a = t.find( { group:3, 'x.b':2 }, { 'x.$':1 } ).batchSize(1);
-while ( a.hasNext() ) {
- assert.eq( 2, a.next().x[0].b, "positional getMore test");
+a = t.find({group: 3, 'x.b': 2}, {'x.$': 1}).batchSize(1);
+while (a.hasNext()) {
+ assert.eq(2, a.next().x[0].b, "positional getMore test");
}
// test $elemMatch operator across multiple batches
-a = t.find( { group:3 }, { x:{$elemMatch:{a:1}} } ).batchSize(1);
-while ( a.hasNext() ) {
- assert.eq( 1, a.next().x[0].a, "positional getMore test");
+a = t.find({group: 3}, {x: {$elemMatch: {a: 1}}}).batchSize(1);
+while (a.hasNext()) {
+ assert.eq(1, a.next().x[0].a, "positional getMore test");
}
diff --git a/jstests/core/error2.js b/jstests/core/error2.js
index 2a6a10170a1..6f0b95bc17e 100644
--- a/jstests/core/error2.js
+++ b/jstests/core/error2.js
@@ -4,18 +4,19 @@ f = db.jstests_error2;
f.drop();
-f.save( {a:1} );
+f.save({a: 1});
-assert.throws(
- function(){
- c = f.find({$where : function(){ return a(); }});
- c.next();
- }
-);
-
-assert.throws(
- function(){
- db.eval( function() { return a(); } );
- }
-);
+assert.throws(function() {
+ c = f.find({
+ $where: function() {
+ return a();
+ }
+ });
+ c.next();
+});
+assert.throws(function() {
+ db.eval(function() {
+ return a();
+ });
+});
diff --git a/jstests/core/error5.js b/jstests/core/error5.js
index 4a58f0dcf7a..0b5ab003dda 100644
--- a/jstests/core/error5.js
+++ b/jstests/core/error5.js
@@ -2,7 +2,9 @@
t = db.error5;
t.drop();
-assert.throws( function(){ t.save( 4 ); printjson( t.findOne() ); } , null , "A" );
-t.save( { a : 1 } );
-assert.eq( 1 , t.count() , "B" );
-
+assert.throws(function() {
+ t.save(4);
+ printjson(t.findOne());
+}, null, "A");
+t.save({a: 1});
+assert.eq(1, t.count(), "B");
diff --git a/jstests/core/eval0.js b/jstests/core/eval0.js
index 4375cace839..a0c93da2cab 100644
--- a/jstests/core/eval0.js
+++ b/jstests/core/eval0.js
@@ -1,8 +1,22 @@
-assert.eq( 17 , db.eval( function(){ return 11 + 6; } ) , "A" );
-assert.eq( 17 , db.eval( function( x ){ return 10 + x; } , 7 ) , "B" );
+assert.eq(17,
+ db.eval(function() {
+ return 11 + 6;
+ }),
+ "A");
+assert.eq(17,
+ db.eval(
+ function(x) {
+ return 10 + x;
+ },
+ 7),
+ "B");
// check that functions in system.js work
-db.system.js.insert({_id: "add", value: function(x,y){ return x + y;}});
-assert.eq( 20 , db.eval( "this.add(15, 5);" ) , "C" );
-
+db.system.js.insert({
+ _id: "add",
+ value: function(x, y) {
+ return x + y;
+ }
+});
+assert.eq(20, db.eval("this.add(15, 5);"), "C");
diff --git a/jstests/core/eval1.js b/jstests/core/eval1.js
index 1fdcec66152..8b139cae02a 100644
--- a/jstests/core/eval1.js
+++ b/jstests/core/eval1.js
@@ -2,16 +2,14 @@
t = db.eval1;
t.drop();
-t.save( { _id : 1 , name : "eliot" } );
-t.save( { _id : 2 , name : "sara" } );
+t.save({_id: 1, name: "eliot"});
+t.save({_id: 2, name: "sara"});
-f = function(id){
- return db["eval1"].findOne( { _id : id } ).name;
+f = function(id) {
+ return db["eval1"].findOne({_id: id}).name;
};
-
-assert.eq( "eliot" , f( 1 ) , "A" );
-assert.eq( "sara" , f( 2 ) , "B" );
-assert.eq( "eliot" , db.eval( f , 1 ) , "C" );
-assert.eq( "sara" , db.eval( f , 2 ) , "D" );
-
+assert.eq("eliot", f(1), "A");
+assert.eq("sara", f(2), "B");
+assert.eq("eliot", db.eval(f, 1), "C");
+assert.eq("sara", db.eval(f, 2), "D");
diff --git a/jstests/core/eval2.js b/jstests/core/eval2.js
index 6e39bb4a7bd..598314a6c5b 100644
--- a/jstests/core/eval2.js
+++ b/jstests/core/eval2.js
@@ -1,28 +1,28 @@
t = db.eval2;
t.drop();
-t.save({a:1});
-t.save({a:1});
+t.save({a: 1});
+t.save({a: 1});
-var f = db.group(
- {
- ns: t.getName(),
- key: { a:true},
- cond: { a:1 },
- reduce: function(obj,prev) { prev.csum++; } ,
- initial: { csum: 0}
- }
-);
+var f = db.group({
+ ns: t.getName(),
+ key: {a: true},
+ cond: {a: 1},
+ reduce: function(obj, prev) {
+ prev.csum++;
+ },
+ initial: {csum: 0}
+});
-assert(f[0].a == 1 && f[0].csum == 2 , "on db" );
+assert(f[0].a == 1 && f[0].csum == 2, "on db");
-var f = t.group(
- {
- key: { a:true},
- cond: { a:1 },
- reduce: function(obj,prev) { prev.csum++; } ,
- initial: { csum: 0}
- }
-);
+var f = t.group({
+ key: {a: true},
+ cond: {a: 1},
+ reduce: function(obj, prev) {
+ prev.csum++;
+ },
+ initial: {csum: 0}
+});
-assert(f[0].a == 1 && f[0].csum == 2 , "on coll" );
+assert(f[0].a == 1 && f[0].csum == 2, "on coll");
diff --git a/jstests/core/eval3.js b/jstests/core/eval3.js
index 404d4d863b7..c4f8be21056 100644
--- a/jstests/core/eval3.js
+++ b/jstests/core/eval3.js
@@ -2,20 +2,34 @@
t = db.eval3;
t.drop();
-t.save( { _id : 1 , name : "eliot" } );
-assert.eq( 1 , t.count() , "A" );
+t.save({_id: 1, name: "eliot"});
+assert.eq(1, t.count(), "A");
-function z( a , b ){
- db.eval3.save( { _id : a , name : b } );
+function z(a, b) {
+ db.eval3.save({_id: a, name: b});
return b;
}
-z( 2 , "sara" );
-assert.eq( 2 , t.count() , "B" );
+z(2, "sara");
+assert.eq(2, t.count(), "B");
-assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
+assert.eq("eliot,sara",
+ t.find()
+ .toArray()
+ .map(function(z) {
+ return z.name;
+ })
+ .sort()
+ .toString());
-assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" );
-assert.eq( 3 , t.count() , "D" );
+assert.eq("joe", db.eval(z, 3, "joe"), "C");
+assert.eq(3, t.count(), "D");
-assert.eq( "eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
+assert.eq("eliot,joe,sara",
+ t.find()
+ .toArray()
+ .map(function(z) {
+ return z.name;
+ })
+ .sort()
+ .toString());
diff --git a/jstests/core/eval4.js b/jstests/core/eval4.js
index 31d6ef0c2a8..0d120b393de 100644
--- a/jstests/core/eval4.js
+++ b/jstests/core/eval4.js
@@ -2,22 +2,21 @@
t = db.eval4;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
-assert.eq( 3 , t.count() , "A" );
+assert.eq(3, t.count(), "A");
-function f( x ){
- db.eval4.remove( { a : x } );
+function f(x) {
+ db.eval4.remove({a: x});
}
-f( 2 );
-assert.eq( 2 , t.count() , "B" );
+f(2);
+assert.eq(2, t.count(), "B");
-db.eval( f , 2 );
-assert.eq( 2 , t.count() , "C" );
-
-db.eval( f , 3 );
-assert.eq( 1 , t.count() , "D" );
+db.eval(f, 2);
+assert.eq(2, t.count(), "C");
+db.eval(f, 3);
+assert.eq(1, t.count(), "D");
diff --git a/jstests/core/eval5.js b/jstests/core/eval5.js
index a9223a555a6..46bd679dd77 100644
--- a/jstests/core/eval5.js
+++ b/jstests/core/eval5.js
@@ -2,22 +2,15 @@
t = db.eval5;
t.drop();
-t.save( { a : 1 , b : 2 , c : 3 } );
+t.save({a: 1, b: 2, c: 3});
-assert.eq( 3 ,
- db.eval(
- function(z){
- return db.eval5.find().toArray()[0].c;
- }
- ) ,
- "something weird A"
- );
+assert.eq(3,
+ db.eval(function(z) {
+ return db.eval5.find().toArray()[0].c;
+ }),
+ "something weird A");
-assert.isnull(
- db.eval(
- function(z){
- return db.eval5.find( {} , { a : 1 } ).toArray()[0].c;
- }
- ),
- "field spec didn't work"
- );
+assert.isnull(db.eval(function(z) {
+ return db.eval5.find({}, {a: 1}).toArray()[0].c;
+}),
+ "field spec didn't work");
diff --git a/jstests/core/eval6.js b/jstests/core/eval6.js
index 5fe096974c6..31258f6917b 100644
--- a/jstests/core/eval6.js
+++ b/jstests/core/eval6.js
@@ -2,14 +2,12 @@
t = db.eval6;
t.drop();
-t.save( { a : 1 } );
+t.save({a: 1});
-db.eval(
- function(){
- o = db.eval6.findOne();
- o.b = 2;
- db.eval6.save( o );
- }
-);
+db.eval(function() {
+ o = db.eval6.findOne();
+ o.b = 2;
+ db.eval6.save(o);
+});
-assert.eq( 2 , t.findOne().b );
+assert.eq(2, t.findOne().b);
diff --git a/jstests/core/eval7.js b/jstests/core/eval7.js
index 3d706a2eaa7..89f395d5128 100644
--- a/jstests/core/eval7.js
+++ b/jstests/core/eval7.js
@@ -1,3 +1,5 @@
-assert.eq( 6 , db.eval( "5 + 1" ) , "A" );
-assert.throws( function(z){ db.eval( "5 + function x; + 1" );} );
+assert.eq(6, db.eval("5 + 1"), "A");
+assert.throws(function(z) {
+ db.eval("5 + function x; + 1");
+});
diff --git a/jstests/core/eval8.js b/jstests/core/eval8.js
index e2ec3db31a8..24f710f4b9f 100644
--- a/jstests/core/eval8.js
+++ b/jstests/core/eval8.js
@@ -2,18 +2,21 @@
t = db.eval8;
t.drop();
-x = { a : 1 , b : 2 };
-t.save( x );
+x = {
+ a: 1,
+ b: 2
+};
+t.save(x);
x = t.findOne();
-assert( x.a && x.b , "A" );
+assert(x.a && x.b, "A");
delete x.b;
-assert( x.a && ! x.b , "B" );
+assert(x.a && !x.b, "B");
x.b = 3;
-assert( x.a && x.b , "C" );
-assert.eq( 3 , x.b , "D" );
+assert(x.a && x.b, "C");
+assert.eq(3, x.b, "D");
-t.save( x );
+t.save(x);
y = t.findOne();
-assert.eq( tojson( x ) , tojson( y ) , "E" );
+assert.eq(tojson(x), tojson(y), "E");
diff --git a/jstests/core/eval9.js b/jstests/core/eval9.js
index a6d8560416e..6998345bf13 100644
--- a/jstests/core/eval9.js
+++ b/jstests/core/eval9.js
@@ -1,22 +1,23 @@
-a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ];
+a = [1, "asd", null, [2, 3], new Date(), {x: 1}];
-for ( var i=0; i<a.length; i++ ){
- var ret = db.eval( "function( a , i ){ return a[i]; }" , a , i );
- assert.eq( typeof( a[i] ) , typeof( ret ) , "type test" );
- assert.eq( a[i] , ret , "val test: " + typeof( a[i] ) );
+for (var i = 0; i < a.length; i++) {
+ var ret = db.eval("function( a , i ){ return a[i]; }", a, i);
+ assert.eq(typeof(a[i]), typeof(ret), "type test");
+ assert.eq(a[i], ret, "val test: " + typeof(a[i]));
}
db.eval9.drop();
-db.eval9.save( { a : 17 } );
+db.eval9.save({a: 17});
-assert.eq( 1 , db.eval( "return db.eval9.find().toArray()" ).length , "A" );
-assert.eq( 17 , db.eval( "return db.eval9.find().toArray()" )[0].a , "B" );
+assert.eq(1, db.eval("return db.eval9.find().toArray()").length, "A");
+assert.eq(17, db.eval("return db.eval9.find().toArray()")[0].a, "B");
// just to make sure these things don't crash (but may throw an exception)
try {
- db.eval( "return db.eval9.find()" );
- db.eval( "return db.eval9" );
- db.eval( "return db" );
- db.eval( "return print" );
-} catch (ex) { } \ No newline at end of file
+ db.eval("return db.eval9.find()");
+ db.eval("return db.eval9");
+ db.eval("return db");
+ db.eval("return print");
+} catch (ex) {
+} \ No newline at end of file
diff --git a/jstests/core/eval_mr.js b/jstests/core/eval_mr.js
index 84a929035d6..84036b1e0d5 100644
--- a/jstests/core/eval_mr.js
+++ b/jstests/core/eval_mr.js
@@ -7,9 +7,15 @@
assert.writeOK(db.eval_mr.insert({val: 2}));
var runBasicMapReduce = function() {
return db.eval_mr.runCommand("mapReduce",
- {map: function() { emit(0, this.val); },
- reduce: function(id, values) { return Array.sum(values); },
- out: {replace: "eval_mr_out"}});
+ {
+ map: function() {
+ emit(0, this.val);
+ },
+ reduce: function(id, values) {
+ return Array.sum(values);
+ },
+ out: {replace: "eval_mr_out"}
+ });
};
assert.commandWorked(runBasicMapReduce());
assert.eq(3, db.eval_mr_out.findOne().value);
diff --git a/jstests/core/eval_nolock.js b/jstests/core/eval_nolock.js
index 4701df9b7f7..9511784becb 100644
--- a/jstests/core/eval_nolock.js
+++ b/jstests/core/eval_nolock.js
@@ -2,15 +2,15 @@
t = db.eval_nolock;
t.drop();
-for ( i=0; i<10; i++ )
- t.insert( { _id : i } );
+for (i = 0; i < 10; i++)
+ t.insert({_id: i});
-res = db.runCommand( { eval :
- function(){
- db.eval_nolock.insert( { _id : 123 } );
- return db.eval_nolock.count();
- }
- , nolock : true } );
-
-assert.eq( 11 , res.retval , "A" );
+res = db.runCommand({
+ eval: function() {
+ db.eval_nolock.insert({_id: 123});
+ return db.eval_nolock.count();
+ },
+ nolock: true
+});
+assert.eq(11, res.retval, "A");
diff --git a/jstests/core/evala.js b/jstests/core/evala.js
index 88d479127c0..7ccf33ac754 100644
--- a/jstests/core/evala.js
+++ b/jstests/core/evala.js
@@ -2,8 +2,7 @@
t = db.evala;
t.drop();
-t.save( { x : 5 } );
-
-assert.eq( 5 , db.eval( "function(){ return db.evala.findOne().x; }" ) , "A" );
-assert.eq( 5 , db.eval( "/* abc */function(){ return db.evala.findOne().x; }" ) , "B" );
+t.save({x: 5});
+assert.eq(5, db.eval("function(){ return db.evala.findOne().x; }"), "A");
+assert.eq(5, db.eval("/* abc */function(){ return db.evala.findOne().x; }"), "B");
diff --git a/jstests/core/evalb.js b/jstests/core/evalb.js
index 5e8fac05537..3391c4cc4f2 100644
--- a/jstests/core/evalb.js
+++ b/jstests/core/evalb.js
@@ -3,39 +3,44 @@
// Use a reserved database name to avoid a conflict in the parallel test suite.
var stddb = db;
-var db = db.getSisterDB( 'evalb' );
+var db = db.getSisterDB('evalb');
function profileCursor() {
- return db.system.profile.find( { user:username + "@" + db.getName() } );
+ return db.system.profile.find({user: username + "@" + db.getName()});
}
function lastOp() {
- return profileCursor().sort( { $natural:-1 } ).next();
+ return profileCursor().sort({$natural: -1}).next();
}
try {
-
username = 'jstests_evalb_user';
db.dropUser(username);
db.createUser({user: username, pwd: 'password', roles: jsTest.basicUserRoles});
- db.auth( username, 'password' );
+ db.auth(username, 'password');
t = db.evalb;
t.drop();
- t.save( { x:3 } );
+ t.save({x: 3});
- assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'A' );
+ assert.eq(3,
+ db.eval(function() {
+ return db.evalb.findOne().x;
+ }),
+ 'A');
- db.setProfilingLevel( 2 );
+ db.setProfilingLevel(2);
- assert.eq( 3, db.eval( function() { return db.evalb.findOne().x; } ), 'B' );
+ assert.eq(3,
+ db.eval(function() {
+ return db.evalb.findOne().x;
+ }),
+ 'B');
o = lastOp();
- assert( tojson( o ).indexOf( 'findOne().x' ) > 0, 'C : ' + tojson( o ) );
-}
-finally {
-
+ assert(tojson(o).indexOf('findOne().x') > 0, 'C : ' + tojson(o));
+} finally {
db.setProfilingLevel(0);
db = stddb;
}
diff --git a/jstests/core/evalc.js b/jstests/core/evalc.js
index 36ea04037d0..0d55790afe3 100644
--- a/jstests/core/evalc.js
+++ b/jstests/core/evalc.js
@@ -4,22 +4,23 @@ t.drop();
t2 = db.evalc_done;
t2.drop();
-for( i = 0; i < 10; ++i ) {
- t.save( {i:i} );
+for (i = 0; i < 10; ++i) {
+ t.save({i: i});
}
// SERVER-1610
-assert.eq( 0 , t2.count() , "X1" );
+assert.eq(0, t2.count(), "X1");
-s = startParallelShell( "print( 'starting forked:' + Date() ); for ( i=0; i<10*1000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); " );
+s = startParallelShell(
+ "print( 'starting forked:' + Date() ); for ( i=0; i<10*1000; i++ ){ db.currentOp(); } print( 'ending forked:' + Date() ); db.evalc_done.insert( { x : 1 } ); ");
-print( "starting eval: " + Date() );
+print("starting eval: " + Date());
assert.soon(function() {
- db.eval( "db.jstests_evalc.count( {i:10} );" );
+ db.eval("db.jstests_evalc.count( {i:10} );");
return t2.count() > 0;
}, 'parallel shell failed to update ' + t2.getFullName(), 120000, 10);
-print( "end eval: " + Date() );
+print("end eval: " + Date());
s();
diff --git a/jstests/core/evald.js b/jstests/core/evald.js
index 7e516e9f7d6..8049d2ba8ae 100644
--- a/jstests/core/evald.js
+++ b/jstests/core/evald.js
@@ -1,25 +1,26 @@
t = db.jstests_evald;
t.drop();
-function debug( x ) {
-// printjson( x );
+function debug(x) {
+ // printjson( x );
}
-for( i = 0; i < 10; ++i ) {
- t.save( {i:i} );
+for (i = 0; i < 10; ++i) {
+ t.save({i: i});
}
-function op( ev, where ) {
+function op(ev, where) {
p = db.currentOp().inprog;
- debug( p );
- for ( var i in p ) {
- var o = p[ i ];
- if ( where ) {
- if ( o.active && o.query && o.query.query && o.query.query.$where && o.ns == "test.jstests_evald" ) {
+ debug(p);
+ for (var i in p) {
+ var o = p[i];
+ if (where) {
+ if (o.active && o.query && o.query.query && o.query.query.$where &&
+ o.ns == "test.jstests_evald") {
return o.opid;
}
} else {
- if ( o.active && o.query && o.query.$eval && o.query.$eval == ev ) {
+ if (o.active && o.query && o.query.$eval && o.query.$eval == ev) {
return o.opid;
}
}
@@ -27,31 +28,34 @@ function op( ev, where ) {
return -1;
}
-function doIt( ev, wait, where ) {
+function doIt(ev, wait, where) {
var awaitShell;
- if ( where ) {
- awaitShell = startParallelShell( ev );
+ if (where) {
+ awaitShell = startParallelShell(ev);
} else {
- awaitShell = startParallelShell( "db.eval( '" + ev + "' )" );
+ awaitShell = startParallelShell("db.eval( '" + ev + "' )");
}
o = null;
- assert.soon( function() { o = op( ev, where ); return o != -1; } );
+ assert.soon(function() {
+ o = op(ev, where);
+ return o != -1;
+ });
- if ( wait ) {
- sleep( 2000 );
+ if (wait) {
+ sleep(2000);
}
- debug( "going to kill" );
+ debug("going to kill");
- db.killOp( o );
+ db.killOp(o);
- debug( "sent kill" );
+ debug("sent kill");
var exitCode = awaitShell({checkExitSuccess: false});
- assert.neq(0, exitCode,
- "expected shell to exit abnormally due to JS execution being terminated");
+ assert.neq(
+ 0, exitCode, "expected shell to exit abnormally due to JS execution being terminated");
}
// nested scope with nested invoke()
@@ -75,24 +79,18 @@ doIt("while(1) { for( var i = 0; i < 10000; ++i ) {;} db.jstests_evald.count();
// try/catch with tight-loop kill tests.
// native callback with nested invoke(), drop JS exceptions
doIt("while(1) { " +
- " for(var i = 0; i < 10000; ++i) {;} " +
- " try { " +
- " db.jstests_evald.count({i:10}); " +
- " } catch (e) {} " +
- "}", true );
+ " for(var i = 0; i < 10000; ++i) {;} " +
+ " try { " +
+ " db.jstests_evald.count({i:10}); " +
+ " } catch (e) {} " + "}",
+ true);
// native callback, drop JS exceptions
-doIt("while(1) { " +
- " try { " +
- " while(1) { " +
- " sleep(1); " +
- " } " +
- " } catch (e) {} " +
- "}", true );
+doIt("while(1) { " + " try { " + " while(1) { " +
+ " sleep(1); " + " } " + " } catch (e) {} " + "}",
+ true);
// no native callback and drop JS exceptions
-doIt("while(1) { " +
- " try { " +
- " while(1) {;} " +
- " } catch (e) {} " +
- "}", true );
+doIt("while(1) { " + " try { " + " while(1) {;} " +
+ " } catch (e) {} " + "}",
+ true);
diff --git a/jstests/core/evale.js b/jstests/core/evale.js
index a0f81942479..1ddc8519fc6 100644
--- a/jstests/core/evale.js
+++ b/jstests/core/evale.js
@@ -1,5 +1,11 @@
t = db.jstests_evale;
t.drop();
-db.eval( function() { return db.jstests_evale.count( { $where:function() { return true; } } ); } );
-db.eval( "db.jstests_evale.count( { $where:function() { return true; } } )" ); \ No newline at end of file
+db.eval(function() {
+ return db.jstests_evale.count({
+ $where: function() {
+ return true;
+ }
+ });
+});
+db.eval("db.jstests_evale.count( { $where:function() { return true; } } )"); \ No newline at end of file
diff --git a/jstests/core/evalg.js b/jstests/core/evalg.js
index 280e5261ef9..570464cbce2 100644
--- a/jstests/core/evalg.js
+++ b/jstests/core/evalg.js
@@ -1,11 +1,12 @@
// SERVER-17499: Test behavior of getMore on aggregation cursor under eval command.
db.evalg.drop();
-for (var i = 0; i < 102; ++i){
+for (var i = 0; i < 102; ++i) {
db.evalg.insert({});
}
-assert.eq(102, db.eval(function() {
- var cursor = db.evalg.aggregate();
- assert(cursor.hasNext());
- assert.eq(101, cursor.objsLeftInBatch());
- return cursor.itcount();
-}));
+assert.eq(102,
+ db.eval(function() {
+ var cursor = db.evalg.aggregate();
+ assert(cursor.hasNext());
+ assert.eq(101, cursor.objsLeftInBatch());
+ return cursor.itcount();
+ }));
diff --git a/jstests/core/exists.js b/jstests/core/exists.js
index e41a7cfde04..a29f0cb1d45 100644
--- a/jstests/core/exists.js
+++ b/jstests/core/exists.js
@@ -1,49 +1,48 @@
t = db.jstests_exists;
t.drop();
-t.save( {} );
-t.save( {a:1} );
-t.save( {a:{b:1}} );
-t.save( {a:{b:{c:1}}} );
-t.save( {a:{b:{c:{d:null}}}} );
-
-function dotest( n ){
-
- assert.eq( 5, t.count() , n );
- assert.eq( 1, t.count( {a:null} ) , n );
- assert.eq( 2, t.count( {'a.b':null} ) , n );
- assert.eq( 3, t.count( {'a.b.c':null} ) , n );
- assert.eq( 5, t.count( {'a.b.c.d':null} ) , n );
-
- assert.eq( 5, t.count() , n );
- assert.eq( 4, t.count( {a:{$ne:null}} ) , n );
- assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n );
- assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n );
- assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n );
-
- assert.eq( 4, t.count( {a: {$exists:true}} ) , n );
- assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n );
- assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n );
- assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n );
-
- assert.eq( 1, t.count( {a: {$exists:false}} ) , n );
- assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n );
- assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n );
- assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n );
+t.save({});
+t.save({a: 1});
+t.save({a: {b: 1}});
+t.save({a: {b: {c: 1}}});
+t.save({a: {b: {c: {d: null}}}});
+
+function dotest(n) {
+ assert.eq(5, t.count(), n);
+ assert.eq(1, t.count({a: null}), n);
+ assert.eq(2, t.count({'a.b': null}), n);
+ assert.eq(3, t.count({'a.b.c': null}), n);
+ assert.eq(5, t.count({'a.b.c.d': null}), n);
+
+ assert.eq(5, t.count(), n);
+ assert.eq(4, t.count({a: {$ne: null}}), n);
+ assert.eq(3, t.count({'a.b': {$ne: null}}), n);
+ assert.eq(2, t.count({'a.b.c': {$ne: null}}), n);
+ assert.eq(0, t.count({'a.b.c.d': {$ne: null}}), n);
+
+ assert.eq(4, t.count({a: {$exists: true}}), n);
+ assert.eq(3, t.count({'a.b': {$exists: true}}), n);
+ assert.eq(2, t.count({'a.b.c': {$exists: true}}), n);
+ assert.eq(1, t.count({'a.b.c.d': {$exists: true}}), n);
+
+ assert.eq(1, t.count({a: {$exists: false}}), n);
+ assert.eq(2, t.count({'a.b': {$exists: false}}), n);
+ assert.eq(3, t.count({'a.b.c': {$exists: false}}), n);
+ assert.eq(4, t.count({'a.b.c.d': {$exists: false}}), n);
}
-dotest( "before index" );
-t.ensureIndex( { "a" : 1 } );
-t.ensureIndex( { "a.b" : 1 } );
-t.ensureIndex( { "a.b.c" : 1 } );
-t.ensureIndex( { "a.b.c.d" : 1 } );
-dotest( "after index" );
-assert.eq( 1, t.find( {a: {$exists:false}} ).hint( {a:1} ).itcount() );
-
+dotest("before index");
+t.ensureIndex({"a": 1});
+t.ensureIndex({"a.b": 1});
+t.ensureIndex({"a.b.c": 1});
+t.ensureIndex({"a.b.c.d": 1});
+dotest("after index");
+assert.eq(1, t.find({a: {$exists: false}}).hint({a: 1}).itcount());
+
t.drop();
-t.save( {r:[{s:1}]} );
-assert( t.findOne( {'r.s':{$exists:true}} ) );
-assert( !t.findOne( {'r.s':{$exists:false}} ) );
-assert( !t.findOne( {'r.t':{$exists:true}} ) );
-assert( t.findOne( {'r.t':{$exists:false}} ) );
+t.save({r: [{s: 1}]});
+assert(t.findOne({'r.s': {$exists: true}}));
+assert(!t.findOne({'r.s': {$exists: false}}));
+assert(!t.findOne({'r.t': {$exists: true}}));
+assert(t.findOne({'r.t': {$exists: false}}));
diff --git a/jstests/core/exists2.js b/jstests/core/exists2.js
index 90ff25e0feb..5d8a0d80f91 100644
--- a/jstests/core/exists2.js
+++ b/jstests/core/exists2.js
@@ -2,15 +2,14 @@
t = db.exists2;
t.drop();
-t.save( { a : 1 , b : 1 } );
-t.save( { a : 1 , b : 1 , c : 1 } );
+t.save({a: 1, b: 1});
+t.save({a: 1, b: 1, c: 1});
-assert.eq( 2 , t.find().itcount() , "A1" );
-assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "A4" );
-
-t.ensureIndex( { a : 1 , b : 1 , c : 1 } );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" );
-assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : false } } ).itcount() , "B2" );
+assert.eq(2, t.find().itcount(), "A1");
+assert.eq(2, t.find({a: 1, b: 1}).itcount(), "A2");
+assert.eq(1, t.find({a: 1, b: 1, c: {"$exists": true}}).itcount(), "A3");
+assert.eq(1, t.find({a: 1, b: 1, c: {"$exists": false}}).itcount(), "A4");
+t.ensureIndex({a: 1, b: 1, c: 1});
+assert.eq(1, t.find({a: 1, b: 1, c: {"$exists": true}}).itcount(), "B1");
+assert.eq(1, t.find({a: 1, b: 1, c: {"$exists": false}}).itcount(), "B2");
diff --git a/jstests/core/exists3.js b/jstests/core/exists3.js
index c61f022939c..e4ce03437bb 100644
--- a/jstests/core/exists3.js
+++ b/jstests/core/exists3.js
@@ -5,17 +5,17 @@ t.drop();
t.insert({a: 1, b: 2});
-assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
-assert.eq( 1, t.count({c: {$exists: false}}) );
-assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
-assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
+assert.eq(1, t.find({}).sort({c: -1}).itcount());
+assert.eq(1, t.count({c: {$exists: false}}));
+assert.eq(1, t.find({c: {$exists: false}}).itcount());
+assert.eq(1, t.find({c: {$exists: false}}).sort({c: -1}).itcount());
-// now we have an index on the sort key
+// now we have an index on the sort key
t.ensureIndex({c: -1});
-assert.eq( 1, t.find({c: {$exists: false}}).sort({c: -1}).itcount() );
-assert.eq( 1, t.find({c: {$exists: false}}).itcount() );
-// still ok without the $exists
-assert.eq( 1, t.find({}).sort({c: -1}).itcount() );
-// and ok with a convoluted $not $exists
-assert.eq( 1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount() );
+assert.eq(1, t.find({c: {$exists: false}}).sort({c: -1}).itcount());
+assert.eq(1, t.find({c: {$exists: false}}).itcount());
+// still ok without the $exists
+assert.eq(1, t.find({}).sort({c: -1}).itcount());
+// and ok with a convoluted $not $exists
+assert.eq(1, t.find({c: {$not: {$exists: true}}}).sort({c: -1}).itcount());
diff --git a/jstests/core/exists4.js b/jstests/core/exists4.js
index fb801ed62e9..097a3462da9 100644
--- a/jstests/core/exists4.js
+++ b/jstests/core/exists4.js
@@ -3,18 +3,43 @@
t = db.jstests_exists4;
t.drop();
-t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1});
-t.insert({ date: new Date("08/27/2010"), tot_visit: 100});
-t.insert({ date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "and...@spacca.org", tot_visit: 11});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@gmail.com", tot_visit: 5});
-t.insert({ date: new Date("08/27/2010"), country_code: "ES", user_id: "andrea.spa...@progloedizioni.com", tot_visit: 7});
+t.ensureIndex({date: -1, country_code: 1, user_id: 1}, {unique: 1, background: 1});
+t.insert({date: new Date("08/27/2010"), tot_visit: 100});
+t.insert({date: new Date("08/27/2010"), country_code: "IT", tot_visit: 77});
+t.insert({date: new Date("08/27/2010"), country_code: "ES", tot_visit: 23});
+t.insert({
+ date: new Date("08/27/2010"),
+ country_code: "ES",
+ user_id: "and...@spacca.org",
+ tot_visit: 11
+});
+t.insert({
+ date: new Date("08/27/2010"),
+ country_code: "ES",
+ user_id: "andrea.spa...@gmail.com",
+ tot_visit: 5
+});
+t.insert({
+ date: new Date("08/27/2010"),
+ country_code: "ES",
+ user_id: "andrea.spa...@progloedizioni.com",
+ tot_visit: 7
+});
-assert.eq( 6, t.find({date: new Date("08/27/2010")}).count() );
-assert.eq( 5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count() );
-assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count() );
-assert.eq( 1, t.find({date: new Date("08/27/2010"), country_code: null}).count() );
-assert.eq( 3, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}}).count() );
-assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: false}}).count() );
-assert.eq( 2, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null}).count() );
+assert.eq(6, t.find({date: new Date("08/27/2010")}).count());
+assert.eq(5, t.find({date: new Date("08/27/2010"), country_code: {$exists: true}}).count());
+assert.eq(1, t.find({date: new Date("08/27/2010"), country_code: {$exists: false}}).count());
+assert.eq(1, t.find({date: new Date("08/27/2010"), country_code: null}).count());
+assert.eq(
+ 3,
+ t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: {$exists: true}})
+ .count());
+assert.eq(2,
+ t.find({
+ date: new Date("08/27/2010"),
+ country_code: {$exists: true},
+ user_id: {$exists: false}
+ }).count());
+assert.eq(2,
+ t.find({date: new Date("08/27/2010"), country_code: {$exists: true}, user_id: null})
+ .count());
diff --git a/jstests/core/exists5.js b/jstests/core/exists5.js
index a90a94f908f..2f4b1a9b8de 100644
--- a/jstests/core/exists5.js
+++ b/jstests/core/exists5.js
@@ -3,31 +3,31 @@
t = db.jstests_exists5;
t.drop();
-t.save( {a:1} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
-assert.eq( 1, t.count( {'c.d':{$not:{$exists:true}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'c.d':{$not:{$exists:false}}} ) );
+t.save({a: 1});
+assert.eq(1, t.count({'a.b': {$exists: false}}));
+assert.eq(1, t.count({'a.b': {$not: {$exists: true}}}));
+assert.eq(1, t.count({'c.d': {$not: {$exists: true}}}));
+assert.eq(0, t.count({'a.b': {$exists: true}}));
+assert.eq(0, t.count({'a.b': {$not: {$exists: false}}}));
+assert.eq(0, t.count({'c.d': {$not: {$exists: false}}}));
t.drop();
-t.save( {a:{b:1}} );
-assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
+t.save({a: {b: 1}});
+assert.eq(1, t.count({'a.b': {$exists: true}}));
+assert.eq(1, t.count({'a.b': {$not: {$exists: false}}}));
+assert.eq(0, t.count({'a.b': {$exists: false}}));
+assert.eq(0, t.count({'a.b': {$not: {$exists: true}}}));
t.drop();
-t.save( {a:[1]} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:true}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:false}}} ) );
+t.save({a: [1]});
+assert.eq(1, t.count({'a.b': {$exists: false}}));
+assert.eq(1, t.count({'a.b': {$not: {$exists: true}}}));
+assert.eq(0, t.count({'a.b': {$exists: true}}));
+assert.eq(0, t.count({'a.b': {$not: {$exists: false}}}));
t.drop();
-t.save( {a:[{b:1}]} );
-assert.eq( 1, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$not:{$exists:false}}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$not:{$exists:true}}} ) );
+t.save({a: [{b: 1}]});
+assert.eq(1, t.count({'a.b': {$exists: true}}));
+assert.eq(1, t.count({'a.b': {$not: {$exists: false}}}));
+assert.eq(0, t.count({'a.b': {$exists: false}}));
+assert.eq(0, t.count({'a.b': {$not: {$exists: true}}}));
diff --git a/jstests/core/exists6.js b/jstests/core/exists6.js
index 79d4885283d..67ab7e5345d 100644
--- a/jstests/core/exists6.js
+++ b/jstests/core/exists6.js
@@ -3,23 +3,23 @@
t = db.jstests_exists6;
t.drop();
-t.ensureIndex( {b:1} );
-t.save( {} );
-t.save( {b:1} );
-t.save( {b:null} );
+t.ensureIndex({b: 1});
+t.save({});
+t.save({b: 1});
+t.save({b: null});
-assert.eq( 2, t.find({b:{$exists:true}}).itcount() );
-assert.eq( 2, t.find({b:{$not:{$exists:false}}}).itcount() );
-assert.eq( 1, t.find({b:{$exists:false}}).itcount() );
-assert.eq( 1, t.find({b:{$not:{$exists:true}}}).itcount() );
+assert.eq(2, t.find({b: {$exists: true}}).itcount());
+assert.eq(2, t.find({b: {$not: {$exists: false}}}).itcount());
+assert.eq(1, t.find({b: {$exists: false}}).itcount());
+assert.eq(1, t.find({b: {$not: {$exists: true}}}).itcount());
// Now check existence of second compound field.
-t.ensureIndex( {a:1,b:1} );
-t.save( {a:1} );
-t.save( {a:1,b:1} );
-t.save( {a:1,b:null} );
+t.ensureIndex({a: 1, b: 1});
+t.save({a: 1});
+t.save({a: 1, b: 1});
+t.save({a: 1, b: null});
-assert.eq( 2, t.find({a:1,b:{$exists:true}}).itcount() );
-assert.eq( 2, t.find({a:1,b:{$not:{$exists:false}}}).itcount() );
-assert.eq( 1, t.find({a:1,b:{$exists:false}}).itcount() );
-assert.eq( 1, t.find({a:1,b:{$not:{$exists:true}}}).itcount() );
+assert.eq(2, t.find({a: 1, b: {$exists: true}}).itcount());
+assert.eq(2, t.find({a: 1, b: {$not: {$exists: false}}}).itcount());
+assert.eq(1, t.find({a: 1, b: {$exists: false}}).itcount());
+assert.eq(1, t.find({a: 1, b: {$not: {$exists: true}}}).itcount());
diff --git a/jstests/core/exists7.js b/jstests/core/exists7.js
index ce278ae1a57..ab02a41d445 100644
--- a/jstests/core/exists7.js
+++ b/jstests/core/exists7.js
@@ -6,16 +6,16 @@ t.drop();
function testIntegerExistsSpec() {
t.remove({});
- t.save( {} );
- t.save( {a:1} );
- t.save( {a:2} );
- t.save( {a:3, b:3} );
- t.save( {a:4, b:4} );
+ t.save({});
+ t.save({a: 1});
+ t.save({a: 2});
+ t.save({a: 3, b: 3});
+ t.save({a: 4, b: 4});
- assert.eq( 2, t.count( {b:{$exists:1}} ) );
- assert.eq( 3, t.count( {b:{$exists:0}} ) );
+ assert.eq(2, t.count({b: {$exists: 1}}));
+ assert.eq(3, t.count({b: {$exists: 0}}));
}
testIntegerExistsSpec();
-t.ensureIndex( {b:1} );
+t.ensureIndex({b: 1});
testIntegerExistsSpec();
diff --git a/jstests/core/exists8.js b/jstests/core/exists8.js
index ca62ebeb9ab..4a8f66461a3 100644
--- a/jstests/core/exists8.js
+++ b/jstests/core/exists8.js
@@ -3,74 +3,74 @@
t = db.jstests_exists8;
t.drop();
-t.save( {a:[1]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
+t.save({a: [1]});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(1, t.count({'a.1': {$exists: false}}));
+assert.eq(0, t.count({'a.0': {$exists: false}}));
+assert.eq(0, t.count({'a.1': {$exists: true}}));
t.remove({});
-t.save( {a:[1,2]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
+t.save({a: [1, 2]});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(0, t.count({'a.1': {$exists: false}}));
+assert.eq(0, t.count({'a.0': {$exists: false}}));
+assert.eq(1, t.count({'a.1': {$exists: true}}));
t.remove({});
-t.save( {a:[{}]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:true}} ) );
+t.save({a: [{}]});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(1, t.count({'a.1': {$exists: false}}));
+assert.eq(0, t.count({'a.0': {$exists: false}}));
+assert.eq(0, t.count({'a.1': {$exists: true}}));
t.remove({});
-t.save( {a:[{},{}]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.1':{$exists:true}} ) );
+t.save({a: [{}, {}]});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(0, t.count({'a.1': {$exists: false}}));
+assert.eq(0, t.count({'a.0': {$exists: false}}));
+assert.eq(1, t.count({'a.1': {$exists: true}}));
t.remove({});
-t.save( {a:[{'b':2},{'a':1}]} );
-assert.eq( 1, t.count( {'a.a':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.a':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.a':{$exists:false}} ) );
+t.save({a: [{'b': 2}, {'a': 1}]});
+assert.eq(1, t.count({'a.a': {$exists: true}}));
+assert.eq(1, t.count({'a.1.a': {$exists: true}}));
+assert.eq(1, t.count({'a.0.a': {$exists: false}}));
t.remove({});
-t.save( {a:[[1]]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.0':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.0.0':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.0.0.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.0.0':{$exists:false}} ) );
+t.save({a: [[1]]});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(1, t.count({'a.0.0': {$exists: true}}));
+assert.eq(0, t.count({'a.0.0': {$exists: false}}));
+assert.eq(0, t.count({'a.0.0.0': {$exists: true}}));
+assert.eq(1, t.count({'a.0.0.0': {$exists: false}}));
t.remove({});
-t.save( {a:[[[1]]]} );
-assert.eq( 1, t.count( {'a.0.0.0':{$exists:true}} ) );
+t.save({a: [[[1]]]});
+assert.eq(1, t.count({'a.0.0.0': {$exists: true}}));
t.remove({});
-t.save( {a:[[{b:1}]]} );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 1, t.count( {'a.0.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.0.b':{$exists:false}} ) );
+t.save({a: [[{b: 1}]]});
+assert.eq(0, t.count({'a.b': {$exists: true}}));
+assert.eq(1, t.count({'a.b': {$exists: false}}));
+assert.eq(1, t.count({'a.0.b': {$exists: true}}));
+assert.eq(0, t.count({'a.0.b': {$exists: false}}));
t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 0, t.count( {'a.0.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0.b':{$exists:false}} ) );
+t.save({a: [[], [{b: 1}]]});
+assert.eq(0, t.count({'a.0.b': {$exists: true}}));
+assert.eq(1, t.count({'a.0.b': {$exists: false}}));
t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 1, t.count( {'a.1.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1.b':{$exists:false}} ) );
+t.save({a: [[], [{b: 1}]]});
+assert.eq(1, t.count({'a.1.b': {$exists: true}}));
+assert.eq(0, t.count({'a.1.b': {$exists: false}}));
t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 1, t.count( {'a.1.0.b':{$exists:true}} ) );
-assert.eq( 0, t.count( {'a.1.0.b':{$exists:false}} ) );
+t.save({a: [[], [{b: 1}]]});
+assert.eq(1, t.count({'a.1.0.b': {$exists: true}}));
+assert.eq(0, t.count({'a.1.0.b': {$exists: false}}));
t.remove({});
-t.save( {a:[[],[{b:1}]]} );
-assert.eq( 0, t.count( {'a.1.1.b':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.1.1.b':{$exists:false}} ) );
+t.save({a: [[], [{b: 1}]]});
+assert.eq(0, t.count({'a.1.1.b': {$exists: true}}));
+assert.eq(1, t.count({'a.1.1.b': {$exists: false}}));
diff --git a/jstests/core/exists9.js b/jstests/core/exists9.js
index 75b09018797..aaa7563e7df 100644
--- a/jstests/core/exists9.js
+++ b/jstests/core/exists9.js
@@ -4,37 +4,37 @@ t = db.jstests_exists9;
t.drop();
// Check existence of missing nested field.
-t.save( {a:{}} );
-assert.eq( 1, t.count( {'a.b':{$exists:false}} ) );
-assert.eq( 0, t.count( {'a.b':{$exists:true}} ) );
+t.save({a: {}});
+assert.eq(1, t.count({'a.b': {$exists: false}}));
+assert.eq(0, t.count({'a.b': {$exists: true}}));
// With index.
-t.ensureIndex( {'a.b':1} );
-assert.eq( 1, t.find( {'a.b':{$exists:false}} ).hint( {'a.b':1} ).itcount() );
-assert.eq( 0, t.find( {'a.b':{$exists:true}} ).hint( {'a.b':1} ).itcount() );
+t.ensureIndex({'a.b': 1});
+assert.eq(1, t.find({'a.b': {$exists: false}}).hint({'a.b': 1}).itcount());
+assert.eq(0, t.find({'a.b': {$exists: true}}).hint({'a.b': 1}).itcount());
t.drop();
// Check that an empty array 'exists'.
-t.save( {} );
-t.save( {a:[]} );
-assert.eq( 1, t.count( {a:{$exists:true}} ) );
-assert.eq( 1, t.count( {a:{$exists:false}} ) );
+t.save({});
+t.save({a: []});
+assert.eq(1, t.count({a: {$exists: true}}));
+assert.eq(1, t.count({a: {$exists: false}}));
// With index.
-t.ensureIndex( {a:1} );
-assert.eq( 1, t.find( {a:{$exists:true}} ).hint( {a:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:false}} ).hint( {a:1} ).itcount() );
+t.ensureIndex({a: 1});
+assert.eq(1, t.find({a: {$exists: true}}).hint({a: 1}).itcount());
+assert.eq(1, t.find({a: {$exists: false}}).hint({a: 1}).itcount());
t.drop();
// Check that an indexed field within an empty array does not exist.
-t.save( {a:{'0':1}} );
-t.save( {a:[]} );
-assert.eq( 1, t.count( {'a.0':{$exists:true}} ) );
-assert.eq( 1, t.count( {'a.0':{$exists:false}} ) );
+t.save({a: {'0': 1}});
+t.save({a: []});
+assert.eq(1, t.count({'a.0': {$exists: true}}));
+assert.eq(1, t.count({'a.0': {$exists: false}}));
// With index.
-t.ensureIndex( {'a.0':1} );
-assert.eq( 1, t.find( {'a.0':{$exists:true}} ).hint( {'a.0':1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{$exists:false}} ).hint( {'a.0':1} ).itcount() );
+t.ensureIndex({'a.0': 1});
+assert.eq(1, t.find({'a.0': {$exists: true}}).hint({'a.0': 1}).itcount());
+assert.eq(1, t.find({'a.0': {$exists: false}}).hint({'a.0': 1}).itcount());
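A compressed restatement of the empty-array behavior exercised above, on a hypothetical collection jstests_exists9_sketch: the array field itself satisfies $exists:true, the document without the field satisfies $exists:false, and a path into an empty array never exists.
    // Hypothetical collection; restates the exists9.js empty-array cases.
    var s9 = db.jstests_exists9_sketch;
    s9.drop();
    s9.save({});
    s9.save({a: []});
    assert.eq(1, s9.count({a: {$exists: true}}));      // the empty array itself exists
    assert.eq(1, s9.count({a: {$exists: false}}));     // only the document without 'a'
    assert.eq(0, s9.count({'a.0': {$exists: true}}));  // an empty array has no element 0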
diff --git a/jstests/core/existsa.js b/jstests/core/existsa.js
index 0d2472ca328..466a5e94a63 100644
--- a/jstests/core/existsa.js
+++ b/jstests/core/existsa.js
@@ -3,99 +3,103 @@
t = db.jstests_existsa;
t.drop();
-t.save( {} );
-t.save( { a:1 } );
-t.save( { a:{ x:1 }, b:1 } );
+t.save({});
+t.save({a: 1});
+t.save({a: {x: 1}, b: 1});
/** Configure testing of an index { <indexKeyField>:1 }. */
-function setIndex( _indexKeyField ) {
+function setIndex(_indexKeyField) {
indexKeyField = _indexKeyField;
indexKeySpec = {};
- indexKeySpec[ indexKeyField ] = 1;
- t.ensureIndex( indexKeySpec, { sparse:true } );
+ indexKeySpec[indexKeyField] = 1;
+ t.ensureIndex(indexKeySpec, {sparse: true});
}
-setIndex( 'a' );
+setIndex('a');
/** @return count when hinting the index to use. */
-function hintedCount( query ) {
- return t.find( query ).hint( indexKeySpec ).itcount();
+function hintedCount(query) {
+ return t.find(query).hint(indexKeySpec).itcount();
}
/** The query field does not exist and the sparse index is not used without a hint. */
-function assertMissing( query, expectedMissing, expectedIndexedMissing ) {
+function assertMissing(query, expectedMissing, expectedIndexedMissing) {
expectedMissing = expectedMissing || 1;
expectedIndexedMissing = expectedIndexedMissing || 0;
- assert.eq( expectedMissing, t.count( query ) );
+ assert.eq(expectedMissing, t.count(query));
// We also shouldn't get a different count depending on whether
// an index is used or not.
- assert.eq( expectedIndexedMissing, hintedCount( query ) );
+ assert.eq(expectedIndexedMissing, hintedCount(query));
}
/** The query field exists and the sparse index is used without a hint. */
-function assertExists( query, expectedExists ) {
+function assertExists(query, expectedExists) {
expectedExists = expectedExists || 2;
- assert.eq( expectedExists, t.count( query ) );
+ assert.eq(expectedExists, t.count(query));
// An $exists:true predicate generates no index filters. Add another predicate on the index key
// to trigger use of the index.
andClause = {};
- andClause[ indexKeyField ] = { $ne:null };
- Object.extend( query, { $and:[ andClause ] } );
- assert.eq( expectedExists, t.count( query ) );
- assert.eq( expectedExists, hintedCount( query ) );
+ andClause[indexKeyField] = {
+ $ne: null
+ };
+ Object.extend(query, {$and: [andClause]});
+ assert.eq(expectedExists, t.count(query));
+ assert.eq(expectedExists, hintedCount(query));
}
/** The query field exists and the sparse index is not used without a hint. */
-function assertExistsUnindexed( query, expectedExists ) {
+function assertExistsUnindexed(query, expectedExists) {
expectedExists = expectedExists || 2;
- assert.eq( expectedExists, t.count( query ) );
+ assert.eq(expectedExists, t.count(query));
// Even with another predicate on the index key, the sparse index is disallowed.
andClause = {};
- andClause[ indexKeyField ] = { $ne:null };
- Object.extend( query, { $and:[ andClause ] } );
- assert.eq( expectedExists, t.count( query ) );
- assert.eq( expectedExists, hintedCount( query ) );
+ andClause[indexKeyField] = {
+ $ne: null
+ };
+ Object.extend(query, {$and: [andClause]});
+ assert.eq(expectedExists, t.count(query));
+ assert.eq(expectedExists, hintedCount(query));
}
// $exists:false queries match the proper number of documents and disallow the sparse index.
-assertMissing( { a:{ $exists:false } } );
-assertMissing( { a:{ $not:{ $exists:true } } } );
-assertMissing( { $and:[ { a:{ $exists:false } } ] } );
-assertMissing( { $or:[ { a:{ $exists:false } } ] } );
-assertMissing( { $nor:[ { a:{ $exists:true } } ] } );
-assertMissing( { 'a.x':{ $exists:false } }, 2, 1 );
+assertMissing({a: {$exists: false}});
+assertMissing({a: {$not: {$exists: true}}});
+assertMissing({$and: [{a: {$exists: false}}]});
+assertMissing({$or: [{a: {$exists: false}}]});
+assertMissing({$nor: [{a: {$exists: true}}]});
+assertMissing({'a.x': {$exists: false}}, 2, 1);
// Currently a sparse index is disallowed even if the $exists:false query is on a different field.
-assertMissing( { b:{ $exists:false } }, 2, 1 );
-assertMissing( { b:{ $exists:false }, a:{ $ne:6 } }, 2, 1 );
-assertMissing( { b:{ $not:{ $exists:true } } }, 2, 1 );
+assertMissing({b: {$exists: false}}, 2, 1);
+assertMissing({b: {$exists: false}, a: {$ne: 6}}, 2, 1);
+assertMissing({b: {$not: {$exists: true}}}, 2, 1);
// Top level $exists:true queries match the proper number of documents
// and use the sparse index on { a : 1 }.
-assertExists( { a:{ $exists:true } } );
+assertExists({a: {$exists: true}});
// Nested $exists queries match the proper number of documents and disallow the sparse index.
-assertExistsUnindexed( { $nor:[ { a:{ $exists:false } } ] } );
-assertExistsUnindexed( { $nor:[ { 'a.x':{ $exists:false } } ] }, 1 );
-assertExistsUnindexed( { a:{ $not:{ $exists:false } } } );
+assertExistsUnindexed({$nor: [{a: {$exists: false}}]});
+assertExistsUnindexed({$nor: [{'a.x': {$exists: false}}]}, 1);
+assertExistsUnindexed({a: {$not: {$exists: false}}});
// Nested $exists queries disallow the sparse index in some cases where it is not strictly
// necessary to do so. (Descriptive tests.)
-assertExistsUnindexed( { $nor:[ { b:{ $exists:false } } ] }, 1 ); // Unindexed field.
-assertExists( { $or:[ { a:{ $exists:true } } ] } ); // $exists:true not $exists:false.
+assertExistsUnindexed({$nor: [{b: {$exists: false}}]}, 1); // Unindexed field.
+assertExists({$or: [{a: {$exists: true}}]}); // $exists:true not $exists:false.
// Behavior is similar with $elemMatch.
t.drop();
-t.save( { a:[ {} ] } );
-t.save( { a:[ { b:1 } ] } );
-t.save( { a:[ { b:1 } ] } );
-setIndex( 'a.b' );
+t.save({a: [{}]});
+t.save({a: [{b: 1}]});
+t.save({a: [{b: 1}]});
+setIndex('a.b');
-assertMissing( { a:{ $elemMatch:{ b:{ $exists:false } } } } );
+assertMissing({a: {$elemMatch: {b: {$exists: false}}}});
// A $elemMatch predicate is treated as nested, and the index should be used for $exists:true.
-assertExists( { a:{ $elemMatch:{ b:{ $exists:true } } } } );
+assertExists({a: {$elemMatch: {b: {$exists: true}}}});
// A non sparse index will not be disallowed.
t.drop();
-t.save( {} );
-t.ensureIndex( { a:1 } );
-assert.eq( 1, t.find( { a:{ $exists:false } } ).itcount() );
+t.save({});
+t.ensureIndex({a: 1});
+assert.eq(1, t.find({a: {$exists: false}}).itcount());
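The core interaction existsa.js tests is that a document missing the indexed field has no entry in a sparse index, so forcing that index for an $exists:false predicate loses the match. A minimal sketch of the assertMissing() expectations, using a hypothetical collection jstests_existsa_sketch:
    // Hypothetical collection; mirrors the expectedMissing / expectedIndexedMissing split above.
    var sa = db.jstests_existsa_sketch;
    sa.drop();
    sa.save({});
    sa.save({a: 1});
    sa.ensureIndex({a: 1}, {sparse: true});
    // A collection scan finds the document that lacks 'a' ...
    assert.eq(1, sa.count({a: {$exists: false}}));
    // ... but the hinted sparse index has no entry for that document.
    assert.eq(0, sa.find({a: {$exists: false}}).hint({a: 1}).itcount());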
diff --git a/jstests/core/existsb.js b/jstests/core/existsb.js
index a212be145c0..d46266cdd16 100644
--- a/jstests/core/existsb.js
+++ b/jstests/core/existsb.js
@@ -23,54 +23,54 @@
t = db.jstests_existsb;
t.drop();
-t.save( {} );
-t.save( { a: 1 } );
-t.save( { b: 1 } );
-t.save( { a: 1, b: null } );
-t.save( { a: 1, b: 1 } );
+t.save({});
+t.save({a: 1});
+t.save({b: 1});
+t.save({a: 1, b: null});
+t.save({a: 1, b: 1});
/** run a series of checks, just on the number of docs found */
function checkExistsNull() {
// Basic cases
- assert.eq( 3, t.count({ a:{ $exists: true }}) );
- assert.eq( 2, t.count({ a:{ $exists: false }}) );
- assert.eq( 3, t.count({ b:{ $exists: true }}) );
- assert.eq( 2, t.count({ b:{ $exists: false }}) );
+ assert.eq(3, t.count({a: {$exists: true}}));
+ assert.eq(2, t.count({a: {$exists: false}}));
+ assert.eq(3, t.count({b: {$exists: true}}));
+ assert.eq(2, t.count({b: {$exists: false}}));
// With negations
- assert.eq( 3, t.count({ a:{ $not:{ $exists: false }}}) );
- assert.eq( 2, t.count({ a:{ $not:{ $exists: true }}}) );
- assert.eq( 3, t.count({ b:{ $not:{ $exists: false }}}) );
- assert.eq( 2, t.count({ b:{ $not:{ $exists: true }}}) );
+ assert.eq(3, t.count({a: {$not: {$exists: false}}}));
+ assert.eq(2, t.count({a: {$not: {$exists: true}}}));
+ assert.eq(3, t.count({b: {$not: {$exists: false}}}));
+ assert.eq(2, t.count({b: {$not: {$exists: true}}}));
// Both fields
- assert.eq( 2, t.count({ a:1, b: { $exists: true }}) );
- assert.eq( 1, t.count({ a:1, b: { $exists: false }}) );
- assert.eq( 1, t.count({ a:{ $exists: true }, b:1}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:1}) );
+ assert.eq(2, t.count({a: 1, b: {$exists: true}}));
+ assert.eq(1, t.count({a: 1, b: {$exists: false}}));
+ assert.eq(1, t.count({a: {$exists: true}, b: 1}));
+ assert.eq(1, t.count({a: {$exists: false}, b: 1}));
// Both fields, both $exists
- assert.eq( 2, t.count({ a:{ $exists: true }, b:{ $exists: true }}) );
- assert.eq( 1, t.count({ a:{ $exists: true }, b:{ $exists: false }}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: true }}) );
- assert.eq( 1, t.count({ a:{ $exists: false }, b:{ $exists: false }}) );
+ assert.eq(2, t.count({a: {$exists: true}, b: {$exists: true}}));
+ assert.eq(1, t.count({a: {$exists: true}, b: {$exists: false}}));
+ assert.eq(1, t.count({a: {$exists: false}, b: {$exists: true}}));
+ assert.eq(1, t.count({a: {$exists: false}, b: {$exists: false}}));
}
// with no index, make sure we get correct results
checkExistsNull();
// try with a standard index
-t.ensureIndex({ a : 1 });
+t.ensureIndex({a: 1});
checkExistsNull();
// try with a sparse index
t.dropIndexes();
-t.ensureIndex({ a : 1 }, { sparse:true });
+t.ensureIndex({a: 1}, {sparse: true});
checkExistsNull();
// try with a compound index
t.dropIndexes();
-t.ensureIndex({ a : 1, b : 1 });
+t.ensureIndex({a: 1, b: 1});
checkExistsNull();
// try with sparse compound index
t.dropIndexes();
-t.ensureIndex({ a : 1, b : 1 }, { sparse:true });
+t.ensureIndex({a: 1, b: 1}, {sparse: true});
checkExistsNull();
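The counts in checkExistsNull() hinge on the difference between a missing field and a field explicitly set to null: an explicit null still satisfies $exists:true. A two-document sketch on a hypothetical collection jstests_existsb_sketch; the last assertion assumes the standard null-equality semantics, under which {b: null} matches both the null and the missing field.
    // Hypothetical collection; isolates the null-vs-missing distinction used above.
    var sb = db.jstests_existsb_sketch;
    sb.drop();
    sb.save({});         // 'b' missing
    sb.save({b: null});  // 'b' present but null
    assert.eq(1, sb.count({b: {$exists: true}}));   // only the explicit null
    assert.eq(1, sb.count({b: {$exists: false}}));  // only the missing field
    assert.eq(2, sb.count({b: null}));              // equality on null matches both documents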
diff --git a/jstests/core/explain1.js b/jstests/core/explain1.js
index 2022a189a07..3c7d8b9df2c 100644
--- a/jstests/core/explain1.js
+++ b/jstests/core/explain1.js
@@ -2,23 +2,25 @@
t = db.explain1;
t.drop();
-for ( var i=0; i<100; i++ ){
- t.save( { x : i } );
+for (var i = 0; i < 100; i++) {
+ t.save({x: i});
}
-q = { x : { $gt : 50 } };
+q = {
+ x: {$gt: 50}
+};
-assert.eq( 49 , t.find( q ).count() , "A" );
-assert.eq( 49 , t.find( q ).itcount() , "B" );
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" );
+assert.eq(49, t.find(q).count(), "A");
+assert.eq(49, t.find(q).itcount(), "B");
+assert.eq(20, t.find(q).limit(20).itcount(), "C");
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-assert.eq( 49 , t.find( q ).count() , "D" );
-assert.eq( 49 , t.find( q ).itcount() , "E" );
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" );
+assert.eq(49, t.find(q).count(), "D");
+assert.eq(49, t.find(q).itcount(), "E");
+assert.eq(20, t.find(q).limit(20).itcount(), "F");
-assert.eq( 49 , t.find(q).explain("executionStats").executionStats.nReturned , "G" );
-assert.eq( 20 , t.find(q).limit(20).explain("executionStats").executionStats.nReturned , "H" );
-assert.eq( 20 , t.find(q).limit(-20).explain("executionStats").executionStats.nReturned , "I" );
-assert.eq( 49 , t.find(q).batchSize(20).explain("executionStats").executionStats.nReturned , "J" );
+assert.eq(49, t.find(q).explain("executionStats").executionStats.nReturned, "G");
+assert.eq(20, t.find(q).limit(20).explain("executionStats").executionStats.nReturned, "H");
+assert.eq(20, t.find(q).limit(-20).explain("executionStats").executionStats.nReturned, "I");
+assert.eq(49, t.find(q).batchSize(20).explain("executionStats").executionStats.nReturned, "J");
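The last four assertions distinguish limit() from batchSize(): a limit caps executionStats.nReturned, while a batch size only shapes how results are shipped back and leaves nReturned at the full match count. A reduced sketch on a hypothetical collection explain1_sketch:
    // Hypothetical collection; same shape as the explain1.js limit/batchSize assertions.
    var e1 = db.explain1_sketch;
    e1.drop();
    for (var i = 0; i < 100; i++) {
        e1.save({x: i});
    }
    var q1 = {x: {$gt: 50}};  // matches 49 documents
    assert.eq(20, e1.find(q1).limit(20).explain("executionStats").executionStats.nReturned);
    assert.eq(49, e1.find(q1).batchSize(20).explain("executionStats").executionStats.nReturned);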
diff --git a/jstests/core/explain2.js b/jstests/core/explain2.js
index 799f5323598..a0a65de4fe9 100644
--- a/jstests/core/explain2.js
+++ b/jstests/core/explain2.js
@@ -3,22 +3,22 @@
t = db.jstests_explain2;
t.drop();
-t.ensureIndex( { a:1 } );
-for( i = 1000; i < 4000; i += 1000 ) {
- t.save( { a:i } );
+t.ensureIndex({a: 1});
+for (i = 1000; i < 4000; i += 1000) {
+ t.save({a: i});
}
// Run a query with one $or clause per a-value, each of which sleeps for 'a' milliseconds.
function slow() {
- sleep( this.a );
+ sleep(this.a);
return true;
}
clauses = [];
-for( i = 1000; i < 4000; i += 1000 ) {
- clauses.push( { a:i, $where:slow } );
+for (i = 1000; i < 4000; i += 1000) {
+ clauses.push({a: i, $where: slow});
}
-explain = t.find( { $or:clauses } ).explain( true );
-printjson( explain );
+explain = t.find({$or: clauses}).explain(true);
+printjson(explain);
// Verify the duration of the whole query, and of each clause.
-assert.gt( explain.executionStats.executionTimeMillis, 1000 - 500 + 2000 - 500 + 3000 - 500 );
+assert.gt(explain.executionStats.executionTimeMillis, 1000 - 500 + 2000 - 500 + 3000 - 500);
diff --git a/jstests/core/explain3.js b/jstests/core/explain3.js
index 738f8570a0d..64db7686699 100644
--- a/jstests/core/explain3.js
+++ b/jstests/core/explain3.js
@@ -3,21 +3,21 @@
t = db.jstests_explain3;
t.drop();
-t.ensureIndex( {i:1} );
-for( var i = 0; i < 10000; ++i ) {
- t.save( {i:i,j:0} );
+t.ensureIndex({i: 1});
+for (var i = 0; i < 10000; ++i) {
+ t.save({i: i, j: 0});
}
-s = startParallelShell( "sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );" );
+s = startParallelShell("sleep( 20 ); db.jstests_explain3.dropIndex( {i:1} );");
try {
- t.find( {i:{$gt:-1},j:1} ).hint( {i:1} ).explain();
+ t.find({i: {$gt: -1}, j: 1}).hint({i: 1}).explain();
} catch (e) {
- print( "got exception" );
- printjson( e );
+ print("got exception");
+ printjson(e);
}
s();
// Sanity check to make sure mongod didn't seg fault.
-assert.eq( 10000, t.count() );
+assert.eq(10000, t.count());
diff --git a/jstests/core/explain4.js b/jstests/core/explain4.js
index effd080d8fd..fe67516fe61 100644
--- a/jstests/core/explain4.js
+++ b/jstests/core/explain4.js
@@ -3,16 +3,13 @@
t = db.jstests_explain4;
t.drop();
-t.ensureIndex( { a:1 } );
+t.ensureIndex({a: 1});
-for( i = 0; i < 10; ++i ) {
- t.save( { a:i, b:0 } );
+for (i = 0; i < 10; ++i) {
+ t.save({a: i, b: 0});
}
-explain = t.find( { a:{ $gte:0 }, b:0 } ).sort( { a:1 } )
- .hint( { a:1 } )
- .limit( 5 )
- .explain( true );
+explain = t.find({a: {$gte: 0}, b: 0}).sort({a: 1}).hint({a: 1}).limit(5).explain(true);
// Five results are expected, matching the limit spec.
-assert.eq( 5, explain.executionStats.nReturned );
+assert.eq(5, explain.executionStats.nReturned);
diff --git a/jstests/core/explain5.js b/jstests/core/explain5.js
index eb8e5d9f4a2..35841ac0789 100644
--- a/jstests/core/explain5.js
+++ b/jstests/core/explain5.js
@@ -3,31 +3,27 @@
t = db.jstests_explain5;
t.drop();
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
-for( i = 0; i < 1000; ++i ) {
- t.save( { a:i, b:i%3 } );
+for (i = 0; i < 1000; ++i) {
+ t.save({a: i, b: i % 3});
}
// Query with an initial set of documents.
-var explain1 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } )
- .hint( { a:1 } )
- .explain("executionStats");
+var explain1 = t.find({a: {$gte: 0}, b: 2}).sort({a: 1}).hint({a: 1}).explain("executionStats");
printjson(explain1);
var stats1 = explain1.executionStats;
-assert.eq( 333, stats1.nReturned, 'wrong nReturned for explain1' );
-assert.eq( 1000, stats1.totalKeysExamined, 'wrong totalKeysExamined for explain1' );
+assert.eq(333, stats1.nReturned, 'wrong nReturned for explain1');
+assert.eq(1000, stats1.totalKeysExamined, 'wrong totalKeysExamined for explain1');
-for( i = 1000; i < 2000; ++i ) {
- t.save( { a:i, b:i%3 } );
+for (i = 1000; i < 2000; ++i) {
+ t.save({a: i, b: i % 3});
}
// Query with some additional documents.
-var explain2 = t.find( { a:{ $gte:0 }, b:2 } ).sort( { a:1 } )
- .hint ( { a:1 } )
- .explain("executionStats");
+var explain2 = t.find({a: {$gte: 0}, b: 2}).sort({a: 1}).hint({a: 1}).explain("executionStats");
printjson(explain2);
var stats2 = explain2.executionStats;
-assert.eq( 666, stats2.nReturned, 'wrong nReturned for explain2' );
-assert.eq( 2000, stats2.totalKeysExamined, 'wrong totalKeysExamined for explain2' );
+assert.eq(666, stats2.nReturned, 'wrong nReturned for explain2');
+assert.eq(2000, stats2.totalKeysExamined, 'wrong totalKeysExamined for explain2');
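What the two explains above demonstrate is that the hinted {a: 1} bounds cover every key for a: {$gte: 0}, so totalKeysExamined tracks the collection size while nReturned only counts documents that also pass the b: 2 filter. A scaled-down sketch on a hypothetical collection explain5_sketch; the exact counts assume the same single-index plan shape as above.
    // Hypothetical collection; smaller version of the explain5.js expectations.
    var e5 = db.explain5_sketch;
    e5.drop();
    e5.ensureIndex({a: 1});
    for (var i = 0; i < 30; i++) {
        e5.save({a: i, b: i % 3});
    }
    var s = e5.find({a: {$gte: 0}, b: 2}).hint({a: 1}).explain("executionStats").executionStats;
    assert.eq(10, s.nReturned);          // only b == 2 survives the fetch filter
    assert.eq(30, s.totalKeysExamined);  // every index key falls inside the bounds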
diff --git a/jstests/core/explain6.js b/jstests/core/explain6.js
index 7bcc09b8f2a..4b8c75fa4eb 100644
--- a/jstests/core/explain6.js
+++ b/jstests/core/explain6.js
@@ -4,32 +4,31 @@
t = db.jstests_explain6;
t.drop();
-t.ensureIndex( { a:1, b:1 } );
-t.ensureIndex( { b:1, a:1 } );
+t.ensureIndex({a: 1, b: 1});
+t.ensureIndex({b: 1, a: 1});
-t.save( { a:0, b:1 } );
-t.save( { a:1, b:0 } );
+t.save({a: 0, b: 1});
+t.save({a: 1, b: 0});
-explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).explain( true );
+explain = t.find({a: {$gte: 0}, b: {$gte: 0}}).explain(true);
-assert.eq( 2, explain.executionStats.nReturned );
-assert.eq( 2, explain.executionStats.totalKeysExamined );
-assert.eq( 2, explain.executionStats.totalDocsExamined );
+assert.eq(2, explain.executionStats.nReturned);
+assert.eq(2, explain.executionStats.totalKeysExamined);
+assert.eq(2, explain.executionStats.totalDocsExamined);
// A limit of 2.
-explain = t.find( { a:{ $gte:0 }, b:{ $gte:0 } } ).limit( -2 ).explain( true );
-assert.eq( 2, explain.executionStats.nReturned );
+explain = t.find({a: {$gte: 0}, b: {$gte: 0}}).limit(-2).explain(true);
+assert.eq(2, explain.executionStats.nReturned);
// A $or query.
-explain = t.find( { $or:[ { a:{ $gte:0 }, b:{ $gte:1 } },
- { a:{ $gte:1 }, b:{ $gte:0 } } ] } ).explain( true );
-assert.eq( 2, explain.executionStats.nReturned );
+explain = t.find({$or: [{a: {$gte: 0}, b: {$gte: 1}}, {a: {$gte: 1}, b: {$gte: 0}}]}).explain(true);
+assert.eq(2, explain.executionStats.nReturned);
// A non $or case where totalKeysExamined != number of results
t.remove({});
-t.save( { a:'0', b:'1' } );
-t.save( { a:'1', b:'0' } );
-explain = t.find( { a:/0/, b:/1/ } ).explain( true );
-assert.eq( 1, explain.executionStats.nReturned );
-assert.eq( 2, explain.executionStats.totalKeysExamined );
+t.save({a: '0', b: '1'});
+t.save({a: '1', b: '0'});
+explain = t.find({a: /0/, b: /1/}).explain(true);
+assert.eq(1, explain.executionStats.nReturned);
+assert.eq(2, explain.executionStats.totalKeysExamined);
diff --git a/jstests/core/explain_batch_size.js b/jstests/core/explain_batch_size.js
index 8331e158ff8..7f94adb13ce 100644
--- a/jstests/core/explain_batch_size.js
+++ b/jstests/core/explain_batch_size.js
@@ -8,12 +8,12 @@ t = db.explain_batch_size;
t.drop();
var n = 3;
-for (i=0; i<n; i++) {
- t.save( { x : i } );
+for (i = 0; i < n; i++) {
+ t.save({x: i});
}
var q = {};
-assert.eq( n , t.find( q ).count() , "A" );
-assert.eq( n , t.find( q ).itcount() , "B" );
-assert.eq( n , t.find( q ).batchSize(1).explain("executionStats").executionStats.nReturned , "C" );
+assert.eq(n, t.find(q).count(), "A");
+assert.eq(n, t.find(q).itcount(), "B");
+assert.eq(n, t.find(q).batchSize(1).explain("executionStats").executionStats.nReturned, "C");
diff --git a/jstests/core/explain_count.js b/jstests/core/explain_count.js
index 6e42657fd3b..4943c511252 100644
--- a/jstests/core/explain_count.js
+++ b/jstests/core/explain_count.js
@@ -20,8 +20,7 @@ function checkCountExplain(explain, nCounted) {
var countStage = execStages.shards[0].executionStages;
assert.eq(countStage.stage, "COUNT", "root stage on shard is not COUNT");
assert.eq(countStage.nCounted, nCounted, "wrong count result");
- }
- else {
+ } else {
assert.eq(execStages.stage, "COUNT", "root stage is not COUNT");
assert.eq(execStages.nCounted, nCounted, "wrong count result");
}
@@ -46,13 +45,13 @@ explain = db.runCommand({explain: {count: collName, limit: -3}, verbosity: "exec
checkCountExplain(explain, 0);
assert.eq(0, db.runCommand({count: collName, limit: -3, skip: 4}).n);
-explain = db.runCommand({explain: {count: collName, limit: -3, skip: 4},
- verbosity: "executionStats"});
+explain =
+ db.runCommand({explain: {count: collName, limit: -3, skip: 4}, verbosity: "executionStats"});
checkCountExplain(explain, 0);
assert.eq(0, db.runCommand({count: collName, query: {a: 1}, limit: -3, skip: 4}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 1}, limit: -3, skip: 4},
- verbosity: "executionStats"});
+explain = db.runCommand(
+ {explain: {count: collName, query: {a: 1}, limit: -3, skip: 4}, verbosity: "executionStats"});
checkCountExplain(explain, 0);
// Now add a bit of data to the collection.
@@ -83,26 +82,25 @@ checkCountExplain(explain, 3);
// Trivial count with both limit and skip.
assert.eq(3, db.runCommand({count: collName, limit: -3, skip: 4}).n);
-explain = db.runCommand({explain: {count: collName, limit: -3, skip: 4},
- verbosity: "executionStats"});
+explain =
+ db.runCommand({explain: {count: collName, limit: -3, skip: 4}, verbosity: "executionStats"});
checkCountExplain(explain, 3);
// With a query.
assert.eq(10, db.runCommand({count: collName, query: {a: 1}}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 1}},
- verbosity: "executionStats"});
+explain = db.runCommand({explain: {count: collName, query: {a: 1}}, verbosity: "executionStats"});
checkCountExplain(explain, 10);
// With a query and skip.
assert.eq(7, db.runCommand({count: collName, query: {a: 1}, skip: 3}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 1}, skip: 3},
- verbosity: "executionStats"});
+explain = db.runCommand(
+ {explain: {count: collName, query: {a: 1}, skip: 3}, verbosity: "executionStats"});
checkCountExplain(explain, 7);
// With a query and limit.
assert.eq(3, db.runCommand({count: collName, query: {a: 1}, limit: 3}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 1}, limit: 3},
- verbosity: "executionStats"});
+explain = db.runCommand(
+ {explain: {count: collName, query: {a: 1}, limit: 3}, verbosity: "executionStats"});
checkCountExplain(explain, 3);
// Insert one more doc for the last few tests.
@@ -110,12 +108,12 @@ t.insert({a: 2});
// Case where all results are skipped.
assert.eq(0, db.runCommand({count: collName, query: {a: 2}, skip: 2}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 2}, skip: 2},
- verbosity: "executionStats"});
+explain = db.runCommand(
+ {explain: {count: collName, query: {a: 2}, skip: 2}, verbosity: "executionStats"});
checkCountExplain(explain, 0);
// Case where we have a limit, but we don't hit it.
assert.eq(1, db.runCommand({count: collName, query: {a: 2}, limit: 2}).n);
-explain = db.runCommand({explain: {count: collName, query: {a: 2}, limit: 2},
- verbosity: "executionStats"});
+explain = db.runCommand(
+ {explain: {count: collName, query: {a: 2}, limit: 2}, verbosity: "executionStats"});
checkCountExplain(explain, 1);
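Each pair above follows the same pattern: run the count to get the expected n, then explain the identical command and read nCounted out of the COUNT stage. One standalone instance, on a hypothetical collection explain_count_sketch; the field path assumes an unsharded deployment, as in the else branch of checkCountExplain().
    // Hypothetical collection; one instance of the count-then-explain pattern above.
    var ec = db.explain_count_sketch;
    ec.drop();
    for (var i = 0; i < 10; i++) {
        ec.insert({a: 1});
    }
    assert.eq(7, db.runCommand({count: ec.getName(), query: {a: 1}, skip: 3}).n);
    var res = db.runCommand(
        {explain: {count: ec.getName(), query: {a: 1}, skip: 3}, verbosity: "executionStats"});
    assert.commandWorked(res);
    assert.eq(7, res.executionStats.executionStages.nCounted);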
diff --git a/jstests/core/explain_delete.js b/jstests/core/explain_delete.js
index a3508e71e29..eeee5c23c14 100644
--- a/jstests/core/explain_delete.js
+++ b/jstests/core/explain_delete.js
@@ -24,34 +24,19 @@ function checkNWouldDelete(explain, nWouldDelete) {
var deleteStage = execStages.shards[0].executionStages;
assert.eq(deleteStage.stage, "DELETE");
assert.eq(deleteStage.nWouldDelete, nWouldDelete);
- }
- else {
+ } else {
assert.eq(execStages.stage, "DELETE");
assert.eq(execStages.nWouldDelete, nWouldDelete);
}
}
// Explain delete against an empty collection.
-explain = db.runCommand({
- explain: {
- delete: collName,
- deletes: [
- {q: {a: 1}, limit: 0}
- ]
- }
-});
+explain = db.runCommand({explain: {delete: collName, deletes: [{q: {a: 1}, limit: 0}]}});
checkNWouldDelete(explain, 0);
// Add an index but no data, and check that the explain still works.
t.ensureIndex({a: 1});
-explain = db.runCommand({
- explain: {
- delete: collName,
- deletes: [
- {q: {a: 1}, limit: 0}
- ]
- }
-});
+explain = db.runCommand({explain: {delete: collName, deletes: [{q: {a: 1}, limit: 0}]}});
checkNWouldDelete(explain, 0);
// Add some copies of the same document.
@@ -61,25 +46,13 @@ for (var i = 0; i < 10; i++) {
assert.eq(10, t.count());
// Run an explain which shows that all 10 documents *would* be deleted.
-explain = db.runCommand({
- explain: {
- delete: collName,
- deletes: [
- {q: {a: 1}, limit: 0}
- ]
- }
-});
+explain = db.runCommand({explain: {delete: collName, deletes: [{q: {a: 1}, limit: 0}]}});
checkNWouldDelete(explain, 10);
// Make sure all 10 documents are still there.
assert.eq(10, t.count());
// If we run the same thing without the explain, then all 10 docs should be deleted.
-var deleteResult = db.runCommand({
- delete: collName,
- deletes: [
- {q: {a: 1}, limit: 0}
- ]
-});
+var deleteResult = db.runCommand({delete: collName, deletes: [{q: {a: 1}, limit: 0}]});
assert.commandWorked(deleteResult);
assert.eq(0, t.count());
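The key property being tested is that explaining a delete is purely advisory: the DELETE stage reports nWouldDelete, but nothing is removed until the command is run without the explain wrapper. A compact sketch on a hypothetical collection explain_delete_sketch:
    // Hypothetical collection; mirrors the explain-then-delete sequence above.
    var ed = db.explain_delete_sketch;
    ed.drop();
    for (var i = 0; i < 5; i++) {
        ed.insert({a: 1});
    }
    var res = db.runCommand({explain: {delete: ed.getName(), deletes: [{q: {a: 1}, limit: 0}]}});
    assert.commandWorked(res);
    assert.eq(5, ed.count());  // the explain removed nothing
    assert.commandWorked(db.runCommand({delete: ed.getName(), deletes: [{q: {a: 1}, limit: 0}]}));
    assert.eq(0, ed.count());  // the real delete did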
diff --git a/jstests/core/explain_distinct.js b/jstests/core/explain_distinct.js
index ad359016530..37d5a485516 100644
--- a/jstests/core/explain_distinct.js
+++ b/jstests/core/explain_distinct.js
@@ -19,10 +19,7 @@
distinctCmd.query = query;
}
- return coll.runCommand({
- explain: distinctCmd,
- verbosity: 'executionStats'
- });
+ return coll.runCommand({explain: distinctCmd, verbosity: 'executionStats'});
}
coll.drop();
@@ -33,16 +30,16 @@
assert(planHasStage(explain.queryPlanner.winningPlan, "EOF"));
// Insert the data to perform distinct() on.
- for (var i = 0; i < 10; i ++) {
+ for (var i = 0; i < 10; i++) {
assert.writeOK(coll.insert({a: 1, b: 1}));
assert.writeOK(coll.insert({a: 2, c: 1}));
}
- assert.commandFailed(runDistinctExplain(coll, {}, {})); // Bad keyString.
- assert.commandFailed(runDistinctExplain(coll, 'a', 'a')); // Bad query.
- assert.commandFailed(runDistinctExplain(coll, 'b', {$not: 1})); // Bad query.
- assert.commandFailed(runDistinctExplain(coll, 'a', {$not: 1})); // Bad query.
- assert.commandFailed(runDistinctExplain(coll, '_id', {$not: 1})); // Bad query.
+ assert.commandFailed(runDistinctExplain(coll, {}, {})); // Bad keyString.
+ assert.commandFailed(runDistinctExplain(coll, 'a', 'a')); // Bad query.
+ assert.commandFailed(runDistinctExplain(coll, 'b', {$not: 1})); // Bad query.
+ assert.commandFailed(runDistinctExplain(coll, 'a', {$not: 1})); // Bad query.
+ assert.commandFailed(runDistinctExplain(coll, '_id', {$not: 1})); // Bad query.
// Ensure that server accepts a distinct command with no 'query' field.
assert.commandWorked(runDistinctExplain(coll, '', null));
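The helper above only wraps a distinct command in an explain at executionStats verbosity; malformed key or query arguments fail at the command layer, and the query field may be omitted entirely. A minimal direct invocation with a hypothetical collection explain_distinct_sketch:
    // Hypothetical collection; the same command shape runDistinctExplain() builds.
    var edc = db.explain_distinct_sketch;
    edc.drop();
    edc.insert({a: 1});
    var res = db.runCommand(
        {explain: {distinct: edc.getName(), key: "a"}, verbosity: "executionStats"});
    assert.commandWorked(res);
    assert("queryPlanner" in res);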
diff --git a/jstests/core/explain_execution_error.js b/jstests/core/explain_execution_error.js
index 280e4e3250d..1eb08c9cc21 100644
--- a/jstests/core/explain_execution_error.js
+++ b/jstests/core/explain_execution_error.js
@@ -15,8 +15,7 @@ function assertExecError(explain) {
var execStats = explain.executionStats;
if (execStats.executionStages.stage == "SINGLE_SHARD") {
errorObj = execStats.executionStages.shards[0];
- }
- else {
+ } else {
errorObj = execStats;
}
@@ -34,8 +33,7 @@ function assertExecSuccess(explain) {
var execStats = explain.executionStats;
if (execStats.executionStages.stage == "SINGLE_SHARD") {
errorObj = execStats.executionStages.shards[0];
- }
- else {
+ } else {
errorObj = execStats;
}
@@ -63,11 +61,7 @@ assert.throws(function() {
// Explain of this query should succeed at query planner verbosity.
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {a: {$exists: true}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {a: {$exists: true}}, sort: {b: 1}},
verbosity: "queryPlanner"
});
assert.commandWorked(result);
@@ -76,11 +70,7 @@ assert("queryPlanner" in result);
// Explaining the same query at execution stats verbosity should succeed, but indicate that the
// underlying operation failed.
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {a: {$exists: true}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {a: {$exists: true}}, sort: {b: 1}},
verbosity: "executionStats"
});
assert.commandWorked(result);
@@ -90,11 +80,7 @@ assertExecError(result);
// The underlying operation should also report a failure at allPlansExecution verbosity.
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {a: {$exists: true}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {a: {$exists: true}}, sort: {b: 1}},
verbosity: "allPlansExecution"
});
assert.commandWorked(result);
@@ -115,22 +101,14 @@ assert.eq(40, t.find({c: {$lt: 40}}).sort({b: 1}).itcount());
// The explain should succeed at all verbosity levels because the query itself succeeds.
// First test "queryPlanner" verbosity.
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {c: {$lt: 40}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {c: {$lt: 40}}, sort: {b: 1}},
verbosity: "queryPlanner"
});
assert.commandWorked(result);
assert("queryPlanner" in result);
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {c: {$lt: 40}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {c: {$lt: 40}}, sort: {b: 1}},
verbosity: "executionStats"
});
assert.commandWorked(result);
@@ -140,11 +118,7 @@ assertExecSuccess(result);
// We expect allPlansExecution verbosity to show execution stats for both candidate plans.
result = db.runCommand({
- explain: {
- find: t.getName(),
- filter: {c: {$lt: 40}},
- sort: {b: 1}
- },
+ explain: {find: t.getName(), filter: {c: {$lt: 40}}, sort: {b: 1}},
verbosity: "allPlansExecution"
});
assert.commandWorked(result);
diff --git a/jstests/core/explain_find.js b/jstests/core/explain_find.js
index 2e2699ea05b..820e6dffbcd 100644
--- a/jstests/core/explain_find.js
+++ b/jstests/core/explain_find.js
@@ -10,25 +10,14 @@ for (var i = 0; i < 10; i++) {
t.insert({_id: i, a: i});
}
-var explain = db.runCommand({
- explain: {
- find: collName,
- filter: {a: {$lte: 2}}
- },
- verbosity: "executionStats"
-});
+var explain = db.runCommand(
+ {explain: {find: collName, filter: {a: {$lte: 2}}}, verbosity: "executionStats"});
printjson(explain);
assert.commandWorked(explain);
assert.eq(3, explain.executionStats.nReturned);
-explain = db.runCommand({
- explain: {
- find: collName,
- min: {a: 4},
- max: {a: 6}
- },
- verbosity: "executionStats"
-});
+explain = db.runCommand(
+ {explain: {find: collName, min: {a: 4}, max: {a: 6}}, verbosity: "executionStats"});
printjson(explain);
assert.commandWorked(explain);
assert.eq(2, explain.executionStats.nReturned);
diff --git a/jstests/core/explain_find_and_modify.js b/jstests/core/explain_find_and_modify.js
index 94040cf95f8..346e7029cd1 100644
--- a/jstests/core/explain_find_and_modify.js
+++ b/jstests/core/explain_find_and_modify.js
@@ -13,10 +13,15 @@
var t = db.getCollection(cName);
// Different types of findAndModify explain requests.
- var explainRemove = {explain: {findAndModify: cName, remove: true, query: {_id: 0}}};
- var explainUpdate = {explain: {findAndModify: cName, update: {$inc: {i: 1}}, query: {_id: 0}}};
- var explainUpsert = {explain:
- {findAndModify: cName, update: {$inc: {i: 1}}, query: {_id: 0}, upsert: true}};
+ var explainRemove = {
+ explain: {findAndModify: cName, remove: true, query: {_id: 0}}
+ };
+ var explainUpdate = {
+ explain: {findAndModify: cName, update: {$inc: {i: 1}}, query: {_id: 0}}
+ };
+ var explainUpsert = {
+ explain: {findAndModify: cName, update: {$inc: {i: 1}}, query: {_id: 0}, upsert: true}
+ };
// 1. Explaining findAndModify should never create a database.
@@ -55,24 +60,38 @@
assert.commandFailed(db.runCommand({remove: true, new: true}));
// 4. Explaining findAndModify should not modify any contents of the collection.
- var onlyDoc = {_id: 0, i: 1};
+ var onlyDoc = {
+ _id: 0,
+ i: 1
+ };
assert.writeOK(t.insert(onlyDoc));
// Explaining a delete should not delete anything.
- var matchingRemoveCmd = {findAndModify: cName, remove: true, query: {_id: onlyDoc._id}};
+ var matchingRemoveCmd = {
+ findAndModify: cName,
+ remove: true,
+ query: {_id: onlyDoc._id}
+ };
var res = db.runCommand({explain: matchingRemoveCmd});
assert.commandWorked(res);
assert.eq(t.find().itcount(), 1, "Explaining a remove should not remove any documents.");
// Explaining an update should not update anything.
- var matchingUpdateCmd = {findAndModify: cName, update: {x: "x"}, query: {_id: onlyDoc._id}};
+ var matchingUpdateCmd = {
+ findAndModify: cName,
+ update: {x: "x"},
+ query: {_id: onlyDoc._id}
+ };
var res = db.runCommand({explain: matchingUpdateCmd});
assert.commandWorked(res);
assert.eq(t.findOne(), onlyDoc, "Explaining an update should not update any documents.");
// Explaining an upsert should not insert anything.
var matchingUpsertCmd = {
- findAndModify: cName, update: {x: "x"}, query: {_id: "non-match"}, upsert: true
+ findAndModify: cName,
+ update: {x: "x"},
+ query: {_id: "non-match"},
+ upsert: true
};
var res = db.runCommand({explain: matchingUpsertCmd});
assert.commandWorked(res);
@@ -85,139 +104,105 @@
var testCases = [
// -------------------------------------- Removes ----------------------------------------
{
- // Non-matching remove command.
- cmd: {remove: true, query: {_id: "no-match"}},
- expectedResult: {
- executionStats: {
- nReturned: 0,
- executionSuccess: true,
- executionStages: {
- stage: "DELETE",
- nWouldDelete: 0
- }
- }
- }
+ // Non-matching remove command.
+ cmd: {remove: true, query: {_id: "no-match"}},
+ expectedResult: {
+ executionStats: {
+ nReturned: 0,
+ executionSuccess: true,
+ executionStages: {stage: "DELETE", nWouldDelete: 0}
+ }
+ }
},
{
- // Matching remove command.
- cmd: {remove: true, query: {_id: onlyDoc._id}},
- expectedResult: {
- executionStats: {
- nReturned: 1,
- executionSuccess: true,
- executionStages: {
- stage: "DELETE",
- nWouldDelete: 1
- }
- }
- }
+ // Matching remove command.
+ cmd: {remove: true, query: {_id: onlyDoc._id}},
+ expectedResult: {
+ executionStats: {
+ nReturned: 1,
+ executionSuccess: true,
+ executionStages: {stage: "DELETE", nWouldDelete: 1}
+ }
+ }
},
// -------------------------------------- Updates ----------------------------------------
{
- // Non-matching update query.
- cmd: {update: {$inc: {i: 1}}, query: {_id: "no-match"}},
- expectedResult: {
- executionStats: {
- nReturned: 0,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 0,
- wouldInsert: false
- }
- }
- }
+ // Non-matching update query.
+ cmd: {update: {$inc: {i: 1}}, query: {_id: "no-match"}},
+ expectedResult: {
+ executionStats: {
+ nReturned: 0,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 0, wouldInsert: false}
+ }
+ }
},
{
- // Non-matching update query, returning new doc.
- cmd: {update: {$inc: {i: 1}}, query: {_id: "no-match"}, new: true},
- expectedResult: {
- executionStats: {
- nReturned: 0,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 0,
- wouldInsert: false
- }
- }
- }
+ // Non-matching update query, returning new doc.
+ cmd: {update: {$inc: {i: 1}}, query: {_id: "no-match"}, new: true},
+ expectedResult: {
+ executionStats: {
+ nReturned: 0,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 0, wouldInsert: false}
+ }
+ }
},
{
- // Matching update query.
- cmd: {update: {$inc: {i: 1}}, query: {_id: onlyDoc._id}},
- expectedResult: {
- executionStats: {
- nReturned: 1,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 1,
- wouldInsert: false
- }
- }
- }
+ // Matching update query.
+ cmd: {update: {$inc: {i: 1}}, query: {_id: onlyDoc._id}},
+ expectedResult: {
+ executionStats: {
+ nReturned: 1,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 1, wouldInsert: false}
+ }
+ }
},
{
- // Matching update query, returning new doc.
- cmd: {update: {$inc: {i: 1}}, query: {_id: onlyDoc._id}, new: true},
- expectedResult: {
- executionStats: {
- nReturned: 1,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 1,
- wouldInsert: false
- }
- }
- }
+ // Matching update query, returning new doc.
+ cmd: {update: {$inc: {i: 1}}, query: {_id: onlyDoc._id}, new: true},
+ expectedResult: {
+ executionStats: {
+ nReturned: 1,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 1, wouldInsert: false}
+ }
+ }
},
// -------------------------------------- Upserts ----------------------------------------
{
- // Non-matching upsert query.
- cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: "no-match"}},
- expectedResult: {
- executionStats: {
- nReturned: 0,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 0,
- wouldInsert: true
- }
- }
- }
+ // Non-matching upsert query.
+ cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: "no-match"}},
+ expectedResult: {
+ executionStats: {
+ nReturned: 0,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 0, wouldInsert: true}
+ }
+ }
},
{
- // Non-matching upsert query, returning new doc.
- cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: "no-match"}, new: true},
- expectedResult: {
- executionStats: {
- nReturned: 1,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 0,
- wouldInsert: true
- }
- }
- }
+ // Non-matching upsert query, returning new doc.
+ cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: "no-match"}, new: true},
+ expectedResult: {
+ executionStats: {
+ nReturned: 1,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 0, wouldInsert: true}
+ }
+ }
},
{
- // Matching upsert query, returning new doc.
- cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: onlyDoc._id}, new: true},
- expectedResult: {
- executionStats: {
- nReturned: 1,
- executionSuccess: true,
- executionStages: {
- stage: "UPDATE",
- nWouldModify: 1,
- wouldInsert: false
- }
- }
- }
+ // Matching upsert query, returning new doc.
+ cmd: {update: {$inc: {i: 1}}, upsert: true, query: {_id: onlyDoc._id}, new: true},
+ expectedResult: {
+ executionStats: {
+ nReturned: 1,
+ executionSuccess: true,
+ executionStages: {stage: "UPDATE", nWouldModify: 1, wouldInsert: false}
+ }
+ }
}
];
@@ -288,26 +273,23 @@
function assertExplainResultsMatch(explainOut, expectedMatches, preMsg, currentPath) {
// This is only used recursively, to keep track of where we are in the document.
var isRootLevel = typeof currentPath === "undefined";
- Object.keys(expectedMatches).forEach(function(key) {
- var totalFieldName = isRootLevel ? key : currentPath + "." + key;
- assert(explainOut.hasOwnProperty(key),
- preMsg + "Explain's output does not have a value for " + key);
- if (typeof expectedMatches[key] === "object") {
- // Sub-doc, recurse to match on it's fields
- assertExplainResultsMatch(explainOut[key],
- expectedMatches[key],
- preMsg,
- totalFieldName);
- }
- else {
- assert.eq(
- explainOut[key],
- expectedMatches[key],
- preMsg + "Explain's " + totalFieldName + " (" + explainOut[key] + ")" +
- " does not match expected value (" + expectedMatches[key] + ")."
- );
- }
- });
+ Object.keys(expectedMatches)
+ .forEach(function(key) {
+ var totalFieldName = isRootLevel ? key : currentPath + "." + key;
+ assert(explainOut.hasOwnProperty(key),
+ preMsg + "Explain's output does not have a value for " + key);
+ if (typeof expectedMatches[key] === "object") {
+                // Sub-doc, recurse to match on its fields
+ assertExplainResultsMatch(
+ explainOut[key], expectedMatches[key], preMsg, totalFieldName);
+ } else {
+ assert.eq(explainOut[key],
+ expectedMatches[key],
+ preMsg + "Explain's " + totalFieldName + " (" + explainOut[key] +
+ ")" + " does not match expected value (" + expectedMatches[key] +
+ ").");
+ }
+ });
}
/**
@@ -340,8 +322,7 @@
}
function assertCollDoesNotExist(cName, msg) {
- assert.eq(db.getCollectionNames().indexOf(cName),
- -1,
- msg + "collection " + cName + " exists.");
+ assert.eq(
+ db.getCollectionNames().indexOf(cName), -1, msg + "collection " + cName + " exists.");
}
})();
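Taken together, the cases above all check that explain reports what findAndModify would do (nWouldDelete, nWouldModify, wouldInsert) while leaving the collection untouched. A minimal upsert example on a hypothetical collection explain_fam_sketch:
    // Hypothetical collection; the explained upsert must not insert anything.
    var ef = db.explain_fam_sketch;
    ef.drop();
    var res = db.runCommand({
        explain:
            {findAndModify: ef.getName(), query: {_id: "no-match"}, update: {$inc: {i: 1}}, upsert: true}
    });
    assert.commandWorked(res);
    assert.eq(0, ef.count());  // nothing was upserted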
diff --git a/jstests/core/explain_missing_collection.js b/jstests/core/explain_missing_collection.js
index 93af3ed8fd0..0d1eae844f8 100644
--- a/jstests/core/explain_missing_collection.js
+++ b/jstests/core/explain_missing_collection.js
@@ -20,7 +20,7 @@
// .group()
missingColl.drop();
explainColl = missingColl.explain("executionStats");
- explain = explainColl.group({key: "a", initial: {}, reduce: function() { } });
+ explain = explainColl.group({key: "a", initial: {}, reduce: function() {}});
assert.commandWorked(explain);
assert("executionStats" in explain);
diff --git a/jstests/core/explain_missing_database.js b/jstests/core/explain_missing_database.js
index 5fff4502361..598cd7a9e11 100644
--- a/jstests/core/explain_missing_database.js
+++ b/jstests/core/explain_missing_database.js
@@ -20,7 +20,7 @@
// .group()
explainMissingDb.dropDatabase();
explainColl = explainMissingDb.collection.explain("executionStats");
- explain = explainColl.group({key: "a", initial: {}, reduce: function() { } });
+ explain = explainColl.group({key: "a", initial: {}, reduce: function() {}});
assert.commandWorked(explain);
assert("executionStats" in explain);
diff --git a/jstests/core/explain_multi_plan.js b/jstests/core/explain_multi_plan.js
index abcec153816..f74078c717d 100644
--- a/jstests/core/explain_multi_plan.js
+++ b/jstests/core/explain_multi_plan.js
@@ -2,7 +2,7 @@
* Tests running explain on a variety of explainable commands (find, update, remove, etc.) when
* there are multiple plans available. This is a regression test for SERVER-20849 and SERVER-21376.
*/
-(function () {
+(function() {
"use strict";
var coll = db.explainMultiPlan;
coll.drop();
@@ -49,12 +49,13 @@
});
assert.doesNotThrow(function() {
- coll.explain("allPlansExecution").group({
- key: {a: 1},
- cond: {a: {$gte: 1}},
- reduce: function (curr, result) {},
- initial: {}
- });
+ coll.explain("allPlansExecution")
+ .group({
+ key: {a: 1},
+ cond: {a: {$gte: 1}},
+ reduce: function(curr, result) {},
+ initial: {}
+ });
});
// SERVER-21376: Make sure the 'rejectedPlans' field is filled in appropriately.
diff --git a/jstests/core/explain_shell_helpers.js b/jstests/core/explain_shell_helpers.js
index 836092bb0d4..3c05b760e71 100644
--- a/jstests/core/explain_shell_helpers.js
+++ b/jstests/core/explain_shell_helpers.js
@@ -191,7 +191,7 @@ assert(!explainQuery.hasNext());
// .forEach()
var results = [];
-t.explain().find().forEach(function (res) {
+t.explain().find().forEach(function(res) {
results.push(res);
});
assert.eq(1, results.length);
@@ -257,7 +257,7 @@ assert(planHasStage(explain.queryPlanner.winningPlan, "COUNT_SCAN"));
// .group()
//
-explain = t.explain().group({key: "a", initial: {}, reduce: function() { } });
+explain = t.explain().group({key: "a", initial: {}, reduce: function() {}});
assert.commandWorked(explain);
//
@@ -393,8 +393,8 @@ assert.eq(1, explain.executionStats.totalDocsExamined);
assert.eq(10, t.count());
// findAndModify with upsert flag set that should do an insert.
-explain = t.explain("executionStats").findAndModify(
- {query: {a: 15}, update: {$set: {b: 3}}, upsert: true});
+explain = t.explain("executionStats")
+ .findAndModify({query: {a: 15}, update: {$set: {b: 3}}, upsert: true});
assert.commandWorked(explain);
stage = explain.executionStats.executionStages;
if ("SINGLE_SHARD" === stage.stage) {
@@ -435,7 +435,7 @@ assert.throws(function() {
// Missing "initial" for explaining a group.
assert.throws(function() {
- t.explain().group({key: "a", reduce: function() { } });
+ t.explain().group({key: "a", reduce: function() {}});
});
// Can't specify both remove and update in a findAndModify
diff --git a/jstests/core/explain_upsert.js b/jstests/core/explain_upsert.js
index 41282e9bf51..1ac254291f1 100644
--- a/jstests/core/explain_upsert.js
+++ b/jstests/core/explain_upsert.js
@@ -6,14 +6,8 @@ t.drop();
var explain;
// Explained upsert against an empty collection should succeed and be a no-op.
-explain = db.runCommand({
- explain: {
- update: t.getName(),
- updates: [
- {q: {a: 1}, u: {a: 1}, upsert: true}
- ]
- }
-});
+explain = db.runCommand(
+ {explain: {update: t.getName(), updates: [{q: {a: 1}, u: {a: 1}, upsert: true}]}});
assert.commandWorked(explain);
// Collection should still not exist.
@@ -24,13 +18,7 @@ assert(!t.drop());
t.insert({a: 3});
// An explained upsert against a non-empty collection should also succeed as a no-op.
-explain = db.runCommand({
- explain: {
- update: t.getName(),
- updates: [
- {q: {a: 1}, u: {a: 1}, upsert: true}
- ]
- }
-});
+explain = db.runCommand(
+ {explain: {update: t.getName(), updates: [{q: {a: 1}, u: {a: 1}, upsert: true}]}});
assert.commandWorked(explain);
assert.eq(1, t.count());
diff --git a/jstests/core/filemd5.js b/jstests/core/filemd5.js
index 62f69bd657f..b43dccf7036 100644
--- a/jstests/core/filemd5.js
+++ b/jstests/core/filemd5.js
@@ -1,11 +1,10 @@
db.fs.chunks.drop();
-db.fs.chunks.insert({files_id:1,n:0,data:new BinData(0,"test")});
+db.fs.chunks.insert({files_id: 1, n: 0, data: new BinData(0, "test")});
-x = db.runCommand({"filemd5":1,"root":"fs"});
-assert( ! x.ok , tojson(x) );
-
-db.fs.chunks.ensureIndex({files_id:1,n:1});
-x = db.runCommand({"filemd5":1,"root":"fs"});
-assert( x.ok , tojson(x) );
+x = db.runCommand({"filemd5": 1, "root": "fs"});
+assert(!x.ok, tojson(x));
+db.fs.chunks.ensureIndex({files_id: 1, n: 1});
+x = db.runCommand({"filemd5": 1, "root": "fs"});
+assert(x.ok, tojson(x));
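The point of the test is that the filemd5 command refuses to run until the chunks collection has the {files_id: 1, n: 1} index it needs to walk the chunks in order. The same sequence against a hypothetical filemd5_sketch.chunks namespace:
    // Hypothetical GridFS-style root; same command sequence as filemd5.js.
    db.filemd5_sketch.chunks.drop();
    db.filemd5_sketch.chunks.insert({files_id: 1, n: 0, data: new BinData(0, "test")});
    var res = db.runCommand({filemd5: 1, root: "filemd5_sketch"});
    assert(!res.ok, tojson(res));  // no {files_id: 1, n: 1} index yet
    db.filemd5_sketch.chunks.ensureIndex({files_id: 1, n: 1});
    res = db.runCommand({filemd5: 1, root: "filemd5_sketch"});
    assert(res.ok, tojson(res));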
diff --git a/jstests/core/find1.js b/jstests/core/find1.js
index e6d68540bdf..a09c0822a47 100644
--- a/jstests/core/find1.js
+++ b/jstests/core/find1.js
@@ -5,49 +5,50 @@ lookAtDocumentMetrics = false;
// QUERY MIGRATION
// New system is still not connected to server status
-if ( db.serverStatus().metrics ) {
+if (db.serverStatus().metrics) {
// var ss = db.serverStatus();
- // lookAtDocumentMetrics = ss.metrics.document != null && ss.metrics.queryExecutor.scanned != null;
+ // lookAtDocumentMetrics = ss.metrics.document != null && ss.metrics.queryExecutor.scanned !=
+ // null;
}
-print( "lookAtDocumentMetrics: " + lookAtDocumentMetrics );
+print("lookAtDocumentMetrics: " + lookAtDocumentMetrics);
-if ( lookAtDocumentMetrics ) {
+if (lookAtDocumentMetrics) {
// ignore mongos
nscannedStart = db.serverStatus().metrics.queryExecutor.scanned;
}
-
-t.save( { a : 1 , b : "hi" } );
-t.save( { a : 2 , b : "hi" } );
+t.save({a: 1, b: "hi"});
+t.save({a: 2, b: "hi"});
// Basic test of .snapshot().
-assert( t.find().snapshot()[0].a == 1 , ".snapshot() simple test 1" );
+assert(t.find().snapshot()[0].a == 1, ".snapshot() simple test 1");
var q = t.findOne();
-q.c = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
-t.save(q); // will move a:1 object to after a:2 in the file
-assert( t.find().snapshot()[0].a == 1 , ".snapshot() simple test 2" );
+q.c =
+ "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
+t.save(q); // will move a:1 object to after a:2 in the file
+assert(t.find().snapshot()[0].a == 1, ".snapshot() simple test 2");
-assert( t.findOne( { a : 1 } ).b != null , "A" );
-assert( t.findOne( { a : 1 } , { a : 1 } ).b == null , "B");
+assert(t.findOne({a: 1}).b != null, "A");
+assert(t.findOne({a: 1}, {a: 1}).b == null, "B");
-assert( t.find( { a : 1 } )[0].b != null , "C" );
-assert( t.find( { a : 1 } , { a : 1 } )[0].b == null , "D" );
-assert( t.find( { a : 1 } , { a : 1 } ).sort( { a : 1 } )[0].b == null , "D" );
+assert(t.find({a: 1})[0].b != null, "C");
+assert(t.find({a: 1}, {a: 1})[0].b == null, "D");
+assert(t.find({a: 1}, {a: 1}).sort({a: 1})[0].b == null, "D");
id = t.findOne()._id;
-assert( t.findOne( id ) , "E" );
-assert( t.findOne( id ).a , "F" );
-assert( t.findOne( id ).b , "G" );
+assert(t.findOne(id), "E");
+assert(t.findOne(id).a, "F");
+assert(t.findOne(id).b, "G");
-assert( t.findOne( id , { a : 1 } ).a , "H" );
-assert( ! t.findOne( id , { a : 1 } ).b , "I" );
+assert(t.findOne(id, {a: 1}).a, "H");
+assert(!t.findOne(id, {a: 1}).b, "I");
-assert(t.validate().valid,"not valid");
+assert(t.validate().valid, "not valid");
-if ( lookAtDocumentMetrics ) {
+if (lookAtDocumentMetrics) {
// ignore mongos
nscannedEnd = db.serverStatus().metrics.queryExecutor.scanned;
- assert.lte( nscannedStart + 16, nscannedEnd );
+ assert.lte(nscannedStart + 16, nscannedEnd);
}
diff --git a/jstests/core/find2.js b/jstests/core/find2.js
index f72203419bc..2992bb683f1 100644
--- a/jstests/core/find2.js
+++ b/jstests/core/find2.js
@@ -1,16 +1,16 @@
// Test object id sorting.
-function testObjectIdFind( db ) {
+function testObjectIdFind(db) {
r = db.ed_db_find2_oif;
r.drop();
- for( i = 0; i < 3; ++i )
- r.save( {} );
+ for (i = 0; i < 3; ++i)
+ r.save({});
- f = r.find().sort( { _id: 1 } );
- assert.eq( 3, f.count() );
- assert( f[ 0 ]._id < f[ 1 ]._id );
- assert( f[ 1 ]._id < f[ 2 ]._id );
+ f = r.find().sort({_id: 1});
+ assert.eq(3, f.count());
+ assert(f[0]._id < f[1]._id);
+ assert(f[1]._id < f[2]._id);
}
-testObjectIdFind( db );
+testObjectIdFind(db);
diff --git a/jstests/core/find3.js b/jstests/core/find3.js
index a5e4b7a4d66..42c06065e9e 100644
--- a/jstests/core/find3.js
+++ b/jstests/core/find3.js
@@ -1,10 +1,10 @@
t = db.find3;
t.drop();
-for ( i=1; i<=50; i++)
- t.save( { a : i } );
+for (i = 1; i <= 50; i++)
+ t.save({a: i});
-assert.eq( 50 , t.find().toArray().length );
-assert.eq( 20 , t.find().limit(20).toArray().length );
+assert.eq(50, t.find().toArray().length);
+assert.eq(20, t.find().limit(20).toArray().length);
assert(t.validate().valid);
diff --git a/jstests/core/find4.js b/jstests/core/find4.js
index eb9ff60e33c..7a5ebf79578 100644
--- a/jstests/core/find4.js
+++ b/jstests/core/find4.js
@@ -2,25 +2,33 @@
t = db.find4;
t.drop();
-t.save( { a : 1123 , b : 54332 } );
+t.save({a: 1123, b: 54332});
-o = t.find( {} , {} )[0];
-assert.eq( 1123 , o.a , "A" );
-assert.eq( 54332 , o.b , "B" );
-assert( o._id.str , "C" );
+o = t.find({}, {})[0];
+assert.eq(1123, o.a, "A");
+assert.eq(54332, o.b, "B");
+assert(o._id.str, "C");
-o = t.find( {} , { a : 1 } )[0];
-assert.eq( 1123 , o.a , "D" );
-assert( o._id.str , "E" );
-assert( ! o.b , "F" );
+o = t.find({}, {a: 1})[0];
+assert.eq(1123, o.a, "D");
+assert(o._id.str, "E");
+assert(!o.b, "F");
-o = t.find( {} , { b : 1 } )[0];
-assert.eq( 54332 , o.b , "G" );
-assert( o._id.str , "H" );
-assert( ! o.a , "I" );
+o = t.find({}, {b: 1})[0];
+assert.eq(54332, o.b, "G");
+assert(o._id.str, "H");
+assert(!o.a, "I");
t.drop();
-t.save( { a : 1 , b : 1 } );
-t.save( { a : 2 , b : 2 } );
-assert.eq( "1-1,2-2" , t.find().map( function(z){ return z.a + "-" + z.b; } ).toString() );
-assert.eq( "1-undefined,2-undefined" , t.find( {} , { a : 1 }).map( function(z){ return z.a + "-" + z.b; } ).toString() );
+t.save({a: 1, b: 1});
+t.save({a: 2, b: 2});
+assert.eq("1-1,2-2",
+ t.find().map(function(z) {
+ return z.a + "-" + z.b;
+ }).toString());
+assert.eq("1-undefined,2-undefined",
+ t.find({}, {a: 1})
+ .map(function(z) {
+ return z.a + "-" + z.b;
+ })
+ .toString());
diff --git a/jstests/core/find5.js b/jstests/core/find5.js
index ab648906122..33ba96ea103 100644
--- a/jstests/core/find5.js
+++ b/jstests/core/find5.js
@@ -5,47 +5,47 @@ t.drop();
t.save({a: 1});
t.save({b: 5});
-assert.eq( 2 , t.find({}, {b:1}).count(), "A");
+assert.eq(2, t.find({}, {b: 1}).count(), "A");
-function getIds( f ){
- return t.find( {} , f ).map( function(z){ return z._id; } );
+function getIds(f) {
+ return t.find({}, f).map(function(z) {
+ return z._id;
+ });
}
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( {} ) ) , "B1 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { a : 1 } ) ) , "B2 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { b : 1 } ) ) , "B3 " );
-assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { c : 1 } ) ) , "B4 " );
+assert.eq(Array.tojson(getIds(null)), Array.tojson(getIds({})), "B1 ");
+assert.eq(Array.tojson(getIds(null)), Array.tojson(getIds({a: 1})), "B2 ");
+assert.eq(Array.tojson(getIds(null)), Array.tojson(getIds({b: 1})), "B3 ");
+assert.eq(Array.tojson(getIds(null)), Array.tojson(getIds({c: 1})), "B4 ");
-x = t.find( {} , { a : 1 } )[0];
-assert.eq( 1 , x.a , "C1" );
-assert.isnull( x.b , "C2" );
+x = t.find({}, {a: 1})[0];
+assert.eq(1, x.a, "C1");
+assert.isnull(x.b, "C2");
-x = t.find( {} , { a : 1 } )[1];
-assert.isnull( x.a , "C3" );
-assert.isnull( x.b , "C4" );
+x = t.find({}, {a: 1})[1];
+assert.isnull(x.a, "C3");
+assert.isnull(x.b, "C4");
-x = t.find( {} , { b : 1 } )[0];
-assert.isnull( x.a , "C5" );
-assert.isnull( x.b , "C6" );
+x = t.find({}, {b: 1})[0];
+assert.isnull(x.a, "C5");
+assert.isnull(x.b, "C6");
-x = t.find( {} , { b : 1 } )[1];
-assert.isnull( x.a , "C7" );
-assert.eq( 5 , x.b , "C8" );
+x = t.find({}, {b: 1})[1];
+assert.isnull(x.a, "C7");
+assert.eq(5, x.b, "C8");
t.drop();
+t.save({a: 1, b: {c: 2, d: 3, e: 4}});
+assert.eq(2, t.find({}, {"b.c": 1}).toArray()[0].b.c, "D");
-t.save( { a : 1 , b : { c : 2 , d : 3 , e : 4 } } );
-assert.eq( 2 , t.find( {} , { "b.c" : 1 } ).toArray()[0].b.c , "D" );
+o = t.find({}, {"b.c": 1, "b.d": 1}).toArray()[0];
+assert(o.b.c, "E 1");
+assert(o.b.d, "E 2");
+assert(!o.b.e, "E 3");
-o = t.find( {} , { "b.c" : 1 , "b.d" : 1 } ).toArray()[0];
-assert( o.b.c , "E 1" );
-assert( o.b.d , "E 2" );
-assert( !o.b.e , "E 3" );
-
-assert( ! t.find( {} , { "b.c" : 1 } ).toArray()[0].b.d , "F" );
+assert(!t.find({}, {"b.c": 1}).toArray()[0].b.d, "F");
t.drop();
-t.save( { a : { b : { c : 1 } } } );
-assert.eq( 1 , t.find( {} , { "a.b.c" : 1 } )[0].a.b.c , "G" );
-
+t.save({a: {b: {c: 1}}});
+assert.eq(1, t.find({}, {"a.b.c": 1})[0].a.b.c, "G");
diff --git a/jstests/core/find6.js b/jstests/core/find6.js
index 0739c38aa9f..96d76192814 100644
--- a/jstests/core/find6.js
+++ b/jstests/core/find6.js
@@ -2,40 +2,38 @@
t = db.find6;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 1 , b : 1 } );
+t.save({a: 1});
+t.save({a: 1, b: 1});
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { b : null } ).count() , "B" );
-assert.eq( 1 , t.find( "function() { return this.b == null; }" ).itcount() , "C" );
-assert.eq( 1 , t.find( "function() { return this.b == null; }" ).count() , "D" );
+assert.eq(2, t.find().count(), "A");
+assert.eq(1, t.find({b: null}).count(), "B");
+assert.eq(1, t.find("function() { return this.b == null; }").itcount(), "C");
+assert.eq(1, t.find("function() { return this.b == null; }").count(), "D");
/* test some stuff with dot array notation */
q = db.find6a;
q.drop();
-q.insert( { "a" : [ { "0" : 1 } ] } );
-q.insert( { "a" : [ { "0" : 2 } ] } );
-q.insert( { "a" : [ 1 ] } );
-q.insert( { "a" : [ 9, 1 ] } );
+q.insert({"a": [{"0": 1}]});
+q.insert({"a": [{"0": 2}]});
+q.insert({"a": [1]});
+q.insert({"a": [9, 1]});
-function f() {
-
- assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da1");
- assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da2");
-
- assert.eq( 1, q.find( { 'a.0' : { $gt : 8 } } ).count(), "da3");
- assert.eq( 0, q.find( { 'a.0' : { $lt : 0 } } ).count(), "da4");
+function f() {
+ assert.eq(2, q.find({'a.0': 1}).count(), "da1");
+ assert.eq(2, q.find({'a.0': 1}).count(), "da2");
+ assert.eq(1, q.find({'a.0': {$gt: 8}}).count(), "da3");
+ assert.eq(0, q.find({'a.0': {$lt: 0}}).count(), "da4");
}
-for( var pass = 0; pass <= 1 ; pass++ ) {
+for (var pass = 0; pass <= 1; pass++) {
f();
- q.ensureIndex({a:1});
+ q.ensureIndex({a: 1});
}
t = db.multidim;
t.drop();
-t.insert({"a" : [ [ ], 1, [ 3, 4 ] ] });
-assert.eq(1, t.find({"a.2":[3,4]}).count(), "md1");
-assert.eq(1, t.find({"a.2.1":4}).count(), "md2");
-assert.eq(0, t.find({"a.2.1":3}).count(), "md3");
+t.insert({"a": [[], 1, [3, 4]]});
+assert.eq(1, t.find({"a.2": [3, 4]}).count(), "md1");
+assert.eq(1, t.find({"a.2.1": 4}).count(), "md2");
+assert.eq(0, t.find({"a.2.1": 3}).count(), "md3");
diff --git a/jstests/core/find7.js b/jstests/core/find7.js
index ca4c7d449bf..ed18dcbb0ff 100644
--- a/jstests/core/find7.js
+++ b/jstests/core/find7.js
@@ -1,8 +1,10 @@
t = db.find7;
t.drop();
-x = { "_id" : { "d" : 3649, "w" : "signed" }, "u" : { "3649" : 5 } };
-t.insert(x );
-assert.eq( x , t.findOne() , "A1" );
-assert.eq( x , t.findOne( { _id : x._id } ) , "A2" );
-
+x = {
+ "_id": {"d": 3649, "w": "signed"},
+ "u": {"3649": 5}
+};
+t.insert(x);
+assert.eq(x, t.findOne(), "A1");
+assert.eq(x, t.findOne({_id: x._id}), "A2");
diff --git a/jstests/core/find8.js b/jstests/core/find8.js
index 3622eba8ae6..14930a056e7 100644
--- a/jstests/core/find8.js
+++ b/jstests/core/find8.js
@@ -3,21 +3,21 @@
t = db.jstests_find8;
t.drop();
-t.save( {a:[1,10]} );
-assert.eq( 1, t.count( { a: { $gt:2,$lt:5} } ) );
+t.save({a: [1, 10]});
+assert.eq(1, t.count({a: {$gt: 2, $lt: 5}}));
// Check that we can do a query with 'invalid' range.
-assert.eq( 1, t.count( { a: { $gt:5,$lt:2} } ) );
+assert.eq(1, t.count({a: {$gt: 5, $lt: 2}}));
-t.save( {a:[-1,12]} );
+t.save({a: [-1, 12]});
// Check that we can do a query with 'invalid' range and sort.
-assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( { a: { $gt:5,$lt:2} } ).sort( {$natural:-1} ).itcount() );
+assert.eq(2, t.find({a: {$gt: 5, $lt: 2}}).sort({a: 1}).itcount());
+assert.eq(2, t.find({a: {$gt: 5, $lt: 2}}).sort({$natural: -1}).itcount());
// SERVER-2864
-if( 0 ) {
-t.find( { a: { $gt:5,$lt:2} } ).itcount();
-// Check that we can record a plan for an 'invalid' range.
-assert( t.find( { a: { $gt:5,$lt:2} } ).explain( true ).oldPlan );
+if (0) {
+ t.find({a: {$gt: 5, $lt: 2}}).itcount();
+ // Check that we can record a plan for an 'invalid' range.
+ assert(t.find({a: {$gt: 5, $lt: 2}}).explain(true).oldPlan);
}
diff --git a/jstests/core/find9.js b/jstests/core/find9.js
index 8c2b7ac282b..1c56e8c850d 100644
--- a/jstests/core/find9.js
+++ b/jstests/core/find9.js
@@ -3,26 +3,26 @@
t = db.jstests_find9;
t.drop();
-big = new Array( 500000 ).toString();
-for( i = 0; i < 60; ++i ) {
- t.save( { a:i, b:big } );
+big = new Array(500000).toString();
+for (i = 0; i < 60; ++i) {
+ t.save({a: i, b: big});
}
// Check size limit with a simple query.
-assert.eq( 60, t.find( {}, { a:1 } ).objsLeftInBatch() ); // Projection resizes the result set.
-assert.gt( 60, t.find().objsLeftInBatch() );
+assert.eq(60, t.find({}, {a: 1}).objsLeftInBatch()); // Projection resizes the result set.
+assert.gt(60, t.find().objsLeftInBatch());
// Check size limit on a query with an explicit batch size.
-assert.eq( 60, t.find( {}, { a:1 } ).batchSize( 80 ).objsLeftInBatch() );
-assert.gt( 60, t.find().batchSize( 80 ).objsLeftInBatch() );
+assert.eq(60, t.find({}, {a: 1}).batchSize(80).objsLeftInBatch());
+assert.gt(60, t.find().batchSize(80).objsLeftInBatch());
-for( i = 0; i < 60; ++i ) {
- t.save( { a:i, b:big } );
+for (i = 0; i < 60; ++i) {
+ t.save({a: i, b: big});
}
// Check size limit with get more.
-c = t.find().batchSize( 80 );
-while( c.hasNext() ) {
- assert.gt( 60, c.objsLeftInBatch() );
+c = t.find().batchSize(80);
+while (c.hasNext()) {
+ assert.gt(60, c.objsLeftInBatch());
c.next();
}
diff --git a/jstests/core/find_and_modify.js b/jstests/core/find_and_modify.js
index afaeda3d9a9..cf2f8804d9e 100644
--- a/jstests/core/find_and_modify.js
+++ b/jstests/core/find_and_modify.js
@@ -2,48 +2,60 @@ t = db.find_and_modify;
t.drop();
// fill db
-for(var i=1; i<=10; i++) {
- t.insert({priority:i, inprogress:false, value:0});
+for (var i = 1; i <= 10; i++) {
+ t.insert({priority: i, inprogress: false, value: 0});
}
// returns old
-out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}});
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value: 1}}});
assert.eq(out.value, 0);
assert.eq(out.inprogress, false);
t.update({_id: out._id}, {$set: {inprogress: false}});
// returns new
-out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}, 'new': true});
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value: 1}}, 'new': true});
assert.eq(out.value, 2);
assert.eq(out.inprogress, true);
t.update({_id: out._id}, {$set: {inprogress: false}});
// update highest priority
-out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+out = t.findAndModify(
+ {query: {inprogress: false}, sort: {priority: -1}, update: {$set: {inprogress: true}}});
assert.eq(out.priority, 10);
// update next highest priority
-out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+out = t.findAndModify(
+ {query: {inprogress: false}, sort: {priority: -1}, update: {$set: {inprogress: true}}});
assert.eq(out.priority, 9);
// remove lowest priority
-out = t.findAndModify({sort:{priority:1}, remove:true});
+out = t.findAndModify({sort: {priority: 1}, remove: true});
assert.eq(out.priority, 1);
// remove next lowest priority
-out = t.findAndModify({sort:{priority:1}, remove:1});
+out = t.findAndModify({sort: {priority: 1}, remove: 1});
assert.eq(out.priority, 2);
// return null (was {} before 1.5.4) if no matches (drivers may handle this differently)
-out = t.findAndModify({query:{no_such_field:1}, remove:1});
+out = t.findAndModify({query: {no_such_field: 1}, remove: 1});
assert.eq(out, null);
// make sure we fail with conflicting params to findAndModify SERVER-16601
-t.insert({x:1});
-assert.throws(function() { t.findAndModify({query:{x:1}, update:{y:2}, remove:true}); });
-assert.throws(function() { t.findAndModify({query:{x:1}, update:{y:2}, remove:true, sort: {x:1}}); });
-assert.throws(function() { t.findAndModify({query:{x:1}, update:{y:2}, remove:true, upsert:true}); });
-assert.throws(function() { t.findAndModify({query:{x:1}, update:{y:2}, new:true, remove:true}); });
-assert.throws(function() { t.findAndModify({query:{x:1}, upsert:true, remove:true}); });
+t.insert({x: 1});
+assert.throws(function() {
+ t.findAndModify({query: {x: 1}, update: {y: 2}, remove: true});
+});
+assert.throws(function() {
+ t.findAndModify({query: {x: 1}, update: {y: 2}, remove: true, sort: {x: 1}});
+});
+assert.throws(function() {
+ t.findAndModify({query: {x: 1}, update: {y: 2}, remove: true, upsert: true});
+});
+assert.throws(function() {
+ t.findAndModify({query: {x: 1}, update: {y: 2}, new: true, remove: true});
+});
+assert.throws(function() {
+ t.findAndModify({query: {x: 1}, upsert: true, remove: true});
+});
//
// SERVER-17387: Find and modify should throw in the case of invalid projection.
@@ -57,8 +69,7 @@ var cmdRes = db.runCommand({
query: {_id: "miss"},
update: {$inc: {y: 1}},
fields: {foo: {$pop: ["bar"]}},
- upsert: true,
- new: true
+ upsert: true, new: true
});
assert.commandFailed(cmdRes);
@@ -70,8 +81,7 @@ cmdRes = db.runCommand({
query: {_id: "found"},
update: {$inc: {y: 1}},
fields: {foo: {$pop: ["bar"]}},
- upsert: true,
- new: true
+ upsert: true, new: true
});
assert.commandFailed(cmdRes);
@@ -80,8 +90,7 @@ cmdRes = db.runCommand({
findAndModify: t.getName(),
query: {_id: "found"},
update: {$inc: {y: 1}},
- fields: {foo: {$pop: ["bar"]}},
- new: true
+ fields: {foo: {$pop: ["bar"]}}, new: true
});
assert.commandFailed(cmdRes);
@@ -109,12 +118,8 @@ assert.commandFailed(cmdRes);
//
t.drop();
-cmdRes = db.runCommand({
- findAndModify: t.getName(),
- query: {_id: "miss"},
- update: {$inc: {y: 1}},
- upsert: true
-});
+cmdRes = db.runCommand(
+ {findAndModify: t.getName(), query: {_id: "miss"}, update: {$inc: {y: 1}}, upsert: true});
assert.commandWorked(cmdRes);
assert("value" in cmdRes);
assert.eq(null, cmdRes.value);
@@ -123,8 +128,7 @@ cmdRes = db.runCommand({
findAndModify: t.getName(),
query: {_id: "missagain"},
update: {$inc: {y: 1}},
- upsert: true,
- new: true
+ upsert: true, new: true
});
assert.commandWorked(cmdRes);
assert("value" in cmdRes);
diff --git a/jstests/core/find_and_modify2.js b/jstests/core/find_and_modify2.js
index 2c8ab5b3bb6..e9bc8f5b23a 100644
--- a/jstests/core/find_and_modify2.js
+++ b/jstests/core/find_and_modify2.js
@@ -1,16 +1,16 @@
t = db.find_and_modify2;
t.drop();
-t.insert({_id:1, i:0, j:0});
+t.insert({_id: 1, i: 0, j: 0});
-out = t.findAndModify({update: {$inc: {i:1}}, 'new': true, fields: {i:1}});
-assert.eq(out, {_id:1, i:1});
+out = t.findAndModify({update: {$inc: {i: 1}}, 'new': true, fields: {i: 1}});
+assert.eq(out, {_id: 1, i: 1});
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {i:0}});
-assert.eq(out, {_id:1, j:0});
+out = t.findAndModify({update: {$inc: {i: 1}}, fields: {i: 0}});
+assert.eq(out, {_id: 1, j: 0});
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}});
-assert.eq(out, {j:0});
+out = t.findAndModify({update: {$inc: {i: 1}}, fields: {_id: 0, j: 1}});
+assert.eq(out, {j: 0});
-out = t.findAndModify({update: {$inc: {i:1}}, fields: {_id:0, j:1}, 'new': true});
-assert.eq(out, {j:0});
+out = t.findAndModify({update: {$inc: {i: 1}}, fields: {_id: 0, j: 1}, 'new': true});
+assert.eq(out, {j: 0});
diff --git a/jstests/core/find_and_modify3.js b/jstests/core/find_and_modify3.js
index 5dd24726d30..a1a88aeecb5 100644
--- a/jstests/core/find_and_modify3.js
+++ b/jstests/core/find_and_modify3.js
@@ -1,21 +1,33 @@
t = db.find_and_modify3;
t.drop();
-t.insert({_id:0, other:0, comments:[{i:0, j:0}, {i:1, j:1}]});
-t.insert({_id:1, other:1, comments:[{i:0, j:0}, {i:1, j:1}]}); // this is the only one that gets modded
-t.insert({_id:2, other:2, comments:[{i:0, j:0}, {i:1, j:1}]});
+t.insert({_id: 0, other: 0, comments: [{i: 0, j: 0}, {i: 1, j: 1}]});
+t.insert({
+ _id: 1,
+ other: 1,
+ comments: [{i: 0, j: 0}, {i: 1, j: 1}]
+}); // this is the only one that gets modded
+t.insert({_id: 2, other: 2, comments: [{i: 0, j: 0}, {i: 1, j: 1}]});
-orig0 = t.findOne({_id:0});
-orig2 = t.findOne({_id:2});
+orig0 = t.findOne({_id: 0});
+orig2 = t.findOne({_id: 2});
-out = t.findAndModify({query: {_id:1, 'comments.i':0}, update: {$set: {'comments.$.j':2}}, 'new': true, sort:{other:1}});
-assert.eq(out.comments[0], {i:0, j:2});
-assert.eq(out.comments[1], {i:1, j:1});
-assert.eq(t.findOne({_id:0}), orig0);
-assert.eq(t.findOne({_id:2}), orig2);
+out = t.findAndModify({
+ query: {_id: 1, 'comments.i': 0},
+ update: {$set: {'comments.$.j': 2}}, 'new': true,
+ sort: {other: 1}
+});
+assert.eq(out.comments[0], {i: 0, j: 2});
+assert.eq(out.comments[1], {i: 1, j: 1});
+assert.eq(t.findOne({_id: 0}), orig0);
+assert.eq(t.findOne({_id: 2}), orig2);
-out = t.findAndModify({query: {other:1, 'comments.i':1}, update: {$set: {'comments.$.j':3}}, 'new': true, sort:{other:1}});
-assert.eq(out.comments[0], {i:0, j:2});
-assert.eq(out.comments[1], {i:1, j:3});
-assert.eq(t.findOne({_id:0}), orig0);
-assert.eq(t.findOne({_id:2}), orig2);
+out = t.findAndModify({
+ query: {other: 1, 'comments.i': 1},
+ update: {$set: {'comments.$.j': 3}}, 'new': true,
+ sort: {other: 1}
+});
+assert.eq(out.comments[0], {i: 0, j: 2});
+assert.eq(out.comments[1], {i: 1, j: 3});
+assert.eq(t.findOne({_id: 0}), orig0);
+assert.eq(t.findOne({_id: 2}), orig2);
diff --git a/jstests/core/find_and_modify4.js b/jstests/core/find_and_modify4.js
index 04abc2f1ce7..b6be565b70a 100644
--- a/jstests/core/find_and_modify4.js
+++ b/jstests/core/find_and_modify4.js
@@ -2,32 +2,31 @@ t = db.find_and_modify4;
t.drop();
// this is the best way to build an auto-increment counter
-function getNextVal(counterName){
+function getNextVal(counterName) {
var ret = t.findAndModify({
- query: {_id: counterName},
- update: {$inc: {val: 1}},
- upsert: true,
- 'new': true,
- });
+ query: {_id: counterName},
+ update: {$inc: {val: 1}},
+ upsert: true, 'new': true,
+ });
return ret;
}
-assert.eq(getNextVal("a"), {_id:"a", val:1});
-assert.eq(getNextVal("a"), {_id:"a", val:2});
-assert.eq(getNextVal("a"), {_id:"a", val:3});
-assert.eq(getNextVal("z"), {_id:"z", val:1});
-assert.eq(getNextVal("z"), {_id:"z", val:2});
-assert.eq(getNextVal("a"), {_id:"a", val:4});
+assert.eq(getNextVal("a"), {_id: "a", val: 1});
+assert.eq(getNextVal("a"), {_id: "a", val: 2});
+assert.eq(getNextVal("a"), {_id: "a", val: 3});
+assert.eq(getNextVal("z"), {_id: "z", val: 1});
+assert.eq(getNextVal("z"), {_id: "z", val: 2});
+assert.eq(getNextVal("a"), {_id: "a", val: 4});
t.drop();
-function helper(upsert){
+function helper(upsert) {
return t.findAndModify({
- query: {_id: "asdf"},
- update: {$inc: {val: 1}},
- upsert: upsert,
- 'new': false // the default
- });
+ query: {_id: "asdf"},
+ update: {$inc: {val: 1}},
+ upsert: upsert,
+ 'new': false // the default
+ });
}
// upsert:false so nothing there before and after
@@ -37,19 +36,12 @@ assert.eq(t.count(), 0);
// upsert:true so nothing there before; something there after
assert.eq(helper(true), null);
assert.eq(t.count(), 1);
-assert.eq(helper(true), {_id: 'asdf', val: 1});
-assert.eq(helper(false), {_id: 'asdf', val: 2}); // upsert only matters when obj doesn't exist
-assert.eq(helper(true), {_id: 'asdf', val: 3});
-
+assert.eq(helper(true), {_id: 'asdf', val: 1});
+assert.eq(helper(false), {_id: 'asdf', val: 2}); // upsert only matters when obj doesn't exist
+assert.eq(helper(true), {_id: 'asdf', val: 3});
// _id created if not specified
-var out = t.findAndModify({
- query: {a:1},
- update: {$set: {b: 2}},
- upsert: true,
- 'new': true
- });
+var out = t.findAndModify({query: {a: 1}, update: {$set: {b: 2}}, upsert: true, 'new': true});
assert.neq(out._id, undefined);
assert.eq(out.a, 1);
assert.eq(out.b, 2);
-
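A self-contained sketch of the auto-increment counter pattern find_and_modify4.js builds around getNextVal(); the collection name counters_demo is illustrative only:

var counters = db.counters_demo;
counters.drop();
function nextSeq(name) {
    // upsert:true creates the counter document on first use; new:true returns the incremented value
    var doc = counters.findAndModify(
        {query: {_id: name}, update: {$inc: {val: 1}}, upsert: true, 'new': true});
    return doc.val;
}
assert.eq(1, nextSeq("orders"));
assert.eq(2, nextSeq("orders"));
assert.eq(1, nextSeq("users"));  // each counter name is tracked independently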
diff --git a/jstests/core/find_and_modify_concurrent_update.js b/jstests/core/find_and_modify_concurrent_update.js
index 2dd1e182008..3986ac62ea9 100644
--- a/jstests/core/find_and_modify_concurrent_update.js
+++ b/jstests/core/find_and_modify_concurrent_update.js
@@ -13,15 +13,12 @@
assert.writeOK(t.insert({_id: 1, a: 1, b: 1}));
var join = startParallelShell(
- "db.find_and_modify_concurrent.update({a: 1, b: 1}, {$inc: {a: 1}});"
- );
+ "db.find_and_modify_concurrent.update({a: 1, b: 1}, {$inc: {a: 1}});");
// Due to the sleep, we expect this find and modify to yield before updating the
// document.
- var res = t.findAndModify({
- query: {a: 1, b: 1, $where: "sleep(100); return true;"},
- update: {$inc: {a: 1}}
- });
+ var res = t.findAndModify(
+ {query: {a: 1, b: 1, $where: "sleep(100); return true;"}, update: {$inc: {a: 1}}});
join();
var docs = t.find().toArray();
diff --git a/jstests/core/find_and_modify_empty_coll.js b/jstests/core/find_and_modify_empty_coll.js
index 9c231fb2d1f..2d3a2ee8ffd 100644
--- a/jstests/core/find_and_modify_empty_coll.js
+++ b/jstests/core/find_and_modify_empty_coll.js
@@ -8,8 +8,8 @@
assert.eq(null, coll.findAndModify({remove: true}));
assert.eq(null, coll.findAndModify({update: {$inc: {i: 1}}}));
- var upserted = coll.findAndModify(
- {query: {_id: 0}, update: {$inc: {i: 1}}, upsert: true, new: true});
+ var upserted =
+ coll.findAndModify({query: {_id: 0}, update: {$inc: {i: 1}}, upsert: true, new: true});
assert.eq(upserted, {_id: 0, i: 1});
coll.drop();
diff --git a/jstests/core/find_and_modify_empty_update.js b/jstests/core/find_and_modify_empty_update.js
index 3d72a4ff9f3..ccfb1a8201f 100644
--- a/jstests/core/find_and_modify_empty_update.js
+++ b/jstests/core/find_and_modify_empty_update.js
@@ -46,7 +46,7 @@ assert.eq(coll.findOne({_id: 0}), {_id: 0});
// Test update:{} with a sort, upsert:true, and new:true.
coll.remove({});
-ret = coll.findAndModify({query: {_id: 0, a: 1}, update: {}, upsert: true, sort: {a: 1},
- new: true});
+ret =
+ coll.findAndModify({query: {_id: 0, a: 1}, update: {}, upsert: true, sort: {a: 1}, new: true});
assert.eq(ret, {_id: 0});
assert.eq(coll.findOne({_id: 0}), {_id: 0});
diff --git a/jstests/core/find_and_modify_server6226.js b/jstests/core/find_and_modify_server6226.js
index 21d1bdad6f8..e847f6e5697 100644
--- a/jstests/core/find_and_modify_server6226.js
+++ b/jstests/core/find_and_modify_server6226.js
@@ -2,6 +2,5 @@
t = db.find_and_modify_server6226;
t.drop();
-ret = t.findAndModify( { query : { _id : 1 } , update : { "$inc" : { i : 1 } } , upsert : true } );
-assert.isnull( ret );
-
+ret = t.findAndModify({query: {_id: 1}, update: {"$inc": {i: 1}}, upsert: true});
+assert.isnull(ret);
diff --git a/jstests/core/find_and_modify_server6254.js b/jstests/core/find_and_modify_server6254.js
index 9850a4f3fb9..c2bfa3628a1 100644
--- a/jstests/core/find_and_modify_server6254.js
+++ b/jstests/core/find_and_modify_server6254.js
@@ -2,9 +2,8 @@
t = db.find_and_modify_server6254;
t.drop();
-t.insert( { x : 1 } );
-ret = t.findAndModify( { query : { x : 1 } , update : { $set : { x : 2 } } , new : true } );
-assert.eq( 2 , ret.x , tojson( ret ) );
-
-assert.eq( 1 , t.count() );
+t.insert({x: 1});
+ret = t.findAndModify({query: {x: 1}, update: {$set: {x: 2}}, new: true});
+assert.eq(2, ret.x, tojson(ret));
+assert.eq(1, t.count());
diff --git a/jstests/core/find_and_modify_server6582.js b/jstests/core/find_and_modify_server6582.js
index a48cc962fea..79a0b31d4ed 100644
--- a/jstests/core/find_and_modify_server6582.js
+++ b/jstests/core/find_and_modify_server6582.js
@@ -2,17 +2,14 @@
t = db.find_and_modify_server6582;
t.drop();
-x = t.runCommand( "findAndModify" , {query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true});
+x = t.runCommand("findAndModify", {query: {f: 1}, update: {$set: {f: 2}}, upsert: true, new: true});
le = x.lastErrorObject;
-assert.eq( le.updatedExisting, false );
-assert.eq( le.n, 1 );
-assert.eq( le.upserted, x.value._id );
+assert.eq(le.updatedExisting, false);
+assert.eq(le.n, 1);
+assert.eq(le.upserted, x.value._id);
t.drop();
-t.insert( { f : 1 } );
-x = t.runCommand( "findAndModify" , {query:{f:1}, remove : true } );
+t.insert({f: 1});
+x = t.runCommand("findAndModify", {query: {f: 1}, remove: true});
le = x.lastErrorObject;
-assert.eq( le.n, 1 );
-
-
-
+assert.eq(le.n, 1);
diff --git a/jstests/core/find_and_modify_server6588.js b/jstests/core/find_and_modify_server6588.js
index 68d7f0739dc..9c546daba72 100644
--- a/jstests/core/find_and_modify_server6588.js
+++ b/jstests/core/find_and_modify_server6588.js
@@ -1,22 +1,35 @@
t = db.find_and_modify_sever6588;
-initial = { _id : 1 , a : [ { b : 1 } ] , z : 1 };
-up = { "$set" : { "a.$.b" : 2 } };
-q = { _id : 1 , "a.b" : 1 };
-correct = { _id : 1 , a : [ { b : 2 } ] , z : 1 };
+initial = {
+ _id: 1,
+ a: [{b: 1}],
+ z: 1
+};
+up = {
+ "$set": {"a.$.b": 2}
+};
+q = {
+ _id: 1,
+ "a.b": 1
+};
+correct = {
+ _id: 1,
+ a: [{b: 2}],
+ z: 1
+};
t.drop();
-t.insert( initial );
-t.update( q , up );
-assert.eq( correct , t.findOne() );
+t.insert(initial);
+t.update(q, up);
+assert.eq(correct, t.findOne());
t.drop();
-t.insert( initial );
-x = t.findAndModify( { query : q , update : up } );
-assert.eq( correct , t.findOne() );
+t.insert(initial);
+x = t.findAndModify({query: q, update: up});
+assert.eq(correct, t.findOne());
t.drop();
-t.insert( initial );
-x = t.findAndModify( { query : { z : 1 , "a.b" : 1 } , update : up } );
-assert.eq( correct , t.findOne() );
+t.insert(initial);
+x = t.findAndModify({query: {z: 1, "a.b": 1}, update: up});
+assert.eq(correct, t.findOne());
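A small sketch of the positional '$' update covered by SERVER-6588, on a hypothetical collection; the 'a.b' predicate in the query selects which array element '$' refers to:

var pos = db.positional_demo;
pos.drop();
pos.insert({_id: 1, a: [{b: 1}, {b: 5}], z: 1});
pos.update({_id: 1, "a.b": 1}, {$set: {"a.$.b": 2}});
assert.eq(2, pos.findOne({_id: 1}).a[0].b);  // matched element updated
assert.eq(5, pos.findOne({_id: 1}).a[1].b);  // other elements untouched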
diff --git a/jstests/core/find_and_modify_server6659.js b/jstests/core/find_and_modify_server6659.js
index f5f89b051e6..6b3f958c0e0 100644
--- a/jstests/core/find_and_modify_server6659.js
+++ b/jstests/core/find_and_modify_server6659.js
@@ -2,6 +2,6 @@
t = db.find_and_modify_server6659;
t.drop();
-x = t.findAndModify({query:{f:1}, update:{$set:{f:2}}, upsert:true, new:true});
-assert.eq( 2, x.f );
-assert.eq( 2, t.findOne().f );
+x = t.findAndModify({query: {f: 1}, update: {$set: {f: 2}}, upsert: true, new: true});
+assert.eq(2, x.f);
+assert.eq(2, t.findOne().f);
diff --git a/jstests/core/find_and_modify_server6865.js b/jstests/core/find_and_modify_server6865.js
index 8e1c21e19a6..b38c0b1bee4 100644
--- a/jstests/core/find_and_modify_server6865.js
+++ b/jstests/core/find_and_modify_server6865.js
@@ -59,295 +59,247 @@
//
// Simple query that uses an inclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3},
- {query: {_id: 42}, fields: {_id: 0, b: 1}, remove: true},
- {b: 3}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3},
+ {query: {_id: 42}, fields: {_id: 0, b: 1}, remove: true},
+ {b: 3});
// Simple query that uses an exclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3, c: 4},
- {query: {_id: 42}, fields: {a: 0, b: 0}, remove: true},
- {_id: 42, c: 4}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3, c: 4},
+ {query: {_id: 42}, fields: {a: 0, b: 0}, remove: true},
+ {_id: 42, c: 4});
// Simple query that uses $elemMatch in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{query: {_id: 42}, fields: {b: {$elemMatch: {value: 2}}}, remove: true},
- {_id: 42, b: [{name: 'second', value: 2}]}
- );
+ {_id: 42, b: [{name: 'second', value: 2}]});
// Query on an array of values while using a positional projection.
- testFAMWorked(
- {_id: 42, a: [1, 2]},
- {query: {a: 2}, fields: {'a.$': 1}, remove: true},
- {_id: 42, a: [2]}
- );
+ testFAMWorked({_id: 42, a: [1, 2]},
+ {query: {a: 2}, fields: {'a.$': 1}, remove: true},
+ {_id: 42, a: [2]});
// Query on an array of objects while using a positional projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{query: {_id: 42, 'b.name': 'third'}, fields: {'b.$': 1}, remove: true},
- {_id: 42, b: [{name: 'third', value: 3}]}
- );
+ {_id: 42, b: [{name: 'third', value: 3}]});
// Query on an array of objects while using a positional projection.
// Verifies that the projection {'b.$.value': 1} is treated the
// same as {'b.$': 1}.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{query: {_id: 42, 'b.name': 'third'}, fields: {'b.$.value': 1}, remove: true},
- {_id: 42, b: [{name: 'third', value: 3}]}
- );
+ {_id: 42, b: [{name: 'third', value: 3}]});
// Query on an array of objects using $elemMatch while using an inclusion projection.
testFAMWorked(
- {_id: 42, a: 5, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ a: 5,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, a: 5},
- remove: true
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, a: 5},
+ remove: true
},
- {a: 5}
- );
+ {a: 5});
// Query on an array of objects using $elemMatch while using the positional
// operator in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, 'b.$': 1},
- remove: true
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, 'b.$': 1},
+ remove: true
},
- {b: [{name: 'john', value: 1}]}
- );
+ {b: [{name: 'john', value: 1}]});
//
// Update operations with new=false
//
// Simple query that uses an inclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3},
- {query: {_id: 42}, fields: {_id: 0, b: 1}, update: {$inc: {b: 1}}, new: false},
- {b: 3}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3},
+ {query: {_id: 42}, fields: {_id: 0, b: 1}, update: {$inc: {b: 1}}, new: false},
+ {b: 3});
// Simple query that uses an exclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3, c: 4},
- {query: {_id: 42}, fields: {a: 0, b: 0}, update: {$set: {c: 5}}, new: false},
- {_id: 42, c: 4}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3, c: 4},
+ {query: {_id: 42}, fields: {a: 0, b: 0}, update: {$set: {c: 5}}, new: false},
+ {_id: 42, c: 4});
// Simple query that uses $elemMatch in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{
- query: {_id: 42},
- fields: {b: {$elemMatch: {value: 2}}},
- update: {$set: {name: '2nd'}},
- new: false
+ query: {_id: 42},
+ fields: {b: {$elemMatch: {value: 2}}},
+ update: {$set: {name: '2nd'}}, new: false
},
- {_id: 42, b: [{name: 'second', value: 2}]}
- );
+ {_id: 42, b: [{name: 'second', value: 2}]});
// Query on an array of values while using a positional projection.
testFAMWorked(
{_id: 42, a: [1, 2]},
{query: {a: 2}, fields: {'a.$': 1}, update: {$set: {'b.kind': 'xyz'}}, new: false},
- {_id: 42, a: [2]}
- );
+ {_id: 42, a: [2]});
// Query on an array of objects while using a positional projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{
- query: {_id: 42, 'b.name': 'third'},
- fields: {'b.$': 1},
- update: {$set: {'b.$.kind': 'xyz'}},
- new: false
+ query: {_id: 42, 'b.name': 'third'},
+ fields: {'b.$': 1},
+ update: {$set: {'b.$.kind': 'xyz'}}, new: false
},
- {_id: 42, b: [{name: 'third', value: 3}]}
- );
+ {_id: 42, b: [{name: 'third', value: 3}]});
// Query on an array of objects while using $elemMatch in the projection,
// where the matched array element is modified.
testFAMWorked(
{_id: 1, a: [{x: 1, y: 1}, {x: 1, y: 2}]},
- {
- query: {_id: 1},
- fields: {a: {$elemMatch: {x: 1}}},
- update: {$pop: {a: -1}},
- new: false
- },
- {_id: 1, a: [{x: 1, y: 1}]}
- );
+ {query: {_id: 1}, fields: {a: {$elemMatch: {x: 1}}}, update: {$pop: {a: -1}}, new: false},
+ {_id: 1, a: [{x: 1, y: 1}]});
// Query on an array of objects using $elemMatch while using an inclusion projection.
testFAMWorked(
- {_id: 42, a: 5, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ a: 5,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, a: 5},
- update: {$inc: {a: 6}},
- new: false
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, a: 5},
+ update: {$inc: {a: 6}}, new: false
},
- {a: 5}
- );
+ {a: 5});
// Query on an array of objects using $elemMatch while using the positional
// operator in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, 'b.$': 1},
- update: {$set: {name: 'james'}},
- new: false
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, 'b.$': 1},
+ update: {$set: {name: 'james'}}, new: false
},
- {b: [{name: 'john', value: 1}]}
- );
+ {b: [{name: 'john', value: 1}]});
//
// Update operations with new=true
//
// Simple query that uses an inclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3},
- {query: {_id: 42}, fields: {_id: 0, b: 1}, update: {$inc: {b: 1}}, new: true},
- {b: 4}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3},
+ {query: {_id: 42}, fields: {_id: 0, b: 1}, update: {$inc: {b: 1}}, new: true},
+ {b: 4});
// Simple query that uses an exclusion projection.
- testFAMWorked(
- {_id: 42, a: [1, 2], b: 3, c: 4},
- {query: {_id: 42}, fields: {a: 0, b: 0}, update: {$set: {c: 5}}, new: true},
- {_id: 42, c: 5}
- );
+ testFAMWorked({_id: 42, a: [1, 2], b: 3, c: 4},
+ {query: {_id: 42}, fields: {a: 0, b: 0}, update: {$set: {c: 5}}, new: true},
+ {_id: 42, c: 5});
// Simple query that uses $elemMatch in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{
- query: {_id: 42},
- fields: {b: {$elemMatch: {value: 2}}},
- update: {$set: {'b.1.name': '2nd'}},
- new: true
+ query: {_id: 42},
+ fields: {b: {$elemMatch: {value: 2}}},
+ update: {$set: {'b.1.name': '2nd'}}, new: true
},
- {_id: 42, b: [{name: '2nd', value: 2}]}
- );
+ {_id: 42, b: [{name: '2nd', value: 2}]});
// Query on an array of values while using a positional projection.
testFAMFailed(
{_id: 42, a: [1, 2]},
- {query: {a: 2}, fields: {'a.$': 1}, update: {$set: {'b.kind': 'xyz'}}, new: true}
- );
+ {query: {a: 2}, fields: {'a.$': 1}, update: {$set: {'b.kind': 'xyz'}}, new: true});
// Query on an array of objects while using a positional projection.
testFAMFailed(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{
- query: {_id: 42, 'b.name': 'third'},
- fields: {'b.$': 1},
- update: {$set: {'b.$.kind': 'xyz'}},
- new: true
- }
- );
+ query: {_id: 42, 'b.name': 'third'},
+ fields: {'b.$': 1},
+ update: {$set: {'b.$.kind': 'xyz'}}, new: true
+ });
// Query on an array of objects while using $elemMatch in the projection.
testFAMWorked(
- {_id: 42, b: [{name: 'first', value: 1},
- {name: 'second', value: 2},
- {name: 'third', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'first', value: 1}, {name: 'second', value: 2}, {name: 'third', value: 3}]
},
{
- query: {_id: 42},
- fields: {b: {$elemMatch: {value: 2}}, c: 1},
- update: {$set: {c: 'xyz'}},
- new: true
+ query: {_id: 42},
+ fields: {b: {$elemMatch: {value: 2}}, c: 1},
+ update: {$set: {c: 'xyz'}}, new: true
},
- {_id: 42, b: [{name: 'second', value: 2}], c: 'xyz'}
- );
+ {_id: 42, b: [{name: 'second', value: 2}], c: 'xyz'});
// Query on an array of objects while using $elemMatch in the projection,
// where the matched array element is modified.
testFAMWorked(
{_id: 1, a: [{x: 1, y: 1}, {x: 1, y: 2}]},
- {
- query: {_id: 1},
- fields: {a: {$elemMatch: {x: 1}}},
- update: {$pop: {a: -1}},
- new: true
- },
- {_id: 1, a: [{x: 1, y: 2}]}
- );
+ {query: {_id: 1}, fields: {a: {$elemMatch: {x: 1}}}, update: {$pop: {a: -1}}, new: true},
+ {_id: 1, a: [{x: 1, y: 2}]});
// Query on an array of objects using $elemMatch while using an inclusion projection.
testFAMWorked(
- {_id: 42, a: 5, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ a: 5,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, a: 5},
- update: {$inc: {a: 6}},
- new: true
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, a: 5},
+ update: {$inc: {a: 6}}, new: true
},
- {a: 11}
- );
+ {a: 11});
// Query on an array of objects using $elemMatch while using the positional
// operator in the projection.
testFAMFailed(
- {_id: 42, b: [{name: 'john', value: 1},
- {name: 'jess', value: 2},
- {name: 'jeff', value: 3}]
+ {
+ _id: 42,
+ b: [{name: 'john', value: 1}, {name: 'jess', value: 2}, {name: 'jeff', value: 3}]
},
{
- query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
- fields: {_id: 0, 'b.$': 1},
- update: {$set: {name: 'james'}},
- new: true
- }
- );
+ query: {b: {$elemMatch: {name: 'john', value: {$lt: 2}}}},
+ fields: {_id: 0, 'b.$': 1},
+ update: {$set: {name: 'james'}}, new: true
+ });
})();
diff --git a/jstests/core/find_and_modify_server6909.js b/jstests/core/find_and_modify_server6909.js
index 2f688459698..8e807e0c893 100644
--- a/jstests/core/find_and_modify_server6909.js
+++ b/jstests/core/find_and_modify_server6909.js
@@ -1,21 +1,22 @@
c = db.find_and_modify_server6906;
-
c.drop();
-c.insert( { _id : 5 , a:{ b:1 } } );
-ret = c.findAndModify( { query:{ 'a.b':1 },
- update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
- new:true } );
-assert.eq( 5, ret._id );
-assert.eq( 2, ret.a.b );
-
+c.insert({_id: 5, a: {b: 1}});
+ret = c.findAndModify({
+ query: {'a.b': 1},
+ update: {$set: {'a.b': 2}}, // Ensure the query on 'a.b' no longer matches.
+ new: true
+});
+assert.eq(5, ret._id);
+assert.eq(2, ret.a.b);
c.drop();
-c.insert( { _id : null , a:{ b:1 } } );
-ret = c.findAndModify( { query:{ 'a.b':1 },
- update:{ $set:{ 'a.b':2 } }, // Ensure the query on 'a.b' no longer matches.
- new:true } );
-assert.eq( 2, ret.a.b );
-
+c.insert({_id: null, a: {b: 1}});
+ret = c.findAndModify({
+ query: {'a.b': 1},
+ update: {$set: {'a.b': 2}}, // Ensure the query on 'a.b' no longer matches.
+ new: true
+});
+assert.eq(2, ret.a.b);
diff --git a/jstests/core/find_and_modify_server6993.js b/jstests/core/find_and_modify_server6993.js
index b8a31915372..bf8ed52c9c7 100644
--- a/jstests/core/find_and_modify_server6993.js
+++ b/jstests/core/find_and_modify_server6993.js
@@ -1,9 +1,9 @@
c = db.find_and_modify_server6993;
c.drop();
-
-c.insert( { a:[ 1, 2 ] } );
-
-c.findAndModify( { query:{ a:1 }, update:{ $set:{ 'a.$':5 } } } );
-
-assert.eq( 5, c.findOne().a[ 0 ] );
+
+c.insert({a: [1, 2]});
+
+c.findAndModify({query: {a: 1}, update: {$set: {'a.$': 5}}});
+
+assert.eq(5, c.findOne().a[0]);
diff --git a/jstests/core/find_and_modify_server7660.js b/jstests/core/find_and_modify_server7660.js
index 4828dff4e49..d344d773dca 100644
--- a/jstests/core/find_and_modify_server7660.js
+++ b/jstests/core/find_and_modify_server7660.js
@@ -2,17 +2,10 @@
t = db.find_and_modify_server7660;
t.drop();
-a = t.findAndModify({
- query : { foo : 'bar' },
- update : { $set : { bob : 'john' } },
- sort: { foo : 1},
- upsert: true,
- new : true
-});
+a = t.findAndModify(
+ {query: {foo: 'bar'}, update: {$set: {bob: 'john'}}, sort: {foo: 1}, upsert: true, new: true});
b = t.findOne();
-assert.eq( a, b );
-assert.eq( "bar", a.foo );
-assert.eq( "john", a.bob );
-
-
+assert.eq(a, b);
+assert.eq("bar", a.foo);
+assert.eq("john", a.bob);
diff --git a/jstests/core/find_and_modify_where.js b/jstests/core/find_and_modify_where.js
index fe13a6894fd..e3d5604559a 100644
--- a/jstests/core/find_and_modify_where.js
+++ b/jstests/core/find_and_modify_where.js
@@ -2,9 +2,8 @@
t = db.find_and_modify_where;
t.drop();
-t.insert( { _id : 1 , x : 1 } );
+t.insert({_id: 1, x: 1});
-res = t.findAndModify( { query : { $where : "return this.x == 1" } , update : { $set : { y : 1 } } } );
-
-assert.eq( 1 , t.findOne().y );
+res = t.findAndModify({query: {$where: "return this.x == 1"}, update: {$set: {y: 1}}});
+assert.eq(1, t.findOne().y);
diff --git a/jstests/core/find_dedup.js b/jstests/core/find_dedup.js
index 401384ceb7a..a9160df7562 100644
--- a/jstests/core/find_dedup.js
+++ b/jstests/core/find_dedup.js
@@ -20,16 +20,21 @@ t.save({_id: 2, a: 1, b: 1});
t.save({_id: 3, a: 2, b: 2});
t.save({_id: 4, a: 3, b: 3});
t.save({_id: 5, a: 3, b: 3});
-checkDedup({$or: [{a:{$gte:0,$lte:2},b:{$gte:0,$lte:2}},
- {a:{$gte:1,$lte:3},b:{$gte:1,$lte:3}},
- {a:{$gte:1,$lte:4},b:{$gte:1,$lte:4}}]},
- [1, 2, 3, 4, 5]);
+checkDedup(
+ {
+ $or: [
+ {a: {$gte: 0, $lte: 2}, b: {$gte: 0, $lte: 2}},
+ {a: {$gte: 1, $lte: 3}, b: {$gte: 1, $lte: 3}},
+ {a: {$gte: 1, $lte: 4}, b: {$gte: 1, $lte: 4}}
+ ]
+ },
+ [1, 2, 3, 4, 5]);
// Deduping multikey
t.drop();
t.save({_id: 1, a: [1, 2, 3], b: [4, 5, 6]});
t.save({_id: 2, a: [1, 2, 3], b: [4, 5, 6]});
-assert.eq( 2, t.count() );
+assert.eq(2, t.count());
checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
-t.ensureIndex( { a : 1 } );
+t.ensureIndex({a: 1});
checkDedup({$or: [{a: {$in: [1, 2]}}, {b: {$in: [4, 5]}}]}, [1, 2]);
diff --git a/jstests/core/find_getmore_bsonsize.js b/jstests/core/find_getmore_bsonsize.js
index fdad2b1f1d6..904a9c33ab0 100644
--- a/jstests/core/find_getmore_bsonsize.js
+++ b/jstests/core/find_getmore_bsonsize.js
@@ -74,7 +74,10 @@
bigStr += bigStr;
}
bigStr = bigStr.substring(0, (16 * oneMB) - 32);
- var maxSizeDoc = {_id: 0, padding: bigStr};
+ var maxSizeDoc = {
+ _id: 0,
+ padding: bigStr
+ };
assert.eq(Object.bsonsize(maxSizeDoc), 16 * oneMB);
assert.writeOK(coll.insert(maxSizeDoc));
diff --git a/jstests/core/find_getmore_cmd.js b/jstests/core/find_getmore_cmd.js
index b9d12c41a19..3f3d50993e7 100644
--- a/jstests/core/find_getmore_cmd.js
+++ b/jstests/core/find_getmore_cmd.js
@@ -45,11 +45,8 @@
assert.gt(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.firstBatch.length, 10);
- cmdRes = db.runCommand({
- getMore: cmdRes.cursor.id,
- collection: collName,
- batchSize: NumberInt(5)
- });
+ cmdRes =
+ db.runCommand({getMore: cmdRes.cursor.id, collection: collName, batchSize: NumberInt(5)});
assert.gt(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.nextBatch.length, 5);
@@ -60,11 +57,8 @@
assert.gt(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.firstBatch.length, 0);
- cmdRes = db.runCommand({
- getMore: cmdRes.cursor.id,
- collection: collName,
- batchSize: NumberInt(5)
- });
+ cmdRes =
+ db.runCommand({getMore: cmdRes.cursor.id, collection: collName, batchSize: NumberInt(5)});
assert.gt(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.nextBatch.length, 5);
@@ -75,11 +69,8 @@
assert.gt(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.firstBatch.length, 10);
- cmdRes = db.runCommand({
- getMore: cmdRes.cursor.id,
- collection: collName,
- batchSize: NumberInt(11)
- });
+ cmdRes =
+ db.runCommand({getMore: cmdRes.cursor.id, collection: collName, batchSize: NumberInt(11)});
assert.eq(cmdRes.cursor.id, NumberLong(0));
assert.eq(cmdRes.cursor.ns, coll.getFullName());
assert.eq(cmdRes.cursor.nextBatch.length, 10);
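For context, a rough end-to-end sketch of the find/getMore command sequence driven above, using a hypothetical collection:

var gmDemo = db.getmore_demo;
gmDemo.drop();
for (var i = 0; i < 20; i++) {
    gmDemo.insert({_id: i});
}
var res = db.runCommand({find: gmDemo.getName(), batchSize: NumberInt(5)});
assert.commandWorked(res);
assert.eq(5, res.cursor.firstBatch.length);
var seen = res.cursor.firstBatch.length;
// Keep issuing getMore until the server reports an exhausted cursor (id 0).
while (res.cursor.id > NumberLong(0)) {
    res = db.runCommand(
        {getMore: res.cursor.id, collection: gmDemo.getName(), batchSize: NumberInt(5)});
    assert.commandWorked(res);
    seen += res.cursor.nextBatch.length;
}
assert.eq(20, seen);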
diff --git a/jstests/core/find_size.js b/jstests/core/find_size.js
index 0293c3e2b56..0327a20085f 100644
--- a/jstests/core/find_size.js
+++ b/jstests/core/find_size.js
@@ -15,7 +15,7 @@ assert.eq(1, t.count({arr: {$size: NumberInt(4)}}));
// bad inputs
var badInputs = [-1, NumberLong(-10000), "str", 3.2, 0.1, NumberLong(-9223372036854775808)];
-badInputs.forEach(function(x) {
+badInputs.forEach(function(x) {
assert.commandFailed(db.runCommand({count: t.getName(), query: {arr: {$size: x}}}),
- "$size argument " + x + " should have failed");
+ "$size argument " + x + " should have failed");
});
diff --git a/jstests/core/finda.js b/jstests/core/finda.js
index 4017ce91ad5..711b70f2e25 100644
--- a/jstests/core/finda.js
+++ b/jstests/core/finda.js
@@ -6,101 +6,99 @@ t.drop();
numDocs = 200;
function clearQueryPlanCache() {
- t.ensureIndex( { c:1 } );
- t.dropIndex( { c:1 } );
+ t.ensureIndex({c: 1});
+ t.dropIndex({c: 1});
}
-function assertAllFound( matches ) {
-// printjson( matches );
- found = new Array( numDocs );
- for( var i = 0; i < numDocs; ++i ) {
- found[ i ] = false;
+function assertAllFound(matches) {
+ // printjson( matches );
+ found = new Array(numDocs);
+ for (var i = 0; i < numDocs; ++i) {
+ found[i] = false;
}
- for( var i in matches ) {
- m = matches[ i ];
- found[ m._id ] = true;
+ for (var i in matches) {
+ m = matches[i];
+ found[m._id] = true;
}
- for( var i = 0; i < numDocs; ++i ) {
- assert( found[ i ], i.toString() );
+ for (var i = 0; i < numDocs; ++i) {
+ assert(found[i], i.toString());
}
}
-function makeCursor( query, projection, sort, batchSize, returnKey ) {
+function makeCursor(query, projection, sort, batchSize, returnKey) {
print("\n*** query:");
printjson(query);
print("proj:");
printjson(projection);
- cursor = t.find( query, projection );
- if ( sort ) {
- cursor.sort( sort );
+ cursor = t.find(query, projection);
+ if (sort) {
+ cursor.sort(sort);
print("sort:");
printjson(sort);
}
- if ( batchSize ) {
- cursor.batchSize( batchSize );
+ if (batchSize) {
+ cursor.batchSize(batchSize);
print("bs: " + batchSize);
}
- if ( returnKey ) {
+ if (returnKey) {
cursor.returnKey();
}
return cursor;
}
-function checkCursorWithBatchSizeProjection( query, projection, sort, batchSize,
- expectedLeftInBatch ) {
+function checkCursorWithBatchSizeProjection(
+ query, projection, sort, batchSize, expectedLeftInBatch) {
clearQueryPlanCache();
- cursor = makeCursor( query, projection, sort, batchSize );
+ cursor = makeCursor(query, projection, sort, batchSize);
// XXX: this
- assert.eq( expectedLeftInBatch, cursor.objsLeftInBatch() );
- assertAllFound( cursor.toArray() );
+ assert.eq(expectedLeftInBatch, cursor.objsLeftInBatch());
+ assertAllFound(cursor.toArray());
}
-function checkCursorWithBatchSize( query, sort, batchSize, expectedLeftInBatch ) {
- checkCursorWithBatchSizeProjection( query, {}, sort, batchSize, expectedLeftInBatch );
- checkCursorWithBatchSizeProjection( query, { a:1, _id:1 }, sort, batchSize,
- expectedLeftInBatch );
+function checkCursorWithBatchSize(query, sort, batchSize, expectedLeftInBatch) {
+ checkCursorWithBatchSizeProjection(query, {}, sort, batchSize, expectedLeftInBatch);
+ checkCursorWithBatchSizeProjection(query, {a: 1, _id: 1}, sort, batchSize, expectedLeftInBatch);
// In the cases tested, when expectedLeftInBatch is high enough takeover will occur during
// the query operation rather than getMore and the last few matches should properly return keys
// from the a,_id index.
clearQueryPlanCache();
- if ( expectedLeftInBatch > 110 ) {
- cursor = makeCursor( query, {}, sort, batchSize, true );
+ if (expectedLeftInBatch > 110) {
+ cursor = makeCursor(query, {}, sort, batchSize, true);
lastNonAIndexResult = -1;
- for( var i = 0; i < expectedLeftInBatch; ++i ) {
+ for (var i = 0; i < expectedLeftInBatch; ++i) {
next = cursor.next();
// Identify the query plan used by checking the fields of a returnKey query.
- if ( !friendlyEqual( [ 'a', '_id' ], Object.keySet( next ) ) ) {
+ if (!friendlyEqual(['a', '_id'], Object.keySet(next))) {
lastNonAIndexResult = i;
}
}
// The last results should come from the a,_id index.
- assert.lt( lastNonAIndexResult, expectedLeftInBatch - 5 );
+ assert.lt(lastNonAIndexResult, expectedLeftInBatch - 5);
}
}
-function queryWithPlanTypes( withDups ) {
+function queryWithPlanTypes(withDups) {
t.drop();
- for( var i = 1; i < numDocs; ++i ) {
- t.save( { _id:i, a:i, b:0 } );
+ for (var i = 1; i < numDocs; ++i) {
+ t.save({_id: i, a: i, b: 0});
}
- if ( withDups ) {
- t.save( { _id:0, a:[ 0, numDocs ], b:0 } ); // Add a dup on a:1 index.
+ if (withDups) {
+ t.save({_id: 0, a: [0, numDocs], b: 0}); // Add a dup on a:1 index.
+ } else {
+ t.save({_id: 0, a: 0, b: 0});
}
- else {
- t.save( { _id:0, a:0, b:0 } );
- }
- t.ensureIndex( { a:1, _id:1 } ); // Include _id for a covered index projection.
+ t.ensureIndex({a: 1, _id: 1}); // Include _id for a covered index projection.
// All plans in order.
- checkCursorWithBatchSize( { a:{ $gte:0 } }, null, 150, 150 );
+ checkCursorWithBatchSize({a: {$gte: 0}}, null, 150, 150);
// All plans out of order.
- checkCursorWithBatchSize( { a:{ $gte:0 } }, { c:1 }, null, 101 );
+ checkCursorWithBatchSize({a: {$gte: 0}}, {c: 1}, null, 101);
// Some plans in order, some out of order.
- checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, 150, 150 );
- checkCursorWithBatchSize( { a:{ $gte:0 }, b:0 }, { a:1 }, null, 101 );
+ checkCursorWithBatchSize({a: {$gte: 0}, b: 0}, {a: 1}, 150, 150);
+ checkCursorWithBatchSize({a: {$gte: 0}, b: 0}, {a: 1}, null, 101);
}
-queryWithPlanTypes( false );
-queryWithPlanTypes( true );
+queryWithPlanTypes(false);
+queryWithPlanTypes(true);
diff --git a/jstests/core/fm1.js b/jstests/core/fm1.js
index de1df03edcb..cff14b029d9 100644
--- a/jstests/core/fm1.js
+++ b/jstests/core/fm1.js
@@ -2,11 +2,9 @@
t = db.fm1;
t.drop();
-t.insert({foo:{bar:1}});
-t.find({},{foo:1}).toArray();
-t.find({},{'foo.bar':1}).toArray();
-t.find({},{'baz':1}).toArray();
-t.find({},{'baz.qux':1}).toArray();
-t.find({},{'foo.qux':1}).toArray();
-
-
+t.insert({foo: {bar: 1}});
+t.find({}, {foo: 1}).toArray();
+t.find({}, {'foo.bar': 1}).toArray();
+t.find({}, {'baz': 1}).toArray();
+t.find({}, {'baz.qux': 1}).toArray();
+t.find({}, {'foo.qux': 1}).toArray();
diff --git a/jstests/core/fm2.js b/jstests/core/fm2.js
index 93284c0c611..14fa8e06466 100644
--- a/jstests/core/fm2.js
+++ b/jstests/core/fm2.js
@@ -2,8 +2,7 @@
t = db.fm2;
t.drop();
-t.insert( { "one" : { "two" : {"three":"four"} } } );
-
-x = t.find({},{"one.two":1})[0];
-assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" );
+t.insert({"one": {"two": {"three": "four"}}});
+x = t.find({}, {"one.two": 1})[0];
+assert.eq(1, Object.keySet(x.one).length, "ks l 1");
diff --git a/jstests/core/fm3.js b/jstests/core/fm3.js
index ebe79f16dc4..301ce3d56ab 100644
--- a/jstests/core/fm3.js
+++ b/jstests/core/fm3.js
@@ -1,37 +1,36 @@
t = db.fm3;
t.drop();
-t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} );
+t.insert({a: [{c: {e: 1, f: 1}}, {d: 2}, 'z'], b: 1});
-
-res = t.findOne({}, {a:1});
-assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a");
+res = t.findOne({}, {a: 1});
+assert.eq(res.a, [{c: {e: 1, f: 1}}, {d: 2}, 'z'], "one a");
assert.eq(res.b, undefined, "one b");
-res = t.findOne({}, {a:0});
+res = t.findOne({}, {a: 0});
assert.eq(res.a, undefined, "two a");
assert.eq(res.b, 1, "two b");
-res = t.findOne({}, {'a.d':1});
-assert.eq(res.a, [{}, {d:2}], "three a");
+res = t.findOne({}, {'a.d': 1});
+assert.eq(res.a, [{}, {d: 2}], "three a");
assert.eq(res.b, undefined, "three b");
-res = t.findOne({}, {'a.d':0});
-assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a");
+res = t.findOne({}, {'a.d': 0});
+assert.eq(res.a, [{c: {e: 1, f: 1}}, {}, 'z'], "four a");
assert.eq(res.b, 1, "four b");
-res = t.findOne({}, {'a.c':1});
-assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a");
+res = t.findOne({}, {'a.c': 1});
+assert.eq(res.a, [{c: {e: 1, f: 1}}, {}], "five a");
assert.eq(res.b, undefined, "five b");
-res = t.findOne({}, {'a.c':0});
-assert.eq(res.a, [{}, {d:2}, 'z'], "six a");
+res = t.findOne({}, {'a.c': 0});
+assert.eq(res.a, [{}, {d: 2}, 'z'], "six a");
assert.eq(res.b, 1, "six b");
-res = t.findOne({}, {'a.c.e':1});
-assert.eq(res.a, [{c:{e:1}}, {}], "seven a");
+res = t.findOne({}, {'a.c.e': 1});
+assert.eq(res.a, [{c: {e: 1}}, {}], "seven a");
assert.eq(res.b, undefined, "seven b");
-res = t.findOne({}, {'a.c.e':0});
-assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a");
+res = t.findOne({}, {'a.c.e': 0});
+assert.eq(res.a, [{c: {f: 1}}, {d: 2}, 'z'], "eight a");
assert.eq(res.b, 1, "eight b");
diff --git a/jstests/core/fm4.js b/jstests/core/fm4.js
index c90041cf485..6a1aa5a44b5 100644
--- a/jstests/core/fm4.js
+++ b/jstests/core/fm4.js
@@ -1,16 +1,16 @@
t = db.fm4;
t.drop();
-t.insert({_id:1, a:1, b:1});
+t.insert({_id: 1, a: 1, b: 1});
-assert.eq( t.findOne({}, {_id:1}), {_id:1}, 1);
-assert.eq( t.findOne({}, {_id:0}), {a:1, b:1}, 2);
+assert.eq(t.findOne({}, {_id: 1}), {_id: 1}, 1);
+assert.eq(t.findOne({}, {_id: 0}), {a: 1, b: 1}, 2);
-assert.eq( t.findOne({}, {_id:1, a:1}), {_id:1, a:1}, 3);
-assert.eq( t.findOne({}, {_id:0, a:1}), {a:1}, 4);
+assert.eq(t.findOne({}, {_id: 1, a: 1}), {_id: 1, a: 1}, 3);
+assert.eq(t.findOne({}, {_id: 0, a: 1}), {a: 1}, 4);
-assert.eq( t.findOne({}, {_id:0, a:0}), {b:1}, 6);
-assert.eq( t.findOne({}, { a:0}), {_id:1, b:1}, 5);
+assert.eq(t.findOne({}, {_id: 0, a: 0}), {b: 1}, 6);
+assert.eq(t.findOne({}, {a: 0}), {_id: 1, b: 1}, 5);
// not sure if we want to support this since it is the same as above
-//assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5)
+// assert.eq( t.findOne({}, {_id:1, a:0}), {_id:1, b:1}, 5)
diff --git a/jstests/core/fsync.js b/jstests/core/fsync.js
index 99aceb83c9e..57762ce8c78 100644
--- a/jstests/core/fsync.js
+++ b/jstests/core/fsync.js
@@ -8,81 +8,81 @@
* - Confirm that the pseudo commands and eval can perform fsyncLock/Unlock
*/
(function() {
-"use strict";
-
-// Start with a clean DB
-var fsyncLockDB = db.getSisterDB('fsyncLockTestDB');
-fsyncLockDB.dropDatabase();
-
-// Tests the db.fsyncLock/fsyncUnlock features
-var storageEngine = db.serverStatus().storageEngine.name;
-
-// As of SERVER-18899 fsyncLock/fsyncUnlock will error when called on a storage engine
-// that does not support the begin/end backup commands.
-var supportsFsync = db.fsyncLock();
-
-if (!supportsFsync.ok) {
- assert.commandFailedWithCode(supportsFsync, ErrorCodes.CommandNotSupported);
- jsTestLog("Skipping test for " + storageEngine + " as it does not support fsync");
- return;
-}
-db.fsyncUnlock();
-
-var resFail = fsyncLockDB.runCommand({fsync:1, lock:1});
-
-// Start with a clean DB
-var fsyncLockDB = db.getSisterDB('fsyncLockTestDB');
-fsyncLockDB.dropDatabase();
-
-// Test it doesn't work unless invoked against the admin DB
-var resFail = fsyncLockDB.runCommand({fsync:1, lock:1});
-assert(!resFail.ok, "fsyncLock command succeeded against DB other than admin.");
-
-// Uses admin automatically and locks the server for writes
-var fsyncLockRes = db.fsyncLock();
-assert(fsyncLockRes.ok, "fsyncLock command failed against admin DB");
-assert(db.currentOp().fsyncLock, "Value in db.currentOp incorrect for fsyncLocked server");
-
-// Make sure writes are blocked. Spawn a write operation in a separate shell and make sure it
-// is blocked. There is really now way to do that currently, so just check that the write didn't
-// go through.
-var writeOpHandle = startParallelShell("db.getSisterDB('fsyncLockTestDB').coll.insert({x:1});");
-sleep(1000);
-
-// Make sure reads can still run even though there is a pending write and also that the write
-// didn't get through
-assert.eq(0, fsyncLockDB.coll.count({}));
-
-// Unlock and make sure the insert succeeded
-var fsyncUnlockRes = db.fsyncUnlock();
-assert(fsyncUnlockRes.ok, "fsyncUnlock command failed");
-assert(db.currentOp().fsyncLock == null, "fsyncUnlock is not null in db.currentOp");
-
-// Make sure the db is unlocked and the initial write made it through.
-writeOpHandle();
-fsyncLockDB.coll.insert({x:2});
-
-assert.eq(2, fsyncLockDB.coll.count({}));
-
-// Issue the fsyncLock and fsyncUnlock a second time, to ensure that we can
-// run this command repeatedly with no problems.
-var fsyncLockRes = db.fsyncLock();
-assert(fsyncLockRes.ok, "Second execution of fsyncLock command failed");
-
-var fsyncUnlockRes = db.fsyncUnlock();
-assert(fsyncUnlockRes.ok, "Second execution of fsyncUnlock command failed");
-
-// Ensure eval is not allowed to invoke fsyncLock
-assert(!db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail.");
-
-// Check that the fsyncUnlock pseudo-command (a lookup on cmd.$sys.unlock)
-// still has the same effect as a legitimate 'fsyncUnlock' command
-// TODO: remove this in in the release following MongoDB 3.2 when pseudo-commands
-// are removed
-var fsyncCommandRes = db.fsyncLock();
-assert(fsyncLockRes.ok, "fsyncLock command failed against admin DB");
-assert(db.currentOp().fsyncLock, "Value in db.currentOp incorrect for fsyncLocked server");
-var fsyncPseudoCommandRes = db.getSiblingDB("admin").$cmd.sys.unlock.findOne();
-assert(fsyncPseudoCommandRes.ok, "fsyncUnlock pseudo-command failed");
-assert(db.currentOp().fsyncLock == null, "fsyncUnlock is not null in db.currentOp");
+ "use strict";
+
+ // Start with a clean DB
+ var fsyncLockDB = db.getSisterDB('fsyncLockTestDB');
+ fsyncLockDB.dropDatabase();
+
+ // Tests the db.fsyncLock/fsyncUnlock features
+ var storageEngine = db.serverStatus().storageEngine.name;
+
+ // As of SERVER-18899 fsyncLock/fsyncUnlock will error when called on a storage engine
+ // that does not support the begin/end backup commands.
+ var supportsFsync = db.fsyncLock();
+
+ if (!supportsFsync.ok) {
+ assert.commandFailedWithCode(supportsFsync, ErrorCodes.CommandNotSupported);
+ jsTestLog("Skipping test for " + storageEngine + " as it does not support fsync");
+ return;
+ }
+ db.fsyncUnlock();
+
+ var resFail = fsyncLockDB.runCommand({fsync: 1, lock: 1});
+
+ // Start with a clean DB
+ var fsyncLockDB = db.getSisterDB('fsyncLockTestDB');
+ fsyncLockDB.dropDatabase();
+
+ // Test it doesn't work unless invoked against the admin DB
+ var resFail = fsyncLockDB.runCommand({fsync: 1, lock: 1});
+ assert(!resFail.ok, "fsyncLock command succeeded against DB other than admin.");
+
+ // Uses admin automatically and locks the server for writes
+ var fsyncLockRes = db.fsyncLock();
+ assert(fsyncLockRes.ok, "fsyncLock command failed against admin DB");
+ assert(db.currentOp().fsyncLock, "Value in db.currentOp incorrect for fsyncLocked server");
+
+ // Make sure writes are blocked. Spawn a write operation in a separate shell and make sure it
+    // is blocked. There is really no way to do that currently, so just check that the write didn't
+ // go through.
+ var writeOpHandle = startParallelShell("db.getSisterDB('fsyncLockTestDB').coll.insert({x:1});");
+ sleep(1000);
+
+ // Make sure reads can still run even though there is a pending write and also that the write
+ // didn't get through
+ assert.eq(0, fsyncLockDB.coll.count({}));
+
+ // Unlock and make sure the insert succeeded
+ var fsyncUnlockRes = db.fsyncUnlock();
+ assert(fsyncUnlockRes.ok, "fsyncUnlock command failed");
+    assert(db.currentOp().fsyncLock == null, "fsyncLock should be null in db.currentOp after fsyncUnlock");
+
+ // Make sure the db is unlocked and the initial write made it through.
+ writeOpHandle();
+ fsyncLockDB.coll.insert({x: 2});
+
+ assert.eq(2, fsyncLockDB.coll.count({}));
+
+ // Issue the fsyncLock and fsyncUnlock a second time, to ensure that we can
+ // run this command repeatedly with no problems.
+ var fsyncLockRes = db.fsyncLock();
+ assert(fsyncLockRes.ok, "Second execution of fsyncLock command failed");
+
+ var fsyncUnlockRes = db.fsyncUnlock();
+ assert(fsyncUnlockRes.ok, "Second execution of fsyncUnlock command failed");
+
+ // Ensure eval is not allowed to invoke fsyncLock
+ assert(!db.eval('db.fsyncLock()').ok, "eval('db.fsyncLock()') should fail.");
+
+ // Check that the fsyncUnlock pseudo-command (a lookup on cmd.$sys.unlock)
+ // still has the same effect as a legitimate 'fsyncUnlock' command
+    // TODO: remove this in the release following MongoDB 3.2 when pseudo-commands
+ // are removed
+ var fsyncCommandRes = db.fsyncLock();
+    assert(fsyncCommandRes.ok, "fsyncLock command failed against admin DB");
+ assert(db.currentOp().fsyncLock, "Value in db.currentOp incorrect for fsyncLocked server");
+ var fsyncPseudoCommandRes = db.getSiblingDB("admin").$cmd.sys.unlock.findOne();
+ assert(fsyncPseudoCommandRes.ok, "fsyncUnlock pseudo-command failed");
+    assert(db.currentOp().fsyncLock == null, "fsyncLock should be null in db.currentOp after fsyncUnlock");
}());
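A minimal sketch of the fsyncLock/fsyncUnlock round trip the test above relies on, assuming the shell is connected to a server whose storage engine supports fsyncLock:

var lockRes = db.fsyncLock();
if (lockRes.ok) {
    assert(db.currentOp().fsyncLock, "server should report fsyncLock while locked");
    assert.commandWorked(db.fsyncUnlock());
    assert(db.currentOp().fsyncLock == null, "fsyncLock should be cleared after fsyncUnlock");
} else {
    assert.commandFailedWithCode(lockRes, ErrorCodes.CommandNotSupported);
}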
diff --git a/jstests/core/fts1.js b/jstests/core/fts1.js
index 5bdaa926b45..23364b2ecb7 100644
--- a/jstests/core/fts1.js
+++ b/jstests/core/fts1.js
@@ -1,20 +1,20 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text1;
t.drop();
-t.ensureIndex( { x : "text" } );
+t.ensureIndex({x: "text"});
-assert.eq( [] , queryIDS( t , "az" ) , "A0" );
+assert.eq([], queryIDS(t, "az"), "A0");
-t.save( { _id : 1 , x : "az b c" } );
-t.save( { _id : 2 , x : "az b" } );
-t.save( { _id : 3 , x : "b c" } );
-t.save( { _id : 4 , x : "b c d" } );
+t.save({_id: 1, x: "az b c"});
+t.save({_id: 2, x: "az b"});
+t.save({_id: 3, x: "b c"});
+t.save({_id: 4, x: "b c d"});
-assert.eq( [1,2,3,4] , queryIDS( t , "c az" ) , "A1" );
-assert.eq( [4] , queryIDS( t , "d" ) , "A2" );
+assert.eq([1, 2, 3, 4], queryIDS(t, "c az"), "A1");
+assert.eq([4], queryIDS(t, "d"), "A2");
idx = t.getIndexes()[1];
-assert( idx.v >= 1, tojson( idx ) );
-assert( idx.textIndexVersion >= 1, tojson( idx ) );
+assert(idx.v >= 1, tojson(idx));
+assert(idx.textIndexVersion >= 1, tojson(idx));
diff --git a/jstests/core/fts2.js b/jstests/core/fts2.js
index 11b74a76b0b..cf0b875c220 100644
--- a/jstests/core/fts2.js
+++ b/jstests/core/fts2.js
@@ -1,21 +1,19 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text2;
t.drop();
-t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
-t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
+t.save({_id: 1, x: "az b x", y: "c d m", z: 1});
+t.save({_id: 2, x: "c d y", y: "az b n", z: 2});
-t.ensureIndex( { x : "text" } , { weights : { x : 10 , y : 1 } } );
+t.ensureIndex({x: "text"}, {weights: {x: 10, y: 1}});
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
-
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+assert.eq([1, 2], queryIDS(t, "az"), "A1");
+assert.eq([2, 1], queryIDS(t, "d"), "A2");
+assert.eq([1], queryIDS(t, "x"), "A3");
+assert.eq([2], queryIDS(t, "y"), "A4");
+assert.eq([1], queryIDS(t, "az", {z: 1}), "B1");
+assert.eq([1], queryIDS(t, "d", {z: 1}), "B2");
diff --git a/jstests/core/fts3.js b/jstests/core/fts3.js
index 8c550259d10..64e37d95105 100644
--- a/jstests/core/fts3.js
+++ b/jstests/core/fts3.js
@@ -1,20 +1,19 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text3;
t.drop();
-t.save( { _id : 1 , x : "az b x" , y : "c d m" , z : 1 } );
-t.save( { _id : 2 , x : "c d y" , y : "az b n" , z : 2 } );
+t.save({_id: 1, x: "az b x", y: "c d m", z: 1});
+t.save({_id: 2, x: "c d y", y: "az b n", z: 2});
-t.ensureIndex( { x : "text" , z : 1 } , { weights : { x : 10 , y : 1 } } );
+t.ensureIndex({x: "text", z: 1}, {weights: {x: 10, y: 1}});
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+assert.eq([1, 2], queryIDS(t, "az"), "A1");
+assert.eq([2, 1], queryIDS(t, "d"), "A2");
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+assert.eq([1], queryIDS(t, "x"), "A3");
+assert.eq([2], queryIDS(t, "y"), "A4");
+assert.eq([1], queryIDS(t, "az", {z: 1}), "B1");
+assert.eq([1], queryIDS(t, "d", {z: 1}), "B2");
diff --git a/jstests/core/fts4.js b/jstests/core/fts4.js
index fe35bdafe44..13a9e73cd10 100644
--- a/jstests/core/fts4.js
+++ b/jstests/core/fts4.js
@@ -1,20 +1,19 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text4;
t.drop();
-t.save( { _id : 1 , x : [ "az" , "b" , "x" ] , y : [ "c" , "d" , "m" ] , z : 1 } );
-t.save( { _id : 2 , x : [ "c" , "d" , "y" ] , y : [ "az" , "b" , "n" ] , z : 2 } );
+t.save({_id: 1, x: ["az", "b", "x"], y: ["c", "d", "m"], z: 1});
+t.save({_id: 2, x: ["c", "d", "y"], y: ["az", "b", "n"], z: 2});
-t.ensureIndex( { y : "text" , z : 1 } , { weights : { x : 10 } } );
+t.ensureIndex({y: "text", z: 1}, {weights: {x: 10}});
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+assert.eq([1, 2], queryIDS(t, "az"), "A1");
+assert.eq([2, 1], queryIDS(t, "d"), "A2");
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
-
-assert.eq( [1] , queryIDS( t , "az" , { z : 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z : 1 } ) , "B2" );
+assert.eq([1], queryIDS(t, "x"), "A3");
+assert.eq([2], queryIDS(t, "y"), "A4");
+assert.eq([1], queryIDS(t, "az", {z: 1}), "B1");
+assert.eq([1], queryIDS(t, "d", {z: 1}), "B2");
diff --git a/jstests/core/fts5.js b/jstests/core/fts5.js
index a95917f5d26..d3d6bb4de44 100644
--- a/jstests/core/fts5.js
+++ b/jstests/core/fts5.js
@@ -1,19 +1,19 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text5;
t.drop();
-t.save( { _id: 1 , x: [ { a: "az" } , { a: "b" } , { a: "x" } ] , y: [ "c" , "d" , "m" ] , z: 1 } );
-t.save( { _id: 2 , x: [ { a: "c" } , { a: "d" } , { a: "y" } ] , y: [ "az" , "b" , "n" ] , z: 2 } );
+t.save({_id: 1, x: [{a: "az"}, {a: "b"}, {a: "x"}], y: ["c", "d", "m"], z: 1});
+t.save({_id: 2, x: [{a: "c"}, {a: "d"}, {a: "y"}], y: ["az", "b", "n"], z: 2});
-t.ensureIndex( { y: "text" , z: 1 } , { weights: { "x.a": 10 } } );
+t.ensureIndex({y: "text", z: 1}, {weights: {"x.a": 10}});
-assert.eq( [1,2] , queryIDS( t , "az" ) , "A1" );
-assert.eq( [2,1] , queryIDS( t , "d" ) , "A2" );
+assert.eq([1, 2], queryIDS(t, "az"), "A1");
+assert.eq([2, 1], queryIDS(t, "d"), "A2");
-assert.eq( [1] , queryIDS( t , "x" ) , "A3" );
-assert.eq( [2] , queryIDS( t , "y" ) , "A4" );
+assert.eq([1], queryIDS(t, "x"), "A3");
+assert.eq([2], queryIDS(t, "y"), "A4");
-assert.eq( [1] , queryIDS( t , "az" , { z: 1 } ) , "B1" );
-assert.eq( [1] , queryIDS( t , "d" , { z: 1 } ) , "B2" );
+assert.eq([1], queryIDS(t, "az", {z: 1}), "B1");
+assert.eq([1], queryIDS(t, "d", {z: 1}), "B2");
diff --git a/jstests/core/fts_blog.js b/jstests/core/fts_blog.js
index 78b9ef34ecc..9f35836ef37 100644
--- a/jstests/core/fts_blog.js
+++ b/jstests/core/fts_blog.js
@@ -1,26 +1,20 @@
t = db.text_blog;
t.drop();
-t.save( { _id : 1 , title : "my blog post" , text : "this is a new blog i am writing. yay" } );
-t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am writing. yay" } );
-t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
+t.save({_id: 1, title: "my blog post", text: "this is a new blog i am writing. yay"});
+t.save({_id: 2, title: "my 2nd post", text: "this is a new blog i am writing. yay"});
+t.save({_id: 3, title: "knives are Fun", text: "this is a new blog i am writing. yay"});
// default weight is 1
// specify weights if you want a field to be more meaningful
-t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
-
-res = t.find( { "$text" : { "$search" : "blog" } } , { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 3, res.length());
-assert.eq( 1, res[0]._id );
-
-res = t.find( { "$text" : { "$search" : "write" } }, { score: { "$meta" : "textScore" } } );
-assert.eq( 3, res.length() );
-assert.eq( res[0].score, res[1].score );
-assert.eq( res[0].score, res[2].score );
-
-
-
-
-
+t.ensureIndex({"title": "text", text: "text"}, {weights: {title: 10}});
+res = t.find({"$text": {"$search": "blog"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(3, res.length());
+assert.eq(1, res[0]._id);
+res = t.find({"$text": {"$search": "write"}}, {score: {"$meta": "textScore"}});
+assert.eq(3, res.length());
+assert.eq(res[0].score, res[1].score);
+assert.eq(res[0].score, res[2].score);
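For readers skimming the fts_blog hunk above, a minimal mongo-shell sketch of the pattern it exercises: a text index with per-field weights and results sorted by textScore. The collection name demo_blog is hypothetical and not part of the test suite.

// Hypothetical sketch (not part of the diff): weighted text index plus textScore sort.
var demo = db.demo_blog;
demo.drop();
demo.insert({_id: 1, title: "my blog post", text: "this is a new blog"});
// title carries 10x the default weight of 1 given to other indexed fields.
demo.ensureIndex({title: "text", text: "text"}, {weights: {title: 10}});
// Project the computed score and sort on it.
demo.find({$text: {$search: "blog"}}, {score: {$meta: "textScore"}})
    .sort({score: {$meta: "textScore"}});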
diff --git a/jstests/core/fts_blogwild.js b/jstests/core/fts_blogwild.js
index e220bd89032..dad96cd2836 100644
--- a/jstests/core/fts_blogwild.js
+++ b/jstests/core/fts_blogwild.js
@@ -1,40 +1,40 @@
t = db.text_blogwild;
t.drop();
-t.save( { _id: 1 , title: "my blog post" , text: "this is a new blog i am writing. yay eliot" } );
-t.save( { _id: 2 , title: "my 2nd post" , text: "this is a new blog i am writing. yay" } );
-t.save( { _id: 3 , title: "knives are Fun for writing eliot" , text: "this is a new blog i am writing. yay" } );
+t.save({_id: 1, title: "my blog post", text: "this is a new blog i am writing. yay eliot"});
+t.save({_id: 2, title: "my 2nd post", text: "this is a new blog i am writing. yay"});
+t.save({
+ _id: 3,
+ title: "knives are Fun for writing eliot",
+ text: "this is a new blog i am writing. yay"
+});
// default weight is 1
// specify weights if you want a field to be more meaningful
-t.ensureIndex( { dummy: "text" } , { weights: "$**" } );
+t.ensureIndex({dummy: "text"}, {weights: "$**"});
-res = t.find( { "$text" : { "$search": "blog" } } );
-assert.eq( 3 , res.length() , "A1" );
+res = t.find({"$text": {"$search": "blog"}});
+assert.eq(3, res.length(), "A1");
-res = t.find( { "$text" : { "$search": "write" } } );
-assert.eq( 3 , res.length() , "B1" );
+res = t.find({"$text": {"$search": "write"}});
+assert.eq(3, res.length(), "B1");
// mixing
-t.dropIndex( "dummy_text" );
-assert.eq( 1 , t.getIndexKeys().length , "C1" );
-t.ensureIndex( { dummy: "text" } , { weights: { "$**": 1 , title: 2 } } );
-
-
-res = t.find( { "$text" : { "$search": "write" } }, { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 3 , res.length() , "C2" );
-assert.eq( 3 , res[0]._id , "C3" );
-
-res = t.find( { "$text" : { "$search": "blog" } }, { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 3 , res.length() , "D1" );
-assert.eq( 1 , res[0]._id , "D2" );
-
-res = t.find( { "$text" : { "$search": "eliot" } }, { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 2 , res.length() , "E1" );
-assert.eq( 3 , res[0]._id , "E2" );
-
-
-
-
-
-
+t.dropIndex("dummy_text");
+assert.eq(1, t.getIndexKeys().length, "C1");
+t.ensureIndex({dummy: "text"}, {weights: {"$**": 1, title: 2}});
+
+res = t.find({"$text": {"$search": "write"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(3, res.length(), "C2");
+assert.eq(3, res[0]._id, "C3");
+
+res = t.find({"$text": {"$search": "blog"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(3, res.length(), "D1");
+assert.eq(1, res[0]._id, "D2");
+
+res = t.find({"$text": {"$search": "eliot"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(2, res.length(), "E1");
+assert.eq(3, res[0]._id, "E2");
diff --git a/jstests/core/fts_casesensitive.js b/jstests/core/fts_casesensitive.js
index e49de5c1f7f..5b0e0832130 100644
--- a/jstests/core/fts_casesensitive.js
+++ b/jstests/core/fts_casesensitive.js
@@ -8,7 +8,9 @@ coll.drop();
assert.writeOK(coll.insert({_id: 0, a: "The Quick Brown Fox Jumps Over The Lazy Dog"}));
assert.commandWorked(coll.ensureIndex({a: "text"}));
-assert.throws(function() { queryIDS(coll, "hello", null, {$caseSensitive: "invalid"}); });
+assert.throws(function() {
+ queryIDS(coll, "hello", null, {$caseSensitive: "invalid"});
+});
assert.eq([0], queryIDS(coll, "The quick Brown", null, {$caseSensitive: true}));
assert.eq([0], queryIDS(coll, "Jumped", null, {$caseSensitive: true}));
diff --git a/jstests/core/fts_diacritic_and_caseinsensitive.js b/jstests/core/fts_diacritic_and_caseinsensitive.js
index 7a65a56e2fc..898735f3140 100644
--- a/jstests/core/fts_diacritic_and_caseinsensitive.js
+++ b/jstests/core/fts_diacritic_and_caseinsensitive.js
@@ -9,7 +9,7 @@ load('jstests/libs/fts.js');
coll.drop();
assert.writeOK(coll.insert({
- _id: 0,
+ _id: 0,
a: "O próximo Vôo à Noite sobre o Atlântico, Põe Freqüentemente o único Médico."
}));
diff --git a/jstests/core/fts_diacritic_and_casesensitive.js b/jstests/core/fts_diacritic_and_casesensitive.js
index 6f6ef4439e0..397b6033f88 100644
--- a/jstests/core/fts_diacritic_and_casesensitive.js
+++ b/jstests/core/fts_diacritic_and_casesensitive.js
@@ -1,4 +1,5 @@
-// Integration tests for {$diacriticSensitive: true, $caseSensitive: true} option to $text query operator.
+// Integration tests for {$diacriticSensitive: true, $caseSensitive: true} option to $text query
+// operator.
load('jstests/libs/fts.js');
@@ -9,56 +10,57 @@ load('jstests/libs/fts.js');
coll.drop();
assert.writeOK(coll.insert({
- _id: 0,
+ _id: 0,
a: "O próximo Vôo à Noite sobre o Atlântico, Põe Freqüentemente o único Médico."
}));
assert.commandWorked(coll.ensureIndex({a: "text"}, {default_language: "portuguese"}));
- assert.eq([0], queryIDS(
- coll, "próximo vôo à", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "Atlântico", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "\"próximo\"", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "\"Põe\" Atlântico", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "\"próximo Vôo\" \"único Médico\"", null,
- {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "\"próximo Vôo\" -\"único médico\"", null,
- {$diacriticSensitive: true, $caseSensitive: true}
- ));
+ assert.eq(
+ [0],
+ queryIDS(coll, "próximo vôo à", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [0], queryIDS(coll, "Atlântico", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [0],
+ queryIDS(coll, "\"próximo\"", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [0],
+ queryIDS(
+ coll, "\"Põe\" Atlântico", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq([0],
+ queryIDS(coll,
+ "\"próximo Vôo\" \"único Médico\"",
+ null,
+ {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq([0],
+ queryIDS(coll,
+ "\"próximo Vôo\" -\"único médico\"",
+ null,
+ {$diacriticSensitive: true, $caseSensitive: true}));
- assert.eq([], queryIDS(
- coll, "À", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "Próximo", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "proximo vôo à", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "À -próximo -Vôo", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "à proximo -Vôo", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "mo Vô", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "\"único médico\"", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
- assert.eq([], queryIDS(
- coll, "\"próximo Vôo\" -\"único Médico\"", null, {$diacriticSensitive: true, $caseSensitive: true}
- ));
+ assert.eq([], queryIDS(coll, "À", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq([],
+ queryIDS(coll, "Próximo", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [],
+ queryIDS(coll, "proximo vôo à", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [],
+ queryIDS(
+ coll, "À -próximo -Vôo", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [],
+ queryIDS(coll, "à proximo -Vôo", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq([], queryIDS(coll, "mo Vô", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq(
+ [],
+ queryIDS(
+ coll, "\"único médico\"", null, {$diacriticSensitive: true, $caseSensitive: true}));
+ assert.eq([],
+ queryIDS(coll,
+ "\"próximo Vôo\" -\"único Médico\"",
+ null,
+ {$diacriticSensitive: true, $caseSensitive: true}));
})();
\ No newline at end of file
diff --git a/jstests/core/fts_diacriticsensitive.js b/jstests/core/fts_diacriticsensitive.js
index c38978e3c4b..29e7784a785 100644
--- a/jstests/core/fts_diacriticsensitive.js
+++ b/jstests/core/fts_diacriticsensitive.js
@@ -9,7 +9,7 @@ load('jstests/libs/fts.js');
coll.drop();
assert.writeOK(coll.insert({
- _id: 0,
+ _id: 0,
a: "O próximo vôo à noite sobre o Atlântico, põe freqüentemente o único médico."
}));
@@ -23,12 +23,12 @@ load('jstests/libs/fts.js');
assert.eq([0], queryIDS(coll, "atlântico", null, {$diacriticSensitive: true}));
assert.eq([0], queryIDS(coll, "\"próximo\"", null, {$diacriticSensitive: true}));
assert.eq([0], queryIDS(coll, "\"põe\" atlântico", null, {$diacriticSensitive: true}));
- assert.eq([0], queryIDS(
- coll, "\"próximo vôo\" \"único médico\"", null, {$diacriticSensitive: true}
- ));
- assert.eq([0], queryIDS(
- coll, "\"próximo vôo\" -\"unico médico\"", null, {$diacriticSensitive: true}
- ));
+ assert.eq(
+ [0],
+ queryIDS(coll, "\"próximo vôo\" \"único médico\"", null, {$diacriticSensitive: true}));
+ assert.eq(
+ [0],
+ queryIDS(coll, "\"próximo vôo\" -\"unico médico\"", null, {$diacriticSensitive: true}));
assert.eq([], queryIDS(coll, "à", null, {$diacriticSensitive: true}));
assert.eq([], queryIDS(coll, "proximo", null, {$diacriticSensitive: true}));
@@ -37,8 +37,8 @@ load('jstests/libs/fts.js');
assert.eq([], queryIDS(coll, "à proximo -vôo", null, {$diacriticSensitive: true}));
assert.eq([], queryIDS(coll, "mo vô", null, {$diacriticSensitive: true}));
assert.eq([], queryIDS(coll, "\"unico medico\"", null, {$diacriticSensitive: true}));
- assert.eq([], queryIDS(
- coll, "\"próximo vôo\" -\"único médico\"", null, {$diacriticSensitive: true
- }));
+ assert.eq(
+ [],
+ queryIDS(coll, "\"próximo vôo\" -\"único médico\"", null, {$diacriticSensitive: true}));
})();
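The fts_casesensitive and fts_diacritic* tests above drive the $caseSensitive and $diacriticSensitive flags through the queryIDS() helper from jstests/libs/fts.js; here is a minimal sketch of the same options passed directly to $text. The collection name demo_fts is hypothetical.

// Hypothetical sketch: case- and diacritic-sensitive $text queries.
var demo = db.demo_fts;
demo.drop();
demo.insert({_id: 0, a: "O próximo Vôo à Noite"});
demo.ensureIndex({a: "text"}, {default_language: "portuguese"});
// Both flags default to false; enabling them requires matching case and accents exactly.
demo.find({$text: {$search: "Vôo", $caseSensitive: true, $diacriticSensitive: true}});
// With both flags left at their defaults the search is case- and diacritic-insensitive,
// which is what fts_diacritic_and_caseinsensitive.js asserts.
demo.find({$text: {$search: "voo"}});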
diff --git a/jstests/core/fts_explain.js b/jstests/core/fts_explain.js
index de55e98ddc8..225be626d2c 100644
--- a/jstests/core/fts_explain.js
+++ b/jstests/core/fts_explain.js
@@ -10,7 +10,7 @@ assert.commandWorked(res);
res = coll.insert({content: "some data"});
assert.writeOK(res);
-var explain = coll.find({$text:{$search: "\"a\" -b -\"c\""}}).explain(true);
+var explain = coll.find({$text: {$search: "\"a\" -b -\"c\""}}).explain(true);
var stage = explain.executionStats.executionStages;
if ("SINGLE_SHARD" === stage.stage) {
stage = stage.shards[0].executionStages;
diff --git a/jstests/core/fts_index.js b/jstests/core/fts_index.js
index 5410b8c4ca2..8cda28096d2 100644
--- a/jstests/core/fts_index.js
+++ b/jstests/core/fts_index.js
@@ -16,30 +16,49 @@ coll.getDB().createCollection(coll.getName());
// Spec passes text-specific index validation.
assert.commandWorked(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanish"}));
-assert.eq( 1, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.eq(1,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
// Spec fails text-specific index validation ("spanglish" unrecognized).
-assert.commandFailed(coll.ensureIndex({a: "text"}, {name: indexName, default_language: "spanglish"}));
-assert.eq( 0, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.commandFailed(coll.ensureIndex({a: "text"},
+ {name: indexName, default_language: "spanglish"}));
+assert.eq(0,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
// Spec passes general index validation.
assert.commandWorked(coll.ensureIndex({"$**": "text"}, {name: indexName}));
-assert.eq( 1, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.eq(1,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
// Spec fails general index validation ("a.$**" invalid field name for key).
assert.commandFailed(coll.ensureIndex({"a.$**": "text"}, {name: indexName}));
-assert.eq( 0, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.eq(0,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
// SERVER-19519 Spec fails if '_fts' is specified on a non-text index.
assert.commandFailed(coll.ensureIndex({_fts: 1}, {name: indexName}));
-assert.eq( 0, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.eq(0,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
assert.commandFailed(coll.ensureIndex({_fts: "text"}, {name: indexName}));
-assert.eq( 0, coll.getIndexes().filter( function(z){ return z.name == indexName; } ).length );
+assert.eq(0,
+ coll.getIndexes().filter(function(z) {
+ return z.name == indexName;
+ }).length);
coll.dropIndexes();
//
@@ -60,12 +79,12 @@ coll.drop();
// Can insert documents with valid language_override into text-indexed collection.
assert.commandWorked(coll.ensureIndex({a: "text"}));
coll.insert({a: ""});
-assert.writeOK( coll.insert({a: "", language: "spanish"}));
+assert.writeOK(coll.insert({a: "", language: "spanish"}));
coll.drop();
// Can't insert documents with invalid language_override into text-indexed collection.
assert.commandWorked(coll.ensureIndex({a: "text"}));
-assert.writeError( coll.insert({a: "", language: "spanglish"}));
+assert.writeError(coll.insert({a: "", language: "spanglish"}));
coll.drop();
//
@@ -142,7 +161,7 @@ assert.commandWorked(coll.ensureIndex({a: "text"}));
var longstring = "";
var longstring2 = "";
-for(var i = 0; i < 1024 * 1024; ++i) {
+for (var i = 0; i < 1024 * 1024; ++i) {
longstring = longstring + "a";
longstring2 = longstring2 + "b";
}
@@ -157,5 +176,4 @@ coll.dropIndexes();
assert.commandFailed(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}, {weights: {}}));
assert.commandFailed(coll.ensureIndex({a: 1, _fts: "text", _ftsx: 1, c: 1}));
-
coll.drop();
diff --git a/jstests/core/fts_index2.js b/jstests/core/fts_index2.js
index aa17df1514a..fa0129acc5c 100644
--- a/jstests/core/fts_index2.js
+++ b/jstests/core/fts_index2.js
@@ -11,6 +11,6 @@ assert.commandWorked(coll1.ensureIndex({"$**": "text"}));
assert.eq(1, coll1.count({$text: {$search: "content"}}));
// Rename within same database.
-assert.commandWorked(coll1.getDB().adminCommand({renameCollection: coll1.getFullName(),
- to: coll2.getFullName() }));
+assert.commandWorked(
+ coll1.getDB().adminCommand({renameCollection: coll1.getFullName(), to: coll2.getFullName()}));
assert.eq(1, coll2.count({$text: {$search: "content"}}));
diff --git a/jstests/core/fts_index_version1.js b/jstests/core/fts_index_version1.js
index 1095c5828ac..0b1c869a3a5 100644
--- a/jstests/core/fts_index_version1.js
+++ b/jstests/core/fts_index_version1.js
@@ -10,8 +10,7 @@ assert.eq(1, coll.count({$text: {$search: "run"}}));
// Test search with a "language alias" only recognized in textIndexVersion:1 (note that the stopword
// machinery doesn't recognize these aliases).
coll.drop();
-assert.commandWorked(coll.ensureIndex({a: "text"},
- {default_language: "eng", textIndexVersion: 1}));
+assert.commandWorked(coll.ensureIndex({a: "text"}, {default_language: "eng", textIndexVersion: 1}));
assert.writeOK(coll.insert({a: "running"}));
assert.eq(1, coll.count({$text: {$search: "run"}}));
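As a quick reference for the fts_index_version1 hunk above, a sketch of the index options it toggles; the collection name demo_idx is hypothetical.

// Hypothetical sketch: pinning the text index version and default language.
var demo = db.demo_idx;
demo.drop();
// "eng" is a legacy language alias accepted only by textIndexVersion: 1 indexes.
assert.commandWorked(demo.ensureIndex({a: "text"}, {default_language: "eng", textIndexVersion: 1}));
assert.writeOK(demo.insert({a: "running"}));
// Stemming still folds "running" into "run" under the v1 index.
assert.eq(1, demo.count({$text: {$search: "run"}}));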
diff --git a/jstests/core/fts_index_version2.js b/jstests/core/fts_index_version2.js
index 53557c8c6d6..05fecab36ee 100644
--- a/jstests/core/fts_index_version2.js
+++ b/jstests/core/fts_index_version2.js
@@ -9,13 +9,12 @@ load('jstests/libs/fts.js');
coll.drop();
assert.writeOK(coll.insert({
- _id: 0,
+ _id: 0,
a: "O próximo Vôo à Noite sobre o Atlântico, Põe Freqüentemente o único Médico."
}));
assert.commandWorked(
- coll.ensureIndex({a: "text"}, {default_language: "portuguese", textIndexVersion: 2}
- ));
+ coll.ensureIndex({a: "text"}, {default_language: "portuguese", textIndexVersion: 2}));
assert.eq([0], queryIDS(coll, "próximo vôo à", null));
assert.eq([0], queryIDS(coll, "atlântico", null));
diff --git a/jstests/core/fts_mix.js b/jstests/core/fts_mix.js
index 4ef8da0a28d..5142497fb41 100644
--- a/jstests/core/fts_mix.js
+++ b/jstests/core/fts_mix.js
@@ -1,5 +1,5 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
// test collection
tc = db.text_mix;
@@ -7,151 +7,204 @@ tc.drop();
// creation of collection documents
// content generated using wikipedia random article
-tc.save( { _id: 1, title: "Olivia Shakespear",text: "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr." } );
-tc.save( { _id: 2, title: "Mahim Bora", text: "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty." } );
-tc.save( { _id: 3, title: "A break away!", text: "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s." } );
-tc.save( { _id: 4, title: "Linn-Kristin Riegelhuth Koren", text: "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse." } );
-tc.save( { _id: 5, title: "Morten Jensen", text: "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller." } );
-tc.save( { _id: 6, title: "Janet Laurence", text: "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize." } );
-tc.save( { _id: 7, title: "Glen-Coats Baronets", text: "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet." } );
-tc.save( { _id: 8, title: "Grapeleaf Skeletonizer", text: "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin." } );
-tc.save( { _id: 9, title: "Physics World", text: "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter." } );
-tc.save( { _id: 10, title: "Mallacoota, Victoria", text: "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia." } );
+tc.save({
+ _id: 1,
+ title: "Olivia Shakespear",
+ text:
+ "Olivia Shakespear (born Olivia Tucker; 17 March 1863 – 3 October 1938) was a British novelist, playwright, and patron of the arts. She wrote six books that are described as \"marriage problem\" novels. Her works sold poorly, sometimes only a few hundred copies. Her last novel, Uncle Hilary, is considered her best. She wrote two plays in collaboration with Florence Farr."
+});
+tc.save({
+ _id: 2,
+ title: "Mahim Bora",
+ text:
+ "Mahim Bora (born 1926) is an Indian writer and educationist from Assam state. He was born at a tea estate of Sonitpur district. He is an M.A. in Assamese literature from Gauhati University and had been a teacher in the Nowgong College for most of his teaching career. He has now retired and lives at Nagaon. Bora spent a good part of his childhood in the culture-rich surroundings of rural Nagaon, where the river Kalong was the life-blood of a community. His impressionable mind was to capture a myriad memories of that childhood, later to find expression in his poems, short stories and novels with humour, irony and pathos woven into their texture. When this river was dammed up, its disturbing effect was on the entire community dependant on nature's bounty."
+});
+tc.save({
+ _id: 3,
+ title: "A break away!",
+ text:
+ "A break away! is an 1891 painting by Australian artist Tom Roberts. The painting depicts a mob of thirsty sheep stampeding towards a dam. A drover on horseback is attempting to turn the mob before they drown or crush each other in their desire to drink. The painting, an \"icon of Australian art\", is part of a series of works by Roberts that \"captures what was an emerging spirit of national identity.\" Roberts painted the work at Corowa. The painting depicts a time of drought, with little grass and the soil kicked up as dust. The work itself is a reflection on the pioneering days of the pastoral industry, which were coming to an end by the 1890s."
+});
+tc.save({
+ _id: 4,
+ title: "Linn-Kristin Riegelhuth Koren",
+ text:
+ "Linn-Kristin Riegelhuth Koren (born 1 August 1984, in Ski) is a Norwegian handballer playing for Larvik HK and the Norwegian national team. She is commonly known as Linka. Outside handball she is a qualified nurse."
+});
+tc.save({
+ _id: 5,
+ title: "Morten Jensen",
+ text:
+ "Morten Jensen (born December 2, 1982 in Lynge) is a Danish athlete. He primarily participates in long jump, 100 metres and 200 metres. He competed at the World Championships in 2005 and 2007, the 2006 World Indoor Championships, the 2006 European Championships, the 2007 World Championships and the 2008 Olympic Games without qualifying for the final round. He was runner-up in the 2010 Finnish Elite Games rankings, just missing out to Levern Spencer for that year's jackpot. He holds the Danish record in both long jump and 100 metres. He also holds the Danish indoor record in the 200 metres. He has been a part of the Sparta teamsine 2005, before then he was a part of FIF Hillerd. His coach was Leif Dahlberg after the 2010 European Championships he change to Lars Nielsen and Anders Miller."
+});
+tc.save({
+ _id: 6,
+ title: "Janet Laurence",
+ text:
+ "Janet Laurence (born 1947) is a Sydney based Australian artist who works in mixed media and installation. Her work has been included in major survey exhibitions, nationally and internationally and is regularly exhibited in Sydney, Melbourne and Japan. Her work explores a relationship to the natural world, often from an architectural context. It extends from the gallery space into the urban fabric, and has been realized in many site specific projects, often involving collaborations with architects, landscape architects and environmental scientists. She has received many grants and awards including a Rockefeller Residency in 1997. Laurence was a Trustee of the Art Gallery of NSW from 1995 to 2005. Laurence was the subject of John Beard's winning entry for the 2007 Archibald Prize."
+});
+tc.save({
+ _id: 7,
+ title: "Glen-Coats Baronets",
+ text:
+ "The Glen-Coats Baronetcy, of Ferguslie Park in the Parish of Abbey in the County of Renfrew, was a title in the Baronetage of the United Kingdom. It was created on 25 June 1894 for Thomas Glen-Coats, Director of the thread-making firm of J. & P. Coats, Ltd, and later Liberal Member of Parliament for Renfrewshire West. Born Thomas Coats, he assumed the additional surname of Glen, which was that of his maternal grandfather. He was succeeded by his son, the second Baronet. He won a gold medal in sailing at the 1908 Summer Olympics. The title became extinct on his death in 1954. Two other members of the Coats family also gained distinction. George Coats, 1st Baron Glentanar, was the younger brother of the first Baronet, while Sir James Coats, 1st Baronet (see Coats Baronets), was the first cousin of the first Baronet."
+});
+tc.save({
+ _id: 8,
+ title: "Grapeleaf Skeletonizer",
+ text:
+ "The Grapeleaf Skeletonizer, Harrisina americana is a moth in the family Zygaenidae. It is widespread in the eastern half of the United States, and commonly noticed defoliating grapes, especially of the Virginia creeper (Parthenocissus quinquefolia). The western grapeleaf skeletonizer, Harrisina brillians is very similar to and slightly larger than H. americana, but their distributions are different. Members of this family all produce hydrogen cyanide, a potent antipredator toxin."
+});
+tc.save({
+ _id: 9,
+ title: "Physics World",
+ text:
+ "Physics World is the membership magazine of the Institute of Physics, one of the largest physical societies in the world. It is an international monthly magazine covering all areas of physics, both pure and applied, and is aimed at physicists in research, industry and education worldwide. It was launched in 1988 by IOP Publishing Ltd and has established itself as one of the world's leading physics magazines. The magazine is sent free to members of the Institute of Physics, who can also access a digital edition of the magazine, although selected articles can be read by anyone for free online. It was redesigned in September 2005 and has an audited circulation of just under 35000. The current editor is Matin Durrani. Also on the team are Dens Milne (associate editor), Michael Banks (news editor), Louise Mayor (features editor) and Margaret Harris (reviews and careers editor). Hamish Johnston is the editor of the magazine's website physicsworld.com and James Dacey is its reporter."
+});
+tc.save({
+ _id: 10,
+ title: "Mallacoota, Victoria",
+ text:
+ "Mallacoota is a small town in the East Gippsland region of Victoria, Australia. At the 2006 census, Mallacoota had a population of 972. At holiday times, particularly Easter and Christmas, the population increases by about 8,000. It is one of the most isolated towns in the state of Victoria, 25 kilometres off the Princes Highway and 523 kilometres (325 mi) from Melbourne. It is 526 kilometres (327 mi) from Sydney, New South Wales. It is halfway between Melbourne and Sydney when travelling via Princes Highway, though that is a long route between Australia's two main cities. It is the last official township on Victoria's east coast before the border with New South Wales. Mallacoota has a regional airport (Mallacoota Airport) YMCO (XMC) consisting of a grassed field for private light planes. It is known for its wild flowers, abalone industry, the inlet estuary consisting of Top Lake and Bottom Lake, and Croajingolong National Park that surround it. It is a popular and beautiful holiday spot for boating, fishing, walking the wilderness coast, swimming, birdwatching, and surfing. The Mallacoota Arts Council runs events throughout each year. Mallacoota Inlet is one of the main villages along the wilderness coast walk from NSW to Victoria, Australia."
+});
// begin tests
// -------------------------------------------- INDEXING & WEIGHTING -------------------------------
// start with basic index, one item with default weight
-tc.ensureIndex( { "title": "text" } );
+tc.ensureIndex({"title": "text"});
// test the single result case..
-res = tc.find( { "$text": { "$search": "Victoria" } } );
-assert.eq( 1, res.length() );
-assert.eq( 10, res[0]._id );
+res = tc.find({"$text": {"$search": "Victoria"}});
+assert.eq(1, res.length());
+assert.eq(10, res[0]._id);
tc.dropIndexes();
// now let's see about multiple fields, with specific weighting
-tc.ensureIndex( { "title": "text", "text": "text" }, { weights: { "title": 10 } } );
-assert.eq( [9,7,8], queryIDS( tc, "members physics" ) );
+tc.ensureIndex({"title": "text", "text": "text"}, {weights: {"title": 10}});
+assert.eq([9, 7, 8], queryIDS(tc, "members physics"));
tc.dropIndexes();
// test all-1 weighting with "$**"
-tc.ensureIndex( { "$**": "text" } );
-assert.eq( [2,8,7], queryIDS( tc, "family tea estate" ) );
+tc.ensureIndex({"$**": "text"});
+assert.eq([2, 8, 7], queryIDS(tc, "family tea estate"));
tc.dropIndexes();
// non-1 weight on "$**" + other weight specified for some field
-tc.ensureIndex( { "$**": "text" }, { weights: { "$**": 10, "text": 2 } } );
-assert.eq( [7,5], queryIDS( tc, "Olympic Games gold medal" ) );
+tc.ensureIndex({"$**": "text"}, {weights: {"$**": 10, "text": 2}});
+assert.eq([7, 5], queryIDS(tc, "Olympic Games gold medal"));
tc.dropIndexes();
-// -------------------------------------------- "search"ING ------------------------------------------
+// -------------------------------------------- "search"ING
+// ------------------------------------------
// go back to "$**": 1, "title": 10.. and test more specific "search" functionality!
-tc.ensureIndex( { "$**": "text" }, { weights: { "title": 10 } } );
+tc.ensureIndex({"$**": "text"}, {weights: {"title": 10}});
// -------------------------------------------- STEMMING -------------------------------------------
// tests stemming for basic plural case
-res = tc.find( { "$text": { "$search": "member" } } );
-res2 = tc.find( { "$text": { "$search": "members" } } );
-assert.eq( getIDS( res ), getIDS( res2 ) );
+res = tc.find({"$text": {"$search": "member"}});
+res2 = tc.find({"$text": {"$search": "members"}});
+assert.eq(getIDS(res), getIDS(res2));
// "search" for something with potential 's bug.
-res = tc.find( { "$text": { "$search": "magazine's" } } );
-res2 = tc.find( { "$text": { "$search": "magazine" } } );
-assert.eq( getIDS( res ), getIDS( res2 ) );
+res = tc.find({"$text": {"$search": "magazine's"}});
+res2 = tc.find({"$text": {"$search": "magazine"}});
+assert.eq(getIDS(res), getIDS(res2));
// -------------------------------------------- LANGUAGE -------------------------------------------
-assert.throws(tc.find( { "$text": { "$search": "member", $language: "spanglish" } } ));
-assert.doesNotThrow(function() {tc.find( { "$text": { "$search": "member", $language: "english" } });} );
+assert.throws(tc.find({"$text": {"$search": "member", $language: "spanglish"}}));
+assert.doesNotThrow(function() {
+ tc.find({"$text": {"$search": "member", $language: "english"}});
+});
// -------------------------------------------- LIMIT RESULTS --------------------------------------
// ensure limit limits results
-assert.eq( [2], queryIDS( tc, "rural river dam", null, null, 1) );
+assert.eq([2], queryIDS(tc, "rural river dam", null, null, 1));
// ensure top results are the same regardless of limit
// make sure that this uses a case where it wouldn't be otherwise..
-res = tc.find( { "$text": { "$search": "united kingdom british princes" }}).limit(1);
-res2 = tc.find( { "$text": { "$search": "united kingdom british princes" } } );
-assert.eq( 1, res.length() );
-assert.eq( 4, res2.length() );
-assert.eq( res[0]._id, res2[0]._id );
+res = tc.find({"$text": {"$search": "united kingdom british princes"}}).limit(1);
+res2 = tc.find({"$text": {"$search": "united kingdom british princes"}});
+assert.eq(1, res.length());
+assert.eq(4, res2.length());
+assert.eq(res[0]._id, res2[0]._id);
// -------------------------------------------- PROJECTION -----------------------------------------
// test projection.. show just title and id
-res = tc.find( { "$text": { "$search": "Morten Jensen" }}, { title: 1 } );
-assert.eq( 1, res.length() );
-assert.eq( 5, res[0]._id );
-assert.eq( null, res[0].text );
-assert.neq( null, res[0].title );
-assert.neq( null, res[0]._id );
+res = tc.find({"$text": {"$search": "Morten Jensen"}}, {title: 1});
+assert.eq(1, res.length());
+assert.eq(5, res[0]._id);
+assert.eq(null, res[0].text);
+assert.neq(null, res[0].title);
+assert.neq(null, res[0]._id);
// test negative projection, ie. show everything but text
-res = tc.find( { "$text": { "$search": "handball" }}, { text: 0 } );
-assert.eq( 1, res.length() );
-assert.eq( 4, res[0]._id );
-assert.eq( null, res[0].text );
-assert.neq( null, res[0].title );
-assert.neq( null, res[0]._id );
+res = tc.find({"$text": {"$search": "handball"}}, {text: 0});
+assert.eq(1, res.length());
+assert.eq(4, res[0]._id);
+assert.eq(null, res[0].text);
+assert.neq(null, res[0].title);
+assert.neq(null, res[0]._id);
// test projection only title, no id
-res = tc.find( { "$text": { "$search": "Mahim Bora" }}, { _id: 0, title: 1 } );
-assert.eq( 1, res.length() );
-assert.eq( "Mahim Bora", res[0].title );
-assert.eq( null, res[0].text );
-assert.neq( null, res[0].title );
-assert.eq( null, res[0]._id );
+res = tc.find({"$text": {"$search": "Mahim Bora"}}, {_id: 0, title: 1});
+assert.eq(1, res.length());
+assert.eq("Mahim Bora", res[0].title);
+assert.eq(null, res[0].text);
+assert.neq(null, res[0].title);
+assert.eq(null, res[0]._id);
// -------------------------------------------- NEGATION -------------------------------------------
// test negation
-assert.eq( [8], queryIDS( tc, "United -Kingdom" ) );
-assert.eq( -1, tc.findOne( { _id : 8 } ).text.search(/Kingdom/i) );
+assert.eq([8], queryIDS(tc, "United -Kingdom"));
+assert.eq(-1, tc.findOne({_id: 8}).text.search(/Kingdom/i));
// test negation edge cases... hyphens, double dash, etc.
-assert.eq( [4], queryIDS( tc, "Linn-Kristin" ) );
+assert.eq([4], queryIDS(tc, "Linn-Kristin"));
// -------------------------------------------- PHRASE MATCHING ------------------------------------
// test exact phrase matching on
-assert.eq( [7], queryIDS( tc, "\"Summer Olympics\"" ) );
-assert.neq( -1, tc.findOne( { _id: 7 } ).text.indexOf("Summer Olympics") );
+assert.eq([7], queryIDS(tc, "\"Summer Olympics\""));
+assert.neq(-1, tc.findOne({_id: 7}).text.indexOf("Summer Olympics"));
// phrasematch with other stuff.. negation, other terms, etc.
-assert.eq( [10], queryIDS( tc, "\"wild flowers\" Sydney" ) );
+assert.eq([10], queryIDS(tc, "\"wild flowers\" Sydney"));
-assert.eq( [3], queryIDS( tc, "\"industry\" -Melbourne -Physics" ) );
+assert.eq([3], queryIDS(tc, "\"industry\" -Melbourne -Physics"));
// -------------------------------------------- EDGE CASES -----------------------------------------
// test empty string
-res = tc.find( { "$text": { "$search": "" } } );
-assert.eq( 0, res.length() );
+res = tc.find({"$text": {"$search": ""}});
+assert.eq(0, res.length());
// test string with a space in it
-res = tc.find( { "$text": { "$search": " " } } );
-assert.eq( 0, res.length() );
+res = tc.find({"$text": {"$search": " "}});
+assert.eq(0, res.length());
// -------------------------------------------- FILTERING ------------------------------------------
-assert.eq( [2], queryIDS( tc, "Mahim" ) );
-assert.eq( [2], queryIDS( tc, "Mahim", { _id: 2 } ) );
-assert.eq( [], queryIDS( tc, "Mahim", { _id: 1 } ) );
-assert.eq( [], queryIDS( tc, "Mahim", { _id: { $gte: 4 } } ) );
-assert.eq( [2], queryIDS( tc, "Mahim", { _id: { $lte: 4 } } ) );
+assert.eq([2], queryIDS(tc, "Mahim"));
+assert.eq([2], queryIDS(tc, "Mahim", {_id: 2}));
+assert.eq([], queryIDS(tc, "Mahim", {_id: 1}));
+assert.eq([], queryIDS(tc, "Mahim", {_id: {$gte: 4}}));
+assert.eq([2], queryIDS(tc, "Mahim", {_id: {$lte: 4}}));
// using regex conditional filtering
-assert.eq( [9], queryIDS( tc, "members", { title: { $regex: /Phy.*/i } } ) );
+assert.eq([9], queryIDS(tc, "members", {title: {$regex: /Phy.*/i}}));
// -------------------------------------------------------------------------------------------------
-assert( tc.validate().valid );
+assert(tc.validate().valid);
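The fts_mix tests above cover most of the $text surface at once; a condensed sketch of the filtering, limit, phrase, and negation patterns they rely on follows. The collection name demo_mix is hypothetical.

// Hypothetical sketch: $text combined with ordinary predicates, limits, and phrases.
var demo = db.demo_mix;
demo.drop();
demo.insert({_id: 1, title: "Physics World", text: "members magazine"});
demo.insert({_id: 2, title: "Mahim Bora", text: "born at a tea estate"});
demo.ensureIndex({"$**": "text"}, {weights: {title: 10}});
// A non-$text predicate filters the candidate set; limit() caps the result.
demo.find({$text: {$search: "members"}, _id: {$lte: 1}}).limit(1);
// Quoted phrases and a leading minus (negation) go inside $search itself.
demo.find({$text: {$search: "\"tea estate\" -physics"}});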
diff --git a/jstests/core/fts_partition1.js b/jstests/core/fts_partition1.js
index 52874f6628b..fc32507f430 100644
--- a/jstests/core/fts_partition1.js
+++ b/jstests/core/fts_partition1.js
@@ -1,22 +1,23 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text_parition1;
t.drop();
-t.insert( { _id : 1 , x : 1 , y : "foo" } );
-t.insert( { _id : 2 , x : 1 , y : "bar" } );
-t.insert( { _id : 3 , x : 2 , y : "foo" } );
-t.insert( { _id : 4 , x : 2 , y : "bar" } );
+t.insert({_id: 1, x: 1, y: "foo"});
+t.insert({_id: 2, x: 1, y: "bar"});
+t.insert({_id: 3, x: 2, y: "foo"});
+t.insert({_id: 4, x: 2, y: "bar"});
-t.ensureIndex( { x : 1, y : "text" } );
+t.ensureIndex({x: 1, y: "text"});
-assert.throws(t.find( { "$text": { "$search" : "foo" } } ));
+assert.throws(t.find({"$text": {"$search": "foo"}}));
-assert.eq( [ 1 ], queryIDS( t, "foo" , { x : 1 } ) );
+assert.eq([1], queryIDS(t, "foo", {x: 1}));
-res = t.find( { "$text": { "$search" : "foo" }, x : 1 }, { score: { "$meta" : "textScore" } } );
-assert( res[0].score > 0, tojson(res.toArray()));
+res = t.find({"$text": {"$search": "foo"}, x: 1}, {score: {"$meta": "textScore"}});
+assert(res[0].score > 0, tojson(res.toArray()));
// repeat "search" with "language" specified, SERVER-8999
-res = t.find( { "$text": { "$search" : "foo" , "$language" : "english" }, x : 1 }, { score: { "$meta" : "textScore" } } );
-assert( res[0].score > 0, tojson(res.toArray()));
+res = t.find({"$text": {"$search": "foo", "$language": "english"}, x: 1},
+ {score: {"$meta": "textScore"}});
+assert(res[0].score > 0, tojson(res.toArray()));
diff --git a/jstests/core/fts_partition_no_multikey.js b/jstests/core/fts_partition_no_multikey.js
index f77dc053f85..4c249522c30 100644
--- a/jstests/core/fts_partition_no_multikey.js
+++ b/jstests/core/fts_partition_no_multikey.js
@@ -2,12 +2,12 @@
t = db.fts_partition_no_multikey;
t.drop();
-t.ensureIndex( { x : 1, y : "text" } );
+t.ensureIndex({x: 1, y: "text"});
-assert.writeOK( t.insert( { x : 5 , y : "this is fun" } ));
+assert.writeOK(t.insert({x: 5, y: "this is fun"}));
-assert.writeError( t.insert( { x : [] , y : "this is fun" } ));
+assert.writeError(t.insert({x: [], y: "this is fun"}));
-assert.writeError( t.insert( { x : [1] , y : "this is fun" } ));
+assert.writeError(t.insert({x: [1], y: "this is fun"}));
-assert.writeError( t.insert( { x : [1,2] , y : "this is fun" } ));
+assert.writeError(t.insert({x: [1, 2], y: "this is fun"}));
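The two fts_partition tests above show the contract for a compound index whose text component has a leading non-text key: $text queries must supply an equality match on the prefix, and the prefix field may not be an array. A minimal sketch, with the hypothetical collection name demo_part:

// Hypothetical sketch: compound text index with an equality prefix on x.
var demo = db.demo_part;
demo.drop();
demo.ensureIndex({x: 1, y: "text"});
assert.writeOK(demo.insert({x: 1, y: "foo"}));
// Without an equality predicate on the prefix, the $text query has no usable plan.
assert.throws(function() {
    demo.find({$text: {$search: "foo"}}).itcount();
});
// With the prefix pinned, the query succeeds.
assert.eq(1, demo.find({$text: {$search: "foo"}, x: 1}).itcount());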
diff --git a/jstests/core/fts_phrase.js b/jstests/core/fts_phrase.js
index 471fedbfb42..d36df8aaeb0 100644
--- a/jstests/core/fts_phrase.js
+++ b/jstests/core/fts_phrase.js
@@ -2,24 +2,20 @@
t = db.text_phrase;
t.drop();
-t.save( { _id : 1 , title : "my blog post" , text : "i am writing a blog. yay" } );
-t.save( { _id : 2 , title : "my 2nd post" , text : "this is a new blog i am typing. yay" } );
-t.save( { _id : 3 , title : "knives are Fun" , text : "this is a new blog i am writing. yay" } );
-
-t.ensureIndex( { "title" : "text" , text : "text" } , { weights : { title : 10 } } );
-
-res = t.find( { "$text" : { "$search" : "blog write" } }, { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 3, res.length() );
-assert.eq( 1, res[0]._id );
-assert( res[0].score > (res[1].score*2), tojson(res.toArray()));
-
-res = t.find( { "$text" : { "$search" : "write blog" } }, { score: { "$meta" : "textScore" } } ).sort( { score: { "$meta" : "textScore" } });
-assert.eq( 3, res.length() );
-assert.eq( 1, res[0]._id );
-assert( res[0].score > (res[1].score*2), tojson(res.toArray()));
-
-
-
-
-
-
+t.save({_id: 1, title: "my blog post", text: "i am writing a blog. yay"});
+t.save({_id: 2, title: "my 2nd post", text: "this is a new blog i am typing. yay"});
+t.save({_id: 3, title: "knives are Fun", text: "this is a new blog i am writing. yay"});
+
+t.ensureIndex({"title": "text", text: "text"}, {weights: {title: 10}});
+
+res = t.find({"$text": {"$search": "blog write"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(3, res.length());
+assert.eq(1, res[0]._id);
+assert(res[0].score > (res[1].score * 2), tojson(res.toArray()));
+
+res = t.find({"$text": {"$search": "write blog"}}, {score: {"$meta": "textScore"}})
+ .sort({score: {"$meta": "textScore"}});
+assert.eq(3, res.length());
+assert.eq(1, res[0]._id);
+assert(res[0].score > (res[1].score * 2), tojson(res.toArray()));
diff --git a/jstests/core/fts_proj.js b/jstests/core/fts_proj.js
index ecd60e83a65..b59c02cc293 100644
--- a/jstests/core/fts_proj.js
+++ b/jstests/core/fts_proj.js
@@ -1,20 +1,16 @@
t = db.text_proj;
t.drop();
-t.save( { _id : 1 , x : "a", y: "b", z : "c"});
-t.save( { _id : 2 , x : "d", y: "e", z : "f"});
-t.save( { _id : 3 , x : "a", y: "g", z : "h"});
-
-t.ensureIndex( { x : "text"} , { default_language : "none" } );
-
-res = t.find( { "$text": {"$search" : "a"}} );
-assert.eq( 2, res.length() );
-assert( res[0].y, tojson(res.toArray()));
-
-res = t.find( { "$text": {"$search" : "a"}}, {x: 1} );
-assert.eq( 2, res.length() );
-assert( !res[0].y, tojson(res.toArray()));
-
+t.save({_id: 1, x: "a", y: "b", z: "c"});
+t.save({_id: 2, x: "d", y: "e", z: "f"});
+t.save({_id: 3, x: "a", y: "g", z: "h"});
+t.ensureIndex({x: "text"}, {default_language: "none"});
+res = t.find({"$text": {"$search": "a"}});
+assert.eq(2, res.length());
+assert(res[0].y, tojson(res.toArray()));
+res = t.find({"$text": {"$search": "a"}}, {x: 1});
+assert.eq(2, res.length());
+assert(!res[0].y, tojson(res.toArray()));
diff --git a/jstests/core/fts_projection.js b/jstests/core/fts_projection.js
index 60bb445a7b3..50fe4755fc3 100644
--- a/jstests/core/fts_projection.js
+++ b/jstests/core/fts_projection.js
@@ -8,10 +8,11 @@ db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
t.insert({_id: 0, a: "textual content"});
t.insert({_id: 1, a: "additional content", b: -1});
t.insert({_id: 2, a: "irrelevant content"});
-t.ensureIndex({a:"text"});
+t.ensureIndex({a: "text"});
// Project the text score.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {_idCopy:0, score:{$meta: "textScore"}}).toArray();
+var results = t.find({$text: {$search: "textual content -irrelevant"}},
+ {_idCopy: 0, score: {$meta: "textScore"}}).toArray();
// printjson(results);
// Scores should exist.
assert.eq(results.length, 2);
@@ -28,7 +29,8 @@ scores[results[1]._id] = results[1].score;
//
// Project text score into 2 fields.
-results = t.find({$text: {$search: "textual content -irrelevant"}}, {otherScore: {$meta: "textScore"}, score:{$meta: "textScore"}}).toArray();
+results = t.find({$text: {$search: "textual content -irrelevant"}},
+ {otherScore: {$meta: "textScore"}, score: {$meta: "textScore"}}).toArray();
assert.eq(2, results.length);
for (var i = 0; i < results.length; ++i) {
assert.close(scores[results[i]._id], results[i].score);
@@ -38,18 +40,22 @@ for (var i = 0; i < results.length; ++i) {
// printjson(results);
// Project text score into "x.$" shouldn't crash
-assert.throws(function() { t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}}).toArray(); });
+assert.throws(function() {
+ t.find({$text: {$search: "textual content -irrelevant"}}, {'x.$': {$meta: "textScore"}})
+ .toArray();
+});
// TODO: We can't project 'x.y':1 and 'x':1 (yet).
// Clobber an existing field and behave nicely.
-results = t.find({$text: {$search: "textual content -irrelevant"}},
- {b: {$meta: "textScore"}}).toArray();
+results =
+ t.find({$text: {$search: "textual content -irrelevant"}}, {b: {$meta: "textScore"}}).toArray();
assert.eq(2, results.length);
for (var i = 0; i < results.length; ++i) {
- assert.close(scores[results[i]._id], results[i].b,
- i + ': existing field in ' + tojson(results[i], '', true) +
- ' is not clobbered with score');
+ assert.close(
+ scores[results[i]._id],
+ results[i].b,
+ i + ': existing field in ' + tojson(results[i], '', true) + ' is not clobbered with score');
}
assert.neq(-1, results[0].b);
@@ -59,35 +65,40 @@ var results = t.find({a: /text/}, {score: {$meta: "textScore"}}).toArray();
// printjson(results);
// No textScore proj. with nested fields
-assert.throws(function() { t.find({$text: {$search: "blah"}}, {'x.y':{$meta: "textScore"}}).toArray(); });
+assert.throws(function() {
+ t.find({$text: {$search: "blah"}}, {'x.y': {$meta: "textScore"}}).toArray();
+});
// SERVER-12173
// When $text operator is in $or, should evaluate first
results = t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {_id: 1}]},
- {_idCopy:0, score:{$meta: "textScore"}}).toArray();
+ {_idCopy: 0, score: {$meta: "textScore"}}).toArray();
printjson(results);
assert.eq(2, results.length);
for (var i = 0; i < results.length; ++i) {
- assert.close(scores[results[i]._id], results[i].score,
+ assert.close(scores[results[i]._id],
+ results[i].score,
i + ': TEXT under OR invalid score: ' + tojson(results[i], '', true));
}
// SERVER-12592
-// When $text operator is in $or, all non-$text children must be indexed. Otherwise, we should produce
+// When $text operator is in $or, all non-$text children must be indexed. Otherwise, we should
+// produce
// a readable error.
var errorMessage = '';
-assert.throws( function() {
+assert.throws(function() {
try {
t.find({$or: [{$text: {$search: "textual content -irrelevant"}}, {b: 1}]}).itcount();
- }
- catch (e) {
+ } catch (e) {
errorMessage = e;
throw e;
}
}, null, 'Expected error from failed TEXT under OR planning');
-assert.neq(-1, errorMessage.message.indexOf('TEXT'),
+assert.neq(-1,
+ errorMessage.message.indexOf('TEXT'),
'message from failed text planning does not mention TEXT: ' + errorMessage);
-assert.neq(-1, errorMessage.message.indexOf('OR'),
+assert.neq(-1,
+ errorMessage.message.indexOf('OR'),
'message from failed text planning does not mention OR: ' + errorMessage);
// Scores should exist.
@@ -96,4 +107,3 @@ assert(results[0].score,
"invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
assert(results[1].score,
"invalid text score for " + tojson(results[0], '', true) + " when $text is in $or");
-
diff --git a/jstests/core/fts_querylang.js b/jstests/core/fts_querylang.js
index 4685b6fa550..2b13485699e 100644
--- a/jstests/core/fts_querylang.js
+++ b/jstests/core/fts_querylang.js
@@ -23,18 +23,25 @@ assert.neq(results[0]._id, 2);
assert.neq(results[1]._id, 2);
// Test sort with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray();
+results =
+ t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).toArray();
assert.eq(results.length, 2);
assert.eq(results[0]._id, 0);
assert.eq(results[1]._id, 1);
// Test skip with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).skip(1).toArray();
+results = t.find({$text: {$search: "textual content -irrelevant"}})
+ .sort({unindexedField: 1})
+ .skip(1)
+ .toArray();
assert.eq(results.length, 1);
assert.eq(results[0]._id, 1);
// Test limit with basic text query.
-results = t.find({$text: {$search: "textual content -irrelevant"}}).sort({unindexedField: 1}).limit(1).toArray();
+results = t.find({$text: {$search: "textual content -irrelevant"}})
+ .sort({unindexedField: 1})
+ .limit(1)
+ .toArray();
assert.eq(results.length, 1);
assert.eq(results[0]._id, 0);
@@ -44,19 +51,17 @@ assert.eq(results[0]._id, 0);
// framework.
// Test $and of basic text query with indexed expression.
-results = t.find({$text: {$search: "content -irrelevant"},
- _id: 1}).toArray();
+results = t.find({$text: {$search: "content -irrelevant"}, _id: 1}).toArray();
assert.eq(results.length, 1);
assert.eq(results[0]._id, 1);
// Test $and of basic text query with indexed expression, and bad language
assert.throws(function() {
- t.find({$text: {$search: "content -irrelevant", $language: "spanglish"}, _id: 1})
- .itcount();});
+ t.find({$text: {$search: "content -irrelevant", $language: "spanglish"}, _id: 1}).itcount();
+});
// Test $and of basic text query with unindexed expression.
-results = t.find({$text: {$search: "content -irrelevant"},
- unindexedField: 1}).toArray();
+results = t.find({$text: {$search: "content -irrelevant"}, unindexedField: 1}).toArray();
assert.eq(results.length, 1);
assert.eq(results[0]._id, 1);
@@ -70,14 +75,15 @@ cursor = t.find({$text: {$search: "contents", $language: "EN"}});
assert.eq(true, cursor.hasNext());
cursor = t.find({$text: {$search: "contents", $language: "spanglish"}});
-assert.throws(function() { cursor.next(); });
+assert.throws(function() {
+ cursor.next();
+});
// TODO Test $and of basic text query with geo expression.
// Test update with $text.
t.update({$text: {$search: "textual content -irrelevant"}}, {$set: {b: 1}}, {multi: true});
-assert.eq(2, t.find({b: 1}).itcount(),
- 'incorrect number of documents updated');
+assert.eq(2, t.find({b: 1}).itcount(), 'incorrect number of documents updated');
// TODO Test remove with $text, once it is enabled with the new query framework.
diff --git a/jstests/core/fts_score_sort.js b/jstests/core/fts_score_sort.js
index 59fb852a774..3ca22fe947d 100644
--- a/jstests/core/fts_score_sort.js
+++ b/jstests/core/fts_score_sort.js
@@ -8,10 +8,13 @@ db.adminCommand({setParameter: 1, newQueryFrameworkEnabled: true});
t.insert({_id: 0, a: "textual content"});
t.insert({_id: 1, a: "additional content"});
t.insert({_id: 2, a: "irrelevant content"});
-t.ensureIndex({a:"text"});
+t.ensureIndex({a: "text"});
// Sort by the text score.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({score: {$meta: "textScore"}}).toArray();
+var results =
+ t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}})
+ .sort({score: {$meta: "textScore"}})
+ .toArray();
// printjson(results);
assert.eq(results.length, 2);
assert.eq(results[0]._id, 0);
@@ -19,7 +22,10 @@ assert.eq(results[1]._id, 1);
assert(results[0].score > results[1].score);
// Sort by {_id descending, score} and verify the order is right.
-var results = t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}}).sort({_id: -1, score: {$meta: "textScore"}}).toArray();
+var results =
+ t.find({$text: {$search: "textual content -irrelevant"}}, {score: {$meta: "textScore"}})
+ .sort({_id: -1, score: {$meta: "textScore"}})
+ .toArray();
printjson(results);
assert.eq(results.length, 2);
assert.eq(results[0]._id, 1);
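// A minimal sketch, assuming the collection `t` and text index above: the same
// score-based ordering expressed through the aggregation pipeline, which also
// surfaces the text score via $meta.
var aggResults = t.aggregate([
    {$match: {$text: {$search: "textual content -irrelevant"}}},
    {$project: {a: 1, score: {$meta: "textScore"}}},
    {$sort: {score: {$meta: "textScore"}}}
]).toArray();
// Expected to mirror the find()-based results above: two documents, highest score first.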
diff --git a/jstests/core/fts_spanish.js b/jstests/core/fts_spanish.js
index 7c8ccecd577..74d71cceddf 100644
--- a/jstests/core/fts_spanish.js
+++ b/jstests/core/fts_spanish.js
@@ -1,30 +1,29 @@
-load( "jstests/libs/fts.js" );
+load("jstests/libs/fts.js");
t = db.text_spanish;
t.drop();
-t.save( { _id: 1, title: "mi blog", text: "Este es un blog de prueba" } );
-t.save( { _id: 2, title: "mi segundo post", text: "Este es un blog de prueba" } );
-t.save( { _id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed" } );
-t.save( { _id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english" } );
+t.save({_id: 1, title: "mi blog", text: "Este es un blog de prueba"});
+t.save({_id: 2, title: "mi segundo post", text: "Este es un blog de prueba"});
+t.save({_id: 3, title: "cuchillos son divertidos", text: "este es mi tercer blog stemmed"});
+t.save({_id: 4, language: "en", title: "My fourth blog", text: "This stemmed blog is in english"});
// default weight is 1
// specify weights if you want a field to be more meaningful
-t.ensureIndex( { "title": "text", text: "text" }, { weights: { title: 10 },
- default_language: "es" } );
+t.ensureIndex({"title": "text", text: "text"}, {weights: {title: 10}, default_language: "es"});
-res = t.find( { "$text" : { "$search" : "blog" } } );
-assert.eq( 4, res.length() );
+res = t.find({"$text": {"$search": "blog"}});
+assert.eq(4, res.length());
-assert.eq( [4], queryIDS( t, "stem" ) );
-assert.eq( [3], queryIDS( t, "stemmed" ) );
-assert.eq( [4], queryIDS( t, "stemmed", null, { "$language" : "en" } ) );
+assert.eq([4], queryIDS(t, "stem"));
+assert.eq([3], queryIDS(t, "stemmed"));
+assert.eq([4], queryIDS(t, "stemmed", null, {"$language": "en"}));
-assert.eq( [1,2], queryIDS( t, "prueba" ) );
+assert.eq([1, 2], queryIDS(t, "prueba"));
-assert.writeError( t.save( { _id: 5, language: "spanglish", title: "", text: "" } ));
+assert.writeError(t.save({_id: 5, language: "spanglish", title: "", text: ""}));
t.dropIndexes();
-res = t.ensureIndex( { "title": "text", text: "text" }, { default_language: "spanglish" } );
+res = t.ensureIndex({"title": "text", text: "text"}, {default_language: "spanglish"});
assert.neq(null, res);
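// A minimal sketch, assuming the same collection: the per-document "language"
// field used above is simply the default override field; language_override
// lets a hypothetical field name (here "idioma") play that role instead.
t.dropIndexes();
t.ensureIndex({title: "text", text: "text"},
              {default_language: "es", language_override: "idioma"});
// A document carrying {idioma: "en"} would then be stemmed as English.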
diff --git a/jstests/core/geo1.js b/jstests/core/geo1.js
index e1dc23fe153..724ae31a3ce 100644
--- a/jstests/core/geo1.js
+++ b/jstests/core/geo1.js
@@ -2,36 +2,39 @@
t = db.geo1;
t.drop();
-idx = { loc : "2d" , zip : 1 };
+idx = {
+ loc: "2d",
+ zip: 1
+};
-t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } );
-t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } );
-assert.writeOK( t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } ));
+t.insert({zip: "06525", loc: [41.352964, 73.01212]});
+t.insert({zip: "10024", loc: [40.786387, 73.97709]});
+assert.writeOK(t.insert({zip: "94061", loc: [37.463911, 122.23396]}));
// test "2d" has to be first
-assert.eq( 1 , t.getIndexKeys().length , "S1" );
-t.ensureIndex( { zip : 1 , loc : "2d" } );
-assert.eq( 1 , t.getIndexKeys().length , "S2" );
+assert.eq(1, t.getIndexKeys().length, "S1");
+t.ensureIndex({zip: 1, loc: "2d"});
+assert.eq(1, t.getIndexKeys().length, "S2");
-t.ensureIndex( idx );
-assert.eq( 2 , t.getIndexKeys().length , "S3" );
+t.ensureIndex(idx);
+assert.eq(2, t.getIndexKeys().length, "S3");
-assert.eq( 3 , t.count() , "B1" );
-assert.writeError( t.insert( { loc : [ 200 , 200 ] } ));
-assert.eq( 3 , t.count() , "B3" );
+assert.eq(3, t.count(), "B1");
+assert.writeError(t.insert({loc: [200, 200]}));
+assert.eq(3, t.count(), "B3");
// test normal access
-wb = t.findOne( { zip : "06525" } );
-assert( wb , "C1" );
+wb = t.findOne({zip: "06525"});
+assert(wb, "C1");
-assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" );
-assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" );
+assert.eq("06525", t.find({loc: wb.loc}).hint({"$natural": 1})[0].zip, "C2");
+assert.eq("06525", t.find({loc: wb.loc})[0].zip, "C3");
// assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" )
// test config options
t.drop();
-t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } );
-assert.writeOK( t.insert( { loc : [ 200 , 200 ] } ));
+t.ensureIndex({loc: "2d"}, {min: -500, max: 500, bits: 4});
+assert.writeOK(t.insert({loc: [200, 200]}));
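// A minimal sketch of what the {min, max, bits} options above control: the 2d
// index hashes each coordinate onto a grid over [min, max), and bits sets the
// precision, so the cell size here is roughly (max - min) / 2^bits.
var cellSize = (500 - (-500)) / Math.pow(2, 4);  // 62.5 units per cell with bits: 4
print("approximate 2d grid cell size: " + cellSize);
// Points outside [min, max), e.g. [600, 600], would still be rejected on insert.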
diff --git a/jstests/core/geo10.js b/jstests/core/geo10.js
index 5c26fbb3609..10879fc5d80 100644
--- a/jstests/core/geo10.js
+++ b/jstests/core/geo10.js
@@ -3,11 +3,14 @@
coll = db.geo10;
coll.drop();
-assert.commandWorked( db.geo10.ensureIndex( { c : '2d', t : 1 }, { min : 0, max : Math.pow( 2, 40 ) } ));
-assert.eq( 2, db.geo10.getIndexes().length, "A3" );
+assert.commandWorked(db.geo10.ensureIndex({c: '2d', t: 1}, {min: 0, max: Math.pow(2, 40)}));
+assert.eq(2, db.geo10.getIndexes().length, "A3");
-assert.writeOK( db.geo10.insert( { c : [ 1, 1 ], t : 1 } ));
-assert.writeOK( db.geo10.insert( { c : [ 3600, 3600 ], t : 1 } ));
-assert.writeOK( db.geo10.insert( { c : [ 0.001, 0.001 ], t : 1 } ));
+assert.writeOK(db.geo10.insert({c: [1, 1], t: 1}));
+assert.writeOK(db.geo10.insert({c: [3600, 3600], t: 1}));
+assert.writeOK(db.geo10.insert({c: [0.001, 0.001], t: 1}));
-printjson( db.geo10.find({ c : { $within : { $box : [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]] } }, t : 1 }).toArray() );
+printjson(db.geo10.find({
+ c: {$within: {$box: [[0.001, 0.001], [Math.pow(2, 40) - 0.001, Math.pow(2, 40) - 0.001]]}},
+ t: 1
+}).toArray());
diff --git a/jstests/core/geo2.js b/jstests/core/geo2.js
index 4317d044f76..0b7e91c18bc 100644
--- a/jstests/core/geo2.js
+++ b/jstests/core/geo2.js
@@ -4,42 +4,40 @@ t.drop();
n = 1;
arr = [];
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- arr.push( { _id : n++ , loc : [ x , y ] } );
+for (var x = -100; x < 100; x += 2) {
+ for (var y = -100; y < 100; y += 2) {
+ arr.push({_id: n++, loc: [x, y]});
}
}
-t.insert( arr );
-assert.eq( t.count(), 100 * 100 );
-assert.eq( t.count(), n - 1 );
+t.insert(arr);
+assert.eq(t.count(), 100 * 100);
+assert.eq(t.count(), n - 1);
+t.ensureIndex({loc: "2d"});
-t.ensureIndex( { loc : "2d" } );
+fast = db.runCommand({geoNear: t.getName(), near: [50, 50], num: 10});
-fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
-
-function a( cur ){
+function a(cur) {
var total = 0;
var outof = 0;
- while ( cur.hasNext() ){
+ while (cur.hasNext()) {
var o = cur.next();
- total += Geo.distance( [ 50 , 50 ] , o.loc );
+ total += Geo.distance([50, 50], o.loc);
outof++;
}
- return total/outof;
+ return total / outof;
}
-assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" );
-assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" );
-assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" );
-
-printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() );
+assert.close(fast.stats.avgDistance, a(t.find({loc: {$near: [50, 50]}}).limit(10)), "B1");
+assert.close(1.33333, a(t.find({loc: {$near: [50, 50]}}).limit(3)), "B2");
+assert.close(fast.stats.avgDistance, a(t.find({loc: {$near: [50, 50]}}).limit(10)), "B3");
+printjson(t.find({loc: {$near: [50, 50]}}).explain());
-assert.lt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(50) ) , "C1" );
-assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 , 3 ] } } ).limit(50) ) , "C2" );
-assert.gt( 3 , a( t.find( { loc : { $near : [ 50 , 50 ] , $maxDistance : 3 } } ).limit(50) ) , "C3" );
+assert.lt(3, a(t.find({loc: {$near: [50, 50]}}).limit(50)), "C1");
+assert.gt(3, a(t.find({loc: {$near: [50, 50, 3]}}).limit(50)), "C2");
+assert.gt(3, a(t.find({loc: {$near: [50, 50], $maxDistance: 3}}).limit(50)), "C3");
// SERVER-8974 - test if $geoNear operator works with 2d index as well
-var geoNear_cursor = t.find( { loc : { $geoNear : [50, 50] } } ).limit(100);
-assert.eq( geoNear_cursor.count(true), 100 );
+var geoNear_cursor = t.find({loc: {$geoNear: [50, 50]}}).limit(100);
+assert.eq(geoNear_cursor.count(true), 100);
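// A minimal sketch, assuming the `fast` geoNear result above: the command
// returns its results ordered by distance, which is what makes avgDistance
// comparable to averaging the first N $near results in a().
var prevDis = 0;
fast.results.forEach(function(r) {
    assert.lte(prevDis, r.dis);
    prevDis = r.dis;
});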
diff --git a/jstests/core/geo3.js b/jstests/core/geo3.js
index a11c24ed338..da3d8641049 100644
--- a/jstests/core/geo3.js
+++ b/jstests/core/geo3.js
@@ -4,80 +4,83 @@ t.drop();
n = 1;
arr = [];
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- arr.push( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } );
+for (var x = -100; x < 100; x += 2) {
+ for (var y = -100; y < 100; y += 2) {
+ arr.push({_id: n++, loc: [x, y], a: Math.abs(x) % 5, b: Math.abs(y) % 5});
}
}
-t.insert( arr );
-assert.eq( t.count(), 100 * 100 );
-assert.eq( t.count(), n - 1 );
+t.insert(arr);
+assert.eq(t.count(), 100 * 100);
+assert.eq(t.count(), n - 1);
+t.ensureIndex({loc: "2d"});
-t.ensureIndex( { loc : "2d" } );
-
-fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
+fast = db.runCommand({geoNear: t.getName(), near: [50, 50], num: 10});
// test filter
-filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
-assert.eq( 10 , filtered1.results.length , "B1" );
-filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } );
-//printjson( filtered1.stats );
+filtered1 = db.runCommand({geoNear: t.getName(), near: [50, 50], num: 10, query: {a: 2}});
+assert.eq(10, filtered1.results.length, "B1");
+filtered1.results.forEach(function(z) {
+ assert.eq(2, z.obj.a, "B2: " + tojson(z));
+});
+// printjson( filtered1.stats );
-function avgA( q , len ){
- if ( ! len )
+function avgA(q, len) {
+ if (!len)
len = 10;
- var realq = { loc : { $near : [ 50 , 50 ] } };
- if ( q )
- Object.extend( realq , q );
- var as =
- t.find( realq ).limit(len).map(
- function(z){
- return z.a;
- }
- );
- assert.eq( len , as.length , "length in avgA" );
- return Array.avg( as );
+ var realq = {
+ loc: {$near: [50, 50]}
+ };
+ if (q)
+ Object.extend(realq, q);
+ var as = t.find(realq).limit(len).map(function(z) {
+ return z.a;
+ });
+ assert.eq(len, as.length, "length in avgA");
+ return Array.avg(as);
}
-function testFiltering( msg ){
- assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " );
- assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " );
- assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " );
+function testFiltering(msg) {
+ assert.gt(2, avgA({}), msg + " testFiltering 1 ");
+ assert.eq(2, avgA({a: 2}), msg + " testFiltering 2 ");
+ assert.eq(4, avgA({a: 4}), msg + " testFiltering 3 ");
}
-testFiltering( "just loc" );
-
-t.dropIndex( { loc : "2d" } );
-assert.eq( 1 , t.getIndexKeys().length , "setup 3a" );
-t.ensureIndex( { loc : "2d" , a : 1 } );
-assert.eq( 2 , t.getIndexKeys().length , "setup 3b" );
+testFiltering("just loc");
-filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
-assert.eq( 10 , filtered2.results.length , "B3" );
-filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } );
+t.dropIndex({loc: "2d"});
+assert.eq(1, t.getIndexKeys().length, "setup 3a");
+t.ensureIndex({loc: "2d", a: 1});
+assert.eq(2, t.getIndexKeys().length, "setup 3b");
-assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" );
-assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" );
+filtered2 = db.runCommand({geoNear: t.getName(), near: [50, 50], num: 10, query: {a: 2}});
+assert.eq(10, filtered2.results.length, "B3");
+filtered2.results.forEach(function(z) {
+ assert.eq(2, z.obj.a, "B4: " + tojson(z));
+});
-testFiltering( "loc and a" );
+assert.eq(filtered1.stats.avgDistance, filtered2.stats.avgDistance, "C1");
+assert.gt(filtered1.stats.objectsLoaded, filtered2.stats.objectsLoaded, "C3");
-t.dropIndex( { loc : "2d" , a : 1 } );
-assert.eq( 1 , t.getIndexKeys().length , "setup 4a" );
-t.ensureIndex( { loc : "2d" , b : 1 } );
-assert.eq( 2 , t.getIndexKeys().length , "setup 4b" );
+testFiltering("loc and a");
-testFiltering( "loc and b" );
+t.dropIndex({loc: "2d", a: 1});
+assert.eq(1, t.getIndexKeys().length, "setup 4a");
+t.ensureIndex({loc: "2d", b: 1});
+assert.eq(2, t.getIndexKeys().length, "setup 4b");
+testFiltering("loc and b");
-q = { loc : { $near : [ 50 , 50 ] } };
-assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" );
-assert.eq( 100 , t.find( q ).limit(100).size() , "D2" );
+q = {
+ loc: {$near: [50, 50]}
+};
+assert.eq(100, t.find(q).limit(100).itcount(), "D1");
+assert.eq(100, t.find(q).limit(100).size(), "D2");
-assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" );
-assert.eq( 20 , t.find( q ).limit(20).size() , "D4" );
+assert.eq(20, t.find(q).limit(20).itcount(), "D3");
+assert.eq(20, t.find(q).limit(20).size(), "D4");
// SERVER-14039 Wrong limit after skip with $nearSphere, 2d index
-assert.eq( 10 , t.find( q ).skip(10).limit(10).itcount() , "D5" );
-assert.eq( 10 , t.find( q ).skip(10).limit(10).size() , "D6" );
+assert.eq(10, t.find(q).skip(10).limit(10).itcount(), "D5");
+assert.eq(10, t.find(q).skip(10).limit(10).size(), "D6");
diff --git a/jstests/core/geo5.js b/jstests/core/geo5.js
index 1a0830113e9..bbaa84c1d17 100644
--- a/jstests/core/geo5.js
+++ b/jstests/core/geo5.js
@@ -1,18 +1,17 @@
t = db.geo5;
t.drop();
-t.insert( { p : [ 0,0 ] } );
-t.ensureIndex( { p : "2d" } );
+t.insert({p: [0, 0]});
+t.ensureIndex({p: "2d"});
-res = t.runCommand( "geoNear" , { near : [1,1] } );
-assert.eq( 1 , res.results.length , "A1" );
+res = t.runCommand("geoNear", {near: [1, 1]});
+assert.eq(1, res.results.length, "A1");
-t.insert( { p : [ 1,1 ] } );
-t.insert( { p : [ -1,-1 ] } );
-res = t.runCommand( "geoNear" , { near : [50,50] } );
-assert.eq( 3 , res.results.length , "A2" );
-
-t.insert( { p : [ -1,-1 ] } );
-res = t.runCommand( "geoNear" , { near : [50,50] } );
-assert.eq( 4 , res.results.length , "A3" );
+t.insert({p: [1, 1]});
+t.insert({p: [-1, -1]});
+res = t.runCommand("geoNear", {near: [50, 50]});
+assert.eq(3, res.results.length, "A2");
+t.insert({p: [-1, -1]});
+res = t.runCommand("geoNear", {near: [50, 50]});
+assert.eq(4, res.results.length, "A3");
diff --git a/jstests/core/geo6.js b/jstests/core/geo6.js
index e57f8a6b6f6..3d681fe9b7e 100644
--- a/jstests/core/geo6.js
+++ b/jstests/core/geo6.js
@@ -2,23 +2,22 @@
t = db.geo6;
t.drop();
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-assert.eq( 0 , t.find().itcount() , "pre0" );
-assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" );
+assert.eq(0, t.find().itcount(), "pre0");
+assert.eq(0, t.find({loc: {$near: [50, 50]}}).itcount(), "pre1");
-t.insert( { _id : 1 , loc : [ 1 , 1 ] } );
-t.insert( { _id : 2 , loc : [ 1 , 2 ] } );
-t.insert( { _id : 3 } );
+t.insert({_id: 1, loc: [1, 1]});
+t.insert({_id: 2, loc: [1, 2]});
+t.insert({_id: 3});
-assert.eq( 3 , t.find().itcount() , "A1" );
-assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" );
-assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" );
+assert.eq(3, t.find().itcount(), "A1");
+assert.eq(2, t.find().hint({loc: "2d"}).itcount(), "A2");
+assert.eq(2, t.find({loc: {$near: [50, 50]}}).itcount(), "A3");
-t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).forEach(printjson);
-assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" );
-assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B1" );
+t.find({loc: {$near: [50, 50]}}).sort({_id: 1}).forEach(printjson);
+assert.eq(1, t.find({loc: {$near: [50, 50]}}).sort({_id: 1}).next()._id, "B1");
+assert.eq(2, t.find({loc: {$near: [50, 50]}}).sort({_id: -1}).next()._id, "B1");
-
-t.insert( { _id : 4 , loc : [] } );
-assert.eq( 4 , t.find().itcount() , "C1" );
+t.insert({_id: 4, loc: []});
+assert.eq(4, t.find().itcount(), "C1");
diff --git a/jstests/core/geo7.js b/jstests/core/geo7.js
index f353f75d789..b7563e9f155 100644
--- a/jstests/core/geo7.js
+++ b/jstests/core/geo7.js
@@ -2,19 +2,19 @@
t = db.geo7;
t.drop();
-t.insert({_id:1,y:[1,1]});
-t.insert({_id:2,y:[1,1],z:3});
-t.insert({_id:3,y:[1,1],z:4});
-t.insert({_id:4,y:[1,1],z:5});
+t.insert({_id: 1, y: [1, 1]});
+t.insert({_id: 2, y: [1, 1], z: 3});
+t.insert({_id: 3, y: [1, 1], z: 4});
+t.insert({_id: 4, y: [1, 1], z: 5});
-t.ensureIndex({y:"2d",z:1});
+t.ensureIndex({y: "2d", z: 1});
-assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" );
+assert.eq(1, t.find({y: [1, 1], z: 3}).itcount(), "A1");
-t.dropIndex({y:"2d",z:1});
+t.dropIndex({y: "2d", z: 1});
-t.ensureIndex({y:"2d"});
-assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" );
+t.ensureIndex({y: "2d"});
+assert.eq(1, t.find({y: [1, 1], z: 3}).itcount(), "A2");
-t.insert( { _id : 5 , y : 5 } );
-assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" );
+t.insert({_id: 5, y: 5});
+assert.eq(5, t.findOne({y: 5})._id, "B1");
diff --git a/jstests/core/geo9.js b/jstests/core/geo9.js
index 1e295911393..201bee7dfa5 100644
--- a/jstests/core/geo9.js
+++ b/jstests/core/geo9.js
@@ -2,27 +2,27 @@
t = db.geo9;
t.drop();
-t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } );
-t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } );
-t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } );
+t.save({_id: 1, a: [10, 10], b: [50, 50]});
+t.save({_id: 2, a: [11, 11], b: [51, 52]});
+t.save({_id: 3, a: [12, 12], b: [52, 52]});
-t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } );
-t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } );
-t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } );
+t.save({_id: 4, a: [50, 50], b: [10, 10]});
+t.save({_id: 5, a: [51, 51], b: [11, 11]});
+t.save({_id: 6, a: [52, 52], b: [12, 12]});
-t.ensureIndex( { a : "2d" } );
-t.ensureIndex( { b : "2d" } );
+t.ensureIndex({a: "2d"});
+t.ensureIndex({b: "2d"});
-function check( field ){
+function check(field) {
var q = {};
- q[field] = { $near : [ 11 , 11 ] };
- arr = t.find( q ).limit(3).map(
- function(z){
- return Geo.distance( [ 11 , 11 ] , z[field] );
- }
- );
- assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field );
+ q[field] = {
+ $near: [11, 11]
+ };
+ arr = t.find(q).limit(3).map(function(z) {
+ return Geo.distance([11, 11], z[field]);
+ });
+ assert.eq(2 * Math.sqrt(2), Array.sum(arr), "test " + field);
}
-check( "a" );
-check( "b" );
+check("a");
+check("b");
diff --git a/jstests/core/geo_2d_with_geojson_point.js b/jstests/core/geo_2d_with_geojson_point.js
index b5afc8b77b8..aaadf4be333 100644
--- a/jstests/core/geo_2d_with_geojson_point.js
+++ b/jstests/core/geo_2d_with_geojson_point.js
@@ -11,10 +11,6 @@ var geoJSONPoint = {
coordinates: [0, 0]
};
-print(assert.throws(
- function() {
- t.findOne({
- loc: {$near: {$geometry: geoJSONPoint}}});
- },
- [],
- 'querying 2d index with GeoJSON point.'));
+print(assert.throws(function() {
+ t.findOne({loc: {$near: {$geometry: geoJSONPoint}}});
+}, [], 'querying 2d index with GeoJSON point.'));
diff --git a/jstests/core/geo_allowedcomparisons.js b/jstests/core/geo_allowedcomparisons.js
index e1a36d495eb..576e764820a 100644
--- a/jstests/core/geo_allowedcomparisons.js
+++ b/jstests/core/geo_allowedcomparisons.js
@@ -2,22 +2,30 @@
t = db.geo_allowedcomparisons;
// Any GeoJSON object can intersect with any other GeoJSON object.
-geojsonPoint = { "type" : "Point", "coordinates": [ 0, 0 ] };
-oldPoint = [0,0];
+geojsonPoint = {
+ "type": "Point",
+ "coordinates": [0, 0]
+};
+oldPoint = [0, 0];
// GeoJSON polygons can contain any GeoJSON object as well as old-style points.
-geojsonPoly = { "type" : "Polygon",
- "coordinates" : [ [ [-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]]]};
+geojsonPoly = {
+ "type": "Polygon",
+ "coordinates": [[[-5, -5], [-5, 5], [5, 5], [5, -5], [-5, -5]]]
+};
// A GeoJSON LineString can be contained by GeoJSON polygons, and intersected by any GeoJSON object or old-style point.
-geojsonLine = { "type" : "LineString", "coordinates": [ [ 0, 0], [1, 1]]};
+geojsonLine = {
+ "type": "LineString",
+ "coordinates": [[0, 0], [1, 1]]
+};
// $centerSphere can contain old or new points.
oldCenterSphere = [[0, 0], Math.PI / 180];
// $box can contain old points.
-oldBox = [[-5,-5], [5,5]];
+oldBox = [[-5, -5], [5, 5]];
// $polygon can contain old points.
-oldPolygon = [[-5,-5], [-5,5], [5,5], [5,-5], [-5,-5]];
+oldPolygon = [[-5, -5], [-5, 5], [5, 5], [5, -5], [-5, -5]];
// $center can contain old points.
oldCenter = [[0, 0], 1];
@@ -39,7 +47,10 @@ assert.writeError(t.insert({geo: oldCenter}));
// Verify that even if we can't index them, we can use them in a matcher.
t.insert({gj: geojsonLine});
t.insert({gj: geojsonPoly});
-geojsonPoint2 = { "type" : "Point", "coordinates": [ 0, 0.001 ] };
+geojsonPoint2 = {
+ "type": "Point",
+ "coordinates": [0, 0.001]
+};
t.insert({gjp: geojsonPoint2});
// We convert between old and new style points.
@@ -56,17 +67,22 @@ function runTests() {
assert.eq(1, t.find({geo: {$geoWithin: {$center: oldCenter}}}).itcount());
assert.eq(1, t.find({geo: {$geoWithin: {$centerSphere: oldCenterSphere}}}).itcount());
// Using geojson with 2d-style geoWithin syntax should choke.
- assert.throws(function() { return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}})
- .itcount();});
+ assert.throws(function() {
+ return t.find({geo: {$geoWithin: {$polygon: geojsonPoly}}}).itcount();
+ });
// Using old polygon w/new syntax should choke too.
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}})
- .itcount();});
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldBox}}})
- .itcount();});
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenter}}})
- .itcount();});
- assert.throws(function() { return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}})
- .itcount();});
+ assert.throws(function() {
+ return t.find({geo: {$geoWithin: {$geometry: oldPolygon}}}).itcount();
+ });
+ assert.throws(function() {
+ return t.find({geo: {$geoWithin: {$geometry: oldBox}}}).itcount();
+ });
+ assert.throws(function() {
+ return t.find({geo: {$geoWithin: {$geometry: oldCenter}}}).itcount();
+ });
+ assert.throws(function() {
+ return t.find({geo: {$geoWithin: {$geometry: oldCenterSphere}}}).itcount();
+ });
// Even if we only have a 2d index, the 2d suitability function should
// allow the matcher to deal with this. If we have a 2dsphere index we use it.
assert.eq(1, t.find({geo: {$geoWithin: {$geometry: geojsonPoly}}}).itcount());
@@ -83,7 +99,7 @@ t.dropIndex({geo: "2d"});
runTests();
// 2dsphere index now.
-assert.commandWorked( t.ensureIndex({geo: "2dsphere"}) );
+assert.commandWorked(t.ensureIndex({geo: "2dsphere"}));
// 2dsphere does not support arrays of points.
assert.writeError(t.insert({geo: [geojsonPoint2, geojsonPoint]}));
runTests();
diff --git a/jstests/core/geo_array0.js b/jstests/core/geo_array0.js
index c83223cef05..42b9c758e45 100644
--- a/jstests/core/geo_array0.js
+++ b/jstests/core/geo_array0.js
@@ -3,24 +3,24 @@ t = db.geoarray;
function test(index) {
t.drop();
- t.insert( { zip : "10001", loc : [[ 10, 10 ], [ 50, 50 ]] } );
- t.insert( { zip : "10002", loc : [[ 20, 20 ], [ 50, 50 ]] } );
- var res = t.insert( { zip : "10003", loc : [[ 30, 30 ], [ 50, 50 ]] } );
- assert.writeOK( res );
+ t.insert({zip: "10001", loc: [[10, 10], [50, 50]]});
+ t.insert({zip: "10002", loc: [[20, 20], [50, 50]]});
+ var res = t.insert({zip: "10003", loc: [[30, 30], [50, 50]]});
+ assert.writeOK(res);
if (index) {
- assert.commandWorked(t.ensureIndex( { loc : "2d", zip : 1 } ));
- assert.eq( 2, t.getIndexKeys().length );
+ assert.commandWorked(t.ensureIndex({loc: "2d", zip: 1}));
+ assert.eq(2, t.getIndexKeys().length);
}
- res = t.insert( { zip : "10004", loc : [[ 40, 40 ], [ 50, 50 ]] } );
- assert.writeOK( res );
+ res = t.insert({zip: "10004", loc: [[40, 40], [50, 50]]});
+ assert.writeOK(res);
// test normal access
- printjson( t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() );
- assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
- assert.eq( 4, t.find( { loc : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
+ printjson(t.find({loc: {$within: {$box: [[0, 0], [45, 45]]}}}).toArray());
+ assert.eq(4, t.find({loc: {$within: {$box: [[0, 0], [45, 45]]}}}).count());
+ assert.eq(4, t.find({loc: {$within: {$box: [[45, 45], [50, 50]]}}}).count());
}
-//test(false); // this was removed as part of SERVER-6400
+// test(false); // this was removed as part of SERVER-6400
test(true);
diff --git a/jstests/core/geo_array1.js b/jstests/core/geo_array1.js
index c37c80b21e0..08b6060f3cc 100644
--- a/jstests/core/geo_array1.js
+++ b/jstests/core/geo_array1.js
@@ -5,31 +5,31 @@ function test(index) {
t.drop();
var locObj = [];
- // Add locations everywhere
- for ( var i = 0; i < 10; i++ ) {
- for ( var j = 0; j < 10; j++ ) {
- if ( j % 2 == 0 )
- locObj.push( [ i, j ] );
- else
- locObj.push( { x : i, y : j } );
- }
+ // Add locations everywhere
+ for (var i = 0; i < 10; i++) {
+ for (var j = 0; j < 10; j++) {
+ if (j % 2 == 0)
+ locObj.push([i, j]);
+ else
+ locObj.push({x: i, y: j});
}
+ }
// Add docs with all these locations
- for( var i = 0; i < 300; i++ ){
- t.insert( { loc : locObj } );
+ for (var i = 0; i < 300; i++) {
+ t.insert({loc: locObj});
}
if (index) {
- t.ensureIndex( { loc : "2d" } );
+ t.ensureIndex({loc: "2d"});
}
// Pull them back
- for ( var i = 0; i < 10; i++ ) {
- for ( var j = 0; j < 10; j++ ) {
- assert.eq(300, t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5 ],
- [i + 0.5,j + 0.5]]}}})
- .count());
+ for (var i = 0; i < 10; i++) {
+ for (var j = 0; j < 10; j++) {
+ assert.eq(
+ 300,
+ t.find({loc: {$within: {$box: [[i - 0.5, j - 0.5], [i + 0.5, j + 0.5]]}}}).count());
}
}
}
diff --git a/jstests/core/geo_array2.js b/jstests/core/geo_array2.js
index 68ecb65323e..33aad98930a 100644
--- a/jstests/core/geo_array2.js
+++ b/jstests/core/geo_array2.js
@@ -10,154 +10,146 @@ Random.setRandomSeed();
// Test the semantics of near / nearSphere / etc. queries with multiple keys per object
-for( var i = -1; i < 2; i++ ){
- for(var j = -1; j < 2; j++ ){
-
- locObj = [];
-
- if( i != 0 || j != 0 )
- locObj.push( { x : i * 50 + Random.rand(),
- y : j * 50 + Random.rand() } );
- locObj.push( { x : Random.rand(),
- y : Random.rand() } );
- locObj.push( { x : Random.rand(),
- y : Random.rand() } );
-
- t.insert({ name : "" + i + "" + j , loc : locObj , type : "A" });
- t.insert({ name : "" + i + "" + j , loc : locObj , type : "B" });
- }
-}
+for (var i = -1; i < 2; i++) {
+ for (var j = -1; j < 2; j++) {
+ locObj = [];
-assert.commandWorked(t.ensureIndex({ loc : "2d" , type : 1 }));
-
-print( "Starting testing phase... ");
-
-for( var t = 0; t < 2; t++ ){
-
-var type = t == 0 ? "A" : "B";
-
-for( var i = -1; i < 2; i++ ){
- for(var j = -1; j < 2; j++ ){
-
- var center = [ i * 50 , j * 50 ];
- var count = i == 0 && j == 0 ? 9 : 1;
- var objCount = 1;
-
- // Do near check
-
- var nearResults = db.runCommand( { geoNear : "geoarray2" ,
- near : center ,
- num : count,
- query : { type : type } } ).results;
- //printjson( nearResults )
-
- var objsFound = {};
- var lastResult = 0;
- for( var k = 0; k < nearResults.length; k++ ){
-
- // All distances should be small, for the # of results
- assert.gt( 1.5 , nearResults[k].dis );
- // Distances should be increasing
- assert.lte( lastResult, nearResults[k].dis );
- // Objs should be of the right type
- assert.eq( type, nearResults[k].obj.type );
-
- lastResult = nearResults[k].dis;
-
- var objKey = "" + nearResults[k].obj._id;
-
- if( objKey in objsFound ) objsFound[ objKey ]++;
- else objsFound[ objKey ] = 1;
-
- }
-
- // Make sure we found the right objects each time
- // Note: Multiple objects could be found for diff distances.
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] );
- }
-
-
- // Do nearSphere check
-
- // Earth Radius from geoconstants.h
- var eRad = 6378.1;
-
- nearResults = db.geoarray2.find( { loc : { $nearSphere : center , $maxDistance : 500 /* km */ / eRad }, type : type } ).toArray();
-
- assert.eq( nearResults.length , count );
-
- objsFound = {};
- lastResult = 0;
- for( var k = 0; k < nearResults.length; k++ ){
- var objKey = "" + nearResults[k]._id;
- if( objKey in objsFound ) objsFound[ objKey ]++;
- else objsFound[ objKey ] = 1;
-
- }
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] );
- }
-
-
-
- // Within results do not return duplicate documents
-
- var count = i == 0 && j == 0 ? 9 : 1;
- var objCount = i == 0 && j == 0 ? 1 : 1;
-
- // Do within check
- objsFound = {};
-
- var box = [ [center[0] - 1, center[1] - 1] , [center[0] + 1, center[1] + 1] ];
-
- //printjson( box )
-
- var withinResults = db.geoarray2.find({ loc : { $within : { $box : box } } , type : type }).toArray();
-
- assert.eq( withinResults.length , count );
-
- for( var k = 0; k < withinResults.length; k++ ){
- var objKey = "" + withinResults[k]._id;
- if( objKey in objsFound ) objsFound[ objKey ]++;
- else objsFound[ objKey ] = 1;
- }
-
- //printjson( objsFound )
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] );
- }
-
-
- // Do within check (circle)
- objsFound = {};
-
- withinResults = db.geoarray2.find({ loc : { $within : { $center : [ center, 1.5 ] } } , type : type }).toArray();
-
- assert.eq( withinResults.length , count );
-
- for( var k = 0; k < withinResults.length; k++ ){
- var objKey = "" + withinResults[k]._id;
- if( objKey in objsFound ) objsFound[ objKey ]++;
- else objsFound[ objKey ] = 1;
- }
-
- // Make sure we found the right objects each time
- for( var q in objsFound ){
- assert.eq( objCount , objsFound[q] );
- }
-
-
-
- }
-}
+ if (i != 0 || j != 0)
+ locObj.push({x: i * 50 + Random.rand(), y: j * 50 + Random.rand()});
+ locObj.push({x: Random.rand(), y: Random.rand()});
+ locObj.push({x: Random.rand(), y: Random.rand()});
+ t.insert({name: "" + i + "" + j, loc: locObj, type: "A"});
+ t.insert({name: "" + i + "" + j, loc: locObj, type: "B"});
+ }
}
+assert.commandWorked(t.ensureIndex({loc: "2d", type: 1}));
+
+print("Starting testing phase... ");
+
+for (var t = 0; t < 2; t++) {
+ var type = t == 0 ? "A" : "B";
+
+ for (var i = -1; i < 2; i++) {
+ for (var j = -1; j < 2; j++) {
+ var center = [i * 50, j * 50];
+ var count = i == 0 && j == 0 ? 9 : 1;
+ var objCount = 1;
+
+ // Do near check
+
+ var nearResults =
+ db.runCommand(
+ {geoNear: "geoarray2", near: center, num: count, query: {type: type}})
+ .results;
+ // printjson( nearResults )
+
+ var objsFound = {};
+ var lastResult = 0;
+ for (var k = 0; k < nearResults.length; k++) {
+ // All distances should be small, for the # of results
+ assert.gt(1.5, nearResults[k].dis);
+ // Distances should be increasing
+ assert.lte(lastResult, nearResults[k].dis);
+ // Objs should be of the right type
+ assert.eq(type, nearResults[k].obj.type);
+
+ lastResult = nearResults[k].dis;
+
+ var objKey = "" + nearResults[k].obj._id;
+
+ if (objKey in objsFound)
+ objsFound[objKey]++;
+ else
+ objsFound[objKey] = 1;
+ }
+
+ // Make sure we found the right objects each time
+ // Note: Multiple objects could be found for diff distances.
+ for (var q in objsFound) {
+ assert.eq(objCount, objsFound[q]);
+ }
+
+ // Do nearSphere check
+
+ // Earth Radius from geoconstants.h
+ var eRad = 6378.1;
+
+ nearResults = db.geoarray2.find({
+ loc: {$nearSphere: center, $maxDistance: 500 /* km */ / eRad},
+ type: type
+ }).toArray();
+
+ assert.eq(nearResults.length, count);
+ objsFound = {};
+ lastResult = 0;
+ for (var k = 0; k < nearResults.length; k++) {
+ var objKey = "" + nearResults[k]._id;
+ if (objKey in objsFound)
+ objsFound[objKey]++;
+ else
+ objsFound[objKey] = 1;
+ }
+ // Make sure we found the right objects each time
+ for (var q in objsFound) {
+ assert.eq(objCount, objsFound[q]);
+ }
+ // Within results do not return duplicate documents
+
+ var count = i == 0 && j == 0 ? 9 : 1;
+ var objCount = i == 0 && j == 0 ? 1 : 1;
+
+ // Do within check
+ objsFound = {};
+
+ var box = [[center[0] - 1, center[1] - 1], [center[0] + 1, center[1] + 1]];
+
+ // printjson( box )
+
+ var withinResults =
+ db.geoarray2.find({loc: {$within: {$box: box}}, type: type}).toArray();
+
+ assert.eq(withinResults.length, count);
+
+ for (var k = 0; k < withinResults.length; k++) {
+ var objKey = "" + withinResults[k]._id;
+ if (objKey in objsFound)
+ objsFound[objKey]++;
+ else
+ objsFound[objKey] = 1;
+ }
+
+ // printjson( objsFound )
+
+ // Make sure we found the right objects each time
+ for (var q in objsFound) {
+ assert.eq(objCount, objsFound[q]);
+ }
+
+ // Do within check (circle)
+ objsFound = {};
+
+ withinResults =
+ db.geoarray2.find({loc: {$within: {$center: [center, 1.5]}}, type: type}).toArray();
+
+ assert.eq(withinResults.length, count);
+
+ for (var k = 0; k < withinResults.length; k++) {
+ var objKey = "" + withinResults[k]._id;
+ if (objKey in objsFound)
+ objsFound[objKey]++;
+ else
+ objsFound[objKey] = 1;
+ }
+
+ // Make sure we found the right objects each time
+ for (var q in objsFound) {
+ assert.eq(objCount, objsFound[q]);
+ }
+ }
+ }
+}
diff --git a/jstests/core/geo_big_polygon.js b/jstests/core/geo_big_polygon.js
index 8022a5c74f5..6f278c59147 100644
--- a/jstests/core/geo_big_polygon.js
+++ b/jstests/core/geo_big_polygon.js
@@ -5,102 +5,115 @@
var coll = db.geo_big_polygon;
coll.drop();
-//coll.ensureIndex({ loc : "2dsphere" });
-
-coll.getMongo().getDB("admin").runCommand({ setParameter : 1, verboseQueryLogging : true });
-
-var bigCRS = { type : "name",
- properties : { name : "urn:x-mongodb:crs:strictwinding:EPSG:4326" } };
-
-var bigPoly20 = { type : "Polygon", coordinates : [[[10.0, 10.0],
- [-10.0, 10.0],
- [-10.0, -10.0],
- [10.0, -10.0],
- [10.0, 10.0]]],
- crs : bigCRS };
-
-var bigPoly20Comp = { type : "Polygon", coordinates : [[[10.0, 10.0],
- [10.0, -10.0],
- [-10.0, -10.0],
- [-10.0, 10.0],
- [10.0, 10.0]]],
- crs : bigCRS };
-
-var poly10 = { type : "Polygon", coordinates : [[[5.0, 5.0],
- [5.0, -5.0],
- [-5.0, -5.0],
- [-5.0, 5.0],
- [5.0, 5.0]]] };
-
-var line10 = { type : "LineString", coordinates : [[5.0, 5.0],
- [5.0, -5.0],
- [-5.0, -5.0],
- [-5.0, 5.0],
- [5.0, 5.0]] };
-
-var centerPoint = { type : "Point", coordinates : [0, 0] };
-
-var polarPoint = { type : "Point", coordinates : [85, 85] };
-
-var lineEquator = { type : "LineString", coordinates : [[-20, 0], [20, 0]] };
-
-assert.writeOK(coll.insert({ loc : poly10 }));
-assert.writeOK(coll.insert({ loc : line10 }));
-assert.writeOK(coll.insert({ loc : centerPoint }));
-assert.writeOK(coll.insert({ loc : polarPoint }));
-assert.writeOK(coll.insert({ loc : lineEquator }));
+// coll.ensureIndex({ loc : "2dsphere" });
+
+coll.getMongo().getDB("admin").runCommand({setParameter: 1, verboseQueryLogging: true});
+
+var bigCRS = {
+ type: "name",
+ properties: {name: "urn:x-mongodb:crs:strictwinding:EPSG:4326"}
+};
+
+var bigPoly20 = {
+ type: "Polygon",
+ coordinates: [[[10.0, 10.0], [-10.0, 10.0], [-10.0, -10.0], [10.0, -10.0], [10.0, 10.0]]],
+ crs: bigCRS
+};
+
+var bigPoly20Comp = {
+ type: "Polygon",
+ coordinates: [[[10.0, 10.0], [10.0, -10.0], [-10.0, -10.0], [-10.0, 10.0], [10.0, 10.0]]],
+ crs: bigCRS
+};
+
+var poly10 = {
+ type: "Polygon",
+ coordinates: [[[5.0, 5.0], [5.0, -5.0], [-5.0, -5.0], [-5.0, 5.0], [5.0, 5.0]]]
+};
+
+var line10 = {
+ type: "LineString",
+ coordinates: [[5.0, 5.0], [5.0, -5.0], [-5.0, -5.0], [-5.0, 5.0], [5.0, 5.0]]
+};
+
+var centerPoint = {
+ type: "Point",
+ coordinates: [0, 0]
+};
+
+var polarPoint = {
+ type: "Point",
+ coordinates: [85, 85]
+};
+
+var lineEquator = {
+ type: "LineString",
+ coordinates: [[-20, 0], [20, 0]]
+};
+
+assert.writeOK(coll.insert({loc: poly10}));
+assert.writeOK(coll.insert({loc: line10}));
+assert.writeOK(coll.insert({loc: centerPoint}));
+assert.writeOK(coll.insert({loc: polarPoint}));
+assert.writeOK(coll.insert({loc: lineEquator}));
assert.eq(coll.find({}).count(), 5);
jsTest.log("Starting query...");
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20 } } }).count(), 3);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20 } } }).count(), 4);
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20Comp } } }).count(), 1);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20Comp } } }).count(), 2);
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20}}}).count(), 3);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20}}}).count(), 4);
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20Comp}}}).count(), 1);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20Comp}}}).count(), 2);
-assert.commandWorked(coll.ensureIndex({ loc : "2dsphere" }));
-
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20 } } }).count(), 3);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20 } } }).count(), 4);
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20Comp } } }).count(), 1);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20Comp } } }).count(), 2);
+assert.commandWorked(coll.ensureIndex({loc: "2dsphere"}));
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20}}}).count(), 3);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20}}}).count(), 4);
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20Comp}}}).count(), 1);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20Comp}}}).count(), 2);
// Test inserting and querying a big polygon, both without and with an index
assert.commandWorked(coll.dropIndexes());
// 1. Without index, insert succeeds, but query ignores big polygon.
-var bigPoly10 = { type : "Polygon", coordinates : [[[5.0, 5.0],
- [-5.0, 5.0],
- [-5.0, -5.0],
- [5.0, -5.0],
- [5.0, 5.0]]],
- crs : bigCRS };
+var bigPoly10 = {
+ type: "Polygon",
+ coordinates: [[[5.0, 5.0], [-5.0, 5.0], [-5.0, -5.0], [5.0, -5.0], [5.0, 5.0]]],
+ crs: bigCRS
+};
-assert.writeOK(coll.insert({ _id: "bigPoly10", loc: bigPoly10}));
+assert.writeOK(coll.insert({_id: "bigPoly10", loc: bigPoly10}));
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20 } } }).count(), 3);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20 } } }).count(), 4);
-assert.eq(coll.find({ loc : { $geoWithin : { $geometry : bigPoly20Comp } } }).count(), 1);
-assert.eq(coll.find({ loc : { $geoIntersects : { $geometry : bigPoly20Comp } } }).count(), 2);
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20}}}).count(), 3);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20}}}).count(), 4);
+assert.eq(coll.find({loc: {$geoWithin: {$geometry: bigPoly20Comp}}}).count(), 1);
+assert.eq(coll.find({loc: {$geoIntersects: {$geometry: bigPoly20Comp}}}).count(), 2);
// 2. Building index fails due to big polygon
-assert.commandFailed(coll.ensureIndex({ loc : "2dsphere" }));
+assert.commandFailed(coll.ensureIndex({loc: "2dsphere"}));
// 3. After removing big polygon, index builds successfully
assert.writeOK(coll.remove({_id: "bigPoly10"}));
-assert.commandWorked(coll.ensureIndex({ loc : "2dsphere" }));
+assert.commandWorked(coll.ensureIndex({loc: "2dsphere"}));
// 4. With index, insert fails.
-assert.writeError(coll.insert({ _id: "bigPoly10", loc: bigPoly10}));
+assert.writeError(coll.insert({_id: "bigPoly10", loc: bigPoly10}));
// Queries using geometry types that don't support the big-polygon CRS should error out.
-var bigPoint = { type: "Point", coordinates: [0, 0], crs: bigCRS };
-var bigLine = { type : "LineString", coordinates : [[-20, 0], [20, 0]], crs: bigCRS };
+var bigPoint = {
+ type: "Point",
+ coordinates: [0, 0],
+ crs: bigCRS
+};
+var bigLine = {
+ type: "LineString",
+ coordinates: [[-20, 0], [20, 0]],
+ crs: bigCRS
+};
assert.throws(function() {
- coll.find( { loc : { $geoIntersects : { $geometry : bigPoint }}}).itcount();
+ coll.find({loc: {$geoIntersects: {$geometry: bigPoint}}}).itcount();
});
assert.throws(function() {
- coll.find( { loc : { $geoIntersects : { $geometry : bigLine }}}).itcount();
+ coll.find({loc: {$geoIntersects: {$geometry: bigLine}}}).itcount();
});
diff --git a/jstests/core/geo_big_polygon2.js b/jstests/core/geo_big_polygon2.js
index 9fb9ffead3d..46ac327b7e0 100644
--- a/jstests/core/geo_big_polygon2.js
+++ b/jstests/core/geo_big_polygon2.js
@@ -7,35 +7,27 @@
var crs84CRS = {
type: "name",
- properties: {
- name: "urn:ogc:def:crs:OGC:1.3:CRS84"
- }
+ properties: {name: "urn:ogc:def:crs:OGC:1.3:CRS84"}
};
var epsg4326CRS = {
type: "name",
- properties: {
- name: "EPSG:4326"
- }
+ properties: {name: "EPSG:4326"}
};
var strictCRS = {
type: "name",
- properties: {
- name: "urn:x-mongodb:crs:strictwinding:EPSG:4326"
- }
+ properties: {name: "urn:x-mongodb:crs:strictwinding:EPSG:4326"}
};
// invalid CRS name
var badCRS = {
type: "name",
- properties: {
- name: "urn:x-mongodb:crs:invalid:EPSG:4326"
- }
+ properties: {name: "urn:x-mongodb:crs:invalid:EPSG:4326"}
};
// helper to generate a line along a meridian (constant longitude)
function genLonLine(lon, startLat, endLat, latStep) {
var line = [];
for (var lat = startLat; lat <= endLat; lat += latStep) {
- line.push( [ lon, lat ] );
+ line.push([lon, lat]);
}
return line;
}
@@ -49,570 +41,392 @@ coll.drop();
// coordinates are longitude, latitude
// strictCRS (big polygon) cannot be stored in the collection
var objects = [
+ {name: "boat ramp", geo: {type: "Point", coordinates: [-97.927117, 30.327376]}},
+ {name: "on equator", geo: {type: "Point", coordinates: [-97.9, 0]}},
+ {name: "just north of equator", geo: {type: "Point", coordinates: [-97.9, 0.1]}},
+ {name: "just south of equator", geo: {type: "Point", coordinates: [-97.9, -0.1]}},
+ {
+ name: "north pole - crs84CRS",
+ geo: {type: "Point", coordinates: [-97.9, 90.0], crs: crs84CRS}
+ },
+ {
+ name: "south pole - epsg4326CRS",
+ geo: {type: "Point", coordinates: [-97.9, -90.0], crs: epsg4326CRS}
+ },
+ {
+ name: "short line string: PA, LA, 4corners, ATX, Mansfield, FL, Reston, NYC",
+ geo: {
+ type: "LineString",
+ coordinates: [
+ [-122.1611953, 37.4420407],
+ [-118.283638, 34.028517],
+ [-109.045223, 36.9990835],
+ [-97.850404, 30.3921555],
+ [-97.904187, 30.395457],
+ [-86.600836, 30.398147],
+ [-77.357837, 38.9589935],
+ [-73.987723, 40.7575074]
+ ]
+ }
+ },
+ {
+ name: "1024 point long line string from south pole to north pole",
+ geo: {type: "LineString", coordinates: genLonLine(2.349902, -90.0, 90.0, 180.0 / 1024)}
+ },
{
- name: "boat ramp",
- geo: {
- type: "Point",
- coordinates: [ -97.927117, 30.327376 ]
- }
- },
- {
- name: "on equator",
- geo: {
- type: "Point",
- coordinates: [ -97.9 , 0 ]
- }
- },
- {
- name: "just north of equator",
- geo: {
- type: "Point",
- coordinates: [ -97.9 , 0.1 ]
- }
- },
- {
- name: "just south of equator",
- geo: {
- type: "Point",
- coordinates: [ -97.9 , -0.1 ]
- }
- },
- {
- name: "north pole - crs84CRS",
- geo: {
- type: "Point",
- coordinates: [ -97.9 , 90.0 ],
- crs: crs84CRS
- }
- },
- {
- name: "south pole - epsg4326CRS",
- geo: {
- type: "Point",
- coordinates: [ -97.9 , -90.0 ],
- crs: epsg4326CRS
- }
+ name: "line crossing equator - epsg4326CRS",
+ geo: {
+ type: "LineString",
+ coordinates: [[-77.0451853, -12.0553442], [-76.7784557, 18.0098528]],
+ crs: epsg4326CRS
+ }
},
{
- name: "short line string: PA, LA, 4corners, ATX, Mansfield, FL, Reston, NYC",
- geo: {
- type: "LineString",
- coordinates: [
- [ -122.1611953, 37.4420407 ],
- [ -118.283638, 34.028517 ],
- [ -109.045223, 36.9990835 ],
- [ -97.850404, 30.3921555 ],
- [ -97.904187, 30.395457 ],
- [ -86.600836, 30.398147 ],
- [ -77.357837, 38.9589935 ],
- [ -73.987723, 40.7575074 ]
- ]
- }
+ name: "GeoJson polygon",
+ geo: {
+ type: "Polygon",
+ coordinates:
+ [[[-80.0, 30.0], [-40.0, 30.0], [-40.0, 60.0], [-80.0, 60.0], [-80.0, 30.0]]]
+ }
},
{
- name: "1024 point long line string from south pole to north pole",
- geo: {
- type: "LineString",
- coordinates: genLonLine(2.349902, -90.0, 90.0, 180.0 / 1024)
- }
+ name: "polygon w/ hole",
+ geo: {
+ type: "Polygon",
+ coordinates: [
+ [[-80.0, 30.0], [-40.0, 30.0], [-40.0, 60.0], [-80.0, 60.0], [-80.0, 30.0]],
+ [[-70.0, 40.0], [-60.0, 40.0], [-60.0, 50.0], [-70.0, 50.0], [-70.0, 40.0]]
+ ]
+ }
+ },
+ {
+ name: "polygon w/ two holes",
+ geo: {
+ type: "Polygon",
+ coordinates: [
+ [[-80.0, 30.0], [-40.0, 30.0], [-40.0, 60.0], [-80.0, 60.0], [-80.0, 30.0]],
+ [[-70.0, 40.0], [-60.0, 40.0], [-60.0, 50.0], [-70.0, 50.0], [-70.0, 40.0]],
+ [[-55.0, 40.0], [-45.0, 40.0], [-45.0, 50.0], [-55.0, 50.0], [-55.0, 40.0]]
+ ]
+ }
},
{
- name: "line crossing equator - epsg4326CRS",
- geo: {
- type: "LineString",
- coordinates: [
- [ -77.0451853, -12.0553442 ],
- [ -76.7784557, 18.0098528 ]
- ],
- crs: epsg4326CRS
- }
- },
- {
- name: "GeoJson polygon",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -80.0, 30.0 ],
- [ -40.0, 30.0 ],
- [ -40.0, 60.0 ],
- [ -80.0, 60.0 ],
- [ -80.0, 30.0 ] ]
- ]
- }
- },
- {
- name: "polygon w/ hole",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -80.0, 30.0 ],
- [ -40.0, 30.0 ],
- [ -40.0, 60.0 ],
- [-80.0, 60.0 ],
- [ -80.0, 30.0 ] ],
- [ [ -70.0, 40.0 ],
- [ -60.0, 40.0 ],
- [ -60.0, 50.0 ],
- [ -70.0, 50.0 ],
- [ -70.0, 40.0 ] ]
- ]
- }
- },
- {
- name: "polygon w/ two holes",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -80.0, 30.0 ],
- [ -40.0, 30.0 ],
- [ -40.0, 60.0 ],
- [ -80.0, 60.0 ],
- [ -80.0, 30.0 ] ],
- [ [ -70.0, 40.0 ],
- [ -60.0, 40.0 ],
- [ -60.0, 50.0 ],
- [ -70.0, 50.0 ],
- [ -70.0, 40.0 ] ],
- [ [ -55.0, 40.0 ],
- [ -45.0, 40.0 ],
- [ -45.0, 50.0 ],
- [ -55.0, 50.0 ],
- [ -55.0, 40.0 ] ]
- ]
- }
- },
- {
- name: "polygon covering North pole",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, 89.0 ],
- [ 0.0, 89.0 ],
- [ 120.0, 89.0 ],
- [ -120.0, 89.0 ] ]
- ]
- }
- },
- {
- name: "polygon covering South pole",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, -89.0 ],
- [ 0.0, -89.0 ],
- [ 120.0, -89.0 ],
- [ -120.0, -89.0 ] ]
- ]
- }
- },
- {
- name: "big polygon/rectangle covering both poles",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -130.0, 89.0 ],
- [ -120.0, 89.0 ],
- [ -120.0, -89.0 ],
- [ -130.0, -89.0 ],
- [ -130.0, 89.0 ] ]
- ],
- crs: strictCRS
- }
- },
- {
- name: "polygon (triangle) w/ hole at North pole",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, 80.0 ],
- [ 0.0, 80.0 ],
- [ 120.0, 80.0 ],
- [-120.0, 80.0 ] ],
- [ [ -120.0, 88.0 ],
- [ 0.0, 88.0 ],
- [ 120.0, 88.0 ],
- [-120.0, 88.0 ] ]
- ]
- }
- },
- {
- name: "polygon with edge on equator",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, 0.0 ],
- [ 120.0, 0.0 ],
- [ 0.0, 90.0 ],
- [ -120.0, 0.0 ] ]
- ]
- }
- },
- {
- name: "polygon just inside single hemisphere (Northern) - China, California, Europe",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ 120.0, 0.000001 ],
- [ -120.0, 0.000001 ],
- [ 0.0, 0.000001 ],
- [ 120.0, 0.000001 ] ]
- ]
- }
- },
- {
- name: "polygon inside Northern hemisphere",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ 120.0, 80.0 ],
- [ -120.0, 80.0 ],
- [ 0.0, 80.0 ],
- [ 120.0, 80.0 ] ]
- ]
- }
- },
- {
- name: "polygon just inside a single hemisphere (Southern) - Pacific, Indonesia, Africa",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, -0.000001 ],
- [ 120.0, -0.000001 ],
- [ 0.0, -0.000001 ],
- [ -120.0, -0.000001 ] ]
- ]
- }
- },
- {
- name: "polygon inside Southern hemisphere",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -120.0, -80.0 ],
- [ 120.0, -80.0 ],
- [ 0.0, -80.0 ],
- [ -120.0, -80.0 ] ]
- ]
- }
- },
- {
- name: "single point (MultiPoint): Palo Alto",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ -122.1611953, 37.4420407 ]
- ]
- }
- },
- {
- name: "multiple points(MultiPoint): PA, LA, 4corners, ATX, Mansfield, FL, Reston, NYC",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ -122.1611953, 37.4420407 ],
- [ -118.283638, 34.028517 ],
- [ -109.045223, 36.9990835 ],
- [ -97.850404, 30.3921555 ],
- [ -97.904187, 30.395457 ],
- [ -86.600836, 30.398147 ],
- [ -77.357837, 38.9589935 ],
- [ -73.987723, 40.7575074 ]
- ]
- }
- },
- {
- name: "two points (MultiPoint): Shenzhen, Guangdong, China",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ 114.0538788, 22.5551603 ],
- [ 114.022837, 22.44395 ]
- ]
- }
- },
- {
- name: "two points (MultiPoint) but only one in: Shenzhen, Guangdong, China",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ 114.0538788, 22.5551603 ],
- [ 113.743858, 23.025815 ]
- ]
- }
- },
- {
- name: "multi line string: new zealand bays",
- geo: {
- type: "MultiLineString",
- coordinates: [
- [ [ 172.803869, -43.592789 ],
- [ 172.659335, -43.620348 ],
- [ 172.684038, -43.636528 ],
- [ 172.820922, -43.605325 ] ],
- [ [ 172.830497, -43.607768 ],
- [ 172.813263, -43.656319 ],
- [ 172.823096, -43.660996 ],
- [ 172.850943, -43.607609 ] ],
- [ [ 172.912056, -43.623148 ],
- [ 172.887696, -43.670897 ],
- [ 172.900469, -43.676178 ],
- [ 172.931735, -43.622839 ] ]
- ]
- }
- },
- {
- name: "multi polygon: new zealand north and south islands",
- geo: {
- type: "MultiPolygon",
- coordinates: [
- [
- [ [ 165.773255, -45.902933 ],
- [ 169.398419, -47.261538 ],
- [ 174.672744, -41.767722 ],
- [ 172.288845, -39.897992 ],
- [ 165.773255, -45.902933 ] ]
- ],
- [
- [ [ 173.166448, -39.778262 ],
- [ 175.342744, -42.677333 ],
- [ 179.913373, -37.224362 ],
- [ 171.475953, -32.688871 ],
- [ 173.166448, -39.778262 ] ]
- ]
- ]
- }
- },
- {
- name: "geometry collection: point in Australia and triangle around Australia",
- geo: {
- type: "GeometryCollection",
- geometries: [
- {
- name: "center of Australia",
- type: "Point",
- coordinates: [ 133.985885, -27.240790 ]
- },
- {
- name: "Triangle around Australia",
- type: "Polygon",
- coordinates: [
- [ [ 97.423178, -44.735405 ],
- [ 169.845050, -38.432287 ],
- [ 143.824366, 15.966509 ],
- [ 97.423178, -44.735405 ] ]
- ]
- }
- ]
- }
+ name: "polygon covering North pole",
+ geo: {
+ type: "Polygon",
+ coordinates: [[[-120.0, 89.0], [0.0, 89.0], [120.0, 89.0], [-120.0, 89.0]]]
+ }
+ },
+ {
+ name: "polygon covering South pole",
+ geo: {
+ type: "Polygon",
+ coordinates: [[[-120.0, -89.0], [0.0, -89.0], [120.0, -89.0], [-120.0, -89.0]]]
+ }
+ },
+ {
+ name: "big polygon/rectangle covering both poles",
+ geo: {
+ type: "Polygon",
+ coordinates:
+ [[[-130.0, 89.0], [-120.0, 89.0], [-120.0, -89.0], [-130.0, -89.0], [-130.0, 89.0]]],
+ crs: strictCRS
+ }
+ },
+ {
+ name: "polygon (triangle) w/ hole at North pole",
+ geo: {
+ type: "Polygon",
+ coordinates: [
+ [[-120.0, 80.0], [0.0, 80.0], [120.0, 80.0], [-120.0, 80.0]],
+ [[-120.0, 88.0], [0.0, 88.0], [120.0, 88.0], [-120.0, 88.0]]
+ ]
+ }
+ },
+ {
+ name: "polygon with edge on equator",
+ geo: {
+ type: "Polygon",
+ coordinates: [[[-120.0, 0.0], [120.0, 0.0], [0.0, 90.0], [-120.0, 0.0]]]
+ }
+ },
+ {
+ name: "polygon just inside single hemisphere (Northern) - China, California, Europe",
+ geo: {
+ type: "Polygon",
+ coordinates:
+ [[[120.0, 0.000001], [-120.0, 0.000001], [0.0, 0.000001], [120.0, 0.000001]]]
+ }
+ },
+ {
+ name: "polygon inside Northern hemisphere",
+ geo: {
+ type: "Polygon",
+ coordinates: [[[120.0, 80.0], [-120.0, 80.0], [0.0, 80.0], [120.0, 80.0]]]
+ }
+ },
+ {
+ name: "polygon just inside a single hemisphere (Southern) - Pacific, Indonesia, Africa",
+ geo: {
+ type: "Polygon",
+ coordinates:
+ [[[-120.0, -0.000001], [120.0, -0.000001], [0.0, -0.000001], [-120.0, -0.000001]]]
+ }
+ },
+ {
+ name: "polygon inside Southern hemisphere",
+ geo: {
+ type: "Polygon",
+ coordinates: [[[-120.0, -80.0], [120.0, -80.0], [0.0, -80.0], [-120.0, -80.0]]]
+ }
+ },
+ {
+ name: "single point (MultiPoint): Palo Alto",
+ geo: {type: "MultiPoint", coordinates: [[-122.1611953, 37.4420407]]}
+ },
+ {
+ name: "multiple points(MultiPoint): PA, LA, 4corners, ATX, Mansfield, FL, Reston, NYC",
+ geo: {
+ type: "MultiPoint",
+ coordinates: [
+ [-122.1611953, 37.4420407],
+ [-118.283638, 34.028517],
+ [-109.045223, 36.9990835],
+ [-97.850404, 30.3921555],
+ [-97.904187, 30.395457],
+ [-86.600836, 30.398147],
+ [-77.357837, 38.9589935],
+ [-73.987723, 40.7575074]
+ ]
+ }
+ },
+ {
+ name: "two points (MultiPoint): Shenzhen, Guangdong, China",
+ geo: {type: "MultiPoint", coordinates: [[114.0538788, 22.5551603], [114.022837, 22.44395]]}
+ },
+ {
+ name: "two points (MultiPoint) but only one in: Shenzhen, Guangdong, China",
+ geo:
+ {type: "MultiPoint", coordinates: [[114.0538788, 22.5551603], [113.743858, 23.025815]]}
+ },
+ {
+ name: "multi line string: new zealand bays",
+ geo: {
+ type: "MultiLineString",
+ coordinates: [
+ [
+ [172.803869, -43.592789],
+ [172.659335, -43.620348],
+ [172.684038, -43.636528],
+ [172.820922, -43.605325]
+ ],
+ [
+ [172.830497, -43.607768],
+ [172.813263, -43.656319],
+ [172.823096, -43.660996],
+ [172.850943, -43.607609]
+ ],
+ [
+ [172.912056, -43.623148],
+ [172.887696, -43.670897],
+ [172.900469, -43.676178],
+ [172.931735, -43.622839]
+ ]
+ ]
+ }
+ },
+ {
+ name: "multi polygon: new zealand north and south islands",
+ geo: {
+ type: "MultiPolygon",
+ coordinates: [
+ [[
+ [165.773255, -45.902933],
+ [169.398419, -47.261538],
+ [174.672744, -41.767722],
+ [172.288845, -39.897992],
+ [165.773255, -45.902933]
+ ]],
+ [[
+ [173.166448, -39.778262],
+ [175.342744, -42.677333],
+ [179.913373, -37.224362],
+ [171.475953, -32.688871],
+ [173.166448, -39.778262]
+ ]]
+ ]
+ }
+ },
+ {
+ name: "geometry collection: point in Australia and triangle around Australia",
+ geo: {
+ type: "GeometryCollection",
+ geometries: [
+ {name: "center of Australia", type: "Point", coordinates: [133.985885, -27.240790]},
+ {
+ name: "Triangle around Australia",
+ type: "Polygon",
+ coordinates: [[
+ [97.423178, -44.735405],
+ [169.845050, -38.432287],
+ [143.824366, 15.966509],
+ [97.423178, -44.735405]
+ ]]
+ }
+ ]
+ }
}
];
-
// Test various polygons which are not queryable
var badPolys = [
{
- name: "Polygon with bad CRS",
- type: "Polygon",
- coordinates: [
- [ [ 114.0834046, 22.6648202 ],
- [ 113.8293457, 22.3819359 ],
- [ 114.2736054, 22.4047911 ],
- [ 114.0834046, 22.6648202 ] ]
- ],
- crs: badCRS
- },
- {
- name: "Open polygon < 3 sides",
- type: "Polygon",
- coordinates: [
- [ [ 114.0834046, 22.6648202 ],
- [ 113.8293457, 22.3819359 ] ]
- ],
- crs: strictCRS
- },
- {
- name: "Open polygon > 3 sides",
- type: "Polygon",
- coordinates: [
- [ [ 114.0834046, 22.6648202 ],
- [ 113.8293457, 22.3819359 ],
- [ 114.2736054, 22.4047911 ],
- [ 114.1, 22.5 ] ]
- ],
- crs: strictCRS
- },
- {
- name: "duplicate non-adjacent points",
- type: "Polygon",
- coordinates: [
- [ [ 114.0834046, 22.6648202 ],
- [ 113.8293457, 22.3819359 ],
- [ 114.2736054, 22.4047911 ],
- [ 113.8293457, 22.3819359 ],
- [ -65.9165954, 22.6648202 ],
- [ 114.0834046, 22.6648202 ] ]
- ],
- crs: strictCRS
- },
- {
- name: "One hole in polygon",
- type: "Polygon",
- coordinates: [
- [ [ -80.0, 30.0 ],
- [ -40.0, 30.0 ],
- [ -40.0, 60.0 ],
- [ -80.0, 60.0 ],
- [ -80.0, 30.0 ] ],
- [ [ -70.0, 40.0 ],
- [ -60.0, 40.0 ],
- [ -60.0, 50.0 ],
- [ -70.0, 50.0 ],
- [ -70.0, 40.0 ] ]
- ],
- crs: strictCRS
- },
- {
- name: "2 holes in polygon",
- type: "Polygon",
- coordinates: [
- [ [ -80.0, 30.0 ],
- [ -40.0, 30.0 ],
- [ -40.0, 60.0 ],
- [ -80.0, 60.0 ],
- [ -80.0, 30.0 ] ],
- [ [ -70.0, 40.0 ],
- [ -60.0, 40.0 ],
- [ -60.0, 50.0 ],
- [ -70.0, 50.0 ],
- [ -70.0, 40.0 ] ],
- [ [ -55.0, 40.0 ],
- [ -45.0, 40.0 ],
- [ -45.0, 50.0 ],
- [ -55.0, 50.0 ],
- [ -55.0, 40.0 ] ]
- ],
- crs: strictCRS
- },
- {
- name: "complex polygon (edges cross)",
- type: "Polygon",
- coordinates: [
- [ [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 10.0, 20.0 ],
- [ 20.0, 20.0 ],
- [ 10.0, 10.0 ] ]
- ],
- crs: strictCRS
+ name: "Polygon with bad CRS",
+ type: "Polygon",
+ coordinates: [[
+ [114.0834046, 22.6648202],
+ [113.8293457, 22.3819359],
+ [114.2736054, 22.4047911],
+ [114.0834046, 22.6648202]
+ ]],
+ crs: badCRS
+ },
+ {
+ name: "Open polygon < 3 sides",
+ type: "Polygon",
+ coordinates: [[[114.0834046, 22.6648202], [113.8293457, 22.3819359]]],
+ crs: strictCRS
+ },
+ {
+ name: "Open polygon > 3 sides",
+ type: "Polygon",
+ coordinates: [[
+ [114.0834046, 22.6648202],
+ [113.8293457, 22.3819359],
+ [114.2736054, 22.4047911],
+ [114.1, 22.5]
+ ]],
+ crs: strictCRS
+ },
+ {
+ name: "duplicate non-adjacent points",
+ type: "Polygon",
+ coordinates: [[
+ [114.0834046, 22.6648202],
+ [113.8293457, 22.3819359],
+ [114.2736054, 22.4047911],
+ [113.8293457, 22.3819359],
+ [-65.9165954, 22.6648202],
+ [114.0834046, 22.6648202]
+ ]],
+ crs: strictCRS
+ },
+ {
+ name: "One hole in polygon",
+ type: "Polygon",
+ coordinates: [
+ [[-80.0, 30.0], [-40.0, 30.0], [-40.0, 60.0], [-80.0, 60.0], [-80.0, 30.0]],
+ [[-70.0, 40.0], [-60.0, 40.0], [-60.0, 50.0], [-70.0, 50.0], [-70.0, 40.0]]
+ ],
+ crs: strictCRS
+ },
+ {
+ name: "2 holes in polygon",
+ type: "Polygon",
+ coordinates: [
+ [[-80.0, 30.0], [-40.0, 30.0], [-40.0, 60.0], [-80.0, 60.0], [-80.0, 30.0]],
+ [[-70.0, 40.0], [-60.0, 40.0], [-60.0, 50.0], [-70.0, 50.0], [-70.0, 40.0]],
+ [[-55.0, 40.0], [-45.0, 40.0], [-45.0, 50.0], [-55.0, 50.0], [-55.0, 40.0]]
+ ],
+ crs: strictCRS
+ },
+ {
+ name: "complex polygon (edges cross)",
+ type: "Polygon",
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [10.0, 20.0], [20.0, 20.0], [10.0, 10.0]]],
+ crs: strictCRS
}
];
-
// Closed polygons used in query (3, 4, 5, 6-sided)
var polys = [
{
- name: "3 sided closed polygon",
- type: "Polygon", // triangle
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 15.0, 17.0 ],
- [ 10.0, 10.0 ]
- ] ],
- crs: strictCRS,
- nW: 0, nI: 1
- },
- {
- name: "3 sided closed polygon (non-big)",
- type: "Polygon", // triangle
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 15.0, 17.0 ],
- [ 10.0, 10.0 ]
- ] ],
- nW: 0, nI: 1
- },
- {
- name: "4 sided closed polygon",
- type: "Polygon", // rectangle
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 20.0, 20.0 ],
- [ 10.0, 20.0 ],
- [ 10.0, 10.0 ]
- ] ],
- crs: strictCRS,
- nW: 0, nI: 1
- },
- {
- name: "4 sided closed polygon (non-big)",
- type: "Polygon", // rectangle
- coordinates: [
- [ [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 20.0, 20.0 ],
- [ 10.0, 20.0 ],
- [ 10.0, 10.0 ] ]
- ],
- nW: 0, nI: 1
- },
- {
- name: "5 sided closed polygon",
- type: "Polygon", // pentagon
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 25.0, 18.0 ],
- [ 15.0, 25.0 ],
- [ 5.0, 18.0 ],
- [ 10.0, 10.0 ]
- ] ],
- crs: strictCRS,
- nW: 0, nI: 1
- },
- {
- name: "5 sided closed polygon (non-big)",
- type: "Polygon", // pentagon
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 25.0, 18.0 ],
- [ 15.0, 25.0 ],
- [ 5.0, 18.0 ],
- [ 10.0, 10.0 ]
- ] ],
- nW: 0, nI: 1
- },
- {
- name: "6 sided closed polygon",
- type: "Polygon", // hexagon
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 15.0, 10.0 ],
- [ 22.0, 15.0 ],
- [ 15.0, 20.0 ],
- [ 10.0, 20.0 ],
- [ 7.0, 15.0 ],
- [ 10.0, 10.0 ]
- ] ],
- crs: strictCRS,
- nW: 0, nI: 1
- },
- {
- name: "6 sided closed polygon (non-big)",
- type: "Polygon", // hexagon
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 15.0, 10.0 ],
- [ 22.0, 15.0 ],
- [ 15.0, 20.0 ],
- [ 10.0, 20.0 ],
- [ 7.0, 15.0 ],
- [ 10.0, 10.0 ]
- ] ],
- nW: 0, nI: 1
+ name: "3 sided closed polygon",
+ type: "Polygon", // triangle
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [15.0, 17.0], [10.0, 10.0]]],
+ crs: strictCRS,
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "3 sided closed polygon (non-big)",
+ type: "Polygon", // triangle
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [15.0, 17.0], [10.0, 10.0]]],
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "4 sided closed polygon",
+ type: "Polygon", // rectangle
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [20.0, 20.0], [10.0, 20.0], [10.0, 10.0]]],
+ crs: strictCRS,
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "4 sided closed polygon (non-big)",
+ type: "Polygon", // rectangle
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [20.0, 20.0], [10.0, 20.0], [10.0, 10.0]]],
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "5 sided closed polygon",
+ type: "Polygon", // pentagon
+ coordinates:
+ [[[10.0, 10.0], [20.0, 10.0], [25.0, 18.0], [15.0, 25.0], [5.0, 18.0], [10.0, 10.0]]],
+ crs: strictCRS,
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "5 sided closed polygon (non-big)",
+ type: "Polygon", // pentagon
+ coordinates:
+ [[[10.0, 10.0], [20.0, 10.0], [25.0, 18.0], [15.0, 25.0], [5.0, 18.0], [10.0, 10.0]]],
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "6 sided closed polygon",
+ type: "Polygon", // hexagon
+ coordinates: [[
+ [10.0, 10.0],
+ [15.0, 10.0],
+ [22.0, 15.0],
+ [15.0, 20.0],
+ [10.0, 20.0],
+ [7.0, 15.0],
+ [10.0, 10.0]
+ ]],
+ crs: strictCRS,
+ nW: 0,
+ nI: 1
+ },
+ {
+ name: "6 sided closed polygon (non-big)",
+ type: "Polygon", // hexagon
+ coordinates: [[
+ [10.0, 10.0],
+ [15.0, 10.0],
+ [22.0, 15.0],
+ [15.0, 20.0],
+ [10.0, 20.0],
+ [7.0, 15.0],
+ [10.0, 10.0]
+ ]],
+ nW: 0,
+ nI: 1
}
];
@@ -628,23 +442,23 @@ function nGonGenerator(N, D, clockwise, LON, LAT) {
// edge lengths will be uneven with this quick & dirty approach
N = (N % 2 == 1) ? N + 1 : N;
var eps = 2 * D / N;
- var lat=0;
- var lon=0;
+ var lat = 0;
+ var lon = 0;
var pts = [];
var i = 0;
// produce longitude values in pairs
// traverse with left foot outside the circle (clockwise) to define the big polygon
for (i = 0, lat = D / 2; i <= N / 2; ++i, lat -= eps) {
- if ( lat < (-D / 2) ) {
+ if (lat < (-D / 2)) {
// clamp lat at the bottom of the range
lat = (-D / 2);
}
- lon = Math.sqrt( (D / 2) * (D / 2) - (lat * lat) );
+ lon = Math.sqrt((D / 2) * (D / 2) - (lat * lat));
newlat = lat + LAT;
newlon = lon + LON;
conjugateLon = LON - lon;
- pts[i] = [ newlon, newlat ];
- pts[N-i] = [ conjugateLon, newlat ];
+ pts[i] = [newlon, newlat];
+ pts[N - i] = [conjugateLon, newlat];
}
// Reverse points if counterclockwise
if (!clockwise) {
@@ -674,83 +488,67 @@ var totalObjects = getNumberOfValidObjects(objects);
var nsidedPolys = [
// Big Polygon centered on 0, 0
{
- name: "4 sided polygon centered on 0, 0",
- type: "Polygon",
- coordinates: [
- nGonGenerator(4, 30, true, 0, 0)
- ],
- crs: strictCRS,
- nW: totalObjects - 3,
- nI: totalObjects
+ name: "4 sided polygon centered on 0, 0",
+ type: "Polygon",
+ coordinates: [nGonGenerator(4, 30, true, 0, 0)],
+ crs: strictCRS,
+ nW: totalObjects - 3,
+ nI: totalObjects
},
// Non-big polygons have counterclockwise coordinates
{
- name: "4 sided polygon centered on 0, 0 (non-big)",
- type: "Polygon",
- coordinates: [
- nGonGenerator(4, 30, false, 0, 0)
- ],
- nW: 0,
- nI: 3
- },
- {
- name: "100 sided polygon centered on 0, 0",
- type: "Polygon",
- coordinates: [
- nGonGenerator(100, 20, true, 0, 0)
- ],
- crs: strictCRS,
- nW: totalObjects - 3,
- nI: totalObjects
- },
- {
- name: "100 sided polygon centered on 0, 0 (non-big)",
- type: "Polygon",
- coordinates: [
- nGonGenerator(100, 20, false, 0, 0)
- ],
- nW: 0,
- nI: 3
- },
- {
- name: "5000 sided polygon centered on 0, 0 (non-big)",
- type: "Polygon",
- coordinates: [
- nGonGenerator(5000, 89.99, false, 0, 0)
- ],
- nW: 0,
- nI: 3
- },
- {
- name: "25000 sided polygon centered on 0, 0",
- type: "Polygon",
- coordinates: [
- nGonGenerator(25000, 89.99, true, 0, 0)
- ],
- crs: strictCRS,
- nW: totalObjects - 3,
- nI: totalObjects
+ name: "4 sided polygon centered on 0, 0 (non-big)",
+ type: "Polygon",
+ coordinates: [nGonGenerator(4, 30, false, 0, 0)],
+ nW: 0,
+ nI: 3
+ },
+ {
+ name: "100 sided polygon centered on 0, 0",
+ type: "Polygon",
+ coordinates: [nGonGenerator(100, 20, true, 0, 0)],
+ crs: strictCRS,
+ nW: totalObjects - 3,
+ nI: totalObjects
+ },
+ {
+ name: "100 sided polygon centered on 0, 0 (non-big)",
+ type: "Polygon",
+ coordinates: [nGonGenerator(100, 20, false, 0, 0)],
+ nW: 0,
+ nI: 3
+ },
+ {
+ name: "5000 sided polygon centered on 0, 0 (non-big)",
+ type: "Polygon",
+ coordinates: [nGonGenerator(5000, 89.99, false, 0, 0)],
+ nW: 0,
+ nI: 3
+ },
+ {
+ name: "25000 sided polygon centered on 0, 0",
+ type: "Polygon",
+ coordinates: [nGonGenerator(25000, 89.99, true, 0, 0)],
+ crs: strictCRS,
+ nW: totalObjects - 3,
+ nI: totalObjects
},
// Big polygon centered on Shenzen
{
- name: "4 sided polygon centered on Shenzen",
- type: "Polygon",
- coordinates: [
- nGonGenerator(4, 5, true, 114.1, 22.55)
- ],
- crs: strictCRS,
- nW: totalObjects - 3,
- nI: totalObjects - 2
- },
- {
- name: "4 sided polygon centered on Shenzen (non-big)",
- type: "Polygon",
- coordinates: [
- nGonGenerator(4, 5, false, 114.1, 22.55)
- ],
- crs: strictCRS,
- nW: 2,
- nI: 3
+ name: "4 sided polygon centered on Shenzen",
+ type: "Polygon",
+ coordinates: [nGonGenerator(4, 5, true, 114.1, 22.55)],
+ crs: strictCRS,
+ nW: totalObjects - 3,
+ nI: totalObjects - 2
+ },
+ {
+ name: "4 sided polygon centered on Shenzen (non-big)",
+ type: "Polygon",
+ coordinates: [nGonGenerator(4, 5, false, 114.1, 22.55)],
+ crs: strictCRS,
+ nW: 2,
+ nI: 3
}
];
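
The n-sided polygons above all get their rings from the nGonGenerator helper shown in the previous hunk: clockwise rings (paired with strictCRS) describe the big polygons, while the reversed, counterclockwise rings describe the ordinary small interior. A minimal shell sketch, assuming nGonGenerator as defined above, showing that the generated ring is closed (its first and last vertices coincide), as GeoJSON polygon rings require:

// Hedged sketch: a clockwise 4-gon of diameter 30 degrees centered on (0, 0),
// matching the first entry of nsidedPolys above.
var ring = nGonGenerator(4, 30, true, 0, 0);
assert.eq(5, ring.length, "a closed 4-gon has N + 1 vertices");
assert.eq(ring[0], ring[ring.length - 1], "ring is closed");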
@@ -768,19 +566,11 @@ objects.forEach(function(o) {
});
// Try creating other index types
-assert.commandWorked(
- coll.ensureIndex({geo: "2dsphere", a: 1}),
- "compound index, geo");
+assert.commandWorked(coll.ensureIndex({geo: "2dsphere", a: 1}), "compound index, geo");
// These other index types will fail because of the GeoJSON documents
-assert.commandFailed(
- coll.ensureIndex({geo: "2dsphere", a: "text"}),
- "compound index, geo & text");
-assert.commandFailed(
- coll.ensureIndex({geo: "geoHaystack" }, {bucketSize:1}),
- "geoHaystack index");
-assert.commandFailed(
- coll.ensureIndex({geo: "2d"}),
- "2d index");
+assert.commandFailed(coll.ensureIndex({geo: "2dsphere", a: "text"}), "compound index, geo & text");
+assert.commandFailed(coll.ensureIndex({geo: "geoHaystack"}, {bucketSize: 1}), "geoHaystack index");
+assert.commandFailed(coll.ensureIndex({geo: "2d"}), "2d index");
totalObjects = coll.count();
@@ -794,7 +584,7 @@ indexes.forEach(function(index) {
if (index != "none") {
// Create index
- assert.commandWorked(coll.ensureIndex({geo: index}), "create " + index + " index");
+ assert.commandWorked(coll.ensureIndex({geo: index}), "create " + index + " index");
}
// These polygons should not be queryable
@@ -802,24 +592,23 @@ indexes.forEach(function(index) {
// within
assert.throws(function() {
- coll.count({geo: {$geoWithin: {$geometry: p}}});},
- null,
- "within " + p.name);
+ coll.count({geo: {$geoWithin: {$geometry: p}}});
+ }, null, "within " + p.name);
// intersection
assert.throws(function() {
- coll.count({geo: {$geoIntersects: {$geometry: p}}});},
- null,
- "intersects " + p.name);
+ coll.count({geo: {$geoIntersects: {$geometry: p}}});
+ }, null, "intersects " + p.name);
});
-
// Tests for closed polygons
polys.forEach(function(p) {
// geoWithin query
var docArray = [];
- var q = {geo: {$geoWithin: {$geometry: p}}};
+ var q = {
+ geo: {$geoWithin: {$geometry: p}}
+ };
// Test query in aggregate
docArray = coll.aggregate({$match: q}).toArray();
assert.eq(p.nW, docArray.length, "aggregate within " + p.name);
@@ -827,7 +616,9 @@ indexes.forEach(function(index) {
assert.eq(p.nW, docArray.length, "within " + p.name);
// geoIntersects query
- q = {geo: {$geoIntersects: {$geometry: p}}};
+ q = {
+ geo: {$geoIntersects: {$geometry: p}}
+ };
// Test query in aggregate
docArray = coll.aggregate({$match: q}).toArray();
assert.eq(p.nI, docArray.length, "aggregate intersects " + p.name);
@@ -836,7 +627,7 @@ indexes.forEach(function(index) {
// Update on matching docs
var result = coll.update(q, {$set: {stored: ObjectId()}}, {multi: true});
// only check nModified if write commands are enabled
- if ( coll.getMongo().writeMode() == "commands" ) {
+ if (coll.getMongo().writeMode() == "commands") {
assert.eq(p.nI, result.nModified, "update " + p.name);
}
// Remove & restore matching docs
@@ -853,14 +644,11 @@ indexes.forEach(function(index) {
nsidedPolys.forEach(function(p) {
// within
- assert.eq(p.nW,
- coll.count({geo: {$geoWithin: {$geometry: p}}}),
- "within " + p.name);
+ assert.eq(p.nW, coll.count({geo: {$geoWithin: {$geometry: p}}}), "within " + p.name);
// intersects
- assert.eq(p.nI,
- coll.count({geo: {$geoIntersects: {$geometry: p}}}),
- "intersection " + p.name);
+ assert.eq(
+ p.nI, coll.count({geo: {$geoIntersects: {$geometry: p}}}), "intersection " + p.name);
});
diff --git a/jstests/core/geo_big_polygon3.js b/jstests/core/geo_big_polygon3.js
index bf155c842ae..049064ebc5b 100644
--- a/jstests/core/geo_big_polygon3.js
+++ b/jstests/core/geo_big_polygon3.js
@@ -12,21 +12,15 @@
var crs84CRS = {
type: "name",
- properties: {
- name: "urn:ogc:def:crs:OGC:1.3:CRS84"
- }
+ properties: {name: "urn:ogc:def:crs:OGC:1.3:CRS84"}
};
var epsg4326CRS = {
type: "name",
- properties: {
- name: "EPSG:4326"
- }
+ properties: {name: "EPSG:4326"}
};
var strictCRS = {
type: "name",
- properties: {
- name: "urn:x-mongodb:crs:strictwinding:EPSG:4326"
- }
+ properties: {name: "urn:x-mongodb:crs:strictwinding:EPSG:4326"}
};
var coll = db.geo_bigpoly_edgecases;
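
The strictCRS name above, urn:x-mongodb:crs:strictwinding:EPSG:4326, is what marks a GeoJSON polygon as a "big polygon": the winding order of the ring decides which side is the interior, so a ring can describe a region larger than a hemisphere. A minimal sketch of the query shape this file exercises, assuming the strictCRS variable defined above (bigTriangle is an illustrative name; the file later defines the same geometry as bigPoly):

var bigTriangle = {
    type: "Polygon",  // winding matters because of strictCRS
    coordinates: [[[10.0, 10.0], [20.0, 10.0], [15.0, 17.0], [10.0, 10.0]]],
    crs: strictCRS
};
// A big polygon works as a $geoWithin / $geoIntersects query geometry (the
// previous file checks the counts), while $nearSphere and the geoNear command
// reject it, which is what the assertions further down verify.
coll.count({geo: {$geoIntersects: {$geometry: bigTriangle}}});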
@@ -35,59 +29,39 @@ coll.drop();
// Edge cases producing error
// These non-polygon objects cannot be queried because they are strictCRS
var objects = [
+ {name: "point with strictCRS", type: "Point", coordinates: [-97.9, 0], crs: strictCRS},
{
- name: "point with strictCRS",
- type: "Point",
- coordinates: [ -97.9 , 0 ],
- crs: strictCRS
- },
- {
- name: "multipoint with strictCRS",
- type: "MultiPoint",
- coordinates: [
- [ -97.9 , 0 ],
- [ -10.9 , 0 ]
- ],
- crs: strictCRS
+ name: "multipoint with strictCRS",
+ type: "MultiPoint",
+ coordinates: [[-97.9, 0], [-10.9, 0]],
+ crs: strictCRS
},
{
- name: "line with strictCRS",
- type: "LineString",
- coordinates: [
- [ -122.1611953, 37.4420407 ],
- [ -118.283638, 34.028517 ]
- ],
- crs: strictCRS
+ name: "line with strictCRS",
+ type: "LineString",
+ coordinates: [[-122.1611953, 37.4420407], [-118.283638, 34.028517]],
+ crs: strictCRS
}
];
-
objects.forEach(function(o) {
// within
assert.throws(function() {
- coll.count({geo: {$geoWithin: {$geometry: o}}});},
- null,
- "within " + o.name);
+ coll.count({geo: {$geoWithin: {$geometry: o}}});
+ }, null, "within " + o.name);
// intersection
assert.throws(function() {
- coll.count({geo: {$geoIntersects: {$geometry: o}}});},
- null,
- "intersection " + o.name);
+ coll.count({geo: {$geoIntersects: {$geometry: o}}});
+ }, null, "intersection " + o.name);
});
-
// Big Polygon query for $nearSphere & geoNear should fail
var bigPoly = {
name: "3 sided closed polygon",
type: "Polygon", // triangle
- coordinates: [ [
- [ 10.0, 10.0 ],
- [ 20.0, 10.0 ],
- [ 15.0, 17.0 ],
- [ 10.0, 10.0 ]
- ] ],
+ coordinates: [[[10.0, 10.0], [20.0, 10.0], [15.0, 17.0], [10.0, 10.0]]],
crs: strictCRS
};
@@ -96,39 +70,29 @@ assert.commandWorked(coll.ensureIndex({geo: "2dsphere"}), "2dsphere index");
// $nearSphere on big polygon should fail
assert.throws(function() {
- coll.count({geo: {$nearSphere: {$geometry: bigPoly}}});},
- null,
- "nearSphere " + bigPoly.name);
+ coll.count({geo: {$nearSphere: {$geometry: bigPoly}}});
+}, null, "nearSphere " + bigPoly.name);
// geoNear on big polygon should fail
-assert.commandFailed(
- db.runCommand({
- geoNear: coll.getName(),
- near: bigPoly,
- spherical: true
- }),
- "geoNear " + bigPoly.name);
+assert.commandFailed(db.runCommand({geoNear: coll.getName(), near: bigPoly, spherical: true}),
+ "geoNear " + bigPoly.name);
// aggregate $geoNear on big polygon should fail
-assert.commandFailed(
- db.runCommand({
- aggregate: coll.getName(),
- pipeline: [
- {$geoNear:
- {near: bigPoly, distanceField: "geo.calculated", spherical: true}}]
- }),
- "aggregate $geoNear " + bigPoly.name);
+assert.commandFailed(db.runCommand({
+ aggregate: coll.getName(),
+ pipeline: [{$geoNear: {near: bigPoly, distanceField: "geo.calculated", spherical: true}}]
+}),
+ "aggregate $geoNear " + bigPoly.name);
// mapReduce on big polygon should work
-assert.commandWorked(
- db.runCommand({
- mapReduce: coll.getName(),
- map: function() {},
- reduce: function() {},
- query: {geo: {$geoIntersects: {$geometry: bigPoly}}},
- out: {inline:1 },
- }),
- "mapReduce " + bigPoly.name);
+assert.commandWorked(db.runCommand({
+ mapReduce: coll.getName(),
+ map: function() {},
+ reduce: function() {},
+ query: {geo: {$geoIntersects: {$geometry: bigPoly}}},
+ out: {inline: 1},
+}),
+ "mapReduce " + bigPoly.name);
// Tests that stored objects with strictCRS will be ignored by query
// If strictCRS is removed from the document then they will be found
@@ -138,48 +102,37 @@ assert.commandWorked(coll.dropIndex({geo: "2dsphere"}), "drop 2dsphere index");
objects = [
{
- name: "NYC Times Square - point",
- geo: {
- type: "Point",
- coordinates: [ -73.9857 , 40.7577 ],
- crs: strictCRS
- }
+ name: "NYC Times Square - point",
+ geo: {type: "Point", coordinates: [-73.9857, 40.7577], crs: strictCRS}
},
{
- name: "NYC CitiField & JFK - multipoint",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: strictCRS
- }
+ name: "NYC CitiField & JFK - multipoint",
+ geo: {
+ type: "MultiPoint",
+ coordinates: [[-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: strictCRS
+ }
},
{
- name: "NYC - Times Square to CitiField to JFK - line/string",
- geo: {
- type: "LineString",
- coordinates: [
- [ -73.9857 , 40.7577 ],
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: strictCRS
- }
+ name: "NYC - Times Square to CitiField to JFK - line/string",
+ geo: {
+ type: "LineString",
+ coordinates: [[-73.9857, 40.7577], [-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: strictCRS
+ }
},
{
- name: "NYC - Times Square to CitiField to JFK to Times Square - polygon",
- geo: {
- type: "Polygon",
- coordinates: [
- [ [ -73.9857 , 40.7577 ],
- [ -73.7789 , 40.6397 ],
- [ -73.8458 , 40.7569 ],
- [ -73.9857 , 40.7577 ] ]
- ],
- crs: strictCRS
- }
+ name: "NYC - Times Square to CitiField to JFK to Times Square - polygon",
+ geo: {
+ type: "Polygon",
+ coordinates: [[
+ [-73.9857, 40.7577],
+ [-73.7789, 40.6397],
+ [-73.8458, 40.7569],
+ [-73.9857, 40.7577]
+ ]],
+ crs: strictCRS
+ }
}
];
@@ -193,33 +146,25 @@ objects.forEach(function(o) {
var poly = {
name: "4 sided polygon around NYC",
type: "Polygon", // triangle
- coordinates: [ [
- [ -74.5, 40.5 ],
- [ -72.0, 40.5 ],
- [ -72.00, 41.0 ],
- [ -74.5, 41.0 ],
- [ -74.5, 40.5 ]
- ] ],
+ coordinates: [[[-74.5, 40.5], [-72.0, 40.5], [-72.00, 41.0], [-74.5, 41.0], [-74.5, 40.5]]],
crs: strictCRS
};
assert.eq(0,
- coll.count({geo: {$geoWithin: {$geometry: poly}}}),
- "ignore objects with strictCRS within");
+ coll.count({geo: {$geoWithin: {$geometry: poly}}}),
+ "ignore objects with strictCRS within");
assert.eq(0,
- coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
- "ignore objects with strictCRS intersects");
+ coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
+ "ignore objects with strictCRS intersects");
// Now remove the strictCRS and find all the objects
-coll.update({},{$unset: {"geo.crs": ""}}, {multi: true});
+coll.update({}, {$unset: {"geo.crs": ""}}, {multi: true});
var totalDocs = coll.count();
+assert.eq(totalDocs, coll.count({geo: {$geoWithin: {$geometry: poly}}}), "no strictCRS within");
assert.eq(totalDocs,
- coll.count({geo: {$geoWithin: {$geometry: poly}}}),
- "no strictCRS within");
-assert.eq(totalDocs,
- coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
- "no strictCRS intersects");
+ coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
+ "no strictCRS intersects");
// Clear collection
coll.remove({});
@@ -228,66 +173,44 @@ coll.remove({});
// Objects should be found from query
objects = [
{
- name: "NYC Times Square - point crs84CRS",
- geo: {
- type: "Point",
- coordinates: [ -73.9857 , 40.7577 ],
- crs: crs84CRS
- }
+ name: "NYC Times Square - point crs84CRS",
+ geo: {type: "Point", coordinates: [-73.9857, 40.7577], crs: crs84CRS}
},
{
- name: "NYC Times Square - point epsg4326CRS",
- geo: {
- type: "Point",
- coordinates: [ -73.9857 , 40.7577 ],
- crs: epsg4326CRS
- }
+ name: "NYC Times Square - point epsg4326CRS",
+ geo: {type: "Point", coordinates: [-73.9857, 40.7577], crs: epsg4326CRS}
},
{
- name: "NYC CitiField & JFK - multipoint crs84CRS",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: crs84CRS
- }
+ name: "NYC CitiField & JFK - multipoint crs84CRS",
+ geo: {
+ type: "MultiPoint",
+ coordinates: [[-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: crs84CRS
+ }
},
{
- name: "NYC CitiField & JFK - multipoint epsg4326CRS",
- geo: {
- type: "MultiPoint",
- coordinates: [
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: epsg4326CRS
- }
+ name: "NYC CitiField & JFK - multipoint epsg4326CRS",
+ geo: {
+ type: "MultiPoint",
+ coordinates: [[-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: epsg4326CRS
+ }
},
{
- name: "NYC - Times Square to CitiField to JFK - line/string crs84CRS",
- geo: {
- type: "LineString",
- coordinates: [
- [ -73.9857 , 40.7577 ],
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: crs84CRS
- }
+ name: "NYC - Times Square to CitiField to JFK - line/string crs84CRS",
+ geo: {
+ type: "LineString",
+ coordinates: [[-73.9857, 40.7577], [-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: crs84CRS
+ }
},
{
- name: "NYC - Times Square to CitiField to JFK - line/string epsg4326CRS",
- geo: {
- type: "LineString",
- coordinates: [
- [ -73.9857 , 40.7577 ],
- [ -73.8458 , 40.7569 ],
- [ -73.7789 , 40.6397 ]
- ],
- crs: epsg4326CRS
- }
+ name: "NYC - Times Square to CitiField to JFK - line/string epsg4326CRS",
+ geo: {
+ type: "LineString",
+ coordinates: [[-73.9857, 40.7577], [-73.8458, 40.7569], [-73.7789, 40.6397]],
+ crs: epsg4326CRS
+ }
}
];
@@ -300,19 +223,18 @@ objects.forEach(function(o) {
totalDocs = coll.count();
assert.eq(totalDocs,
- coll.count({geo: {$geoWithin: {$geometry: poly}}}),
- "crs84CRS or epsg4326CRS within");
+ coll.count({geo: {$geoWithin: {$geometry: poly}}}),
+ "crs84CRS or epsg4326CRS within");
assert.eq(totalDocs,
- coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
- "crs84CRS or epsg4326CRS intersects");
+ coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
+ "crs84CRS or epsg4326CRS intersects");
// Add index and look again for stored point & spherical CRS documents
assert.commandWorked(coll.ensureIndex({geo: "2dsphere"}), "2dsphere index");
assert.eq(totalDocs,
- coll.count({geo: {$geoWithin: {$geometry: poly}}}),
- "2dsphere index - crs84CRS or epsg4326CRS within");
+ coll.count({geo: {$geoWithin: {$geometry: poly}}}),
+ "2dsphere index - crs84CRS or epsg4326CRS within");
assert.eq(totalDocs,
- coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
- "2dsphere index - crs84CRS or epsg4326CRS intersects");
-
+ coll.count({geo: {$geoIntersects: {$geometry: poly}}}),
+ "2dsphere index - crs84CRS or epsg4326CRS intersects");
diff --git a/jstests/core/geo_borders.js b/jstests/core/geo_borders.js
index 4768ff8503f..f0a47339591 100644
--- a/jstests/core/geo_borders.js
+++ b/jstests/core/geo_borders.js
@@ -9,9 +9,9 @@ max = 1;
step = 1;
numItems = 0;
-for ( var x = min; x <= max; x += step ) {
- for ( var y = min; y <= max; y += step ) {
- t.insert( { loc : { x : x, y : y } } );
+for (var x = min; x <= max; x += step) {
+ for (var y = min; y <= max; y += step) {
+ t.insert({loc: {x: x, y: y}});
numItems++;
}
}
@@ -20,13 +20,12 @@ overallMin = -1;
overallMax = 1;
// Create a point index slightly smaller than the points we have
-var res = t.ensureIndex({ loc: "2d" },
- { max: overallMax - epsilon / 2,
- min: overallMin + epsilon / 2 });
+var res =
+ t.ensureIndex({loc: "2d"}, {max: overallMax - epsilon / 2, min: overallMin + epsilon / 2});
assert.commandFailed(res);
// Create a point index only slightly bigger than the points we have
-res = t.ensureIndex( { loc : "2d" }, { max : overallMax + epsilon, min : overallMin - epsilon } );
+res = t.ensureIndex({loc: "2d"}, {max: overallMax + epsilon, min: overallMin - epsilon});
assert.commandWorked(res);
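
The min/max options bound the coordinate range a 2d index will accept, which is why the first ensureIndex above fails: some of the already-inserted grid points fall outside the shrunken bounds. A hedged sketch of the same constraint at insert time, assuming the padded index created above (illustrative only; the test itself never inserts this document):

// A point outside the index bounds cannot be indexed, so the write is rejected.
assert.writeError(t.insert({loc: {x: overallMax + 1, y: 0}}));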
// ************
@@ -34,86 +33,129 @@ assert.commandWorked(res);
// ************
// If the bounds are bigger than the box itself, just clip at the borders
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMin - 2 * epsilon, overallMin - 2 * epsilon ],
- [ overallMax + 2 * epsilon, overallMax + 2 * epsilon ] ] } } } ).count() );
+assert.eq(numItems,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMin - 2 * epsilon, overallMin - 2 * epsilon],
+ [overallMax + 2 * epsilon, overallMax + 2 * epsilon]
+ ]
+ }
+ }
+ }).count());
// Check this works also for bounds where only a single dimension is off-bounds
-assert.eq( numItems - 5, t.find(
- { loc : { $within : { $box : [
- [ overallMin - 2 * epsilon, overallMin - 0.5 * epsilon ],
- [ overallMax - epsilon, overallMax - epsilon ] ] } } } ).count() );
+assert.eq(numItems - 5,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMin - 2 * epsilon, overallMin - 0.5 * epsilon],
+ [overallMax - epsilon, overallMax - epsilon]
+ ]
+ }
+ }
+ }).count());
// Make sure we can get at least close to the bounds of the index
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ],
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
+assert.eq(numItems,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMin - epsilon / 2, overallMin - epsilon / 2],
+ [overallMax + epsilon / 2, overallMax + epsilon / 2]
+ ]
+ }
+ }
+ }).count());
// Make sure we can get at least close to the bounds of the index
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() );
+assert.eq(numItems,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMax + epsilon / 2, overallMax + epsilon / 2],
+ [overallMin - epsilon / 2, overallMin - epsilon / 2]
+ ]
+ }
+ }
+ }).count());
// Check that swapping min/max has good behavior
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMax + epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMin - epsilon / 2 ] ] } } } ).count() );
-
-assert.eq( numItems, t.find(
- { loc : { $within : { $box : [
- [ overallMax + epsilon / 2, overallMin - epsilon / 2 ],
- [ overallMin - epsilon / 2, overallMax + epsilon / 2 ] ] } } } ).count() );
+assert.eq(numItems,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMax + epsilon / 2, overallMax + epsilon / 2],
+ [overallMin - epsilon / 2, overallMin - epsilon / 2]
+ ]
+ }
+ }
+ }).count());
+
+assert.eq(numItems,
+ t.find({
+ loc: {
+ $within: {
+ $box: [
+ [overallMax + epsilon / 2, overallMin - epsilon / 2],
+ [overallMin - epsilon / 2, overallMax + epsilon / 2]
+ ]
+ }
+ }
+ }).count());
// **************
// Circle tests
// **************
-center = ( overallMax + overallMin ) / 2;
-center = [ center, center ];
+center = (overallMax + overallMin) / 2;
+center = [center, center];
radius = overallMax;
-offCenter = [ center[0] + radius, center[1] + radius ];
-onBounds = [ offCenter[0] + epsilon, offCenter[1] + epsilon ];
-offBounds = [ onBounds[0] + epsilon, onBounds[1] + epsilon ];
-onBoundsNeg = [ -onBounds[0], -onBounds[1] ];
+offCenter = [center[0] + radius, center[1] + radius];
+onBounds = [offCenter[0] + epsilon, offCenter[1] + epsilon];
+offBounds = [onBounds[0] + epsilon, onBounds[1] + epsilon];
+onBoundsNeg = [-onBounds[0], -onBounds[1]];
// Make sure we can get all points when radius is exactly at full bounds
-assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + epsilon ] } } } ).count() );
+assert.lt(0, t.find({loc: {$within: {$center: [center, radius + epsilon]}}}).count());
// Make sure we can get points when radius is over full bounds
-assert.lt( 0, t.find( { loc : { $within : { $center : [ center, radius + 2 * epsilon ] } } } ).count() );
+assert.lt(0, t.find({loc: {$within: {$center: [center, radius + 2 * epsilon]}}}).count());
// Make sure we can get points when radius is over full bounds, off-centered
-assert.lt( 0, t.find( { loc : { $within : { $center : [ offCenter, radius + 2 * epsilon ] } } } ).count() );
+assert.lt(0, t.find({loc: {$within: {$center: [offCenter, radius + 2 * epsilon]}}}).count());
// Make sure we get correct corner point when center is in bounds
// (x bounds wrap, so could get other corner)
-cornerPt = t.findOne( { loc : { $within : { $center : [ offCenter, step / 2 ] } } } );
-assert.eq( cornerPt.loc.y, overallMax );
+cornerPt = t.findOne({loc: {$within: {$center: [offCenter, step / 2]}}});
+assert.eq(cornerPt.loc.y, overallMax);
// Make sure we get correct corner point when center is on bounds
// NOTE: Only valid points on MIN bounds
-cornerPt = t
- .findOne( { loc : { $within : { $center : [ onBoundsNeg, Math.sqrt( 2 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
-assert.eq( cornerPt.loc.y, overallMin );
+cornerPt = t.findOne(
+ {loc: {$within: {$center: [onBoundsNeg, Math.sqrt(2 * epsilon * epsilon) + (step / 2)]}}});
+assert.eq(cornerPt.loc.y, overallMin);
// Make sure we can't get corner point when center is over bounds
// TODO: SERVER-5800 clean up wrapping rules for different CRS queries - not sure this is an error
/*
assert.throws(function(){
- t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
+ t.findOne( { loc : { $within : { $center : [ offBounds, Math.sqrt( 8 * epsilon * epsilon ) + (
+step / 2 ) ] } } } );
});
*/
-
// Make sure we can't get corner point when center is on max bounds
// Broken - see SERVER-13581
-//assert.throws(function(){
-// t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + ( step / 2 ) ] } } } );
+// assert.throws(function(){
+// t.findOne( { loc : { $within : { $center : [ onBounds, Math.sqrt( 8 * epsilon * epsilon ) + (
+// step / 2 ) ] } } } );
//});
// ***********
@@ -121,10 +163,10 @@ assert.throws(function(){
// ***********
// Make sure we can get all nearby points to point in range
-assert.eq( overallMax, t.find( { loc : { $near : offCenter } } ).next().loc.y );
+assert.eq(overallMax, t.find({loc: {$near: offCenter}}).next().loc.y);
// Make sure we can get all nearby points to point on boundary
-assert.eq( overallMin, t.find( { loc : { $near : onBoundsNeg } } ).next().loc.y );
+assert.eq(overallMin, t.find({loc: {$near: onBoundsNeg}}).next().loc.y);
// Make sure we can't get all nearby points to point over boundary
// TODO: SERVER-9986 clean up wrapping rules for different CRS queries - not sure this is an error
@@ -135,33 +177,35 @@ assert.throws(function(){
*/
// Make sure we can't get all nearby points to point on max boundary
-//Broken - see SERVER-13581
-//assert.throws(function(){
+// Broken - see SERVER-13581
+// assert.throws(function(){
// t.findOne( { loc : { $near : onBoundsNeg } } );
//});
// Make sure we can get all nearby points within one step (4 points in top
// corner)
-assert.eq( 4, t.find( { loc : { $near : offCenter, $maxDistance : step * 1.9 } } ).count() );
+assert.eq(4, t.find({loc: {$near: offCenter, $maxDistance: step * 1.9}}).count());
// **************
// Command Tests
// **************
// Make sure we can get all nearby points to point in range
-assert.eq( overallMax, db.runCommand( { geoNear : "borders", near : offCenter } ).results[0].obj.loc.y );
+assert.eq(overallMax, db.runCommand({geoNear: "borders", near: offCenter}).results[0].obj.loc.y);
// Make sure we can get all nearby points to point on boundary
-assert.eq( overallMin, db.runCommand( { geoNear : "borders", near : onBoundsNeg } ).results[0].obj.loc.y );
+assert.eq(overallMin, db.runCommand({geoNear: "borders", near: onBoundsNeg}).results[0].obj.loc.y);
// Make sure we can't get all nearby points to point over boundary
-//TODO: SERVER-9986 clean up wrapping rules for different CRS queries - not sure this is an error
+// TODO: SERVER-9986 clean up wrapping rules for different CRS queries - not sure this is an error
/*
assert.commandFailed( db.runCommand( { geoNear : "borders", near : offBounds } ));
*/
// Make sure we can't get all nearby points to point on max boundary
-assert.commandWorked( db.runCommand( { geoNear : "borders", near : onBounds } ));
+assert.commandWorked(db.runCommand({geoNear: "borders", near: onBounds}));
// Make sure we can get all nearby points within one step (4 points in top
// corner)
-assert.eq( 4, db.runCommand( { geoNear : "borders", near : offCenter, maxDistance : step * 1.5 } ).results.length );
+assert.eq(4,
+ db.runCommand({geoNear: "borders", near: offCenter, maxDistance: step * 1.5})
+ .results.length);
diff --git a/jstests/core/geo_box1.js b/jstests/core/geo_box1.js
index ee21f02df0c..45e9aab9118 100644
--- a/jstests/core/geo_box1.js
+++ b/jstests/core/geo_box1.js
@@ -3,46 +3,42 @@ t = db.geo_box1;
t.drop();
num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
+for (x = 0; x <= 20; x++) {
+ for (y = 0; y <= 20; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
}
}
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-searches = [
- [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
- [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
-];
+searches = [[[1, 2], [4, 5]], [[1, 1], [2, 2]], [[0, 2], [4, 5]], [[1, 1], [2, 8]], ];
-
-for ( i=0; i<searches.length; i++ ){
+for (i = 0; i < searches.length; i++) {
b = searches[i];
- //printjson( b );
-
- q = { loc : { $within : { $box : b } } };
- numWanetd = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
- assert.eq( numWanetd , t.find(q).itcount() , "itcount: " + tojson( q ) );
- printjson( t.find(q).explain() );
+ // printjson( b );
+
+ q = {
+ loc: {$within: {$box: b}}
+ };
+ numWanetd = (1 + b[1][0] - b[0][0]) * (1 + b[1][1] - b[0][1]);
+ assert.eq(numWanetd, t.find(q).itcount(), "itcount: " + tojson(q));
+ printjson(t.find(q).explain());
}
+assert.eq(0, t.find({loc: {$within: {$box: [[100, 100], [110, 110]]}}}).itcount(), "E1");
+assert.eq(0, t.find({loc: {$within: {$box: [[100, 100], [110, 110]]}}}).count(), "E2");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).count(), "E3");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).itcount(), "E4");
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" );
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" );
-
-
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" );
-
-assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" );
+assert.eq(57, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).limit(57).itcount(), "E5");
// SERVER-13621
// Detect and invert the $box coordinates when they're specified incorrectly.
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 110 , 110 ], [ 0 , 0 ] ] } } } ).count() , "E5" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 110 , 0 ], [ 0 , 110 ] ] } } } ).count() , "E6" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 110 ], [ 110 , 0 ] ] } } } ).count() , "E7" );
+assert.eq(num, t.find({loc: {$within: {$box: [[110, 110], [0, 0]]}}}).count(), "E5");
+assert.eq(num, t.find({loc: {$within: {$box: [[110, 0], [0, 110]]}}}).count(), "E6");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 110], [110, 0]]}}}).count(), "E7");
diff --git a/jstests/core/geo_box1_noindex.js b/jstests/core/geo_box1_noindex.js
index abf21266dac..36e932105a6 100644
--- a/jstests/core/geo_box1_noindex.js
+++ b/jstests/core/geo_box1_noindex.js
@@ -3,36 +3,36 @@ t = db.geo_box1_noindex;
t.drop();
num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
+for (x = 0; x <= 20; x++) {
+ for (y = 0; y <= 20; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
}
}
-searches = [
- [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
- [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
- [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
-];
+searches = [[[1, 2], [4, 5]], [[1, 1], [2, 2]], [[0, 2], [4, 5]], [[1, 1], [2, 8]], ];
-for ( i=0; i<searches.length; i++ ){
+for (i = 0; i < searches.length; i++) {
b = searches[i];
- q = { loc : { $within : { $box : b } } };
- numWanted = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
- assert.eq( numWanted , t.find(q).itcount() , "itcount: " + tojson( q ) );
- printjson( t.find(q).explain() );
+ q = {
+ loc: {$within: {$box: b}}
+ };
+ numWanted = (1 + b[1][0] - b[0][0]) * (1 + b[1][1] - b[0][1]);
+ assert.eq(numWanted, t.find(q).itcount(), "itcount: " + tojson(q));
+ printjson(t.find(q).explain());
}
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" );
-assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" );
-assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" );
+assert.eq(0, t.find({loc: {$within: {$box: [[100, 100], [110, 110]]}}}).itcount(), "E1");
+assert.eq(0, t.find({loc: {$within: {$box: [[100, 100], [110, 110]]}}}).count(), "E2");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).count(), "E3");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).itcount(), "E4");
+assert.eq(57, t.find({loc: {$within: {$box: [[0, 0], [110, 110]]}}}).limit(57).itcount(), "E5");
// SERVER-13621
// Detect and invert the $box coordinates when they're specified incorrectly.
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 110 , 110 ], [ 0 , 0 ] ] } } } ).count() , "E5" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 110 , 0 ], [ 0 , 110 ] ] } } } ).count() , "E6" );
-assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 110 ], [ 110 , 0 ] ] } } } ).count() , "E7" );
+assert.eq(num, t.find({loc: {$within: {$box: [[110, 110], [0, 0]]}}}).count(), "E5");
+assert.eq(num, t.find({loc: {$within: {$box: [[110, 0], [0, 110]]}}}).count(), "E6");
+assert.eq(num, t.find({loc: {$within: {$box: [[0, 110], [110, 0]]}}}).count(), "E7");
diff --git a/jstests/core/geo_box2.js b/jstests/core/geo_box2.js
index c20a1701874..74f9695f9b2 100644
--- a/jstests/core/geo_box2.js
+++ b/jstests/core/geo_box2.js
@@ -3,16 +3,16 @@ t = db.geo_box2;
t.drop();
-for (i=1; i<10; i++) {
- for(j=1; j<10; j++) {
- t.insert({loc : [i,j]});
- }
+for (i = 1; i < 10; i++) {
+ for (j = 1; j < 10; j++) {
+ t.insert({loc: [i, j]});
+ }
}
-t.ensureIndex({"loc" : "2d"} );
-assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "A1" );
+t.ensureIndex({"loc": "2d"});
+assert.eq(9, t.find({loc: {$within: {$box: [[4, 4], [6, 6]]}}}).itcount(), "A1");
-t.dropIndex( { "loc" : "2d" } );
+t.dropIndex({"loc": "2d"});
-t.ensureIndex({"loc" : "2d"} , {"min" : 0, "max" : 10});
-assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "B1" );
+t.ensureIndex({"loc": "2d"}, {"min": 0, "max": 10});
+assert.eq(9, t.find({loc: {$within: {$box: [[4, 4], [6, 6]]}}}).itcount(), "B1");
diff --git a/jstests/core/geo_box3.js b/jstests/core/geo_box3.js
index 8941f637518..7f9dd12ea60 100644
--- a/jstests/core/geo_box3.js
+++ b/jstests/core/geo_box3.js
@@ -4,33 +4,33 @@
// bounding box.
// This is the bug reported in SERVER-994.
-t=db.geo_box3;
+t = db.geo_box3;
t.drop();
-t.insert({ point : { x : -15000000, y : 10000000 } });
-t.ensureIndex( { point : "2d" } , { min : -21000000 , max : 21000000 } );
-var c=t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]} } });
+t.insert({point: {x: -15000000, y: 10000000}});
+t.ensureIndex({point: "2d"}, {min: -21000000, max: 21000000});
+var c = t.find({point: {"$within": {"$box": [[-20000000, 7000000], [0, 15000000]]}}});
assert.eq(1, c.count(), "A1");
// Same thing, modulo 1000000.
-t=db.geo_box3;
+t = db.geo_box3;
t.drop();
-t.insert({ point : { x : -15, y : 10 } });
-t.ensureIndex( { point : "2d" } , { min : -21 , max : 21 } );
-var c=t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]} } });
+t.insert({point: {x: -15, y: 10}});
+t.ensureIndex({point: "2d"}, {min: -21, max: 21});
+var c = t.find({point: {"$within": {"$box": [[-20, 7], [0, 15]]}}});
assert.eq(1, c.count(), "B1");
// Two more examples, one where the index is centered at the origin,
// one not.
-t=db.geo_box3;
+t = db.geo_box3;
t.drop();
-t.insert({ point : { x : 1.0 , y : 1.0 } });
-t.ensureIndex( { point : "2d" } , { min : -2 , max : 2 } );
-var c=t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]} } });
+t.insert({point: {x: 1.0, y: 1.0}});
+t.ensureIndex({point: "2d"}, {min: -2, max: 2});
+var c = t.find({point: {"$within": {"$box": [[.1, .1], [1.99, 1.99]]}}});
assert.eq(1, c.count(), "C1");
-t=db.geo_box3;
+t = db.geo_box3;
t.drop();
-t.insert({ point : { x : 3.9 , y : 3.9 } });
-t.ensureIndex( { point : "2d" } , { min : 0 , max : 4 } );
-var c=t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]} } });
+t.insert({point: {x: 3.9, y: 3.9}});
+t.ensureIndex({point: "2d"}, {min: 0, max: 4});
+var c = t.find({point: {"$within": {"$box": [[2.05, 2.05], [3.99, 3.99]]}}});
assert.eq(1, c.count(), "D1");
diff --git a/jstests/core/geo_center_sphere1.js b/jstests/core/geo_center_sphere1.js
index 2c61a54588b..f3b39b552cd 100644
--- a/jstests/core/geo_center_sphere1.js
+++ b/jstests/core/geo_center_sphere1.js
@@ -2,66 +2,77 @@ t = db.geo_center_sphere1;
function test(index) {
t.drop();
- skip = 8; // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4)
+ skip = 8; // lower for more rigor, higher for more speed (tested with .5, .678, 1, 2, 3, and 4)
- searches = [
+ searches = [
// x , y rad
- [ [ 5 , 0 ] , 0.05 ] , // ~200 miles
- [ [ 135 , 0 ] , 0.05 ] ,
+ [[5, 0], 0.05], // ~200 miles
+ [[135, 0], 0.05],
- [ [ 5 , 70 ] , 0.05 ] ,
- [ [ 135 , 70 ] , 0.05 ] ,
- [ [ 5 , 85 ] , 0.05 ] ,
+ [[5, 70], 0.05],
+ [[135, 70], 0.05],
+ [[5, 85], 0.05],
- [ [ 20 , 0 ] , 0.25 ] , // ~1000 miles
- [ [ 20 , -45 ] , 0.25 ] ,
- [ [ -20 , 60 ] , 0.25 ] ,
- [ [ -20 , -70 ] , 0.25 ] ,
+ [[20, 0], 0.25], // ~1000 miles
+ [[20, -45], 0.25],
+ [[-20, 60], 0.25],
+ [[-20, -70], 0.25],
];
- correct = searches.map( function(z){ return []; } );
+ correct = searches.map(function(z) {
+ return [];
+ });
num = 0;
var bulk = t.initializeUnorderedBulkOp();
- for ( x=-179; x<=179; x += skip ){
- for ( y=-89; y<=89; y += skip ){
- o = { _id : num++ , loc : [ x , y ] };
- bulk.insert( o );
- for ( i=0; i<searches.length; i++ ){
- if ( Geo.sphereDistance( [ x , y ] , searches[i][0] ) <= searches[i][1])
- correct[i].push( o );
+ for (x = -179; x <= 179; x += skip) {
+ for (y = -89; y <= 89; y += skip) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ bulk.insert(o);
+ for (i = 0; i < searches.length; i++) {
+ if (Geo.sphereDistance([x, y], searches[i][0]) <= searches[i][1])
+ correct[i].push(o);
}
}
- gc(); // needed with low skip values
+ gc(); // needed with low skip values
}
assert.writeOK(bulk.execute());
if (index) {
- t.ensureIndex( { loc : index } );
+ t.ensureIndex({loc: index});
}
- for ( i=0; i<searches.length; i++ ){
+ for (i = 0; i < searches.length; i++) {
print('------------');
- print( tojson( searches[i] ) + "\t" + correct[i].length );
- q = { loc : { $within : { $centerSphere : searches[i] } } };
+ print(tojson(searches[i]) + "\t" + correct[i].length);
+ q = {
+ loc: {$within: {$centerSphere: searches[i]}}
+ };
- //correct[i].forEach( printjson )
- //printjson( q );
- //t.find( q ).forEach( printjson )
-
- //printjson(t.find( q ).explain())
+ // correct[i].forEach( printjson )
+ // printjson( q );
+ // t.find( q ).forEach( printjson )
+
+ // printjson(t.find( q ).explain())
+
+ // printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
+ // printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
- //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
- //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
-
var numExpected = correct[i].length;
- var x = correct[i].map( function(z){ return z._id; } );
- var y = t.find(q).map( function(z){ return z._id; } );
+ var x = correct[i].map(function(z) {
+ return z._id;
+ });
+ var y = t.find(q).map(function(z) {
+ return z._id;
+ });
missing = [];
- epsilon = 0.001; // allow tenth of a percent error due to conversions
- for (var j=0; j<x.length; j++){
- if (!Array.contains(y, x[j])){
+ epsilon = 0.001; // allow tenth of a percent error due to conversions
+ for (var j = 0; j < x.length; j++) {
+ if (!Array.contains(y, x[j])) {
missing.push(x[j]);
var obj = t.findOne({_id: x[j]});
var dist = Geo.sphereDistance(searches[i][0], obj.loc);
@@ -70,8 +81,8 @@ function test(index) {
numExpected -= 1;
}
}
- for (var j=0; j<y.length; j++){
- if (!Array.contains(x, y[j])){
+ for (var j = 0; j < y.length; j++) {
+ if (!Array.contains(x, y[j])) {
missing.push(y[j]);
var obj = t.findOne({_id: y[j]});
var dist = Geo.sphereDistance(searches[i][0], obj.loc);
@@ -81,16 +92,16 @@ function test(index) {
}
}
-
- assert.eq( numExpected , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( numExpected , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+ assert.eq(numExpected, t.find(q).itcount(), "itcount : " + tojson(searches[i]));
+ assert.eq(numExpected, t.find(q).count(), "count : " + tojson(searches[i]));
if (index == "2d") {
- var explain = t.find( q ).explain("executionStats");
- print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
+ var explain = t.find(q).explain("executionStats");
+ print('explain for ' + tojson(q, '', true) + ' = ' + tojson(explain));
// The index should be at least minimally effective in preventing the full collection
// scan.
- assert.gt( t.find().count(), explain.executionStats.totalKeysExamined ,
- "nscanned : " + tojson( searches[i] ) );
+ assert.gt(t.find().count(),
+ explain.executionStats.totalKeysExamined,
+ "nscanned : " + tojson(searches[i]));
}
}
}
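
The radii in the searches array above are expressed in radians, the unit that $centerSphere and Geo.sphereDistance work in, which is where the "~200 miles" and "~1000 miles" comments come from. A rough conversion sketch (approximate figures, using the 6378.1 km earth radius that the next file also references):

// 0.05 radians of arc on the earth's surface:
var smallRadiusKm = 0.05 * 6378.1;  // ~319 km, roughly 200 miles
// 0.25 radians:
var largeRadiusKm = 0.25 * 6378.1;  // ~1595 km, roughly 1000 miles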
diff --git a/jstests/core/geo_center_sphere2.js b/jstests/core/geo_center_sphere2.js
index a569c4d5c96..ac8f09cbe77 100644
--- a/jstests/core/geo_center_sphere2.js
+++ b/jstests/core/geo_center_sphere2.js
@@ -5,39 +5,42 @@
// multiple documents, and so requires simultaneous testing.
//
-function deg2rad(arg) { return arg * Math.PI / 180.0; }
-function rad2deg(arg) { return arg * 180.0 / Math.PI; }
+function deg2rad(arg) {
+ return arg * Math.PI / 180.0;
+}
+function rad2deg(arg) {
+ return arg * 180.0 / Math.PI;
+}
function computexscandist(y, maxDistDegrees) {
- return maxDistDegrees / Math.min(Math.cos(deg2rad(Math.min(89.0, y + maxDistDegrees))),
- Math.cos(deg2rad(Math.max(-89.0, y - maxDistDegrees))));
+ return maxDistDegrees /
+ Math.min(Math.cos(deg2rad(Math.min(89.0, y + maxDistDegrees))),
+ Math.cos(deg2rad(Math.max(-89.0, y - maxDistDegrees))));
}
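// A worked example of the scaling above (hedged, approximate figures): one degree
// of longitude covers only cos(latitude) as much ground as one degree of latitude,
// so the x-scan width is stretched by the smallest cosine in the clamped band
// [y - maxDistDegrees, y + maxDistDegrees]. For instance:
//   computexscandist(60, 1) == 1 / Math.cos(deg2rad(61))
var exampleXScan = computexscandist(60, 1);  // roughly 2.06 degrees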
function pointIsOK(startPoint, radius) {
yscandist = rad2deg(radius) + 0.01;
xscandist = computexscandist(startPoint[1], yscandist);
- return (startPoint[0] + xscandist < 180)
- && (startPoint[0] - xscandist > -180)
- && (startPoint[1] + yscandist < 90)
- && (startPoint[1] - yscandist > -90);
+ return (startPoint[0] + xscandist < 180) && (startPoint[0] - xscandist > -180) &&
+ (startPoint[1] + yscandist < 90) && (startPoint[1] - yscandist > -90);
}
var numTests = 30;
-for ( var test = 0; test < numTests; test++ ) {
- Random.srand( 1337 + test );
+for (var test = 0; test < numTests; test++) {
+ Random.srand(1337 + test);
- var radius = 5000 * Random.rand(); // km
- radius = radius / 6378.1; // radians; earth radius from geoconstants.h
- var numDocs = Math.floor( 400 * Random.rand() );
+ var radius = 5000 * Random.rand(); // km
+ radius = radius / 6378.1; // radians; earth radius from geoconstants.h
+ var numDocs = Math.floor(400 * Random.rand());
// TODO: Wrapping uses the error value to figure out what would overlap...
- var bits = Math.floor( 5 + Random.rand() * 28 );
+ var bits = Math.floor(5 + Random.rand() * 28);
var maxPointsPerDoc = 50;
var t = db.sphere;
var randomPoint = function() {
- return [ Random.rand() * 360 - 180, Random.rand() * 180 - 90 ];
+ return [Random.rand() * 360 - 180, Random.rand() * 180 - 90];
};
// Get a start point that doesn't require wrapping
@@ -47,7 +50,7 @@ for ( var test = 0; test < numTests; test++ ) {
do {
t.drop();
startPoint = randomPoint();
- t.ensureIndex( { loc : "2d" }, { bits : bits } );
+ t.ensureIndex({loc: "2d"}, {bits: bits});
} while (!pointIsOK(startPoint, radius));
var pointsIn = 0;
@@ -57,19 +60,18 @@ for ( var test = 0; test < numTests; test++ ) {
var totalPoints = 0;
var bulk = t.initializeUnorderedBulkOp();
- for ( var i = 0; i < numDocs; i++ ) {
-
- var numPoints = Math.floor( Random.rand() * maxPointsPerDoc + 1 );
+ for (var i = 0; i < numDocs; i++) {
+ var numPoints = Math.floor(Random.rand() * maxPointsPerDoc + 1);
var docIn = false;
var multiPoint = [];
totalPoints += numPoints;
- for ( var p = 0; p < numPoints; p++ ) {
+ for (var p = 0; p < numPoints; p++) {
var point = randomPoint();
- multiPoint.push( point );
+ multiPoint.push(point);
- if ( Geo.sphereDistance( startPoint, point ) <= radius ) {
+ if (Geo.sphereDistance(startPoint, point) <= radius) {
pointsIn++;
docIn = true;
} else {
@@ -77,54 +79,64 @@ for ( var test = 0; test < numTests; test++ ) {
}
}
- bulk.insert( { loc : multiPoint } );
+ bulk.insert({loc: multiPoint});
- if ( docIn )
+ if (docIn)
docsIn++;
else
docsOut++;
-
}
- printjson( { test: test, radius : radius, bits : bits, numDocs : numDocs,
- pointsIn : pointsIn, docsIn : docsIn, pointsOut : pointsOut,
- docsOut : docsOut } );
+ printjson({
+ test: test,
+ radius: radius,
+ bits: bits,
+ numDocs: numDocs,
+ pointsIn: pointsIn,
+ docsIn: docsIn,
+ pointsOut: pointsOut,
+ docsOut: docsOut
+ });
assert.writeOK(bulk.execute());
- assert.eq( docsIn + docsOut, numDocs );
- assert.eq( pointsIn + pointsOut, totalPoints );
+ assert.eq(docsIn + docsOut, numDocs);
+ assert.eq(pointsIn + pointsOut, totalPoints);
// $centerSphere
- assert.eq( docsIn, t.find({ loc: { $within:
- { $centerSphere: [ startPoint, radius ]}}} ).count() );
+ assert.eq(docsIn, t.find({loc: {$within: {$centerSphere: [startPoint, radius]}}}).count());
// $nearSphere
- var cursor = t.find({ loc: { $nearSphere: startPoint, $maxDistance: radius }});
- var results = cursor.limit( 2 * pointsIn ).toArray();
+ var cursor = t.find({loc: {$nearSphere: startPoint, $maxDistance: radius}});
+ var results = cursor.limit(2 * pointsIn).toArray();
- assert.eq( docsIn, results.length );
+ assert.eq(docsIn, results.length);
var distance = 0;
- for ( var i = 0; i < results.length; i++ ) {
+ for (var i = 0; i < results.length; i++) {
var minNewDistance = radius + 1;
- for( var j = 0; j < results[i].loc.length; j++ ){
- var newDistance = Geo.sphereDistance( startPoint, results[i].loc[j] );
- if( newDistance < minNewDistance && newDistance >= distance ) {
+ for (var j = 0; j < results[i].loc.length; j++) {
+ var newDistance = Geo.sphereDistance(startPoint, results[i].loc[j]);
+ if (newDistance < minNewDistance && newDistance >= distance) {
minNewDistance = newDistance;
}
}
- //print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is "
+ // print( "Dist from : " + results[i].loc[j] + " to " + startPoint + " is "
// + minNewDistance + " vs " + radius )
- assert.lte( minNewDistance, radius );
- assert.gte( minNewDistance, distance );
+ assert.lte(minNewDistance, radius);
+ assert.gte(minNewDistance, distance);
distance = minNewDistance;
}
// geoNear
- results = db.runCommand({ geoNear: "sphere", near: startPoint, maxDistance: radius,
- num : 2 * pointsIn, spherical : true } ).results;
+ results = db.runCommand({
+ geoNear: "sphere",
+ near: startPoint,
+ maxDistance: radius,
+ num: 2 * pointsIn,
+ spherical: true
+ }).results;
/*
printjson( results );
@@ -135,26 +147,25 @@ for ( var test = 0; test < numTests; test++ ) {
}
*/
- assert.eq( docsIn, results.length );
+ assert.eq(docsIn, results.length);
var distance = 0;
- for ( var i = 0; i < results.length; i++ ) {
+ for (var i = 0; i < results.length; i++) {
var retDistance = results[i].dis;
// print( "Dist from : " + results[i].loc + " to " + startPoint + " is "
// + retDistance + " vs " + radius )
var distInObj = false;
- for ( var j = 0; j < results[i].obj.loc.length && distInObj == false; j++ ) {
- var newDistance = Geo.sphereDistance( startPoint, results[i].obj.loc[j] );
- distInObj = ( newDistance >= retDistance - 0.0001 &&
- newDistance <= retDistance + 0.0001 );
+ for (var j = 0; j < results[i].obj.loc.length && distInObj == false; j++) {
+ var newDistance = Geo.sphereDistance(startPoint, results[i].obj.loc[j]);
+ distInObj =
+ (newDistance >= retDistance - 0.0001 && newDistance <= retDistance + 0.0001);
}
- assert( distInObj );
- assert.lte( retDistance, radius );
- assert.gte( retDistance, distance );
+ assert(distInObj);
+ assert.lte(retDistance, radius);
+ assert.gte(retDistance, distance);
distance = retDistance;
}
}
-
diff --git a/jstests/core/geo_circle1.js b/jstests/core/geo_circle1.js
index c4b79e645ab..a679a408b32 100644
--- a/jstests/core/geo_circle1.js
+++ b/jstests/core/geo_circle1.js
@@ -2,45 +2,48 @@
t = db.geo_circle1;
t.drop();
-searches = [
- [ [ 5 , 5 ] , 3 ] ,
- [ [ 5 , 5 ] , 1 ] ,
- [ [ 5 , 5 ] , 5 ] ,
- [ [ 0 , 5 ] , 5 ] ,
-];
-correct = searches.map( function(z){ return []; } );
+searches = [[[5, 5], 3], [[5, 5], 1], [[5, 5], 5], [[0, 5], 5], ];
+correct = searches.map(function(z) {
+ return [];
+});
num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- for ( i=0; i<searches.length; i++ )
- if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
- correct[i].push( o );
+for (x = 0; x <= 20; x++) {
+ for (y = 0; y <= 20; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
+ for (i = 0; i < searches.length; i++)
+ if (Geo.distance([x, y], searches[i][0]) <= searches[i][1])
+ correct[i].push(o);
}
}
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-for ( i=0; i<searches.length; i++ ){
- //print( tojson( searches[i] ) + "\t" + correct[i].length )
- q = { loc : { $within : { $center : searches[i] } } };
+for (i = 0; i < searches.length; i++) {
+ // print( tojson( searches[i] ) + "\t" + correct[i].length )
+ q = {
+ loc: {$within: {$center: searches[i]}}
+ };
- //correct[i].forEach( printjson )
- //printjson( q );
- //t.find( q ).forEach( printjson )
+ // correct[i].forEach( printjson )
+ // printjson( q );
+ // t.find( q ).forEach( printjson )
- //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
- //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
-
- assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
- var explain = t.find( q ).explain("executionStats");
- print( 'explain for ' + tojson( q , '' , true ) + ' = ' + tojson( explain ) );
+ // printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
+ // printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
+
+ assert.eq(correct[i].length, t.find(q).itcount(), "itcount : " + tojson(searches[i]));
+ assert.eq(correct[i].length, t.find(q).count(), "count : " + tojson(searches[i]));
+ var explain = t.find(q).explain("executionStats");
+ print('explain for ' + tojson(q, '', true) + ' = ' + tojson(explain));
// The index should be at least minimally effective in preventing the full collection
// scan.
- assert.gt( t.find().count(), explain.executionStats.totalKeysExamined,
- "nscanned : " + tojson( searches[i] ) );
+ assert.gt(t.find().count(),
+ explain.executionStats.totalKeysExamined,
+ "nscanned : " + tojson(searches[i]));
}
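The geo_circle1 tests above follow a pattern used throughout this suite: build the expected result set by brute force with Geo.distance, then check it against a $within/$center query. A minimal standalone sketch of that pattern (the collection name and radius here are illustrative, not part of the suite):
// Sketch only: cross-check a $center query against a brute-force distance filter.
var c = db.geo_circle_sketch;  // hypothetical collection
c.drop();
c.ensureIndex({loc: "2d"});
for (var x = 0; x <= 10; x++)
    for (var y = 0; y <= 10; y++)
        c.insert({loc: [x, y]});
var center = [5, 5], radius = 3;
var expected = c.find().toArray().filter(function(d) {
    return Geo.distance(d.loc, center) <= radius;
}).length;
assert.eq(expected, c.find({loc: {$within: {$center: [center, radius]}}}).itcount());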
diff --git a/jstests/core/geo_circle1_noindex.js b/jstests/core/geo_circle1_noindex.js
index 304f1404b64..872883dbf74 100644
--- a/jstests/core/geo_circle1_noindex.js
+++ b/jstests/core/geo_circle1_noindex.js
@@ -2,28 +2,30 @@
t = db.geo_circle1_noindex;
t.drop();
-searches = [
- [ [ 5 , 5 ] , 3 ] ,
- [ [ 5 , 5 ] , 1 ] ,
- [ [ 5 , 5 ] , 5 ] ,
- [ [ 0 , 5 ] , 5 ] ,
-];
-correct = searches.map( function(z){ return []; } );
+searches = [[[5, 5], 3], [[5, 5], 1], [[5, 5], 5], [[0, 5], 5], ];
+correct = searches.map(function(z) {
+ return [];
+});
num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- for ( i=0; i<searches.length; i++ )
- if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
- correct[i].push( o );
+for (x = 0; x <= 20; x++) {
+ for (y = 0; y <= 20; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
+ for (i = 0; i < searches.length; i++)
+ if (Geo.distance([x, y], searches[i][0]) <= searches[i][1])
+ correct[i].push(o);
}
}
-for ( i=0; i<searches.length; i++ ){
- q = { loc : { $within : { $center : searches[i] } } };
- assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
- assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+for (i = 0; i < searches.length; i++) {
+ q = {
+ loc: {$within: {$center: searches[i]}}
+ };
+ assert.eq(correct[i].length, t.find(q).itcount(), "itcount : " + tojson(searches[i]));
+ assert.eq(correct[i].length, t.find(q).count(), "count : " + tojson(searches[i]));
}
diff --git a/jstests/core/geo_circle2.js b/jstests/core/geo_circle2.js
index 9a3b3c94860..6c89098f684 100644
--- a/jstests/core/geo_circle2.js
+++ b/jstests/core/geo_circle2.js
@@ -2,25 +2,28 @@
t = db.geo_circle2;
t.drop();
-t.ensureIndex({loc : "2d", categories:1}, {"name":"placesIdx", "min": -100, "max": 100});
+t.ensureIndex({loc: "2d", categories: 1}, {"name": "placesIdx", "min": -100, "max": 100});
-t.insert({ "uid" : 368900 , "loc" : { "x" : -36 , "y" : -8} ,"categories" : [ "sports" , "hotel" , "restaurant"]});
-t.insert({ "uid" : 555344 , "loc" : { "x" : 13 , "y" : 29} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 855878 , "loc" : { "x" : 38 , "y" : 30} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 917347 , "loc" : { "x" : 15 , "y" : 46} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 647874 , "loc" : { "x" : 25 , "y" : 23} ,"categories" : [ "hotel" , "restaurant"]});
-t.insert({ "uid" : 518482 , "loc" : { "x" : 4 , "y" : 25} ,"categories" : [ ]});
-t.insert({ "uid" : 193466 , "loc" : { "x" : -39 , "y" : 22} ,"categories" : [ "sports" , "hotel"]});
-t.insert({ "uid" : 622442 , "loc" : { "x" : -24 , "y" : -46} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 297426 , "loc" : { "x" : 33 , "y" : -49} ,"categories" : [ "hotel"]});
-t.insert({ "uid" : 528464 , "loc" : { "x" : -43 , "y" : 48} ,"categories" : [ "restaurant"]});
-t.insert({ "uid" : 90579 , "loc" : { "x" : -4 , "y" : -23} ,"categories" : [ "restaurant"]});
-t.insert({ "uid" : 368895 , "loc" : { "x" : -8 , "y" : 14} ,"categories" : [ "sports" ]});
-t.insert({ "uid" : 355844 , "loc" : { "x" : 34 , "y" : -4} ,"categories" : [ "sports" , "hotel"]});
+t.insert(
+ {"uid": 368900, "loc": {"x": -36, "y": -8}, "categories": ["sports", "hotel", "restaurant"]});
+t.insert({"uid": 555344, "loc": {"x": 13, "y": 29}, "categories": ["sports", "hotel"]});
+t.insert({"uid": 855878, "loc": {"x": 38, "y": 30}, "categories": ["sports", "hotel"]});
+t.insert({"uid": 917347, "loc": {"x": 15, "y": 46}, "categories": ["hotel"]});
+t.insert({"uid": 647874, "loc": {"x": 25, "y": 23}, "categories": ["hotel", "restaurant"]});
+t.insert({"uid": 518482, "loc": {"x": 4, "y": 25}, "categories": []});
+t.insert({"uid": 193466, "loc": {"x": -39, "y": 22}, "categories": ["sports", "hotel"]});
+t.insert({"uid": 622442, "loc": {"x": -24, "y": -46}, "categories": ["hotel"]});
+t.insert({"uid": 297426, "loc": {"x": 33, "y": -49}, "categories": ["hotel"]});
+t.insert({"uid": 528464, "loc": {"x": -43, "y": 48}, "categories": ["restaurant"]});
+t.insert({"uid": 90579, "loc": {"x": -4, "y": -23}, "categories": ["restaurant"]});
+t.insert({"uid": 368895, "loc": {"x": -8, "y": 14}, "categories": ["sports"]});
+t.insert({"uid": 355844, "loc": {"x": 34, "y": -4}, "categories": ["sports", "hotel"]});
-
-assert.eq( 10 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}} } ).itcount() , "A" );
-assert.eq( 6 , t.find({ "loc" : { "$within" : { "$center" : [ { "x" : 0 ,"y" : 0} , 50]}}, "categories" : "sports" } ).itcount() , "B" );
+assert.eq(10, t.find({"loc": {"$within": {"$center": [{"x": 0, "y": 0}, 50]}}}).itcount(), "A");
+assert.eq(6,
+ t.find({"loc": {"$within": {"$center": [{"x": 0, "y": 0}, 50]}}, "categories": "sports"})
+ .itcount(),
+ "B");
// When not a $near or $within query, geo index should not be used. Fails if geo index is used.
-assert.eq( 1 , t.find({ "loc" : { "x" : -36, "y" : -8}, "categories" : "sports" }).itcount(), "C" );
+assert.eq(1, t.find({"loc": {"x": -36, "y": -8}, "categories": "sports"}).itcount(), "C");
diff --git a/jstests/core/geo_circle2a.js b/jstests/core/geo_circle2a.js
index e6d9d829782..1a0cc06a7f8 100644
--- a/jstests/core/geo_circle2a.js
+++ b/jstests/core/geo_circle2a.js
@@ -1,37 +1,40 @@
-// From SERVER-2381
-// Tests to make sure that nested multi-key indexing works for geo indexes and is not used for direct position
+// From SERVER-2381
+// Tests to make sure that nested multi-key indexing works for geo indexes and is not used for
+// direct position
// lookups
var coll = db.geo_circle2a;
coll.drop();
-coll.insert({ p : [1112,3473], t : [{ k : 'a', v : 'b' }, { k : 'c', v : 'd' }] });
-coll.ensureIndex({ p : '2d', 't.k' : 1 }, { min : 0, max : 10000 });
+coll.insert({p: [1112, 3473], t: [{k: 'a', v: 'b'}, {k: 'c', v: 'd'}]});
+coll.ensureIndex({p: '2d', 't.k': 1}, {min: 0, max: 10000});
// Succeeds, since a direct lookup should not use the index
-assert(1 == coll.find({p:[1112,3473],'t.k':'a'}).count(), "A");
+assert(1 == coll.find({p: [1112, 3473], 't.k': 'a'}).count(), "A");
// Succeeds and uses the geo index
-assert(1 == coll.find({p:{$within:{$box:[[1111,3472],[1113,3475]]}}, 't.k' : 'a' }).count(), "B");
-
+assert(1 == coll.find({p: {$within: {$box: [[1111, 3472], [1113, 3475]]}}, 't.k': 'a'}).count(),
+ "B");
coll.drop();
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' }, { k : 'key2', v : 123 } ] });
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] });
+coll.insert({point: [1, 10], tags: [{k: 'key', v: 'value'}, {k: 'key2', v: 123}]});
+coll.insert({point: [1, 10], tags: [{k: 'key', v: 'value'}]});
-coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 });
+coll.ensureIndex({point: "2d", "tags.k": 1, "tags.v": 1});
// Succeeds, since it should now look up multi-keys correctly
-assert(2 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } } }).count(), "C");
+assert(2 == coll.find({point: {$within: {$box: [[0, 0], [12, 12]]}}}).count(), "C");
// Succeeds, and should not use geoindex
-assert(2 == coll.find({ point : [1, 10] }).count(), "D");
-assert(2 == coll.find({ point : [1, 10], "tags.v" : "value" }).count(), "E");
-assert(1 == coll.find({ point : [1, 10], "tags.v" : 123 }).count(), "F");
-
+assert(2 == coll.find({point: [1, 10]}).count(), "D");
+assert(2 == coll.find({point: [1, 10], "tags.v": "value"}).count(), "E");
+assert(1 == coll.find({point: [1, 10], "tags.v": 123}).count(), "F");
coll.drop();
-coll.insert({ point:[ 1, 10 ], tags : [ { k : { 'hello' : 'world'}, v : 'value' }, { k : 'key2', v : 123 } ] });
-coll.insert({ point:[ 1, 10 ], tags : [ { k : 'key', v : 'value' } ] });
+coll.insert({point: [1, 10], tags: [{k: {'hello': 'world'}, v: 'value'}, {k: 'key2', v: 123}]});
+coll.insert({point: [1, 10], tags: [{k: 'key', v: 'value'}]});
-coll.ensureIndex({ point : "2d" , "tags.k" : 1, "tags.v" : 1 });
+coll.ensureIndex({point: "2d", "tags.k": 1, "tags.v": 1});
// Succeeds, should be able to look up the complex element
-assert(1 == coll.find({ point : { $within : { $box : [[0,0],[12,12]] } }, 'tags.k' : { 'hello' : 'world' } }).count(), "G");
\ No newline at end of file
+assert(1 ==
+ coll.find({point: {$within: {$box: [[0, 0], [12, 12]]}}, 'tags.k': {'hello': 'world'}})
+ .count(),
+       "G");
\ No newline at end of file
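geo_circle2a exercises two planner behaviors at once: an exact equality match on the point field is answered without the geo index (per the comments above), while a $within/$box predicate does use it, and the multikey entries from the nested array still yield each matching document once. A hedged sketch of those two query shapes (collection and field names are made up for illustration):
// Sketch only: compound 2d + multikey index, direct lookup vs. $box lookup.
var mk = db.geo_multikey_sketch;  // hypothetical collection
mk.drop();
mk.ensureIndex({p: "2d", "t.k": 1}, {min: 0, max: 10000});
mk.insert({p: [10, 10], t: [{k: "a"}, {k: "b"}]});
// Exact equality on the point does not go through the geo index.
assert.eq(1, mk.find({p: [10, 10], "t.k": "a"}).count());
// The $box predicate does; the document is returned once despite the multikey entries.
assert.eq(1, mk.find({p: {$within: {$box: [[9, 9], [11, 11]]}}, "t.k": "a"}).count());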
diff --git a/jstests/core/geo_circle3.js b/jstests/core/geo_circle3.js
index 96907ce9706..4e1fde4aa89 100644
--- a/jstests/core/geo_circle3.js
+++ b/jstests/core/geo_circle3.js
@@ -2,22 +2,21 @@
db.places.drop();
n = 0;
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 52.0001 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5, "y" : 51.9999 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 52.0001 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 52.0001 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 4.9999, "y" : 51.9999 } });
-db.places.save({ "_id": n++, "loc" : { "x" : 5.0001, "y" : 51.9999 } });
-db.places.ensureIndex( { loc : "2d" } );
-radius=0.0001;
-center=[5,52];
-//print(db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).count())
+db.places.save({"_id": n++, "loc": {"x": 4.9999, "y": 52}});
+db.places.save({"_id": n++, "loc": {"x": 5, "y": 52}});
+db.places.save({"_id": n++, "loc": {"x": 5.0001, "y": 52}});
+db.places.save({"_id": n++, "loc": {"x": 5, "y": 52.0001}});
+db.places.save({"_id": n++, "loc": {"x": 5, "y": 51.9999}});
+db.places.save({"_id": n++, "loc": {"x": 4.9999, "y": 52.0001}});
+db.places.save({"_id": n++, "loc": {"x": 5.0001, "y": 52.0001}});
+db.places.save({"_id": n++, "loc": {"x": 4.9999, "y": 51.9999}});
+db.places.save({"_id": n++, "loc": {"x": 5.0001, "y": 51.9999}});
+db.places.ensureIndex({loc: "2d"});
+radius = 0.0001;
+center = [5, 52];
+// print(db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).count())
// FIXME: we want an assert, e.g., that there be 5 answers in the find().
-db.places.find({"loc" : {"$within" : {"$center" : [center, radius]}}}).forEach(printjson);
-
+db.places.find({"loc": {"$within": {"$center": [center, radius]}}}).forEach(printjson);
// the result:
// { "_id" : ObjectId("4bb1f2f088df513435bcb4e1"), "loc" : { "x" : 5, "y" : 52 } }
diff --git a/jstests/core/geo_circle4.js b/jstests/core/geo_circle4.js
index 3113d54b38a..0d2b74b2cba 100644
--- a/jstests/core/geo_circle4.js
+++ b/jstests/core/geo_circle4.js
@@ -2,29 +2,31 @@
function test(index) {
db.server848.drop();
- radius=0.0001;
- center=[5,52];
+ radius = 0.0001;
+ center = [5, 52];
- db.server848.save({ "_id": 1, "loc" : { "x" : 4.9999, "y" : 52 } });
- db.server848.save({ "_id": 2, "loc" : { "x" : 5, "y" : 52 } });
- db.server848.save({ "_id": 3, "loc" : { "x" : 5.0001, "y" : 52 } });
- db.server848.save({ "_id": 4, "loc" : { "x" : 5, "y" : 52.0001 } });
- db.server848.save({ "_id": 5, "loc" : { "x" : 5, "y" : 51.9999 } });
- db.server848.save({ "_id": 6, "loc" : { "x" : 4.9999, "y" : 52.0001 } });
- db.server848.save({ "_id": 7, "loc" : { "x" : 5.0001, "y" : 52.0001 } });
- db.server848.save({ "_id": 8, "loc" : { "x" : 4.9999, "y" : 51.9999 } });
- db.server848.save({ "_id": 9, "loc" : { "x" : 5.0001, "y" : 51.9999 } });
+ db.server848.save({"_id": 1, "loc": {"x": 4.9999, "y": 52}});
+ db.server848.save({"_id": 2, "loc": {"x": 5, "y": 52}});
+ db.server848.save({"_id": 3, "loc": {"x": 5.0001, "y": 52}});
+ db.server848.save({"_id": 4, "loc": {"x": 5, "y": 52.0001}});
+ db.server848.save({"_id": 5, "loc": {"x": 5, "y": 51.9999}});
+ db.server848.save({"_id": 6, "loc": {"x": 4.9999, "y": 52.0001}});
+ db.server848.save({"_id": 7, "loc": {"x": 5.0001, "y": 52.0001}});
+ db.server848.save({"_id": 8, "loc": {"x": 4.9999, "y": 51.9999}});
+ db.server848.save({"_id": 9, "loc": {"x": 5.0001, "y": 51.9999}});
if (index) {
- db.server848.ensureIndex( { loc : "2d" } );
+ db.server848.ensureIndex({loc: "2d"});
}
- r=db.server848.find({"loc" : {"$within" : {"$center" : [center, radius]}}}, {_id:1});
- assert.eq(5, r.count(), "A1");
+ r = db.server848.find({"loc": {"$within": {"$center": [center, radius]}}}, {_id: 1});
+ assert.eq(5, r.count(), "A1");
// FIXME: surely code like this belongs in utils.js.
- a=r.toArray();
- x=[];
- for (k in a) { x.push(a[k]["_id"]); }
+ a = r.toArray();
+ x = [];
+ for (k in a) {
+ x.push(a[k]["_id"]);
+ }
x.sort();
- assert.eq([1,2,3,4,5], x, "B1");
+ assert.eq([1, 2, 3, 4, 5], x, "B1");
}
test(false);
diff --git a/jstests/core/geo_circle5.js b/jstests/core/geo_circle5.js
index 32b5744cea4..1b3d67e91b0 100644
--- a/jstests/core/geo_circle5.js
+++ b/jstests/core/geo_circle5.js
@@ -2,20 +2,20 @@
db.server1238.drop();
db.server1238.remove({});
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 1});
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 2});
-db.server1238.ensureIndex( { loc : "2d" } , { min : -21000000 , max : 21000000 } );
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 3});
-db.server1238.save({ loc: [ 5000000, 900000 ], id: 4});
+db.server1238.save({loc: [5000000, 900000], id: 1});
+db.server1238.save({loc: [5000000, 900000], id: 2});
+db.server1238.ensureIndex({loc: "2d"}, {min: -21000000, max: 21000000});
+db.server1238.save({loc: [5000000, 900000], id: 3});
+db.server1238.save({loc: [5000000, 900000], id: 4});
-c1=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000000, 900000], 1.0]}}}).count();
-
-c2=db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).count();
+c1 = db.server1238.find({"loc": {"$within": {"$center": [[5000000, 900000], 1.0]}}}).count();
+c2 = db.server1238.find({"loc": {"$within": {"$center": [[5000001, 900000], 5.0]}}}).count();
assert.eq(4, c1, "A1");
assert.eq(c1, c2, "B1");
-//print(db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000], 5.0]}}}).toArray());
+// print(db.server1238.find({"loc" : {"$within" : {"$center" : [[5000001, 900000],
+// 5.0]}}}).toArray());
// [
// {
// "_id" : ObjectId("4c173306f5d9d34a46cb7b11"),
@@ -25,4 +25,4 @@ assert.eq(c1, c2, "B1");
// ],
// "id" : 4
// }
-// ]
+// ]
diff --git a/jstests/core/geo_distinct.js b/jstests/core/geo_distinct.js
index eccb517ed83..705bf1cc7ce 100644
--- a/jstests/core/geo_distinct.js
+++ b/jstests/core/geo_distinct.js
@@ -10,25 +10,25 @@ var res;
//
coll.drop();
-coll.insert( { loc: { type: 'Point', coordinates: [ 10, 20 ] } } );
-coll.insert( { loc: { type: 'Point', coordinates: [ 10, 20 ] } } );
-coll.insert( { loc: { type: 'Point', coordinates: [ 20, 30 ] } } );
-coll.insert( { loc: { type: 'Point', coordinates: [ 20, 30 ] } } );
-assert.eq( 4, coll.count() );
+coll.insert({loc: {type: 'Point', coordinates: [10, 20]}});
+coll.insert({loc: {type: 'Point', coordinates: [10, 20]}});
+coll.insert({loc: {type: 'Point', coordinates: [20, 30]}});
+coll.insert({loc: {type: 'Point', coordinates: [20, 30]}});
+assert.eq(4, coll.count());
// Test distinct on GeoJSON points with/without a 2dsphere index.
-res = coll.runCommand( 'distinct', { key: 'loc' } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ { type: 'Point', coordinates: [ 10, 20 ] },
- { type: 'Point', coordinates: [ 20, 30 ] } ] );
+res = coll.runCommand('distinct', {key: 'loc'});
+assert.commandWorked(res);
+assert.eq(res.values.sort(),
+ [{type: 'Point', coordinates: [10, 20]}, {type: 'Point', coordinates: [20, 30]}]);
-assert.commandWorked( coll.ensureIndex( { loc: '2dsphere' } ) );
+assert.commandWorked(coll.ensureIndex({loc: '2dsphere'}));
-res = coll.runCommand( 'distinct', { key: 'loc' } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ { type: 'Point', coordinates: [ 10, 20 ] },
- { type: 'Point', coordinates: [ 20, 30 ] } ] );
+res = coll.runCommand('distinct', {key: 'loc'});
+assert.commandWorked(res);
+assert.eq(res.values.sort(),
+ [{type: 'Point', coordinates: [10, 20]}, {type: 'Point', coordinates: [20, 30]}]);
// Test distinct on legacy points with/without a 2d index.
@@ -38,69 +38,72 @@ assert.eq( res.values.sort(), [ { type: 'Point', coordinates: [ 10, 20 ] },
coll.dropIndexes();
-res = coll.runCommand( 'distinct', { key: 'loc.coordinates' } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 10, 20, 30 ] );
+res = coll.runCommand('distinct', {key: 'loc.coordinates'});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [10, 20, 30]);
-assert.commandWorked( coll.ensureIndex( { 'loc.coordinates': '2d' } ) );
+assert.commandWorked(coll.ensureIndex({'loc.coordinates': '2d'}));
-res = coll.runCommand( 'distinct', { key: 'loc.coordinates' } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 10, 20, 30 ] );
+res = coll.runCommand('distinct', {key: 'loc.coordinates'});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [10, 20, 30]);
//
// 2. Test distinct with geo predicates for 'query'.
//
coll.drop();
-for (var i=0; i<50; ++i) {
- coll.insert( { zone: 1, loc: { type: 'Point', coordinates: [ -20, -20 ] } } );
- coll.insert( { zone: 2, loc: { type: 'Point', coordinates: [ -10, -10 ] } } );
- coll.insert( { zone: 3, loc: { type: 'Point', coordinates: [ 0, 0 ] } } );
- coll.insert( { zone: 4, loc: { type: 'Point', coordinates: [ 10, 10 ] } } );
- coll.insert( { zone: 5, loc: { type: 'Point', coordinates: [ 20, 20 ] } } );
+for (var i = 0; i < 50; ++i) {
+ coll.insert({zone: 1, loc: {type: 'Point', coordinates: [-20, -20]}});
+ coll.insert({zone: 2, loc: {type: 'Point', coordinates: [-10, -10]}});
+ coll.insert({zone: 3, loc: {type: 'Point', coordinates: [0, 0]}});
+ coll.insert({zone: 4, loc: {type: 'Point', coordinates: [10, 10]}});
+ coll.insert({zone: 5, loc: {type: 'Point', coordinates: [20, 20]}});
}
-var originGeoJSON = { type: 'Point', coordinates: [ 0, 0 ] };
+var originGeoJSON = {
+ type: 'Point',
+ coordinates: [0, 0]
+};
// Test distinct with $nearSphere query predicate.
// A. Unindexed key, no geo index on query predicate.
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { loc: { $nearSphere: { $geometry: originGeoJSON,
- $maxDistance: 1 } } } } );
-assert.commandFailed( res );
+res = coll.runCommand(
+ 'distinct',
+ {key: 'zone', query: {loc: {$nearSphere: {$geometry: originGeoJSON, $maxDistance: 1}}}});
+assert.commandFailed(res);
// B. Unindexed key, with 2dsphere index on query predicate.
-assert.commandWorked( coll.ensureIndex( { loc: '2dsphere' } ) );
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { loc: { $nearSphere: { $geometry: originGeoJSON,
- $maxDistance: 1 } } } } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 3 ] );
+assert.commandWorked(coll.ensureIndex({loc: '2dsphere'}));
+res = coll.runCommand(
+ 'distinct',
+ {key: 'zone', query: {loc: {$nearSphere: {$geometry: originGeoJSON, $maxDistance: 1}}}});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [3]);
// C. Indexed key, with 2dsphere index on query predicate.
-assert.commandWorked( coll.ensureIndex( { zone: 1 } ) );
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { loc: { $nearSphere: { $geometry: originGeoJSON,
- $maxDistance: 1 } } } } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 3 ] );
+assert.commandWorked(coll.ensureIndex({zone: 1}));
+res = coll.runCommand(
+ 'distinct',
+ {key: 'zone', query: {loc: {$nearSphere: {$geometry: originGeoJSON, $maxDistance: 1}}}});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [3]);
// Test distinct with $near query predicate.
coll.dropIndexes();
// A. Unindexed key, no geo index on query predicate.
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { 'loc.coordinates': { $near: [ 0, 0 ], $maxDistance: 1 } } } );
-assert.commandFailed( res );
+res = coll.runCommand('distinct',
+ {key: 'zone', query: {'loc.coordinates': {$near: [0, 0], $maxDistance: 1}}});
+assert.commandFailed(res);
// B. Unindexed key, with 2d index on query predicate.
-assert.commandWorked( coll.ensureIndex( { 'loc.coordinates': '2d' } ) );
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { 'loc.coordinates': { $near: [ 0, 0 ], $maxDistance: 1 } } } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 3 ] );
+assert.commandWorked(coll.ensureIndex({'loc.coordinates': '2d'}));
+res = coll.runCommand('distinct',
+ {key: 'zone', query: {'loc.coordinates': {$near: [0, 0], $maxDistance: 1}}});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [3]);
// C. Indexed key, with 2d index on query predicate.
-assert.commandWorked( coll.ensureIndex( { zone: 1 } ) );
-res = coll.runCommand( 'distinct', { key: 'zone',
- query: { 'loc.coordinates': { $near: [ 0, 0 ], $maxDistance: 1 } } } );
-assert.commandWorked( res );
-assert.eq( res.values.sort(), [ 3 ] );
+assert.commandWorked(coll.ensureIndex({zone: 1}));
+res = coll.runCommand('distinct',
+ {key: 'zone', query: {'loc.coordinates': {$near: [0, 0], $maxDistance: 1}}});
+assert.commandWorked(res);
+assert.eq(res.values.sort(), [3]);
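The point of geo_distinct is that a distinct command can carry a geo predicate in its 'query' field, but $near/$nearSphere predicates only succeed once a matching geo index exists. A condensed sketch of that behavior (collection and field names are illustrative):
// Sketch only: distinct with a $nearSphere predicate fails without, and works with, a 2dsphere index.
var dz = db.geo_distinct_sketch;  // hypothetical collection
dz.drop();
dz.insert({zone: 3, loc: {type: "Point", coordinates: [0, 0]}});
var pred = {loc: {$nearSphere: {$geometry: {type: "Point", coordinates: [0, 0]}, $maxDistance: 1}}};
assert.commandFailed(dz.runCommand("distinct", {key: "zone", query: pred}));
assert.commandWorked(dz.ensureIndex({loc: "2dsphere"}));
var res = dz.runCommand("distinct", {key: "zone", query: pred});
assert.commandWorked(res);
assert.eq([3], res.values);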
diff --git a/jstests/core/geo_exactfetch.js b/jstests/core/geo_exactfetch.js
index 43023897a4e..4af4032045f 100644
--- a/jstests/core/geo_exactfetch.js
+++ b/jstests/core/geo_exactfetch.js
@@ -10,8 +10,8 @@ function test(indexname) {
t.dropIndex({lon_lat: indexname, population: -1});
}
-t.insert({ city: "B", lon_lat: [-71.34895, 42.46037], population: 1000});
-t.insert({ city: "A", lon_lat: [1.48736, 42.55327], population: 100});
+t.insert({city: "B", lon_lat: [-71.34895, 42.46037], population: 1000});
+t.insert({city: "A", lon_lat: [1.48736, 42.55327], population: 100});
test("2d");
test("2dsphere");
diff --git a/jstests/core/geo_fiddly_box.js b/jstests/core/geo_fiddly_box.js
index cfda0a99757..f5cd3ddcc6b 100644
--- a/jstests/core/geo_fiddly_box.js
+++ b/jstests/core/geo_fiddly_box.js
@@ -1,26 +1,27 @@
// Reproduces simple test for SERVER-2832
-// The setup to reproduce was/is to create a set of points where the
+// The setup to reproduce was/is to create a set of points where the
// "expand" portion of the geo-lookup expands the 2d range in only one
// direction (so points are required on either side of the expanding range)
t = db.geo_fiddly_box;
t.drop();
-t.ensureIndex({ loc : "2d" });
+t.ensureIndex({loc: "2d"});
-t.insert({ "loc" : [3, 1] });
-t.insert({ "loc" : [3, 0.5] });
-t.insert({ "loc" : [3, 0.25] });
-t.insert({ "loc" : [3, -0.01] });
-t.insert({ "loc" : [3, -0.25] });
-t.insert({ "loc" : [3, -0.5] });
-t.insert({ "loc" : [3, -1] });
+t.insert({"loc": [3, 1]});
+t.insert({"loc": [3, 0.5]});
+t.insert({"loc": [3, 0.25]});
+t.insert({"loc": [3, -0.01]});
+t.insert({"loc": [3, -0.25]});
+t.insert({"loc": [3, -0.5]});
+t.insert({"loc": [3, -1]});
// OK!
-print( t.count() );
-assert.eq( 7, t.count({ "loc" : { "$within" : { "$box" : [ [2, -2], [46, 2] ] } } }), "Not all locations found!" );
-
+print(t.count());
+assert.eq(7,
+ t.count({"loc": {"$within": {"$box": [[2, -2], [46, 2]]}}}),
+ "Not all locations found!");
// Test normal lookup of a small square of points as a sanity check.
@@ -31,16 +32,24 @@ step = 1;
numItems = 0;
t.drop();
-t.ensureIndex({ loc : "2d" }, { max : max + epsilon / 2, min : min - epsilon / 2 });
+t.ensureIndex({loc: "2d"}, {max: max + epsilon / 2, min: min - epsilon / 2});
-for(var x = min; x <= max; x += step){
- for(var y = min; y <= max; y += step){
- t.insert({ "loc" : { x : x, y : y } });
- numItems++;
- }
+for (var x = min; x <= max; x += step) {
+ for (var y = min; y <= max; y += step) {
+ t.insert({"loc": {x: x, y: y}});
+ numItems++;
+ }
}
-assert.eq( numItems, t.count({ loc : { $within : { $box : [[min - epsilon / 3,
- min - epsilon / 3],
- [max + epsilon / 3,
- max + epsilon / 3]] } } }), "Not all locations found!");
+assert.eq(numItems,
+ t.count({
+ loc: {
+ $within: {
+ $box: [
+ [min - epsilon / 3, min - epsilon / 3],
+ [max + epsilon / 3, max + epsilon / 3]
+ ]
+ }
+ }
+ }),
+ "Not all locations found!");
diff --git a/jstests/core/geo_fiddly_box2.js b/jstests/core/geo_fiddly_box2.js
index c0be0f2c8dc..6a40b5032b5 100644
--- a/jstests/core/geo_fiddly_box2.js
+++ b/jstests/core/geo_fiddly_box2.js
@@ -1,32 +1,33 @@
// Reproduces simple test for SERVER-2115
-// The setup to reproduce is to create a set of points and a really big bounds so that we are required to do
+// The setup to reproduce is to create a set of points and a really big bounds so that we are
+// required to do
// exact lookups on the points to get correct results.
t = db.geo_fiddly_box2;
t.drop();
-t.insert( { "letter" : "S", "position" : [ -3, 0 ] } );
-t.insert( { "letter" : "C", "position" : [ -2, 0 ] } );
-t.insert( { "letter" : "R", "position" : [ -1, 0 ] } );
-t.insert( { "letter" : "A", "position" : [ 0, 0 ] } );
-t.insert( { "letter" : "B", "position" : [ 1, 0 ] } );
-t.insert( { "letter" : "B", "position" : [ 2, 0 ] } );
-t.insert( { "letter" : "L", "position" : [ 3, 0 ] } );
-t.insert( { "letter" : "E", "position" : [ 4, 0 ] } );
+t.insert({"letter": "S", "position": [-3, 0]});
+t.insert({"letter": "C", "position": [-2, 0]});
+t.insert({"letter": "R", "position": [-1, 0]});
+t.insert({"letter": "A", "position": [0, 0]});
+t.insert({"letter": "B", "position": [1, 0]});
+t.insert({"letter": "B", "position": [2, 0]});
+t.insert({"letter": "L", "position": [3, 0]});
+t.insert({"letter": "E", "position": [4, 0]});
-t.ensureIndex( { position : "2d" } );
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } );
-assert.eq( 4, result.count() );
+t.ensureIndex({position: "2d"});
+result = t.find({"position": {"$within": {"$box": [[-3, -1], [0, 1]]}}});
+assert.eq(4, result.count());
-t.dropIndex( { position : "2d" } );
-t.ensureIndex( { position : "2d" }, { min : -10000000, max : 10000000 } );
+t.dropIndex({position: "2d"});
+t.ensureIndex({position: "2d"}, {min: -10000000, max: 10000000});
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } );
-assert.eq( 4, result.count() );
+result = t.find({"position": {"$within": {"$box": [[-3, -1], [0, 1]]}}});
+assert.eq(4, result.count());
-t.dropIndex( { position : "2d" } );
-t.ensureIndex( { position : "2d" }, { min : -1000000000, max : 1000000000 } );
+t.dropIndex({position: "2d"});
+t.ensureIndex({position: "2d"}, {min: -1000000000, max: 1000000000});
-result = t.find( { "position" : { "$within" : { "$box" : [ [ -3, -1 ], [ 0, 1 ] ] } } } );
-assert.eq( 4, result.count() );
+result = t.find({"position": {"$within": {"$box": [[-3, -1], [0, 1]]}}});
+assert.eq(4, result.count());
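geo_fiddly_box2 repeats the same $box query under progressively larger index bounds, since the 2d min/max options change how coordinates are hashed but must not change query results. A small illustrative version of that check (bounds and values chosen arbitrarily):
// Sketch only: the same $box query should match regardless of the 2d index bounds.
var fb = db.geo_bounds_sketch;  // hypothetical collection
fb.drop();
fb.insert({position: [3, 0]});
[{min: -180, max: 180}, {min: -10000000, max: 10000000}].forEach(function(bounds) {
    fb.dropIndexes();
    fb.ensureIndex({position: "2d"}, bounds);
    assert.eq(1, fb.find({position: {$within: {$box: [[-3, -1], [4, 1]]}}}).count());
});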
diff --git a/jstests/core/geo_group.js b/jstests/core/geo_group.js
index b19c35ec738..9ee5a76b7ea 100644
--- a/jstests/core/geo_group.js
+++ b/jstests/core/geo_group.js
@@ -3,35 +3,39 @@ t.drop();
n = 1;
var bulk = t.initializeUnorderedBulkOp();
-for ( var x=-100; x<100; x+=2 ){
- for ( var y=-100; y<100; y+=2 ){
- bulk.insert( { _id : n++ , loc : [ x , y ] } );
+for (var x = -100; x < 100; x += 2) {
+ for (var y = -100; y < 100; y += 2) {
+ bulk.insert({_id: n++, loc: [x, y]});
}
}
assert.writeOK(bulk.execute());
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
// Test basic count with $near
assert.eq(t.find().count(), 10000);
-assert.eq(t.find( { loc : { $within : {$center : [[56,8], 10]}}}).count(), 81);
-assert.eq(t.find( { loc : { $near : [56, 8, 10] } } ).count(), 81);
+assert.eq(t.find({loc: {$within: {$center: [[56, 8], 10]}}}).count(), 81);
+assert.eq(t.find({loc: {$near: [56, 8, 10]}}).count(), 81);
// Test basic group that effectively does a count
-assert.eq(
- t.group( {
- reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1}; },
- initial : { sums:{count:0} } }
- ),
- [ { "sums" : { "count" : 10000 } } ]
-);
+assert.eq(t.group({
+ reduce: function(obj, prev) {
+ prev.sums = {
+ count: prev.sums.count + 1
+ };
+ },
+ initial: {sums: {count: 0}}
+}),
+ [{"sums": {"count": 10000}}]);
// Test basic group + $near that does a count
-assert.eq(
- t.group( {
- reduce : function (obj, prev) { prev.sums = { count : prev.sums.count + 1}; },
- initial : { sums:{count:0} },
- cond : { loc : { $near : [56, 8, 10] } } }
- ),
- [ { "sums" : { "count" : 81 } } ]
-);
+assert.eq(t.group({
+ reduce: function(obj, prev) {
+ prev.sums = {
+ count: prev.sums.count + 1
+ };
+ },
+ initial: {sums: {count: 0}},
+ cond: {loc: {$near: [56, 8, 10]}}
+}),
+ [{"sums": {"count": 81}}]);
diff --git a/jstests/core/geo_haystack1.js b/jstests/core/geo_haystack1.js
index f29f407c52e..5abb166a6f9 100644
--- a/jstests/core/geo_haystack1.js
+++ b/jstests/core/geo_haystack1.js
@@ -2,39 +2,41 @@
t = db.geo_haystack1;
t.drop();
-function distance( a , b ){
+function distance(a, b) {
var x = a[0] - b[0];
var y = a[1] - b[1];
- return Math.sqrt( ( x * x ) + ( y * y ) );
+ return Math.sqrt((x * x) + (y * y));
}
-function distanceTotal( a , arr , f ){
+function distanceTotal(a, arr, f) {
var total = 0;
- for ( var i=0; i<arr.length; i++ ){
- total += distance( a , arr[i][f] );
+ for (var i = 0; i < arr.length; i++) {
+ total += distance(a, arr[i][f]);
}
return total;
}
-queries = [
- { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
-];
-
-answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } );
+queries = [{near: [7, 8], maxDistance: 3, search: {z: 3}}, ];
+answers = queries.map(function() {
+ return {
+ totalDistance: 0,
+ results: []
+ };
+});
n = 0;
-for ( x=0; x<20; x++ ){
- for ( y=0; y<20; y++ ){
- t.insert( { _id : n , loc : [ x , y ] , z : n % 5 } );
-
- for ( i=0; i<queries.length; i++ ){
- var d = distance( queries[i].near , [ x , y ] );
- if ( d > queries[i].maxDistance )
+for (x = 0; x < 20; x++) {
+ for (y = 0; y < 20; y++) {
+ t.insert({_id: n, loc: [x, y], z: n % 5});
+
+ for (i = 0; i < queries.length; i++) {
+ var d = distance(queries[i].near, [x, y]);
+ if (d > queries[i].maxDistance)
continue;
- if ( queries[i].search.z != n % 5 )
+ if (queries[i].search.z != n % 5)
continue;
- answers[i].results.push( { _id : n , loc : [ x , y ]} );
+ answers[i].results.push({_id: n, loc: [x, y]});
answers[i].totalDistance += d;
}
@@ -42,18 +44,18 @@ for ( x=0; x<20; x++ ){
}
}
-t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
-
-for ( i=0; i<queries.length; i++ ){
- print( "---------" );
- printjson( queries[i] );
- res = t.runCommand( "geoSearch" , queries[i] );
- print( "\t" + tojson( res.stats ) );
- print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
- assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" );
- assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" );
- //printjson( res );
- //printjson( answers[i].length );
+t.ensureIndex({loc: "geoHaystack", z: 1}, {bucketSize: .7});
+
+for (i = 0; i < queries.length; i++) {
+ print("---------");
+ printjson(queries[i]);
+ res = t.runCommand("geoSearch", queries[i]);
+ print("\t" + tojson(res.stats));
+ print("\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n);
+ assert.eq(answers[i].results.length, res.stats.n, "num:" + i + " number matches");
+ assert.eq(answers[i].totalDistance,
+ distanceTotal(queries[i].near, res.results, "loc"),
+ "num:" + i + " totalDistance");
+ // printjson( res );
+ // printjson( answers[i].length );
}
-
-
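The haystack tests above verify bucketed geoHaystack indexes through the geoSearch command, again by comparing against brute-force distances. A compact sketch of the command shape (collection name, bucketSize, and coordinates are illustrative):
// Sketch only: a geoHaystack index answering a geoSearch command.
var h = db.geo_haystack_sketch;  // hypothetical collection
h.drop();
h.insert({loc: [1, 1], z: 3});
h.insert({loc: [1.2, 1.1], z: 3});
h.insert({loc: [15, 15], z: 3});
h.ensureIndex({loc: "geoHaystack", z: 1}, {bucketSize: 1});
var res = h.runCommand("geoSearch", {near: [1, 1], maxDistance: 2, search: {z: 3}, limit: 10});
assert.commandWorked(res);
assert.eq(2, res.stats.n);  // only the two points within distance 2 of [1, 1]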
diff --git a/jstests/core/geo_haystack2.js b/jstests/core/geo_haystack2.js
index dee935b9b2b..cb684239a63 100644
--- a/jstests/core/geo_haystack2.js
+++ b/jstests/core/geo_haystack2.js
@@ -2,40 +2,41 @@
t = db.geo_haystack2;
t.drop();
-function distance( a , b ){
+function distance(a, b) {
var x = a[0] - b[0];
var y = a[1] - b[1];
- return Math.sqrt( ( x * x ) + ( y * y ) );
+ return Math.sqrt((x * x) + (y * y));
}
-function distanceTotal( a , arr , f ){
+function distanceTotal(a, arr, f) {
var total = 0;
- for ( var i=0; i<arr.length; i++ ){
- total += distance( a , arr[i][f] );
+ for (var i = 0; i < arr.length; i++) {
+ total += distance(a, arr[i][f]);
}
return total;
}
-queries = [
- { near : [ 7 , 8 ] , maxDistance : 3 , search : { z : 3 } } ,
-];
-
-answers = queries.map( function(){ return { totalDistance : 0 , results : [] }; } );
+queries = [{near: [7, 8], maxDistance: 3, search: {z: 3}}, ];
+answers = queries.map(function() {
+ return {
+ totalDistance: 0,
+ results: []
+ };
+});
n = 0;
-for ( x=0; x<20; x++ ){
- for ( y=0; y<20; y++ ){
- t.insert( { _id : n , loc : [ x , y ] , z : [ n % 10 , ( n + 5 ) % 10 ] } );
-
- for ( i=0; i<queries.length; i++ ){
- var d = distance( queries[i].near , [ x , y ] );
- if ( d > queries[i].maxDistance )
+for (x = 0; x < 20; x++) {
+ for (y = 0; y < 20; y++) {
+ t.insert({_id: n, loc: [x, y], z: [n % 10, (n + 5) % 10]});
+
+ for (i = 0; i < queries.length; i++) {
+ var d = distance(queries[i].near, [x, y]);
+ if (d > queries[i].maxDistance)
continue;
- if ( queries[i].search.z != n % 10 &&
- queries[i].search.z != ( n + 5 ) % 10 )
+ if (queries[i].search.z != n % 10 && queries[i].search.z != (n + 5) % 10)
continue;
- answers[i].results.push( { _id : n , loc : [ x , y ] } );
+ answers[i].results.push({_id: n, loc: [x, y]});
answers[i].totalDistance += d;
}
@@ -43,18 +44,18 @@ for ( x=0; x<20; x++ ){
}
}
-t.ensureIndex( { loc : "geoHaystack" , z : 1 } , { bucketSize : .7 } );
-
-for ( i=0; i<queries.length; i++ ){
- print( "---------" );
- printjson( queries[i] );
- res = t.runCommand( "geoSearch" , queries[i] );
- print( "\t" + tojson( res.stats ) );
- print( "\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n );
- assert.eq( answers[i].results.length , res.stats.n, "num:"+ i + " number matches" );
- assert.eq( answers[i].totalDistance , distanceTotal( queries[i].near , res.results , "loc" ), "num:"+ i + " totalDistance" );
- //printjson( res );
- //printjson( answers[i].length );
+t.ensureIndex({loc: "geoHaystack", z: 1}, {bucketSize: .7});
+
+for (i = 0; i < queries.length; i++) {
+ print("---------");
+ printjson(queries[i]);
+ res = t.runCommand("geoSearch", queries[i]);
+ print("\t" + tojson(res.stats));
+ print("\tshould have: " + answers[i].results.length + "\t actually got: " + res.stats.n);
+ assert.eq(answers[i].results.length, res.stats.n, "num:" + i + " number matches");
+ assert.eq(answers[i].totalDistance,
+ distanceTotal(queries[i].near, res.results, "loc"),
+ "num:" + i + " totalDistance");
+ // printjson( res );
+ // printjson( answers[i].length );
}
-
-
diff --git a/jstests/core/geo_haystack3.js b/jstests/core/geo_haystack3.js
index 4c55e94ad7f..1357ccf4f51 100644
--- a/jstests/core/geo_haystack3.js
+++ b/jstests/core/geo_haystack3.js
@@ -5,52 +5,50 @@ t.drop();
// Tests for geo haystack validity
//
-t.insert({ pos : "invalid" });
-assert.commandFailed(t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 }));
+t.insert({pos: "invalid"});
+assert.commandFailed(t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1}));
t.drop();
-t.insert({ pos : [] });
-assert.commandFailed(t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 }));
+t.insert({pos: []});
+assert.commandFailed(t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1}));
t.drop();
-t.insert({ pos : [1, 2] });
-assert.commandWorked(t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 }));
+t.insert({pos: [1, 2]});
+assert.commandWorked(t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1}));
t.drop();
-t.insert({ pos : {x : 1, y : 2} });
-assert.commandWorked(t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 }));
+t.insert({pos: {x: 1, y: 2}});
+assert.commandWorked(t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1}));
t.drop();
-t.insert({ pos : {x : 1, y : 2, z : 3} });
-assert.commandWorked(t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 }));
+t.insert({pos: {x: 1, y: 2, z: 3}});
+assert.commandWorked(t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1}));
t.drop();
//
// Tests for geo haystack search
//
-t.insert({ pos : { long : 34, lat : 33 }});
-t.insert({ pos : { long : 34.2, lat : 33.3 }, type : ["bar", "restaurant" ]});
-t.insert({ pos : { long : 34.2, lat : 37.3 }, type : ["bar", "chicken" ]});
-t.insert({ pos : { long : 59.1, lat : 87.2 }, type : ["baz", "office" ]});
-t.ensureIndex({ pos : "geoHaystack", type : 1 }, { bucketSize : 1 });
+t.insert({pos: {long: 34, lat: 33}});
+t.insert({pos: {long: 34.2, lat: 33.3}, type: ["bar", "restaurant"]});
+t.insert({pos: {long: 34.2, lat: 37.3}, type: ["bar", "chicken"]});
+t.insert({pos: {long: 59.1, lat: 87.2}, type: ["baz", "office"]});
+t.ensureIndex({pos: "geoHaystack", type: 1}, {bucketSize: 1});
// This only matches the first insert. What do we want? First 3 or just the first?
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : {}, limit : 30 });
+res = t.runCommand("geoSearch", {near: [33, 33], maxDistance: 6, search: {}, limit: 30});
assert.eq(1, res.stats.n, "Right # of matches");
assert.eq(34, res.results[0].pos.long, "expected longitude");
assert.eq(33, res.results[0].pos.lat, "expected latitude");
// This matches the middle 2 of the 4 elements above.
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
- limit : 2 });
+res = t.runCommand("geoSearch", {near: [33, 33], maxDistance: 6, search: {type: "bar"}, limit: 2});
assert.eq(2, res.stats.n, "Right # of matches");
assert.eq("bar", res.results[0].type[0], "expected value for type");
assert.eq("bar", res.results[1].type[0], "expected value for type");
assert.neq(res.results[0].type[1], res.results[1].type[1], "should get 2 diff results");
// This is a test for the limit being reached/only 1 returned.
-res = t.runCommand("geoSearch", { near : [33, 33], maxDistance : 6, search : { type : "bar" },
- limit : 1 });
+res = t.runCommand("geoSearch", {near: [33, 33], maxDistance: 6, search: {type: "bar"}, limit: 1});
assert.eq(1, res.stats.n, "Right # of matches");
assert.eq("bar", res.results[0].type[0], "expected value for type");
diff --git a/jstests/core/geo_invalid_2d_params.js b/jstests/core/geo_invalid_2d_params.js
index 72bb8e443fe..738fa1ad353 100644
--- a/jstests/core/geo_invalid_2d_params.js
+++ b/jstests/core/geo_invalid_2d_params.js
@@ -1,8 +1,8 @@
var t = db.geo_invalid_2d_params;
t.drop();
-assert.commandFailed(t.ensureIndex({ loc : "2d" }, { bits : 33 }));
-assert.commandFailed(t.ensureIndex({ loc : "2d" }, { min : -1, max : -1 }));
-assert.commandFailed(t.ensureIndex({ loc : "2d" }, { bits : -1 }));
-assert.commandFailed(t.ensureIndex({ loc : "2d" }, { min : 10, max : 9 }));
-assert.commandWorked(t.ensureIndex({ loc : "2d" }, { bits : 1, min : -1, max : 1 })); \ No newline at end of file
+assert.commandFailed(t.ensureIndex({loc: "2d"}, {bits: 33}));
+assert.commandFailed(t.ensureIndex({loc: "2d"}, {min: -1, max: -1}));
+assert.commandFailed(t.ensureIndex({loc: "2d"}, {bits: -1}));
+assert.commandFailed(t.ensureIndex({loc: "2d"}, {min: 10, max: 9}));
+assert.commandWorked(t.ensureIndex({loc: "2d"}, {bits: 1, min: -1, max: 1})); \ No newline at end of file
diff --git a/jstests/core/geo_invalid_polygon.js b/jstests/core/geo_invalid_polygon.js
index 1e31e0b2fe5..c3d244a504f 100644
--- a/jstests/core/geo_invalid_polygon.js
+++ b/jstests/core/geo_invalid_polygon.js
@@ -7,15 +7,7 @@ t.drop();
// "Exterior shell of polygon is invalid".
var geometry = {
type: "Polygon",
- coordinates: [
- [
- [ 0, 0 ],
- [ 0, 1 ],
- [ 1, 1 ],
- [-2,-1 ],
- [ 0, 0 ]
- ]
- ]
+ coordinates: [[[0, 0], [0, 1], [1, 1], [-2, -1], [0, 0]]]
};
t.insert({_id: 42, geometry: geometry});
@@ -23,8 +15,5 @@ var err = t.createIndex({geometry: '2dsphere'});
assert.commandFailed(err);
// Document's _id should be in error message.
-assert(
- -1 != err.errmsg.indexOf('42'),
- "Error message didn't contain document _id.\nMessage: \"" + err.errmsg
- + '"\n'
-);
+assert(-1 != err.errmsg.indexOf('42'),
+ "Error message didn't contain document _id.\nMessage: \"" + err.errmsg + '"\n');
diff --git a/jstests/core/geo_mapreduce.js b/jstests/core/geo_mapreduce.js
index 810c2605426..e15a4911763 100644
--- a/jstests/core/geo_mapreduce.js
+++ b/jstests/core/geo_mapreduce.js
@@ -4,46 +4,50 @@
// setup test collection
db.apples.drop();
-db.apples.insert( { "geo" : { "lat" : 32.68331909, "long" : 69.41610718 }, "apples" : 5 } );
-db.apples.insert( { "geo" : { "lat" : 35.01860809, "long" : 70.92027283 }, "apples" : 2 } );
-db.apples.insert( { "geo" : { "lat" : 31.11639023, "long" : 64.19970703 }, "apples" : 11 } );
-db.apples.insert( { "geo" : { "lat" : 32.64500046, "long" : 69.36251068 }, "apples" : 4 } );
-db.apples.insert( { "geo" : { "lat" : 33.23638916, "long" : 69.81360626 }, "apples" : 9 } );
-db.apples.ensureIndex( { "geo" : "2d" } );
-
-center = [ 32.68, 69.41 ];
-radius = 10 / 111; // 10km; 1 arcdegree ~= 111km
-geo_query = { geo : { '$within' : { '$center' : [ center, radius ] } } };
+db.apples.insert({"geo": {"lat": 32.68331909, "long": 69.41610718}, "apples": 5});
+db.apples.insert({"geo": {"lat": 35.01860809, "long": 70.92027283}, "apples": 2});
+db.apples.insert({"geo": {"lat": 31.11639023, "long": 64.19970703}, "apples": 11});
+db.apples.insert({"geo": {"lat": 32.64500046, "long": 69.36251068}, "apples": 4});
+db.apples.insert({"geo": {"lat": 33.23638916, "long": 69.81360626}, "apples": 9});
+db.apples.ensureIndex({"geo": "2d"});
+
+center = [32.68, 69.41];
+radius = 10 / 111; // 10km; 1 arcdegree ~= 111km
+geo_query = {
+ geo: {'$within': {'$center': [center, radius]}}
+};
// geo query on collection works fine
-res = db.apples.find( geo_query );
-assert.eq( 2, res.count() );
+res = db.apples.find(geo_query);
+assert.eq(2, res.count());
// map function
m = function() {
- emit( null, { "apples" : this.apples } );
+ emit(null, {"apples": this.apples});
};
// reduce function
r = function(key, values) {
- var total = 0;
- for ( var i = 0; i < values.length; i++ ) {
- total += values[i].apples;
- }
- return { "apples" : total };
+ var total = 0;
+ for (var i = 0; i < values.length; i++) {
+ total += values[i].apples;
+ }
+ return {
+ "apples": total
+ };
};
// mapreduce without geo query works fine
-res = db.apples.mapReduce( m, r, { out : { inline : 1 } } );
+res = db.apples.mapReduce(m, r, {out: {inline: 1}});
-printjson( res );
+printjson(res);
total = res.results[0];
-assert.eq( 31, total.value.apples );
+assert.eq(31, total.value.apples);
// mapreduce with regular query works fine too
-res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : { apples : { '$lt' : 9 } } } );
+res = db.apples.mapReduce(m, r, {out: {inline: 1}, query: {apples: {'$lt': 9}}});
total = res.results[0];
-assert.eq( 11, total.value.apples );
+assert.eq(11, total.value.apples);
// mapreduce with geo query gives error on mongodb version 1.6.2
// uncaught exception: map reduce failed: {
@@ -51,6 +55,6 @@ assert.eq( 11, total.value.apples );
// "assertionCode" : 13285,
// "errmsg" : "db assertion failure",
// "ok" : 0 }
-res = db.apples.mapReduce( m, r, { out : { inline : 1 }, query : geo_query } );
+res = db.apples.mapReduce(m, r, {out: {inline: 1}, query: geo_query});
total = res.results[0];
-assert.eq( 9, total.value.apples );
+assert.eq(9, total.value.apples);
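The radius used above converts kilometres to degrees with the rough rule that one arc degree is about 111 km, which is the unit legacy 2d $center radii expect. Spelled out with illustrative numbers:
// Sketch only: a 10 km radius expressed in degrees for a legacy 2d $center query.
var kmPerDegree = 111;                   // one arc degree is roughly 111 km
var radiusInDegrees = 10 / kmPerDegree;  // ~0.09, matching the radius computed above
printjson({geo: {$within: {$center: [[32.68, 69.41], radiusInDegrees]}}});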
diff --git a/jstests/core/geo_mapreduce2.js b/jstests/core/geo_mapreduce2.js
index 3911d02cf81..d71eb8ef216 100644
--- a/jstests/core/geo_mapreduce2.js
+++ b/jstests/core/geo_mapreduce2.js
@@ -3,34 +3,39 @@
var coll = db.geoMR2;
coll.drop();
-for( var i = 0; i < 300; i++ )
- coll.insert({ i : i, location : [ 10, 20 ] });
-
-coll.ensureIndex({ location : "2d" });
+for (var i = 0; i < 300; i++)
+ coll.insert({i: i, location: [10, 20]});
+
+coll.ensureIndex({location: "2d"});
// map function
m = function() {
- emit( null, { count : this.i } );
+ emit(null, {count: this.i});
};
// reduce function
-r = function( key, values ) {
-
+r = function(key, values) {
+
var total = 0;
- for ( var i = 0; i < values.length; i++ ) {
+ for (var i = 0; i < values.length; i++) {
total += values[i].count;
}
-
- return { count : total };
+
+ return {
+ count: total
+ };
};
-try{ coll.mapReduce( m, r,
- { out : coll.getName() + "_mr",
- sort : { _id : 1 },
- query : { 'location' : { $within : { $centerSphere : [[ 10, 20 ], 0.01 ] } } } });
+try {
+ coll.mapReduce(m,
+ r,
+ {
+ out: coll.getName() + "_mr",
+ sort: {_id: 1},
+ query: {'location': {$within: {$centerSphere: [[10, 20], 0.01]}}}
+ });
-}
-catch( e ){
+} catch (e) {
// This should occur, since we can't in-mem sort for mreduce
- printjson( e );
+ printjson(e);
}
diff --git a/jstests/core/geo_max.js b/jstests/core/geo_max.js
index 1dcbf39c907..03771ea34d4 100644
--- a/jstests/core/geo_max.js
+++ b/jstests/core/geo_max.js
@@ -5,47 +5,58 @@ load("jstests/libs/geo_near_random.js");
var test = new GeoNearRandomTest("geo_near_max");
-test.insertPts(/*numPts*/1000, /*indexBounds*/{min:-180, max:180}, /*scale*/0.9);
+test.insertPts(/*numPts*/ 1000, /*indexBounds*/ {min: -180, max: 180}, /*scale*/ 0.9);
-test.t.insert({loc: [ 180, 0]});
+test.t.insert({loc: [180, 0]});
test.t.insert({loc: [-180, 0]});
-test.t.insert({loc: [ 179.999, 0]});
+test.t.insert({loc: [179.999, 0]});
test.t.insert({loc: [-179.999, 0]});
-assertXIsNegative = function(obj) { assert.lt(obj.loc[0], 0); };
-assertXIsPositive = function(obj) { assert.gt(obj.loc[0], 0); };
+assertXIsNegative = function(obj) {
+ assert.lt(obj.loc[0], 0);
+};
+assertXIsPositive = function(obj) {
+ assert.gt(obj.loc[0], 0);
+};
-assert.eq(test.t.count({loc:{$within: {$center:[[ 180, 0], 1]}}}), 2);
-assert.eq(test.t.count({loc:{$within: {$center:[[-180, 0], 1]}}}), 2);
-test.t.find({loc:{$within: {$center:[[ 180, 0], 1]}}}).forEach(assertXIsPositive);
-test.t.find({loc:{$within: {$center:[[-180, 0], 1]}}}).forEach(assertXIsNegative);
+assert.eq(test.t.count({loc: {$within: {$center: [[180, 0], 1]}}}), 2);
+assert.eq(test.t.count({loc: {$within: {$center: [[-180, 0], 1]}}}), 2);
+test.t.find({loc: {$within: {$center: [[180, 0], 1]}}}).forEach(assertXIsPositive);
+test.t.find({loc: {$within: {$center: [[-180, 0], 1]}}}).forEach(assertXIsNegative);
-var oneDegree = Math.PI / 180; // in radians
+var oneDegree = Math.PI / 180; // in radians
// errors out due to SERVER-1760
if (0) {
-assert.eq(test.t.count({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}), 2);
-assert.eq(test.t.count({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}), 2);
-test.t.find({loc:{$within: {$centerSphere:[[ 180, 0], oneDegree]}}}).forEach(assertXIsPositive);
-test.t.find({loc:{$within: {$centerSphere:[[-180, 0], oneDegree]}}}).forEach(assertXIsNegative);
+ assert.eq(test.t.count({loc: {$within: {$centerSphere: [[180, 0], oneDegree]}}}), 2);
+ assert.eq(test.t.count({loc: {$within: {$centerSphere: [[-180, 0], oneDegree]}}}), 2);
+ test.t.find({loc: {$within: {$centerSphere: [[180, 0], oneDegree]}}})
+ .forEach(assertXIsPositive);
+ test.t.find({loc: {$within: {$centerSphere: [[-180, 0], oneDegree]}}})
+ .forEach(assertXIsNegative);
}
-assert.eq(test.t.count({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}), 2);
-assert.eq(test.t.count({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}), 2);
-test.t.find({loc:{$within: {$box:[[ 180, 0.1], [ 179, -0.1]]}}}).forEach(assertXIsPositive);
-test.t.find({loc:{$within: {$box:[[-180, 0.1], [-179, -0.1]]}}}).forEach(assertXIsNegative);
+assert.eq(test.t.count({loc: {$within: {$box: [[180, 0.1], [179, -0.1]]}}}), 2);
+assert.eq(test.t.count({loc: {$within: {$box: [[-180, 0.1], [-179, -0.1]]}}}), 2);
+test.t.find({loc: {$within: {$box: [[180, 0.1], [179, -0.1]]}}}).forEach(assertXIsPositive);
+test.t.find({loc: {$within: {$box: [[-180, 0.1], [-179, -0.1]]}}}).forEach(assertXIsNegative);
-assert.eq(test.t.count({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}), 2);
-assert.eq(test.t.count({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}), 2);
-test.t.find({loc:{$within: {$polygon:[[ 180, 0], [ 179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsPositive);
-test.t.find({loc:{$within: {$polygon:[[-180, 0], [-179, 0], [ 179.5, 0.5]]}}}).forEach(assertXIsNegative);
+assert.eq(test.t.count({loc: {$within: {$polygon: [[180, 0], [179, 0], [179.5, 0.5]]}}}), 2);
+assert.eq(test.t.count({loc: {$within: {$polygon: [[-180, 0], [-179, 0], [179.5, 0.5]]}}}), 2);
+test.t.find({loc: {$within: {$polygon: [[180, 0], [179, 0], [179.5, 0.5]]}}})
+ .forEach(assertXIsPositive);
+test.t.find({loc: {$within: {$polygon: [[-180, 0], [-179, 0], [179.5, 0.5]]}}})
+ .forEach(assertXIsNegative);
-assert.eq(test.t.find({loc:{$near:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}]);
-assert.eq(test.t.find({loc:{$near:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}]);
+assert.eq(test.t.find({loc: {$near: [180, 0]}}, {_id: 0}).limit(2).toArray(),
+ [{loc: [180, 0]}, {loc: [179.999, 0]}]);
+assert.eq(test.t.find({loc: {$near: [-180, 0]}}, {_id: 0}).limit(2).toArray(),
+ [{loc: [-180, 0]}, {loc: [-179.999, 0]}]);
// These will need to change when SERVER-1760 is fixed
-printjson(test.t.find({loc:{$nearSphere:[ 180, 0]}}, {_id:0}).limit(2).explain());
-assert.eq(test.t.find({loc:{$nearSphere:[ 180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [ 180, 0]}, {loc: [ 179.999, 0]}]);
-printjson(test.t.find({loc:{$nearSphere:[-180, 0]}}, {_id:0}).limit(2).explain());
-assert.eq(test.t.find({loc:{$nearSphere:[-180, 0]}}, {_id:0}).limit(2).toArray(), [{loc: [-180, 0]}, {loc: [-179.999, 0]}]);
-
+printjson(test.t.find({loc: {$nearSphere: [180, 0]}}, {_id: 0}).limit(2).explain());
+assert.eq(test.t.find({loc: {$nearSphere: [180, 0]}}, {_id: 0}).limit(2).toArray(),
+ [{loc: [180, 0]}, {loc: [179.999, 0]}]);
+printjson(test.t.find({loc: {$nearSphere: [-180, 0]}}, {_id: 0}).limit(2).explain());
+assert.eq(test.t.find({loc: {$nearSphere: [-180, 0]}}, {_id: 0}).limit(2).toArray(),
+ [{loc: [-180, 0]}, {loc: [-179.999, 0]}]);
diff --git a/jstests/core/geo_mindistance.js b/jstests/core/geo_mindistance.js
index b429eacb708..6a2329bc524 100644
--- a/jstests/core/geo_mindistance.js
+++ b/jstests/core/geo_mindistance.js
@@ -8,10 +8,11 @@ t.drop();
// Useful constants and functions.
//
-var km = 1000,
- earthRadiusMeters = 6378.1 * km;
+var km = 1000, earthRadiusMeters = 6378.1 * km;
-function metersToRadians(m) { return m / earthRadiusMeters; }
+function metersToRadians(m) {
+ return m / earthRadiusMeters;
+}
/* Count documents within some radius of (0, 0), in kilometers.
* With this function we can use the existing $maxDistance option to test
@@ -46,8 +47,11 @@ for (var x = 0; x <= 10; x += 1) {
/* $minDistance is supported for 2dsphere index only, not 2d or geoHaystack. */
t.ensureIndex({loc: "2dsphere"});
-var n_docs = t.count(),
- geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
+var n_docs = t.count(), geoJSONPoint =
+ {
+ type: 'Point',
+ coordinates: [0, 0]
+ },
legacyPoint = [0, 0];
//
@@ -55,31 +59,23 @@ var n_docs = t.count(),
// min/maxDistance are in meters.
//
-var n_min1400_count = t.find({loc: {
- $near: {$geometry: geoJSONPoint, $minDistance: 1400 * km
-}}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $near (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-var n_bw500_and_1000_count = t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: 500 * km,
- $maxDistance: 1000 * km
-}}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $near (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
+var n_min1400_count =
+ t.find({loc: {$near: {$geometry: geoJSONPoint, $minDistance: 1400 * km}}}).count();
+
+assert.eq(n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400)) +
+ " points $near (0, 0) with $minDistance 1400 km, got " + n_min1400_count);
+
+var n_bw500_and_1000_count = t.find({
+ loc: {$near: {$geometry: geoJSONPoint, $minDistance: 500 * km, $maxDistance: 1000 * km}}
+}).count();
+
+assert.eq(n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500)) +
+ " points $near (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got " +
+ n_bw500_and_1000_count);
//
// $nearSphere with 2dsphere index can take a legacy or GeoJSON point.
@@ -87,63 +83,49 @@ assert.eq(
// min/maxDistance are in radians.
//
-n_min1400_count = t.find({loc: {
- $nearSphere: legacyPoint, $minDistance: metersToRadians(1400 * km)
-}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-n_bw500_and_1000_count = t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: metersToRadians(500 * km),
- $maxDistance: metersToRadians(1000 * km)
-}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
+n_min1400_count =
+ t.find({loc: {$nearSphere: legacyPoint, $minDistance: metersToRadians(1400 * km)}}).count();
+
+assert.eq(n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400)) +
+ " points $nearSphere (0, 0) with $minDistance 1400 km, got " + n_min1400_count);
+
+n_bw500_and_1000_count = t.find({
+ loc: {
+ $nearSphere: legacyPoint,
+ $minDistance: metersToRadians(500 * km),
+ $maxDistance: metersToRadians(1000 * km)
+ }
+}).count();
+
+assert.eq(n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500)) +
+ " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got " +
+ n_bw500_and_1000_count);
//
// Test $nearSphere with GeoJSON point.
// min/maxDistance are in meters.
//
-n_min1400_count = t.find({loc: {
- $nearSphere: geoJSONPoint, $minDistance: 1400 * km
-}}).count();
-
-assert.eq(
- n_docs - n_docs_within(1400),
- n_min1400_count,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points $nearSphere (0, 0) with $minDistance 1400 km, got "
- + n_min1400_count
-);
-
-n_bw500_and_1000_count = t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: 500 * km,
- $maxDistance: 1000 * km
-}}).count();
-
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- n_bw500_and_1000_count,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + n_bw500_and_1000_count
-);
+n_min1400_count = t.find({loc: {$nearSphere: geoJSONPoint, $minDistance: 1400 * km}}).count();
+
+assert.eq(n_docs - n_docs_within(1400),
+ n_min1400_count,
+ "Expected " + (n_docs - n_docs_within(1400)) +
+ " points $nearSphere (0, 0) with $minDistance 1400 km, got " + n_min1400_count);
+
+n_bw500_and_1000_count =
+ t.find({loc: {$nearSphere: geoJSONPoint, $minDistance: 500 * km, $maxDistance: 1000 * km}})
+ .count();
+assert.eq(n_docs_within(1000) - n_docs_within(500),
+ n_bw500_and_1000_count,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500)) +
+ " points $nearSphere (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got " +
+ n_bw500_and_1000_count);
//
// Test geoNear command with GeoJSON point.
@@ -156,13 +138,10 @@ var cmdResult = db.runCommand({
minDistance: 1400 * km,
spherical: true // spherical required for 2dsphere index
});
-assert.eq(
- n_docs - n_docs_within(1400),
- cmdResult.results.length,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points geoNear (0, 0) with $minDistance 1400 km, got "
- + cmdResult.results.length
-);
+assert.eq(n_docs - n_docs_within(1400),
+ cmdResult.results.length,
+ "Expected " + (n_docs - n_docs_within(1400)) +
+ " points geoNear (0, 0) with $minDistance 1400 km, got " + cmdResult.results.length);
cmdResult = db.runCommand({
geoNear: t.getName(),
@@ -171,13 +150,11 @@ cmdResult = db.runCommand({
maxDistance: 1000 * km,
spherical: true
});
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- cmdResult.results.length,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + cmdResult.results.length
-);
+assert.eq(n_docs_within(1000) - n_docs_within(500),
+ cmdResult.results.length,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500)) +
+ " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got " +
+ cmdResult.results.length);
//
// Test geoNear command with legacy point.
@@ -190,13 +167,10 @@ cmdResult = db.runCommand({
minDistance: metersToRadians(1400 * km),
spherical: true // spherical required for 2dsphere index
});
-assert.eq(
- n_docs - n_docs_within(1400),
- cmdResult.results.length,
- "Expected " + (n_docs - n_docs_within(1400))
- + " points geoNear (0, 0) with $minDistance 1400 km, got "
- + cmdResult.results.length
-);
+assert.eq(n_docs - n_docs_within(1400),
+ cmdResult.results.length,
+ "Expected " + (n_docs - n_docs_within(1400)) +
+ " points geoNear (0, 0) with $minDistance 1400 km, got " + cmdResult.results.length);
cmdResult = db.runCommand({
geoNear: t.getName(),
@@ -205,10 +179,8 @@ cmdResult = db.runCommand({
maxDistance: metersToRadians(1000 * km),
spherical: true
});
-assert.eq(
- n_docs_within(1000) - n_docs_within(500),
- cmdResult.results.length,
- "Expected " + (n_docs_within(1000) - n_docs_within(500))
- + " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got "
- + cmdResult.results.length
-);
+assert.eq(n_docs_within(1000) - n_docs_within(500),
+ cmdResult.results.length,
+ "Expected " + (n_docs_within(1000) - n_docs_within(500)) +
+ " points geoNear (0, 0) with $minDistance 500 km and $maxDistance 1000 km, got " +
+ cmdResult.results.length);
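Taken together, the geo_mindistance.js hunks above exercise one convention worth keeping in mind: on a 2dsphere index, $nearSphere and the geoNear command interpret minDistance/maxDistance in meters when the query point is GeoJSON, but in radians when it is a legacy [lng, lat] pair. A minimal, hedged sketch of that convention for the mongo shell (the scratch collection name and the 100 km threshold are illustrative, not taken from this patch):

    // Hedged sketch; run in the mongo shell against a scratch collection.
    var km = 1000;
    var earthRadiusMeters = 6378.1 * km;
    function metersToRadians(m) {
        return m / earthRadiusMeters;
    }
    var c = db.geo_units_sketch;  // illustrative name
    c.drop();
    c.insert({loc: {type: "Point", coordinates: [1, 0]}});  // ~111 km east of the origin
    c.ensureIndex({loc: "2dsphere"});
    // GeoJSON query point: minDistance is in meters.
    assert.eq(1, c.find({loc: {$nearSphere: {type: "Point", coordinates: [0, 0]},
                               $minDistance: 100 * km}}).itcount());
    // Legacy query point: the same threshold must be expressed in radians.
    assert.eq(1, c.find({loc: {$nearSphere: [0, 0],
                               $minDistance: metersToRadians(100 * km)}}).itcount());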
diff --git a/jstests/core/geo_mindistance_boundaries.js b/jstests/core/geo_mindistance_boundaries.js
index 80e933827b6..6cbae8015e9 100644
--- a/jstests/core/geo_mindistance_boundaries.js
+++ b/jstests/core/geo_mindistance_boundaries.js
@@ -12,79 +12,65 @@ t.ensureIndex({loc: "2dsphere"});
// Useful constants.
//
-var km = 1000,
- earthRadiusMeters = 6378.1 * km,
- geoJSONPoint = {type: 'Point', coordinates: [0, 0]},
+var km = 1000, earthRadiusMeters = 6378.1 * km, geoJSONPoint =
+ {
+ type: 'Point',
+ coordinates: [0, 0]
+ },
// One degree of longitude at the equator, about 111 km.
- degreeInMeters = 2 * Math.PI * earthRadiusMeters / 360,
- metersEpsilon = Number.MIN_VALUE;
+ degreeInMeters = 2 * Math.PI * earthRadiusMeters / 360, metersEpsilon = Number.MIN_VALUE;
/* Grow epsilon's exponent until epsilon exceeds the margin of error for the
* representation of degreeInMeters. The server uses 64-bit math, too, so we'll
* find the smallest epsilon the server can detect.
*/
-while (degreeInMeters + metersEpsilon == degreeInMeters) { metersEpsilon *= 2; }
+while (degreeInMeters + metersEpsilon == degreeInMeters) {
+ metersEpsilon *= 2;
+}
//
// Test boundary conditions for $near and GeoJSON, in meters.
//
-
// minDistance must be within the args to $near, not on the side.
-assert.throws(function() { t.find({loc:{$near:{$geometry: geoJSONPoint},
- $minDistance:0.1}}).itcount();});
+assert.throws(function() {
+ t.find({loc: {$near: {$geometry: geoJSONPoint}, $minDistance: 0.1}}).itcount();
+});
-assert.eq(
- 1, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
+assert.eq(1,
+ t.find({loc: {$near: {$geometry: geoJSONPoint, $minDistance: degreeInMeters}}}).itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin");
assert.eq(
- 1, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters - metersEpsilon
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
+ 1,
+ t.find({loc: {$near: {$geometry: geoJSONPoint, $minDistance: degreeInMeters - metersEpsilon}}})
+ .itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin");
assert.eq(
- 0, t.find({loc: {
- $near: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters + metersEpsilon
- }}}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
+ 0,
+ t.find({loc: {$near: {$geometry: geoJSONPoint, $minDistance: degreeInMeters + metersEpsilon}}})
+ .itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin");
//
// Test boundary conditions for $nearSphere and GeoJSON, in meters.
//
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: {$geometry: geoJSONPoint,
- $minDistance: degreeInMeters
- }}}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
+assert.eq(1,
+ t.find({loc: {$nearSphere: {$geometry: geoJSONPoint, $minDistance: degreeInMeters}}})
+ .itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin");
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: degreeInMeters - metersEpsilon
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
+assert.eq(1,
+ t.find({loc: {$nearSphere: geoJSONPoint, $minDistance: degreeInMeters - metersEpsilon}})
+ .itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin");
-assert.eq(
- 0, t.find({loc: {
- $nearSphere: geoJSONPoint,
- $minDistance: degreeInMeters + metersEpsilon
- }}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
+assert.eq(0,
+ t.find({loc: {$nearSphere: geoJSONPoint, $minDistance: degreeInMeters + metersEpsilon}})
+ .itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin");
//
// Test boundary conditions for $nearSphere and a legacy point, in radians.
@@ -93,32 +79,22 @@ assert.eq(
// supported.
//
-var legacyPoint = [0, 0],
- degreeInRadians = 2 * Math.PI / 360,
- radiansEpsilon = Number.MIN_VALUE;
+var legacyPoint = [0, 0], degreeInRadians = 2 * Math.PI / 360, radiansEpsilon = Number.MIN_VALUE;
-while (1 + radiansEpsilon == 1) { radiansEpsilon *= 2; }
+while (1 + radiansEpsilon == 1) {
+ radiansEpsilon *= 2;
+}
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance 1 degree from origin"
-);
+assert.eq(1,
+ t.find({loc: {$nearSphere: legacyPoint, $minDistance: degreeInRadians}}).itcount(),
+ "Expected to find (0, 1) within $minDistance 1 degree from origin");
-assert.eq(
- 1, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians - radiansEpsilon
- }}).itcount(),
- "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin"
-);
+assert.eq(1,
+ t.find({loc: {$nearSphere: legacyPoint, $minDistance: degreeInRadians - radiansEpsilon}})
+ .itcount(),
+ "Expected to find (0, 1) within $minDistance (1 degree - epsilon) from origin");
-assert.eq(
- 0, t.find({loc: {
- $nearSphere: legacyPoint,
- $minDistance: degreeInRadians + radiansEpsilon
- }}).itcount(),
- "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin"
-);
+assert.eq(0,
+ t.find({loc: {$nearSphere: legacyPoint, $minDistance: degreeInRadians + radiansEpsilon}})
+ .itcount(),
+ "Expected *not* to find (0, 1) within $minDistance (1 degree + epsilon) from origin");
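The reformatted while loop above is the whole trick behind these boundary tests: starting from Number.MIN_VALUE, epsilon is doubled until adding it to the reference distance actually changes the 64-bit value, which gives the smallest perturbation the server's double arithmetic can distinguish. A standalone, hedged sketch of the same idea (the printed message is illustrative):

    // Hedged sketch of the epsilon search used by geo_mindistance_boundaries.js.
    var degreeInMeters = 2 * Math.PI * 6378.1 * 1000 / 360;  // one degree at the equator, ~111 km
    var metersEpsilon = Number.MIN_VALUE;                    // smallest positive double
    while (degreeInMeters + metersEpsilon == degreeInMeters) {
        metersEpsilon *= 2;  // grow until the sum is representably larger
    }
    // degreeInMeters - metersEpsilon and degreeInMeters + metersEpsilon are now the
    // tightest values straddling the boundary, which is what the assertions probe.
    print("smallest detectable epsilon: " + metersEpsilon);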
diff --git a/jstests/core/geo_multikey0.js b/jstests/core/geo_multikey0.js
index 7d0ea57e329..827dd9a41a1 100644
--- a/jstests/core/geo_multikey0.js
+++ b/jstests/core/geo_multikey0.js
@@ -4,23 +4,28 @@ t = db.jstests_geo_multikey0;
t.drop();
// Check that conflicting constraints are satisfied by parallel array elements.
-t.save( {loc:[{x:20,y:30},{x:30,y:40}]} );
-assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
+t.save({loc: [{x: 20, y: 30}, {x: 30, y: 40}]});
+assert.eq(
+ 1, t.count({loc: {x: 20, y: 30}, $and: [{loc: {$gt: {x: 20, y: 35}, $lt: {x: 20, y: 34}}}]}));
-// Check that conflicting constraints are satisfied by parallel array elements with a 2d index on loc.
-if ( 0 ) { // SERVER-3793
-t.ensureIndex( {loc:'2d'} );
-assert.eq( 1, t.count( {loc:{x:20,y:30},$and:[{loc:{$gt:{x:20,y:35},$lt:{x:20,y:34}}}]} ) );
+// Check that conflicting constraints are satisfied by parallel array elements with a 2d index on
+// loc.
+if (0) { // SERVER-3793
+ t.ensureIndex({loc: '2d'});
+ assert.eq(
+ 1,
+ t.count({loc: {x: 20, y: 30}, $and: [{loc: {$gt: {x: 20, y: 35}, $lt: {x: 20, y: 34}}}]}));
}
t.drop();
// Check that conflicting constraints are satisfied by parallel array elements of x.
-t.save( {loc:[20,30],x:[1,2]} );
-assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
+t.save({loc: [20, 30], x: [1, 2]});
+assert.eq(1, t.count({loc: [20, 30], x: {$gt: 1.7, $lt: 1.2}}));
-// Check that conflicting constraints are satisfied by parallel array elements of x with a 2d index on loc,x.
-if ( 0 ) { // SERVER-3793
-t.ensureIndex( {loc:'2d',x:1} );
-assert.eq( 1, t.count( {loc:[20,30],x:{$gt:1.7,$lt:1.2}} ) );
+// Check that conflicting constraints are satisfied by parallel array elements of x with a 2d index
+// on loc,x.
+if (0) { // SERVER-3793
+ t.ensureIndex({loc: '2d', x: 1});
+ assert.eq(1, t.count({loc: [20, 30], x: {$gt: 1.7, $lt: 1.2}}));
}
diff --git a/jstests/core/geo_multikey1.js b/jstests/core/geo_multikey1.js
index 7bf5cfaafe1..9c092f4ec31 100644
--- a/jstests/core/geo_multikey1.js
+++ b/jstests/core/geo_multikey1.js
@@ -5,15 +5,14 @@ t.drop();
locArr = [];
arr = [];
-for( i = 0; i < 10; ++i ) {
- locArr.push( [i,i+1] );
- arr.push( i );
+for (i = 0; i < 10; ++i) {
+ locArr.push([i, i + 1]);
+ arr.push(i);
}
-t.save( {loc:locArr,a:arr,b:arr,c:arr} );
+t.save({loc: locArr, a: arr, b: arr, c: arr});
// Parallel arrays are allowed for geo indexes.
-assert.commandWorked(t.ensureIndex( {loc:'2d',a:1,b:1,c:1} ));
+assert.commandWorked(t.ensureIndex({loc: '2d', a: 1, b: 1, c: 1}));
// Parallel arrays are not allowed for normal indexes.
-assert.commandFailed(t.ensureIndex( {loc:1,a:1,b:1,c:1} ));
-
+assert.commandFailed(t.ensureIndex({loc: 1, a: 1, b: 1, c: 1}));
diff --git a/jstests/core/geo_multinest0.js b/jstests/core/geo_multinest0.js
index c3f6fa5c29e..30b66b4adbb 100644
--- a/jstests/core/geo_multinest0.js
+++ b/jstests/core/geo_multinest0.js
@@ -3,58 +3,48 @@
t = db.geonest;
t.drop();
-t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
-var res = t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
-assert.writeOK( res );
+t.insert({zip: "10001", data: [{loc: [10, 10], type: "home"}, {loc: [50, 50], type: "work"}]});
+t.insert({zip: "10002", data: [{loc: [20, 20], type: "home"}, {loc: [50, 50], type: "work"}]});
+var res =
+ t.insert({zip: "10003", data: [{loc: [30, 30], type: "home"}, {loc: [50, 50], type: "work"}]});
+assert.writeOK(res);
-assert.commandWorked(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
-assert.eq( 2, t.getIndexKeys().length );
+assert.commandWorked(t.ensureIndex({"data.loc": "2d", zip: 1}));
+assert.eq(2, t.getIndexKeys().length);
-res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
-assert.writeOK( res );
+res =
+ t.insert({zip: "10004", data: [{loc: [40, 40], type: "home"}, {loc: [50, 50], type: "work"}]});
+assert.writeOK(res);
// test normal access
-printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
-
-
+printjson(t.find({"data.loc": {$within: {$box: [[0, 0], [45, 45]]}}}).toArray());
+assert.eq(4, t.find({"data.loc": {$within: {$box: [[0, 0], [45, 45]]}}}).count());
+assert.eq(4, t.find({"data.loc": {$within: {$box: [[45, 45], [50, 50]]}}}).count());
// Try a complex nesting
t = db.geonest;
t.drop();
-t.insert( { zip : "10001", data : [ { loc : [ [ 10, 10 ], { lat : 50, long : 50 } ], type : "home" } ] } );
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
-res = t.insert({ zip: "10003", data: [{ loc: [{ x: 30, y: 30 }, [ 50, 50 ]], type: "home" }]});
-assert( !res.hasWriteError() );
+t.insert({zip: "10001", data: [{loc: [[10, 10], {lat: 50, long: 50}], type: "home"}]});
+t.insert({zip: "10002", data: [{loc: [20, 20], type: "home"}, {loc: [50, 50], type: "work"}]});
+res = t.insert({zip: "10003", data: [{loc: [{x: 30, y: 30}, [50, 50]], type: "home"}]});
+assert(!res.hasWriteError());
-assert.commandWorked(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
-assert.eq( 2, t.getIndexKeys().length );
+assert.commandWorked(t.ensureIndex({"data.loc": "2d", zip: 1}));
+assert.eq(2, t.getIndexKeys().length);
-res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 50, 50 ], type : "work" } ] } );
+res =
+ t.insert({zip: "10004", data: [{loc: [40, 40], type: "home"}, {loc: [50, 50], type: "work"}]});
-assert.writeOK( res );
+assert.writeOK(res);
// test normal access
-printjson( t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).toArray() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 0, 0 ], [ 45, 45 ] ] } } } ).count() );
-
-assert.eq( 4, t.find( { "data.loc" : { $within : { $box : [ [ 45, 45 ], [ 50, 50 ] ] } } } ).count() );
-
+printjson(t.find({"data.loc": {$within: {$box: [[0, 0], [45, 45]]}}}).toArray());
+assert.eq(4, t.find({"data.loc": {$within: {$box: [[0, 0], [45, 45]]}}}).count());
+assert.eq(4, t.find({"data.loc": {$within: {$box: [[45, 45], [50, 50]]}}}).count());
diff --git a/jstests/core/geo_multinest1.js b/jstests/core/geo_multinest1.js
index f93138e1fd0..78e4c38e49b 100644
--- a/jstests/core/geo_multinest1.js
+++ b/jstests/core/geo_multinest1.js
@@ -3,34 +3,29 @@
t = db.multinest;
t.drop();
-t.insert( { zip : "10001", data : [ { loc : [ 10, 10 ], type : "home" },
- { loc : [ 29, 29 ], type : "work" } ] } );
-t.insert( { zip : "10002", data : [ { loc : [ 20, 20 ], type : "home" },
- { loc : [ 39, 39 ], type : "work" } ] } );
-var res = t.insert( { zip : "10003", data : [ { loc : [ 30, 30 ], type : "home" },
- { loc : [ 49, 49 ], type : "work" } ] } );
-assert.writeOK( res );
+t.insert({zip: "10001", data: [{loc: [10, 10], type: "home"}, {loc: [29, 29], type: "work"}]});
+t.insert({zip: "10002", data: [{loc: [20, 20], type: "home"}, {loc: [39, 39], type: "work"}]});
+var res =
+ t.insert({zip: "10003", data: [{loc: [30, 30], type: "home"}, {loc: [49, 49], type: "work"}]});
+assert.writeOK(res);
-assert.commandWorked(t.ensureIndex( { "data.loc" : "2d", zip : 1 } ));
-assert.eq( 2, t.getIndexKeys().length );
+assert.commandWorked(t.ensureIndex({"data.loc": "2d", zip: 1}));
+assert.eq(2, t.getIndexKeys().length);
-res = t.insert( { zip : "10004", data : [ { loc : [ 40, 40 ], type : "home" },
- { loc : [ 59, 59 ], type : "work" } ] } );
-assert.writeOK( res );
+res =
+ t.insert({zip: "10004", data: [{loc: [40, 40], type: "home"}, {loc: [59, 59], type: "work"}]});
+assert.writeOK(res);
// test normal access
-var result = t.find({ "data.loc" : { $near : [0, 0] } }).toArray();
+var result = t.find({"data.loc": {$near: [0, 0]}}).toArray();
-printjson( result );
+printjson(result);
-assert.eq( 4, result.length );
+assert.eq(4, result.length);
-var order = [ 1, 2, 3, 4 ];
+var order = [1, 2, 3, 4];
-for( var i = 0; i < result.length; i++ ){
- assert.eq( "1000" + order[i], result[i].zip );
+for (var i = 0; i < result.length; i++) {
+ assert.eq("1000" + order[i], result[i].zip);
}
-
-
-
diff --git a/jstests/core/geo_near_random1.js b/jstests/core/geo_near_random1.js
index 5c75b458957..1e7f2bb587d 100644
--- a/jstests/core/geo_near_random1.js
+++ b/jstests/core/geo_near_random1.js
@@ -5,23 +5,25 @@ var test = new GeoNearRandomTest("geo_near_random1");
test.insertPts(50);
-// test.testPt() runs geoNear commands at the given coordinates with
-// limits from 1 to nPts(# of inserted points). At the nth run, it
-// compares the first (n - 1) results with the result of the (n - 1)th
+// test.testPt() runs geoNear commands at the given coordinates with
+// limits from 1 to nPts (# of inserted points). At the nth run, it
+// compares the first (n - 1) results with the result of the (n - 1)th
// run to make sure they are identical. It also makes sure that the
// distances are in increasing order. The test runs in O(N^2).
// Test $near with a 2d index
-test.testPt([0,0]);
+test.testPt([0, 0]);
test.testPt(test.mkPt());
test.testPt(test.mkPt());
test.testPt(test.mkPt());
test.testPt(test.mkPt());
-opts = {sphere: 1};
+opts = {
+ sphere: 1
+};
-// Test $nearSphere with a 2d index
-test.testPt([0,0], opts);
+// Test $nearSphere with a 2d index
+test.testPt([0, 0], opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
@@ -30,7 +32,7 @@ test.testPt(test.mkPt(), opts);
// Test $nearSphere with a 2dsphere index
assert.commandWorked(db.geo_near_random1.dropIndex({loc: '2d'}));
assert.commandWorked(db.geo_near_random1.ensureIndex({loc: '2dsphere'}));
-test.testPt([0,0], opts);
+test.testPt([0, 0], opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
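The comment block reformatted above summarizes what GeoNearRandomTest.testPt() checks: the results at limit n must begin with the results at limit n - 1, and distances must never decrease. A rough, standalone sketch of that invariant, independent of the shared test helper (the runGeoNear callback and its {obj, dis} result shape are assumptions for illustration, not the helper's actual API):

    // Hedged sketch of the O(N^2) prefix/ordering check described above.
    function checkGeoNearInvariants(runGeoNear, nPts) {
        var prev = [];
        for (var limit = 1; limit <= nPts; limit++) {
            var cur = runGeoNear(limit);  // assumed to return an array of {obj, dis}
            // The first (limit - 1) results must be identical to the previous run.
            for (var i = 0; i < prev.length; i++) {
                assert.eq(prev[i].obj._id, cur[i].obj._id, "prefix changed at limit " + limit);
            }
            // Distances must be non-decreasing within a single run.
            for (var j = 1; j < cur.length; j++) {
                assert(cur[j - 1].dis <= cur[j].dis, "distances out of order at limit " + limit);
            }
            prev = cur;
        }
    }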
diff --git a/jstests/core/geo_near_random2.js b/jstests/core/geo_near_random2.js
index af48c9d072e..0cbf374446d 100644
--- a/jstests/core/geo_near_random2.js
+++ b/jstests/core/geo_near_random2.js
@@ -5,15 +5,18 @@ var test = new GeoNearRandomTest("geo_near_random2");
test.insertPts(5000);
-// test.testPt() runs geoNear commands at the given coordinates with
-// limits from 1 to nPts(# of inserted points). At the nth run, it
-// compares the first (n - 1) results with the result of the (n - 1)th
+// test.testPt() runs geoNear commands at the given coordinates with
+// limits from 1 to nPts (# of inserted points). At the nth run, it
+// compares the first (n - 1) results with the result of the (n - 1)th
// run to make sure they are identical. It also makes sure that the
// distances are in increasing order. The test runs in O(N^2).
// Test $near with 2d index
-opts = {sphere: 0, nToTest: test.nPts*0.01};
-test.testPt([0,0], opts);
+opts = {
+ sphere: 0,
+ nToTest: test.nPts * 0.01
+};
+test.testPt([0, 0], opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
test.testPt(test.mkPt(), opts);
@@ -22,8 +25,8 @@ test.testPt(test.mkPt(), opts);
opts.sphere = 1;
// Test $nearSphere with 2d index
-test.testPt([0,0], opts);
-// test.mkPt(0.8) generates a random point in the maximum
+test.testPt([0, 0], opts);
+// test.mkPt(0.8) generates a random point in the maximum
// lat long bounds scaled by 0.8
test.testPt(test.mkPt(0.8), opts);
test.testPt(test.mkPt(0.8), opts);
@@ -33,7 +36,7 @@ test.testPt(test.mkPt(0.8), opts);
// Test $nearSphere with 2dsphere index
assert.commandWorked(db.geo_near_random2.dropIndex({loc: '2d'}));
assert.commandWorked(db.geo_near_random2.ensureIndex({loc: '2dsphere'}));
-test.testPt([0,0], opts);
+test.testPt([0, 0], opts);
test.testPt(test.mkPt(0.8), opts);
test.testPt(test.mkPt(0.8), opts);
test.testPt(test.mkPt(0.8), opts);
diff --git a/jstests/core/geo_nearwithin.js b/jstests/core/geo_nearwithin.js
index 2b0462ebe3d..69eaac51ffe 100644
--- a/jstests/core/geo_nearwithin.js
+++ b/jstests/core/geo_nearwithin.js
@@ -9,19 +9,31 @@ for (var x = -points; x < points; x += 1) {
}
}
-t.ensureIndex({ geo : "2d" });
+t.ensureIndex({geo: "2d"});
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 1]}}}});
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 1]}}}});
assert.eq(resNear.results.length, 5);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 0]}}}});
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 0]}}}});
assert.eq(resNear.results.length, 1);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 0.5]}}}});
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 0.5]}}}});
assert.eq(resNear.results.length, 1);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 1.5]}}}});
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[1, 0], 1.5]}}}});
assert.eq(resNear.results.length, 9);
// We want everything distance >1 from us but <1.5
// These points are (-+1, -+1)
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], query: {$and: [{geo: {$within: {$center: [[0, 0], 1.5]}}},
- {geo: {$not: {$within: {$center: [[0,0], 1]}}}}]}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ query: {
+ $and: [
+ {geo: {$within: {$center: [[0, 0], 1.5]}}},
+ {geo: {$not: {$within: {$center: [[0, 0], 1]}}}}
+ ]
+ }
+});
assert.eq(resNear.results.length, 4);
diff --git a/jstests/core/geo_oob_sphere.js b/jstests/core/geo_oob_sphere.js
index 7403cc99610..40249766355 100644
--- a/jstests/core/geo_oob_sphere.js
+++ b/jstests/core/geo_oob_sphere.js
@@ -5,29 +5,36 @@
t = db.geooobsphere;
t.drop();
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 89 } });
-t.insert({ loc : { x : 30, y : 91 } });
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 89}});
+t.insert({loc: {x: 30, y: 91}});
-assert.commandWorked(t.ensureIndex({ loc : "2d" }));
+assert.commandWorked(t.ensureIndex({loc: "2d"}));
-assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 91 ], $maxDistance : 0.25 } }).count(); } );
+assert.throws(function() {
+ t.find({loc: {$nearSphere: [30, 91], $maxDistance: 0.25}}).count();
+});
// TODO: SERVER-9986 - it's not clear that throwing is correct behavior here
-// assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 89 ], $maxDistance : 0.25 } }).count() } );
+// assert.throws( function() { t.find({ loc : { $nearSphere : [ 30, 89 ], $maxDistance : 0.25 }
+// }).count() } );
-assert.throws( function() { t.find({ loc : { $within : { $centerSphere : [[ -180, -91 ], 0.25] } } }).count(); } );
+assert.throws(function() {
+ t.find({loc: {$within: {$centerSphere: [[-180, -91], 0.25]}}}).count();
+});
var res;
-res = db.runCommand({ geoNear : "geooobsphere", near : [179, -91], maxDistance : 0.25, spherical : true });
-assert.commandFailed( res );
-printjson( res );
+res =
+ db.runCommand({geoNear: "geooobsphere", near: [179, -91], maxDistance: 0.25, spherical: true});
+assert.commandFailed(res);
+printjson(res);
// TODO: SERVER-9986 - it's not clear that throwing is correct behavior here
-// res = db.runCommand({ geoNear : "geooobsphere", near : [30, 89], maxDistance : 0.25, spherical : true })
+// res = db.runCommand({ geoNear : "geooobsphere", near : [30, 89], maxDistance : 0.25, spherical :
+// true })
// assert.commandFailed( res )
// printjson( res )
diff --git a/jstests/core/geo_operator_crs.js b/jstests/core/geo_operator_crs.js
index 99aec03062a..b9e242309dc 100644
--- a/jstests/core/geo_operator_crs.js
+++ b/jstests/core/geo_operator_crs.js
@@ -9,27 +9,29 @@ coll.drop();
// Test 2dsphere index
//
-assert.commandWorked(coll.ensureIndex({ geo : "2dsphere" }));
+assert.commandWorked(coll.ensureIndex({geo: "2dsphere"}));
var legacyZeroPt = [0, 0];
-var jsonZeroPt = { type : "Point", coordinates : [0, 0] };
+var jsonZeroPt = {
+ type: "Point",
+ coordinates: [0, 0]
+};
var legacy90Pt = [90, 0];
-var json90Pt = { type : "Point", coordinates : [90, 0] };
+var json90Pt = {
+ type: "Point",
+ coordinates: [90, 0]
+};
-assert.writeOK(coll.insert({ geo : json90Pt }));
+assert.writeOK(coll.insert({geo: json90Pt}));
var earthRadiusMeters = 6378.1 * 1000;
var result = null;
-result = coll.getDB().runCommand({ geoNear : coll.getName(),
- near : legacyZeroPt,
- spherical : true });
+result = coll.getDB().runCommand({geoNear: coll.getName(), near: legacyZeroPt, spherical: true});
assert.commandWorked(result);
assert.close(result.results[0].dis, Math.PI / 2);
-result = coll.getDB().runCommand({ geoNear : coll.getName(),
- near : jsonZeroPt,
- spherical : true });
+result = coll.getDB().runCommand({geoNear: coll.getName(), near: jsonZeroPt, spherical: true});
assert.commandWorked(result);
assert.close(result.results[0].dis, (Math.PI / 2) * earthRadiusMeters);
@@ -40,13 +42,11 @@ assert.commandWorked(coll.dropIndexes());
// Test 2d Index
//
-assert.commandWorked(coll.ensureIndex({ geo : "2d" }));
+assert.commandWorked(coll.ensureIndex({geo: "2d"}));
-assert.writeOK(coll.insert({ geo : legacy90Pt }));
+assert.writeOK(coll.insert({geo: legacy90Pt}));
-result = coll.getDB().runCommand({ geoNear : coll.getName(),
- near : legacyZeroPt,
- spherical : true });
+result = coll.getDB().runCommand({geoNear: coll.getName(), near: legacyZeroPt, spherical: true});
assert.commandWorked(result);
assert.close(result.results[0].dis, Math.PI / 2);
@@ -56,11 +56,7 @@ assert.close(result.results[0].dis, Math.PI / 2);
// Test with a 2d and 2dsphere index
//
-assert.commandWorked(coll.ensureIndex({ geo : "2dsphere" }));
-result = coll.getDB().runCommand({ geoNear : coll.getName(),
- near : jsonZeroPt,
- spherical : true });
+assert.commandWorked(coll.ensureIndex({geo: "2dsphere"}));
+result = coll.getDB().runCommand({geoNear: coll.getName(), near: jsonZeroPt, spherical: true});
assert.commandWorked(result);
assert.close(result.results[0].dis, (Math.PI / 2) * earthRadiusMeters);
-
-
diff --git a/jstests/core/geo_or.js b/jstests/core/geo_or.js
index 17c7340faff..20eb7b7dce1 100644
--- a/jstests/core/geo_or.js
+++ b/jstests/core/geo_or.js
@@ -17,39 +17,68 @@ t.ensureIndex({loc: indexname});
assert.eq(1, t.find({loc: p}).itcount(), indexname);
// $or supports at most one $near clause
-assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(),
+assert.eq(2,
+ t.find({$or: [{loc: {$nearSphere: p}}]}).itcount(),
'geo query not supported by $or. index type: ' + indexname);
assert.throws(function() {
- assert.eq(2, t.find({$or: [{loc: {$nearSphere: p}},
- {loc: {$nearSphere: q}}]}).itcount(),
+ assert.eq(2,
+ t.find({$or: [{loc: {$nearSphere: p}}, {loc: {$nearSphere: q}}]}).itcount(),
'geo query not supported by $or. index type: ' + indexname);
}, null, '$or with multiple $near clauses');
// the following tests should match the points in the collection
-assert.eq(2, t.find({$or: [
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}},
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}}
- ]}).itcount(),
+assert.eq(2,
+ t.find({
+ $or: [
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}},
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}}
+ ]
+ }).itcount(),
'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}},
- {loc: {$geoIntersects: {$geometry: {type: 'LineString',
- coordinates: [[0,0], [1,1]]}}}}
- ]}).itcount(),
- 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}},
- {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}}
- ]}).itcount(),
+assert.eq(2,
+ t.find({
+ $or: [
+ {loc: {$geoIntersects: {$geometry: {type: 'LineString', coordinates: [p, q]}}}},
+ {
+ loc: {
+ $geoIntersects:
+ {$geometry: {type: 'LineString', coordinates: [[0, 0], [1, 1]]}}
+ }
+ }
+ ]
+ }).itcount(),
+ 'multiple $geoIntersects LineString clauses not supported by $or. index type: ' +
+ indexname);
+assert.eq(2,
+ t.find({
+ $or: [
+ {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: p}}}},
+ {loc: {$geoIntersects: {$geometry: {type: 'Point', coordinates: q}}}}
+ ]
+ }).itcount(),
'multiple $geoIntersects Point clauses not supported by $or. index type: ' + indexname);
-assert.eq(2, t.find({$or: [
- {loc: {$geoIntersects: {$geometry: {type: 'Polygon',
- coordinates: [[[0, 0], p, q, [0, 0]]]}}}},
- {loc: {$geoIntersects: {$geometry:
- {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}}}}
- ]}).itcount(),
- 'multiple $geoIntersects Polygon clauses not supported by $or. index type: ' + indexname);
+assert.eq(
+ 2,
+ t.find({
+ $or: [
+ {
+ loc: {
+ $geoIntersects:
+ {$geometry: {type: 'Polygon', coordinates: [[[0, 0], p, q, [0, 0]]]}}
+ }
+ },
+ {
+ loc: {
+ $geoIntersects: {
+ $geometry:
+ {type: 'Polygon', coordinates: [[[0, 0], [1, 1], [0, 1], [0, 0]]]}
+ }
+ }
+ }
+ ]
+ }).itcount(),
+ 'multiple $geoIntersects Polygon clauses not supported by $or. index type: ' + indexname);
t.dropIndexes();
@@ -57,6 +86,11 @@ var indexname = "2d";
t.ensureIndex({loc: indexname});
-assert.eq(2, t.find({$or: [{loc: {$geoWithin: {$centerSphere: [p, 10]}}},
- {loc: {$geoWithin: {$centerSphere: [p, 10]}}}]}).itcount(),
+assert.eq(2,
+ t.find({
+ $or: [
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}},
+ {loc: {$geoWithin: {$centerSphere: [p, 10]}}}
+ ]
+ }).itcount(),
'multiple $geoWithin clauses not supported by $or. index type: ' + indexname);
diff --git a/jstests/core/geo_poly_edge.js b/jstests/core/geo_poly_edge.js
index ce93607f1d3..380fe533861 100644
--- a/jstests/core/geo_poly_edge.js
+++ b/jstests/core/geo_poly_edge.js
@@ -2,21 +2,21 @@
// Tests polygon edge cases
//
-var coll = db.getCollection( 'jstests_geo_poly_edge' );
+var coll = db.getCollection('jstests_geo_poly_edge');
coll.drop();
-coll.ensureIndex({ loc : "2d" });
+coll.ensureIndex({loc: "2d"});
-coll.insert({ loc : [10, 10] });
-coll.insert({ loc : [10, -10] });
+coll.insert({loc: [10, 10]});
+coll.insert({loc: [10, -10]});
-assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, -10 ]] } } }).itcount(), 2 );
+assert.eq(coll.find({loc: {$within: {$polygon: [[10, 10], [10, 10], [10, -10]]}}}).itcount(), 2);
-assert.eq( coll.find({ loc : { $within : { $polygon : [[ 10, 10 ], [ 10, 10 ], [ 10, 10 ]] } } }).itcount(), 1 );
+assert.eq(coll.find({loc: {$within: {$polygon: [[10, 10], [10, 10], [10, 10]]}}}).itcount(), 1);
+coll.insert({loc: [179, 0]});
+coll.insert({loc: [0, 179]});
-coll.insert({ loc : [179, 0] });
-coll.insert({ loc : [0, 179] });
-
-assert.eq( coll.find({ loc : { $within : { $polygon : [[0, 0], [1000, 0], [1000, 1000], [0, 1000]] } } }).itcount(), 3 );
-
+assert.eq(coll.find({loc: {$within: {$polygon: [[0, 0], [1000, 0], [1000, 1000], [0, 1000]]}}})
+ .itcount(),
+ 3);
diff --git a/jstests/core/geo_poly_line.js b/jstests/core/geo_poly_line.js
index 2b61d464b05..fe00e0483e4 100644
--- a/jstests/core/geo_poly_line.js
+++ b/jstests/core/geo_poly_line.js
@@ -3,15 +3,14 @@
t = db.geo_polygon5;
t.drop();
-t.insert({loc:[0,0]});
-t.insert({loc:[1,0]});
-t.insert({loc:[2,0]});
-t.insert({loc:[3,0]});
-t.insert({loc:[4,0]});
+t.insert({loc: [0, 0]});
+t.insert({loc: [1, 0]});
+t.insert({loc: [2, 0]});
+t.insert({loc: [3, 0]});
+t.insert({loc: [4, 0]});
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-printjson( t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).toArray() );
-
-assert.eq( 5, t.find({ loc: { "$within": { "$polygon" : [[0,0], [2,0], [4,0]] } } }).itcount() );
+printjson(t.find({loc: {"$within": {"$polygon": [[0, 0], [2, 0], [4, 0]]}}}).toArray());
+assert.eq(5, t.find({loc: {"$within": {"$polygon": [[0, 0], [2, 0], [4, 0]]}}}).itcount());
diff --git a/jstests/core/geo_polygon1.js b/jstests/core/geo_polygon1.js
index de2652e69bc..487df91a167 100644
--- a/jstests/core/geo_polygon1.js
+++ b/jstests/core/geo_polygon1.js
@@ -6,68 +6,81 @@ t = db.geo_polygon1;
t.drop();
num = 0;
-for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
+for (x = 1; x < 9; x++) {
+ for (y = 1; y < 9; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
}
}
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-triangle = [[0,0], [1,1], [0,2]];
+triangle = [[0, 0], [1, 1], [0, 2]];
// Look at only a small slice of the data within a triangle
-assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
+assert.eq(1, t.find({loc: {"$within": {"$polygon": triangle}}}).count(), "Triangle Test");
-boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
+boxBounds = [[0, 0], [0, 10], [10, 10], [10, 0]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
+assert.eq(num, t.find({loc: {"$within": {"$polygon": boxBounds}}}).count(), "Bounding Box Test");
-//Make sure we can add object-based polygons
-assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() );
+// Make sure we can add object-based polygons
+assert.eq(
+ num,
+ t.find({loc: {$within: {$polygon: {a: [-10, -10], b: [-10, 10], c: [10, 10], d: [10, -10]}}}})
+ .count());
// Look in a box much bigger than the one we have data in
-boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
+boxBounds = [[-100, -100], [-100, 100], [100, 100], [100, -100]];
+assert.eq(num,
+ t.find({loc: {"$within": {"$polygon": boxBounds}}}).count(),
+ "Big Bounding Box Test");
t.drop();
pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
+ [0, 2],
+ [0, 4],
+ [2, 6],
+ [4, 6], // Head
+ [6, 4],
+ [4, 3],
+ [6, 2], // Mouth
+ [4, 0],
+ [2, 0] // Bottom
+];
-t.save({loc: [1,3] }); // Add a point that's in
-assert.commandWorked(t.ensureIndex( { loc : "2d" } ));
+t.save({loc: [1, 3]}); // Add a point that's in
+assert.commandWorked(t.ensureIndex({loc: "2d"}));
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
+assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman single point");
-t.save({ loc : [5, 3] }); // Add a point that's out right in the mouth opening
-t.save({ loc : [3, 7] }); // Add a point above the center of the head
-t.save({ loc : [3,-1] }); // Add a point below the center of the bottom
+t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening
+t.save({loc: [3, 7]}); // Add a point above the center of the head
+t.save({loc: [3, -1]}); // Add a point below the center of the bottom
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
+assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman double point");
// Make sure we can't add bad polygons
okay = true;
-try{
- t.find( { loc : { $within : { $polygon : [1, 2] } } } ).toArray();
+try {
+ t.find({loc: {$within: {$polygon: [1, 2]}}}).toArray();
okay = false;
+} catch (e) {
}
-catch(e){}
assert(okay);
-try{
- t.find( { loc : { $within : { $polygon : [[1, 2]] } } } ).toArray();
+try {
+ t.find({loc: {$within: {$polygon: [[1, 2]]}}}).toArray();
okay = false;
+} catch (e) {
}
-catch(e){}
assert(okay);
-try{
- t.find( { loc : { $within : { $polygon : [[1, 2], [2, 3]] } } } ).toArray();
+try {
+ t.find({loc: {$within: {$polygon: [[1, 2], [2, 3]]}}}).toArray();
okay = false;
+} catch (e) {
}
-catch(e){}
assert(okay);
-
diff --git a/jstests/core/geo_polygon1_noindex.js b/jstests/core/geo_polygon1_noindex.js
index 56d1cc64f59..672f53ebd90 100644
--- a/jstests/core/geo_polygon1_noindex.js
+++ b/jstests/core/geo_polygon1_noindex.js
@@ -4,43 +4,57 @@ t = db.geo_polygon1_noindex;
t.drop();
num = 0;
-for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
+for (x = 1; x < 9; x++) {
+ for (y = 1; y < 9; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
}
}
-triangle = [[0,0], [1,1], [0,2]];
+triangle = [[0, 0], [1, 1], [0, 2]];
// Look at only a small slice of the data within a triangle
-assert.eq( 1 , t.find({ loc: { "$within": { "$polygon" : triangle }}} ).count() , "Triangle Test" );
+assert.eq(1, t.find({loc: {"$within": {"$polygon": triangle}}}).count(), "Triangle Test");
-boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
+boxBounds = [[0, 0], [0, 10], [10, 10], [10, 0]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Bounding Box Test" );
+assert.eq(num, t.find({loc: {"$within": {"$polygon": boxBounds}}}).count(), "Bounding Box Test");
-//Make sure we can add object-based polygons
-assert.eq( num, t.find( { loc : { $within : { $polygon : { a : [-10, -10], b : [-10, 10], c : [10, 10], d : [10, -10] } } } } ).count() );
+// Make sure we can add object-based polygons
+assert.eq(
+ num,
+ t.find({loc: {$within: {$polygon: {a: [-10, -10], b: [-10, 10], c: [10, 10], d: [10, -10]}}}})
+ .count());
// Look in a box much bigger than the one we have data in
-boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
-assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).count() , "Big Bounding Box Test" );
+boxBounds = [[-100, -100], [-100, 100], [100, 100], [100, -100]];
+assert.eq(num,
+ t.find({loc: {"$within": {"$polygon": boxBounds}}}).count(),
+ "Big Bounding Box Test");
t.drop();
pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
-
-assert.writeOK(t.save({loc: [1,3] })); // Add a point that's in
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman single point" );
-
-t.save({ loc : [5, 3] }); // Add a point that's out right in the mouth opening
-t.save({ loc : [3, 7] }); // Add a point above the center of the head
-t.save({ loc : [3,-1] }); // Add a point below the center of the bottom
-
-assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).count() , "Pacman double point" );
+ [0, 2],
+ [0, 4],
+ [2, 6],
+ [4, 6], // Head
+ [6, 4],
+ [4, 3],
+ [6, 2], // Mouth
+ [4, 0],
+ [2, 0] // Bottom
+];
+
+assert.writeOK(t.save({loc: [1, 3]})); // Add a point that's in
+
+assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman single point");
+
+t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening
+t.save({loc: [3, 7]}); // Add a point above the center of the head
+t.save({loc: [3, -1]}); // Add a point below the center of the bottom
+
+assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman double point");
diff --git a/jstests/core/geo_polygon2.js b/jstests/core/geo_polygon2.js
index c6857341d79..8a011beba0c 100644
--- a/jstests/core/geo_polygon2.js
+++ b/jstests/core/geo_polygon2.js
@@ -8,28 +8,27 @@
var numTests = 4;
-for ( var test = 0; test < numTests; test++ ) {
-
- Random.srand( 1337 + test );
+for (var test = 0; test < numTests; test++) {
+ Random.srand(1337 + test);
var numTurtles = 4;
- var gridSize = [ 20, 20 ];
+ var gridSize = [20, 20];
var turtleSteps = 500;
- var bounds = [ Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001 ];
+ var bounds = [Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001];
var rotation = Math.PI * Random.rand();
- var bits = Math.floor( Random.rand() * 32 );
+ var bits = Math.floor(Random.rand() * 32);
- printjson( { test : test, rotation : rotation, bits : bits });
+ printjson({test: test, rotation: rotation, bits: bits});
- var rotatePoint = function( x, y ) {
+ var rotatePoint = function(x, y) {
- if( y == undefined ){
+ if (y == undefined) {
y = x[1];
x = x[0];
}
- xp = x * Math.cos( rotation ) - y * Math.sin( rotation );
- yp = y * Math.cos( rotation ) + x * Math.sin( rotation );
+ xp = x * Math.cos(rotation) - y * Math.sin(rotation);
+ yp = y * Math.cos(rotation) + x * Math.sin(rotation);
var scaleX = (bounds[1] - bounds[0]) / 360;
var scaleY = (bounds[1] - bounds[0]) / 360;
@@ -41,21 +40,21 @@ for ( var test = 0; test < numTests; test++ ) {
};
var grid = [];
- for ( var i = 0; i < gridSize[0]; i++ ) {
- grid.push( new Array( gridSize[1] ) );
+ for (var i = 0; i < gridSize[0]; i++) {
+ grid.push(new Array(gridSize[1]));
}
grid.toString = function() {
var gridStr = "";
- for ( var j = grid[0].length - 1; j >= -1; j-- ) {
- for ( var i = 0; i < grid.length; i++ ) {
- if ( i == 0 )
- gridStr += ( j == -1 ? " " : ( j % 10) ) + ": ";
- if ( j != -1 )
- gridStr += "[" + ( grid[i][j] != undefined ? grid[i][j] : " " ) + "]";
+ for (var j = grid[0].length - 1; j >= -1; j--) {
+ for (var i = 0; i < grid.length; i++) {
+ if (i == 0)
+ gridStr += (j == -1 ? " " : (j % 10)) + ": ";
+ if (j != -1)
+ gridStr += "[" + (grid[i][j] != undefined ? grid[i][j] : " ") + "]";
else
- gridStr += " " + ( i % 10 ) + " ";
+ gridStr += " " + (i % 10) + " ";
}
gridStr += "\n";
}
@@ -64,89 +63,82 @@ for ( var test = 0; test < numTests; test++ ) {
};
var turtles = [];
- for ( var i = 0; i < numTurtles; i++ ) {
-
- var up = ( i % 2 == 0 ) ? i - 1 : 0;
- var left = ( i % 2 == 1 ) ? ( i - 1 ) - 1 : 0;
+ for (var i = 0; i < numTurtles; i++) {
+ var up = (i % 2 == 0) ? i - 1 : 0;
+ var left = (i % 2 == 1) ? (i - 1) - 1 : 0;
turtles[i] = [
- [ Math.floor( gridSize[0] / 2 ), Math.floor( gridSize[1] / 2 ) ],
- [ Math.floor( gridSize[0] / 2 ) + left, Math.floor( gridSize[1] / 2 ) + up ] ];
+ [Math.floor(gridSize[0] / 2), Math.floor(gridSize[1] / 2)],
+ [Math.floor(gridSize[0] / 2) + left, Math.floor(gridSize[1] / 2) + up]
+ ];
grid[turtles[i][1][0]][turtles[i][1][1]] = i;
-
}
- grid[Math.floor( gridSize[0] / 2 )][Math.floor( gridSize[1] / 2 )] = "S";
+ grid[Math.floor(gridSize[0] / 2)][Math.floor(gridSize[1] / 2)] = "S";
// print( grid.toString() )
var pickDirections = function() {
- var up = Math.floor( Random.rand() * 3 );
- if ( up == 2 )
+ var up = Math.floor(Random.rand() * 3);
+ if (up == 2)
up = -1;
- if ( up == 0 ) {
- var left = Math.floor( Random.rand() * 3 );
- if ( left == 2 )
+ if (up == 0) {
+ var left = Math.floor(Random.rand() * 3);
+ if (left == 2)
left = -1;
} else
left = 0;
- if ( Random.rand() < 0.5 ) {
+ if (Random.rand() < 0.5) {
var swap = left;
left = up;
up = swap;
}
- return [ left, up ];
+ return [left, up];
};
- for ( var s = 0; s < turtleSteps; s++ ) {
-
- for ( var t = 0; t < numTurtles; t++ ) {
-
+ for (var s = 0; s < turtleSteps; s++) {
+ for (var t = 0; t < numTurtles; t++) {
var dirs = pickDirections();
var up = dirs[0];
var left = dirs[1];
var lastTurtle = turtles[t][turtles[t].length - 1];
- var nextTurtle = [ lastTurtle[0] + left, lastTurtle[1] + up ];
+ var nextTurtle = [lastTurtle[0] + left, lastTurtle[1] + up];
- if ( nextTurtle[0] >= gridSize[0] ||
- nextTurtle[1] >= gridSize[1] ||
- nextTurtle[0] < 0 ||
- nextTurtle[1] < 0 )
+ if (nextTurtle[0] >= gridSize[0] || nextTurtle[1] >= gridSize[1] || nextTurtle[0] < 0 ||
+ nextTurtle[1] < 0)
continue;
- if ( grid[nextTurtle[0]][nextTurtle[1]] == undefined ) {
- turtles[t].push( nextTurtle );
+ if (grid[nextTurtle[0]][nextTurtle[1]] == undefined) {
+ turtles[t].push(nextTurtle);
grid[nextTurtle[0]][nextTurtle[1]] = t;
}
-
}
}
turtlePaths = [];
- for ( var t = 0; t < numTurtles; t++ ) {
-
+ for (var t = 0; t < numTurtles; t++) {
turtlePath = [];
var nextSeg = function(currTurtle, prevTurtle) {
var pathX = currTurtle[0];
- if ( currTurtle[1] < prevTurtle[1] ) {
+ if (currTurtle[1] < prevTurtle[1]) {
pathX = currTurtle[0] + 1;
pathY = prevTurtle[1];
- } else if ( currTurtle[1] > prevTurtle[1] ) {
+ } else if (currTurtle[1] > prevTurtle[1]) {
pathX = currTurtle[0];
pathY = currTurtle[1];
- } else if ( currTurtle[0] < prevTurtle[0] ) {
+ } else if (currTurtle[0] < prevTurtle[0]) {
pathX = prevTurtle[0];
pathY = currTurtle[1];
- } else if ( currTurtle[0] > prevTurtle[0] ) {
+ } else if (currTurtle[0] > prevTurtle[0]) {
pathX = currTurtle[0];
pathY = currTurtle[1] + 1;
}
@@ -155,25 +147,21 @@ for ( var test = 0; test < numTests; test++ ) {
// : "
// + [pathX, pathY]);
- return [ pathX, pathY ];
+ return [pathX, pathY];
};
- for ( var s = 1; s < turtles[t].length; s++ ) {
-
+ for (var s = 1; s < turtles[t].length; s++) {
currTurtle = turtles[t][s];
prevTurtle = turtles[t][s - 1];
- turtlePath.push( nextSeg( currTurtle, prevTurtle ) );
-
+ turtlePath.push(nextSeg(currTurtle, prevTurtle));
}
- for ( var s = turtles[t].length - 2; s >= 0; s-- ) {
-
+ for (var s = turtles[t].length - 2; s >= 0; s--) {
currTurtle = turtles[t][s];
prevTurtle = turtles[t][s + 1];
- turtlePath.push( nextSeg( currTurtle, prevTurtle ) );
-
+ turtlePath.push(nextSeg(currTurtle, prevTurtle));
}
// printjson( turtlePath )
@@ -183,38 +171,37 @@ for ( var test = 0; test < numTests; test++ ) {
grid[lastTurtle[0]][lastTurtle[1]] = undefined;
fixedTurtlePath = [];
- for ( var s = 1; s < turtlePath.length; s++ ) {
-
- if ( turtlePath[s - 1][0] == turtlePath[s][0] &&
- turtlePath[s - 1][1] == turtlePath[s][1] ) {
+ for (var s = 1; s < turtlePath.length; s++) {
+ if (turtlePath[s - 1][0] == turtlePath[s][0] &&
+ turtlePath[s - 1][1] == turtlePath[s][1]) {
continue;
}
var up = turtlePath[s][1] - turtlePath[s - 1][1];
var right = turtlePath[s][0] - turtlePath[s - 1][0];
- var addPoint = ( up != 0 && right != 0 );
+ var addPoint = (up != 0 && right != 0);
- if ( addPoint && up != right ) {
- fixedTurtlePath.push( [ turtlePath[s][0], turtlePath[s - 1][1] ] );
- } else if ( addPoint ) {
- fixedTurtlePath.push( [ turtlePath[s - 1][0], turtlePath[s][1] ] );
+ if (addPoint && up != right) {
+ fixedTurtlePath.push([turtlePath[s][0], turtlePath[s - 1][1]]);
+ } else if (addPoint) {
+ fixedTurtlePath.push([turtlePath[s - 1][0], turtlePath[s][1]]);
}
- fixedTurtlePath.push( turtlePath[s] );
+ fixedTurtlePath.push(turtlePath[s]);
}
// printjson( fixedTurtlePath )
- turtlePaths.push( fixedTurtlePath );
+ turtlePaths.push(fixedTurtlePath);
}
// Uncomment to print polygon shape
// print( grid.toString() )
var polygon = [];
- for ( var t = 0; t < turtlePaths.length; t++ ) {
- for ( var s = 0; s < turtlePaths[t].length; s++ ) {
- polygon.push( rotatePoint( turtlePaths[t][s] ) );
+ for (var t = 0; t < turtlePaths.length; t++) {
+ for (var s = 0; s < turtlePaths[t].length; s++) {
+ polygon.push(rotatePoint(turtlePaths[t][s]));
}
}
@@ -230,34 +217,33 @@ for ( var test = 0; test < numTests; test++ ) {
var allPointsIn = [];
var allPointsOut = [];
- for ( var j = grid[0].length - 1; j >= 0; j-- ) {
- for ( var i = 0; i < grid.length; i++ ) {
- var point = rotatePoint( [ i + 0.5, j + 0.5 ] );
+ for (var j = grid[0].length - 1; j >= 0; j--) {
+ for (var i = 0; i < grid.length; i++) {
+ var point = rotatePoint([i + 0.5, j + 0.5]);
- t.insert( { loc : point } );
- if ( grid[i][j] != undefined ){
- allPointsIn.push( point );
+ t.insert({loc: point});
+ if (grid[i][j] != undefined) {
+ allPointsIn.push(point);
pointsIn++;
- }
- else{
- allPointsOut.push( point );
+ } else {
+ allPointsOut.push(point);
pointsOut++;
}
}
}
- var res = t.ensureIndex({ loc: "2d" }, { bits: 1 + bits, max: bounds[1], min: bounds[0] });
- assert.commandWorked( res );
+ var res = t.ensureIndex({loc: "2d"}, {bits: 1 + bits, max: bounds[1], min: bounds[0]});
+ assert.commandWorked(res);
- t.insert( { loc : allPointsIn } );
- t.insert( { loc : allPointsOut } );
- allPoints = allPointsIn.concat( allPointsOut );
- t.insert( { loc : allPoints } );
+ t.insert({loc: allPointsIn});
+ t.insert({loc: allPointsOut});
+ allPoints = allPointsIn.concat(allPointsOut);
+ t.insert({loc: allPoints});
- print( "Points : " );
- printjson( { pointsIn : pointsIn, pointsOut : pointsOut } );
- //print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() )
+ print("Points : ");
+ printjson({pointsIn: pointsIn, pointsOut: pointsOut});
+ // print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() )
- assert.eq( gridSize[0] * gridSize[1] + 3, t.find().count() );
- assert.eq( 2 + pointsIn, t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() );
+ assert.eq(gridSize[0] * gridSize[1] + 3, t.find().count());
+ assert.eq(2 + pointsIn, t.find({loc: {"$within": {"$polygon": polygon}}}).count());
}
diff --git a/jstests/core/geo_polygon3.js b/jstests/core/geo_polygon3.js
index 495ecb189b1..887e81701cd 100644
--- a/jstests/core/geo_polygon3.js
+++ b/jstests/core/geo_polygon3.js
@@ -4,51 +4,61 @@
var numTests = 31;
-for( var n = 0; n < numTests; n++ ){
-
- t = db.geo_polygon3;
- t.drop();
-
- num = 0;
- for ( x=1; x < 9; x++ ){
- for ( y= 1; y < 9; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
- }
- }
-
- t.ensureIndex( { loc : "2d" }, { bits : 2 + n } );
-
- triangle = [[0,0], [1,1], [0,2]];
-
- // Look at only a small slice of the data within a triangle
- assert.eq( 1 , t.find( { loc: { "$within": { "$polygon" : triangle }}} ).itcount() , "Triangle Test" );
-
-
- boxBounds = [ [0,0], [0,10], [10,10], [10,0] ];
-
- assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Bounding Box Test" );
-
- // Look in a box much bigger than the one we have data in
- boxBounds = [[-100,-100], [-100, 100], [100,100], [100,-100]];
- assert.eq( num , t.find( { loc : { "$within" : { "$polygon" : boxBounds } } } ).itcount() , "Big Bounding Box Test" );
-
- t.drop();
-
- pacman = [
- [0,2], [0,4], [2,6], [4,6], // Head
- [6,4], [4,3], [6,2], // Mouth
- [4,0], [2,0] // Bottom
- ];
-
- t.save({loc: [1,3] }); // Add a point that's in
- t.ensureIndex( { loc : "2d" }, { bits : 2 + t } );
-
- assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman single point" );
-
- t.save({ loc : [5, 3] }); // Add a point that's out right in the mouth opening
- t.save({ loc : [3, 7] }); // Add a point above the center of the head
- t.save({ loc : [3,-1] }); // Add a point below the center of the bottom
-
- assert.eq( 1 , t.find({loc : { $within : { $polygon : pacman }}} ).itcount() , "Pacman double point" );
+for (var n = 0; n < numTests; n++) {
+ t = db.geo_polygon3;
+ t.drop();
+
+ num = 0;
+ for (x = 1; x < 9; x++) {
+ for (y = 1; y < 9; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
+ }
+ }
+
+ t.ensureIndex({loc: "2d"}, {bits: 2 + n});
+
+ triangle = [[0, 0], [1, 1], [0, 2]];
+
+ // Look at only a small slice of the data within a triangle
+ assert.eq(1, t.find({loc: {"$within": {"$polygon": triangle}}}).itcount(), "Triangle Test");
+
+ boxBounds = [[0, 0], [0, 10], [10, 10], [10, 0]];
+
+ assert.eq(
+ num, t.find({loc: {"$within": {"$polygon": boxBounds}}}).itcount(), "Bounding Box Test");
+
+ // Look in a box much bigger than the one we have data in
+ boxBounds = [[-100, -100], [-100, 100], [100, 100], [100, -100]];
+ assert.eq(num,
+ t.find({loc: {"$within": {"$polygon": boxBounds}}}).itcount(),
+ "Big Bounding Box Test");
+
+ t.drop();
+
+ pacman = [
+ [0, 2],
+ [0, 4],
+ [2, 6],
+ [4, 6], // Head
+ [6, 4],
+ [4, 3],
+ [6, 2], // Mouth
+ [4, 0],
+ [2, 0] // Bottom
+ ];
+
+ t.save({loc: [1, 3]}); // Add a point that's in
+    t.ensureIndex({loc: "2d"}, {bits: 2 + n});
+
+ assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).itcount(), "Pacman single point");
+
+ t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening
+ t.save({loc: [3, 7]}); // Add a point above the center of the head
+ t.save({loc: [3, -1]}); // Add a point below the center of the bottom
+
+ assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).itcount(), "Pacman double point");
}
diff --git a/jstests/core/geo_queryoptimizer.js b/jstests/core/geo_queryoptimizer.js
index f75afad645e..af55507ff86 100644
--- a/jstests/core/geo_queryoptimizer.js
+++ b/jstests/core/geo_queryoptimizer.js
@@ -2,26 +2,25 @@
t = db.geo_qo1;
t.drop();
-t.ensureIndex({loc:"2d"});
+t.ensureIndex({loc: "2d"});
-t.insert({'issue':0});
-t.insert({'issue':1});
-t.insert({'issue':2});
-t.insert({'issue':2, 'loc':[30.12,-118]});
-t.insert({'issue':1, 'loc':[30.12,-118]});
-t.insert({'issue':0, 'loc':[30.12,-118]});
+t.insert({'issue': 0});
+t.insert({'issue': 1});
+t.insert({'issue': 2});
+t.insert({'issue': 2, 'loc': [30.12, -118]});
+t.insert({'issue': 1, 'loc': [30.12, -118]});
+t.insert({'issue': 0, 'loc': [30.12, -118]});
-assert.eq( 6 , t.find().itcount() , "A1" );
+assert.eq(6, t.find().itcount(), "A1");
-assert.eq( 2 , t.find({'issue':0}).itcount() , "A2" );
+assert.eq(2, t.find({'issue': 0}).itcount(), "A2");
-assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "A3" );
+assert.eq(1, t.find({'issue': 0, 'loc': {$near: [30.12, -118]}}).itcount(), "A3");
-assert.eq( 2 , t.find({'issue':0}).itcount() , "B1" );
+assert.eq(2, t.find({'issue': 0}).itcount(), "B1");
-assert.eq( 6 , t.find().itcount() , "B2" );
+assert.eq(6, t.find().itcount(), "B2");
-assert.eq( 2 , t.find({'issue':0}).itcount() , "B3" );
-
-assert.eq( 1 , t.find({'issue':0,'loc':{$near:[30.12,-118]}}).itcount() , "B4" );
+assert.eq(2, t.find({'issue': 0}).itcount(), "B3");
+assert.eq(1, t.find({'issue': 0, 'loc': {$near: [30.12, -118]}}).itcount(), "B4");
diff --git a/jstests/core/geo_regex0.js b/jstests/core/geo_regex0.js
index ae7fddabcf3..1add7f4e0c3 100644
--- a/jstests/core/geo_regex0.js
+++ b/jstests/core/geo_regex0.js
@@ -4,15 +4,20 @@
t = db.regex0;
t.drop();
-t.ensureIndex( { point : '2d', words : 1 } );
-t.insert( { point : [ 1, 1 ], words : [ 'foo', 'bar' ] } );
-
-regex = { words : /^f/ };
-geo = { point : { $near : [ 1, 1 ] } };
-both = { point : { $near : [ 1, 1 ] }, words : /^f/ };
-
-assert.eq(1, t.find( regex ).count() );
-assert.eq(1, t.find( geo ).count() );
-assert.eq(1, t.find( both ).count() );
+t.ensureIndex({point: '2d', words: 1});
+t.insert({point: [1, 1], words: ['foo', 'bar']});
+regex = {
+ words: /^f/
+};
+geo = {
+ point: {$near: [1, 1]}
+};
+both = {
+ point: {$near: [1, 1]},
+ words: /^f/
+};
+assert.eq(1, t.find(regex).count());
+assert.eq(1, t.find(geo).count());
+assert.eq(1, t.find(both).count());
diff --git a/jstests/core/geo_s2cursorlimitskip.js b/jstests/core/geo_s2cursorlimitskip.js
index cbf360a45b0..868b57de39f 100644
--- a/jstests/core/geo_s2cursorlimitskip.js
+++ b/jstests/core/geo_s2cursorlimitskip.js
@@ -14,12 +14,16 @@ var random = Random.rand;
* fetch the rest of the points and again verify that the
* number of query and getmore operations are correct.
*/
-function sign() { return random() > 0.5 ? 1 : -1; }
-function insertRandomPoints(num, minDist, maxDist){
- for(var i = 0; i < num; i++){
+function sign() {
+ return random() > 0.5 ? 1 : -1;
+}
+function insertRandomPoints(num, minDist, maxDist) {
+ for (var i = 0; i < num; i++) {
var lat = sign() * (minDist + random() * (maxDist - minDist));
var lng = sign() * (minDist + random() * (maxDist - minDist));
- var point = { geo: { type: "Point", coordinates: [lng, lat] } };
+ var point = {
+ geo: {type: "Point", coordinates: [lng, lat]}
+ };
assert.writeOK(t.insert(point));
}
}
@@ -31,8 +35,8 @@ var batchSize = 4;
// Insert points between 0.01 and 1.0 away.
insertRandomPoints(totalPointCount, 0.01, 1.0);
-var cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}})
- .batchSize(batchSize);
+var cursor = t.find({geo: {$geoNear: {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}})
+ .batchSize(batchSize);
assert.eq(cursor.count(), totalPointCount);
// Disable profiling in order to drop the system.profile collection.
@@ -71,14 +75,20 @@ assert(!cursor.hasNext());
var someLimit = 23;
// Make sure limit does something.
-cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit);
+cursor = t.find({geo: {$geoNear: {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}})
+ .limit(someLimit);
// Count doesn't work here -- ignores limit/skip, so we use itcount.
assert.eq(cursor.itcount(), someLimit);
// Make sure skip works by skipping some stuff ourselves.
var someSkip = 3;
-cursor = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).limit(someLimit + someSkip);
-for (var i = 0; i < someSkip; ++i) { cursor.next(); }
-var cursor2 = t.find({geo: {$geoNear : {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}}).skip(someSkip).limit(someLimit);
+cursor = t.find({geo: {$geoNear: {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}})
+ .limit(someLimit + someSkip);
+for (var i = 0; i < someSkip; ++i) {
+ cursor.next();
+}
+var cursor2 = t.find({geo: {$geoNear: {$geometry: {type: "Point", coordinates: [0.0, 0.0]}}}})
+ .skip(someSkip)
+ .limit(someLimit);
while (cursor.hasNext()) {
assert(cursor2.hasNext());
assert.eq(cursor.next(), cursor2.next());
diff --git a/jstests/core/geo_s2dedupnear.js b/jstests/core/geo_s2dedupnear.js
index 1b6f11ce504..21378893720 100644
--- a/jstests/core/geo_s2dedupnear.js
+++ b/jstests/core/geo_s2dedupnear.js
@@ -3,9 +3,11 @@
t = db.geo_s2dedupnear;
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
-var x = { "type" : "Polygon",
- "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]};
+t.ensureIndex({geo: "2dsphere"});
+var x = {
+ "type": "Polygon",
+ "coordinates": [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]]
+};
t.insert({geo: x});
-res = t.find({geo: {$geoNear: {"type" : "Point", "coordinates" : [31, 41]}}});
+res = t.find({geo: {$geoNear: {"type": "Point", "coordinates": [31, 41]}}});
assert.eq(res.itcount(), 1);
diff --git a/jstests/core/geo_s2descindex.js b/jstests/core/geo_s2descindex.js
index 26c422bc04f..d6dca95213d 100644
--- a/jstests/core/geo_s2descindex.js
+++ b/jstests/core/geo_s2descindex.js
@@ -5,13 +5,24 @@
var coll = db.getCollection("twodspheredesc");
var descriptors = [["field1", -1], ["field2", -1], ["coordinates", "2dsphere"]];
-var docA = {field1 : "a", field2 : 1, coordinates : [-118.2400013, 34.073893]};
-var docB = {field1 : "b", field2 : 1, coordinates : [-118.2400012, 34.073894]};
+var docA = {
+ field1: "a",
+ field2: 1,
+ coordinates: [-118.2400013, 34.073893]
+};
+var docB = {
+ field1: "b",
+ field2: 1,
+ coordinates: [-118.2400012, 34.073894]
+};
// Try both regular and near index cursors
-var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
- 0.44915760491198753]}}};
-var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0, 0]}}};
+var query = {
+ coordinates: {$geoWithin: {$centerSphere: [[-118.240013, 34.073893], 0.44915760491198753]}}
+};
+var queryNear = {
+ coordinates: {$geoNear: {"type": "Point", "coordinates": [0, 0]}}
+};
//
// The idea here is we try "2dsphere" indexes in combination with descending
@@ -19,10 +30,9 @@ var queryNear = {coordinates : {$geoNear : {"type" : "Point", "coordinates" : [0
// positions and ensure that we return correct results.
//
-for ( var t = 0; t < descriptors.length; t++) {
-
+for (var t = 0; t < descriptors.length; t++) {
var descriptor = {};
- for ( var i = 0; i < descriptors.length; i++) {
+ for (var i = 0; i < descriptors.length; i++) {
descriptor[descriptors[i][0]] = descriptors[i][1];
}
@@ -34,10 +44,10 @@ for ( var t = 0; t < descriptors.length; t++) {
coll.insert(docA);
coll.insert(docB);
- assert.eq(1, coll.count(Object.merge(query, {field1 : "a"})));
- assert.eq(1, coll.count(Object.merge(query, {field1 : "b"})));
- assert.eq(2, coll.count(Object.merge(query, {field2 : 1})));
- assert.eq(0, coll.count(Object.merge(query, {field2 : 0})));
+ assert.eq(1, coll.count(Object.merge(query, {field1: "a"})));
+ assert.eq(1, coll.count(Object.merge(query, {field1: "b"})));
+ assert.eq(2, coll.count(Object.merge(query, {field2: 1})));
+ assert.eq(0, coll.count(Object.merge(query, {field2: 0})));
var firstEls = descriptors.splice(1);
descriptors = firstEls.concat(descriptors);
@@ -50,15 +60,16 @@ for ( var t = 0; t < descriptors.length; t++) {
jsTest.log("Trying case found in wild...");
coll.drop();
-coll.ensureIndex({coordinates : "2dsphere", field : -1});
-coll.insert({coordinates : [-118.240013, 34.073893]});
-var query = {coordinates : {$geoWithin : {$centerSphere : [[-118.240013, 34.073893],
- 0.44915760491198753]}},
- field : 1};
+coll.ensureIndex({coordinates: "2dsphere", field: -1});
+coll.insert({coordinates: [-118.240013, 34.073893]});
+var query = {
+ coordinates: {$geoWithin: {$centerSphere: [[-118.240013, 34.073893], 0.44915760491198753]}},
+ field: 1
+};
assert.eq(null, coll.findOne(query));
coll.remove({});
-coll.insert({coordinates : [-118.240013, 34.073893], field : 1});
+coll.insert({coordinates: [-118.240013, 34.073893], field: 1});
assert.neq(null, coll.findOne(query));
jsTest.log("Success!");
diff --git a/jstests/core/geo_s2disjoint_holes.js b/jstests/core/geo_s2disjoint_holes.js
index bb6b8a4ef2c..a3988e9a614 100644
--- a/jstests/core/geo_s2disjoint_holes.js
+++ b/jstests/core/geo_s2disjoint_holes.js
@@ -8,17 +8,17 @@
// http://geojson.org/geojson-spec.html#polygon
//
-var t = db.geo_s2disjoint_holes,
- coordinates = [
- // One square.
- [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]],
- // Another disjoint square.
- [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]
- ],
- poly = {
- type: 'Polygon',
- coordinates: coordinates
- },
+var t = db.geo_s2disjoint_holes, coordinates = [
+ // One square.
+ [[9, 9], [9, 11], [11, 11], [11, 9], [9, 9]],
+ // Another disjoint square.
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]
+],
+ poly =
+ {
+ type: 'Polygon',
+ coordinates: coordinates
+ },
multiPoly = {
type: 'MultiPolygon',
// Multi-polygon's coordinates are wrapped in one more array.
@@ -32,19 +32,13 @@ jsTest.log("We're going to print some error messages, don't be alarmed.");
//
// Can't query with a polygon or multi-polygon that has a non-contained hole.
//
-print(assert.throws(
- function() {
- t.findOne({geo: {$geoWithin: {$geometry: poly}}});
- },
- [],
- "parsing a polygon with non-overlapping holes."));
+print(assert.throws(function() {
+ t.findOne({geo: {$geoWithin: {$geometry: poly}}});
+}, [], "parsing a polygon with non-overlapping holes."));
-print(assert.throws(
- function() {
- t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}});
- },
- [],
- "parsing a multi-polygon with non-overlapping holes."));
+print(assert.throws(function() {
+ t.findOne({geo: {$geoWithin: {$geometry: multiPoly}}});
+}, [], "parsing a multi-polygon with non-overlapping holes."));
//
// Can't insert a bad polygon or a bad multi-polygon with a 2dsphere index.
diff --git a/jstests/core/geo_s2dupe_points.js b/jstests/core/geo_s2dupe_points.js
index 5b9a30e61c5..63e4369d2fa 100644
--- a/jstests/core/geo_s2dupe_points.js
+++ b/jstests/core/geo_s2dupe_points.js
@@ -15,53 +15,72 @@ function testDuplicates(shapeName, shapeWithDupes, shapeWithoutDupes) {
assert.neq(shapeWithoutDupes, t.findOne({_id: shapeName}).geo);
// can query with $geoIntersects inserted doc using both the duplicated and de-duplicated docs
- assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithDupes.geo } } } ).itcount(), 1);
- assert.eq(t.find({ geo: { $geoIntersects: { $geometry : shapeWithoutDupes } } } ).itcount(), 1);
+ assert.eq(t.find({geo: {$geoIntersects: {$geometry: shapeWithDupes.geo}}}).itcount(), 1);
+ assert.eq(t.find({geo: {$geoIntersects: {$geometry: shapeWithoutDupes}}}).itcount(), 1);
// direct document equality in queries is preserved
- assert.eq(t.find({ geo: shapeWithoutDupes} ).itcount(), 0);
- assert.eq(t.find({ geo: shapeWithDupes.geo } ).itcount(), 1);
+ assert.eq(t.find({geo: shapeWithoutDupes}).itcount(), 0);
+ assert.eq(t.find({geo: shapeWithDupes.geo}).itcount(), 1);
}
// LineString
-var lineWithDupes = { _id: "line", geo: { type: "LineString",
- coordinates: [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6] ]
- }
+var lineWithDupes = {
+ _id: "line",
+ geo: {type: "LineString", coordinates: [[40, 5], [40, 5], [40, 5], [41, 6], [41, 6]]}
+};
+var lineWithoutDupes = {
+ type: "LineString",
+ coordinates: [[40, 5], [41, 6]]
};
-var lineWithoutDupes = { type: "LineString", coordinates: [ [40,5], [41,6] ] };
// Polygon
-var polygonWithDupes = { _id: "poly", geo: { type: "Polygon",
- coordinates: [
- [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
- [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], [-2.0, -2.0] ]
- ] }
+var polygonWithDupes = {
+ _id: "poly",
+ geo: {
+ type: "Polygon",
+ coordinates: [
+ [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
+ [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0], [-2.0, -2.0]]
+ ]
+ }
};
-var polygonWithoutDupes = { type: "Polygon",
+var polygonWithoutDupes = {
+ type: "Polygon",
coordinates: [
- [ [-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0] ],
- [ [-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0] ]
+ [[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
+ [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
]
};
// MultiPolygon
-var multiPolygonWithDupes = { _id: "multi", geo: { type: "MultiPolygon", coordinates: [
- [
- [ [102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
- ],
- [
- [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
- [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.8, 0.8], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
+var multiPolygonWithDupes = {
+ _id: "multi",
+ geo: {
+ type: "MultiPolygon",
+ coordinates: [
+ [[[102.0, 2.0], [103.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
+ [
+ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
+ [
+ [100.2, 0.2],
+ [100.8, 0.2],
+ [100.8, 0.8],
+ [100.8, 0.8],
+ [100.8, 0.8],
+ [100.2, 0.8],
+ [100.2, 0.2]
+ ]
+ ]
]
- ]
-} };
-var multiPolygonWithoutDupes = { type: "MultiPolygon", coordinates: [
- [
- [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ]
- ],
+ }
+};
+var multiPolygonWithoutDupes = {
+ type: "MultiPolygon",
+ coordinates: [
+ [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
[
- [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
- [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
+ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
+ [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]
]
]
};
diff --git a/jstests/core/geo_s2edgecases.js b/jstests/core/geo_s2edgecases.js
index 6cb8ff63809..3dabfdf0bcb 100755..100644
--- a/jstests/core/geo_s2edgecases.js
+++ b/jstests/core/geo_s2edgecases.js
@@ -1,40 +1,61 @@
t = db.geo_s2edgecases;
t.drop();
-roundworldpoint = { "type" : "Point", "coordinates": [ 180, 0 ] };
+roundworldpoint = {
+ "type": "Point",
+ "coordinates": [180, 0]
+};
// Opposite the equator
-roundworld = { "type" : "Polygon",
- "coordinates" : [ [ [179,1], [-179,1], [-179,-1], [179,-1], [179,1]]]};
-t.insert({geo : roundworld});
+roundworld = {
+ "type": "Polygon",
+ "coordinates": [[[179, 1], [-179, 1], [-179, -1], [179, -1], [179, 1]]]
+};
+t.insert({geo: roundworld});
-roundworld2 = { "type" : "Polygon",
- "coordinates" : [ [ [179,1], [179,-1], [-179,-1], [-179,1], [179,1]]]};
-t.insert({geo : roundworld2});
+roundworld2 = {
+ "type": "Polygon",
+ "coordinates": [[[179, 1], [179, -1], [-179, -1], [-179, 1], [179, 1]]]
+};
+t.insert({geo: roundworld2});
// North pole
-santapoint = { "type" : "Point", "coordinates": [ 180, 90 ] };
-santa = { "type" : "Polygon",
- "coordinates" : [ [ [179,89], [179,90], [-179,90], [-179,89], [179,89]]]};
-t.insert({geo : santa});
-santa2 = { "type" : "Polygon",
- "coordinates" : [ [ [179,89], [-179,89], [-179,90], [179,90], [179,89]]]};
-t.insert({geo : santa2});
+santapoint = {
+ "type": "Point",
+ "coordinates": [180, 90]
+};
+santa = {
+ "type": "Polygon",
+ "coordinates": [[[179, 89], [179, 90], [-179, 90], [-179, 89], [179, 89]]]
+};
+t.insert({geo: santa});
+santa2 = {
+ "type": "Polygon",
+ "coordinates": [[[179, 89], [-179, 89], [-179, 90], [179, 90], [179, 89]]]
+};
+t.insert({geo: santa2});
// South pole
-penguinpoint = { "type" : "Point", "coordinates": [ 0, -90 ] };
-penguin1 = { "type" : "Polygon",
- "coordinates" : [ [ [0,-89], [0,-90], [179,-90], [179,-89], [0,-89]]]};
-t.insert({geo : penguin1});
-penguin2 = { "type" : "Polygon",
- "coordinates" : [ [ [0,-89], [179,-89], [179,-90], [0,-90], [0,-89]]]};
-t.insert({geo : penguin2});
+penguinpoint = {
+ "type": "Point",
+ "coordinates": [0, -90]
+};
+penguin1 = {
+ "type": "Polygon",
+ "coordinates": [[[0, -89], [0, -90], [179, -90], [179, -89], [0, -89]]]
+};
+t.insert({geo: penguin1});
+penguin2 = {
+ "type": "Polygon",
+ "coordinates": [[[0, -89], [179, -89], [179, -90], [0, -90], [0, -89]]]
+};
+t.insert({geo: penguin2});
-t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } );
+t.ensureIndex({geo: "2dsphere", nonGeo: 1});
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : roundworldpoint} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": roundworldpoint}}});
assert.eq(res.count(), 2);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : santapoint} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": santapoint}}});
assert.eq(res.count(), 2);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : penguinpoint} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": penguinpoint}}});
assert.eq(res.count(), 2);
diff --git a/jstests/core/geo_s2exact.js b/jstests/core/geo_s2exact.js
index 29150d63376..3acd5b68969 100644
--- a/jstests/core/geo_s2exact.js
+++ b/jstests/core/geo_s2exact.js
@@ -10,12 +10,20 @@ function test(geometry) {
t.dropIndex({geo: "2dsphere"});
}
-pointA = { "type" : "Point", "coordinates": [ 40, 5 ] };
+pointA = {
+ "type": "Point",
+ "coordinates": [40, 5]
+};
test(pointA);
-someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]};
+someline = {
+ "type": "LineString",
+ "coordinates": [[40, 5], [41, 6]]
+};
test(someline);
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]};
+somepoly = {
+ "type": "Polygon",
+ "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]
+};
test(somepoly);
diff --git a/jstests/core/geo_s2explain.js b/jstests/core/geo_s2explain.js
index e5035713e38..c8d32e00379 100644
--- a/jstests/core/geo_s2explain.js
+++ b/jstests/core/geo_s2explain.js
@@ -4,44 +4,48 @@
var t = db.jstests_geo_s2explain;
t.drop();
-var point1 = { loc : { type : "Point", coordinates : [10, 10] } };
-var point2 = { loc : { type : "Point", coordinates : [10.001, 10] } };
-assert.writeOK( t.insert( [ point1, point2] ) );
-
-assert.commandWorked( t.ensureIndex( { loc : "2dsphere"} ) );
-
-var explain = t.find( {
- loc: { $nearSphere : { type : "Point", coordinates : [10, 10] } }
- } ).limit(1).explain("executionStats");
+var point1 = {
+ loc: {type: "Point", coordinates: [10, 10]}
+};
+var point2 = {
+ loc: {type: "Point", coordinates: [10.001, 10]}
+};
+assert.writeOK(t.insert([point1, point2]));
+
+assert.commandWorked(t.ensureIndex({loc: "2dsphere"}));
+
+var explain = t.find({loc: {$nearSphere: {type: "Point", coordinates: [10, 10]}}})
+ .limit(1)
+ .explain("executionStats");
var inputStage = explain.executionStats.executionStages.inputStage;
-assert.eq( 1, inputStage.searchIntervals.length );
+assert.eq(1, inputStage.searchIntervals.length);
// Populates the collection with a few hundred points at varying distances
var points = [];
-for ( var i = 10; i < 70; i+=0.1 ) {
- points.push({ loc : { type : "Point", coordinates : [i, i] } });
+for (var i = 10; i < 70; i += 0.1) {
+ points.push({loc: {type: "Point", coordinates: [i, i]}});
}
-assert.writeOK( t.insert( points ) );
+assert.writeOK(t.insert(points));
-explain = t.find( {
- loc: { $nearSphere : { type : "Point", coordinates : [10, 10] } }
- } ).limit(10).explain("executionStats");
+explain = t.find({loc: {$nearSphere: {type: "Point", coordinates: [10, 10]}}})
+ .limit(10)
+ .explain("executionStats");
inputStage = explain.executionStats.executionStages.inputStage;
-assert.eq( inputStage.inputStages.length, inputStage.searchIntervals.length );
+assert.eq(inputStage.inputStages.length, inputStage.searchIntervals.length);
-explain = t.find( {
- loc: { $nearSphere : { type : "Point", coordinates : [10, 10] } }
- } ).limit(50).explain("executionStats");
+explain = t.find({loc: {$nearSphere: {type: "Point", coordinates: [10, 10]}}})
+ .limit(50)
+ .explain("executionStats");
inputStage = explain.executionStats.executionStages.inputStage;
-assert.eq( inputStage.inputStages.length, inputStage.searchIntervals.length );
+assert.eq(inputStage.inputStages.length, inputStage.searchIntervals.length);
-explain = t.find( {
- loc: { $nearSphere : { type : "Point", coordinates : [10, 10] } }
- } ).limit(200).explain("executionStats");
+explain = t.find({loc: {$nearSphere: {type: "Point", coordinates: [10, 10]}}})
+ .limit(200)
+ .explain("executionStats");
inputStage = explain.executionStats.executionStages.inputStage;
-assert.eq( inputStage.inputStages.length, inputStage.searchIntervals.length );
+assert.eq(inputStage.inputStages.length, inputStage.searchIntervals.length);
diff --git a/jstests/core/geo_s2holesameasshell.js b/jstests/core/geo_s2holesameasshell.js
index 91c05ca4979..29f00b88f7a 100644
--- a/jstests/core/geo_s2holesameasshell.js
+++ b/jstests/core/geo_s2holesameasshell.js
@@ -3,21 +3,30 @@ var t = db.geo_s2holessameasshell;
t.drop();
t.ensureIndex({geo: "2dsphere"});
-var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
-var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
-var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
+var centerPoint = {
+ "type": "Point",
+ "coordinates": [0.5, 0.5]
+};
+var edgePoint = {
+ "type": "Point",
+ "coordinates": [0, 0.5]
+};
+var cornerPoint = {
+ "type": "Point",
+ "coordinates": [0, 0]
+};
// Various "edge" cases. None of them should be returned by the non-polygon
// polygon below.
-t.insert({geo : centerPoint});
-t.insert({geo : edgePoint});
-t.insert({geo : cornerPoint});
+t.insert({geo: centerPoint});
+t.insert({geo: edgePoint});
+t.insert({geo: cornerPoint});
// This generates an empty covering.
-var polygonWithFullHole = { "type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
- ]
+var polygonWithFullHole = {
+ "type": "Polygon",
+ "coordinates":
+ [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]], [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]
};
// No keys for insert should error.
@@ -25,14 +34,17 @@ assert.writeError(t.insert({geo: polygonWithFullHole}));
// No covering to search over should give an empty result set.
assert.throws(function() {
- return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count();});
+ return t.find({geo: {$geoWithin: {$geometry: polygonWithFullHole}}}).count();
+});
// Similar polygon to the one above, but is covered by two holes instead of
// one.
-var polygonWithTwoHolesCoveringWholeArea = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0,0], [0,0.5], [1, 0.5], [1, 0], [0, 0]],
- [[0,0.5], [0,1], [1, 1], [1, 0.5], [0, 0.5]]
+var polygonWithTwoHolesCoveringWholeArea = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
+ [[0, 0], [0, 0.5], [1, 0.5], [1, 0], [0, 0]],
+ [[0, 0.5], [0, 1], [1, 1], [1, 0.5], [0, 0.5]]
]
};
@@ -41,4 +53,5 @@ assert.writeError(t.insert({geo: polygonWithTwoHolesCoveringWholeArea}));
// No covering to search over should give an empty result set.
assert.throws(function() {
- return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count();});
+ return t.find({geo: {$geoWithin: {$geometry: polygonWithTwoHolesCoveringWholeArea}}}).count();
+});
diff --git a/jstests/core/geo_s2index.js b/jstests/core/geo_s2index.js
index 1909fb95783..cc25b4fabfe 100755..100644
--- a/jstests/core/geo_s2index.js
+++ b/jstests/core/geo_s2index.js
@@ -2,131 +2,164 @@ t = db.geo_s2index;
t.drop();
// We internally drop adjacent duplicate points in lines.
-someline = { "type" : "LineString", "coordinates": [ [40,5], [40,5], [ 40, 5], [41, 6], [41,6]]};
-t.insert( {geo : someline , nonGeo: "someline"});
+someline = {
+ "type": "LineString",
+ "coordinates": [[40, 5], [40, 5], [40, 5], [41, 6], [41, 6]]
+};
+t.insert({geo: someline, nonGeo: "someline"});
t.ensureIndex({geo: "2dsphere"});
-foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40,5]}}}}).next();
+foo = t.find({geo: {$geoIntersects: {$geometry: {type: "Point", coordinates: [40, 5]}}}}).next();
assert.eq(foo.geo, someline);
t.dropIndex({geo: "2dsphere"});
-pointA = { "type" : "Point", "coordinates": [ 40, 5 ] };
-t.insert( {geo : pointA , nonGeo: "pointA"});
-
-pointD = { "type" : "Point", "coordinates": [ 41.001, 6.001 ] };
-t.insert( {geo : pointD , nonGeo: "pointD"});
-
-pointB = { "type" : "Point", "coordinates": [ 41, 6 ] };
-t.insert( {geo : pointB , nonGeo: "pointB"});
-
-pointC = { "type" : "Point", "coordinates": [ 41, 6 ] };
-t.insert( {geo : pointC} );
+pointA = {
+ "type": "Point",
+ "coordinates": [40, 5]
+};
+t.insert({geo: pointA, nonGeo: "pointA"});
+
+pointD = {
+ "type": "Point",
+ "coordinates": [41.001, 6.001]
+};
+t.insert({geo: pointD, nonGeo: "pointD"});
+
+pointB = {
+ "type": "Point",
+ "coordinates": [41, 6]
+};
+t.insert({geo: pointB, nonGeo: "pointB"});
+
+pointC = {
+ "type": "Point",
+ "coordinates": [41, 6]
+};
+t.insert({geo: pointC});
// Add a point within the polygon but not on the border. Don't want to be on
// the path of the polyline.
-pointE = { "type" : "Point", "coordinates": [ 40.6, 5.4 ] };
-t.insert( {geo : pointE} );
+pointE = {
+ "type": "Point",
+ "coordinates": [40.6, 5.4]
+};
+t.insert({geo: pointE});
// Make sure we can index this without error.
t.insert({nonGeo: "noGeoField!"});
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]};
-t.insert( {geo : somepoly, nonGeo: "somepoly" });
+somepoly = {
+ "type": "Polygon",
+ "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]
+};
+t.insert({geo: somepoly, nonGeo: "somepoly"});
-var res = t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } );
+var res = t.ensureIndex({geo: "2dsphere", nonGeo: 1});
// We have a point without any geo data. Don't error.
assert.commandWorked(res);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointA} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": pointA}}});
assert.eq(res.itcount(), 3);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointB} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": pointB}}});
assert.eq(res.itcount(), 4);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : pointD} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": pointD}}});
assert.eq(res.itcount(), 1);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : someline} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": someline}}});
assert.eq(res.itcount(), 5);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } });
+res = t.find({"geo": {"$geoIntersects": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 6);
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 6);
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } }).limit(1);
+res = t.find({"geo": {"$geoIntersects": {"$geometry": somepoly}}}).limit(1);
assert.eq(res.itcount(), 1);
-res = t.find({ "nonGeo": "pointA",
- "geo" : { "$geoIntersects" : { "$geometry" : somepoly} } });
+res = t.find({"nonGeo": "pointA", "geo": {"$geoIntersects": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
// Don't crash mongod if we give it bad input.
t.drop();
-t.ensureIndex({loc: "2dsphere", x:1});
-t.save({loc: [0,0]});
-assert.throws(function() { return t.count({loc: {$foo:[0,0]}}); });
-assert.throws(function() { return t.find({ "nonGeo": "pointA",
- "geo" : { "$geoIntersects" : { "$geometry" : somepoly},
- "$near": {"$geometry" : somepoly }}}).count();});
+t.ensureIndex({loc: "2dsphere", x: 1});
+t.save({loc: [0, 0]});
+assert.throws(function() {
+ return t.count({loc: {$foo: [0, 0]}});
+});
+assert.throws(function() {
+ return t.find({
+ "nonGeo": "pointA",
+ "geo": {"$geoIntersects": {"$geometry": somepoly}, "$near": {"$geometry": somepoly}}
+ }).count();
+});
// If we specify a datum, it has to be valid (WGS84).
t.drop();
t.ensureIndex({loc: "2dsphere"});
-res = t.insert({ loc: { type: 'Point',
- coordinates: [40, 5],
- crs: { type: 'name', properties: { name: 'EPSG:2000' }}}});
+res = t.insert({
+ loc: {
+ type: 'Point',
+ coordinates: [40, 5],
+ crs: {type: 'name', properties: {name: 'EPSG:2000'}}
+ }
+});
assert.writeError(res);
assert.eq(0, t.find().itcount());
-res = t.insert({ loc: { type: 'Point', coordinates: [40, 5] }});
+res = t.insert({loc: {type: 'Point', coordinates: [40, 5]}});
assert.writeOK(res);
-res = t.insert({ loc: { type: 'Point',
- coordinates: [40, 5],
- crs: { type: 'name', properties: {name :'EPSG:4326' }}}});
+res = t.insert({
+ loc: {
+ type: 'Point',
+ coordinates: [40, 5],
+ crs: {type: 'name', properties: {name: 'EPSG:4326'}}
+ }
+});
assert.writeOK(res);
-res = t.insert({ loc: { type:'Point',
- coordinates: [40, 5],
- crs: { type: 'name',
- properties: { name: 'urn:ogc:def:crs:OGC:1.3:CRS84'}}}});
+res = t.insert({
+ loc: {
+ type: 'Point',
+ coordinates: [40, 5],
+ crs: {type: 'name', properties: {name: 'urn:ogc:def:crs:OGC:1.3:CRS84'}}
+ }
+});
assert.writeOK(res);
// We can pass level parameters and we verify that they're valid.
// 0 <= coarsestIndexedLevel <= finestIndexedLevel <= 30.
t.drop();
-t.save({loc: [0,0]});
-res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 17, coarsestIndexedLevel: 5 });
+t.save({loc: [0, 0]});
+res = t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 17, coarsestIndexedLevel: 5});
assert.commandWorked(res);
// Ensure the index actually works at a basic level
-assert.neq(null,
- t.findOne({ loc : { $geoNear : { $geometry : { type: 'Point', coordinates: [0, 0] } } } }));
+assert.neq(null, t.findOne({loc: {$geoNear: {$geometry: {type: 'Point', coordinates: [0, 0]}}}}));
t.drop();
-t.save({loc: [0,0]});
-res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 31, coarsestIndexedLevel: 5 });
+t.save({loc: [0, 0]});
+res = t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 31, coarsestIndexedLevel: 5});
assert.commandFailed(res);
t.drop();
-t.save({loc: [0,0]});
-res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: 0 });
+t.save({loc: [0, 0]});
+res = t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: 0});
assert.commandWorked(res);
-//Ensure the index actually works at a basic level
-assert.neq(null,
- t.findOne({ loc : { $geoNear : { $geometry : { type: 'Point', coordinates: [0, 0] } } } }));
+// Ensure the index actually works at a basic level
+assert.neq(null, t.findOne({loc: {$geoNear: {$geometry: {type: 'Point', coordinates: [0, 0]}}}}));
t.drop();
-t.save({loc: [0,0]});
-res = t.ensureIndex({ loc: "2dsphere" }, { finestIndexedLevel: 30, coarsestIndexedLevel: -1 });
+t.save({loc: [0, 0]});
+res = t.ensureIndex({loc: "2dsphere"}, {finestIndexedLevel: 30, coarsestIndexedLevel: -1});
assert.commandFailed(res);
// SERVER-21491 Verify that 2dsphere index options require correct types.
-res = t.ensureIndex({ loc: '2dsphere' }, { '2dsphereIndexVersion': 'NOT_A_NUMBER' });
+res = t.ensureIndex({loc: '2dsphere'}, {'2dsphereIndexVersion': 'NOT_A_NUMBER'});
assert.commandFailed(res);
-res = t.ensureIndex({ loc: '2dsphere' }, { finestIndexedLevel: 'NOT_A_NUMBER' });
+res = t.ensureIndex({loc: '2dsphere'}, {finestIndexedLevel: 'NOT_A_NUMBER'});
assert.commandFailedWithCode(res, ErrorCodes.TypeMismatch);
-res = t.ensureIndex({ loc: '2dsphere' }, { coarsestIndexedLevel: 'NOT_A_NUMBER' });
+res = t.ensureIndex({loc: '2dsphere'}, {coarsestIndexedLevel: 'NOT_A_NUMBER'});
assert.commandFailedWithCode(res, ErrorCodes.TypeMismatch);
// Ensure polygon which previously triggered an assertion error in SERVER-19674
@@ -134,10 +167,8 @@ assert.commandFailedWithCode(res, ErrorCodes.TypeMismatch);
t.drop();
t.insert({
loc: {
- "type" : "Polygon",
- "coordinates" : [
- [[-45, 0], [-44.875, 0], [-44.875, 0.125], [-45, 0.125], [-45,0]]
- ]
+ "type": "Polygon",
+ "coordinates": [[[-45, 0], [-44.875, 0], [-44.875, 0.125], [-45, 0.125], [-45, 0]]]
}
});
res = t.createIndex({loc: "2dsphere"});
diff --git a/jstests/core/geo_s2indexoldformat.js b/jstests/core/geo_s2indexoldformat.js
index 4ed0afba8dd..43974f695cb 100755..100644
--- a/jstests/core/geo_s2indexoldformat.js
+++ b/jstests/core/geo_s2indexoldformat.js
@@ -3,26 +3,26 @@
t = db.geo_s2indexoldformat;
t.drop();
-t.insert( {geo : [40, 5], nonGeo: ["pointA"]});
-t.insert( {geo : [41.001, 6.001], nonGeo: ["pointD"]});
-t.insert( {geo : [41, 6], nonGeo: ["pointB"]});
-t.insert( {geo : [41, 6]} );
-t.insert( {geo : {x:40.6, y:5.4}} );
+t.insert({geo: [40, 5], nonGeo: ["pointA"]});
+t.insert({geo: [41.001, 6.001], nonGeo: ["pointD"]});
+t.insert({geo: [41, 6], nonGeo: ["pointB"]});
+t.insert({geo: [41, 6]});
+t.insert({geo: {x: 40.6, y: 5.4}});
-t.ensureIndex( { geo : "2dsphere", nonGeo: 1 } );
+t.ensureIndex({geo: "2dsphere", nonGeo: 1});
-res = t.find({ "geo" : { "$geoIntersects" : { "$geometry": {x:40, y:5}}}});
+res = t.find({"geo": {"$geoIntersects": {"$geometry": {x: 40, y: 5}}}});
assert.eq(res.count(), 1);
-res = t.find({ "geo" : { "$geoIntersects" : {"$geometry": [41,6]}}});
+res = t.find({"geo": {"$geoIntersects": {"$geometry": [41, 6]}}});
assert.eq(res.count(), 2);
// We don't support legacy polygons in 2dsphere.
-assert.writeError(t.insert( {geo : [[40,5],[40,6],[41,6],[41,5]], nonGeo: ["somepoly"] }));
-assert.writeError(t.insert( {geo : {a:{x:40,y:5},b:{x:40,y:6},c:{x:41,y:6},d:{x:41,y:5}}}));
+assert.writeError(t.insert({geo: [[40, 5], [40, 6], [41, 6], [41, 5]], nonGeo: ["somepoly"]}));
+assert.writeError(
+ t.insert({geo: {a: {x: 40, y: 5}, b: {x: 40, y: 6}, c: {x: 41, y: 6}, d: {x: 41, y: 5}}}));
// Test "Can't canonicalize query: BadValue bad geo query" error.
assert.throws(function() {
- t.findOne({ "geo" : { "$geoIntersects" : {"$geometry": [[40,5],[40,6],[41,6],[41,5]]}}});
+ t.findOne({"geo": {"$geoIntersects": {"$geometry": [[40, 5], [40, 6], [41, 6], [41, 5]]}}});
});
-
diff --git a/jstests/core/geo_s2indexversion1.js b/jstests/core/geo_s2indexversion1.js
index 0899eb404cd..49aa80dbbca 100644
--- a/jstests/core/geo_s2indexversion1.js
+++ b/jstests/core/geo_s2indexversion1.js
@@ -71,7 +71,9 @@ coll.drop();
res = coll.ensureIndex({geo: "2dsphere"});
assert.commandWorked(res);
-var specObj = coll.getIndexes().filter( function(z){ return z.name == "geo_2dsphere"; } )[0];
+var specObj = coll.getIndexes().filter(function(z) {
+ return z.name == "geo_2dsphere";
+})[0];
assert.eq(3, specObj["2dsphereIndexVersion"]);
coll.drop();
@@ -104,38 +106,78 @@ coll.drop();
// Test compatibility of various GeoJSON objects with both 2dsphere index versions.
//
-var pointDoc = {geo: {type: "Point", coordinates: [40, 5]}};
-var lineStringDoc = {geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}};
-var polygonDoc = {geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}};
-var multiPointDoc = {geo: {type: "MultiPoint",
- coordinates: [[-73.9580, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.9814, 40.7681]]}};
-var multiLineStringDoc = {geo: {type: "MultiLineString",
- coordinates: [[[-73.96943, 40.78519], [-73.96082, 40.78095]],
- [[-73.96415, 40.79229], [-73.95544, 40.78854]],
- [[-73.97162, 40.78205], [-73.96374, 40.77715]],
- [[-73.97880, 40.77247], [-73.97036, 40.76811]]]}};
-var multiPolygonDoc = {geo: {type: "MultiPolygon",
- coordinates: [[[[-73.958, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.9814, 40.7681],
- [-73.958, 40.8003]]],
- [[[-73.958, 40.8003], [-73.9498, 40.7968],
- [-73.9737, 40.7648], [-73.958, 40.8003]]]]}};
-var geometryCollectionDoc = {geo: {type: "GeometryCollection",
- geometries: [{type: "MultiPoint",
- coordinates: [[-73.9580, 40.8003],
- [-73.9498, 40.7968],
- [-73.9737, 40.7648],
- [-73.9814, 40.7681]]},
- {type: "MultiLineString",
- coordinates: [[[-73.96943, 40.78519],
- [-73.96082, 40.78095]],
- [[-73.96415, 40.79229],
- [-73.95544, 40.78854]],
- [[-73.97162, 40.78205],
- [-73.96374, 40.77715]],
- [[-73.97880, 40.77247],
- [-73.97036, 40.76811]]]}]}};
+var pointDoc = {
+ geo: {type: "Point", coordinates: [40, 5]}
+};
+var lineStringDoc = {
+ geo: {type: "LineString", coordinates: [[40, 5], [41, 6]]}
+};
+var polygonDoc = {
+ geo: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 1], [0, 0]]]}
+};
+var multiPointDoc = {
+ geo: {
+ type: "MultiPoint",
+ coordinates:
+ [[-73.9580, 40.8003], [-73.9498, 40.7968], [-73.9737, 40.7648], [-73.9814, 40.7681]]
+ }
+};
+var multiLineStringDoc = {
+ geo: {
+ type: "MultiLineString",
+ coordinates: [
+ [[-73.96943, 40.78519], [-73.96082, 40.78095]],
+ [[-73.96415, 40.79229], [-73.95544, 40.78854]],
+ [[-73.97162, 40.78205], [-73.96374, 40.77715]],
+ [[-73.97880, 40.77247], [-73.97036, 40.76811]]
+ ]
+ }
+};
+var multiPolygonDoc = {
+ geo: {
+ type: "MultiPolygon",
+ coordinates: [
+ [[
+ [-73.958, 40.8003],
+ [-73.9498, 40.7968],
+ [-73.9737, 40.7648],
+ [-73.9814, 40.7681],
+ [-73.958, 40.8003]
+ ]],
+ [[
+ [-73.958, 40.8003],
+ [-73.9498, 40.7968],
+ [-73.9737, 40.7648],
+ [-73.958, 40.8003]
+ ]]
+ ]
+ }
+};
+var geometryCollectionDoc = {
+ geo: {
+ type: "GeometryCollection",
+ geometries: [
+ {
+ type: "MultiPoint",
+ coordinates: [
+ [-73.9580, 40.8003],
+ [-73.9498, 40.7968],
+ [-73.9737, 40.7648],
+ [-73.9814, 40.7681]
+ ]
+ },
+ {
+ type: "MultiLineString",
+ coordinates: [
+ [[-73.96943, 40.78519], [-73.96082, 40.78095]],
+ [[-73.96415, 40.79229], [-73.95544, 40.78854]],
+ [[-73.97162, 40.78205], [-73.96374, 40.77715]],
+ [[-73.97880, 40.77247], [-73.97036, 40.76811]]
+ ]
+ }
+ ]
+ }
+};
// {2dsphereIndexVersion: 2} indexes allow all supported GeoJSON objects.
res = coll.ensureIndex({geo: "2dsphere"}, {"2dsphereIndexVersion": 2});
diff --git a/jstests/core/geo_s2intersection.js b/jstests/core/geo_s2intersection.js
index 287d52dfe10..bf65c02c0c2 100644
--- a/jstests/core/geo_s2intersection.js
+++ b/jstests/core/geo_s2intersection.js
@@ -1,33 +1,25 @@
var t = db.geo_s2intersectinglines;
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
/* All the tests in this file are generally confirming intersections based upon
* these three geo objects.
*/
var canonLine = {
name: 'canonLine',
- geo: {
- type: "LineString",
- coordinates: [[0.0, 0.0], [1.0, 0.0]]
- }
+ geo: {type: "LineString", coordinates: [[0.0, 0.0], [1.0, 0.0]]}
};
var canonPoint = {
name: 'canonPoint',
- geo: {
- type: "Point",
- coordinates: [10.0, 10.0]
- }
+ geo: {type: "Point", coordinates: [10.0, 10.0]}
};
var canonPoly = {
name: 'canonPoly',
geo: {
type: "Polygon",
- coordinates: [
- [[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]]
- ]
+ coordinates: [[[50.0, 50.0], [51.0, 50.0], [51.0, 51.0], [50.0, 51.0], [50.0, 50.0]]]
}
};
@@ -35,33 +27,34 @@ t.insert(canonLine);
t.insert(canonPoint);
t.insert(canonPoly);
-
-//Case 1: Basic sanity intersection.
-var testLine = {type: "LineString",
- coordinates: [[0.5, 0.5], [0.5, -0.5]]};
+// Case 1: Basic sanity intersection.
+var testLine = {
+ type: "LineString",
+ coordinates: [[0.5, 0.5], [0.5, -0.5]]
+};
var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 1);
assert.eq(result[0]['name'], 'canonLine');
-
-//Case 2: Basic Polygon intersection.
+// Case 2: Basic Polygon intersection.
// we expect that the canonLine should intersect with this polygon.
-var testPoly = {type: "Polygon",
- coordinates: [
- [[0.4, -0.1],[0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]]
- ]};
+var testPoly = {
+ type: "Polygon",
+ coordinates: [[[0.4, -0.1], [0.4, 0.1], [0.6, 0.1], [0.6, -0.1], [0.4, -0.1]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testPoly}}});
assert.eq(result.count(), 1);
assert.eq(result[0]['name'], 'canonLine');
-
-//Case 3: Intersects the vertex of a line.
+// Case 3: Intersects the vertex of a line.
// When a line intersects the vertex of a line, we expect this to
// count as a geoIntersection.
-testLine = {type: "LineString",
- coordinates: [[0.0, 0.5], [0.0, -0.5]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[0.0, 0.5], [0.0, -0.5]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 1);
@@ -70,35 +63,41 @@ assert.eq(result[0]['name'], 'canonLine');
// Case 4: Sanity no intersection.
// This line just misses the canonLine in the negative direction. This
// should not count as a geoIntersection.
-testLine = {type: "LineString",
- coordinates: [[-0.1, 0.5], [-0.1, -0.5]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[-0.1, 0.5], [-0.1, -0.5]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 0);
-
// Case 5: Overlapping line - only partially overlaps.
// Undefined behaviour: does intersect
-testLine = {type: "LineString",
- coordinates: [[-0.5, 0.0], [0.5, 0.0]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[-0.5, 0.0], [0.5, 0.0]]
+};
var result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 1);
assert.eq(result[0]['name'], 'canonLine');
-
// Case 6: Contained line - this line is fully contained by the canonLine
// Undefined behaviour: doesn't intersect.
-testLine = {type: "LineString",
- coordinates: [[0.1, 0.0], [0.9, 0.0]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[0.1, 0.0], [0.9, 0.0]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 0);
// Case 7: Identical line in the identical position.
// Undefined behaviour: does intersect.
-testLine = {type: "LineString",
- coordinates: [[0.0, 0.0], [1.0, 0.0]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[0.0, 0.0], [1.0, 0.0]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 1);
@@ -106,8 +105,10 @@ assert.eq(result[0]['name'], 'canonLine');
// Case 8: Point intersection - we search with a line that intersects
// with the canonPoint.
-testLine = {type: "LineString",
- coordinates: [[10.0, 11.0], [10.0, 9.0]]};
+testLine = {
+ type: "LineString",
+ coordinates: [[10.0, 11.0], [10.0, 9.0]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testLine}}});
assert.eq(result.count(), 1);
@@ -116,25 +117,30 @@ assert.eq(result[0]['name'], 'canonPoint');
// Case 9: Point point intersection
// as above but with an identical point to the canonPoint. We expect an
// intersection here.
-testPoint = {type: "Point",
- coordinates: [10.0, 10.0]};
+testPoint = {
+ type: "Point",
+ coordinates: [10.0, 10.0]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
assert.eq(result.count(), 1);
assert.eq(result[0]['name'], 'canonPoint');
-
-//Case 10: Sanity point non-intersection.
-var testPoint = {type: "Point",
- coordinates: [12.0, 12.0]};
+// Case 10: Sanity point non-intersection.
+var testPoint = {
+ type: "Point",
+ coordinates: [12.0, 12.0]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
assert.eq(result.count(), 0);
// Case 11: Point polygon intersection
// verify that a point inside a polygon $geoIntersects.
-testPoint = {type: "Point",
- coordinates: [50.5, 50.5]};
+testPoint = {
+ type: "Point",
+ coordinates: [50.5, 50.5]
+};
result = t.find({geo: {$geoIntersects: {$geometry: testPoint}}});
assert.eq(result.count(), 1);
@@ -144,10 +150,14 @@ assert.eq(result[0]['name'], 'canonPoly');
// $geoIntersects predicates.
t.drop();
t.ensureIndex({a: "2dsphere"});
-t.insert({a: {type: "Polygon", coordinates: [[[0,0], [3,6], [6,0], [0,0]]]}});
+t.insert({a: {type: "Polygon", coordinates: [[[0, 0], [3, 6], [6, 0], [0, 0]]]}});
-var firstPoint = {$geometry: {type: "Point", coordinates: [3.0, 1.0]}};
-var secondPoint = {$geometry: {type: "Point", coordinates: [4.0, 1.0]}};
+var firstPoint = {
+ $geometry: {type: "Point", coordinates: [3.0, 1.0]}
+};
+var secondPoint = {
+ $geometry: {type: "Point", coordinates: [4.0, 1.0]}
+};
// First point should intersect with the polygon.
result = t.find({a: {$geoIntersects: firstPoint}});
@@ -159,6 +169,5 @@ assert.eq(result.count(), 1);
// Both points intersect with the polygon, so the $and of
// two $geoIntersects should as well.
-result = t.find({$and: [{a: {$geoIntersects: firstPoint}},
- {a: {$geoIntersects: secondPoint}}]});
+result = t.find({$and: [{a: {$geoIntersects: firstPoint}}, {a: {$geoIntersects: secondPoint}}]});
assert.eq(result.count(), 1);
diff --git a/jstests/core/geo_s2largewithin.js b/jstests/core/geo_s2largewithin.js
index bd4ccafdae1..2bb0fb557b2 100644
--- a/jstests/core/geo_s2largewithin.js
+++ b/jstests/core/geo_s2largewithin.js
@@ -2,42 +2,33 @@
// doesn't take forever.
t = db.geo_s2largewithin;
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
testPoint = {
name: "origin",
- geo: {
- type: "Point",
- coordinates: [0.0, 0.0]
- }
+ geo: {type: "Point", coordinates: [0.0, 0.0]}
};
testHorizLine = {
name: "horiz",
- geo: {
- type: "LineString",
- coordinates: [[-2.0, 10.0], [2.0, 10.0]]
- }
+ geo: {type: "LineString", coordinates: [[-2.0, 10.0], [2.0, 10.0]]}
};
testVertLine = {
name: "vert",
- geo: {
- type: "LineString",
- coordinates: [[10.0, -2.0], [10.0, 2.0]]
- }
+ geo: {type: "LineString", coordinates: [[10.0, -2.0], [10.0, 2.0]]}
};
t.insert(testPoint);
t.insert(testHorizLine);
t.insert(testVertLine);
-//Test a poly that runs horizontally along the equator.
+// Test a poly that runs horizontally along the equator.
-longPoly = {type: "Polygon",
- coordinates: [
- [[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]]
- ]};
+longPoly = {
+ type: "Polygon",
+ coordinates: [[[30.0, 1.0], [-30.0, 1.0], [-30.0, -1.0], [30.0, -1.0], [30.0, 1.0]]]
+};
result = t.find({geo: {$geoWithin: {$geometry: longPoly}}});
assert.eq(result.itcount(), 1);
diff --git a/jstests/core/geo_s2meridian.js b/jstests/core/geo_s2meridian.js
index feb1dbefed5..583b426845c 100644
--- a/jstests/core/geo_s2meridian.js
+++ b/jstests/core/geo_s2meridian.js
@@ -9,22 +9,14 @@ t.ensureIndex({geo: "2dsphere"});
*/
meridianCrossingLine = {
- geo: {
- type: "LineString",
- coordinates: [
- [-178.0, 10.0],
- [178.0, 10.0]]
- }
+ geo: {type: "LineString", coordinates: [[-178.0, 10.0], [178.0, 10.0]]}
};
assert.writeOK(t.insert(meridianCrossingLine));
lineAlongMeridian = {
- type: "LineString",
- coordinates: [
- [180.0, 11.0],
- [180.0, 9.0]
- ]
+ type: "LineString",
+ coordinates: [[180.0, 11.0], [180.0, 9.0]]
};
result = t.find({geo: {$geoIntersects: {$geometry: lineAlongMeridian}}});
@@ -34,26 +26,17 @@ t.drop();
t.ensureIndex({geo: "2dsphere"});
/*
* Test 2: check that within work across the meridian. We insert points
- * on the meridian, and immediately on either side, and confirm that a poly
+ * on the meridian, and immediately on either side, and confirm that a poly
* covering all of them returns them all.
*/
pointOnNegativeSideOfMeridian = {
- geo: {
- type: "Point",
- coordinates: [-179.0, 1.0]
- }
+ geo: {type: "Point", coordinates: [-179.0, 1.0]}
};
pointOnMeridian = {
- geo: {
- type: "Point",
- coordinates: [180.0, 1.0]
- }
+ geo: {type: "Point", coordinates: [180.0, 1.0]}
};
pointOnPositiveSideOfMeridian = {
- geo: {
- type: "Point",
- coordinates: [179.0, 1.0]
- }
+ geo: {type: "Point", coordinates: [179.0, 1.0]}
};
t.insert(pointOnMeridian);
@@ -62,9 +45,8 @@ t.insert(pointOnPositiveSideOfMeridian);
meridianCrossingPoly = {
type: "Polygon",
- coordinates: [
- [[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]]
- ]
+ coordinates:
+ [[[-178.0, 10.0], [178.0, 10.0], [178.0, -10.0], [-178.0, -10.0], [-178.0, 10.0]]]
};
result = t.find({geo: {$geoWithin: {$geometry: meridianCrossingPoly}}});
@@ -79,18 +61,12 @@ t.ensureIndex({geo: "2dsphere"});
*/
pointOnNegativeSideOfMerid = {
name: "closer",
- geo: {
- type: "Point",
- coordinates: [-179.0, 0.0]
- }
+ geo: {type: "Point", coordinates: [-179.0, 0.0]}
};
pointOnPositiveSideOfMerid = {
name: "farther",
- geo: {
- type: "Point",
- coordinates: [176.0, 0.0]
- }
+ geo: {type: "Point", coordinates: [176.0, 0.0]}
};
t.insert(pointOnNegativeSideOfMerid);
diff --git a/jstests/core/geo_s2multi.js b/jstests/core/geo_s2multi.js
index 8d86f8ad08c..2cd6a3d73d7 100644
--- a/jstests/core/geo_s2multi.js
+++ b/jstests/core/geo_s2multi.js
@@ -4,38 +4,70 @@ t.drop();
t.ensureIndex({geo: "2dsphere"});
// Let's try the examples in the GeoJSON spec.
-multiPointA = { "type": "MultiPoint", "coordinates": [ [100.0, 0.0], [101.0, 1.0] ] };
+multiPointA = {
+ "type": "MultiPoint",
+ "coordinates": [[100.0, 0.0], [101.0, 1.0]]
+};
assert.writeOK(t.insert({geo: multiPointA}));
-multiLineStringA = { "type": "MultiLineString", "coordinates": [ [ [100.0, 0.0], [101.0, 1.0] ],
- [ [102.0, 2.0], [103.0, 3.0] ]]};
+multiLineStringA = {
+ "type": "MultiLineString",
+ "coordinates": [[[100.0, 0.0], [101.0, 1.0]], [[102.0, 2.0], [103.0, 3.0]]]
+};
assert.writeOK(t.insert({geo: multiLineStringA}));
-multiPolygonA = { "type": "MultiPolygon", "coordinates": [
- [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
- [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
- [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]]};
+multiPolygonA = {
+ "type": "MultiPolygon",
+ "coordinates": [
+ [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
+ [
+ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
+ [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]
+ ]
+ ]
+};
assert.writeOK(t.insert({geo: multiPolygonA}));
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [100,0]}}}}).itcount());
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [101.0,1.0]}}}}).itcount());
+assert.eq(3,
+ t.find({geo: {$geoIntersects: {$geometry: {"type": "Point", "coordinates": [100, 0]}}}})
+ .itcount());
+assert.eq(3,
+ t.find({
+ geo: {$geoIntersects: {$geometry: {"type": "Point", "coordinates": [101.0, 1.0]}}}
+ }).itcount());
// Inside the hole in multiPolygonA
-assert.eq(0, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "Point", "coordinates": [100.21,0.21]}}}}).itcount());
+assert.eq(
+ 0,
+ t.find({geo: {$geoIntersects: {$geometry: {"type": "Point", "coordinates": [100.21, 0.21]}}}})
+ .itcount());
// One point inside the hole, one out.
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21]]}}}}).itcount());
-assert.eq(3, t.find({geo: {$geoIntersects: {$geometry:
- {"type": "MultiPoint", "coordinates": [[100,0],[100.21,0.21],[101,1]]}}}}).itcount());
+assert.eq(
+ 3,
+ t.find({
+ geo: {
+ $geoIntersects:
+ {$geometry: {"type": "MultiPoint", "coordinates": [[100, 0], [100.21, 0.21]]}}
+ }
+ }).itcount());
+assert.eq(
+ 3,
+ t.find({
+ geo: {
+ $geoIntersects: {
+ $geometry:
+ {"type": "MultiPoint", "coordinates": [[100, 0], [100.21, 0.21], [101, 1]]}
+ }
+ }
+ }).itcount());
// Polygon contains itself and the multipoint.
assert.eq(2, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
-partialPolygonA = { "type": "Polygon", "coordinates":
- [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ] };
+partialPolygonA = {
+ "type": "Polygon",
+ "coordinates": [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]]
+};
assert.writeOK(t.insert({geo: partialPolygonA}));
// Polygon contains itself, the partial poly, and the multipoint
assert.eq(3, t.find({geo: {$geoWithin: {$geometry: multiPolygonA}}}).itcount());
diff --git a/jstests/core/geo_s2near.js b/jstests/core/geo_s2near.js
index 5d0da52b6ec..08bf5ab9081 100644
--- a/jstests/core/geo_s2near.js
+++ b/jstests/core/geo_s2near.js
@@ -3,10 +3,14 @@ t = db.geo_s2near;
t.drop();
// Make sure that geoNear gives us back loc
-goldenPoint = {type: "Point", coordinates: [ 31.0, 41.0]};
+goldenPoint = {
+ type: "Point",
+ coordinates: [31.0, 41.0]
+};
t.insert({geo: goldenPoint});
-t.ensureIndex({ geo : "2dsphere" });
-resNear = db.runCommand({geoNear : t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true});
+t.ensureIndex({geo: "2dsphere"});
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [30, 40], num: 1, spherical: true, includeLocs: true});
assert.eq(resNear.results[0].loc, goldenPoint);
// FYI:
@@ -17,60 +21,83 @@ lng = 0;
points = 10;
for (var x = -points; x < points; x += 1) {
for (var y = -points; y < points; y += 1) {
- t.insert({geo : { "type" : "Point", "coordinates" : [lng + x/1000.0, lat + y/1000.0]}});
+ t.insert({geo: {"type": "Point", "coordinates": [lng + x / 1000.0, lat + y / 1000.0]}});
}
}
-origin = { "type" : "Point", "coordinates": [ lng, lat ] };
+origin = {
+ "type": "Point",
+ "coordinates": [lng, lat]
+};
-t.ensureIndex({ geo : "2dsphere" });
+t.ensureIndex({geo: "2dsphere"});
// Near only works when the query is a point.
-someline = { "type" : "LineString", "coordinates": [ [ 40, 5], [41, 6]]};
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]};
-assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : someline } } }).count();});
-assert.throws(function() { return t.find({ "geo" : { "$near" : { "$geometry" : somepoly } } }).count();});
-assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: someline, spherical:true }).results.length;});
-assert.throws(function() { return db.runCommand({geoNear : t.getName(), near: somepoly, spherical:true }).results.length;});
+someline = {
+ "type": "LineString",
+ "coordinates": [[40, 5], [41, 6]]
+};
+somepoly = {
+ "type": "Polygon",
+ "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]
+};
+assert.throws(function() {
+ return t.find({"geo": {"$near": {"$geometry": someline}}}).count();
+});
+assert.throws(function() {
+ return t.find({"geo": {"$near": {"$geometry": somepoly}}}).count();
+});
+assert.throws(function() {
+ return db.runCommand({geoNear: t.getName(), near: someline, spherical: true}).results.length;
+});
+assert.throws(function() {
+ return db.runCommand({geoNear: t.getName(), near: somepoly, spherical: true}).results.length;
+});
// Do some basic near searches.
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 2000} } }).limit(10);
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, maxDistance: Math.PI, spherical: true});
+res = t.find({"geo": {"$near": {"$geometry": origin, $maxDistance: 2000}}}).limit(10);
+resNear = db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], num: 10, maxDistance: Math.PI, spherical: true});
assert.eq(res.itcount(), resNear.results.length, 10);
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10);
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10, spherical: true});
+res = t.find({"geo": {"$near": {"$geometry": origin}}}).limit(10);
+resNear = db.runCommand({geoNear: t.getName(), near: [0, 0], num: 10, spherical: true});
assert.eq(res.itcount(), resNear.results.length, 10);
// Find all the points!
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000);
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true});
+res = t.find({"geo": {"$near": {"$geometry": origin}}}).limit(10000);
+resNear = db.runCommand({geoNear: t.getName(), near: [0, 0], num: 10000, spherical: true});
assert.eq(resNear.results.length, res.itcount(), (2 * points) * (2 * points));
// longitude goes -180 to 180
// latitude goes -90 to 90
// Let's put in some perverse (polar) data and make sure we get it back.
// Points go long, lat.
-t.insert({geo: { "type" : "Point", "coordinates" : [-180, -90]}});
-t.insert({geo: { "type" : "Point", "coordinates" : [180, -90]}});
-t.insert({geo: { "type" : "Point", "coordinates" : [180, 90]}});
-t.insert({geo: { "type" : "Point", "coordinates" : [-180, 90]}});
-res = t.find({ "geo" : { "$near" : { "$geometry" : origin } } }).limit(10000);
-resNear = db.runCommand({geoNear : t.getName(), near: [0,0], num: 10000, spherical: true});
+t.insert({geo: {"type": "Point", "coordinates": [-180, -90]}});
+t.insert({geo: {"type": "Point", "coordinates": [180, -90]}});
+t.insert({geo: {"type": "Point", "coordinates": [180, 90]}});
+t.insert({geo: {"type": "Point", "coordinates": [-180, 90]}});
+res = t.find({"geo": {"$near": {"$geometry": origin}}}).limit(10000);
+resNear = db.runCommand({geoNear: t.getName(), near: [0, 0], num: 10000, spherical: true});
assert.eq(res.itcount(), resNear.results.length, (2 * points) * (2 * points) + 4);
function testRadAndDegreesOK(distance) {
// Distance for old style points is radians.
- resRadians = t.find({geo: {$nearSphere: [0,0], $maxDistance: (distance / (6378.1 * 1000))}});
+ resRadians = t.find({geo: {$nearSphere: [0, 0], $maxDistance: (distance / (6378.1 * 1000))}});
// Distance for new style points is meters.
- resMeters = t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: distance} } });
+ resMeters = t.find({"geo": {"$near": {"$geometry": origin, $maxDistance: distance}}});
// And we should get the same # of results no matter what.
assert.eq(resRadians.itcount(), resMeters.itcount());
// Also, geoNear should behave the same way.
- resGNMeters = db.runCommand({geoNear : t.getName(), near: origin, maxDistance: distance, spherical: true});
- resGNRadians = db.runCommand({geoNear : t.getName(), near: [0,0], maxDistance: (distance / (6378.1 * 1000)), spherical: true});
+ resGNMeters = db.runCommand(
+ {geoNear: t.getName(), near: origin, maxDistance: distance, spherical: true});
+ resGNRadians = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ maxDistance: (distance / (6378.1 * 1000)),
+ spherical: true
+ });
assert.eq(resGNRadians.results.length, resGNMeters.results.length);
for (var i = 0; i < resGNRadians.length; ++i) {
// Radius of earth * radians = distance in meters.
@@ -84,4 +111,5 @@ testRadAndDegreesOK(50);
testRadAndDegreesOK(10000);
// SERVER-13666 legacy coordinates must be in bounds for spherical near queries.
-assert.commandFailed(db.runCommand({geoNear : t.getName(), near: [1210.466, 31.2051], spherical: true, num: 10}));
+assert.commandFailed(
+ db.runCommand({geoNear: t.getName(), near: [1210.466, 31.2051], spherical: true, num: 10}));
diff --git a/jstests/core/geo_s2nearComplex.js b/jstests/core/geo_s2nearComplex.js
index ecb5e646a54..0584c5e694a 100644
--- a/jstests/core/geo_s2nearComplex.js
+++ b/jstests/core/geo_s2nearComplex.js
@@ -11,25 +11,26 @@ var sin = Math.sin;
var cos = Math.cos;
var atan2 = Math.atan2;
-
-var originGeo = {type: "Point", coordinates: [20.0, 20.0]};
+var originGeo = {
+ type: "Point",
+ coordinates: [20.0, 20.0]
+};
// Center point for all tests.
var origin = {
name: "origin",
- geo: originGeo
+ geo: originGeo
};
-
/*
* Convenience function for checking that coordinates match. threshold let's you
* specify how accurate equals should be.
*/
-function coordinateEqual(first, second, threshold){
+function coordinateEqual(first, second, threshold) {
threshold = threshold || 0.001;
first = first['geo']['coordinates'];
second = second['geo']['coordinates'];
- if(Math.abs(first[0] - second[0]) <= threshold){
- if(Math.abs(first[1] - second[1]) <= threshold){
+ if (Math.abs(first[0] - second[0]) <= threshold) {
+ if (Math.abs(first[1] - second[1]) <= threshold) {
return true;
}
}
@@ -43,44 +44,49 @@ function coordinateEqual(first, second, threshold){
* be returned.
* based on this algorithm: http://williams.best.vwh.net/avform.htm#LL
*/
-function uniformPoints(origin, count, minDist, maxDist){
+function uniformPoints(origin, count, minDist, maxDist) {
var i;
var lng = origin['geo']['coordinates'][0];
var lat = origin['geo']['coordinates'][1];
var distances = [];
var points = [];
- for(i=0; i < count; i++){
+ for (i = 0; i < count; i++) {
distances.push((random() * (maxDist - minDist)) + minDist);
}
distances.sort();
- while(points.length < count){
+ while (points.length < count) {
var angle = random() * 2 * PI;
var distance = distances[points.length];
var pointLat = asin((sin(lat) * cos(distance)) + (cos(lat) * sin(distance) * cos(angle)));
- var pointDLng = atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat));
- var pointLng = ((lng - pointDLng + PI) % 2*PI) - PI;
+ var pointDLng =
+ atan2(sin(angle) * sin(distance) * cos(lat), cos(distance) - sin(lat) * sin(pointLat));
+ var pointLng = ((lng - pointDLng + PI) % 2 * PI) - PI;
// Latitude must be [-90, 90]
var newLat = lat + pointLat;
- if (newLat > 90) newLat -= 180;
- if (newLat < -90) newLat += 180;
+ if (newLat > 90)
+ newLat -= 180;
+ if (newLat < -90)
+ newLat += 180;
// Longitude must be [-180, 180]
var newLng = lng + pointLng;
- if (newLng > 180) newLng -= 360;
- if (newLng < -180) newLng += 360;
+ if (newLng > 180)
+ newLng -= 360;
+ if (newLng < -180)
+ newLng += 360;
var newPoint = {
geo: {
type: "Point",
- //coordinates: [lng + pointLng, lat + pointLat]
+ // coordinates: [lng + pointLng, lat + pointLat]
coordinates: [newLng, newLat]
}
};
points.push(newPoint);
}
- for(i=0; i < points.length; i++){
+ for (i = 0; i < points.length; i++) {
t.insert(points[i]);
}
return points;
@@ -88,12 +94,12 @@ function uniformPoints(origin, count, minDist, maxDist){
/*
* Creates a random uniform field as above, except for `numberOfHoles` gaps that
- * have `sizeOfHoles` points missing centered around a random point.
+ * have `sizeOfHoles` points missing centered around a random point.
*/
-function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles){
+function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, sizeOfHoles) {
var points = uniformPoints(origin, count, minDist, maxDist);
var i;
- for(i=0; i<numberOfHoles; i++){
+ for (i = 0; i < numberOfHoles; i++) {
var randomPoint = points[Math.floor(random() * points.length)];
removeNearest(randomPoint, sizeOfHoles);
}
@@ -105,10 +111,11 @@ function uniformPointsWithGaps(origin, count, minDist, maxDist, numberOfHoles, s
* you may specify an optional `distRatio` parameter which will specify the area that the cluster
* covers as a fraction of the full area that points are created on. Defaults to 10.
*/
-function uniformPointsWithClusters(origin, count, minDist, maxDist, numberOfClusters, minClusterSize, maxClusterSize, distRatio){
+function uniformPointsWithClusters(
+ origin, count, minDist, maxDist, numberOfClusters, minClusterSize, maxClusterSize, distRatio) {
distRatio = distRatio || 10;
var points = uniformPoints(origin, count, minDist, maxDist);
- for(j=0; j<numberOfClusters; j++){
+ for (j = 0; j < numberOfClusters; j++) {
var randomPoint = points[Math.floor(random() * points.length)];
var clusterSize = (random() * (maxClusterSize - minClusterSize)) + minClusterSize;
uniformPoints(randomPoint, clusterSize, minDist / distRatio, maxDist / distRatio);
@@ -118,10 +125,10 @@ function uniformPointsWithClusters(origin, count, minDist, maxDist, numberOfClus
* Function used to create gaps in existing point field. Will remove the `number` nearest
* geo objects to the specified `point`.
*/
-function removeNearest(point, number){
+function removeNearest(point, number) {
var pointsToRemove = t.find({geo: {$geoNear: {$geometry: point['geo']}}}).limit(number);
var idsToRemove = [];
- while(pointsToRemove.hasNext()){
+ while (pointsToRemove.hasNext()) {
point = pointsToRemove.next();
idsToRemove.push(point['_id']);
}
@@ -129,34 +136,36 @@ function removeNearest(point, number){
t.remove({_id: {$in: idsToRemove}});
}
/*
- * Validates the ordering of the nearest results is the same no matter how many
+ * Validates that the ordering of the nearest results is the same no matter how many
* geo objects are requested. This could fail if two points have the same dist
* from origin, because they may not be well-ordered. If we see strange failures,
* we should consider that.
*/
-function validateOrdering(query){
+function validateOrdering(query) {
var near10 = t.find(query).limit(10);
var near20 = t.find(query).limit(20);
var near30 = t.find(query).limit(30);
var near40 = t.find(query).limit(40);
- for(i=0;i<10;i++){
+ for (i = 0; i < 10; i++) {
assert(coordinateEqual(near10[i], near20[i]));
assert(coordinateEqual(near10[i], near30[i]));
assert(coordinateEqual(near10[i], near40[i]));
}
- for(i=0;i<20;i++){
+ for (i = 0; i < 20; i++) {
assert(coordinateEqual(near20[i], near30[i]));
assert(coordinateEqual(near20[i], near40[i]));
}
- for(i=0;i<30;i++){
+ for (i = 0; i < 30; i++) {
assert(coordinateEqual(near30[i], near40[i]));
}
}
-var query = {geo: {$geoNear: {$geometry: originGeo}}};
+var query = {
+ geo: {$geoNear: {$geometry: originGeo}}
+};
// Test a uniform distribution of 1000 points.
uniformPoints(origin, 1000, 0.5, 1.5);
@@ -199,10 +208,13 @@ t.ensureIndex({geo: "2dsphere"});
// Test a uniform near search with origin around the pole.
// Center point near pole.
-originGeo = {type: "Point", coordinates: [0.0, 89.0]};
+originGeo = {
+ type: "Point",
+ coordinates: [0.0, 89.0]
+};
origin = {
name: "origin",
- geo: originGeo
+ geo: originGeo
};
uniformPoints(origin, 50, 0.5, 1.5);
@@ -210,17 +222,21 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}});
print("Millis for uniform near pole:");
print(t.find({geo: {$geoNear: {$geometry: originGeo}}})
- .explain("executionStats").executionStats.executionTimeMillis);
+ .explain("executionStats")
+ .executionStats.executionTimeMillis);
assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
t.drop();
t.ensureIndex({geo: "2dsphere"});
// Center point near the meridian
-originGeo = {type: "Point", coordinates: [179.0, 0.0]};
+originGeo = {
+ type: "Point",
+ coordinates: [179.0, 0.0]
+};
origin = {
name: "origin",
- geo: originGeo
+ geo: originGeo
};
uniformPoints(origin, 50, 0.5, 1.5);
@@ -228,17 +244,21 @@ validateOrdering({geo: {$geoNear: {$geometry: originGeo}}});
print("Millis for uniform on meridian:");
print(t.find({geo: {$geoNear: {$geometry: originGeo}}})
- .explain("executionStats").executionStats.executionTimeMillis);
+ .explain("executionStats")
+ .executionStats.executionTimeMillis);
assert.eq(t.find({geo: {$geoNear: {$geometry: originGeo}}}).itcount(), 50);
t.drop();
t.ensureIndex({geo: "2dsphere"});
// Center point near the negative meridian
-originGeo = {type: "Point", coordinates: [-179.0, 0.0]};
+originGeo = {
+ type: "Point",
+ coordinates: [-179.0, 0.0]
+};
origin = {
name: "origin",
- geo: originGeo
+ geo: originGeo
};
uniformPoints(origin, 50, 0.5, 1.5);
@@ -246,13 +266,17 @@ validateOrdering({geo: {$near: {$geometry: originGeo}}});
print("Millis for uniform on negative meridian:");
print(t.find({geo: {$geoNear: {$geometry: originGeo}}})
- .explain("executionStats").executionStats.executionTimeMillis);
+ .explain("executionStats")
+ .executionStats.executionTimeMillis);
assert.eq(t.find({geo: {$near: {$geometry: originGeo}}}).itcount(), 50);
// Near search with points that are really far away.
t.drop();
t.ensureIndex({geo: "2dsphere"});
-originGeo = {type: "Point", coordinates: [0.0, 0.0]};
+originGeo = {
+ type: "Point",
+ coordinates: [0.0, 0.0]
+};
origin = {
name: "origin",
geo: originGeo
@@ -267,6 +291,7 @@ cur = t.find({geo: {$near: {$geometry: originGeo}}});
print("Near search on very distant points:");
print(t.find({geo: {$geoNear: {$geometry: originGeo}}})
- .explain("executionStats").executionStats.executionTimeMillis);
+ .explain("executionStats")
+ .executionStats.executionTimeMillis);
pt = cur.next();
assert(pt);
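
The validateOrdering helper reformatted above rests on a simple invariant: when no two points are equidistant from the query point, the first 10 results of a $geoNear sort are a prefix of the first 20, 30, and 40. A hedged, standalone sketch of that prefix check, reusing t and coordinateEqual from the test:

var q = {geo: {$geoNear: {$geometry: {type: "Point", coordinates: [20.0, 20.0]}}}};
var first10 = t.find(q).limit(10).toArray();
var first20 = t.find(q).limit(20).toArray();
for (var i = 0; i < first10.length; i++) {
    // Every result in the shorter run should reappear, in order, in the longer run.
    assert(coordinateEqual(first10[i], first20[i]));
}
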
diff --git a/jstests/core/geo_s2near_equator_opposite.js b/jstests/core/geo_s2near_equator_opposite.js
index 8ee5d486d5e..13bbc776daa 100644
--- a/jstests/core/geo_s2near_equator_opposite.js
+++ b/jstests/core/geo_s2near_equator_opposite.js
@@ -14,12 +14,14 @@ t.ensureIndex({loc: '2dsphere'});
// upper bound for half of earth's circumference in meters
var dist = 40075000 / 2 + 1;
-var nearSphereCount = t.find({loc: {$nearSphere:
- {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
-var nearCount = t.find({loc: {$near:
- {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}}).itcount();
-var geoNearResult = db.runCommand({geoNear: t.getName(), near:
- {type: 'Point', coordinates: [180, 0]}, spherical: true});
+var nearSphereCount = t.find({
+ loc: {$nearSphere: {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}
+}).itcount();
+var nearCount =
+ t.find({loc: {$near: {$geometry: {type: 'Point', coordinates: [180, 0]}, $maxDistance: dist}}})
+ .itcount();
+var geoNearResult = db.runCommand(
+ {geoNear: t.getName(), near: {type: 'Point', coordinates: [180, 0]}, spherical: true});
print('nearSphere count = ' + nearSphereCount);
print('near count = ' + nearCount);
diff --git a/jstests/core/geo_s2nearcorrect.js b/jstests/core/geo_s2nearcorrect.js
index 9fdeb4aa6a3..54552a4bee5 100644
--- a/jstests/core/geo_s2nearcorrect.js
+++ b/jstests/core/geo_s2nearcorrect.js
@@ -5,8 +5,14 @@
t = db.geo_s2nearcorrect;
t.drop();
-longline = { "type" : "LineString", "coordinates": [ [0,0], [179, 89]]};
+longline = {
+ "type": "LineString",
+ "coordinates": [[0, 0], [179, 89]]
+};
t.insert({geo: longline});
t.ensureIndex({geo: "2dsphere"});
-origin = { "type" : "Point", "coordinates": [ 45, 45] };
-assert.eq(1, t.find({ "geo" : { "$near" : { "$geometry" : origin, $maxDistance: 20000000} } }).count());
+origin = {
+ "type": "Point",
+ "coordinates": [45, 45]
+};
+assert.eq(1, t.find({"geo": {"$near": {"$geometry": origin, $maxDistance: 20000000}}}).count());
diff --git a/jstests/core/geo_s2nearwithin.js b/jstests/core/geo_s2nearwithin.js
index 1e5a20d5209..1bcec709643 100644
--- a/jstests/core/geo_s2nearwithin.js
+++ b/jstests/core/geo_s2nearwithin.js
@@ -9,33 +9,56 @@ for (var x = -points; x < points; x += 1) {
}
}
-origin = { "type" : "Point", "coordinates": [ 0, 0] };
+origin = {
+ "type": "Point",
+ "coordinates": [0, 0]
+};
-t.ensureIndex({ geo : "2dsphere" });
+t.ensureIndex({geo: "2dsphere"});
// Near requires an index, and 2dsphere is an index. Spherical isn't
// specified so this doesn't work.
-assert.commandFailed( db.runCommand({ geoNear: t.getName(), near: [0, 0],
- query: { geo: { $within: { $center: [[0, 0], 1] }}}}));
+assert.commandFailed(db.runCommand(
+ {geoNear: t.getName(), near: [0, 0], query: {geo: {$within: {$center: [[0, 0], 1]}}}}));
// Spherical is specified so this does work. Old style points are weird
// because you can use them with both $center and $centerSphere. Points are
// the only things we will do this conversion for.
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$center: [[0, 0], 1]}}}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ spherical: true,
+ query: {geo: {$within: {$center: [[0, 0], 1]}}}
+});
assert.eq(resNear.results.length, 5);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[0, 0], Math.PI/180.0]}}}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ spherical: true,
+ query: {geo: {$within: {$centerSphere: [[0, 0], Math.PI / 180.0]}}}
+});
assert.eq(resNear.results.length, 5);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[0, 0], 0]}}}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ spherical: true,
+ query: {geo: {$within: {$centerSphere: [[0, 0], 0]}}}
+});
assert.eq(resNear.results.length, 1);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$centerSphere: [[1, 0], 0.5 * Math.PI/180.0]}}}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ spherical: true,
+ query: {geo: {$within: {$centerSphere: [[1, 0], 0.5 * Math.PI / 180.0]}}}
+});
assert.eq(resNear.results.length, 1);
-resNear = db.runCommand({geoNear : t.getName(), near: [0, 0], spherical: true,
- query: {geo: {$within: {$center: [[1, 0], 1.5]}}}});
+resNear = db.runCommand({
+ geoNear: t.getName(),
+ near: [0, 0],
+ spherical: true,
+ query: {geo: {$within: {$center: [[1, 0], 1.5]}}}
+});
assert.eq(resNear.results.length, 9);
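
One unit detail the reformatting above makes easier to spot: $center takes its radius in coordinate units (degrees for this data), while $centerSphere takes radians, which is why a radius of 1 and a radius of Math.PI / 180 describe roughly the same one-degree circle in these queries. A small sketch of the conversion (the helper name is illustrative, not part of the test):

// Degrees-to-radians conversion used when moving between $center and $centerSphere.
function degreesToRadians(deg) {
    return deg * Math.PI / 180;
}
var centerSphereRadius = degreesToRadians(1);  // equals the Math.PI / 180.0 used above
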
diff --git a/jstests/core/geo_s2nongeoarray.js b/jstests/core/geo_s2nongeoarray.js
index 067c338faf3..8684706d168 100644
--- a/jstests/core/geo_s2nongeoarray.js
+++ b/jstests/core/geo_s2nongeoarray.js
@@ -2,14 +2,18 @@
// we find them with queries.
t = db.geo_s2nongeoarray;
-oldPoint = [40,5];
+oldPoint = [40, 5];
-var data = {geo: oldPoint, nonGeo: [123,456], otherNonGeo: [{b:[1,2]},{b:[3,4]}]};
+var data = {
+ geo: oldPoint,
+ nonGeo: [123, 456],
+ otherNonGeo: [{b: [1, 2]}, {b: [3, 4]}]
+};
t.drop();
assert.writeOK(t.insert(data));
-assert.commandWorked(t.ensureIndex({ otherNonGeo: 1 }));
-assert.eq(1, t.find({otherNonGeo: {b:[1,2]}}).itcount());
+assert.commandWorked(t.ensureIndex({otherNonGeo: 1}));
+assert.eq(1, t.find({otherNonGeo: {b: [1, 2]}}).itcount());
assert.eq(0, t.find({otherNonGeo: 1}).itcount());
assert.eq(1, t.find({'otherNonGeo.b': 1}).itcount());
diff --git a/jstests/core/geo_s2nonstring.js b/jstests/core/geo_s2nonstring.js
index 1f3258eeac3..43587f0c8e8 100755..100644
--- a/jstests/core/geo_s2nonstring.js
+++ b/jstests/core/geo_s2nonstring.js
@@ -2,21 +2,27 @@
t = db.geo_s2nonstring;
t.drop();
-t.ensureIndex( { geo:'2dsphere', x:1 } );
+t.ensureIndex({geo: '2dsphere', x: 1});
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:5 } );
+t.save({geo: {type: 'Point', coordinates: [0, 0]}, x: 'a'});
+t.save({geo: {type: 'Point', coordinates: [0, 0]}, x: 5});
t.drop();
-t.ensureIndex( { geo:'2dsphere', x:1 } );
+t.ensureIndex({geo: '2dsphere', x: 1});
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] }, x:'a' } );
-t.save( { geo:{ type:'Point', coordinates:[ 0, 0 ] } } );
+t.save({geo: {type: 'Point', coordinates: [0, 0]}, x: 'a'});
+t.save({geo: {type: 'Point', coordinates: [0, 0]}});
// Expect 1 match, where x is 'a'
-assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
- $maxDistance: 20 } }, x:'a' } ) );
+assert.eq(1,
+ t.count({
+ geo: {$near: {$geometry: {type: 'Point', coordinates: [0, 0]}, $maxDistance: 20}},
+ x: 'a'
+ }));
// Expect 1 match, where x matches null (missing matches null).
-assert.eq( 1, t.count( { geo:{ $near:{ $geometry:{ type:'Point', coordinates:[ 0, 0 ] },
- $maxDistance: 20 } }, x:null } ) );
+assert.eq(1,
+ t.count({
+ geo: {$near: {$geometry: {type: 'Point', coordinates: [0, 0]}, $maxDistance: 20}},
+ x: null
+ }));
diff --git a/jstests/core/geo_s2nopoints.js b/jstests/core/geo_s2nopoints.js
index 903487c7008..0d2afdb1672 100644
--- a/jstests/core/geo_s2nopoints.js
+++ b/jstests/core/geo_s2nopoints.js
@@ -2,6 +2,7 @@
t = db.geo_s2nopoints;
t.drop();
-t.ensureIndex({loc: "2dsphere", x:1});
-assert.eq(0, t.count({loc: {$near: {$geometry: {type: 'Point', coordinates:[0,0]},
- $maxDistance: 10}}}));
+t.ensureIndex({loc: "2dsphere", x: 1});
+assert.eq(
+ 0,
+ t.count({loc: {$near: {$geometry: {type: 'Point', coordinates: [0, 0]}, $maxDistance: 10}}}));
diff --git a/jstests/core/geo_s2oddshapes.js b/jstests/core/geo_s2oddshapes.js
index aa284bbe20e..6f14533c928 100644
--- a/jstests/core/geo_s2oddshapes.js
+++ b/jstests/core/geo_s2oddshapes.js
@@ -3,60 +3,51 @@
// rather wide if their latitude (or longitude) range is large.
var t = db.geo_s2oddshapes;
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
var testPoint = {
name: "origin",
- geo: {
- type: "Point",
- coordinates: [0.0, 0.0]
- }
+ geo: {type: "Point", coordinates: [0.0, 0.0]}
};
var testHorizLine = {
name: "horiz",
- geo: {
- type: "LineString",
- coordinates: [[-2.0, 10.0], [2.0, 10.0]]
- }
+ geo: {type: "LineString", coordinates: [[-2.0, 10.0], [2.0, 10.0]]}
};
var testVertLine = {
name: "vert",
- geo: {
- type: "LineString",
- coordinates: [[10.0, -2.0], [10.0, 2.0]]
- }
+ geo: {type: "LineString", coordinates: [[10.0, -2.0], [10.0, 2.0]]}
};
t.insert(testPoint);
t.insert(testHorizLine);
t.insert(testVertLine);
-//Test a poly that runs vertically all the way along the meridian.
+// Test a poly that runs vertically all the way along the meridian.
-var tallPoly = {type: "Polygon",
- coordinates: [
- [[1.0, 89.0], [-1.0, 89.0], [-1.0, -89.0], [1.0, -89.0], [1.0, 89.0]]
- ]};
-//We expect that the testPoint (at the origin) will be within this poly.
+var tallPoly = {
+ type: "Polygon",
+ coordinates: [[[1.0, 89.0], [-1.0, 89.0], [-1.0, -89.0], [1.0, -89.0], [1.0, 89.0]]]
+};
+// We expect that the testPoint (at the origin) will be within this poly.
var result = t.find({geo: {$within: {$geometry: tallPoly}}});
assert.eq(result.itcount(), 1);
var result = t.find({geo: {$within: {$geometry: tallPoly}}});
assert.eq(result[0].name, 'origin');
-//We expect that the testPoint, and the testHorizLine should geoIntersect
-//with this poly.
+// We expect that the testPoint, and the testHorizLine should geoIntersect
+// with this poly.
result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
assert.eq(result.itcount(), 2);
result = t.find({geo: {$geoIntersects: {$geometry: tallPoly}}});
-//Test a poly that runs horizontally along the equator.
+// Test a poly that runs horizontally along the equator.
-var longPoly = {type: "Polygon",
- coordinates: [
- [[89.0, 1.0], [-89.0, 1.0], [-89.0, -1.0], [89.0, -1.0], [89.0, 1.0]]
- ]};
+var longPoly = {
+ type: "Polygon",
+ coordinates: [[[89.0, 1.0], [-89.0, 1.0], [-89.0, -1.0], [89.0, -1.0], [89.0, 1.0]]]
+};
// Thanks to spherical geometry, this poly contains most of the hemisphere.
result = t.find({geo: {$within: {$geometry: longPoly}}});
@@ -64,36 +55,28 @@ assert.eq(result.itcount(), 3);
result = t.find({geo: {$geoIntersects: {$geometry: longPoly}}});
assert.eq(result.itcount(), 3);
-//Test a poly that is the size of half the earth.
+// Test a poly that is the size of half the earth.
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
var insidePoint = {
name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [100.0, 0.0]
- }
+ geo: {type: "Point", name: "inside", coordinates: [100.0, 0.0]}
};
var outsidePoint = {
name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [-100.0, 0.0]
- }
+ geo: {type: "Point", name: "inside", coordinates: [-100.0, 0.0]}
};
t.insert(insidePoint);
t.insert(outsidePoint);
-var largePoly = {type: "Polygon",
- coordinates: [
- [[0.0, -90.0], [0.0, 90.0], [180.0, 0], [0.0, -90.0]]
- ]};
+var largePoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -90.0], [0.0, 90.0], [180.0, 0], [0.0, -90.0]]]
+};
result = t.find({geo: {$within: {$geometry: largePoly}}});
assert.eq(result.itcount(), 1);
@@ -101,38 +84,31 @@ result = t.find({geo: {$within: {$geometry: largePoly}}});
var point = result[0];
assert.eq(point.name, 'inside');
-//Test a poly that is very small. A couple meters around.
+// Test a poly that is very small. A couple meters around.
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
insidePoint = {
name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [0.01, 0.0]
- }};
+ geo: {type: "Point", name: "inside", coordinates: [0.01, 0.0]}
+};
outsidePoint = {
name: "inside",
- geo: {
- type: "Point",
- name: "inside",
- coordinates: [0.2, 0.0]
- }};
+ geo: {type: "Point", name: "inside", coordinates: [0.2, 0.0]}
+};
t.insert(insidePoint);
t.insert(outsidePoint);
-smallPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -0.01], [0.015, -0.01], [0.015, 0.01], [0.0, 0.01], [0.0, -0.01]]
- ]};
+smallPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -0.01], [0.015, -0.01], [0.015, 0.01], [0.0, 0.01], [0.0, -0.01]]]
+};
result = t.find({geo: {$within: {$geometry: smallPoly}}});
assert.eq(result.itcount(), 1);
result = t.find({geo: {$within: {$geometry: smallPoly}}});
point = result[0];
assert.eq(point.name, 'inside');
-
diff --git a/jstests/core/geo_s2ordering.js b/jstests/core/geo_s2ordering.js
index 84b78edecfb..026fdda62c6 100644
--- a/jstests/core/geo_s2ordering.js
+++ b/jstests/core/geo_s2ordering.js
@@ -16,20 +16,21 @@ function makepoints(needle) {
for (var x = -points; x < points; x += 1) {
for (var y = -points; y < points; y += 1) {
tag = x.toString() + "," + y.toString();
- bulk.insert({ nongeo: tag,
- geo: {
- type: "Point",
- coordinates: [lng + x/points, lat + y/points]}});
+ bulk.insert({
+ nongeo: tag,
+ geo: {type: "Point", coordinates: [lng + x / points, lat + y / points]}
+ });
}
}
- bulk.insert({ nongeo: needle, geo: { type: "Point", coordinates: [0,0] }});
+ bulk.insert({nongeo: needle, geo: {type: "Point", coordinates: [0, 0]}});
assert.writeOK(bulk.execute());
}
function runTest(index) {
t.ensureIndex(index);
var resultcount = 0;
- var cursor = t.find({nongeo: needle, geo: {$within: {$centerSphere: [[0,0], Math.PI/180.0]}}});
+ var cursor =
+ t.find({nongeo: needle, geo: {$within: {$centerSphere: [[0, 0], Math.PI / 180.0]}}});
var stats = cursor.explain("executionStats").executionStats;
t.dropIndex(index);
diff --git a/jstests/core/geo_s2overlappingpolys.js b/jstests/core/geo_s2overlappingpolys.js
index 819879d960d..485132039d5 100644
--- a/jstests/core/geo_s2overlappingpolys.js
+++ b/jstests/core/geo_s2overlappingpolys.js
@@ -1,57 +1,55 @@
var t = db.geo_s2overlappingpolys;
t.drop();
-t.ensureIndex( { geo : "2dsphere" } );
+t.ensureIndex({geo: "2dsphere"});
var minError = 0.8e-13;
-var canonPoly = {type: "Polygon",
- coordinates: [
- [[-1.0, -1.0], [1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, -1.0]]
- ]};
+var canonPoly = {
+ type: "Polygon",
+ coordinates: [[[-1.0, -1.0], [1.0, -1.0], [1.0, 1.0], [-1.0, 1.0], [-1.0, -1.0]]]
+};
t.insert({geo: canonPoly});
-// Test 1: If a poly completely encloses the canonPoly, we expect the canonPoly
-// to be returned for both $within and $geoIntersect
+// Test 1: If a poly completely encloses the canonPoly, we expect the canonPoly
+// to be returned for both $within and $geoIntersect
-var outerPoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
- ]};
+var outerPoly = {
+ type: "Polygon",
+ coordinates: [[[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]]
+};
var result = t.find({geo: {$within: {$geometry: outerPoly}}});
assert.eq(result.itcount(), 1);
result = t.find({geo: {$geoIntersects: {$geometry: outerPoly}}});
assert.eq(result.itcount(), 1);
-
// Test 2: If a poly covers half of the canonPoly, we expect that it should
// geoIntersect, but should not be within.
-var partialPoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -2.0], [2.0, -2.0], [2.0, 0.0], [-2.0, 0.0], [-2.0, -2.0]]
- ]};
+var partialPoly = {
+ type: "Polygon",
+ coordinates: [[[-2.0, -2.0], [2.0, -2.0], [2.0, 0.0], [-2.0, 0.0], [-2.0, -2.0]]]
+};
-//Should not be within
+// Should not be within
result = t.find({geo: {$within: {$geometry: partialPoly}}});
assert.eq(result.itcount(), 0);
-//This should however count as a geoIntersect
+// This should however count as a geoIntersect
result = t.find({geo: {$geoIntersects: {$geometry: partialPoly}}});
assert.eq(result.itcount(), 1);
-
-// Test 3: Polygons that intersect at a point or an edge have undefined
-// behaviour in s2 The s2 library we're using appears to have
+// Test 3: Polygons that intersect at a point or an edge have undefined
+// behaviour in s2. The s2 library we're using appears to have
// the following behaviour.
// Case (a): Polygons that intersect at one point (not a vertex).
// behaviour: geoIntersects.
-var sharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var sharedPointPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [0.0, -1.0], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: sharedPointPoly}}});
assert.eq(result.itcount(), 1);
@@ -59,34 +57,33 @@ assert.eq(result.itcount(), 1);
// Case (b): Polygons that intersect at one point (a vertex).
// behaviour: not geoIntersect
-var sharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var sharedVertexPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [1.0, -1.0], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: sharedVertexPoly}}});
assert.eq(result.itcount(), 0);
-// Case (c): Polygons that intesersect at one point that is very close to a
+// Case (c): Polygons that intersect at one point that is very close to a
// vertex should have the same behaviour as Case (b).
-var almostSharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0 - minError, -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var almostSharedVertexPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [1.0 - minError, -1.0], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: almostSharedVertexPoly}}});
assert.eq(result.itcount(), 0);
-
-// Case (d): Polygons that intesersect at one point that is not quite as close
-// to a vertex should behave as though it were not a vertex, and should
+// Case (d): Polygons that intersect at one point that is not quite as close
+// to a vertex should behave as though it were not a vertex, and should
// geoIntersect
-var notCloseEnoughSharedVertexPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [1.0 - (10 * minError), -1.0], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var notCloseEnoughSharedVertexPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [1.0 - (10 * minError), -1.0], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedVertexPoly}}});
assert.eq(result.itcount(), 1);
@@ -94,40 +91,39 @@ assert.eq(result.itcount(), 1);
// Case (e): Polygons that come very close to having a point intersection
// on a non-vertex coordinate should intersect.
-var almostSharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, (-1.0 - minError)], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var almostSharedPointPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [0.0, (-1.0 - minError)], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: almostSharedPointPoly}}});
assert.eq(result.itcount(), 1);
-
// Case (f): If we increase the error a little, it should no longer act
// as though it's intersecting.
// NOTE: I think this error bound seems odd. Going to 0.000152297 will break this test.
// I've confirmed there is an error bound, but it's a lot larger than we experienced above.
var errorBound = 0.000152298;
-var notCloseEnoughSharedPointPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -2.0], [0.0, -1.0 - errorBound], [1.0, -2.0], [0.0, -2.0]]
- ]};
+var notCloseEnoughSharedPointPoly = {
+ type: "Polygon",
+ coordinates: [[[0.0, -2.0], [0.0, -1.0 - errorBound], [1.0, -2.0], [0.0, -2.0]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughSharedPointPoly}}});
assert.eq(result.itcount(), 0);
-/* Test 3: Importantly, polygons with shared edges have undefined intersection
- * under s2. Therefore these test serve more to make sure nothing changes than
+/* Test 3: Importantly, polygons with shared edges have undefined intersection
+ * under s2. Therefore these tests serve more to make sure nothing changes than
* to confirm an expected behaviour.
*/
// Case 1: A polygon who shares an edge with another polygon, where the searching
// polygon's edge is fully covered by the canon polygon's edge.
// Result: No intersection.
-var fullyCoveredEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -0.5], [-1.0, -0.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -0.5]]
- ]};
+var fullyCoveredEdgePoly = {
+ type: "Polygon",
+ coordinates: [[[-2.0, -0.5], [-1.0, -0.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -0.5]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: fullyCoveredEdgePoly}}});
assert.eq(result.itcount(), 0);
@@ -135,10 +131,10 @@ assert.eq(result.itcount(), 0);
// Case 2: A polygon who shares an edge with another polygon, where the searching
// polygon's edge fully covers the canon polygon's edge.
// Result: Intersection.
-var coveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
+var coveringEdgePoly = {
+ type: "Polygon",
+ coordinates: [[[-2.0, -1.5], [-1.0, -1.5], [-1.0, 1.5], [-2.0, 1.5], [-2.0, -1.5]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: coveringEdgePoly}}});
assert.eq(result.itcount(), 1);
@@ -146,21 +142,34 @@ assert.eq(result.itcount(), 1);
// Case 2a: same as Case 2, except pulled slightly away from the polygon.
// Result: Intersection.
// NOTE: Scales of errors?
-var closebyCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0 - (minError / 1000), -1.5], [-1.0 - (minError / 1000), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
+var closebyCoveringEdgePoly = {
+ type: "Polygon",
+ coordinates: [[
+ [-2.0, -1.5],
+ [-1.0 - (minError / 1000), -1.5],
+ [-1.0 - (minError / 1000), 1.5],
+ [-2.0, 1.5],
+ [-2.0, -1.5]
+ ]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: closebyCoveringEdgePoly}}});
assert.eq(result.itcount(), 1);
-// Case 2b: same as Case 4, except pulled slightly away from the polygon, so that it's not intersecting.
+// Case 2b: same as Case 4, except pulled slightly away from the polygon, so that it's not
+// intersecting.
// Result: No Intersection.
// NOTE: Scales of errors?
-var notCloseEnoughCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0 - (minError / 100), -1.5], [-1.0 - (minError / 100), 1.5], [-2.0, 1.5], [-2.0, -1.5]]
- ]};
+var notCloseEnoughCoveringEdgePoly = {
+ type: "Polygon",
+ coordinates: [[
+ [-2.0, -1.5],
+ [-1.0 - (minError / 100), -1.5],
+ [-1.0 - (minError / 100), 1.5],
+ [-2.0, 1.5],
+ [-2.0, -1.5]
+ ]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: notCloseEnoughCoveringEdgePoly}}});
assert.eq(result.itcount(), 0);
@@ -168,44 +177,60 @@ assert.eq(result.itcount(), 0);
// Case 3: A polygon who shares an edge with another polygon, where the searching
// polygon's edge partially covers the canon polygon's edge.
// Result: No intersection.
-var partiallyCoveringEdgePoly = {type: "Polygon",
- coordinates: [
- [[-2.0, -1.5], [-1.0, -1.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -1.5]]
- ]};
+var partiallyCoveringEdgePoly = {
+ type: "Polygon",
+ coordinates: [[[-2.0, -1.5], [-1.0, -1.5], [-1.0, 0.5], [-2.0, 0.5], [-2.0, -1.5]]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: partiallyCoveringEdgePoly}}});
assert.eq(result.itcount(), 0);
-
-//Polygons that intersect at three non-co-linear points should geoIntersect
-var sharedPointsPoly = {type: "Polygon",
- coordinates: [
- [[0.0, -3.0], [0.0, -1.0], [2.0, -2.0], [1.0, 0.0], [2.0, 2.0], [0.0, 1.0], [0.0, 3.0], [3.0, 3.0], [3.0, -3.0], [0.0, -3.0]]
- ]};
+// Polygons that intersect at three non-co-linear points should geoIntersect
+var sharedPointsPoly = {
+ type: "Polygon",
+ coordinates: [[
+ [0.0, -3.0],
+ [0.0, -1.0],
+ [2.0, -2.0],
+ [1.0, 0.0],
+ [2.0, 2.0],
+ [0.0, 1.0],
+ [0.0, 3.0],
+ [3.0, 3.0],
+ [3.0, -3.0],
+ [0.0, -3.0]
+ ]]
+};
result = t.find({geo: {$geoIntersects: {$geometry: sharedPointsPoly}}});
assert.eq(result.itcount(), 1);
-//If a polygon contains a hole, and another polygon is within that hole, it should not be within or intersect.
+// If a polygon contains a hole, and another polygon is within that hole, it should not be within or
+// intersect.
-var bigHolePoly = {type: "Polygon",
+var bigHolePoly = {
+ type: "Polygon",
coordinates: [
[[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
[[-2.0, -2.0], [2.0, -2.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, -2.0]]
- ]};
+ ]
+};
result = t.find({geo: {$within: {$geometry: bigHolePoly}}});
assert.eq(result.itcount(), 0);
result = t.find({geo: {$geoIntersects: {$geometry: bigHolePoly}}});
assert.eq(result.itcount(), 0);
-// If a polygon has a hole, and another polygon is contained partially by that hole, it should be an intersection
+// If a polygon has a hole, and another polygon is contained partially by that hole, it should be an
+// intersection
// but not a within.
-var internalOverlapPoly = {type: "Polygon",
+var internalOverlapPoly = {
+ type: "Polygon",
coordinates: [
[[-3.0, -3.0], [3.0, -3.0], [3.0, 3.0], [-3.0, 3.0], [-3.0, -3.0]],
[[-2.0, 0.0], [2.0, 0.0], [2.0, 2.0], [-2.0, 2.0], [-2.0, 0.0]]
- ]};
+ ]
+};
result = t.find({geo: {$geoIntersects: {$geometry: internalOverlapPoly}}});
assert.eq(result.itcount(), 1);
diff --git a/jstests/core/geo_s2polywithholes.js b/jstests/core/geo_s2polywithholes.js
index 85aafccdb68..6ace711c718 100755..100644
--- a/jstests/core/geo_s2polywithholes.js
+++ b/jstests/core/geo_s2polywithholes.js
@@ -2,17 +2,26 @@ var t = db.geo_s2weirdpolys;
t.drop();
t.ensureIndex({geo: "2dsphere"});
-var centerPoint = {"type": "Point", "coordinates": [0.5, 0.5]};
-var edgePoint = {"type": "Point", "coordinates": [0, 0.5]};
-var cornerPoint = {"type": "Point", "coordinates": [0, 0]};
+var centerPoint = {
+ "type": "Point",
+ "coordinates": [0.5, 0.5]
+};
+var edgePoint = {
+ "type": "Point",
+ "coordinates": [0, 0.5]
+};
+var cornerPoint = {
+ "type": "Point",
+ "coordinates": [0, 0]
+};
-t.insert({geo : centerPoint});
-t.insert({geo : edgePoint});
-t.insert({geo : cornerPoint});
+t.insert({geo: centerPoint});
+t.insert({geo: edgePoint});
+t.insert({geo: cornerPoint});
-var polygonWithNoHole = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
- ]
+var polygonWithNoHole = {
+ "type": "Polygon",
+ "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]
};
// Test 1: Sanity check. Expect all three points.
@@ -20,9 +29,11 @@ var sanityResult = t.find({geo: {$within: {$geometry: polygonWithNoHole}}});
assert.eq(sanityResult.itcount(), 3);
// Test 2: Polygon with a hole that isn't contained by the poly shell.
-var polygonWithProtrudingHole = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0.4,0.9], [0.4,1.1], [0.5, 1.1], [0.5, 0.9], [0.4, 0.9]]
+var polygonWithProtrudingHole = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
+ [[0.4, 0.9], [0.4, 1.1], [0.5, 1.1], [0.5, 0.9], [0.4, 0.9]]
]
};
@@ -36,36 +47,44 @@ assert.throws(function() {
// Test 3: This test will confirm that a polygon with overlapping holes throws
// an error.
-var polyWithOverlappingHoles = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0.2,0.6], [0.2,0.9], [0.6, 0.9], [0.6, 0.6], [0.2, 0.6]],
- [[0.5,0.4], [0.5,0.7], [0.8, 0.7], [0.8, 0.4], [0.5, 0.4]]
+var polyWithOverlappingHoles = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
+ [[0.2, 0.6], [0.2, 0.9], [0.6, 0.9], [0.6, 0.6], [0.2, 0.6]],
+ [[0.5, 0.4], [0.5, 0.7], [0.8, 0.7], [0.8, 0.4], [0.5, 0.4]]
]
};
assert.writeError(t.insert({geo: polyWithOverlappingHoles}));
// Test 4: Only one nesting is allowed by GeoJSON.
-var polyWithDeepHole = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0.1,0.1], [0.1,0.9], [0.9, 0.9], [0.9, 0.1], [0.1, 0.1]],
- [[0.2,0.2], [0.2,0.8], [0.8, 0.8], [0.8, 0.2], [0.2, 0.2]]
+var polyWithDeepHole = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
+ [[0.1, 0.1], [0.1, 0.9], [0.9, 0.9], [0.9, 0.1], [0.1, 0.1]],
+ [[0.2, 0.2], [0.2, 0.8], [0.8, 0.8], [0.8, 0.2], [0.2, 0.2]]
]
};
assert.writeError(t.insert({geo: polyWithDeepHole}));
// Test 5: The first ring must be the exterior ring.
-var polyWithBiggerHole = {"type" : "Polygon", "coordinates": [
- [[0.1,0.1], [0.1,0.9], [0.9, 0.9], [0.9, 0.1], [0.1, 0.1]],
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]]
+var polyWithBiggerHole = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0.1, 0.1], [0.1, 0.9], [0.9, 0.9], [0.9, 0.1], [0.1, 0.1]],
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]
]
};
assert.writeError(t.insert({geo: polyWithBiggerHole}));
// Test 6: Holes cannot share more than one vertex with exterior loop
-var polySharedVertices = {"type" : "Polygon", "coordinates": [
- [[0,0], [0,1], [1, 1], [1, 0], [0, 0]],
- [[0,0], [0.1,0.9], [1, 1], [0.9, 0.1], [0, 0]]
+var polySharedVertices = {
+ "type": "Polygon",
+ "coordinates": [
+ [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
+ [[0, 0], [0.1, 0.9], [1, 1], [0.9, 0.1], [0, 0]]
]
};
assert.writeError(t.insert({geo: polySharedVertices}));
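
For contrast with the rejected shapes above, a hedged sketch of a polygon the same collection should accept: a single hole fully contained by the exterior ring and sharing no vertices with it (the variable name is illustrative):

// One exterior ring followed by one hole that stays strictly inside it.
var polygonWithContainedHole = {
    "type": "Polygon",
    "coordinates": [
        [[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]],
        [[0.2, 0.2], [0.2, 0.8], [0.8, 0.8], [0.8, 0.2], [0.2, 0.2]]
    ]
};
assert.writeOK(t.insert({geo: polygonWithContainedHole}));
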
diff --git a/jstests/core/geo_s2selfintersectingpoly.js b/jstests/core/geo_s2selfintersectingpoly.js
index f34ea3a5ff1..236283ab8ac 100644
--- a/jstests/core/geo_s2selfintersectingpoly.js
+++ b/jstests/core/geo_s2selfintersectingpoly.js
@@ -2,10 +2,11 @@ var t = db.geo_s2selfintersectingpoly;
t.drop();
t.ensureIndex({geo: "2dsphere"});
-var intersectingPolygon = {"type": "Polygon", "coordinates": [
- [[0.0, 0.0], [0.0, 4.0], [-3.0, 2.0], [1.0, 2.0], [0.0, 0.0]]
-]};
+var intersectingPolygon = {
+ "type": "Polygon",
+ "coordinates": [[[0.0, 0.0], [0.0, 4.0], [-3.0, 2.0], [1.0, 2.0], [0.0, 0.0]]]
+};
/*
* Self intersecting polygons should cause a parse exception.
*/
-assert.writeError(t.insert({geo : intersectingPolygon}));
+assert.writeError(t.insert({geo: intersectingPolygon}));
diff --git a/jstests/core/geo_s2sparse.js b/jstests/core/geo_s2sparse.js
index e6454fbfbb7..ab3363b5860 100644
--- a/jstests/core/geo_s2sparse.js
+++ b/jstests/core/geo_s2sparse.js
@@ -3,9 +3,15 @@
var coll = db.geo_s2sparse;
-var point = { type: "Point", coordinates: [5, 5] };
+var point = {
+ type: "Point",
+ coordinates: [5, 5]
+};
-var indexSpec = { geo: "2dsphere", nonGeo: 1 };
+var indexSpec = {
+ geo: "2dsphere",
+ nonGeo: 1
+};
var indexName = 'test.geo_s2sparse.$geo_2dsphere_nonGeo_1';
@@ -20,7 +26,7 @@ coll.ensureIndex(indexSpec);
// Insert N documents with the geo field.
var N = 1000;
for (var i = 0; i < N; i++) {
- coll.insert({ geo: point, nonGeo: "point_"+i });
+ coll.insert({geo: point, nonGeo: "point_" + i});
}
// Expect N keys.
@@ -28,7 +34,7 @@ assert.eq(N, coll.validate().keysPerIndex[indexName]);
// Insert N documents without the geo field.
for (var i = 0; i < N; i++) {
- coll.insert({ wrongGeo: point, nonGeo: i});
+ coll.insert({wrongGeo: point, nonGeo: i});
}
// Still expect N keys as we didn't insert any geo stuff.
@@ -36,7 +42,7 @@ assert.eq(N, coll.validate().keysPerIndex[indexName]);
// Insert N documents with just the geo field.
for (var i = 0; i < N; i++) {
- coll.insert({ geo: point});
+ coll.insert({geo: point});
}
// Expect 2N keys.
@@ -44,10 +50,10 @@ assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
// Add some "not geo" stuff.
for (var i = 0; i < N; i++) {
- coll.insert({ geo: null});
- coll.insert({ geo: []});
- coll.insert({ geo: undefined});
- coll.insert({ geo: {}});
+ coll.insert({geo: null});
+ coll.insert({geo: []});
+ coll.insert({geo: undefined});
+ coll.insert({geo: {}});
}
// Still expect 2N keys.
@@ -62,7 +68,7 @@ coll.ensureIndex(indexSpec, {"2dsphereIndexVersion": 1});
// Insert N documents with the geo field.
for (var i = 0; i < N; i++) {
- coll.insert({ geo: point, nonGeo: "point_"+i });
+ coll.insert({geo: point, nonGeo: "point_" + i});
}
// Expect N keys.
@@ -70,7 +76,7 @@ assert.eq(N, coll.validate().keysPerIndex[indexName]);
// Insert N documents without the geo field.
for (var i = 0; i < N; i++) {
- coll.insert({ wrongGeo: point, nonGeo: i});
+ coll.insert({wrongGeo: point, nonGeo: i});
}
// Expect N keys as it's a V1 index.
@@ -89,7 +95,7 @@ indexName = 'test.geo_s2sparse.$geo_2dsphere_otherGeo_2dsphere';
// Insert N documents with the first geo field.
var N = 1000;
for (var i = 0; i < N; i++) {
- coll.insert({ geo: point});
+ coll.insert({geo: point});
}
// Expect N keys.
@@ -98,7 +104,7 @@ assert.eq(N, coll.validate().keysPerIndex[indexName]);
// Insert N documents with the second geo field.
var N = 1000;
for (var i = 0; i < N; i++) {
- coll.insert({ otherGeo: point});
+ coll.insert({otherGeo: point});
}
// They get inserted too.
@@ -106,7 +112,7 @@ assert.eq(N + N, coll.validate().keysPerIndex[indexName]);
// Insert N documents with neither geo field.
for (var i = 0; i < N; i++) {
- coll.insert({ nonGeo: i});
+ coll.insert({nonGeo: i});
}
// Still expect 2N keys, as the docs with neither geo field were omitted from the index.
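
The key-count assertions running through the hunks above all read the same validate() field; a small sketch of inspecting it directly (nothing here beyond what the test already calls):

// validate() reports per-index key counts; with a version-2 2dsphere index,
// documents lacking every indexed geo field contribute no keys at all.
var stats = coll.validate();
printjson(stats.keysPerIndex);
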
diff --git a/jstests/core/geo_s2twofields.js b/jstests/core/geo_s2twofields.js
index 26c75b08bfb..1868287cf5b 100644
--- a/jstests/core/geo_s2twofields.js
+++ b/jstests/core/geo_s2twofields.js
@@ -13,8 +13,14 @@ function randomCoord(center, minDistDeg, maxDistDeg) {
return [center[0] + dx, center[1] + dy];
}
-var nyc = {type: "Point", coordinates: [-74.0064, 40.7142]};
-var miami = {type: "Point", coordinates: [-80.1303, 25.7903]};
+var nyc = {
+ type: "Point",
+ coordinates: [-74.0064, 40.7142]
+};
+var miami = {
+ type: "Point",
+ coordinates: [-80.1303, 25.7903]
+};
var maxPoints = 10000;
var degrees = 5;
@@ -23,19 +29,23 @@ for (var i = 0; i < maxPoints; ++i) {
var fromCoord = randomCoord(nyc.coordinates, 0, degrees);
var toCoord = randomCoord(miami.coordinates, 0, degrees);
- arr.push( { from: { type: "Point", coordinates: fromCoord },
- to: { type: "Point", coordinates: toCoord}} );
+ arr.push({
+ from: {type: "Point", coordinates: fromCoord},
+ to: {type: "Point", coordinates: toCoord}
+ });
}
-res = t.insert( arr );
+res = t.insert(arr);
assert.writeOK(res);
-assert.eq( t.count(), maxPoints );
+assert.eq(t.count(), maxPoints);
function semiRigorousTime(func) {
var lowestTime = func();
var iter = 2;
for (var i = 0; i < iter; ++i) {
var run = func();
- if (run < lowestTime) { lowestTime = run; }
+ if (run < lowestTime) {
+ lowestTime = run;
+ }
}
return lowestTime;
}
@@ -55,19 +65,31 @@ function timeWithoutAndWithAnIndex(index, query) {
var maxQueryRad = 0.5 * PI / 180.0;
// When we're not looking at ALL the data, anything indexed should beat not-indexed.
-var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere", from: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+var smallQuery =
+ timeWithoutAndWithAnIndex({to: "2dsphere", from: "2dsphere"},
+ {
+ from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}},
+ to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}
+ });
print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
// assert(smallQuery[0] > smallQuery[1]);
// Let's just index one field.
-var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+var smallQuery =
+ timeWithoutAndWithAnIndex({to: "2dsphere"},
+ {
+ from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}},
+ to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}
+ });
print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
// assert(smallQuery[0] > smallQuery[1]);
// And the other one.
-var smallQuery = timeWithoutAndWithAnIndex({from: "2dsphere"},
- {from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}}, to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}});
+var smallQuery =
+ timeWithoutAndWithAnIndex({from: "2dsphere"},
+ {
+ from: {$within: {$centerSphere: [nyc.coordinates, maxQueryRad]}},
+ to: {$within: {$centerSphere: [miami.coordinates, maxQueryRad]}}
+ });
print("Indexed time " + smallQuery[1] + " unindexed " + smallQuery[0]);
// assert(smallQuery[0] > smallQuery[1]);
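
The three timing blocks above repeat one filter shape; if the test were ever revisited, the shared query could be built once, as in this hedged sketch (the helper name is illustrative):

// Same two-field $centerSphere filter as above, constructed in one place.
function nycToMiamiFilter(radius) {
    return {
        from: {$within: {$centerSphere: [nyc.coordinates, radius]}},
        to: {$within: {$centerSphere: [miami.coordinates, radius]}}
    };
}
var smallQuery = timeWithoutAndWithAnIndex({to: "2dsphere", from: "2dsphere"},
                                           nycToMiamiFilter(maxQueryRad));
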
diff --git a/jstests/core/geo_s2validindex.js b/jstests/core/geo_s2validindex.js
index c6dd8be58d9..4c024d2d585 100644
--- a/jstests/core/geo_s2validindex.js
+++ b/jstests/core/geo_s2validindex.js
@@ -6,19 +6,19 @@ var coll = db.getCollection("twodspherevalid");
// Valid index
coll.drop();
-assert.commandWorked(coll.ensureIndex({geo : "2dsphere", other : 1}));
+assert.commandWorked(coll.ensureIndex({geo: "2dsphere", other: 1}));
// Valid index
coll.drop();
-assert.commandWorked(coll.ensureIndex({geo : "2dsphere", other : 1, geo2 : "2dsphere"}));
+assert.commandWorked(coll.ensureIndex({geo: "2dsphere", other: 1, geo2: "2dsphere"}));
// Invalid index, using hash with 2dsphere
coll.drop();
-assert.commandFailed(coll.ensureIndex({geo : "2dsphere", other : "hash"}));
+assert.commandFailed(coll.ensureIndex({geo: "2dsphere", other: "hash"}));
// Invalid index, using 2d with 2dsphere
coll.drop();
-assert.commandFailed(coll.ensureIndex({geo : "2dsphere", other : "2d"}));
+assert.commandFailed(coll.ensureIndex({geo: "2dsphere", other: "2d"}));
jsTest.log("Success!");
diff --git a/jstests/core/geo_s2within.js b/jstests/core/geo_s2within.js
index 77a9ed9ed3e..430e4f4dc07 100644
--- a/jstests/core/geo_s2within.js
+++ b/jstests/core/geo_s2within.js
@@ -3,34 +3,40 @@ t = db.geo_s2within;
t.drop();
t.ensureIndex({geo: "2dsphere"});
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,6], [41,6], [41,5], [40,5]]]};
+somepoly = {
+ "type": "Polygon",
+ "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]
+};
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [40.2, 5.2]]}});
+t.insert({geo: {"type": "LineString", "coordinates": [[40.1, 5.1], [40.2, 5.2]]}});
// This is only partially contained within the polygon.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 40.1, 5.1], [42, 7]]}});
+t.insert({geo: {"type": "LineString", "coordinates": [[40.1, 5.1], [42, 7]]}});
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
t.drop();
t.ensureIndex({geo: "2dsphere"});
-somepoly = { "type" : "Polygon",
- "coordinates" : [ [ [40,5], [40,8], [43,8], [43,5], [40,5]],
- [ [41,6], [42,6], [42,7], [41,7], [41,6]]]};
+somepoly = {
+ "type": "Polygon",
+ "coordinates": [
+ [[40, 5], [40, 8], [43, 8], [43, 5], [40, 5]],
+ [[41, 6], [42, 6], [42, 7], [41, 7], [41, 6]]
+ ]
+};
-t.insert({geo:{ "type" : "Point", "coordinates": [ 40, 5 ] }});
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+t.insert({geo: {"type": "Point", "coordinates": [40, 5]}});
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
// In the hole. Shouldn't find it.
-t.insert({geo:{ "type" : "Point", "coordinates": [ 41.1, 6.1 ] }});
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+t.insert({geo: {"type": "Point", "coordinates": [41.1, 6.1]}});
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
// Also in the hole.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.1, 6.1], [41.2, 6.2]]}});
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+t.insert({geo: {"type": "LineString", "coordinates": [[41.1, 6.1], [41.2, 6.2]]}});
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
// Half-hole, half-not. Shouldn't be $within.
-t.insert({geo: { "type" : "LineString", "coordinates": [ [ 41.5, 6.5], [42.5, 7.5]]}});
-res = t.find({ "geo" : { "$within" : { "$geometry" : somepoly} } });
+t.insert({geo: {"type": "LineString", "coordinates": [[41.5, 6.5], [42.5, 7.5]]}});
+res = t.find({"geo": {"$within": {"$geometry": somepoly}}});
assert.eq(res.itcount(), 1);
diff --git a/jstests/core/geo_small_large.js b/jstests/core/geo_small_large.js
index e927e8d5402..549f00369a2 100644
--- a/jstests/core/geo_small_large.js
+++ b/jstests/core/geo_small_large.js
@@ -1,52 +1,50 @@
// SERVER-2386, general geo-indexing using very large and very small bounds
-load( "jstests/libs/geo_near_random.js" );
+load("jstests/libs/geo_near_random.js");
// Do some random tests (for near queries) with very large and small ranges
-var test = new GeoNearRandomTest( "geo_small_large" );
+var test = new GeoNearRandomTest("geo_small_large");
-bounds = { min : -Math.pow( 2, 34 ), max : Math.pow( 2, 34 ) };
+bounds = {
+ min: -Math.pow(2, 34),
+ max: Math.pow(2, 34)
+};
-test.insertPts( 50, bounds );
+test.insertPts(50, bounds);
-printjson( db["geo_small_large"].find().limit( 10 ).toArray() );
+printjson(db["geo_small_large"].find().limit(10).toArray());
-test.testPt( [ 0, 0 ] );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
+test.testPt([0, 0]);
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
-test = new GeoNearRandomTest( "geo_small_large" );
+test = new GeoNearRandomTest("geo_small_large");
-bounds = { min : -Math.pow( 2, -34 ), max : Math.pow( 2, -34 ) };
+bounds = {
+ min: -Math.pow(2, -34),
+ max: Math.pow(2, -34)
+};
-test.insertPts( 50, bounds );
+test.insertPts(50, bounds);
-printjson( db["geo_small_large"].find().limit( 10 ).toArray() );
-
-test.testPt( [ 0, 0 ] );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
-test.testPt( test.mkPt( undefined, bounds ) );
+printjson(db["geo_small_large"].find().limit(10).toArray());
+test.testPt([0, 0]);
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
+test.testPt(test.mkPt(undefined, bounds));
// Check that our box and circle queries also work
-var scales = [
- Math.pow( 2, 40 ),
- Math.pow( 2, -40 ),
- Math.pow(2, 2),
- Math.pow(3, -15),
- Math.pow(3, 15)
-];
-
-for ( var i = 0; i < scales.length; i++ ) {
+var scales = [Math.pow(2, 40), Math.pow(2, -40), Math.pow(2, 2), Math.pow(3, -15), Math.pow(3, 15)];
+for (var i = 0; i < scales.length; i++) {
var scale = scales[i];
- var eps = Math.pow( 2, -7 ) * scale;
+ var eps = Math.pow(2, -7) * scale;
var radius = 5 * scale;
var max = 10 * scale;
var min = -max;
@@ -55,52 +53,51 @@ for ( var i = 0; i < scales.length; i++ ) {
var t = db["geo_small_large"];
t.drop();
- t.ensureIndex( { p : "2d" }, { min : min, max : max, bits : bits });
+ t.ensureIndex({p: "2d"}, {min: min, max: max, bits: bits});
var outPoints = 0;
var inPoints = 0;
- printjson({ eps : eps, radius : radius, max : max, min : min, range : range, bits : bits });
+ printjson({eps: eps, radius: radius, max: max, min: min, range: range, bits: bits});
// Put a point slightly inside and outside our range
- for ( var j = 0; j < 2; j++ ) {
- var currRad = ( j % 2 == 0 ? radius + eps : radius - eps );
- var res = t.insert( { p : { x : currRad, y : 0 } } );
- print( res.toString() );
+ for (var j = 0; j < 2; j++) {
+ var currRad = (j % 2 == 0 ? radius + eps : radius - eps);
+ var res = t.insert({p: {x: currRad, y: 0}});
+ print(res.toString());
}
- printjson( t.find().toArray() );
+ printjson(t.find().toArray());
- assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1,
- "Incorrect center points found!" );
- assert.eq( t.count( { p : { $within : { $box : [ [ -radius, -radius ], [ radius, radius ] ] } } } ), 1,
- "Incorrect box points found!" );
+ assert.eq(
+ t.count({p: {$within: {$center: [[0, 0], radius]}}}), 1, "Incorrect center points found!");
+ assert.eq(t.count({p: {$within: {$box: [[-radius, -radius], [radius, radius]]}}}),
+ 1,
+ "Incorrect box points found!");
var shouldFind = [];
var randoms = [];
- for ( var j = 0; j < 2; j++ ) {
-
- var randX = Math.random(); // randoms[j].randX
- var randY = Math.random(); // randoms[j].randY
+ for (var j = 0; j < 2; j++) {
+ var randX = Math.random(); // randoms[j].randX
+ var randY = Math.random(); // randoms[j].randY
- randoms.push({ randX : randX, randY : randY });
+ randoms.push({randX: randX, randY: randY});
- var x = randX * ( range - eps ) + eps + min;
- var y = randY * ( range - eps ) + eps + min;
+ var x = randX * (range - eps) + eps + min;
+ var y = randY * (range - eps) + eps + min;
- t.insert( { p : [ x, y ] } );
+ t.insert({p: [x, y]});
- if ( x * x + y * y > radius * radius ){
+ if (x * x + y * y > radius * radius) {
// print( "out point ");
// printjson({ x : x, y : y })
outPoints++;
- }
- else{
+ } else {
// print( "in point ");
// printjson({ x : x, y : y })
inPoints++;
- shouldFind.push({ x : x, y : y, radius : Math.sqrt( x * x + y * y ) });
+ shouldFind.push({x: x, y: y, radius: Math.sqrt(x * x + y * y)});
}
}
@@ -138,21 +135,22 @@ for ( var i = 0; i < scales.length; i++ ) {
printDiff( shouldFind, didFind )
*/
- assert.eq( t.count( { p : { $within : { $center : [[0, 0], radius ] } } } ), 1 + inPoints,
- "Incorrect random center points found!\n" + tojson( randoms ) );
+ assert.eq(t.count({p: {$within: {$center: [[0, 0], radius]}}}),
+ 1 + inPoints,
+ "Incorrect random center points found!\n" + tojson(randoms));
print("Found " + inPoints + " points in and " + outPoints + " points out.");
- var found = t.find( { p : { $near : [0, 0], $maxDistance : radius } } ).toArray();
+ var found = t.find({p: {$near: [0, 0], $maxDistance: radius}}).toArray();
var dist = 0;
- for( var f = 0; f < found.length; f++ ){
+ for (var f = 0; f < found.length; f++) {
var x = found[f].p.x != undefined ? found[f].p.x : found[f].p[0];
var y = found[f].p.y != undefined ? found[f].p.y : found[f].p[1];
- print( "Dist: x : " + x + " y : " + y + " dist : " +
- Math.sqrt( x * x + y * y) + " radius : " + radius );
+ print("Dist: x : " + x + " y : " + y + " dist : " + Math.sqrt(x * x + y * y) +
+ " radius : " + radius);
}
- assert.eq( t.count( { p : { $near : [0, 0], $maxDistance : radius } } ), 1 + inPoints,
- "Incorrect random center points found near!\n" + tojson( randoms ) );
+ assert.eq(t.count({p: {$near: [0, 0], $maxDistance: radius}}),
+ 1 + inPoints,
+ "Incorrect random center points found near!\n" + tojson(randoms));
}
-
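
The in/out bookkeeping in the loop above is plain Euclidean distance against the query radius, independent of the scale factor; a standalone sketch (the function name is illustrative):

// A point counts as inside when it lies on or within the circle of the given
// radius centered at the origin, mirroring the x * x + y * y > radius * radius test.
function isInsideCircle(x, y, radius) {
    return x * x + y * y <= radius * radius;
}
assert(isInsideCircle(3, 4, 5));      // on the boundary counts as inside
assert(!isInsideCircle(3, 4, 4.99));
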
diff --git a/jstests/core/geo_sort1.js b/jstests/core/geo_sort1.js
index cd07345b587..b7a229bb8ae 100644
--- a/jstests/core/geo_sort1.js
+++ b/jstests/core/geo_sort1.js
@@ -2,21 +2,23 @@
t = db.geo_sort1;
t.drop();
-for ( x=0; x<10; x++ ){
- for ( y=0; y<10; y++ ){
- t.insert( { loc : [ x , y ] , foo : x * x * y } );
+for (x = 0; x < 10; x++) {
+ for (y = 0; y < 10; y++) {
+ t.insert({loc: [x, y], foo: x * x * y});
}
}
-t.ensureIndex( { loc : "2d" , foo : 1 } );
+t.ensureIndex({loc: "2d", foo: 1});
-q = t.find( { loc : { $near : [ 5 , 5 ] } , foo : { $gt : 20 } } );
-m = function(z){ return z.foo; };
+q = t.find({loc: {$near: [5, 5]}, foo: {$gt: 20}});
+m = function(z) {
+ return z.foo;
+};
-a = q.clone().map( m );
-b = q.clone().sort( { foo : 1 } ).map( m );
+a = q.clone().map(m);
+b = q.clone().sort({foo: 1}).map(m);
-assert.neq( a , b , "A" );
+assert.neq(a, b, "A");
a.sort();
b.sort();
-assert.eq( a , b , "B" );
+assert.eq(a, b, "B");
diff --git a/jstests/core/geo_uniqueDocs.js b/jstests/core/geo_uniqueDocs.js
index 23297bb1ec9..8c4e11fc82e 100644
--- a/jstests/core/geo_uniqueDocs.js
+++ b/jstests/core/geo_uniqueDocs.js
@@ -5,36 +5,40 @@ collName = 'geo_uniqueDocs_test';
t = db.geo_uniqueDocs_test;
t.drop();
-t.save( { locs : [ [0,2], [3,4]] } );
-t.save( { locs : [ [6,8], [10,10] ] } );
+t.save({locs: [[0, 2], [3, 4]]});
+t.save({locs: [[6, 8], [10, 10]]});
-t.ensureIndex( { locs : '2d' } );
+t.ensureIndex({locs: '2d'});
// geoNear tests
// uniqueDocs option is ignored.
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0]}).results.length);
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:false}).results.length);
-assert.eq(2, db.runCommand({geoNear:collName, near:[0,0], uniqueDocs:true}).results.length);
-results = db.runCommand({geoNear:collName, near:[0,0], num:2}).results;
+assert.eq(2, db.runCommand({geoNear: collName, near: [0, 0]}).results.length);
+assert.eq(2, db.runCommand({geoNear: collName, near: [0, 0], uniqueDocs: false}).results.length);
+assert.eq(2, db.runCommand({geoNear: collName, near: [0, 0], uniqueDocs: true}).results.length);
+results = db.runCommand({geoNear: collName, near: [0, 0], num: 2}).results;
assert.eq(2, results.length);
assert.close(2, results[0].dis);
assert.close(10, results[1].dis);
-results = db.runCommand({geoNear:collName, near:[0,0], num:2, uniqueDocs:true}).results;
+results = db.runCommand({geoNear: collName, near: [0, 0], num: 2, uniqueDocs: true}).results;
assert.eq(2, results.length);
assert.close(2, results[0].dis);
assert.close(10, results[1].dis);
// $within tests
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]]}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : true}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$box : [[0,0],[9,9]], $uniqueDocs : false}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$box: [[0, 0], [9, 9]]}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$box: [[0, 0], [9, 9]], $uniqueDocs: true}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$box: [[0, 0], [9, 9]], $uniqueDocs: false}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : true}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$center : [[5,5],7], $uniqueDocs : false}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$center: [[5, 5], 7], $uniqueDocs: true}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$center: [[5, 5], 7], $uniqueDocs: false}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : true}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$centerSphere : [[5,5],1], $uniqueDocs : false}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$centerSphere: [[5, 5], 1], $uniqueDocs: true}}}).itcount());
+assert.eq(2, t.find({locs: {$within: {$centerSphere: [[5, 5], 1], $uniqueDocs: false}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : true}}}).itcount());
-assert.eq(2, t.find( {locs: {$within: {$polygon : [[0,0],[0,9],[9,9]], $uniqueDocs : false}}}).itcount());
+assert.eq(2,
+ t.find({locs: {$within: {$polygon: [[0, 0], [0, 9], [9, 9]], $uniqueDocs: true}}})
+ .itcount());
+assert.eq(2,
+ t.find({locs: {$within: {$polygon: [[0, 0], [0, 9], [9, 9]], $uniqueDocs: false}}})
+ .itcount());
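
A minimal standalone sketch of the behavior exercised above: geoNear accepts the uniqueDocs flag but always deduplicates per document. The collection name here is hypothetical.

    var c = db.geo_uniqueDocs_sketch;  // hypothetical collection for illustration
    c.drop();
    c.save({locs: [[0, 2], [3, 4]]});  // one document, two indexed locations
    c.ensureIndex({locs: '2d'});
    var res = db.runCommand({geoNear: c.getName(), near: [0, 0], uniqueDocs: true});
    assert.eq(1, res.results.length);  // one result per document, not per matching location
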
diff --git a/jstests/core/geo_uniqueDocs2.js b/jstests/core/geo_uniqueDocs2.js
index 62a27d606e3..f6481b30f41 100644
--- a/jstests/core/geo_uniqueDocs2.js
+++ b/jstests/core/geo_uniqueDocs2.js
@@ -6,75 +6,90 @@ collName = 'jstests_geo_uniqueDocs2';
t = db[collName];
t.drop();
-t.save( {loc:[[20,30],[40,50]]} );
-t.ensureIndex( {loc:'2d'} );
+t.save({loc: [[20, 30], [40, 50]]});
+t.ensureIndex({loc: '2d'});
// Check exact matches of different locations.
-assert.eq( 1, t.count( { loc : [20,30] } ) );
-assert.eq( 1, t.count( { loc : [40,50] } ) );
+assert.eq(1, t.count({loc: [20, 30]}));
+assert.eq(1, t.count({loc: [40, 50]}));
// Check behavior for $near, where $uniqueDocs mode is unavailable.
-assert.eq( [t.findOne()], t.find( { loc: { $near: [50,50] } } ).toArray() );
+assert.eq([t.findOne()], t.find({loc: {$near: [50, 50]}}).toArray());
// Check correct number of matches for $within / $uniqueDocs.
// uniqueDocs ignored - does not affect results.
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40] } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : true } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ) );
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 40]}}}));
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 40], $uniqueDocs: true}}}));
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 40], $uniqueDocs: false}}}));
// For $within / $uniqueDocs, limit applies to docs.
-assert.eq( 1, t.find( { loc : { $within : { $center : [[30, 30], 40], $uniqueDocs : false } } } ).limit(1).itcount() );
+assert.eq(
+ 1, t.find({loc: {$within: {$center: [[30, 30], 40], $uniqueDocs: false}}}).limit(1).itcount());
// Now check a circle only containing one of the locs.
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10] } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : true } } } ) );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[30, 30], 10], $uniqueDocs : false } } } ) );
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 10]}}}));
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 10], $uniqueDocs: true}}}));
+assert.eq(1, t.count({loc: {$within: {$center: [[30, 30], 10], $uniqueDocs: false}}}));
// Check number and character of results with geoNear / uniqueDocs / includeLocs.
-notUniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : false } );
-uniqueNotInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : false } );
-notUniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } );
-uniqueInclude = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : true, includeLocs : true } );
+notUniqueNotInclude = db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: false, includeLocs: false});
+uniqueNotInclude = db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: true, includeLocs: false});
+notUniqueInclude = db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: false, includeLocs: true});
+uniqueInclude = db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: true, includeLocs: true});
// Check that only unique docs are returned.
-assert.eq( 1, notUniqueNotInclude.results.length );
-assert.eq( 1, uniqueNotInclude.results.length );
-assert.eq( 1, notUniqueInclude.results.length );
-assert.eq( 1, uniqueInclude.results.length );
+assert.eq(1, notUniqueNotInclude.results.length);
+assert.eq(1, uniqueNotInclude.results.length);
+assert.eq(1, notUniqueInclude.results.length);
+assert.eq(1, uniqueInclude.results.length);
// Check that locs are included.
-assert( !notUniqueNotInclude.results[0].loc );
-assert( !uniqueNotInclude.results[0].loc );
-assert( notUniqueInclude.results[0].loc );
-assert( uniqueInclude.results[0].loc );
+assert(!notUniqueNotInclude.results[0].loc);
+assert(!uniqueNotInclude.results[0].loc);
+assert(notUniqueInclude.results[0].loc);
+assert(uniqueInclude.results[0].loc);
// For geoNear / uniqueDocs, 'num' limit seems to apply to locs.
-assert.eq( 1, db.runCommand( { geoNear : collName , near : [50,50], num : 1, uniqueDocs : false, includeLocs : false } ).results.length );
+assert.eq(
+ 1,
+ db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 1, uniqueDocs: false, includeLocs: false})
+ .results.length);
// Check locs returned in includeLocs mode.
t.remove({});
-objLocs = [{x:20,y:30,z:['loc1','loca']},{x:40,y:50,z:['loc2','locb']}];
-t.save( {loc:objLocs} );
-results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
-assert.contains( results[0].loc, objLocs );
+objLocs = [{x: 20, y: 30, z: ['loc1', 'loca']}, {x: 40, y: 50, z: ['loc2', 'locb']}];
+t.save({loc: objLocs});
+results =
+ db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: false, includeLocs: true})
+ .results;
+assert.contains(results[0].loc, objLocs);
// Check locs returned in includeLocs mode, where locs are arrays.
t.remove({});
-arrLocs = [[20,30],[40,50]];
-t.save( {loc:arrLocs} );
-results = db.runCommand( { geoNear : collName , near : [50,50], num : 10, uniqueDocs : false, includeLocs : true } ).results;
+arrLocs = [[20, 30], [40, 50]];
+t.save({loc: arrLocs});
+results =
+ db.runCommand(
+ {geoNear: collName, near: [50, 50], num: 10, uniqueDocs: false, includeLocs: true})
+ .results;
// The original loc arrays are returned as objects.
expectedLocs = arrLocs;
-assert.contains( results[0].loc, expectedLocs );
+assert.contains(results[0].loc, expectedLocs);
// Test a large number of locations in the array.
t.drop();
arr = [];
-for( i = 0; i < 10000; ++i ) {
- arr.push( [10,10] );
+for (i = 0; i < 10000; ++i) {
+ arr.push([10, 10]);
}
-arr.push( [100,100] );
-t.save( {loc:arr} );
-t.ensureIndex( {loc:'2d'} );
-assert.eq( 1, t.count( { loc : { $within : { $center : [[99, 99], 5] } } } ) );
+arr.push([100, 100]);
+t.save({loc: arr});
+t.ensureIndex({loc: '2d'});
+assert.eq(1, t.count({loc: {$within: {$center: [[99, 99], 5]}}}));
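
A short standalone sketch of the includeLocs option checked above (collection name hypothetical): when includeLocs is true, each geoNear result also carries the specific indexed location that produced the match.

    var c = db.geo_includeLocs_sketch;  // hypothetical collection for illustration
    c.drop();
    c.save({loc: [[20, 30], [40, 50]]});
    c.ensureIndex({loc: '2d'});
    var res = db.runCommand(
        {geoNear: c.getName(), near: [50, 50], num: 10, uniqueDocs: false, includeLocs: true});
    assert.eq(1, res.results.length);  // still one entry for the single document
    printjson(res.results[0].loc);     // the location within the array that matched
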
diff --git a/jstests/core/geo_update.js b/jstests/core/geo_update.js
index 34305559039..ebe754680e9 100644
--- a/jstests/core/geo_update.js
+++ b/jstests/core/geo_update.js
@@ -4,34 +4,28 @@
var coll = db.testGeoUpdate;
coll.drop();
-coll.ensureIndex({ loc : "2d" });
+coll.ensureIndex({loc: "2d"});
// Test normal update
-print( "Updating..." );
+print("Updating...");
-coll.insert({ loc : [1.0, 2.0] });
+coll.insert({loc: [1.0, 2.0]});
-coll.update({ loc : { $near : [1.0, 2.0] } },
- { x : true, loc : [1.0, 2.0] });
+coll.update({loc: {$near: [1.0, 2.0]}}, {x: true, loc: [1.0, 2.0]});
// Test upsert
-print( "Upserting..." );
-
-coll.update({ loc : { $within : { $center : [[10, 20], 1] } } },
- { x : true },
- true);
-
-coll.update({ loc : { $near : [10.0, 20.0], $maxDistance : 1 } },
- { x : true },
- true);
-
-
-coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
- { $set : { loc : [100, 100] }, $push : { people : "chris" } },
+print("Upserting...");
+
+coll.update({loc: {$within: {$center: [[10, 20], 1]}}}, {x: true}, true);
+
+coll.update({loc: {$near: [10.0, 20.0], $maxDistance: 1}}, {x: true}, true);
+
+coll.update({loc: {$near: [100, 100], $maxDistance: 1}},
+ {$set: {loc: [100, 100]}, $push: {people: "chris"}},
true);
-
-coll.update({ loc : { $near : [100, 100], $maxDistance : 1 } },
- { $set : { loc : [100, 100] }, $push : { people : "john" } },
+
+coll.update({loc: {$near: [100, 100], $maxDistance: 1}},
+ {$set: {loc: [100, 100]}, $push: {people: "john"}},
true);
-assert.eq( 4, coll.find().itcount() );
+assert.eq(4, coll.find().itcount());
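
A rough accounting of why the final count is 4, read off the statements above rather than taken from a test run.

    //   1 document from the initial insert at [1.0, 2.0]
    // + 1 upsert from the $within/$center query, which matches nothing at that point
    // + 1 upsert from the $near/$maxDistance [10.0, 20.0] query, which also matches nothing
    // + 1 upsert from the first $near [100, 100] update; the second [100, 100] update then
    //     matches that new document and only pushes "john" onto its people array
    assert.eq(4, db.testGeoUpdate.find().itcount());
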
diff --git a/jstests/core/geo_update1.js b/jstests/core/geo_update1.js
index c3d2623d3de..e966afa7ea9 100644
--- a/jstests/core/geo_update1.js
+++ b/jstests/core/geo_update1.js
@@ -2,35 +2,35 @@
t = db.geo_update1;
t.drop();
-for(var x = 0; x < 10; x++ ) {
- for(var y = 0; y < 10; y++ ) {
- t.insert({"loc": [x, y] , x : x , y : y , z : 1 });
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-function p(){
- print( "--------------" );
- for ( var y=0; y<10; y++ ){
- var c = t.find( { y : y } ).sort( { x : 1 } );
+for (var x = 0; x < 10; x++) {
+ for (var y = 0; y < 10; y++) {
+ t.insert({"loc": [x, y], x: x, y: y, z: 1});
+ }
+}
+
+t.ensureIndex({loc: "2d"});
+
+function p() {
+ print("--------------");
+ for (var y = 0; y < 10; y++) {
+ var c = t.find({y: y}).sort({x: 1});
var s = "";
- while ( c.hasNext() )
+ while (c.hasNext())
s += c.next().z + " ";
- print( s );
+ print(s);
}
- print( "--------------" );
+ print("--------------");
}
p();
-var res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, false, true);
-assert.writeOK( res );
+var res = t.update({loc: {$within: {$center: [[5, 5], 2]}}}, {$inc: {z: 1}}, false, true);
+assert.writeOK(res);
p();
-assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true));
+assert.writeOK(t.update({}, {'$inc': {'z': 1}}, false, true));
p();
-res = t.update({ loc: { $within: { $center: [[ 5, 5 ], 2 ]}}}, { $inc: { z: 1 }}, false, true);
-assert.writeOK( res );
+res = t.update({loc: {$within: {$center: [[5, 5], 2]}}}, {$inc: {z: 1}}, false, true);
+assert.writeOK(res);
p();
diff --git a/jstests/core/geo_update2.js b/jstests/core/geo_update2.js
index f2f1b6cee4b..ffcf02617be 100644
--- a/jstests/core/geo_update2.js
+++ b/jstests/core/geo_update2.js
@@ -2,38 +2,35 @@
t = db.geo_update2;
t.drop();
-for(var x = 0; x < 10; x++ ) {
- for(var y = 0; y < 10; y++ ) {
- t.insert({"loc": [x, y] , x : x , y : y });
- }
-}
-
-t.ensureIndex( { loc : "2d" } );
-
-function p(){
- print( "--------------" );
- for ( var y=0; y<10; y++ ){
- var c = t.find( { y : y } ).sort( { x : 1 } );
+for (var x = 0; x < 10; x++) {
+ for (var y = 0; y < 10; y++) {
+ t.insert({"loc": [x, y], x: x, y: y});
+ }
+}
+
+t.ensureIndex({loc: "2d"});
+
+function p() {
+ print("--------------");
+ for (var y = 0; y < 10; y++) {
+ var c = t.find({y: y}).sort({x: 1});
var s = "";
- while ( c.hasNext() )
+ while (c.hasNext())
s += c.next().z + " ";
- print( s );
+ print(s);
}
- print( "--------------" );
+ print("--------------");
}
p();
-
-assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}},
- {'$inc' : { 'z' : 1}}, false, true));
+assert.writeOK(
+ t.update({"loc": {"$within": {"$center": [[5, 5], 2]}}}, {'$inc': {'z': 1}}, false, true));
p();
-assert.writeOK(t.update({}, {'$inc' : { 'z' : 1}}, false, true));
+assert.writeOK(t.update({}, {'$inc': {'z': 1}}, false, true));
p();
-
-assert.writeOK(t.update({"loc" : {"$within" : {"$center" : [[5,5], 2]}}},
- {'$inc' : { 'z' : 1}}, false, true));
+assert.writeOK(
+ t.update({"loc": {"$within": {"$center": [[5, 5], 2]}}}, {'$inc': {'z': 1}}, false, true));
p();
-
diff --git a/jstests/core/geo_update_btree.js b/jstests/core/geo_update_btree.js
index 12a10c736b8..ea1025b10a9 100644
--- a/jstests/core/geo_update_btree.js
+++ b/jstests/core/geo_update_btree.js
@@ -1,31 +1,37 @@
// Tests whether the geospatial search is stable under btree updates
-var coll = db.getCollection( "jstests_geo_update_btree" );
+var coll = db.getCollection("jstests_geo_update_btree");
coll.drop();
-coll.ensureIndex( { loc : '2d' } );
+coll.ensureIndex({loc: '2d'});
-var big = new Array( 3000 ).toString();
+var big = new Array(3000).toString();
if (testingReplication) {
- coll.setWriteConcern({ w: 2 });
+ coll.setWriteConcern({w: 2});
}
Random.setRandomSeed();
var parallelInsert = startParallelShell(
- "Random.setRandomSeed();" +
- "for ( var i = 0; i < 1000; i++ ) {" +
+ "Random.setRandomSeed();" + "for ( var i = 0; i < 1000; i++ ) {" +
" var doc = { loc: [ Random.rand() * 180, Random.rand() * 180 ], v: '' };" +
- " db.jstests_geo_update_btree.insert(doc);" +
- "}");
+ " db.jstests_geo_update_btree.insert(doc);" + "}");
-for ( i = 0; i < 1000; i++ ) {
+for (i = 0; i < 1000; i++) {
coll.update(
- { loc : { $within : { $center : [ [ Random.rand() * 180, Random.rand() * 180 ], Random.rand() * 50 ] } } },
- { $set : { v : big } }, false, true );
-
- if( i % 10 == 0 ) print( i );
+ {
+ loc: {
+ $within:
+ {$center: [[Random.rand() * 180, Random.rand() * 180], Random.rand() * 50]}
+ }
+ },
+ {$set: {v: big}},
+ false,
+ true);
+
+ if (i % 10 == 0)
+ print(i);
}
parallelInsert();
diff --git a/jstests/core/geo_update_btree2.js b/jstests/core/geo_update_btree2.js
index b4ec059166a..de867bf8e14 100644
--- a/jstests/core/geo_update_btree2.js
+++ b/jstests/core/geo_update_btree2.js
@@ -11,56 +11,57 @@
// In order to expose the specific NON GUARANTEED isolation behavior this file tests
// we disable table scans to ensure that the new query system only looks at the 2d
// scan.
-assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
+assert.commandWorked(db._adminCommand({setParameter: 1, notablescan: true}));
-var status = function( msg ){
- print( "\n\n###\n" + msg + "\n###\n\n" );
+var status = function(msg) {
+ print("\n\n###\n" + msg + "\n###\n\n");
};
-var coll = db.getCollection( "jstests_geo_update_btree2" );
+var coll = db.getCollection("jstests_geo_update_btree2");
coll.drop();
-coll.ensureIndex( { loc : '2d' } );
+coll.ensureIndex({loc: '2d'});
-status( "Inserting points..." );
+status("Inserting points...");
var numPoints = 10;
Random.setRandomSeed();
-for ( i = 0; i < numPoints; i++ ) {
- coll.insert( { _id : i, loc : [ Random.rand() * 180, Random.rand() * 180 ], i : i % 2 } );
+for (i = 0; i < numPoints; i++) {
+ coll.insert({_id: i, loc: [Random.rand() * 180, Random.rand() * 180], i: i % 2});
}
-status( "Starting long query..." );
+status("Starting long query...");
-var query = coll.find({ loc : { $within : { $box : [[-180, -180], [180, 180]] } } }).batchSize( 2 );
-var firstValues = [ query.next()._id, query.next()._id ];
-printjson( firstValues );
+var query = coll.find({loc: {$within: {$box: [[-180, -180], [180, 180]]}}}).batchSize(2);
+var firstValues = [query.next()._id, query.next()._id];
+printjson(firstValues);
-status( "Removing points not returned by query..." );
+status("Removing points not returned by query...");
var allQuery = coll.find();
var removeIds = [];
-while( allQuery.hasNext() ){
+while (allQuery.hasNext()) {
var id = allQuery.next()._id;
- if( firstValues.indexOf( id ) < 0 ){
- removeIds.push( id );
+ if (firstValues.indexOf(id) < 0) {
+ removeIds.push(id);
}
}
var updateIds = [];
-for( var i = 0, max = removeIds.length / 2; i < max; i++ ) updateIds.push( removeIds.pop() );
+for (var i = 0, max = removeIds.length / 2; i < max; i++)
+ updateIds.push(removeIds.pop());
-printjson( removeIds );
-coll.remove({ _id : { $in : removeIds } });
+printjson(removeIds);
+coll.remove({_id: {$in: removeIds}});
-status( "Updating points returned by query..." );
+status("Updating points returned by query...");
printjson(updateIds);
-var big = new Array( 3000 ).toString();
-for( var i = 0; i < updateIds.length; i++ )
- coll.update({ _id : updateIds[i] }, { $set : { data : big } });
+var big = new Array(3000).toString();
+for (var i = 0; i < updateIds.length; i++)
+ coll.update({_id: updateIds[i]}, {$set: {data: big}});
-status( "Counting final points..." );
+status("Counting final points...");
// It's not defined whether or not we return documents that are modified during a query. We
// shouldn't crash, but it's not defined how many results we get back. This test is modifying every
@@ -69,4 +70,4 @@ status( "Counting final points..." );
// assert.eq( ( numPoints - 2 ) / 2, query.itcount() )
query.itcount();
-assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false} ) );
+assert.commandWorked(db._adminCommand({setParameter: 1, notablescan: false}));
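
For completeness, a small sketch of the notablescan server parameter toggled above; reading the value back with getParameter is an assumption about the usual parameter interface, not something this test does.

    assert.commandWorked(db.adminCommand({setParameter: 1, notablescan: true}));
    printjson(db.adminCommand({getParameter: 1, notablescan: 1}));
    // Restore the default so unindexed queries work again.
    assert.commandWorked(db.adminCommand({setParameter: 1, notablescan: false}));
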
diff --git a/jstests/core/geo_update_dedup.js b/jstests/core/geo_update_dedup.js
index 3011c5c9cfe..b354f3ca7ae 100644
--- a/jstests/core/geo_update_dedup.js
+++ b/jstests/core/geo_update_dedup.js
@@ -7,9 +7,11 @@ var t = db.jstests_geo_update_dedup;
// 2d index with $near
t.drop();
t.ensureIndex({locs: "2d"});
-t.save({locs: [[49.999,49.999], [50.0,50.0], [50.001,50.001]]});
+t.save({locs: [[49.999, 49.999], [50.0, 50.0], [50.001, 50.001]]});
-var q = {locs: {$near: [50.0, 50.0]}};
+var q = {
+ locs: {$near: [50.0, 50.0]}
+};
assert.eq(1, t.find(q).itcount(), 'duplicates returned from query');
var res = t.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true);
@@ -18,8 +20,8 @@ assert.eq(1, t.findOne().touchCount);
t.drop();
t.ensureIndex({locs: "2d"});
-t.save({locs: [{x:49.999,y:49.999}, {x:50.0,y:50.0}, {x:50.001,y:50.001}]});
-res = t.update({locs: {$near: {x:50.0, y:50.0}}}, {$inc: {touchCount: 1}});
+t.save({locs: [{x: 49.999, y: 49.999}, {x: 50.0, y: 50.0}, {x: 50.001, y: 50.001}]});
+res = t.update({locs: {$near: {x: 50.0, y: 50.0}}}, {$inc: {touchCount: 1}});
assert.eq(1, res.nMatched);
assert.eq(1, t.findOne().touchCount);
@@ -35,26 +37,31 @@ assert.eq(1, t.findOne().touchCount);
// 2dsphere index with $geoNear
t.drop();
t.ensureIndex({geo: "2dsphere"});
-var x = { "type" : "Polygon",
- "coordinates" : [[[49.999,49.999], [50.0,50.0], [50.001,50.001], [49.999,49.999]]]};
+var x = {
+ "type": "Polygon",
+ "coordinates": [[[49.999, 49.999], [50.0, 50.0], [50.001, 50.001], [49.999, 49.999]]]
+};
t.save({geo: x});
-res = t.update({geo: {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
- {$inc: {touchCount: 1}}, false, true);
+res = t.update({geo: {$geoNear: {"type": "Point", "coordinates": [50.0, 50.0]}}},
+ {$inc: {touchCount: 1}},
+ false,
+ true);
assert.eq(1, res.nMatched);
assert.eq(1, t.findOne().touchCount);
t.drop();
var locdata = [
- {geo: {type: "Point", coordinates: [49.999,49.999]}},
- {geo: {type: "Point", coordinates: [50.000,50.000]}},
- {geo: {type: "Point", coordinates: [50.001,50.001]}}
+ {geo: {type: "Point", coordinates: [49.999, 49.999]}},
+ {geo: {type: "Point", coordinates: [50.000, 50.000]}},
+ {geo: {type: "Point", coordinates: [50.001, 50.001]}}
];
t.save({locdata: locdata, count: 0});
t.ensureIndex({"locdata.geo": "2dsphere"});
-res = t.update({"locdata.geo": {$geoNear: {"type" : "Point", "coordinates" : [50.0, 50.0]}}},
- {$inc: {touchCount: 1}}, false, true);
+res = t.update({"locdata.geo": {$geoNear: {"type": "Point", "coordinates": [50.0, 50.0]}}},
+ {$inc: {touchCount: 1}},
+ false,
+ true);
assert.eq(1, res.nMatched);
assert.eq(1, t.findOne().touchCount);
-
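
The core behavior under test, condensed into a standalone sketch with a hypothetical collection name: a multi-update driven by a geo predicate touches each matching document exactly once, even when several entries of its location array satisfy the predicate.

    var c = db.geo_dedup_sketch;  // hypothetical collection for illustration
    c.drop();
    c.ensureIndex({locs: "2d"});
    c.save({locs: [[49.999, 49.999], [50.0, 50.0], [50.001, 50.001]]});
    var res = c.update({locs: {$near: [50.0, 50.0]}}, {$inc: {touchCount: 1}}, false, true);
    assert.eq(1, res.nMatched);
    assert.eq(1, c.findOne().touchCount);  // incremented once, not once per matching location
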
diff --git a/jstests/core/geo_validate.js b/jstests/core/geo_validate.js
index 5b9957166c3..6d92e5736ce 100644
--- a/jstests/core/geo_validate.js
+++ b/jstests/core/geo_validate.js
@@ -5,95 +5,95 @@
var coll = db.geo_validate;
coll.drop();
-coll.ensureIndex({ geo : "2dsphere" });
+coll.ensureIndex({geo: "2dsphere"});
//
//
// Make sure we can't do a $within search with an invalid circular region
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $center : [[0, 0], -1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$center: [[0, 0], -1]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $centerSphere : [[0, 0], -1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$centerSphere: [[0, 0], -1]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $center : [[0, 0], NaN] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$center: [[0, 0], NaN]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $centerSphere : [[0, 0], NaN] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$centerSphere: [[0, 0], NaN]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $center : [[0, 0], -Infinity] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$center: [[0, 0], -Infinity]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $centerSphere : [[0, 0], -Infinity] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$centerSphere: [[0, 0], -Infinity]}}});
});
//
//
// Make sure we can't do geo search with invalid point coordinates.
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $center : [[NaN, 0], 1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$center: [[NaN, 0], 1]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $centerSphere : [[NaN, 0], 1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$centerSphere: [[NaN, 0], 1]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $center : [[Infinity, 0], 1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$center: [[Infinity, 0], 1]}}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $within : { $centerSphere : [[-Infinity, 0], 1] } } });
+assert.throws(function() {
+ coll.findOne({geo: {$within: {$centerSphere: [[-Infinity, 0], 1]}}});
});
//
//
// Make sure we can do a $within search with a zero-radius circular region
-assert.writeOK(coll.insert({ geo : [0, 0] }));
-assert.neq(null, coll.findOne({ geo : { $within : { $center : [[0, 0], 0] } } }));
-assert.neq(null, coll.findOne({ geo : { $within : { $centerSphere : [[0, 0], 0] } } }));
-assert.neq(null, coll.findOne({ geo : { $within : { $center : [[0, 0], Infinity] } } }));
-assert.neq(null, coll.findOne({ geo : { $within : { $centerSphere : [[0, 0], Infinity] } } }));
+assert.writeOK(coll.insert({geo: [0, 0]}));
+assert.neq(null, coll.findOne({geo: {$within: {$center: [[0, 0], 0]}}}));
+assert.neq(null, coll.findOne({geo: {$within: {$centerSphere: [[0, 0], 0]}}}));
+assert.neq(null, coll.findOne({geo: {$within: {$center: [[0, 0], Infinity]}}}));
+assert.neq(null, coll.findOne({geo: {$within: {$centerSphere: [[0, 0], Infinity]}}}));
//
//
// Make sure we can't do a $near search with an invalid circular region
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0, -1] } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0, -1]}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0], $maxDistance : -1 } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0], $maxDistance: -1}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0, NaN] } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0, NaN]}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0], $maxDistance : NaN } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0], $maxDistance: NaN}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0, -Infinity] } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0, -Infinity]}});
});
-assert.throws(function(){
- coll.findOne({ geo : { $geoNear : [0, 0], $maxDistance : -Infinity } });
+assert.throws(function() {
+ coll.findOne({geo: {$geoNear: [0, 0], $maxDistance: -Infinity}});
});
//
//
// Make sure we can't do a near search with a negative limit
-assert.commandFailed(db.runCommand({geoNear: coll.getName(),
- near: [0,0], spherical: true, num: -1}));
-assert.commandFailed(db.runCommand({geoNear: coll.getName(),
- near: [0,0], spherical: true, num: -Infinity}));
+assert.commandFailed(
+ db.runCommand({geoNear: coll.getName(), near: [0, 0], spherical: true, num: -1}));
+assert.commandFailed(
+ db.runCommand({geoNear: coll.getName(), near: [0, 0], spherical: true, num: -Infinity}));
// NaN is interpreted as limit 0
-assert.commandWorked(db.runCommand({geoNear: coll.getName(),
- near: [0,0], spherical: true, num: NaN}));
-
+assert.commandWorked(
+ db.runCommand({geoNear: coll.getName(), near: [0, 0], spherical: true, num: NaN}));
//
// SERVER-17241 Polygon has no loop
-assert.writeError(coll.insert({ geo : { type: 'Polygon', coordinates: [] } }));
+assert.writeError(coll.insert({geo: {type: 'Polygon', coordinates: []}}));
//
// SERVER-17486 Loop has fewer than 3 vertices.
assert.writeError(coll.insert({geo: {type: 'Polygon', coordinates: [[]]}}));
-assert.writeError(coll.insert({geo: {type: 'Polygon', coordinates: [[[0,0]]]}}));
-assert.writeError(coll.insert({geo: {type: 'Polygon', coordinates: [[[0,0], [0,0], [0,0], [0,0]]]}}));
+assert.writeError(coll.insert({geo: {type: 'Polygon', coordinates: [[[0, 0]]]}}));
+assert.writeError(
+ coll.insert({geo: {type: 'Polygon', coordinates: [[[0, 0], [0, 0], [0, 0], [0, 0]]]}}));
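
A condensed sketch of the two validation rules above that are easiest to trip over: a negative radius is rejected at query time, while a NaN 'num' for the geoNear command is treated as a limit of 0 rather than an error.

    assert.throws(function() {
        db.geo_validate.findOne({geo: {$within: {$center: [[0, 0], -1]}}});
    });
    assert.commandWorked(
        db.runCommand({geoNear: "geo_validate", near: [0, 0], spherical: true, num: NaN}));
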
diff --git a/jstests/core/geo_withinquery.js b/jstests/core/geo_withinquery.js
index d60116c8838..3a71608ab6d 100644
--- a/jstests/core/geo_withinquery.js
+++ b/jstests/core/geo_withinquery.js
@@ -3,13 +3,17 @@ t = db.geo_withinquery;
t.drop();
num = 0;
-for ( x=0; x<=20; x++ ){
- for ( y=0; y<=20; y++ ){
- o = { _id : num++ , loc : [ x , y ] };
- t.save( o );
+for (x = 0; x <= 20; x++) {
+ for (y = 0; y <= 20; y++) {
+ o = {
+ _id: num++,
+ loc: [x, y]
+ };
+ t.save(o);
}
}
-assert.eq(21 * 21 - 1, t.find({ $and: [ {loc: {$ne:[0,0]}},
- {loc: {$within: {$box: [[0,0], [100,100]]}}},
- ]}).itcount(), "UHOH!");
+assert.eq(21 * 21 - 1,
+ t.find({$and: [{loc: {$ne: [0, 0]}}, {loc: {$within: {$box: [[0, 0], [100, 100]]}}}, ]})
+ .itcount(),
+ "UHOH!");
diff --git a/jstests/core/geoa.js b/jstests/core/geoa.js
index 036a5630550..cd1eeaf5aaa 100644
--- a/jstests/core/geoa.js
+++ b/jstests/core/geoa.js
@@ -2,11 +2,11 @@
t = db.geoa;
t.drop();
-t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } );
-t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } );
-t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } );
+t.save({_id: 1, a: {loc: [5, 5]}});
+t.save({_id: 2, a: {loc: [6, 6]}});
+t.save({_id: 3, a: {loc: [7, 7]}});
-t.ensureIndex( { "a.loc" : "2d" } );
+t.ensureIndex({"a.loc": "2d"});
-cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } );
-assert.eq( 2 , cur.next()._id , "A1" );
+cur = t.find({"a.loc": {$near: [6, 6]}});
+assert.eq(2, cur.next()._id, "A1");
diff --git a/jstests/core/geob.js b/jstests/core/geob.js
index 0dcc2658ba2..b78eaa453df 100644
--- a/jstests/core/geob.js
+++ b/jstests/core/geob.js
@@ -1,10 +1,18 @@
var t = db.geob;
t.drop();
-var a = {p: [0, 0]};
-var b = {p: [1, 0]};
-var c = {p: [3, 4]};
-var d = {p: [0, 6]};
+var a = {
+ p: [0, 0]
+};
+var b = {
+ p: [1, 0]
+};
+var c = {
+ p: [3, 4]
+};
+var d = {
+ p: [0, 6]
+};
t.save(a);
t.save(b);
@@ -12,7 +20,7 @@ t.save(c);
t.save(d);
t.ensureIndex({p: "2d"});
-var res = t.runCommand("geoNear", {near: [0,0]});
+var res = t.runCommand("geoNear", {near: [0, 0]});
assert.close(3, res.stats.avgDistance, "A");
assert.close(0, res.results[0].dis, "B1");
@@ -27,7 +35,7 @@ assert.eq(c._id, res.results[2].obj._id, "D2");
assert.close(6, res.results[3].dis, "E1");
assert.eq(d._id, res.results[3].obj._id, "E2");
-res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2});
+res = t.runCommand("geoNear", {near: [0, 0], distanceMultiplier: 2});
assert.close(6, res.stats.avgDistance, "F");
assert.close(0, res.results[0].dis, "G");
assert.close(2, res.results[1].dis, "H");
diff --git a/jstests/core/geoc.js b/jstests/core/geoc.js
index 138b86c65c5..26e762e4978 100644
--- a/jstests/core/geoc.js
+++ b/jstests/core/geoc.js
@@ -1,24 +1,27 @@
t = db.geoc;
-t.drop();
+t.drop();
N = 1000;
-for (var i=0; i<N; i++) t.insert({loc:[100+Math.random(), 100+Math.random()], z:0});
-for (var i=0; i<N; i++) t.insert({loc:[0+Math.random(), 0+Math.random()], z:1});
-for (var i=0; i<N; i++) t.insert({loc:[-100+Math.random(), -100+Math.random()], z:2});
+for (var i = 0; i < N; i++)
+ t.insert({loc: [100 + Math.random(), 100 + Math.random()], z: 0});
+for (var i = 0; i < N; i++)
+ t.insert({loc: [0 + Math.random(), 0 + Math.random()], z: 1});
+for (var i = 0; i < N; i++)
+ t.insert({loc: [-100 + Math.random(), -100 + Math.random()], z: 2});
-t.ensureIndex({loc:'2d'});
+t.ensureIndex({loc: '2d'});
-function test( z , l ){
- assert.lt( 0 , t.find({loc:{$near:[100,100]}, z:z}).limit(l).itcount() , "z: " + z + " l: " + l );
+function test(z, l) {
+ assert.lt(
+ 0, t.find({loc: {$near: [100, 100]}, z: z}).limit(l).itcount(), "z: " + z + " l: " + l);
}
-test( 1 , 1 );
-test( 1 , 2 );
-test( 2 , 2 );
-test( 2 , 10 );
-test( 2 , 1000 );
-test( 2 , 100000 );
-test( 2 , 10000000 );
-
+test(1, 1);
+test(1, 2);
+test(2, 2);
+test(2, 10);
+test(2, 1000);
+test(2, 100000);
+test(2, 10000000);
diff --git a/jstests/core/geod.js b/jstests/core/geod.js
index 118f5021381..35844d0f914 100644
--- a/jstests/core/geod.js
+++ b/jstests/core/geod.js
@@ -1,14 +1,14 @@
-var t=db.geod;
-t.drop();
-t.save( { loc: [0,0] } );
-t.save( { loc: [0.5,0] } );
-t.ensureIndex({loc:"2d"});
+var t = db.geod;
+t.drop();
+t.save({loc: [0, 0]});
+t.save({loc: [0.5, 0]});
+t.ensureIndex({loc: "2d"});
// do a few geoNears with different maxDistances. The first iteration
// should match no points in the dataset.
dists = [.49, .51, 1.0];
-for (idx in dists){
- b=db.runCommand({geoNear:"geod", near:[1,0], num:2, maxDistance:dists[idx]});
- assert.eq(b.errmsg, undefined, "A"+idx);
- l=b.results.length;
- assert.eq(l, idx, "B"+idx);
+for (idx in dists) {
+ b = db.runCommand({geoNear: "geod", near: [1, 0], num: 2, maxDistance: dists[idx]});
+ assert.eq(b.errmsg, undefined, "A" + idx);
+ l = b.results.length;
+ assert.eq(l, idx, "B" + idx);
}
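
To make the expected counts concrete: the two stored points sit at distances 0.5 and 1.0 from the query point [1, 0], so each maxDistance in the list admits one more result than the last. A single-iteration sketch:

    var r = db.runCommand({geoNear: "geod", near: [1, 0], num: 2, maxDistance: 0.51});
    assert.eq(1, r.results.length);  // only [0.5, 0] lies within 0.51 of [1, 0]
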
diff --git a/jstests/core/geoe.js b/jstests/core/geoe.js
index 22feb83ab1e..9568e13dc08 100644
--- a/jstests/core/geoe.js
+++ b/jstests/core/geoe.js
@@ -4,29 +4,28 @@
// the end of the btree and not reverse direction (leaving the rest of
// the search always looking at some random non-matching point).
-t=db.geo_box;
+t = db.geo_box;
t.drop();
-t.insert({"_id": 1, "geo" : [ 33, -11.1 ] });
-t.insert({"_id": 2, "geo" : [ -122, 33.3 ] });
-t.insert({"_id": 3, "geo" : [ -122, 33.4 ] });
-t.insert({"_id": 4, "geo" : [ -122.28, 37.67 ] });
-t.insert({"_id": 5, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 6, "geo" : [ -122.29, 37.67 ] });
-t.insert({"_id": 7, "geo" : [ -122.29, 37.67 ] });
-t.insert({"_id": 8, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 9, "geo" : [ -122.29, 37.68 ] });
-t.insert({"_id": 10, "geo" : [ -122.3, 37.67 ] });
-t.insert({"_id": 11, "geo" : [ -122.31, 37.67 ] });
-t.insert({"_id": 12, "geo" : [ -122.3, 37.66 ] });
-t.insert({"_id": 13, "geo" : [ -122.2435, 37.637072 ] });
-t.insert({"_id": 14, "geo" : [ -122.289505, 37.695774 ] });
+t.insert({"_id": 1, "geo": [33, -11.1]});
+t.insert({"_id": 2, "geo": [-122, 33.3]});
+t.insert({"_id": 3, "geo": [-122, 33.4]});
+t.insert({"_id": 4, "geo": [-122.28, 37.67]});
+t.insert({"_id": 5, "geo": [-122.29, 37.68]});
+t.insert({"_id": 6, "geo": [-122.29, 37.67]});
+t.insert({"_id": 7, "geo": [-122.29, 37.67]});
+t.insert({"_id": 8, "geo": [-122.29, 37.68]});
+t.insert({"_id": 9, "geo": [-122.29, 37.68]});
+t.insert({"_id": 10, "geo": [-122.3, 37.67]});
+t.insert({"_id": 11, "geo": [-122.31, 37.67]});
+t.insert({"_id": 12, "geo": [-122.3, 37.66]});
+t.insert({"_id": 13, "geo": [-122.2435, 37.637072]});
+t.insert({"_id": 14, "geo": [-122.289505, 37.695774]});
+t.ensureIndex({geo: "2d"});
-t.ensureIndex({ geo : "2d" });
-
-c=t.find({geo: {"$within": {"$box": [[-125.078461,36.494473], [-120.320648,38.905199]]} } });
+c = t.find({geo: {"$within": {"$box": [[-125.078461, 36.494473], [-120.320648, 38.905199]]}}});
assert.eq(11, c.count(), "A1");
-c=t.find({geo: {"$within": {"$box": [[-124.078461,36.494473], [-120.320648,38.905199]]} } });
+c = t.find({geo: {"$within": {"$box": [[-124.078461, 36.494473], [-120.320648, 38.905199]]}}});
assert.eq(11, c.count(), "B1");
diff --git a/jstests/core/geof.js b/jstests/core/geof.js
index 718c6e6593d..1d7f13eb881 100644
--- a/jstests/core/geof.js
+++ b/jstests/core/geof.js
@@ -2,18 +2,20 @@ t = db.geof;
t.drop();
// corners (dist ~0.98)
-t.insert({loc: [ 0.7, 0.7]});
-t.insert({loc: [ 0.7, -0.7]});
-t.insert({loc: [-0.7, 0.7]});
+t.insert({loc: [0.7, 0.7]});
+t.insert({loc: [0.7, -0.7]});
+t.insert({loc: [-0.7, 0.7]});
t.insert({loc: [-0.7, -0.7]});
// on x axis (dist == 0.9)
t.insert({loc: [-0.9, 0]});
t.insert({loc: [-0.9, 0]});
-t.ensureIndex( { loc : "2d" } );
+t.ensureIndex({loc: "2d"});
-t.find({loc: {$near: [0,0]}}).limit(2).forEach( function(o){
- //printjson(o);
- assert.lt(Geo.distance([0,0], o.loc), 0.95);
-});
+t.find({loc: {$near: [0, 0]}})
+ .limit(2)
+ .forEach(function(o) {
+ // printjson(o);
+ assert.lt(Geo.distance([0, 0], o.loc), 0.95);
+ });
diff --git a/jstests/core/geonear_cmd_input_validation.js b/jstests/core/geonear_cmd_input_validation.js
index 2a44391183b..ad3d56d240a 100644
--- a/jstests/core/geonear_cmd_input_validation.js
+++ b/jstests/core/geonear_cmd_input_validation.js
@@ -9,12 +9,8 @@ t.ensureIndex({loc: "2dsphere"});
// 2d index and minDistance.
// 2d index and GeoJSON
// 2dsphere index and spherical=false
-var indexTypes = ['2d', '2dsphere'],
- pointTypes = [
- {type: 'Point', coordinates: [0, 0]},
- [0, 0]],
- sphericalOptions = [true, false],
- optionNames = ['minDistance', 'maxDistance'],
+var indexTypes = ['2d', '2dsphere'], pointTypes = [{type: 'Point', coordinates: [0, 0]}, [0, 0]],
+ sphericalOptions = [true, false], optionNames = ['minDistance', 'maxDistance'],
badNumbers = [-1, undefined, 'foo'];
indexTypes.forEach(function(indexType) {
@@ -38,28 +34,20 @@ indexTypes.forEach(function(indexType) {
}
// Unsupported combinations should return errors.
- if (
- (indexType == '2d' && optionName == 'minDistance') ||
- (indexType == '2d' && !isLegacy) ||
- (indexType == '2dsphere' && !spherical)
- ) {
- assert.commandFailed(
- db.runCommand(makeCommand(1)),
- "geoNear with spherical=" + spherical + " and " + indexType
- + " index and " + pointDescription
- + " should've failed."
- );
+ if ((indexType == '2d' && optionName == 'minDistance') ||
+ (indexType == '2d' && !isLegacy) || (indexType == '2dsphere' && !spherical)) {
+ assert.commandFailed(db.runCommand(makeCommand(1)),
+ "geoNear with spherical=" + spherical + " and " +
+ indexType + " index and " + pointDescription +
+ " should've failed.");
// Stop processing this combination in the test matrix.
return;
}
// This is a supported combination. No error.
- assert.commandWorked(db.runCommand({
- geoNear: t.getName(),
- near: pointType,
- spherical: spherical
- }));
+ assert.commandWorked(
+ db.runCommand({geoNear: t.getName(), near: pointType, spherical: spherical}));
// No error with min/maxDistance 1.
db.runCommand(makeCommand(1));
@@ -71,28 +59,25 @@ indexTypes.forEach(function(indexType) {
}
// Try several bad values for min/maxDistance.
- badNumbers.concat(outOfRangeDistances).forEach(function(badDistance) {
+ badNumbers.concat(outOfRangeDistances)
+ .forEach(function(badDistance) {
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with "
- + optionName + " " + badDistance);
+ var msg =
+ ("geoNear with spherical=" + spherical + " and " + pointDescription +
+ " and " + indexType + " index should've failed with " + optionName +
+ " " + badDistance);
- assert.commandFailed(
- db.runCommand(makeCommand(badDistance)),
- msg);
- });
+ assert.commandFailed(db.runCommand(makeCommand(badDistance)), msg);
+ });
// Bad values for limit / num.
['num', 'limit'].forEach(function(limitOptionName) {
[-1, 'foo'].forEach(function(badLimit) {
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with '"
- + limitOptionName + "' " + badLimit);
+ var msg =
+ ("geoNear with spherical=" + spherical + " and " + pointDescription +
+ " and " + indexType + " index should've failed with '" +
+ limitOptionName + "' " + badLimit);
var command = makeCommand(1);
command[limitOptionName] = badLimit;
@@ -103,11 +88,9 @@ indexTypes.forEach(function(indexType) {
// Bad values for distanceMultiplier.
badNumbers.forEach(function(badNumber) {
- var msg = (
- "geoNear with spherical=" + spherical + " and "
- + pointDescription + " and " + indexType
- + " index should've failed with distanceMultiplier "
- + badNumber);
+ var msg = ("geoNear with spherical=" + spherical + " and " + pointDescription +
+ " and " + indexType +
+ " index should've failed with distanceMultiplier " + badNumber);
var command = makeCommand(1);
command['distanceMultiplier'] = badNumber;
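
One supported combination from the matrix above, written out as a standalone sketch; the collection name and the inserted point are hypothetical.

    var c = db.geonear_validation_sketch;  // hypothetical collection for illustration
    c.drop();
    c.ensureIndex({loc: "2dsphere"});
    c.insert({loc: {type: "Point", coordinates: [0, 0.001]}});
    // 2dsphere + spherical:true accepts both minDistance and maxDistance (in meters).
    assert.commandWorked(db.runCommand({
        geoNear: c.getName(),
        near: {type: "Point", coordinates: [0, 0]},
        spherical: true,
        minDistance: 1,
        maxDistance: 1000
    }));
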
diff --git a/jstests/core/getlog1.js b/jstests/core/getlog1.js
index cf59b8f8318..d19ba7cdde8 100644
--- a/jstests/core/getlog1.js
+++ b/jstests/core/getlog1.js
@@ -1,7 +1,7 @@
-// to run:
+// to run:
// ./mongo jstests/<this-file>
-contains = function(arr,obj) {
+contains = function(arr, obj) {
var i = arr.length;
while (i--) {
if (arr[i] === obj) {
@@ -11,17 +11,17 @@ contains = function(arr,obj) {
return false;
};
-var resp = db.adminCommand({getLog:"*"});
-assert( resp.ok == 1, "error executing getLog command" );
-assert( resp.names, "no names field" );
-assert( resp.names.length > 0, "names array is empty" );
-assert( contains(resp.names,"global") , "missing global category" );
-assert( !contains(resp.names,"butty") , "missing butty category" );
+var resp = db.adminCommand({getLog: "*"});
+assert(resp.ok == 1, "error executing getLog command");
+assert(resp.names, "no names field");
+assert(resp.names.length > 0, "names array is empty");
+assert(contains(resp.names, "global"), "missing global category");
+assert(!contains(resp.names, "butty"), "missing butty category");
-resp = db.adminCommand({getLog:"global"});
-assert( resp.ok == 1, "error executing getLog command" );
-assert( resp.log, "no log field" );
-assert( resp.log.length > 0 , "no log lines" );
+resp = db.adminCommand({getLog: "global"});
+assert(resp.ok == 1, "error executing getLog command");
+assert(resp.log, "no log field");
+assert(resp.log.length > 0, "no log lines");
// getLog value must be a string
-assert.commandFailed(db.adminCommand({ getLog: 21 }));
+assert.commandFailed(db.adminCommand({getLog: 21}));
diff --git a/jstests/core/getlog2.js b/jstests/core/getlog2.js
index 9bd217d3844..b6cf223b967 100644
--- a/jstests/core/getlog2.js
+++ b/jstests/core/getlog2.js
@@ -14,35 +14,49 @@ contains = function(arr, func) {
};
// test doesn't work when talking to mongos
-if(db.isMaster().msg != "isdbgrid") {
- // run a slow query
- glcol.save({ "SENTINEL": 1 });
- glcol.findOne({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } });
+if (db.isMaster().msg != "isdbgrid") {
+ // run a slow query
+ glcol.save({"SENTINEL": 1});
+ glcol.findOne({
+ "SENTINEL": 1,
+ "$where": function() {
+ sleep(1000);
+ return true;
+ }
+ });
- // run a slow update
- glcol.update({ "SENTINEL": 1, "$where": function() { sleep(1000); return true; } }, { "x": "x" });
+ // run a slow update
+ glcol.update(
+ {
+ "SENTINEL": 1,
+ "$where": function() {
+ sleep(1000);
+ return true;
+ }
+ },
+ {"x": "x"});
- var resp = db.adminCommand({getLog:"global"});
- assert( resp.ok == 1, "error executing getLog command" );
- assert( resp.log, "no log field" );
- assert( resp.log.length > 0 , "no log lines" );
+ var resp = db.adminCommand({getLog: "global"});
+ assert(resp.ok == 1, "error executing getLog command");
+ assert(resp.log, "no log field");
+ assert(resp.log.length > 0, "no log lines");
- // ensure that slow query is logged in detail
- assert( contains(resp.log, function(v) {
- print(v);
- var opString = db.getMongo().useReadCommands() ? " find " : " query ";
- var filterString = db.getMongo().useReadCommands() ? "filter:" : "query:";
- return v.indexOf(opString) != -1 && v.indexOf(filterString) != -1 &&
- v.indexOf("keysExamined:") != -1 &&
- v.indexOf("docsExamined:") != -1 &&
- v.indexOf("SENTINEL") != -1;
- }) );
+ // ensure that slow query is logged in detail
+ assert(contains(resp.log,
+ function(v) {
+ print(v);
+ var opString = db.getMongo().useReadCommands() ? " find " : " query ";
+ var filterString = db.getMongo().useReadCommands() ? "filter:" : "query:";
+ return v.indexOf(opString) != -1 && v.indexOf(filterString) != -1 &&
+ v.indexOf("keysExamined:") != -1 && v.indexOf("docsExamined:") != -1 &&
+ v.indexOf("SENTINEL") != -1;
+ }));
- // same, but for update
- assert( contains(resp.log, function(v) {
- return v.indexOf(" update ") != -1 && v.indexOf("query") != -1 &&
- v.indexOf("keysExamined:") != -1 &&
- v.indexOf("docsExamined:") != -1 &&
- v.indexOf("SENTINEL") != -1;
- }) );
+ // same, but for update
+ assert(contains(resp.log,
+ function(v) {
+ return v.indexOf(" update ") != -1 && v.indexOf("query") != -1 &&
+ v.indexOf("keysExamined:") != -1 && v.indexOf("docsExamined:") != -1 &&
+ v.indexOf("SENTINEL") != -1;
+ }));
}
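
A small follow-on sketch for pulling the matching lines back out of the getLog output; the filter predicate is an assumption mirroring the substrings checked above.

    var resp = db.adminCommand({getLog: "global"});
    assert.commandWorked(resp);
    var slowOps = resp.log.filter(function(line) {
        return line.indexOf("SENTINEL") != -1 && line.indexOf("docsExamined:") != -1;
    });
    print("slow operations mentioning SENTINEL: " + slowOps.length);
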
diff --git a/jstests/core/getmore_invalidation.js b/jstests/core/getmore_invalidation.js
index 58104aaf95b..5c5d06deb1c 100644
--- a/jstests/core/getmore_invalidation.js
+++ b/jstests/core/getmore_invalidation.js
@@ -43,7 +43,8 @@
// Update the next matching doc so that it no longer matches.
assert.writeOK(t.update({_id: 3}, {$set: {a: "nomatch"}}));
- // Either the cursor should skip the result that no longer matches, or we should get back the old
+ // Either the cursor should skip the result that no longer matches, or we should get back the
+ // old
// version of the doc.
assert(!cursor.hasNext() || cursor.next()["a"] === "bar");
@@ -55,7 +56,7 @@
assert.writeOK(t.insert({a: 2, b: 3}));
assert.writeOK(t.insert({a: 2, b: 4}));
- cursor = t.find({a: {$in: [1,2]}}).sort({b: 1}).batchSize(2);
+ cursor = t.find({a: {$in: [1, 2]}}).sort({b: 1}).batchSize(2);
cursor.next();
cursor.next();
@@ -72,13 +73,14 @@
assert.writeOK(t.insert({a: 2, b: 3}));
assert.writeOK(t.insert({a: 2, b: 4}));
- cursor = t.find({a: {$in: [1,2]}}).sort({b: 1}).batchSize(2);
+ cursor = t.find({a: {$in: [1, 2]}}).sort({b: 1}).batchSize(2);
cursor.next();
cursor.next();
assert.writeOK(t.update({a: 2, b: 3}, {$set: {a: 6}}));
- // Either the cursor should skip the result that no longer matches, or we should get back the old
+ // Either the cursor should skip the result that no longer matches, or we should get back the
+ // old
// version of the doc.
assert(cursor.hasNext());
assert(cursor.next()["a"] === 2);
@@ -198,7 +200,7 @@
t.insert({a: 3, b: 3});
t.insert({a: 2, b: 1});
- cursor = t.find({a: {$in: [1,2,3]}}).sort({b: 1}).batchSize(2);
+ cursor = t.find({a: {$in: [1, 2, 3]}}).sort({b: 1}).batchSize(2);
cursor.next();
cursor.next();
@@ -215,7 +217,7 @@
t.insert({a: 3, b: 3});
t.insert({a: 2, b: 1});
- cursor = t.find({a: {$in: [1,2,3]}}).sort({b: 1}).batchSize(2);
+ cursor = t.find({a: {$in: [1, 2, 3]}}).sort({b: 1}).batchSize(2);
cursor.next();
cursor.next();
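
The pattern these cases share, reduced to a standalone sketch (collection name hypothetical): open a cursor with a small batch size, mutate a not-yet-returned document so it no longer matches, and accept either outcome on the next getMore.

    var c = db.getmore_invalidation_sketch;  // hypothetical collection for illustration
    c.drop();
    assert.writeOK(c.insert({_id: 1, a: "match"}));
    assert.writeOK(c.insert({_id: 2, a: "match"}));
    assert.writeOK(c.insert({_id: 3, a: "match"}));
    var cur = c.find({a: "match"}).batchSize(2);
    cur.next();
    cur.next();
    assert.writeOK(c.update({_id: 3}, {$set: {a: "nomatch"}}));
    // Either the modified document is skipped, or its old version is returned.
    assert(!cur.hasNext() || cur.next()["a"] === "match");
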
diff --git a/jstests/core/group1.js b/jstests/core/group1.js
index f59442cc6d7..6100ee94c70 100644
--- a/jstests/core/group1.js
+++ b/jstests/core/group1.js
@@ -1,116 +1,145 @@
t = db.group1;
t.drop();
-t.save( { n : 1 , a : 1 } );
-t.save( { n : 2 , a : 1 } );
-t.save( { n : 3 , a : 2 } );
-t.save( { n : 4 , a : 2 } );
-t.save( { n : 5 , a : 2 } );
-
-var p = { key : { a : true } ,
- reduce : function(obj,prev) { prev.count++; },
- initial: { count: 0 }
- };
-
-res = t.group( p );
-
-assert( res.length == 2 , "A" );
-assert( res[0].a == 1 , "B" );
-assert( res[0].count == 2 , "C" );
-assert( res[1].a == 2 , "D" );
-assert( res[1].count == 3 , "E" );
-
-assert.eq( res , t.groupcmd( p ) , "ZZ" );
-
-ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } );
-assert.eq( 1 , ret.length , "ZZ 2" );
-assert.eq( 5 , ret[0].count , "ZZ 3" );
-
-ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n; } , initial : { sum : 0 } } );
-assert.eq( 1 , ret.length , "ZZ 4" );
-assert.eq( 15 , ret[0].sum , "ZZ 5" );
+t.save({n: 1, a: 1});
+t.save({n: 2, a: 1});
+t.save({n: 3, a: 2});
+t.save({n: 4, a: 2});
+t.save({n: 5, a: 2});
+
+var p = {
+ key: {a: true},
+ reduce: function(obj, prev) {
+ prev.count++;
+ },
+ initial: {count: 0}
+};
+
+res = t.group(p);
+
+assert(res.length == 2, "A");
+assert(res[0].a == 1, "B");
+assert(res[0].count == 2, "C");
+assert(res[1].a == 2, "D");
+assert(res[1].count == 3, "E");
+
+assert.eq(res, t.groupcmd(p), "ZZ");
+
+ret = t.groupcmd({key: {}, reduce: p.reduce, initial: p.initial});
+assert.eq(1, ret.length, "ZZ 2");
+assert.eq(5, ret[0].count, "ZZ 3");
+
+ret = t.groupcmd({
+ key: {},
+ reduce: function(obj, prev) {
+ prev.sum += obj.n;
+ },
+ initial: {sum: 0}
+});
+assert.eq(1, ret.length, "ZZ 4");
+assert.eq(15, ret[0].sum, "ZZ 5");
t.drop();
-t.save( { "a" : 2 } );
-t.save( { "b" : 5 } );
-t.save( { "a" : 1 } );
-t.save( { "a" : 2 } );
-
-c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } };
+t.save({"a": 2});
+t.save({"b": 5});
+t.save({"a": 1});
+t.save({"a": 2});
-assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" );
+c = {
+ key: {a: 1},
+ cond: {},
+ initial: {"count": 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ }
+};
+assert.eq(t.group(c), t.groupcmd(c), "ZZZZ");
t.drop();
-t.save( { name : { first : "a" , last : "A" } } );
-t.save( { name : { first : "b" , last : "B" } } );
-t.save( { name : { first : "a" , last : "A" } } );
-
+t.save({name: {first: "a", last: "A"}});
+t.save({name: {first: "b", last: "B"}});
+t.save({name: {first: "a", last: "A"}});
-p = { key : { 'name.first' : true } ,
- reduce : function(obj,prev) { prev.count++; },
- initial: { count: 0 }
- };
-
-res = t.group( p );
-assert.eq( 2 , res.length , "Z1" );
-assert.eq( "a" , res[0]['name.first'] , "Z2" );
-assert.eq( "b" , res[1]['name.first'] , "Z3" );
-assert.eq( 2 , res[0].count , "Z4" );
-assert.eq( 1 , res[1].count , "Z5" );
+p = {
+ key: {'name.first': true},
+ reduce: function(obj, prev) {
+ prev.count++;
+ },
+ initial: {count: 0}
+};
+
+res = t.group(p);
+assert.eq(2, res.length, "Z1");
+assert.eq("a", res[0]['name.first'], "Z2");
+assert.eq("b", res[1]['name.first'], "Z3");
+assert.eq(2, res[0].count, "Z4");
+assert.eq(1, res[1].count, "Z5");
// SERVER-15851 Test invalid user input.
p = {
- ns: "group1",
- key: {"name.first": true},
- $reduce: function(obj, prev){prev.count++;},
- initial: {count: 0},
- finalize: "abc"
- };
+ ns: "group1",
+ key: {"name.first": true},
+ $reduce: function(obj, prev) {
+ prev.count++;
+ },
+ initial: {count: 0},
+ finalize: "abc"
+};
assert.commandFailedWithCode(db.runCommand({group: p}),
ErrorCodes.JSInterpreterFailure,
"Illegal finalize function");
p = {
- ns: "group1",
- key: {"name.first": true},
- $reduce: function(obj, prev){prev.count++;},
- initial: {count: 0},
- finalize: function(obj){ob;}
- };
+ ns: "group1",
+ key: {"name.first": true},
+ $reduce: function(obj, prev) {
+ prev.count++;
+ },
+ initial: {count: 0},
+ finalize: function(obj) {
+ ob;
+ }
+};
assert.commandFailedWithCode(db.runCommand({group: p}),
ErrorCodes.JSInterpreterFailure,
"Illegal finalize function 2");
p = {
- ns: "group1",
- $keyf: "a" ,
- $reduce: function(obj, prev){prev.count++;},
- initial: {count: 0},
- finalize: function(obj){ob;}
- };
+ ns: "group1",
+ $keyf: "a",
+ $reduce: function(obj, prev) {
+ prev.count++;
+ },
+ initial: {count: 0},
+ finalize: function(obj) {
+ ob;
+ }
+};
assert.commandFailedWithCode(db.runCommand({group: p}),
ErrorCodes.JSInterpreterFailure,
"Illegal keyf function");
p = {
- ns: "group1",
- key: {"name.first": true},
- $reduce: "abc",
- initial: {count: 0}
- };
+ ns: "group1",
+ key: {"name.first": true},
+ $reduce: "abc",
+ initial: {count: 0}
+};
assert.commandFailedWithCode(db.runCommand({group: p}),
ErrorCodes.JSInterpreterFailure,
"Illegal reduce function");
p = {
- ns: "group1",
- key: {"name.first": true},
- $reduce: function(obj, pre){prev.count++;},
- initial: {count: 0}
- };
+ ns: "group1",
+ key: {"name.first": true},
+ $reduce: function(obj, pre) {
+ prev.count++;
+ },
+ initial: {count: 0}
+};
assert.commandFailedWithCode(db.runCommand({group: p}),
ErrorCodes.JSInterpreterFailure,
"Illegal reduce function 2");
diff --git a/jstests/core/group2.js b/jstests/core/group2.js
index b5566bce2c7..ada675f6f69 100644
--- a/jstests/core/group2.js
+++ b/jstests/core/group2.js
@@ -5,12 +5,13 @@ t.save({a: 2});
t.save({b: 5});
t.save({a: 1});
-cmd = { key: {a: 1},
- initial: {count: 0},
- reduce: function(obj, prev) {
- prev.count++;
- }
- };
+cmd = {
+ key: {a: 1},
+ initial: {count: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ }
+};
result = t.group(cmd);
@@ -24,18 +25,19 @@ assert.eq(1, result[1].count, "G");
assert.eq(1, result[2].count, "H");
var keyFn = function(x) {
- return { a: 'a' in x ? x.a : null };
+ return {
+ a: 'a' in x ? x.a : null
+ };
};
delete cmd.key;
cmd["$keyf"] = keyFn;
-result2 = t.group( cmd );
-
-assert.eq( result , result2, "check result2" );
+result2 = t.group(cmd);
+assert.eq(result, result2, "check result2");
delete cmd.$keyf;
cmd["keyf"] = keyFn;
-result3 = t.group( cmd );
+result3 = t.group(cmd);
-assert.eq( result , result3, "check result3" );
+assert.eq(result, result3, "check result3");
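
For orientation, a condensed sketch of the two ways of specifying the grouping key that this test compares; the trailing aggregate comment is an assumption about an equivalent pipeline, not something the test runs.

    var spec = {
        initial: {count: 0},
        reduce: function(obj, prev) {
            prev.count++;
        }
    };
    spec.key = {a: 1};  // fixed key document
    var byKey = db.group2.group(spec);
    delete spec.key;
    spec.keyf = function(x) {
        return {a: 'a' in x ? x.a : null};  // computed key function
    };
    var byKeyf = db.group2.group(spec);
    assert.eq(byKey, byKeyf);
    // Roughly equivalent: db.group2.aggregate([{$group: {_id: "$a", count: {$sum: 1}}}])
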
diff --git a/jstests/core/group3.js b/jstests/core/group3.js
index d113b9d570f..134953b6649 100644
--- a/jstests/core/group3.js
+++ b/jstests/core/group3.js
@@ -6,20 +6,20 @@ t.save({a: 2});
t.save({a: 3});
t.save({a: 4});
-
-cmd = { initial: {count: 0, sum: 0},
- reduce: function(obj, prev) {
- prev.count++;
- prev.sum += obj.a;
- },
- finalize: function(obj) {
- if (obj.count){
- obj.avg = obj.sum / obj.count;
- }else{
- obj.avg = 0;
- }
- },
- };
+cmd = {
+ initial: {count: 0, sum: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ prev.sum += obj.a;
+ },
+ finalize: function(obj) {
+ if (obj.count) {
+ obj.avg = obj.sum / obj.count;
+ } else {
+ obj.avg = 0;
+ }
+ },
+};
result1 = t.group(cmd);
@@ -28,11 +28,10 @@ assert.eq(10, result1[0].sum, "test1");
assert.eq(4, result1[0].count, "test1");
assert.eq(2.5, result1[0].avg, "test1");
-
cmd['finalize'] = function(obj) {
- if (obj.count){
+ if (obj.count) {
return obj.sum / obj.count;
- }else{
+ } else {
return 0;
}
};
diff --git a/jstests/core/group4.js b/jstests/core/group4.js
index 788d55c7962..2465274c027 100644
--- a/jstests/core/group4.js
+++ b/jstests/core/group4.js
@@ -2,44 +2,39 @@
t = db.group4;
t.drop();
-function test( c , n ){
+function test(c, n) {
var x = {};
- c.forEach(
- function(z){
- assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) );
- }
- );
+ c.forEach(function(z) {
+ assert.eq(z.count, z.values.length, n + "\t" + tojson(z));
+ });
}
-t.insert({name:'bob',foo:1});
-t.insert({name:'bob',foo:2});
-t.insert({name:'alice',foo:1});
-t.insert({name:'alice',foo:3});
-t.insert({name:'fred',foo:3});
-t.insert({name:'fred',foo:4});
+t.insert({name: 'bob', foo: 1});
+t.insert({name: 'bob', foo: 2});
+t.insert({name: 'alice', foo: 1});
+t.insert({name: 'alice', foo: 3});
+t.insert({name: 'fred', foo: 3});
+t.insert({name: 'fred', foo: 4});
-x = t.group(
- {
- key: {foo:1},
- initial: {count:0,values:[]},
- reduce: function (obj, prev){
- prev.count++;
- prev.values.push(obj.name);
- }
- }
-);
-test( x , "A" );
+x = t.group({
+ key: {foo: 1},
+ initial: {count: 0, values: []},
+ reduce: function(obj, prev) {
+ prev.count++;
+ prev.values.push(obj.name);
+ }
+});
+test(x, "A");
-x = t.group(
- {
- key: {foo:1},
- initial: {count:0},
- reduce: function (obj, prev){
- if (!prev.values) {prev.values = [];}
- prev.count++;
- prev.values.push(obj.name);
+x = t.group({
+ key: {foo: 1},
+ initial: {count: 0},
+ reduce: function(obj, prev) {
+ if (!prev.values) {
+ prev.values = [];
}
+ prev.count++;
+ prev.values.push(obj.name);
}
-);
-test( x , "B" );
-
+});
+test(x, "B");
diff --git a/jstests/core/group5.js b/jstests/core/group5.js
index 5d13297bd4d..aa6283f73ff 100644
--- a/jstests/core/group5.js
+++ b/jstests/core/group5.js
@@ -3,36 +3,35 @@ t = db.group5;
t.drop();
// each group N has N + 5 users
-for ( var group=0; group<10; group++ ){
- for ( var i=0; i<5+group; i++ ){
- t.save( { group : "group" + group , user : i } );
+for (var group = 0; group < 10; group++) {
+ for (var i = 0; i < 5 + group; i++) {
+ t.save({group: "group" + group, user: i});
}
}
-function c( group ){
- return t.group(
- {
- key : { group : 1 } ,
- q : { group : "group" + group } ,
- initial : { users : {} },
- reduce : function(obj,prev){
- prev.users[obj.user] = true; // add this user to the hash
- },
- finalize : function(x){
- var count = 0;
- for (var key in x.users){
- count++;
- }
-
- //replace user obj with count
- //count add new field and keep users
- x.users = count;
- return x;
+function c(group) {
+ return t.group({
+ key: {group: 1},
+ q: {group: "group" + group},
+ initial: {users: {}},
+ reduce: function(obj, prev) {
+ prev.users[obj.user] = true; // add this user to the hash
+ },
+ finalize: function(x) {
+ var count = 0;
+ for (var key in x.users) {
+ count++;
}
- })[0]; // returns array
+
+            // replace the users object with its count
+            // (alternatively, one could add a count field and keep the users map)
+ x.users = count;
+ return x;
+ }
+ })[0]; // returns array
}
-assert.eq( "group0" , c(0).group , "g0" );
-assert.eq( 5 , c(0).users , "g0 a" );
-assert.eq( "group5" , c(5).group , "g5" );
-assert.eq( 10 , c(5).users , "g5 a" );
+assert.eq("group0", c(0).group, "g0");
+assert.eq(5, c(0).users, "g0 a");
+assert.eq("group5", c(5).group, "g5");
+assert.eq(10, c(5).users, "g5 a");
diff --git a/jstests/core/group6.js b/jstests/core/group6.js
index b77a37a5d11..fff2fcaafd7 100644
--- a/jstests/core/group6.js
+++ b/jstests/core/group6.js
@@ -1,32 +1,40 @@
t = db.jstests_group6;
t.drop();
-for( i = 1; i <= 10; ++i ) {
- t.save( {i:new NumberLong( i ),y:1} );
+for (i = 1; i <= 10; ++i) {
+ t.save({i: new NumberLong(i), y: 1});
}
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+assert.eq.automsg(
+ "55",
+ "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i");
t.drop();
-for( i = 1; i <= 10; ++i ) {
- if ( i % 2 == 0 ) {
- t.save( {i:new NumberLong( i ),y:1} );
+for (i = 1; i <= 10; ++i) {
+ if (i % 2 == 0) {
+ t.save({i: new NumberLong(i), y: 1});
} else {
- t.save( {i:i,y:1} );
+ t.save({i: i, y: 1});
}
}
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+assert.eq.automsg(
+ "55",
+ "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i");
t.drop();
-for( i = 1; i <= 10; ++i ) {
- if ( i % 2 == 1 ) {
- t.save( {i:new NumberLong( i ),y:1} );
+for (i = 1; i <= 10; ++i) {
+ if (i % 2 == 1) {
+ t.save({i: new NumberLong(i), y: 1});
} else {
- t.save( {i:i,y:1} );
+ t.save({i: i, y: 1});
}
}
-assert.eq.automsg( "55", "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i" );
+assert.eq.automsg(
+ "55",
+ "t.group( {key:'y', reduce:function(doc,out){ out.i += doc.i; }, initial:{i:0} } )[ 0 ].i");
-assert.eq.automsg( "NumberLong(10)", "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count" );
\ No newline at end of file
+assert.eq.automsg(
+ "NumberLong(10)",
+    "t.group( {$reduce: function(doc, prev) { prev.count += 1; }, initial: {count: new NumberLong(0) }} )[ 0 ].count");
\ No newline at end of file
diff --git a/jstests/core/group7.js b/jstests/core/group7.js
index 1413000079c..6d6ef03e99d 100644
--- a/jstests/core/group7.js
+++ b/jstests/core/group7.js
@@ -3,28 +3,28 @@
t = db.jstests_group7;
t.drop();
-function checkForYield( docs, updates ) {
+function checkForYield(docs, updates) {
t.drop();
a = 0;
- for( var i = 0; i < docs; ++i ) {
- t.save( {a:a} );
+ for (var i = 0; i < docs; ++i) {
+ t.save({a: a});
}
// Iteratively update all a values atomically.
p = startParallelShell(
'for( a = 0; a < ' + updates + '; ++a ) {' +
- 'db.jstests_group7.update({ $atomic: true }, { $set: { a: a }}, false, true);' +
- '}' );
+ 'db.jstests_group7.update({ $atomic: true }, { $set: { a: a }}, false, true);' +
+ '}');
- for( var i = 0; i < updates; ++i ) {
+ for (var i = 0; i < updates; ++i) {
print("running group " + i + " of " + updates);
- ret = t.group({key:{a:1},reduce:function(){},initial:{}});
+ ret = t.group({key: {a: 1}, reduce: function() {}, initial: {}});
// Check if group sees more than one a value, indicating that it yielded.
- if ( ret.length > 1 ) {
+ if (ret.length > 1) {
p();
return true;
}
- printjson( ret );
+ printjson(ret);
}
p();
@@ -34,14 +34,14 @@ function checkForYield( docs, updates ) {
var yielded = false;
var docs = 1500;
var updates = 50;
-for( var j = 1; j <= 6; ++j ) {
+for (var j = 1; j <= 6; ++j) {
print("Iteration " + j + " docs = " + docs + " updates = " + updates);
- if ( checkForYield( docs, updates ) ) {
+ if (checkForYield(docs, updates)) {
yielded = true;
break;
}
- // Increase docs and updates to encourage yielding.
+ // Increase docs and updates to encourage yielding.
docs *= 2;
updates *= 2;
}
-assert( yielded );
+assert(yielded);
diff --git a/jstests/core/group8.js b/jstests/core/group8.js
index 14fd890f1e7..85c8248b992 100644
--- a/jstests/core/group8.js
+++ b/jstests/core/group8.js
@@ -9,16 +9,30 @@ assert.writeOK(coll.insert({a: 2, b: "x"}));
assert.writeOK(coll.insert({a: 3, b: "y"}));
// Test case when "count" and "keys" are both zero.
-result = coll.runCommand({group: {ns: coll.getName(), key: {a: 1}, cond: {b: "z"},
- $reduce: function(x, y) {}, initial: {}}});
+result = coll.runCommand({
+ group: {
+ ns: coll.getName(),
+ key: {a: 1},
+ cond: {b: "z"},
+ $reduce: function(x, y) {},
+ initial: {}
+ }
+});
assert.commandWorked(result);
assert.eq(result.count, 0);
assert.eq(result.keys, 0);
assert.eq(result.retval.length, 0);
// Test case when "count" and "keys" are both non-zero.
-result = coll.runCommand({group: {ns: coll.getName(), key: {a: 1}, cond: {b: "x"},
- $reduce: function(x, y) {}, initial: {}}});
+result = coll.runCommand({
+ group: {
+ ns: coll.getName(),
+ key: {a: 1},
+ cond: {b: "x"},
+ $reduce: function(x, y) {},
+ initial: {}
+ }
+});
assert.commandWorked(result);
assert.eq(result.count, 3);
assert.eq(result.keys, 2);
diff --git a/jstests/core/group_empty.js b/jstests/core/group_empty.js
index c1772f88b22..6f5637ac0df 100644
--- a/jstests/core/group_empty.js
+++ b/jstests/core/group_empty.js
@@ -2,11 +2,12 @@
t = db.group_empty;
t.drop();
-res1 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
-t.ensureIndex( { x : 1 } );
-res2 = db.runCommand({group: {$reduce: function(){}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
+res1 = db.runCommand(
+ {group: {$reduce: function() {}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
+t.ensureIndex({x: 1});
+res2 = db.runCommand(
+ {group: {$reduce: function() {}, ns: 'group_empty', cond: {}, key: {}, initial: {count: 0}}});
assert.docEq(res1.retval, res2.retval);
assert.eq(res1.keys, res2.keys);
assert.eq(res1.count, res2.count);
-
diff --git a/jstests/core/grow_hash_table.js b/jstests/core/grow_hash_table.js
index 0c782444a2d..b26baae31af 100644
--- a/jstests/core/grow_hash_table.js
+++ b/jstests/core/grow_hash_table.js
@@ -11,9 +11,13 @@ var testDB = db.getSiblingDB('grow_hash_table');
var doTest = function(count) {
print('Testing with count of ' + count);
testDB.dropDatabase();
- var id = { data: 1 };
- var doc = { _id: id };
- var projection = { };
+ var id = {
+ data: 1
+ };
+ var doc = {
+ _id: id
+ };
+ var projection = {};
// Create a document and a projection with fields r1, r2, r3 ...
for (var i = 1; i <= count; ++i) {
@@ -27,11 +31,10 @@ var doTest = function(count) {
// Try to read the document using a large projection
try {
- var findCount = testDB.collection.find({ _id: id }, projection).itcount();
+ var findCount = testDB.collection.find({_id: id}, projection).itcount();
assert(findCount == 1,
'Failed to find single stored document, find().itcount() == ' + findCount);
- }
- catch (e) {
+ } catch (e) {
testDB.dropDatabase();
doassert('Test FAILED! Caught exception ' + tojsononeline(e));
}
diff --git a/jstests/core/hashindex1.js b/jstests/core/hashindex1.js
index 449f5520b84..778e31d84b2 100644
--- a/jstests/core/hashindex1.js
+++ b/jstests/core/hashindex1.js
@@ -4,88 +4,95 @@ t.drop();
// Include helpers for analyzing explain output.
load("jstests/libs/analyze_plan.js");
-//test non-single field hashed indexes don't get created (maybe change later)
-var badspec = {a : "hashed" , b : 1};
-t.ensureIndex( badspec );
-assert.eq( t.getIndexes().length , 1 , "only _id index should be created");
-
-//test unique index not created (maybe change later)
-var goodspec = {a : "hashed"};
-t.ensureIndex( goodspec , {"unique" : true});
-assert.eq( t.getIndexes().length , 1 , "unique index got created.");
-
-//now test that non-unique index does get created
+// test that non-single-field hashed indexes don't get created (maybe change later)
+var badspec = {
+ a: "hashed",
+ b: 1
+};
+t.ensureIndex(badspec);
+assert.eq(t.getIndexes().length, 1, "only _id index should be created");
+
+// test unique index not created (maybe change later)
+var goodspec = {
+ a: "hashed"
+};
+t.ensureIndex(goodspec, {"unique": true});
+assert.eq(t.getIndexes().length, 1, "unique index got created.");
+
+// now test that non-unique index does get created
t.ensureIndex(goodspec);
-assert.eq( t.getIndexes().length , 2 , "hashed index didn't get created");
+assert.eq(t.getIndexes().length, 2, "hashed index didn't get created");
-//test basic inserts
-for(i=0; i < 10; i++ ){
- t.insert( {a:i } );
+// test basic inserts
+for (i = 0; i < 10; i++) {
+ t.insert({a: i});
}
-assert.eq( t.find().count() , 10 , "basic insert didn't work");
-assert.eq( t.find().hint(goodspec).toArray().length , 10 , "basic insert didn't work");
-assert.eq( t.find({a : 3}).hint({_id : 1}).toArray()[0]._id ,
- t.find({a : 3}).hint(goodspec).toArray()[0]._id ,
- "hashindex lookup didn't work" );
-
-
-//make sure things with the same hash are not both returned
-t.insert( {a: 3.1} );
-assert.eq( t.find().count() , 11 , "additional insert didn't work");
-assert.eq( t.find({a : 3.1}).hint(goodspec).toArray().length , 1);
-assert.eq( t.find({a : 3}).hint(goodspec).toArray().length , 1);
-//test right obj is found
-assert.eq( t.find({a : 3.1}).hint(goodspec).toArray()[0].a , 3.1);
+assert.eq(t.find().count(), 10, "basic insert didn't work");
+assert.eq(t.find().hint(goodspec).toArray().length, 10, "basic insert didn't work");
+assert.eq(t.find({a: 3}).hint({_id: 1}).toArray()[0]._id,
+ t.find({a: 3}).hint(goodspec).toArray()[0]._id,
+ "hashindex lookup didn't work");
+
+// make sure things with the same hash are not both returned
+t.insert({a: 3.1});
+assert.eq(t.find().count(), 11, "additional insert didn't work");
+assert.eq(t.find({a: 3.1}).hint(goodspec).toArray().length, 1);
+assert.eq(t.find({a: 3}).hint(goodspec).toArray().length, 1);
+// test right obj is found
+assert.eq(t.find({a: 3.1}).hint(goodspec).toArray()[0].a, 3.1);
// Make sure we're using the hashed index.
-var explain = t.find({a : 1}).explain();
-assert( isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
+var explain = t.find({a: 1}).explain();
+assert(isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
// SERVER-12222
-//printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() )
-//assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor ,
+// printjson( t.find({a : {$gte : 3 , $lte : 3}}).explain() )
+// assert.eq( t.find({a : {$gte : 3 , $lte : 3}}).explain().cursor ,
// cursorname ,
// "not using hashed cursor");
-var explain = t.find({c : 1}).explain();
-assert( !isIxscan(explain.queryPlanner.winningPlan), "using irrelevant hashed index");
+var explain = t.find({c: 1}).explain();
+assert(!isIxscan(explain.queryPlanner.winningPlan), "using irrelevant hashed index");
// Hash index used with a $in set membership predicate.
-var explain = t.find({a : {$in : [1,2]}}).explain();
+var explain = t.find({a: {$in: [1, 2]}}).explain();
printjson(explain);
-assert( isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
+assert(isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
// Hash index used with a singleton $and predicate conjunction.
-var explain = t.find({$and : [{a : 1}]}).explain();
-assert( isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
+var explain = t.find({$and: [{a: 1}]}).explain();
+assert(isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
// Hash index used with a non singleton $and predicate conjunction.
-var explain = t.find({$and : [{a : {$in : [1,2]}},{a : {$gt : 1}}]}).explain();
-assert( isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
+var explain = t.find({$and: [{a: {$in: [1, 2]}}, {a: {$gt: 1}}]}).explain();
+assert(isIxscan(explain.queryPlanner.winningPlan), "not using hashed index");
-//test creation of index based on hash of _id index
-var goodspec2 = {'_id' : "hashed"};
-t.ensureIndex( goodspec2 );
-assert.eq( t.getIndexes().length , 3 , "_id index didn't get created");
+// test creation of a hashed index on the _id field
+var goodspec2 = {
+ '_id': "hashed"
+};
+t.ensureIndex(goodspec2);
+assert.eq(t.getIndexes().length, 3, "_id index didn't get created");
var newid = t.findOne()["_id"];
-assert.eq( t.find( {_id : newid} ).hint( {_id : 1} ).toArray()[0]._id ,
- t.find( {_id : newid} ).hint( goodspec2 ).toArray()[0]._id,
- "using hashed index and different index returns different docs");
-
-
-//test creation of sparse hashed index
-var sparseindex = {b : "hashed"};
-t.ensureIndex( sparseindex , {"sparse" : true});
-assert.eq( t.getIndexes().length , 4 , "sparse index didn't get created");
-
-//test sparse index has smaller total items on after inserts
-for(i=0; i < 10; i++ ){
- t.insert( {b : i} );
+assert.eq(t.find({_id: newid}).hint({_id: 1}).toArray()[0]._id,
+ t.find({_id: newid}).hint(goodspec2).toArray()[0]._id,
+ "using hashed index and different index returns different docs");
+
+// test creation of sparse hashed index
+var sparseindex = {
+ b: "hashed"
+};
+t.ensureIndex(sparseindex, {"sparse": true});
+assert.eq(t.getIndexes().length, 4, "sparse index didn't get created");
+
+// test that the sparse index has fewer total items after the inserts
+for (i = 0; i < 10; i++) {
+ t.insert({b: i});
}
var totalb = t.find().hint(sparseindex).toArray().length;
-assert.eq( totalb , 10 , "sparse index has wrong total");
+assert.eq(totalb, 10, "sparse index has wrong total");
-var total = t.find().hint({"_id" : 1}).toArray().length;
+var total = t.find().hint({"_id": 1}).toArray().length;
var totala = t.find().hint(goodspec).toArray().length;
-assert.eq(total , totala , "non-sparse index has wrong total");
-assert.lt(totalb , totala , "sparse index should have smaller total");
+assert.eq(total, totala, "non-sparse index has wrong total");
+assert.lt(totalb, totala, "sparse index should have smaller total");
diff --git a/jstests/core/hashtest1.js b/jstests/core/hashtest1.js
index d0307c2e59e..631aee522da 100644
--- a/jstests/core/hashtest1.js
+++ b/jstests/core/hashtest1.js
@@ -1,79 +1,80 @@
-//hashtest1.js
-//Simple tests to check hashing of various types
-//make sure that different numeric types hash to same thing, and other sanity checks
-
-var hash = function( v , seed ){
- if (seed)
- return db.runCommand({"_hashBSONElement" : v , "seed" : seed})["out"];
- else
- return db.runCommand({"_hashBSONElement" : v})["out"];
+// hashtest1.js
+// Simple tests to check hashing of various types
+// make sure that different numeric types hash to the same thing, and other sanity checks
+
+var hash = function(v, seed) {
+ if (seed)
+ return db.runCommand({"_hashBSONElement": v, "seed": seed})["out"];
+ else
+ return db.runCommand({"_hashBSONElement": v})["out"];
};
-var oidHash = hash( ObjectId() );
-var oidHash2 = hash( ObjectId() );
-var oidHash3 = hash( ObjectId() );
-assert(! friendlyEqual( oidHash, oidHash2) , "ObjectIDs should hash to different things");
-assert(! friendlyEqual( oidHash, oidHash3) , "ObjectIDs should hash to different things");
-assert(! friendlyEqual( oidHash2, oidHash3) , "ObjectIDs should hash to different things");
-
-var intHash = hash( NumberInt(3) );
-var doubHash = hash( 3 );
-var doubHash2 = hash( 3.0 );
-var longHash = hash( NumberLong(3) );
-var fracHash = hash( NumberInt(3.5) );
-assert.eq( intHash , doubHash );
-assert.eq( intHash , doubHash2 );
-assert.eq( intHash , longHash );
-assert.eq( intHash , fracHash );
-
-var trueHash = hash( true );
-var falseHash = hash( false );
-assert(! friendlyEqual( trueHash, falseHash) , "true and false should hash to different things");
-
-var nullHash = hash( null );
-assert(! friendlyEqual( falseHash , nullHash ) , "false and null should hash to different things");
-
-var dateHash = hash( new Date() );
+var oidHash = hash(ObjectId());
+var oidHash2 = hash(ObjectId());
+var oidHash3 = hash(ObjectId());
+assert(!friendlyEqual(oidHash, oidHash2), "ObjectIDs should hash to different things");
+assert(!friendlyEqual(oidHash, oidHash3), "ObjectIDs should hash to different things");
+assert(!friendlyEqual(oidHash2, oidHash3), "ObjectIDs should hash to different things");
+
+var intHash = hash(NumberInt(3));
+var doubHash = hash(3);
+var doubHash2 = hash(3.0);
+var longHash = hash(NumberLong(3));
+var fracHash = hash(NumberInt(3.5));
+assert.eq(intHash, doubHash);
+assert.eq(intHash, doubHash2);
+assert.eq(intHash, longHash);
+assert.eq(intHash, fracHash);
+
+var trueHash = hash(true);
+var falseHash = hash(false);
+assert(!friendlyEqual(trueHash, falseHash), "true and false should hash to different things");
+
+var nullHash = hash(null);
+assert(!friendlyEqual(falseHash, nullHash), "false and null should hash to different things");
+
+var dateHash = hash(new Date());
// Sleep so we get a new date. Sleeping for 1 sometimes returns the same date, so 2
sleep(2);
-var isodateHash = hash( ISODate() );
-assert(! friendlyEqual( dateHash, isodateHash) , "different dates should hash to different things");
+var isodateHash = hash(ISODate());
+assert(!friendlyEqual(dateHash, isodateHash), "different dates should hash to different things");
-var stringHash = hash( "3" );
-assert(! friendlyEqual( intHash , stringHash ), "3 and \"3\" should hash to different things");
+var stringHash = hash("3");
+assert(!friendlyEqual(intHash, stringHash), "3 and \"3\" should hash to different things");
-var regExpHash = hash( RegExp("3") );
-assert(! friendlyEqual( stringHash , regExpHash) , "\"3\" and RegExp(3) should hash to different things");
+var regExpHash = hash(RegExp("3"));
+assert(!friendlyEqual(stringHash, regExpHash),
+ "\"3\" and RegExp(3) should hash to different things");
-var intHash4 = hash( 4 );
-assert(! friendlyEqual( intHash , intHash4 ), "3 and 4 should hash to different things");
+var intHash4 = hash(4);
+assert(!friendlyEqual(intHash, intHash4), "3 and 4 should hash to different things");
-var intHashSeeded = hash( 4 , 3 );
-assert(! friendlyEqual(intHash4 , intHashSeeded ), "different seeds should make different hashes");
+var intHashSeeded = hash(4, 3);
+assert(!friendlyEqual(intHash4, intHashSeeded), "different seeds should make different hashes");
-var minkeyHash = hash( MinKey );
-var maxkeyHash = hash( MaxKey );
-assert(! friendlyEqual(minkeyHash , maxkeyHash ), "minkey and maxkey should hash to different things");
+var minkeyHash = hash(MinKey);
+var maxkeyHash = hash(MaxKey);
+assert(!friendlyEqual(minkeyHash, maxkeyHash), "minkey and maxkey should hash to different things");
-var arrayHash = hash( [0,1.0,NumberLong(2)] );
-var arrayHash2 = hash( [0,NumberInt(1),2] );
-assert.eq( arrayHash , arrayHash2 , "didn't squash numeric types in array");
+var arrayHash = hash([0, 1.0, NumberLong(2)]);
+var arrayHash2 = hash([0, NumberInt(1), 2]);
+assert.eq(arrayHash, arrayHash2, "didn't squash numeric types in array");
-var objectHash = hash( {"0":0, "1" : NumberInt(1), "2" : 2} );
-assert(! friendlyEqual(objectHash , arrayHash2) , "arrays and sub-objects should hash to different things");
+var objectHash = hash({"0": 0, "1": NumberInt(1), "2": 2});
+assert(!friendlyEqual(objectHash, arrayHash2),
+ "arrays and sub-objects should hash to different things");
-var c = hash( {a : {}, b : 1} );
-var d = hash( {a : {b : 1}} );
-assert(! friendlyEqual( c , d ) , "hashing doesn't group sub-docs and fields correctly");
+var c = hash({a: {}, b: 1});
+var d = hash({a: {b: 1}});
+assert(!friendlyEqual(c, d), "hashing doesn't group sub-docs and fields correctly");
-var e = hash( {a : 3 , b : [NumberLong(3), {c : NumberInt(3)}]} );
-var f = hash( {a : NumberLong(3) , b : [NumberInt(3), {c : 3.0}]} );
-assert.eq( e , f , "recursive number squashing doesn't work");
+var e = hash({a: 3, b: [NumberLong(3), {c: NumberInt(3)}]});
+var f = hash({a: NumberLong(3), b: [NumberInt(3), {c: 3.0}]});
+assert.eq(e, f, "recursive number squashing doesn't work");
-var nanHash = hash( 0/0 );
-var zeroHash = hash( 0 );
-assert.eq( nanHash , zeroHash , "NaN and Zero should hash to the same thing");
+var nanHash = hash(0 / 0);
+var zeroHash = hash(0);
+assert.eq(nanHash, zeroHash, "NaN and Zero should hash to the same thing");
-
-//should also test that CodeWScope hashes correctly
-//but waiting for SERVER-3391 (CodeWScope support in shell)
+// should also test that CodeWScope hashes correctly
+// but waiting for SERVER-3391 (CodeWScope support in shell)
diff --git a/jstests/core/hint1.js b/jstests/core/hint1.js
index 1de06fd4e41..ddee0f369be 100644
--- a/jstests/core/hint1.js
+++ b/jstests/core/hint1.js
@@ -1,7 +1,16 @@
p = db.jstests_hint1;
p.drop();
-p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } );
-p.ensureIndex( { ts: 1 } );
+p.save({ts: new Date(1), cls: "entry", verticals: "alleyinsider", live: true});
+p.ensureIndex({ts: 1});
-assert.eq(1, p.find({ live: true, ts: { $lt: new Date(1234119308272) }, cls: "entry", verticals: "alleyinsider" }).sort({ ts: -1 }).hint({ ts: 1 }).count());
+assert.eq(1,
+ p.find({
+ live: true,
+ ts: {$lt: new Date(1234119308272)},
+ cls: "entry",
+ verticals: "alleyinsider"
+ })
+ .sort({ts: -1})
+ .hint({ts: 1})
+ .count());
diff --git a/jstests/core/hostinfo.js b/jstests/core/hostinfo.js
index 16c3810b2c4..6d27b195f39 100644
--- a/jstests/core/hostinfo.js
+++ b/jstests/core/hostinfo.js
@@ -1,33 +1,33 @@
// SERVER-4615: Ensure hostInfo() command returns expected results on each platform
-assert.commandWorked( db.hostInfo() );
+assert.commandWorked(db.hostInfo());
var hostinfo = db.hostInfo();
// test for os-specific fields
if (hostinfo.os.type == "Windows") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Windows os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Windows version" );
+ assert.neq(hostinfo.os.name, "" || null, "Missing Windows os name");
+ assert.neq(hostinfo.os.version, "" || null, "Missing Windows version");
} else if (hostinfo.os.type == "Linux") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Linux os/distro name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Lindows version" );
+ assert.neq(hostinfo.os.name, "" || null, "Missing Linux os/distro name");
+ assert.neq(hostinfo.os.version, "" || null, "Missing Lindows version");
} else if (hostinfo.os.type == "Darwin") {
- assert.neq( hostinfo.os.name, "" || null, "Missing Darwin os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing Darwin version" );
+ assert.neq(hostinfo.os.name, "" || null, "Missing Darwin os name");
+ assert.neq(hostinfo.os.version, "" || null, "Missing Darwin version");
} else if (hostinfo.os.type == "BSD") {
- assert.neq( hostinfo.os.name, "" || null, "Missing FreeBSD os name" );
- assert.neq( hostinfo.os.version, "" || null, "Missing FreeBSD version" );
+ assert.neq(hostinfo.os.name, "" || null, "Missing FreeBSD os name");
+ assert.neq(hostinfo.os.version, "" || null, "Missing FreeBSD version");
}
-// comment out this block for systems which have not implemented hostinfo.
+// comment out this block for systems which have not implemented hostinfo.
if (hostinfo.os.type != "") {
- assert.neq( hostinfo.system.hostname, "" || null, "Missing Hostname" );
- assert.neq( hostinfo.system.currentTime, "" || null, "Missing Current Time" );
- assert.neq( hostinfo.system.cpuAddrSize, "" || null || 0, "Missing CPU Address Size" );
- assert.neq( hostinfo.system.memSizeMB, "" || null, "Missing Memory Size" );
- assert.neq( hostinfo.system.numCores, "" || null || 0, "Missing Number of Cores" );
- assert.neq( hostinfo.system.cpuArch, "" || null, "Missing CPU Architecture" );
- assert.neq( hostinfo.system.numaEnabled, "" || null, "Missing NUMA flag" );
+ assert.neq(hostinfo.system.hostname, "" || null, "Missing Hostname");
+ assert.neq(hostinfo.system.currentTime, "" || null, "Missing Current Time");
+ assert.neq(hostinfo.system.cpuAddrSize, "" || null || 0, "Missing CPU Address Size");
+ assert.neq(hostinfo.system.memSizeMB, "" || null, "Missing Memory Size");
+ assert.neq(hostinfo.system.numCores, "" || null || 0, "Missing Number of Cores");
+ assert.neq(hostinfo.system.cpuArch, "" || null, "Missing CPU Architecture");
+ assert.neq(hostinfo.system.numaEnabled, "" || null, "Missing NUMA flag");
}
diff --git a/jstests/core/id1.js b/jstests/core/id1.js
index 7c40f206851..dedf9c449c5 100644
--- a/jstests/core/id1.js
+++ b/jstests/core/id1.js
@@ -2,15 +2,15 @@
t = db.id1;
t.drop();
-t.save( { _id : { a : 1 , b : 2 } , x : "a" } );
-t.save( { _id : { a : 1 , b : 2 } , x : "b" } );
-t.save( { _id : { a : 3 , b : 2 } , x : "c" } );
-t.save( { _id : { a : 4 , b : 2 } , x : "d" } );
-t.save( { _id : { a : 4 , b : 2 } , x : "e" } );
-t.save( { _id : { a : 2 , b : 2 } , x : "f" } );
+t.save({_id: {a: 1, b: 2}, x: "a"});
+t.save({_id: {a: 1, b: 2}, x: "b"});
+t.save({_id: {a: 3, b: 2}, x: "c"});
+t.save({_id: {a: 4, b: 2}, x: "d"});
+t.save({_id: {a: 4, b: 2}, x: "e"});
+t.save({_id: {a: 2, b: 2}, x: "f"});
-assert.eq( 4 , t.find().count() , "A" );
-assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x );
-assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x );
-assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x );
-assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x );
+assert.eq(4, t.find().count(), "A");
+assert.eq("b", t.findOne({_id: {a: 1, b: 2}}).x);
+assert.eq("c", t.findOne({_id: {a: 3, b: 2}}).x);
+assert.eq("e", t.findOne({_id: {a: 4, b: 2}}).x);
+assert.eq("f", t.findOne({_id: {a: 2, b: 2}}).x);
diff --git a/jstests/core/idhack.js b/jstests/core/idhack.js
index b6f1c5fc4de..292c2ed86b6 100644
--- a/jstests/core/idhack.js
+++ b/jstests/core/idhack.js
@@ -5,78 +5,83 @@ t.drop();
// Include helpers for analyzing explain output.
load("jstests/libs/analyze_plan.js");
-t.insert( { _id : { x : 1 } , z : 1 } );
-t.insert( { _id : { x : 2 } , z : 2 } );
-t.insert( { _id : { x : 3 } , z : 3 } );
-t.insert( { _id : 1 , z : 4 } );
-t.insert( { _id : 2 , z : 5 } );
-t.insert( { _id : 3 , z : 6 } );
+t.insert({_id: {x: 1}, z: 1});
+t.insert({_id: {x: 2}, z: 2});
+t.insert({_id: {x: 3}, z: 3});
+t.insert({_id: 1, z: 4});
+t.insert({_id: 2, z: 5});
+t.insert({_id: 3, z: 6});
-assert.eq( 2 , t.findOne( { _id : { x : 2 } } ).z , "A1" );
-assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).count() , "A2" );
-assert.eq( 2 , t.find( { _id : { $gte : 2 } } ).itcount() , "A3" );
+assert.eq(2, t.findOne({_id: {x: 2}}).z, "A1");
+assert.eq(2, t.find({_id: {$gte: 2}}).count(), "A2");
+assert.eq(2, t.find({_id: {$gte: 2}}).itcount(), "A3");
-t.update( { _id : { x : 2 } } , { $set : { z : 7 } } );
-assert.eq( 7 , t.findOne( { _id : { x : 2 } } ).z , "B1" );
+t.update({_id: {x: 2}}, {$set: {z: 7}});
+assert.eq(7, t.findOne({_id: {x: 2}}).z, "B1");
-t.update( { _id : { $gte : 2 } } , { $set : { z : 8 } } , false , true );
-assert.eq( 4 , t.findOne( { _id : 1 } ).z , "C1" );
-assert.eq( 8 , t.findOne( { _id : 2 } ).z , "C2" );
-assert.eq( 8 , t.findOne( { _id : 3 } ).z , "C3" );
+t.update({_id: {$gte: 2}}, {$set: {z: 8}}, false, true);
+assert.eq(4, t.findOne({_id: 1}).z, "C1");
+assert.eq(8, t.findOne({_id: 2}).z, "C2");
+assert.eq(8, t.findOne({_id: 3}).z, "C3");
// explain output should show that the ID hack was applied.
-var query = { _id : { x : 2 } };
-var explain = t.find( query ).explain( true );
-print( "explain for " + tojson( query , "" , true ) + " = " + tojson( explain ) );
-assert.eq( 1 , explain.executionStats.nReturned , "D1" );
-assert.eq( 1 , explain.executionStats.totalKeysExamined , "D2" );
-assert( isIdhack(explain.queryPlanner.winningPlan), "D3" );
+var query = {
+ _id: {x: 2}
+};
+var explain = t.find(query).explain(true);
+print("explain for " + tojson(query, "", true) + " = " + tojson(explain));
+assert.eq(1, explain.executionStats.nReturned, "D1");
+assert.eq(1, explain.executionStats.totalKeysExamined, "D2");
+assert(isIdhack(explain.queryPlanner.winningPlan), "D3");
// ID hack cannot be used with hint().
-t.ensureIndex( { _id : 1 , a : 1 } );
-var hintExplain = t.find( query ).hint( { _id : 1 , a : 1 } ).explain();
-print( "explain for hinted query = " + tojson( hintExplain ) );
-assert( !isIdhack(hintExplain.queryPlanner.winningPlan), "E1" );
+t.ensureIndex({_id: 1, a: 1});
+var hintExplain = t.find(query).hint({_id: 1, a: 1}).explain();
+print("explain for hinted query = " + tojson(hintExplain));
+assert(!isIdhack(hintExplain.queryPlanner.winningPlan), "E1");
// ID hack cannot be used with skip().
-var skipExplain = t.find( query ).skip(1).explain();
-print( "explain for skip query = " + tojson( skipExplain ) );
-assert( !isIdhack(skipExplain.queryPlanner.winningPlan), "F1" );
+var skipExplain = t.find(query).skip(1).explain();
+print("explain for skip query = " + tojson(skipExplain));
+assert(!isIdhack(skipExplain.queryPlanner.winningPlan), "F1");
// Covered query returning _id field only can be handled by ID hack.
-var coveredExplain = t.find( query, { _id : 1 } ).explain();
-print( "explain for covered query = " + tojson( coveredExplain ) );
-assert( isIdhack(coveredExplain.queryPlanner.winningPlan), "G1" );
+var coveredExplain = t.find(query, {_id: 1}).explain();
+print("explain for covered query = " + tojson(coveredExplain));
+assert(isIdhack(coveredExplain.queryPlanner.winningPlan), "G1");
// Check doc from covered ID hack query.
-assert.eq( { _id : { x: 2 } }, t.findOne( query, { _id : 1 } ), "G2" );
+assert.eq({_id: {x: 2}}, t.findOne(query, {_id: 1}), "G2");
//
// Non-covered projection for idhack.
//
t.drop();
-t.insert( { _id: 0, a: 0, b: [ { c: 1 }, { c: 2 } ] });
-t.insert( { _id: 1, a: 1, b: [ { c: 3 }, { c: 4 } ] });
+t.insert({_id: 0, a: 0, b: [{c: 1}, {c: 2}]});
+t.insert({_id: 1, a: 1, b: [{c: 3}, {c: 4}]});
// Simple inclusion.
-assert.eq( { _id: 1, a: 1 }, t.find( { _id: 1 }, { a: 1 } ).next() );
-assert.eq( { a: 1 }, t.find({ _id: 1 }, { _id: 0, a: 1 } ).next() );
-assert.eq( { _id: 0, a: 0 }, t.find( { _id: 0 }, { _id: 1, a: 1 } ).next() );
+assert.eq({_id: 1, a: 1}, t.find({_id: 1}, {a: 1}).next());
+assert.eq({a: 1}, t.find({_id: 1}, {_id: 0, a: 1}).next());
+assert.eq({_id: 0, a: 0}, t.find({_id: 0}, {_id: 1, a: 1}).next());
// Non-simple: exclusion.
-assert.eq( { _id: 1, a: 1 }, t.find( { _id: 1 }, { b: 0 } ).next() );
-assert.eq( { _id: 0, }, t.find( { _id: 0 }, { a: 0, b: 0 } ).next() );
+assert.eq({_id: 1, a: 1}, t.find({_id: 1}, {b: 0}).next());
+assert.eq(
+ {
+ _id: 0,
+ },
+ t.find({_id: 0}, {a: 0, b: 0}).next());
// Non-simple: dotted fields.
-assert.eq( { b: [ { c: 1 }, { c: 2 } ] }, t.find( { _id: 0 }, { _id: 0, "b.c": 1 } ).next() );
-assert.eq( { _id: 1 }, t.find( { _id: 1 }, { "foo.bar": 1 } ).next() );
+assert.eq({b: [{c: 1}, {c: 2}]}, t.find({_id: 0}, {_id: 0, "b.c": 1}).next());
+assert.eq({_id: 1}, t.find({_id: 1}, {"foo.bar": 1}).next());
// Non-simple: elemMatch projection.
-assert.eq( { _id: 1, b: [ { c: 4 } ] },
- t.find( { _id: 1 }, { b: { $elemMatch: { c: 4 } } } ).next() );
+assert.eq({_id: 1, b: [{c: 4}]}, t.find({_id: 1}, {b: {$elemMatch: {c: 4}}}).next());
// Non-simple: .returnKey().
-assert.eq( { _id: 1 }, t.find( { _id: 1 } ).returnKey().next() );
+assert.eq({_id: 1}, t.find({_id: 1}).returnKey().next());
// Non-simple: .returnKey() overrides other projections.
-assert.eq( { _id: 1 }, t.find( { _id: 1 }, { a: 1 } ).returnKey().next() );
+assert.eq({_id: 1}, t.find({_id: 1}, {a: 1}).returnKey().next());
diff --git a/jstests/core/in.js b/jstests/core/in.js
index da1313692e1..852f6bcbca4 100644
--- a/jstests/core/in.js
+++ b/jstests/core/in.js
@@ -2,23 +2,27 @@
t = db.in1;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
+t.save({a: 1});
+t.save({a: 2});
// $in must take an array as argument: SERVER-7445
-assert.throws( function() { return t.find( { a : { $in : { x : 1 } } } ).itcount(); } );
-assert.throws( function() { return t.find( { a : { $in : 1 } } ).itcount(); } );
+assert.throws(function() {
+ return t.find({a: {$in: {x: 1}}}).itcount();
+});
+assert.throws(function() {
+ return t.find({a: {$in: 1}}).itcount();
+});
-assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" );
-assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" );
-assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" );
+assert.eq(1, t.find({a: {$in: [1]}}).itcount(), "A");
+assert.eq(1, t.find({a: {$in: [2]}}).itcount(), "B");
+assert.eq(2, t.find({a: {$in: [1, 2]}}).itcount(), "C");
-t.ensureIndex( { a : 1 } );
+t.ensureIndex({a: 1});
-assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" );
-assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" );
-assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" );
+assert.eq(1, t.find({a: {$in: [1]}}).itcount(), "D");
+assert.eq(1, t.find({a: {$in: [2]}}).itcount(), "E");
+assert.eq(2, t.find({a: {$in: [1, 2]}}).itcount(), "F");
-assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" );
+assert.eq(0, t.find({a: {$in: []}}).itcount(), "G");
-assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "H" );
+assert.eq(1, t.find({a: {$gt: 1, $in: [2]}}).itcount(), "H");
diff --git a/jstests/core/in2.js b/jstests/core/in2.js
index 66b90daa25a..ddcee67ccd0 100644
--- a/jstests/core/in2.js
+++ b/jstests/core/in2.js
@@ -1,33 +1,30 @@
t = db.in2;
-function go( name , index ){
-
+function go(name, index) {
t.drop();
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 1 , b : 2 } );
- t.save( { a : 1 , b : 3 } );
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 2 , b : 2 } );
- t.save( { a : 3 , b : 3 } );
-
- t.save( { a : 1 , b : 1 } );
- t.save( { a : 2 , b : 1 } );
- t.save( { a : 3 , b : 1 } );
-
- if ( index )
- t.ensureIndex( index );
-
- assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" );
-
- assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" );
-}
-go( "no index" );
-go( "index on a" , { a : 1 } );
-go( "index on b" , { b : 1 } );
-go( "index on a&b" , { a : 1 , b : 1 } );
+ t.save({a: 1, b: 1});
+ t.save({a: 1, b: 2});
+ t.save({a: 1, b: 3});
+
+ t.save({a: 1, b: 1});
+ t.save({a: 2, b: 2});
+ t.save({a: 3, b: 3});
+
+ t.save({a: 1, b: 1});
+ t.save({a: 2, b: 1});
+ t.save({a: 3, b: 1});
+ if (index)
+ t.ensureIndex(index);
+
+ assert.eq(7, t.find({a: {$in: [1, 2]}}).count(), name + " A");
+
+ assert.eq(6, t.find({a: {$in: [1, 2]}, b: {$in: [1, 2]}}).count(), name + " B");
+}
+go("no index");
+go("index on a", {a: 1});
+go("index on b", {b: 1});
+go("index on a&b", {a: 1, b: 1});
diff --git a/jstests/core/in3.js b/jstests/core/in3.js
index 5e7e587629f..02680642939 100644
--- a/jstests/core/in3.js
+++ b/jstests/core/in3.js
@@ -1,23 +1,23 @@
// SERVER-2829 Test arrays matching themselves within a $in expression.
t = db.jstests_in8;
-t.drop();
+t.drop();
-t.save( {key: [1]} );
-t.save( {key: ['1']} );
-t.save( {key: [[2]]} );
+t.save({key: [1]});
+t.save({key: ['1']});
+t.save({key: [[2]]});
-function doTest() {
- assert.eq( 1, t.count( {key:[1]} ) );
- assert.eq( 1, t.count( {key:{$in:[[1]]}} ) );
- assert.eq( 1, t.count( {key:{$in:[[1]],$ne:[2]}} ) );
- assert.eq( 1, t.count( {key:{$in:[['1']],$type:2}} ) );
- assert.eq( 1, t.count( {key:['1']} ) );
- assert.eq( 1, t.count( {key:{$in:[['1']]}} ) );
- assert.eq( 1, t.count( {key:[2]} ) );
- assert.eq( 1, t.count( {key:{$in:[[2]]}} ) );
-}
+function doTest() {
+ assert.eq(1, t.count({key: [1]}));
+ assert.eq(1, t.count({key: {$in: [[1]]}}));
+ assert.eq(1, t.count({key: {$in: [[1]], $ne: [2]}}));
+ assert.eq(1, t.count({key: {$in: [['1']], $type: 2}}));
+ assert.eq(1, t.count({key: ['1']}));
+ assert.eq(1, t.count({key: {$in: [['1']]}}));
+ assert.eq(1, t.count({key: [2]}));
+ assert.eq(1, t.count({key: {$in: [[2]]}}));
+}
-doTest();
-t.ensureIndex( {key:1} );
+doTest();
+t.ensureIndex({key: 1});
doTest();
diff --git a/jstests/core/in4.js b/jstests/core/in4.js
index cbe28e2e2df..c2f47bf8ed4 100644
--- a/jstests/core/in4.js
+++ b/jstests/core/in4.js
@@ -1,24 +1,24 @@
// SERVER-2343 Test $in empty array matching.
t = db.jstests_in9;
-t.drop();
+t.drop();
function someData() {
t.remove({});
- t.save( {key: []} );
+ t.save({key: []});
}
function moreData() {
- someData();
- t.save( {key: [1]} );
- t.save( {key: ['1']} );
- t.save( {key: null} );
- t.save( {} );
+ someData();
+ t.save({key: [1]});
+ t.save({key: ['1']});
+ t.save({key: null});
+ t.save({});
}
function check() {
- assert.eq( 1, t.count( {key:[]} ) );
- assert.eq( 1, t.count( {key:{$in:[[]]}} ) );
+ assert.eq(1, t.count({key: []}));
+ assert.eq(1, t.count({key: {$in: [[]]}}));
}
function doTest() {
@@ -26,10 +26,10 @@ function doTest() {
check();
moreData();
check();
-}
+}
-doTest();
+doTest();
// SERVER-1943 not fixed yet
-t.ensureIndex( {key:1} );
+t.ensureIndex({key: 1});
doTest();
diff --git a/jstests/core/in5.js b/jstests/core/in5.js
index a966a6a187c..c56621c91f3 100644
--- a/jstests/core/in5.js
+++ b/jstests/core/in5.js
@@ -1,56 +1,58 @@
t = db.in5;
-function go( fn ){
+function go(fn) {
t.drop();
o = {};
- o[fn] = { a : 1 , b : 2 };
- t.insert( o );
+ o[fn] = {
+ a: 1,
+ b: 2
+ };
+ t.insert(o);
x = {};
- x[fn] = { a : 1 , b : 2 };
- assert.eq( 1 , t.find( x ).itcount() , "A1 - " + fn );
-
+ x[fn] = {
+ a: 1,
+ b: 2
+ };
+ assert.eq(1, t.find(x).itcount(), "A1 - " + fn);
y = {};
- y[fn] = { $in : [ { a : 1 , b : 2 } ] };
- assert.eq( 1 , t.find( y ).itcount() , "A2 - " + fn );
-
+ y[fn] = {
+ $in: [{a: 1, b: 2}]
+ };
+ assert.eq(1, t.find(y).itcount(), "A2 - " + fn);
z = {};
- z[fn+".a"] = 1;
- z[fn+".b"] = { $in : [ 2 ] };
- assert.eq( 1 , t.find( z ).itcount() , "A3 - " + fn ); // SERVER-1366
+ z[fn + ".a"] = 1;
+ z[fn + ".b"] = {
+ $in: [2]
+ };
+ assert.eq(1, t.find(z).itcount(), "A3 - " + fn); // SERVER-1366
-
i = {};
i[fn] = 1;
- t.ensureIndex( i );
+ t.ensureIndex(i);
+
+ assert.eq(1, t.find(x).itcount(), "B1 - " + fn);
+ assert.eq(1, t.find(y).itcount(), "B2 - " + fn);
+ assert.eq(1, t.find(z).itcount(), "B3 - " + fn); // SERVER-1366
- assert.eq( 1 , t.find( x ).itcount() , "B1 - " + fn );
- assert.eq( 1 , t.find( y ).itcount() , "B2 - " + fn );
- assert.eq( 1 , t.find( z ).itcount() , "B3 - " + fn ); // SERVER-1366
-
- t.dropIndex( i );
+ t.dropIndex(i);
- assert.eq( 1 , t.getIndexes().length , "T2" );
+ assert.eq(1, t.getIndexes().length, "T2");
i = {};
- i[fn + ".a" ] = 1;
- t.ensureIndex( i );
- assert.eq( 2 , t.getIndexes().length , "T3" );
+ i[fn + ".a"] = 1;
+ t.ensureIndex(i);
+ assert.eq(2, t.getIndexes().length, "T3");
- assert.eq( 1 , t.find( x ).itcount() , "C1 - " + fn );
- assert.eq( 1 , t.find( y ).itcount() , "C2 - " + fn );
- assert.eq( 1 , t.find( z ).itcount() , "C3 - " + fn ); // SERVER-1366
-
- t.dropIndex( i );
+ assert.eq(1, t.find(x).itcount(), "C1 - " + fn);
+ assert.eq(1, t.find(y).itcount(), "C2 - " + fn);
+ assert.eq(1, t.find(z).itcount(), "C3 - " + fn); // SERVER-1366
-
+ t.dropIndex(i);
}
-go( "x" );
-go( "_id" );
-
-
-
+go("x");
+go("_id");
diff --git a/jstests/core/in6.js b/jstests/core/in6.js
index f114d93442a..4ee06541b81 100644
--- a/jstests/core/in6.js
+++ b/jstests/core/in6.js
@@ -1,13 +1,13 @@
t = db.jstests_in6;
t.drop();
-t.save( {} );
+t.save({});
function doTest() {
- assert.eq.automsg( "1", "t.count( {i:null} )" );
- assert.eq.automsg( "1", "t.count( {i:{$in:[null]}} )" );
+ assert.eq.automsg("1", "t.count( {i:null} )");
+ assert.eq.automsg("1", "t.count( {i:{$in:[null]}} )");
}
doTest();
-t.ensureIndex( {i:1} );
+t.ensureIndex({i: 1});
doTest();
diff --git a/jstests/core/in7.js b/jstests/core/in7.js
index cf614ab994d..2f6c9e3ff1a 100644
--- a/jstests/core/in7.js
+++ b/jstests/core/in7.js
@@ -2,13 +2,21 @@
t = db.jstests_ina;
t.drop();
-t.save( {} );
+t.save({});
-assert.throws( function() { t.find( {a:{$in:[{$elemMatch:{b:1}}]}} ).itcount(); } );
-assert.throws( function() { t.find( {a:{$not:{$in:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
+assert.throws(function() {
+ t.find({a: {$in: [{$elemMatch: {b: 1}}]}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$not: {$in: [{$elemMatch: {b: 1}}]}}}).itcount();
+});
-assert.throws( function() { t.find( {a:{$nin:[{$elemMatch:{b:1}}]}} ).itcount(); } );
-assert.throws( function() { t.find( {a:{$not:{$nin:[{$elemMatch:{b:1}}]}}} ).itcount(); } );
+assert.throws(function() {
+ t.find({a: {$nin: [{$elemMatch: {b: 1}}]}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$not: {$nin: [{$elemMatch: {b: 1}}]}}}).itcount();
+});
// NOTE Above we don't check cases like {b:2,$elemMatch:{b:3,4}} - generally
// we assume that the first key is $elemMatch if any key is, and validating
diff --git a/jstests/core/in8.js b/jstests/core/in8.js
index be2a696f7c3..a6bdc520926 100644
--- a/jstests/core/in8.js
+++ b/jstests/core/in8.js
@@ -3,16 +3,16 @@
t = db.jstests_inb;
t.drop();
-function checkResults( query ) {
- assert.eq( 4, t.count( query ) );
- assert.eq( 4, t.find( query ).itcount() );
+function checkResults(query) {
+ assert.eq(4, t.count(query));
+ assert.eq(4, t.find(query).itcount());
}
-t.ensureIndex( {x:1} );
-t.save( {x:'aa'} );
-t.save( {x:'ab'} );
-t.save( {x:'ac'} );
-t.save( {x:'ad'} );
+t.ensureIndex({x: 1});
+t.save({x: 'aa'});
+t.save({x: 'ab'});
+t.save({x: 'ac'});
+t.save({x: 'ad'});
-checkResults( {x:{$in:[/^a/,/^ab/]}} );
-checkResults( {x:{$in:[/^ab/,/^a/]}} );
+checkResults({x: {$in: [/^a/, /^ab/]}});
+checkResults({x: {$in: [/^ab/, /^a/]}});
diff --git a/jstests/core/inc-SERVER-7446.js b/jstests/core/inc-SERVER-7446.js
index 6f365e1f5e2..2503df21f87 100644
--- a/jstests/core/inc-SERVER-7446.js
+++ b/jstests/core/inc-SERVER-7446.js
@@ -2,38 +2,44 @@ var c = db.incSERVER7446;
// A 32 bit overflow spills to 64 bits
c.drop();
-c.save( { a: NumberInt( "2147483647" ) } );
-var updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
+c.save({a: NumberInt("2147483647")});
+var updateResult = c.update({}, {$inc: {a: NumberInt(1)}});
assert.eq(1, updateResult.nMatched, "Object not modified");
var res = c.findOne();
-assert.eq(NumberLong, res.a.constructor,
+assert.eq(NumberLong,
+ res.a.constructor,
"NumberInt incremented beyond std::numeric_limits<in32_t>::max() not NumberLong");
-assert.eq(NumberLong("2147483648"), res.a,
+assert.eq(NumberLong("2147483648"),
+ res.a,
"NumberInt incremented beyond std::numeric_limits<in32_t>::max() has wrong value");
// A 32 bit underflow spills to 64 bits
c.drop();
-c.save( { a: NumberInt( "-2147483648" ) } );
-updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
+c.save({a: NumberInt("-2147483648")});
+updateResult = c.update({}, {$inc: {a: NumberInt(-1)}});
assert.eq(1, updateResult.nMatched, "Object not modified");
res = c.findOne();
-assert.eq(NumberLong, res.a.constructor,
+assert.eq(NumberLong,
+ res.a.constructor,
"NumberInt decremented beyond std::numeric_limits<in32_t>::min() not NumberLong");
-assert.eq(NumberLong("-2147483649"), res.a,
+assert.eq(NumberLong("-2147483649"),
+ res.a,
"NumberInt decremented beyond std::numeric_limits<in32_t>::min() has wrong value");
// A 64 bit overflow is an error
c.drop();
-c.save( { a: NumberLong( "9223372036854775807" ) } );
-updateResult = c.update( {}, { $inc:{ a:NumberInt( 1 ) } } );
-assert.eq(0, updateResult.nMatched,
- "Did not fail to increment a NumberLong past std::numeric_limits<int64_t>::max()");
+c.save({a: NumberLong("9223372036854775807")});
+updateResult = c.update({}, {$inc: {a: NumberInt(1)}});
+assert.eq(0,
+ updateResult.nMatched,
+ "Did not fail to increment a NumberLong past std::numeric_limits<int64_t>::max()");
// A 64 bit underflow is an error
c.drop();
-c.save( { a: NumberLong( "-9223372036854775808" ) } );
-updateResult = c.update( {}, { $inc:{ a:NumberInt( -1 ) } } );
-assert.eq(0, updateResult.nMatched,
- "Did not fail to decrement a NumberLong past std::numeric_limits<int64_t>::min()");
+c.save({a: NumberLong("-9223372036854775808")});
+updateResult = c.update({}, {$inc: {a: NumberInt(-1)}});
+assert.eq(0,
+ updateResult.nMatched,
+ "Did not fail to decrement a NumberLong past std::numeric_limits<int64_t>::min()");
c.drop();
diff --git a/jstests/core/inc1.js b/jstests/core/inc1.js
index 027f307a476..75192ebe013 100644
--- a/jstests/core/inc1.js
+++ b/jstests/core/inc1.js
@@ -2,31 +2,30 @@
t = db.inc1;
t.drop();
-function test( num , name ){
- assert.eq( 1 , t.count() , name + " count" );
- assert.eq( num , t.findOne().x , name + " value" );
+function test(num, name) {
+ assert.eq(1, t.count(), name + " count");
+ assert.eq(num, t.findOne().x, name + " value");
}
-t.save( { _id : 1 , x : 1 } );
-test( 1 , "A" );
+t.save({_id: 1, x: 1});
+test(1, "A");
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 2 , "B" );
+t.update({_id: 1}, {$inc: {x: 1}});
+test(2, "B");
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 3 , "C" );
+t.update({_id: 1}, {$inc: {x: 1}});
+test(3, "C");
-t.update( { _id : 2 } , { $inc : { x : 1 } } );
-test( 3 , "D" );
+t.update({_id: 2}, {$inc: {x: 1}});
+test(3, "D");
-t.update( { _id : 1 } , { $inc : { x : 2 } } );
-test( 5 , "E" );
+t.update({_id: 1}, {$inc: {x: 2}});
+test(5, "E");
-t.update( { _id : 1 } , { $inc : { x : -1 } } );
-test( 4 , "F" );
+t.update({_id: 1}, {$inc: {x: -1}});
+test(4, "F");
-t.ensureIndex( { x : 1 } );
-
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-test( 5 , "G" );
+t.ensureIndex({x: 1});
+t.update({_id: 1}, {$inc: {x: 1}});
+test(5, "G");
diff --git a/jstests/core/inc2.js b/jstests/core/inc2.js
index 3bcc5146f75..6ff4842e254 100644
--- a/jstests/core/inc2.js
+++ b/jstests/core/inc2.js
@@ -2,21 +2,23 @@
t = db.inc2;
t.drop();
-t.save( { _id : 1 , x : 1 } );
-t.save( { _id : 2 , x : 2 } );
-t.save( { _id : 3 , x : 3 } );
+t.save({_id: 1, x: 1});
+t.save({_id: 2, x: 2});
+t.save({_id: 3, x: 3});
-function order(){
- return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } );
+function order() {
+ return t.find().sort({x: 1}).map(function(z) {
+ return z._id;
+ });
}
-assert.eq( "1,2,3" , order() , "A" );
+assert.eq("1,2,3", order(), "A");
-t.update( { _id : 1 } , { $inc : { x : 4 } } );
-assert.eq( "2,3,1" , order() , "B" );
+t.update({_id: 1}, {$inc: {x: 4}});
+assert.eq("2,3,1", order(), "B");
-t.ensureIndex( { x : 1 } );
-assert.eq( "2,3,1" , order() , "C" );
+t.ensureIndex({x: 1});
+assert.eq("2,3,1", order(), "C");
-t.update( { _id : 3 } , { $inc : { x : 4 } } );
-assert.eq( "2,1,3" , order() , "D" );
+t.update({_id: 3}, {$inc: {x: 4}});
+assert.eq("2,1,3", order(), "D");
diff --git a/jstests/core/inc3.js b/jstests/core/inc3.js
index 6f10ad2b27b..b69a77c64cd 100644
--- a/jstests/core/inc3.js
+++ b/jstests/core/inc3.js
@@ -2,15 +2,13 @@
t = db.inc3;
t.drop();
-t.save( { _id : 1 , z : 1 , a : 1 } );
-t.update( {} , { $inc : { z : 1 , a : 1 } } );
-t.update( {} , { $inc : { a : 1 , z : 1 } } );
-assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" );
-
+t.save({_id: 1, z: 1, a: 1});
+t.update({}, {$inc: {z: 1, a: 1}});
+t.update({}, {$inc: {a: 1, z: 1}});
+assert.eq({_id: 1, z: 3, a: 3}, t.findOne(), "A");
t.drop();
-t.save( { _id : 1 , a : 1 , z : 1 } );
-t.update( {} , { $inc : { z : 1 , a : 1 } } );
-t.update( {} , { $inc : { a : 1 , z : 1 } } );
-assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" );
-
+t.save({_id: 1, a: 1, z: 1});
+t.update({}, {$inc: {z: 1, a: 1}});
+t.update({}, {$inc: {a: 1, z: 1}});
+assert.eq({_id: 1, a: 3, z: 3}, t.findOne(), "B");
diff --git a/jstests/core/index1.js b/jstests/core/index1.js
index 64bbfa8732b..1bcc23be135 100644
--- a/jstests/core/index1.js
+++ b/jstests/core/index1.js
@@ -1,24 +1,30 @@
t = db.embeddedIndexTest;
-t.remove( {} );
+t.remove({});
-o = { name : "foo" , z : { a : 17 , b : 4} };
-t.save( o );
+o = {
+ name: "foo",
+ z: {a: 17, b: 4}
+};
+t.save(o);
-assert( t.findOne().z.a == 17 );
-assert( t.findOne( { z : { a : 17 } } ) == null);
+assert(t.findOne().z.a == 17);
+assert(t.findOne({z: {a: 17}}) == null);
-t.ensureIndex( { "z.a" : 1 } );
+t.ensureIndex({"z.a": 1});
-assert( t.findOne().z.a == 17 );
-assert( t.findOne( { z : { a : 17 } } ) == null);
+assert(t.findOne().z.a == 17);
+assert(t.findOne({z: {a: 17}}) == null);
-o = { name : "bar" , z : { a : 18 } };
-t.save( o );
+o = {
+ name: "bar",
+ z: {a: 18}
+};
+t.save(o);
-assert.eq.automsg( "2", "t.find().length()" );
-assert.eq.automsg( "2", "t.find().sort( { 'z.a' : 1 } ).length()" );
-assert.eq.automsg( "2", "t.find().sort( { 'z.a' : -1 } ).length()" );
+assert.eq.automsg("2", "t.find().length()");
+assert.eq.automsg("2", "t.find().sort( { 'z.a' : 1 } ).length()");
+assert.eq.automsg("2", "t.find().sort( { 'z.a' : -1 } ).length()");
assert(t.validate().valid);
diff --git a/jstests/core/index13.js b/jstests/core/index13.js
index 21105166037..920061dd6f5 100644
--- a/jstests/core/index13.js
+++ b/jstests/core/index13.js
@@ -19,129 +19,151 @@
t = db.jstests_index13;
t.drop();
-function assertConsistentResults( query ) {
- assert.eq( t.find( query ).hint( { $natural:1 } ).sort( { _id:1 } ).toArray(),
- t.find( query ).hint( index ).sort( { _id:1 } ).toArray() );
+function assertConsistentResults(query) {
+ assert.eq(t.find(query).hint({$natural: 1}).sort({_id: 1}).toArray(),
+ t.find(query).hint(index).sort({_id: 1}).toArray());
}
-function assertResults( query ) {
- explain = t.find( query ).hint( index ).explain();
+function assertResults(query) {
+ explain = t.find(query).hint(index).explain();
// printjson( explain ); // debug
- assertConsistentResults( query );
+ assertConsistentResults(query);
}
// Cases with single dotted index field names.
-index = { 'a.b':1, 'a.c':1 };
-t.ensureIndex( index );
-t.save( { a:[ { b:1 }, { c:1 } ] } );
-t.save( { a:[ { b:1, c:1 } ] } );
-assert.eq( 2, t.count() );
+index = {
+ 'a.b': 1,
+ 'a.c': 1
+};
+t.ensureIndex(index);
+t.save({a: [{b: 1}, {c: 1}]});
+t.save({a: [{b: 1, c: 1}]});
+assert.eq(2, t.count());
// Without $elemMatch.
-assertResults( { 'a.b':1, 'a.c':1 } );
+assertResults({'a.b': 1, 'a.c': 1});
// With $elemMatch.
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } } } );
+assertResults({a: {$elemMatch: {b: 1, c: 1}}});
// Without shared $elemMatch.
-assertResults( { 'a.b':1, a:{ $elemMatch:{ c:1 } } } );
+assertResults({'a.b': 1, a: {$elemMatch: {c: 1}}});
// Two different $elemMatch expressions.
-assertResults( { $and:[ { a:{ $elemMatch:{ b:1 } } },
- { a:{ $elemMatch:{ c:1 } } } ] } );
-
+assertResults({$and: [{a: {$elemMatch: {b: 1}}}, {a: {$elemMatch: {c: 1}}}]});
// Cases relating to parse order and inclusion of intersected ranges.
-assertResults( { 'a.b':1, a:{ $elemMatch:{ b:{ $gt:0 }, c:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':1 } );
-assertResults( { 'a.c':1, a:{ $elemMatch:{ b:1, c:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'a.b':{ $gt:0 } } );
+assertResults({'a.b': 1, a: {$elemMatch: {b: {$gt: 0}, c: 1}}});
+assertResults({a: {$elemMatch: {b: 1, c: 1}}, 'a.b': 1});
+assertResults({'a.c': 1, a: {$elemMatch: {b: 1, c: 1}}});
+assertResults({a: {$elemMatch: {b: 1, c: 1}}, 'a.b': {$gt: 0}});
// Cases with $elemMatch on multiple fields.
t.remove({});
-index = { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 };
-t.ensureIndex( index );
-t.insert( { a:[ { b:1 }, { c:1 } ], d: { e:1, f:1 } } );
-t.insert( { a:[ { b:1, c:1 } ], d: { e:1, f:1 } } );
-t.insert( { a:{ b:1, c:1 }, d:[ { e:1, f:1 } ] } );
-t.insert( { a:{ b:1, c:1 }, d:[ { e:1 }, { f:1 } ] } );
-
-assert.eq( 4, t.count() );
+index = {
+ 'a.b': 1,
+ 'a.c': 1,
+ 'd.e': 1,
+ 'd.f': 1
+};
+t.ensureIndex(index);
+t.insert({a: [{b: 1}, {c: 1}], d: {e: 1, f: 1}});
+t.insert({a: [{b: 1, c: 1}], d: {e: 1, f: 1}});
+t.insert({a: {b: 1, c: 1}, d: [{e: 1, f: 1}]});
+t.insert({a: {b: 1, c: 1}, d: [{e: 1}, {f: 1}]});
+
+assert.eq(4, t.count());
// Without $elemMatch.
-assertResults( { 'a.b':1, 'a.c':1, 'd.e':1, 'd.f':1 } );
+assertResults({'a.b': 1, 'a.c': 1, 'd.e': 1, 'd.f': 1});
// With $elemMatch.
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd': { $elemMatch:{ e:1, f:1 } } } );
-assertResults( { a:{ $elemMatch:{ b:1, c:1 } }, 'd.e': 1, 'd.f' : 1 } );
-assertResults( { 'a.b': 1, 'a.c' : 1, 'd': { $elemMatch:{ e:1, f:1 } } } );
-
+assertResults({a: {$elemMatch: {b: 1, c: 1}}, 'd': {$elemMatch: {e: 1, f: 1}}});
+assertResults({a: {$elemMatch: {b: 1, c: 1}}, 'd.e': 1, 'd.f': 1});
+assertResults({'a.b': 1, 'a.c': 1, 'd': {$elemMatch: {e: 1, f: 1}}});
// Cases with nested $elemMatch.
t.remove({});
-index = { 'a.b.c':1, 'a.b.d' :1 };
-t.ensureIndex( index );
-t.insert( { a:[ { b: [ { c : 1, d : 1 } ] } ] } ) ;
-t.insert( { a:[ { b: [ { c : 1 } , { d : 1 } ] } ] } ) ;
-assert.eq( 2, t.count() );
+index = {
+ 'a.b.c': 1,
+ 'a.b.d': 1
+};
+t.ensureIndex(index);
+t.insert({a: [{b: [{c: 1, d: 1}]}]});
+t.insert({a: [{b: [{c: 1}, {d: 1}]}]});
+assert.eq(2, t.count());
// Without $elemMatch.
-assertResults( { 'a.b.c':1, 'a.b.d':1 } );
+assertResults({'a.b.c': 1, 'a.b.d': 1});
// With $elemMatch.
-assertResults( { "a" : { $elemMatch : { "b" : { $elemMatch : { c : 1, d : 1 } } } } } );
+assertResults({"a": {$elemMatch: {"b": {$elemMatch: {c: 1, d: 1}}}}});
// Cases with double dotted index field names.
t.drop();
-index = { 'a.b.x':1, 'a.b.y':1 };
-t.ensureIndex( index );
-t.save( { a:{ b:{ x:1, y:1 } } } );
-t.save( { a:[ { b:{ x:1 } }, { b:{ y:1 } } ] } );
-t.save( { a:[ { b:[ { x:1 }, { y:1 } ] } ] } );
-t.save( { a:[ { b:[ { x:1, y:1 } ] } ] } );
-assert.eq( 4, t.count() );
+index = {
+ 'a.b.x': 1,
+ 'a.b.y': 1
+};
+t.ensureIndex(index);
+t.save({a: {b: {x: 1, y: 1}}});
+t.save({a: [{b: {x: 1}}, {b: {y: 1}}]});
+t.save({a: [{b: [{x: 1}, {y: 1}]}]});
+t.save({a: [{b: [{x: 1, y: 1}]}]});
+assert.eq(4, t.count());
// No $elemMatch.
-assertResults( { 'a.b.x':1, 'a.b.y':1 } );
+assertResults({'a.b.x': 1, 'a.b.y': 1});
// $elemMatch with dotted children.
-assertResults( { a:{ $elemMatch:{ 'b.x':1, 'b.y':1 } } } );
+assertResults({a: {$elemMatch: {'b.x': 1, 'b.y': 1}}});
// $elemMatch with undotted children.
-assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+assertResults({'a.b': {$elemMatch: {x: 1, y: 1}}});
// Cases where a field is indexed along with its children.
t.dropIndexes();
-index = { 'a':1, 'a.b.x':1, 'a.b.y':1 };
-t.ensureIndex( index );
+index = {
+ 'a': 1,
+ 'a.b.x': 1,
+ 'a.b.y': 1
+};
+t.ensureIndex(index);
// With $ne.
-assertResults( { a:{ $ne:4 }, 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+assertResults({a: {$ne: 4}, 'a.b': {$elemMatch: {x: 1, y: 1}}});
// No constraint on a prior parent field.
-assertResults( { 'a.b':{ $elemMatch:{ x:1, y:1 } } } );
+assertResults({'a.b': {$elemMatch: {x: 1, y: 1}}});
// Cases with double dotted index field names branching to different fields at each dot.
t.drop();
-index = { 'a.b.c':1, 'a.e.f':1, 'a.b.d':1, 'a.e.g':1 };
-t.ensureIndex( index );
-t.save( { a:{ b:{ c:1, d:1 }, e:{ f:1, g:1 } } } );
-t.save( { a:[ { b:{ c:1 }, e:{ f:1 } }, { b:{ d:1 }, e:{ g:1 } } ] } );
-t.save( { a:[ { b:{ c:1 } }, { e:{ f:1 } }, { b:{ d:1 } }, { e:{ g:1 } } ] } );
-t.save( { a:[ { b:[ { c:1 }, { d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
-t.save( { a:[ { b:[ { c:[ 1 ] }, { d:[ 1 ] } ] }, { e:[ { f:[ 1 ] }, { g:[ 1 ] } ] } ] } );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1 }, { g:1 } ] } ] } );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { f:1, g:1 } ] } ] } );
-assert.eq( 7, t.count() );
+index = {
+ 'a.b.c': 1,
+ 'a.e.f': 1,
+ 'a.b.d': 1,
+ 'a.e.g': 1
+};
+t.ensureIndex(index);
+t.save({a: {b: {c: 1, d: 1}, e: {f: 1, g: 1}}});
+t.save({a: [{b: {c: 1}, e: {f: 1}}, {b: {d: 1}, e: {g: 1}}]});
+t.save({a: [{b: {c: 1}}, {e: {f: 1}}, {b: {d: 1}}, {e: {g: 1}}]});
+t.save({a: [{b: [{c: 1}, {d: 1}]}, {e: [{f: 1}, {g: 1}]}]});
+t.save({a: [{b: [{c: [1]}, {d: [1]}]}, {e: [{f: [1]}, {g: [1]}]}]});
+t.save({a: [{b: [{c: 1, d: 1}]}, {e: [{f: 1}, {g: 1}]}]});
+t.save({a: [{b: [{c: 1, d: 1}]}, {e: [{f: 1, g: 1}]}]});
+assert.eq(7, t.count());
// Constraint on a prior cousin field.
-assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } },
- 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
+assertResults({'a.b': {$elemMatch: {c: 1, d: 1}}, 'a.e': {$elemMatch: {f: 1, g: 1}}});
// Different constraint on a prior cousin field.
-assertResults( { 'a.b':{ $elemMatch:{ d:1 } },
- 'a.e':{ $elemMatch:{ f:1, g:1 } } } );
-
+assertResults({'a.b': {$elemMatch: {d: 1}}, 'a.e': {$elemMatch: {f: 1, g: 1}}});
// Cases with double dotted index field names branching to different fields at each dot, and the
// same field name strings after the second dot.
t.drop();
-index = { 'a.b.c':1, 'a.e.c':1, 'a.b.d':1, 'a.e.d':1 };
-t.ensureIndex( index );
-t.save( { a:[ { b:[ { c:1, d:1 } ] }, { e:[ { c:1, d:1 } ] } ] } );
-assert.eq( 1, t.count() );
+index = {
+ 'a.b.c': 1,
+ 'a.e.c': 1,
+ 'a.b.d': 1,
+ 'a.e.d': 1
+};
+t.ensureIndex(index);
+t.save({a: [{b: [{c: 1, d: 1}]}, {e: [{c: 1, d: 1}]}]});
+assert.eq(1, t.count());
// Constraint on a prior cousin field with the same field names.
-assertResults( { 'a.b':{ $elemMatch:{ c:1, d:1 } }, 'a.e':{ $elemMatch:{ c:1, d:1 } } } );
+assertResults({'a.b': {$elemMatch: {c: 1, d: 1}}, 'a.e': {$elemMatch: {c: 1, d: 1}}});
diff --git a/jstests/core/index2.js b/jstests/core/index2.js
index b54abcaa792..11ef4e68caa 100644
--- a/jstests/core/index2.js
+++ b/jstests/core/index2.js
@@ -4,37 +4,49 @@
t = db.embeddedIndexTest2;
t.drop();
-assert( t.findOne() == null );
-
-o = { name : "foo" , z : { a : 17 } };
-p = { name : "foo" , z : { a : 17 } };
-q = { name : "barrr" , z : { a : 18 } };
-r = { name : "barrr" , z : { k : "zzz", L:[1,2] } };
-
-t.save( o );
-
-assert( t.findOne().z.a == 17 );
-
-t.save( p );
-t.save( q );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-t.save( r );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-t.ensureIndex( { z : 1 } );
-
-assert( t.findOne({z:{a:17}}).z.a==17 );
-assert( t.find({z:{a:17}}).length() == 2 );
-assert( t.find({z:{a:18}}).length() == 1 );
-
-assert( t.find().sort( { z : 1 } ).length() == 4 );
-assert( t.find().sort( { z : -1 } ).length() == 4 );
+assert(t.findOne() == null);
+
+o = {
+ name: "foo",
+ z: {a: 17}
+};
+p = {
+ name: "foo",
+ z: {a: 17}
+};
+q = {
+ name: "barrr",
+ z: {a: 18}
+};
+r = {
+ name: "barrr",
+ z: {k: "zzz", L: [1, 2]}
+};
+
+t.save(o);
+
+assert(t.findOne().z.a == 17);
+
+t.save(p);
+t.save(q);
+
+assert(t.findOne({z: {a: 17}}).z.a == 17);
+assert(t.find({z: {a: 17}}).length() == 2);
+assert(t.find({z: {a: 18}}).length() == 1);
+
+t.save(r);
+
+assert(t.findOne({z: {a: 17}}).z.a == 17);
+assert(t.find({z: {a: 17}}).length() == 2);
+assert(t.find({z: {a: 18}}).length() == 1);
+
+t.ensureIndex({z: 1});
+
+assert(t.findOne({z: {a: 17}}).z.a == 17);
+assert(t.find({z: {a: 17}}).length() == 2);
+assert(t.find({z: {a: 18}}).length() == 1);
+
+assert(t.find().sort({z: 1}).length() == 4);
+assert(t.find().sort({z: -1}).length() == 4);
assert(t.validate().valid);
diff --git a/jstests/core/index3.js b/jstests/core/index3.js
index 80139460cb4..e908f1fe2c9 100644
--- a/jstests/core/index3.js
+++ b/jstests/core/index3.js
@@ -3,14 +3,14 @@
t = db.index3;
t.drop();
-assert( t.getIndexes().length == 0 );
+assert(t.getIndexes().length == 0);
-t.ensureIndex( { name : 1 } );
+t.ensureIndex({name: 1});
-t.save( { name : "a" } );
+t.save({name: "a"});
-t.ensureIndex( { name : 1 } );
+t.ensureIndex({name: 1});
-assert( t.getIndexes().length == 2 );
+assert(t.getIndexes().length == 2);
assert(t.validate().valid);
diff --git a/jstests/core/index4.js b/jstests/core/index4.js
index ee8c59fa37c..1c96ded434a 100644
--- a/jstests/core/index4.js
+++ b/jstests/core/index4.js
@@ -1,33 +1,22 @@
// index4.js
-
t = db.index4;
t.drop();
-t.save( { name : "alleyinsider" ,
- instances : [
- { pool : "prod1" } ,
- { pool : "dev1" }
- ]
- } );
-
-t.save( { name : "clusterstock" ,
- instances : [
- { pool : "dev1" }
- ]
- } );
+t.save({name: "alleyinsider", instances: [{pool: "prod1"}, {pool: "dev1"}]});
+t.save({name: "clusterstock", instances: [{pool: "dev1"}]});
// this should fail, not allowed -- we confirm that.
-t.ensureIndex( { instances : { pool : 1 } } );
-assert.eq( 1, t.getIndexes().length, "no indexes other than _id should be here yet");
+t.ensureIndex({instances: {pool: 1}});
+assert.eq(1, t.getIndexes().length, "no indexes other than _id should be here yet");
-t.ensureIndex( { "instances.pool" : 1 } );
+t.ensureIndex({"instances.pool": 1});
-sleep( 10 );
+sleep(10);
-a = t.find( { instances : { pool : "prod1" } } );
-assert( a.length() == 1, "len1" );
-assert( a[0].name == "alleyinsider", "alley" );
+a = t.find({instances: {pool: "prod1"}});
+assert(a.length() == 1, "len1");
+assert(a[0].name == "alleyinsider", "alley");
-assert(t.validate().valid, "valid" );
+assert(t.validate().valid, "valid");
diff --git a/jstests/core/index5.js b/jstests/core/index5.js
index 841ac12ed45..11cfa1882e4 100644
--- a/jstests/core/index5.js
+++ b/jstests/core/index5.js
@@ -1,24 +1,24 @@
// index5.js - test reverse direction index
function validate() {
- assert.eq( 2, t.find().count() );
- f = t.find().sort( { a: 1 } );
- assert.eq( 2, t.count() );
- assert.eq( 1, f[ 0 ].a );
- assert.eq( 2, f[ 1 ].a );
- r = t.find().sort( { a: -1 } );
- assert.eq( 2, r.count() );
- assert.eq( 2, r[ 0 ].a );
- assert.eq( 1, r[ 1 ].a );
+ assert.eq(2, t.find().count());
+ f = t.find().sort({a: 1});
+ assert.eq(2, t.count());
+ assert.eq(1, f[0].a);
+ assert.eq(2, f[1].a);
+ r = t.find().sort({a: -1});
+ assert.eq(2, r.count());
+ assert.eq(2, r[0].a);
+ assert.eq(1, r[1].a);
}
t = db.index5;
t.drop();
-t.save( { a: 1 } );
-t.save( { a: 2 } );
+t.save({a: 1});
+t.save({a: 2});
validate();
-t.ensureIndex( { a: -1 } );
+t.ensureIndex({a: -1});
validate();
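
The point of running validate() both before and after ensureIndex({a: -1}) is that a single-field index can be walked in either direction, so ascending and descending sorts stay correct and index-backed. A short sketch of the same point (collection name is illustrative):

var c = db.index5_example;  // illustrative collection name
c.drop();
c.save({a: 1});
c.save({a: 2});
c.ensureIndex({a: -1});
// One single-field index serves both sort directions.
assert.eq(1, c.find().sort({a: 1}).toArray()[0].a);
assert.eq(2, c.find().sort({a: -1}).toArray()[0].a);
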
diff --git a/jstests/core/index6.js b/jstests/core/index6.js
index 8dbd8f74fcf..9adef9bf366 100644
--- a/jstests/core/index6.js
+++ b/jstests/core/index6.js
@@ -3,6 +3,6 @@
r = db.ed.db.index6;
r.drop();
-r.save( { comments : [ { name : "eliot", foo : 1 } ] } );
-r.ensureIndex( { "comments.name": 1 } );
-assert( r.findOne( { "comments.name": "eliot" } ) );
+r.save({comments: [{name: "eliot", foo: 1}]});
+r.ensureIndex({"comments.name": 1});
+assert(r.findOne({"comments.name": "eliot"}));
diff --git a/jstests/core/index8.js b/jstests/core/index8.js
index 7b41da5ce50..6773f2d29f5 100644
--- a/jstests/core/index8.js
+++ b/jstests/core/index8.js
@@ -3,60 +3,59 @@
t = db.jstests_index8;
t.drop();
-t.ensureIndex( { a: 1 } );
-t.ensureIndex( { b: 1 }, true );
-t.ensureIndex( { c: 1 }, [ false, "cIndex" ] );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1}, true);
+t.ensureIndex({c: 1}, [false, "cIndex"]);
-checkIndexes = function( num ) {
+checkIndexes = function(num) {
var indexes = t.getIndexes();
- assert.eq( 4, indexes.length );
+ assert.eq(4, indexes.length);
var start = 0;
- if ( indexes[0].name == "_id_" )
+ if (indexes[0].name == "_id_")
start = 1;
- assert( !indexes[ start ].unique , "A" + num );
- assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) );
- assert( !indexes[ start + 2 ].unique , "C" + num );
- assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num );
+ assert(!indexes[start].unique, "A" + num);
+ assert(indexes[start + 1].unique, "B" + num + " " + tojson(indexes[start + 1]));
+ assert(!indexes[start + 2].unique, "C" + num);
+ assert.eq("cIndex", indexes[start + 2].name, "D" + num);
};
-checkIndexes( 1 );
+checkIndexes(1);
t.reIndex();
-checkIndexes( 2 );
+checkIndexes(2);
-t.save( { a: 2, b: 1 } );
-t.save( { a: 2 } );
-assert.eq( 2, t.find().count() );
+t.save({a: 2, b: 1});
+t.save({a: 2});
+assert.eq(2, t.find().count());
-t.save( { b: 4 } );
-t.save( { b: 4 } );
-assert.eq( 3, t.find().count() );
-assert.eq( 3, t.find().hint( {c:1} ).toArray().length );
-assert.eq( 3, t.find().hint( {b:1} ).toArray().length );
-assert.eq( 3, t.find().hint( {a:1} ).toArray().length );
+t.save({b: 4});
+t.save({b: 4});
+assert.eq(3, t.find().count());
+assert.eq(3, t.find().hint({c: 1}).toArray().length);
+assert.eq(3, t.find().hint({b: 1}).toArray().length);
+assert.eq(3, t.find().hint({a: 1}).toArray().length);
t.drop();
-t.ensureIndex( { a: 1, b: -1 }, true );
-t.save( { a: 2, b: 3 } );
-t.save( { a: 2, b: 3 } );
-t.save( { a: 2, b: 4 } );
-t.save( { a: 1, b: 3 } );
-assert.eq( 3, t.find().count() );
+t.ensureIndex({a: 1, b: -1}, true);
+t.save({a: 2, b: 3});
+t.save({a: 2, b: 3});
+t.save({a: 2, b: 4});
+t.save({a: 1, b: 3});
+assert.eq(3, t.find().count());
t.drop();
-t.ensureIndex( { a: 1 }, true );
-t.save( { a: [ 2, 3 ] } );
-t.save( { a: 2 } );
-assert.eq( 1, t.find().count() );
+t.ensureIndex({a: 1}, true);
+t.save({a: [2, 3]});
+t.save({a: 2});
+assert.eq(1, t.find().count());
t.drop();
-t.ensureIndex( { a: 1 }, true );
-t.save( { a: 2 } );
-t.save( { a: [ 1, 2, 3 ] } );
-t.save( { a: [ 3, 2, 1 ] } );
-assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length );
-assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length );
-
-assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" );
-assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" );
-
+t.ensureIndex({a: 1}, true);
+t.save({a: 2});
+t.save({a: [1, 2, 3]});
+t.save({a: [3, 2, 1]});
+assert.eq(1, t.find().sort({a: 1}).hint({a: 1}).toArray().length);
+assert.eq(1, t.find().sort({a: -1}).hint({a: 1}).toArray().length);
+
+assert.eq(t._indexSpec({x: 1}, true), t._indexSpec({x: 1}, [true]), "spec 1");
+assert.eq(t._indexSpec({x: 1}, "eliot"), t._indexSpec({x: 1}, ["eliot"]), "spec 2");
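
The unique-index block above depends on how arrays expand into index keys: each element gets its own key, uniqueness is enforced across documents, and repeated values inside one document do not conflict. A hedged sketch (collection name is illustrative):

var c = db.index8_example;  // illustrative collection name
c.drop();
c.ensureIndex({a: 1}, {unique: true});
assert.writeOK(c.insert({a: [2, 3]}));   // one document produces index keys 2 and 3
assert.writeError(c.insert({a: 2}));     // key 2 already exists for another document
assert.writeOK(c.insert({a: [4, 4]}));   // repeated values inside one document do not conflict
assert.eq(2, c.count());
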
diff --git a/jstests/core/index9.js b/jstests/core/index9.js
index 8fee4a35ca0..5f31dc978aa 100644
--- a/jstests/core/index9.js
+++ b/jstests/core/index9.js
@@ -1,25 +1,25 @@
t = db.jstests_index9;
t.drop();
-db.createCollection( "jstests_index9" );
-assert.eq( 1, t.getIndexes().length, "There should be 1 index with default collection" );
+db.createCollection("jstests_index9");
+assert.eq(1, t.getIndexes().length, "There should be 1 index with default collection");
t.drop();
-db.createCollection( "jstests_index9", {autoIndexId: true} );
-assert.eq( 1, t.getIndexes().length, "There should be 1 index if autoIndexId: true" );
+db.createCollection("jstests_index9", {autoIndexId: true});
+assert.eq(1, t.getIndexes().length, "There should be 1 index if autoIndexId: true");
t.drop();
-db.createCollection( "jstests_index9", {autoIndexId:false} );
-assert.eq( 0, t.getIndexes().length, "There should be 0 index if autoIndexId: false" );
-t.createIndex( { _id:1 } );
-assert.eq( 1, t.getIndexes().length );
-t.createIndex( { _id:1 } );
-assert.eq( 1, t.getIndexes().length );
+db.createCollection("jstests_index9", {autoIndexId: false});
+assert.eq(0, t.getIndexes().length, "There should be 0 index if autoIndexId: false");
+t.createIndex({_id: 1});
+assert.eq(1, t.getIndexes().length);
+t.createIndex({_id: 1});
+assert.eq(1, t.getIndexes().length);
t.drop();
-t.createIndex( { _id:1 } );
-assert.eq( 1, t.getIndexes().length );
+t.createIndex({_id: 1});
+assert.eq(1, t.getIndexes().length);
t.drop();
-t.save( {a:1} );
-t.createIndex( { _id:1 } );
-assert.eq( 1, t.getIndexes().length );
+t.save({a: 1});
+t.createIndex({_id: 1});
+assert.eq(1, t.getIndexes().length);
diff --git a/jstests/core/indexOtherNamespace.js b/jstests/core/indexOtherNamespace.js
index a94cff5d51b..f5919f721e8 100644
--- a/jstests/core/indexOtherNamespace.js
+++ b/jstests/core/indexOtherNamespace.js
@@ -6,14 +6,14 @@ load("jstests/libs/analyze_plan.js");
var otherDB = db.getSiblingDB("indexOtherNS");
otherDB.dropDatabase();
-otherDB.foo.insert({a:1});
+otherDB.foo.insert({a: 1});
assert.eq(1, otherDB.foo.getIndexes().length);
-assert(isCollscan(otherDB.foo.find({a:1}).explain().queryPlanner.winningPlan));
+assert(isCollscan(otherDB.foo.find({a: 1}).explain().queryPlanner.winningPlan));
-assert.writeError(otherDB.randomNS.system.indexes.insert({ ns: "indexOtherNS.foo",
- key: { a: 1 }, name: "a_1"}));
+assert.writeError(
+ otherDB.randomNS.system.indexes.insert({ns: "indexOtherNS.foo", key: {a: 1}, name: "a_1"}));
// Assert that index didn't actually get built
assert.eq(1, otherDB.foo.getIndexes().length);
-assert(isCollscan(otherDB.foo.find({a:1}).explain().queryPlanner.winningPlan));
+assert(isCollscan(otherDB.foo.find({a: 1}).explain().queryPlanner.winningPlan));
otherDB.dropDatabase();
diff --git a/jstests/core/index_arr1.js b/jstests/core/index_arr1.js
index 0878e19aa22..ba821bd3730 100644
--- a/jstests/core/index_arr1.js
+++ b/jstests/core/index_arr1.js
@@ -1,17 +1,17 @@
t = db.index_arr1;
t.drop();
-t.insert( { _id : 1 , a : 5 , b : [ { x : 1 } ] } );
-t.insert( { _id : 2 , a : 5 , b : [] } );
-t.insert( { _id : 3 , a : 5 } );
+t.insert({_id: 1, a: 5, b: [{x: 1}]});
+t.insert({_id: 2, a: 5, b: []});
+t.insert({_id: 3, a: 5});
-assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A1" );
+assert.eq(3, t.find({a: 5}).itcount(), "A1");
-t.ensureIndex( { a : 1 , "b.x" : 1 } );
+t.ensureIndex({a: 1, "b.x": 1});
-assert.eq( 3 , t.find( { a : 5 } ).itcount() , "A2" ); // SERVER-1082
+assert.eq(3, t.find({a: 5}).itcount(), "A2"); // SERVER-1082
-assert.eq( 2 , t.getIndexes().length , "B1" );
-t.insert( { _id : 4 , a : 5 , b : [] } );
-t.ensureIndex( { a : 1 , "b.a" : 1 , "b.c" : 1 } );
-assert.eq( 3 , t.getIndexes().length , "B2" );
+assert.eq(2, t.getIndexes().length, "B1");
+t.insert({_id: 4, a: 5, b: []});
+t.ensureIndex({a: 1, "b.a": 1, "b.c": 1});
+assert.eq(3, t.getIndexes().length, "B2");
diff --git a/jstests/core/index_arr2.js b/jstests/core/index_arr2.js
index 78c480719dc..952be73ff13 100644
--- a/jstests/core/index_arr2.js
+++ b/jstests/core/index_arr2.js
@@ -3,49 +3,45 @@ M = 5;
t = db.jstests_arr2;
-function test( withIndex ){
+function test(withIndex) {
t.drop();
-
+
// insert a bunch of items to force queries to use the index.
newObject = {
- _id : 1,
- a : [
- { b : { c : 1 } }
- ]
+ _id: 1,
+ a: [{b: {c: 1}}]
};
-
+
now = (new Date()).getTime() / 1000;
- for (created = now - NUM; created <= now; created++ ) {
+ for (created = now - NUM; created <= now; created++) {
newObject['created'] = created;
t.insert(newObject);
- newObject['_id'] ++;
+ newObject['_id']++;
}
-
+
// change the last M items.
query = {
- 'created' : { '$gte' : now - M }
+ 'created': {'$gte': now - M}
};
-
- Z = t.find( query ).count();
-
- if ( withIndex ){
- //t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } )
- //t.ensureIndex( { created : -1 } )
- t.ensureIndex( { 'a.b.c' : 1 } , { name : "x" } );
+
+ Z = t.find(query).count();
+
+ if (withIndex) {
+ // t.ensureIndex( { 'a.b.c' : 1, 'created' : -1 } )
+ // t.ensureIndex( { created : -1 } )
+ t.ensureIndex({'a.b.c': 1}, {name: "x"});
}
-
- var res = t.update(query, { '$set' : { "a.0.b.c" : 0 } } , false , true );
- assert.eq( Z, res.nMatched, "num updated withIndex:" + withIndex );
-
+
+ var res = t.update(query, {'$set': {"a.0.b.c": 0}}, false, true);
+ assert.eq(Z, res.nMatched, "num updated withIndex:" + withIndex);
+
// now see how many were actually updated.
query['a.b.c'] = 0;
-
+
count = t.count(query);
- assert.eq( Z , count , "count after withIndex:" + withIndex );
+ assert.eq(Z, count, "count after withIndex:" + withIndex);
}
-test( false );
-test( true );
-
-
+test(false);
+test(true);
diff --git a/jstests/core/index_big1.js b/jstests/core/index_big1.js
index eb4df5d5100..8f600fa37bc 100644
--- a/jstests/core/index_big1.js
+++ b/jstests/core/index_big1.js
@@ -7,32 +7,33 @@ t.drop();
var s = "";
-t.ensureIndex( { a : 1 , x : 1 } );
+t.ensureIndex({a: 1, x: 1});
var bulk = t.initializeUnorderedBulkOp();
-for ( i=0; i<N; i++ ) {
- bulk.insert( { a : i + .5 , x : s } );
+for (i = 0; i < N; i++) {
+ bulk.insert({a: i + .5, x: s});
s += "x";
}
-assert.throws( function() { bulk.execute(); } );
+assert.throws(function() {
+ bulk.execute();
+});
-assert.eq( 2 , t.getIndexes().length );
+assert.eq(2, t.getIndexes().length);
flip = -1;
-for ( i=0; i<N; i++ ) {
- var c = t.find( { a : i + .5 } ).count();
- if ( c == 1 ) {
- assert.eq( -1 , flip , "flipping : " + i );
- }
- else {
- if ( flip == -1 ) {
+for (i = 0; i < N; i++) {
+ var c = t.find({a: i + .5}).count();
+ if (c == 1) {
+ assert.eq(-1, flip, "flipping : " + i);
+ } else {
+ if (flip == -1) {
flip = i;
}
}
}
-//print(flip);
-//print(flip/1024);
+// print(flip);
+// print(flip/1024);
-assert.eq( /*v0 index : 797*/1002, flip , "flip changed" );
+assert.eq(/*v0 index : 797*/ 1002, flip, "flip changed");
diff --git a/jstests/core/index_bigkeys.js b/jstests/core/index_bigkeys.js
index 4b692ce1b19..564ddde2a5e 100755..100644
--- a/jstests/core/index_bigkeys.js
+++ b/jstests/core/index_bigkeys.js
@@ -5,20 +5,19 @@ var keys = [];
var str = "aaaabbbbccccddddeeeeffffgggghhhh";
-while ( str.length < 20000 ) {
- keys.push( str );
+while (str.length < 20000) {
+ keys.push(str);
str = str + str;
}
-function doInsert( order ) {
+function doInsert(order) {
if (order == 1) {
for (var i = 0; i < 10; i++) {
- t.insert({ _id: i, k: keys[i] });
+ t.insert({_id: i, k: keys[i]});
}
- }
- else {
+ } else {
for (var i = 9; i >= 0; i--) {
- t.insert({ _id: i, k: keys[i] });
+ t.insert({_id: i, k: keys[i]});
}
}
}
@@ -27,33 +26,33 @@ var expect = null;
function check() {
assert(t.validate().valid);
- assert.eq( 5, t.count() );
+ assert.eq(5, t.count());
- var c = t.find({ k: /^a/ }).count();
- assert.eq( 5, c );
+ var c = t.find({k: /^a/}).count();
+ assert.eq(5, c);
}
-function runTest( order ) {
+function runTest(order) {
t.drop();
- t.ensureIndex({ k: 1 });
- doInsert( order );
- check(); // check incremental addition
+ t.ensureIndex({k: 1});
+ doInsert(order);
+ check(); // check incremental addition
t.reIndex();
- check(); // check bottom up
+ check(); // check bottom up
t.drop();
- doInsert( order );
- assert.eq( 1, t.getIndexes().length );
- t.ensureIndex({ k: 1 });
- assert.eq( 1, t.getIndexes().length );
+ doInsert(order);
+ assert.eq(1, t.getIndexes().length);
+ t.ensureIndex({k: 1});
+ assert.eq(1, t.getIndexes().length);
t.drop();
- doInsert( order );
- assert.eq( 1, t.getIndexes().length );
- t.ensureIndex({ k: 1 }, { background: true });
- assert.eq( 1, t.getIndexes().length );
+ doInsert(order);
+ assert.eq(1, t.getIndexes().length);
+ t.ensureIndex({k: 1}, {background: true});
+ assert.eq(1, t.getIndexes().length);
}
-runTest( 1 );
-runTest( 2 );
+runTest(1);
+runTest(2);
diff --git a/jstests/core/index_bigkeys_nofail.js b/jstests/core/index_bigkeys_nofail.js
index 417470d7f04..10d8a0791c0 100644
--- a/jstests/core/index_bigkeys_nofail.js
+++ b/jstests/core/index_bigkeys_nofail.js
@@ -2,50 +2,50 @@
(function() {
"use strict";
- var t=db.index_bigkeys_nofail;
+ var t = db.index_bigkeys_nofail;
t.drop();
- var res=db.getSiblingDB('admin').runCommand( { setParameter: 1, failIndexKeyTooLong: true } );
- var was=res.was;
+ var res = db.getSiblingDB('admin').runCommand({setParameter: 1, failIndexKeyTooLong: true});
+ var was = res.was;
assert.commandWorked(res);
var x = new Array(1025).join('x');
- assert.commandWorked(t.ensureIndex({name:1}));
- assert.writeError(t.insert({name:x}));
- assert.commandWorked(t.dropIndex({name:1}));
- assert.writeOK(t.insert({name:x}));
- assert.commandFailed(t.ensureIndex({name:1}));
+ assert.commandWorked(t.ensureIndex({name: 1}));
+ assert.writeError(t.insert({name: x}));
+ assert.commandWorked(t.dropIndex({name: 1}));
+ assert.writeOK(t.insert({name: x}));
+ assert.commandFailed(t.ensureIndex({name: 1}));
t.drop();
- db.getSiblingDB('admin').runCommand( { setParameter: 1, failIndexKeyTooLong: false } );
+ db.getSiblingDB('admin').runCommand({setParameter: 1, failIndexKeyTooLong: false});
// inserts
- assert.writeOK(t.insert({_id: 1, name:x}));
- assert.commandWorked(t.ensureIndex({name:1}));
- assert.writeOK(t.insert({_id: 2, name:x}));
- assert.writeOK(t.insert({_id: 3, name:x}));
+ assert.writeOK(t.insert({_id: 1, name: x}));
+ assert.commandWorked(t.ensureIndex({name: 1}));
+ assert.writeOK(t.insert({_id: 2, name: x}));
+ assert.writeOK(t.insert({_id: 3, name: x}));
assert.eq(t.count(), 3);
// updates (smaller and larger)
- assert.writeOK(t.update({_id: 1}, {$set:{name:'short'}}));
- assert.writeOK(t.update({_id: 1}, {$set:{name: x}}));
- assert.writeOK(t.update({_id: 1}, {$set:{name: x + 'even longer'}}));
+ assert.writeOK(t.update({_id: 1}, {$set: {name: 'short'}}));
+ assert.writeOK(t.update({_id: 1}, {$set: {name: x}}));
+ assert.writeOK(t.update({_id: 1}, {$set: {name: x + 'even longer'}}));
// remove
assert.writeOK(t.remove({_id: 1}));
assert.eq(t.count(), 2);
- db.getSiblingDB('admin').runCommand( { setParameter: 1, failIndexKeyTooLong: true } );
+ db.getSiblingDB('admin').runCommand({setParameter: 1, failIndexKeyTooLong: true});
// can still delete even if key is oversized
assert.writeOK(t.remove({_id: 2}));
assert.eq(t.count(), 1);
// can still update to shorter, but not longer name.
- assert.writeError(t.update({_id: 3}, {$set:{name: x + 'even longer'}}));
- assert.writeOK(t.update({_id: 3}, {$set:{name:'short'}}));
- assert.writeError(t.update({_id: 3}, {$set:{name: x}}));
+ assert.writeError(t.update({_id: 3}, {$set: {name: x + 'even longer'}}));
+ assert.writeOK(t.update({_id: 3}, {$set: {name: 'short'}}));
+ assert.writeError(t.update({_id: 3}, {$set: {name: x}}));
- db.getSiblingDB('admin').runCommand( { setParameter: 1, failIndexKeyTooLong: was } );
+ db.getSiblingDB('admin').runCommand({setParameter: 1, failIndexKeyTooLong: was});
// Explicitly drop the collection to avoid failures in post-test hooks that run dbHash and
// validate commands.
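
The parameter toggled above decides whether a write that would produce an oversized index key fails or is accepted with the key skipped; setParameter reports the previous value in was so it can be restored, as the test does. A small hedged sketch of reading and restoring it:

var admin = db.getSiblingDB('admin');
// Read the current value first so it can be put back afterwards.
var before = admin.runCommand({getParameter: 1, failIndexKeyTooLong: 1});
assert.commandWorked(before);
var res = admin.runCommand({setParameter: 1, failIndexKeyTooLong: false});
assert.commandWorked(res);
assert.eq(before.failIndexKeyTooLong, res.was);  // setParameter returns the prior value in was
assert.commandWorked(admin.runCommand({setParameter: 1, failIndexKeyTooLong: res.was}));
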
diff --git a/jstests/core/index_bigkeys_update.js b/jstests/core/index_bigkeys_update.js
index 6bdaf033542..a3074bfdfdd 100644
--- a/jstests/core/index_bigkeys_update.js
+++ b/jstests/core/index_bigkeys_update.js
@@ -1,18 +1,18 @@
bigString = "";
-while ( bigString.length < 16000 )
+while (bigString.length < 16000)
bigString += ".";
t = db.index_bigkeys_update;
t.drop();
-t.insert( { _id : 0, x : "asd" } );
-t.ensureIndex( { x : 1 } );
+t.insert({_id: 0, x: "asd"});
+t.ensureIndex({x: 1});
-assert.eq( 1, t.count() );
+assert.eq(1, t.count());
-assert.writeError(t.update( {} , { $set : { x : bigString } } ));
+assert.writeError(t.update({}, {$set: {x: bigString}}));
-assert.eq( 1, t.count() );
-assert.eq( "asd", t.findOne().x ); // make sure doc is the old version
-assert.eq( "asd", t.findOne( { _id : 0 } ).x ); // make sure doc is the old version
+assert.eq(1, t.count());
+assert.eq("asd", t.findOne().x); // make sure doc is the old version
+assert.eq("asd", t.findOne({_id: 0}).x); // make sure doc is the old version
diff --git a/jstests/core/index_bigkeys_validation.js b/jstests/core/index_bigkeys_validation.js
index ef29b07ecc7..98c80aa081a 100644
--- a/jstests/core/index_bigkeys_validation.js
+++ b/jstests/core/index_bigkeys_validation.js
@@ -6,7 +6,7 @@
var coll = db.longindex;
coll.drop();
- var longVal = new Array(1025).join('x'); // Keys >= 1024 bytes cannot be indexed.
+ var longVal = new Array(1025).join('x'); // Keys >= 1024 bytes cannot be indexed.
assert.commandWorked(db.adminCommand({setParameter: 1, failIndexKeyTooLong: false}));
diff --git a/jstests/core/index_check2.js b/jstests/core/index_check2.js
index 8ebd13c850c..f8590229d39 100644
--- a/jstests/core/index_check2.js
+++ b/jstests/core/index_check2.js
@@ -5,38 +5,44 @@ t.drop();
// Include helpers for analyzing explain output.
load("jstests/libs/analyze_plan.js");
-for ( var i=0; i<1000; i++ ){
+for (var i = 0; i < 1000; i++) {
var a = [];
- for ( var j=1; j<5; j++ ){
- a.push( "tag" + ( i * j % 50 ));
+ for (var j = 1; j < 5; j++) {
+ a.push("tag" + (i * j % 50));
}
- t.save( { num : i , tags : a } );
+ t.save({num: i, tags: a});
}
-q1 = { tags : "tag6" };
-q2 = { tags : "tag12" };
-q3 = { tags : { $all : [ "tag6" , "tag12" ] } };
+q1 = {
+ tags: "tag6"
+};
+q2 = {
+ tags: "tag12"
+};
+q3 = {
+ tags: {$all: ["tag6", "tag12"]}
+};
-assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
-assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
-assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+assert.eq(120, t.find(q1).itcount(), "q1 a");
+assert.eq(120, t.find(q2).itcount(), "q2 a");
+assert.eq(60, t.find(q3).itcount(), "q3 a");
-t.ensureIndex( { tags : 1 } );
+t.ensureIndex({tags: 1});
-assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
-assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
-assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+assert.eq(120, t.find(q1).itcount(), "q1 a");
+assert.eq(120, t.find(q2).itcount(), "q2 a");
+assert.eq(60, t.find(q3).itcount(), "q3 a");
// We expect these queries to use index scans over { tags: 1 }.
-assert( isIxscan(t.find(q1).explain().queryPlanner.winningPlan) , "e1" );
-assert( isIxscan(t.find(q2).explain().queryPlanner.winningPlan) , "e2" );
-assert( isIxscan(t.find(q3).explain().queryPlanner.winningPlan) , "e3" );
+assert(isIxscan(t.find(q1).explain().queryPlanner.winningPlan), "e1");
+assert(isIxscan(t.find(q2).explain().queryPlanner.winningPlan), "e2");
+assert(isIxscan(t.find(q3).explain().queryPlanner.winningPlan), "e3");
scanned1 = t.find(q1).explain("executionStats").executionStats.totalKeysExamined;
scanned2 = t.find(q2).explain("executionStats").executionStats.totalKeysExamined;
scanned3 = t.find(q3).explain("executionStats").executionStats.totalKeysExamined;
-//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 );
+// print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 );
// $all should just iterate either of the words
-assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" );
+assert(scanned3 <= Math.max(scanned1, scanned2), "$all makes query optimizer not work well");
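
For the counts asserted above, $all matches documents whose array contains every listed value, so q3 is the intersection of q1 and q2. A minimal hedged sketch (collection name is illustrative):

var c = db.all_example;  // illustrative collection name
c.drop();
c.save({tags: ["tag6", "tag12", "tag30"]});
c.save({tags: ["tag6"]});
c.ensureIndex({tags: 1});
// $all requires every listed value to be present in the array.
assert.eq(2, c.find({tags: "tag6"}).itcount());
assert.eq(1, c.find({tags: {$all: ["tag6", "tag12"]}}).itcount());
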
diff --git a/jstests/core/index_check3.js b/jstests/core/index_check3.js
index 78135ff30ca..2c07ae6d50a 100644
--- a/jstests/core/index_check3.js
+++ b/jstests/core/index_check3.js
@@ -3,63 +3,62 @@
t = db.index_check3;
t.drop();
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
+t.save({a: "z"});
+assert.eq(1, t.find({a: {$lt: 2}}).itcount(), "A");
+assert.eq(1, t.find({a: {$gt: 2}}).itcount(), "B");
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
-t.save( { a : "z" } );
+t.ensureIndex({a: 1});
-assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" );
-assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" );
-
-t.ensureIndex( { a : 1 } );
-
-assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" );
-assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" );
+assert.eq(1, t.find({a: {$lt: 2}}).itcount(), "C");
+assert.eq(1, t.find({a: {$gt: 2}}).itcount(), "D");
t.drop();
-for ( var i=0; i<100; i++ ){
- var o = { i : i };
- if ( i % 2 == 0 )
+for (var i = 0; i < 100; i++) {
+ var o = {
+ i: i
+ };
+ if (i % 2 == 0)
o.foo = i;
- t.save( o );
+ t.save(o);
}
-t.ensureIndex( { foo : 1 } );
-
-var explain = t.find( { foo : { $lt : 50 } } ).explain("executionStats");
-assert.gt( 30 , explain.executionStats.totalKeysExamined , "lt" );
-var explain = t.find( { foo : { $gt : 50 } } ).explain("executionStats");
-assert.gt( 30 , explain.executionStats.totalKeysExamined , "gt" );
+t.ensureIndex({foo: 1});
+var explain = t.find({foo: {$lt: 50}}).explain("executionStats");
+assert.gt(30, explain.executionStats.totalKeysExamined, "lt");
+var explain = t.find({foo: {$gt: 50}}).explain("executionStats");
+assert.gt(30, explain.executionStats.totalKeysExamined, "gt");
t.drop();
-t.save( {i:'a'} );
-for( var i=0; i < 10; ++i ) {
- t.save( {} );
+t.save({i: 'a'});
+for (var i = 0; i < 10; ++i) {
+ t.save({});
}
-t.ensureIndex( { i : 1 } );
+t.ensureIndex({i: 1});
-var explain = t.find( { i : { $lte : 'a' } } ).explain("executionStats");
-assert.gt( 3 , explain.executionStats.totalKeysExamined , "lte" );
-//printjson( t.find( { i : { $gte : 'a' } } ).explain() );
+var explain = t.find({i: {$lte: 'a'}}).explain("executionStats");
+assert.gt(3, explain.executionStats.totalKeysExamined, "lte");
+// printjson( t.find( { i : { $gte : 'a' } } ).explain() );
// bug SERVER-99
-var explain = t.find( { i : { $gte : 'a' } } ).explain("executionStats");
-assert.gt( 3 , explain.executionStats.totalKeysExamined , "gte" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).count() , "gte a" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" );
-assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" );
-
-t.save( { i : "b" } );
-
-var explain = t.find( { i : { $gte : 'a' } } ).explain("executionStats");
-assert.gt( 3 , explain.executionStats.totalKeysExamined , "gte" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" );
-assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" );
+var explain = t.find({i: {$gte: 'a'}}).explain("executionStats");
+assert.gt(3, explain.executionStats.totalKeysExamined, "gte");
+assert.eq(1, t.find({i: {$gte: 'a'}}).count(), "gte a");
+assert.eq(1, t.find({i: {$gte: 'a'}}).itcount(), "gte b");
+assert.eq(1, t.find({i: {$gte: 'a'}}).sort({i: 1}).count(), "gte c");
+assert.eq(1, t.find({i: {$gte: 'a'}}).sort({i: 1}).itcount(), "gte d");
+
+t.save({i: "b"});
+
+var explain = t.find({i: {$gte: 'a'}}).explain("executionStats");
+assert.gt(3, explain.executionStats.totalKeysExamined, "gte");
+assert.eq(2, t.find({i: {$gte: 'a'}}).count(), "gte a2");
+assert.eq(2, t.find({i: {$gte: 'a'}}).itcount(), "gte b2");
+assert.eq(2, t.find({i: {$gte: 'a', $lt: MaxKey}}).itcount(), "gte c2");
+assert.eq(2, t.find({i: {$gte: 'a', $lt: MaxKey}}).sort({i: -1}).itcount(), "gte d2");
+assert.eq(2, t.find({i: {$gte: 'a', $lt: MaxKey}}).sort({i: 1}).itcount(), "gte e2");
diff --git a/jstests/core/index_check5.js b/jstests/core/index_check5.js
index f6b48448f63..2a3e73f9e8d 100644
--- a/jstests/core/index_check5.js
+++ b/jstests/core/index_check5.js
@@ -2,16 +2,17 @@
t = db.index_check5;
t.drop();
-t.save( { "name" : "Player1" ,
- "scores" : [{"level" : 1 , "score" : 100},
- {"level" : 2 , "score" : 50}],
- "total" : 150 } );
-t.save( { "name" : "Player2" ,
- "total" : 90 ,
- "scores" : [ {"level" : 1 , "score" : 90},
- {"level" : 2 , "score" : 0} ]
- } );
+t.save({
+ "name": "Player1",
+ "scores": [{"level": 1, "score": 100}, {"level": 2, "score": 50}],
+ "total": 150
+});
+t.save({
+ "name": "Player2",
+ "total": 90,
+ "scores": [{"level": 1, "score": 90}, {"level": 2, "score": 0}]
+});
-assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" );
-t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } );
-assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" );
+assert.eq(2, t.find({"scores.level": 2, "scores.score": {$gt: 30}}).itcount(), "A");
+t.ensureIndex({"scores.level": 1, "scores.score": 1});
+assert.eq(2, t.find({"scores.level": 2, "scores.score": {$gt: 30}}).itcount(), "B");
diff --git a/jstests/core/index_check6.js b/jstests/core/index_check6.js
index a3b0e51ded2..4baeced8fb9 100644
--- a/jstests/core/index_check6.js
+++ b/jstests/core/index_check6.js
@@ -7,89 +7,98 @@ function keysExamined(query, hint) {
return explain.executionStats.totalKeysExamined;
}
-t.ensureIndex( { age : 1 , rating : 1 } );
+t.ensureIndex({age: 1, rating: 1});
-for ( var age=10; age<50; age++ ){
- for ( var rating=0; rating<10; rating++ ){
- t.save( { age : age , rating : rating } );
+for (var age = 10; age < 50; age++) {
+ for (var rating = 0; rating < 10; rating++) {
+ t.save({age: age, rating: rating});
}
}
-assert.eq( 10 , keysExamined( { age : 30 }, {} ) , "A" );
-assert.eq( 20 , keysExamined( { age : { $gte : 29 , $lte : 30 } }, {} ) , "B" );
-assert.eq( 19 , keysExamined( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,9] } },
- {age:1,rating:1} ) , "C1" );
-assert.eq( 24 , keysExamined( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [0,8] } },
- {age:1,rating:1} ) , "C2" );
-assert.eq( 29 , keysExamined( { age : { $gte : 25 , $lte : 30 }, rating: {$in: [1,8] } },
- {age:1,rating:1} ) , "C3" );
+assert.eq(10, keysExamined({age: 30}, {}), "A");
+assert.eq(20, keysExamined({age: {$gte: 29, $lte: 30}}, {}), "B");
+assert.eq(19,
+ keysExamined({age: {$gte: 25, $lte: 30}, rating: {$in: [0, 9]}}, {age: 1, rating: 1}),
+ "C1");
+assert.eq(24,
+ keysExamined({age: {$gte: 25, $lte: 30}, rating: {$in: [0, 8]}}, {age: 1, rating: 1}),
+ "C2");
+assert.eq(29,
+ keysExamined({age: {$gte: 25, $lte: 30}, rating: {$in: [1, 8]}}, {age: 1, rating: 1}),
+ "C3");
-assert.eq( 5 , keysExamined( { age : { $gte : 29 , $lte : 30 } , rating : 5 },
- {age:1,rating:1} ) , "C" ); // SERVER-371
-assert.eq( 7 , keysExamined( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } },
- {age:1,rating:1} ) , "D" ); // SERVER-371
+assert.eq(5,
+ keysExamined({age: {$gte: 29, $lte: 30}, rating: 5}, {age: 1, rating: 1}),
+ "C"); // SERVER-371
+assert.eq(7,
+ keysExamined({age: {$gte: 29, $lte: 30}, rating: {$gte: 4, $lte: 5}},
+ {age: 1, rating: 1}),
+ "D"); // SERVER-371
-assert.eq.automsg( "2", "t.find( { age:30, rating:{ $gte:4, $lte:5} } )" +
- ".explain('executionStats')" +
- ".executionStats.totalKeysExamined" );
+assert.eq.automsg("2",
+ "t.find( { age:30, rating:{ $gte:4, $lte:5} } )" + ".explain('executionStats')" +
+ ".executionStats.totalKeysExamined");
t.drop();
-for ( var a=1; a<10; a++ ){
- for ( var b=0; b<10; b++ ){
- for ( var c=0; c<10; c++ ) {
- t.save( { a:a, b:b, c:c } );
+for (var a = 1; a < 10; a++) {
+ for (var b = 0; b < 10; b++) {
+ for (var c = 0; c < 10; c++) {
+ t.save({a: a, b: b, c: c});
}
}
}
-function doQuery( count, query, sort, index ) {
- var explain = t.find( query ).hint( index ).sort( sort ).explain("executionStats");
+function doQuery(count, query, sort, index) {
+ var explain = t.find(query).hint(index).sort(sort).explain("executionStats");
var nscanned = explain.executionStats.totalKeysExamined;
assert(Math.abs(count - nscanned) <= 2);
}
-function doTest( sort, index ) {
- doQuery( 1, { a:5, b:5, c:5 }, sort, index );
- doQuery( 2, { a:5, b:5, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 1, { a:5, b:5, c:{$gte:5.5,$lte:6} }, sort, index );
- doQuery( 1, { a:5, b:5, c:{$gte:5,$lte:5.5} }, sort, index );
- doQuery( 3, { a:5, b:5, c:{$gte:5,$lte:7} }, sort, index );
- doQuery( 4, { a:5, b:{$gte:5,$lte:6}, c:5 }, sort, index );
- if ( sort.b > 0 ) {
- doQuery( 3, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
- doQuery( 3, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
+function doTest(sort, index) {
+ doQuery(1, {a: 5, b: 5, c: 5}, sort, index);
+ doQuery(2, {a: 5, b: 5, c: {$gte: 5, $lte: 6}}, sort, index);
+ doQuery(1, {a: 5, b: 5, c: {$gte: 5.5, $lte: 6}}, sort, index);
+ doQuery(1, {a: 5, b: 5, c: {$gte: 5, $lte: 5.5}}, sort, index);
+ doQuery(3, {a: 5, b: 5, c: {$gte: 5, $lte: 7}}, sort, index);
+ doQuery(4, {a: 5, b: {$gte: 5, $lte: 6}, c: 5}, sort, index);
+ if (sort.b > 0) {
+ doQuery(3, {a: 5, b: {$gte: 5.5, $lte: 6}, c: 5}, sort, index);
+ doQuery(3, {a: 5, b: {$gte: 5, $lte: 5.5}, c: 5}, sort, index);
} else {
- doQuery( 3, { a:5, b:{$gte:5.5,$lte:6}, c:5 }, sort, index );
- doQuery( 3, { a:5, b:{$gte:5,$lte:5.5}, c:5 }, sort, index );
+ doQuery(3, {a: 5, b: {$gte: 5.5, $lte: 6}, c: 5}, sort, index);
+ doQuery(3, {a: 5, b: {$gte: 5, $lte: 5.5}, c: 5}, sort, index);
}
- doQuery( 8, { a:5, b:{$gte:5,$lte:7}, c:5 }, sort, index );
- doQuery( 5, { a:{$gte:5,$lte:6}, b:5, c:5 }, sort, index );
- if ( sort.a > 0 ) {
- doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
- doQuery( 3, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
- doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery(8, {a: 5, b: {$gte: 5, $lte: 7}, c: 5}, sort, index);
+ doQuery(5, {a: {$gte: 5, $lte: 6}, b: 5, c: 5}, sort, index);
+ if (sort.a > 0) {
+ doQuery(3, {a: {$gte: 5.5, $lte: 6}, b: 5, c: 5}, sort, index);
+ doQuery(3, {a: {$gte: 5, $lte: 5.5}, b: 5, c: 5}, sort, index);
+ doQuery(3, {a: {$gte: 5.5, $lte: 6}, b: 5, c: {$gte: 5, $lte: 6}}, sort, index);
} else {
- doQuery( 3, { a:{$gte:5.5,$lte:6}, b:5, c:5 }, sort, index );
- doQuery( 3, { a:{$gte:5,$lte:5.5}, b:5, c:5 }, sort, index );
- doQuery( 4, { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery(3, {a: {$gte: 5.5, $lte: 6}, b: 5, c: 5}, sort, index);
+ doQuery(3, {a: {$gte: 5, $lte: 5.5}, b: 5, c: 5}, sort, index);
+ doQuery(4, {a: {$gte: 5.5, $lte: 6}, b: 5, c: {$gte: 5, $lte: 6}}, sort, index);
}
- doQuery( 8, { a:{$gte:5,$lte:7}, b:5, c:5 }, sort, index );
- doQuery( 7, { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 7, { a:5, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
- doQuery( 11, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:5 }, sort, index );
- doQuery( 15, { a:{$gte:5,$lte:6}, b:{$gte:5,$lte:6}, c:{$gte:5,$lte:6} }, sort, index );
+ doQuery(8, {a: {$gte: 5, $lte: 7}, b: 5, c: 5}, sort, index);
+ doQuery(7, {a: {$gte: 5, $lte: 6}, b: 5, c: {$gte: 5, $lte: 6}}, sort, index);
+ doQuery(7, {a: 5, b: {$gte: 5, $lte: 6}, c: {$gte: 5, $lte: 6}}, sort, index);
+ doQuery(11, {a: {$gte: 5, $lte: 6}, b: {$gte: 5, $lte: 6}, c: 5}, sort, index);
+ doQuery(15, {a: {$gte: 5, $lte: 6}, b: {$gte: 5, $lte: 6}, c: {$gte: 5, $lte: 6}}, sort, index);
}
-for ( var a = -1; a <= 1; a += 2 ) {
- for( var b = -1; b <= 1; b += 2 ) {
- for( var c = -1; c <= 1; c += 2 ) {
+for (var a = -1; a <= 1; a += 2) {
+ for (var b = -1; b <= 1; b += 2) {
+ for (var c = -1; c <= 1; c += 2) {
t.dropIndexes();
- var spec = {a:a,b:b,c:c};
- t.ensureIndex( spec );
- doTest( spec, spec );
- doTest( {a:-a,b:-b,c:-c}, spec );
+ var spec = {
+ a: a,
+ b: b,
+ c: c
+ };
+ t.ensureIndex(spec);
+ doTest(spec, spec);
+ doTest({a: -a, b: -b, c: -c}, spec);
}
}
}
-
diff --git a/jstests/core/index_check7.js b/jstests/core/index_check7.js
index f8020d76143..fda248db467 100644
--- a/jstests/core/index_check7.js
+++ b/jstests/core/index_check7.js
@@ -2,14 +2,13 @@
t = db.index_check7;
t.drop();
-for ( var i=0; i<100; i++ )
- t.save( { x : i } );
+for (var i = 0; i < 100; i++)
+ t.save({x: i});
-t.ensureIndex( { x : 1 } );
-assert.eq( 1 , t.find( { x : 27 } ).explain(true).executionStats.totalKeysExamined , "A" );
+t.ensureIndex({x: 1});
+assert.eq(1, t.find({x: 27}).explain(true).executionStats.totalKeysExamined, "A");
-t.ensureIndex( { x : -1 } );
-assert.eq( 1 , t.find( { x : 27 } ).explain(true).executionStats.totalKeysExamined , "B" );
+t.ensureIndex({x: -1});
+assert.eq(1, t.find({x: 27}).explain(true).executionStats.totalKeysExamined, "B");
-assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain(true)
- .executionStats.totalKeysExamined , "C" );
+assert.eq(40, t.find({x: {$gt: 59}}).explain(true).executionStats.totalKeysExamined, "C");
diff --git a/jstests/core/index_create_too_many.js b/jstests/core/index_create_too_many.js
index add81a86703..44d5016a7cf 100644
--- a/jstests/core/index_create_too_many.js
+++ b/jstests/core/index_create_too_many.js
@@ -6,7 +6,10 @@ coll.drop();
// create 62 indexes, which leaves us with 63 indexes total (+1 for the _id index)
for (var i = 0; i < 62; i++) {
var name = 'i' + i;
- var spec = {key: {}, name: name};
+ var spec = {
+ key: {},
+ name: name
+ };
spec.key[name] = 1;
var res = coll.runCommand('createIndexes', {indexes: [spec]});
@@ -14,12 +17,8 @@ for (var i = 0; i < 62; i++) {
}
// attempt to add 2 more indexes to push over the limit (64).
-var newSpecs = [
- {key: {i62: 1 }, name: 'i62'},
- {key: {i63: 1 }, name: 'i63'}
-];
+var newSpecs = [{key: {i62: 1}, name: 'i62'}, {key: {i63: 1}, name: 'i63'}];
var res = coll.runCommand('createIndexes', {indexes: newSpecs});
assert.commandFailed(res, tojson(res));
-assert.eq(res.code, 67); // CannotCreateIndex
-
+assert.eq(res.code, 67); // CannotCreateIndex
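
For reference on the command shape used in this loop: createIndexes takes an array of specs, each with a key pattern and a name, and the collection-wide limit of 64 indexes is what makes the final attempt fail with code 67 (CannotCreateIndex). A minimal hedged sketch of one successful call (collection name is illustrative):

var c = db.index_limit_example;  // illustrative collection name
c.drop();
assert.writeOK(c.insert({i0: 1}));
// Each spec pairs a key pattern with an index name.
var res = c.runCommand('createIndexes', {indexes: [{key: {i0: 1}, name: 'i0_1'}]});
assert.commandWorked(res);
assert.eq(2, c.getIndexes().length);  // the new index plus the implicit _id index
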
diff --git a/jstests/core/index_create_with_nul_in_name.js b/jstests/core/index_create_with_nul_in_name.js
index 3c84460ddc4..9134649c086 100644
--- a/jstests/core/index_create_with_nul_in_name.js
+++ b/jstests/core/index_create_with_nul_in_name.js
@@ -6,9 +6,13 @@
var coll = db.create_index_with_nul_in_name;
coll.drop();
- var idx = {key: {'a': 1}, name: 'foo\0bar', ns: coll.getFullName()};
+ var idx = {
+ key: {'a': 1},
+ name: 'foo\0bar',
+ ns: coll.getFullName()
+ };
var res = coll.runCommand('createIndexes', {indexes: [idx]});
assert.commandFailed(res, tojson(res));
- assert.eq(res.code, 67); // CannotCreateIndex
+ assert.eq(res.code, 67); // CannotCreateIndex
}());
diff --git a/jstests/core/index_diag.js b/jstests/core/index_diag.js
index edb86e841e1..3e25bf2a1eb 100644
--- a/jstests/core/index_diag.js
+++ b/jstests/core/index_diag.js
@@ -2,43 +2,46 @@
t = db.index_diag;
t.drop();
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
all = [];
ids = [];
xs = [];
-function r( a ){
+function r(a) {
var n = [];
- for ( var x=a.length-1; x>=0; x-- )
- n.push( a[x] );
+ for (var x = a.length - 1; x >= 0; x--)
+ n.push(a[x]);
return n;
}
-for ( i=1; i<4; i++ ){
- o = { _id : i , x : -i };
- t.insert( o );
- all.push( o );
- ids.push( { _id : i } );
- xs.push( { x : -i } );
+for (i = 1; i < 4; i++) {
+ o = {
+ _id: i,
+ x: -i
+ };
+ t.insert(o);
+ all.push(o);
+ ids.push({_id: i});
+ xs.push({x: -i});
}
-assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" );
-assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" );
+assert.eq(all, t.find().sort({_id: 1}).toArray(), "A1");
+assert.eq(r(all), t.find().sort({_id: -1}).toArray(), "A2");
-assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" );
-assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" );
+assert.eq(all, t.find().sort({x: -1}).toArray(), "A3");
+assert.eq(r(all), t.find().sort({x: 1}).toArray(), "A4");
-assert.eq( ids , t.find().sort( { _id : 1 } ).returnKey().toArray() , "B1" );
-assert.eq( r( ids ) , t.find().sort( { _id : -1 } ).returnKey().toArray() , "B2" );
-assert.eq( xs , t.find().sort( { x : -1 } ).returnKey().toArray() , "B3" );
-assert.eq( r( xs ) , t.find().sort( { x : 1 } ).returnKey().toArray() , "B4" );
+assert.eq(ids, t.find().sort({_id: 1}).returnKey().toArray(), "B1");
+assert.eq(r(ids), t.find().sort({_id: -1}).returnKey().toArray(), "B2");
+assert.eq(xs, t.find().sort({x: -1}).returnKey().toArray(), "B3");
+assert.eq(r(xs), t.find().sort({x: 1}).returnKey().toArray(), "B4");
-assert.eq( r( xs ) , t.find().hint( { x : 1 } ).returnKey().toArray() , "B4" );
+assert.eq(r(xs), t.find().hint({x: 1}).returnKey().toArray(), "B4");
// SERVER-4981
-t.ensureIndex( { _id : 1 , x : 1 } );
-assert.eq( all , t.find().hint( { _id : 1 , x : 1 } ).returnKey().toArray() );
-assert.eq( r( all ) , t.find().hint( { _id : 1 , x : 1 } ).sort( { x : 1 } ).returnKey().toArray() );
+t.ensureIndex({_id: 1, x: 1});
+assert.eq(all, t.find().hint({_id: 1, x: 1}).returnKey().toArray());
+assert.eq(r(all), t.find().hint({_id: 1, x: 1}).sort({x: 1}).returnKey().toArray());
-assert.eq( [ {} , {} , {} ], t.find().hint( { $natural : 1 } ).returnKey().toArray() );
+assert.eq([{}, {}, {}], t.find().hint({$natural: 1}).returnKey().toArray());
diff --git a/jstests/core/index_dropdups_ignore.js b/jstests/core/index_dropdups_ignore.js
index d1ab12f3b2c..3622d800b01 100644
--- a/jstests/core/index_dropdups_ignore.js
+++ b/jstests/core/index_dropdups_ignore.js
@@ -3,17 +3,17 @@
var t = db.index_dropdups_ignore;
t.drop();
-t.insert({_id:1, a: 'dup'});
-t.insert({_id:2, a: 'dup'});
+t.insert({_id: 1, a: 'dup'});
+t.insert({_id: 2, a: 'dup'});
// Should fail with a dup-key error even though dropDups is true;
-var res = t.ensureIndex({a:1}, {unique: true, dropDups:true});
+var res = t.ensureIndex({a: 1}, {unique: true, dropDups: true});
assert.commandFailed(res);
assert.eq(res.code, 11000, tojson(res));
// Succeeds with the dup manually removed.
-t.remove({_id:2});
-var res = t.ensureIndex({a:1}, {unique: true, dropDups:true});
+t.remove({_id: 2});
+var res = t.ensureIndex({a: 1}, {unique: true, dropDups: true});
assert.commandWorked(res);
// The spec should have been stripped of the dropDups option.
diff --git a/jstests/core/index_elemmatch1.js b/jstests/core/index_elemmatch1.js
index fb3bd5e76b4..710db37b09e 100644
--- a/jstests/core/index_elemmatch1.js
+++ b/jstests/core/index_elemmatch1.js
@@ -5,37 +5,42 @@ t.drop();
x = 0;
y = 0;
var bulk = t.initializeUnorderedBulkOp();
-for ( a=0; a<100; a++ ){
- for ( b=0; b<100; b++ ){
- bulk.insert( { a : a , b : b % 10 , arr : [ { x : x++ % 10 , y : y++ % 10 } ] } );
+for (a = 0; a < 100; a++) {
+ for (b = 0; b < 100; b++) {
+ bulk.insert({a: a, b: b % 10, arr: [{x: x++ % 10, y: y++ % 10}]});
}
}
assert.writeOK(bulk.execute());
-t.ensureIndex( { a : 1 , b : 1 } );
-t.ensureIndex( { "arr.x" : 1 , a : 1 } );
+t.ensureIndex({a: 1, b: 1});
+t.ensureIndex({"arr.x": 1, a: 1});
-assert.eq( 100 , t.find( { a : 55 } ).itcount() , "A1" );
-assert.eq( 10 , t.find( { a : 55 , b : 7 } ).itcount() , "A2" );
+assert.eq(100, t.find({a: 55}).itcount(), "A1");
+assert.eq(10, t.find({a: 55, b: 7}).itcount(), "A2");
-q = { a : 55 , b : { $in : [ 1 , 5 , 8 ] } };
-assert.eq( 30 , t.find( q ).itcount() , "A3" );
+q = {
+ a: 55,
+ b: {$in: [1, 5, 8]}
+};
+assert.eq(30, t.find(q).itcount(), "A3");
-q.arr = { $elemMatch : { x : 5 , y : 5 } };
-assert.eq( 10 , t.find( q ).itcount() , "A4" );
+q.arr = {
+ $elemMatch: {x: 5, y: 5}
+};
+assert.eq(10, t.find(q).itcount(), "A4");
-function nscannedForCursor( explain, cursor ) {
+function nscannedForCursor(explain, cursor) {
plans = explain.allPlans;
- for( i in plans ) {
- if ( plans[ i ].cursor == cursor ) {
- return plans[ i ].nscanned;
+ for (i in plans) {
+ if (plans[i].cursor == cursor) {
+ return plans[i].nscanned;
}
}
return -1;
}
-var explain = t.find(q).hint( { "arr.x" : 1 , a : 1 } ).explain("executionStats");
-assert.eq( t.find(q).itcount(), explain.executionStats.totalKeysExamined );
+var explain = t.find(q).hint({"arr.x": 1, a: 1}).explain("executionStats");
+assert.eq(t.find(q).itcount(), explain.executionStats.totalKeysExamined);
printjson(t.find(q).explain());
print("Num results:");
diff --git a/jstests/core/index_filter_commands.js b/jstests/core/index_filter_commands.js
index b422dfffcd5..027731e97cf 100644
--- a/jstests/core/index_filter_commands.js
+++ b/jstests/core/index_filter_commands.js
@@ -1,6 +1,6 @@
/**
* Index Filter commands
- *
+ *
* Commands:
* - planCacheListFilters
* Displays index filters for all query shapes in a collection.
@@ -20,7 +20,7 @@
* cache state. We would do this with the planCacheListPlans command
* on the same query shape with the index filters.
*
- */
+ */
var t = db.jstests_index_filter_commands;
@@ -36,16 +36,32 @@ t.save({a: 1, b: 1});
// Add 2 indexes.
// 1st index is more efficient.
// 2nd and 3rd indexes will be used to test index filters.
-var indexA1 = {a: 1};
-var indexA1B1 = {a: 1, b: 1};
-var indexA1C1 = {a: 1, c: 1};
+var indexA1 = {
+ a: 1
+};
+var indexA1B1 = {
+ a: 1,
+ b: 1
+};
+var indexA1C1 = {
+ a: 1,
+ c: 1
+};
t.ensureIndex(indexA1);
t.ensureIndex(indexA1B1);
t.ensureIndex(indexA1C1);
-var queryA1 = {a: 1, b: 1};
-var projectionA1 = {_id: 0, a: 1};
-var sortA1 = {a: -1};
+var queryA1 = {
+ a: 1,
+ b: 1
+};
+var projectionA1 = {
+ _id: 0,
+ a: 1
+};
+var sortA1 = {
+ a: -1
+};
//
// Tests for planCacheListFilters, planCacheClearFilters, planCacheSetFilter
@@ -61,7 +77,6 @@ function getFilters(collection) {
assert.commandWorked(res, 'planCacheListFilters failed');
assert(res.hasOwnProperty('filters'), 'filters missing from planCacheListFilters result');
return res.filters;
-
}
// If query shape is in plan cache,
@@ -76,8 +91,8 @@ function planCacheContains(shape) {
function getPlans(shape) {
var res = t.runCommand('planCacheListPlans', shape);
assert.commandWorked(res, 'planCacheListPlans(' + tojson(shape, '', true) + ' failed');
- assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
- tojson(shape, '', true) + ') result');
+ assert(res.hasOwnProperty('plans'),
+ 'plans missing from planCacheListPlans(' + tojson(shape, '', true) + ') result');
return res.plans;
}
@@ -85,7 +100,8 @@ function getPlans(shape) {
// will return empty results.
var missingCollection = db.jstests_index_filter_commands_missing;
missingCollection.drop();
-assert.eq(0, getFilters(missingCollection),
+assert.eq(0,
+ getFilters(missingCollection),
'planCacheListFilters should return empty array on non-existent collection');
// Retrieve index filters from an empty test collection.
@@ -94,21 +110,31 @@ assert.eq(0, filters.length, 'unexpected number of index filters in planCacheLis
// Check details of winning plan in plan cache before setting index filter.
assert.eq(1, t.find(queryA1, projectionA1).sort(sortA1).itcount(), 'unexpected document count');
-var shape = {query: queryA1, sort: sortA1, projection: projectionA1};
+var shape = {
+ query: queryA1,
+ sort: sortA1,
+ projection: projectionA1
+};
var planBeforeSetFilter = getPlans(shape)[0];
print('Winning plan (before setting index filters) = ' + tojson(planBeforeSetFilter));
// Check filterSet field in plan details
-assert.eq(false, planBeforeSetFilter.filterSet, 'missing or invalid filterSet field in plan details');
+assert.eq(false,
+ planBeforeSetFilter.filterSet,
+ 'missing or invalid filterSet field in plan details');
// Adding index filters to a non-existent collection should be an error.
-assert.commandFailed(missingCollection.runCommand('planCacheSetFilter',
- {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
+assert.commandFailed(missingCollection.runCommand(
+ 'planCacheSetFilter',
+ {query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
// Add index filters for simple query.
-assert.commandWorked(t.runCommand('planCacheSetFilter',
+assert.commandWorked(t.runCommand(
+ 'planCacheSetFilter',
{query: queryA1, sort: sortA1, projection: projectionA1, indexes: [indexA1B1, indexA1C1]}));
filters = getFilters();
-assert.eq(1, filters.length, 'no change in query settings after successfully setting index filters');
+assert.eq(1,
+ filters.length,
+ 'no change in query settings after successfully setting index filters');
assert.eq(queryA1, filters[0].query, 'unexpected query in filters');
assert.eq(sortA1, filters[0].sort, 'unexpected sort in filters');
assert.eq(projectionA1, filters[0].projection, 'unexpected projection in filters');
@@ -154,23 +180,33 @@ if (db.isMaster().msg !== "isdbgrid") {
// No filter.
t.getPlanCache().clear();
assert.eq(false, t.find({z: 1}).explain('queryPlanner').queryPlanner.indexFilterSet);
- assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1)
- .explain('queryPlanner').queryPlanner.indexFilterSet);
+ assert.eq(false,
+ t.find(queryA1, projectionA1)
+ .sort(sortA1)
+ .explain('queryPlanner')
+ .queryPlanner.indexFilterSet);
// With one filter set.
assert.commandWorked(t.runCommand('planCacheSetFilter', {query: {z: 1}, indexes: [{z: 1}]}));
assert.eq(true, t.find({z: 1}).explain('queryPlanner').queryPlanner.indexFilterSet);
- assert.eq(false, t.find(queryA1, projectionA1).sort(sortA1)
- .explain('queryPlanner').queryPlanner.indexFilterSet);
+ assert.eq(false,
+ t.find(queryA1, projectionA1)
+ .sort(sortA1)
+ .explain('queryPlanner')
+ .queryPlanner.indexFilterSet);
// With two filters set.
- assert.commandWorked(t.runCommand('planCacheSetFilter', {
- query: queryA1,
- projection: projectionA1,
- sort: sortA1,
- indexes: [indexA1B1, indexA1C1]
- }));
+ assert.commandWorked(t.runCommand('planCacheSetFilter',
+ {
+ query: queryA1,
+ projection: projectionA1,
+ sort: sortA1,
+ indexes: [indexA1B1, indexA1C1]
+ }));
assert.eq(true, t.find({z: 1}).explain('queryPlanner').queryPlanner.indexFilterSet);
- assert.eq(true, t.find(queryA1, projectionA1).sort(sortA1)
- .explain('queryPlanner').queryPlanner.indexFilterSet);
+ assert.eq(true,
+ t.find(queryA1, projectionA1)
+ .sort(sortA1)
+ .explain('queryPlanner')
+ .queryPlanner.indexFilterSet);
}
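
Because the three plan-cache filter commands exercised above are easy to mix up, here is a compact hedged sketch of the set, list, and clear cycle on an illustrative collection with placeholder fields:

var c = db.index_filter_example;  // illustrative collection name and fields
c.drop();
assert.writeOK(c.insert({a: 1, b: 1}));
c.ensureIndex({a: 1});
c.ensureIndex({a: 1, b: 1});
// Restrict the planner to {a: 1, b: 1} for this exact query shape.
assert.commandWorked(
    c.runCommand('planCacheSetFilter', {query: {a: 1, b: 1}, indexes: [{a: 1, b: 1}]}));
assert.eq(1, c.runCommand('planCacheListFilters').filters.length);
// Clearing the same shape removes the filter again.
assert.commandWorked(c.runCommand('planCacheClearFilters', {query: {a: 1, b: 1}}));
assert.eq(0, c.runCommand('planCacheListFilters').filters.length);
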
diff --git a/jstests/core/index_many.js b/jstests/core/index_many.js
index a9eddbb32f1..142c9bbc4a5 100644
--- a/jstests/core/index_many.js
+++ b/jstests/core/index_many.js
@@ -3,12 +3,11 @@
t = db.many;
function f() {
-
t.drop();
db.many2.drop();
- t.save({ x: 9, y : 99 });
- t.save({ x: 19, y : 99 });
+ t.save({x: 9, y: 99});
+ t.save({x: 19, y: 99});
x = 2;
var lastErr = null;
@@ -16,9 +15,13 @@ function f() {
patt = {};
patt[x] = 1;
if (x == 20)
- patt = { x: 1 };
+ patt = {
+ x: 1
+ };
if (x == 64)
- patt = { y: 1 };
+ patt = {
+ y: 1
+ };
lastErr = t.ensureIndex(patt);
x++;
}
@@ -33,17 +36,16 @@ function f() {
}
assert(lim == 64, "not 64 indexes");
- assert(t.find({ x: 9 }).length() == 1, "b");
+ assert(t.find({x: 9}).length() == 1, "b");
- assert(t.find({ y: 99 }).length() == 2, "y idx");
+ assert(t.find({y: 99}).length() == 2, "y idx");
/* check that renamecollection remaps all the indexes right */
assert(t.renameCollection("many2").ok, "rename failed");
- assert(t.find({ x: 9 }).length() == 0, "many2a");
- assert(db.many2.find({ x: 9 }).length() == 1, "many2b");
- assert(t.find({ y: 99 }).length() == 0, "many2c");
- assert(db.many2.find({ y: 99 }).length() == 2, "many2d");
-
+ assert(t.find({x: 9}).length() == 0, "many2a");
+ assert(db.many2.find({x: 9}).length() == 1, "many2b");
+ assert(t.find({y: 99}).length() == 0, "many2c");
+ assert(db.many2.find({y: 99}).length() == 2, "many2d");
}
f();
diff --git a/jstests/core/index_many2.js b/jstests/core/index_many2.js
index ac265e5cf6e..87e99898b1f 100644
--- a/jstests/core/index_many2.js
+++ b/jstests/core/index_many2.js
@@ -2,30 +2,29 @@
t = db.index_many2;
t.drop();
-t.save( { x : 1 } );
+t.save({x: 1});
-assert.eq( 1 , t.getIndexKeys().length , "A1" );
+assert.eq(1, t.getIndexKeys().length, "A1");
-function make( n ){
+function make(n) {
var x = {};
- x["x"+n] = 1;
+ x["x" + n] = 1;
return x;
}
-for ( i=1; i<1000; i++ ){
- t.ensureIndex( make(i) );
+for (i = 1; i < 1000; i++) {
+ t.ensureIndex(make(i));
}
-assert.eq( 64 , t.getIndexKeys().length , "A2" );
-
+assert.eq(64, t.getIndexKeys().length, "A2");
num = t.getIndexKeys().length;
-t.dropIndex( make(num-1) );
-assert.eq( num - 1 , t.getIndexKeys().length , "B0" );
+t.dropIndex(make(num - 1));
+assert.eq(num - 1, t.getIndexKeys().length, "B0");
-t.ensureIndex( { z : 1 } );
-assert.eq( num , t.getIndexKeys().length , "B1" );
+t.ensureIndex({z: 1});
+assert.eq(num, t.getIndexKeys().length, "B1");
-t.dropIndex( "*" );
-assert.eq( 1 , t.getIndexKeys().length , "C1" );
+t.dropIndex("*");
+assert.eq(1, t.getIndexKeys().length, "C1");
diff --git a/jstests/core/index_partial_create_drop.js b/jstests/core/index_partial_create_drop.js
index d997f0fcfa3..34693ec9ae9 100644
--- a/jstests/core/index_partial_create_drop.js
+++ b/jstests/core/index_partial_create_drop.js
@@ -5,13 +5,12 @@
var isMongos = (db.runCommand("isMaster").msg === "isdbgrid");
var coll = db.index_partial_create_drop;
- var getNumKeys = function (idxName) {
+ var getNumKeys = function(idxName) {
var res = assert.commandWorked(coll.validate(true));
var kpi;
if (isMongos) {
kpi = res.raw[Object.getOwnPropertyNames(res.raw)[0]].keysPerIndex;
- }
- else {
+ } else {
kpi = res.keysPerIndex;
}
return kpi[coll.getFullName() + ".$" + idxName];
@@ -24,10 +23,12 @@
assert.commandFailed(coll.ensureIndex({x: 1}, {partialFilterExpression: {x: {$asdasd: 3}}}));
assert.commandFailed(coll.ensureIndex({x: 1}, {partialFilterExpression: {$and: 5}}));
assert.commandFailed(coll.ensureIndex({x: 1}, {partialFilterExpression: {x: /abc/}}));
- assert.commandFailed(coll.ensureIndex({x: 1},
- {partialFilterExpression: {$and: [
- {$and: [{x: {$lt: 2}}, {x: {$gt: 0}}]},
- {x: {$exists: true}}]}}));
+ assert.commandFailed(coll.ensureIndex(
+ {x: 1},
+ {
+ partialFilterExpression:
+ {$and: [{$and: [{x: {$lt: 2}}, {x: {$gt: 0}}]}, {x: {$exists: true}}]}
+ }));
for (var i = 0; i < 10; i++) {
assert.writeOK(coll.insert({x: i, a: i}));
@@ -40,8 +41,8 @@
assert.eq(1, coll.getIndexes().length);
// Create partial index in background.
- assert.commandWorked(coll.ensureIndex({x: 1}, {background: true,
- partialFilterExpression: {a: {$lt: 5}}}));
+ assert.commandWorked(
+ coll.ensureIndex({x: 1}, {background: true, partialFilterExpression: {a: {$lt: 5}}}));
assert.eq(5, getNumKeys("x_1"));
assert.commandWorked(coll.dropIndex({x: 1}));
assert.eq(1, coll.getIndexes().length);
@@ -55,8 +56,8 @@
// Partial indexes can't also be sparse indexes.
assert.commandFailed(coll.ensureIndex({x: 1}, {partialFilterExpression: {a: 1}, sparse: true}));
assert.commandFailed(coll.ensureIndex({x: 1}, {partialFilterExpression: {a: 1}, sparse: 1}));
- assert.commandWorked(coll.ensureIndex({x: 1}, {partialFilterExpression: {a: 1},
- sparse: false}));
+ assert.commandWorked(
+ coll.ensureIndex({x: 1}, {partialFilterExpression: {a: 1}, sparse: false}));
assert.eq(2, coll.getIndexes().length);
assert.commandWorked(coll.dropIndex({x: 1}));
assert.eq(1, coll.getIndexes().length);
diff --git a/jstests/core/index_partial_read_ops.js b/jstests/core/index_partial_read_ops.js
index ef5c30d25a6..42f77486c17 100644
--- a/jstests/core/index_partial_read_ops.js
+++ b/jstests/core/index_partial_read_ops.js
@@ -10,8 +10,8 @@ load("jstests/libs/analyze_plan.js");
coll.drop();
assert.commandWorked(coll.ensureIndex({x: 1}, {partialFilterExpression: {a: {$lte: 1.5}}}));
- assert.writeOK(coll.insert({x: 5, a: 2})); // Not in index.
- assert.writeOK(coll.insert({x: 6, a: 1})); // In index.
+ assert.writeOK(coll.insert({x: 5, a: 2})); // Not in index.
+ assert.writeOK(coll.insert({x: 6, a: 1})); // In index.
//
// Verify basic functionality with find().
@@ -65,14 +65,14 @@ load("jstests/libs/analyze_plan.js");
//
// findAndModify operation that should use index.
- explain = coll.explain('executionStats').findAndModify({query: {x: {$gt: 1}, a: 1},
- update: {$inc: {x: 1}}});
+ explain = coll.explain('executionStats')
+ .findAndModify({query: {x: {$gt: 1}, a: 1}, update: {$inc: {x: 1}}});
assert.eq(1, explain.executionStats.nReturned);
assert(isIxscan(explain.queryPlanner.winningPlan));
// findAndModify operation that should not use index.
- explain = coll.explain('executionStats').findAndModify({query: {x: {$gt: 1}, a: 2},
- update: {$inc: {x: 1}}});
+ explain = coll.explain('executionStats')
+ .findAndModify({query: {x: {$gt: 1}, a: 2}, update: {$inc: {x: 1}}});
assert.eq(1, explain.executionStats.nReturned);
assert(isCollscan(explain.queryPlanner.winningPlan));
})();
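
The pattern in these read tests is that the planner only considers a partial index when the query predicate is contained in the partialFilterExpression. A hedged sketch with an illustrative collection, using printjson in place of the analyze_plan.js helpers:

var c = db.partial_read_example;  // illustrative collection name
c.drop();
assert.commandWorked(c.ensureIndex({x: 1}, {partialFilterExpression: {a: {$lte: 1.5}}}));
assert.writeOK(c.insert({x: 6, a: 1}));  // satisfies the filter, so it has an index entry
assert.writeOK(c.insert({x: 5, a: 2}));  // not indexed
// {a: 1} is contained in {a: {$lte: 1.5}}, so the partial index is eligible here.
printjson(c.find({x: {$gt: 1}, a: 1}).explain().queryPlanner.winningPlan);
// {a: 2} is not contained in the filter, so this query cannot use the partial index.
printjson(c.find({x: {$gt: 1}, a: 2}).explain().queryPlanner.winningPlan);
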
diff --git a/jstests/core/index_partial_write_ops.js b/jstests/core/index_partial_write_ops.js
index 92d3720d07e..b962347a26d 100644
--- a/jstests/core/index_partial_write_ops.js
+++ b/jstests/core/index_partial_write_ops.js
@@ -5,13 +5,12 @@
var isMongos = (db.runCommand("isMaster").msg === "isdbgrid");
var coll = db.index_partial_write_ops;
- var getNumKeys = function (idxName) {
+ var getNumKeys = function(idxName) {
var res = assert.commandWorked(coll.validate(true));
var kpi;
if (isMongos) {
kpi = res.raw[Object.getOwnPropertyNames(res.raw)[0]].keysPerIndex;
- }
- else {
+ } else {
kpi = res.keysPerIndex;
}
return kpi[coll.getFullName() + ".$" + idxName];
@@ -22,8 +21,8 @@
// Create partial index.
assert.commandWorked(coll.ensureIndex({x: 1}, {partialFilterExpression: {a: 1}}));
- assert.writeOK(coll.insert({_id: 1, x: 5, a: 2, b: 1})); // Not in index.
- assert.writeOK(coll.insert({_id: 2, x: 6, a: 1, b: 1})); // In index.
+ assert.writeOK(coll.insert({_id: 1, x: 5, a: 2, b: 1})); // Not in index.
+ assert.writeOK(coll.insert({_id: 2, x: 6, a: 1, b: 1})); // In index.
assert.eq(1, getNumKeys("x_1"));
diff --git a/jstests/core/index_plugins.js b/jstests/core/index_plugins.js
index d7271217e2f..f32e1e4345a 100644
--- a/jstests/core/index_plugins.js
+++ b/jstests/core/index_plugins.js
@@ -14,9 +14,9 @@ coll.dropIndexes();
assert.commandWorked(coll.ensureIndex({a: "text"}));
coll.dropIndexes();
-assert.commandFailed(coll.ensureIndex({a: "geoHaystack"}, {bucketSize: 1})); // compound required
+assert.commandFailed(coll.ensureIndex({a: "geoHaystack"}, {bucketSize: 1})); // compound required
-// Test compounding special index types with an ascending index.
+// Test compounding special index types with an ascending index.
assert.commandWorked(coll.ensureIndex({a: "2dsphere", b: 1}));
coll.dropIndexes();
@@ -30,14 +30,14 @@ coll.dropIndexes();
assert.commandWorked(coll.ensureIndex({a: "2d", b: 1}));
coll.dropIndexes();
-assert.commandFailed(coll.ensureIndex({a: 1, b: "2d"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: 1, b: "2d"})); // unsupported
assert.commandWorked(coll.ensureIndex({a: "geoHaystack", b: 1}, {bucketSize: 1}));
coll.dropIndexes();
-assert.commandFailed(coll.ensureIndex({a: 1, b: "geoHaystack"}, {bucketSize: 1})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: 1, b: "geoHaystack"}, {bucketSize: 1})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "hashed", b: 1})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: 1, b: "hashed"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "hashed", b: 1})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: 1, b: "hashed"})); // unsupported
// Test compound index where multiple fields have same special index type.
@@ -46,17 +46,17 @@ coll.dropIndexes();
assert.commandWorked(coll.ensureIndex({a: "text", b: "text"}));
coll.dropIndexes();
-assert.commandFailed(coll.ensureIndex({a: "2d", b: "2d"})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "geoHaystack"}, // unsupported
+assert.commandFailed(coll.ensureIndex({a: "2d", b: "2d"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "geoHaystack"}, // unsupported
{bucketSize: 1}));
-assert.commandFailed(coll.ensureIndex({a: "hashed", b: "hashed"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "hashed", b: "hashed"})); // unsupported
// Test compounding different special index types with each other.
-assert.commandFailed(coll.ensureIndex({a: "2d", b: "hashed"})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "hashed", b: "2dsphere"})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "2dsphere", b: "text"})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "text", b: "geoHaystack"})); // unsupported
-assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "2d"}, // unsupported
+assert.commandFailed(coll.ensureIndex({a: "2d", b: "hashed"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "hashed", b: "2dsphere"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "2dsphere", b: "text"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "text", b: "geoHaystack"})); // unsupported
+assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "2d"}, // unsupported
{bucketSize: 1}));
diff --git a/jstests/core/index_sparse1.js b/jstests/core/index_sparse1.js
index 950c8a8d797..d71c3c1dbfe 100644
--- a/jstests/core/index_sparse1.js
+++ b/jstests/core/index_sparse1.js
@@ -2,44 +2,40 @@
t = db.index_sparse1;
t.drop();
-t.insert( { _id : 1 , x : 1 } );
-t.insert( { _id : 2 , x : 2 } );
-t.insert( { _id : 3 , x : 2 } );
-t.insert( { _id : 4 } );
-t.insert( { _id : 5 } );
-
-assert.eq( 5 , t.count() , "A1" );
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "A2" );
-
-t.ensureIndex( { x : 1 } );
-assert.eq( 2 , t.getIndexes().length , "B1" );
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "B2" );
-t.dropIndex( { x : 1 } );
-assert.eq( 1 , t.getIndexes().length , "B3" );
-
-t.ensureIndex( { x : 1 } , { sparse : 1 } );
-assert.eq( 2 , t.getIndexes().length , "C1" );
-assert.eq( 5 , t.find().sort( { x : 1 } ).itcount() , "C2" );
-t.dropIndex( { x : 1 } );
-assert.eq( 1 , t.getIndexes().length , "C3" );
+t.insert({_id: 1, x: 1});
+t.insert({_id: 2, x: 2});
+t.insert({_id: 3, x: 2});
+t.insert({_id: 4});
+t.insert({_id: 5});
+
+assert.eq(5, t.count(), "A1");
+assert.eq(5, t.find().sort({x: 1}).itcount(), "A2");
+
+t.ensureIndex({x: 1});
+assert.eq(2, t.getIndexes().length, "B1");
+assert.eq(5, t.find().sort({x: 1}).itcount(), "B2");
+t.dropIndex({x: 1});
+assert.eq(1, t.getIndexes().length, "B3");
+
+t.ensureIndex({x: 1}, {sparse: 1});
+assert.eq(2, t.getIndexes().length, "C1");
+assert.eq(5, t.find().sort({x: 1}).itcount(), "C2");
+t.dropIndex({x: 1});
+assert.eq(1, t.getIndexes().length, "C3");
// -- sparse & unique
-t.remove( { _id : 2 } );
+t.remove({_id: 2});
-// test that we can't create a unique index without sparse
-assert.commandFailed( t.ensureIndex( { x : 1 } , { unique : 1 } ));
-assert.eq( 1 , t.getIndexes().length , "D2" );
-
-
-t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } );
-assert.eq( 2 , t.getIndexes().length , "E1" );
-t.dropIndex( { x : 1 } );
-assert.eq( 1 , t.getIndexes().length , "E3" );
-
-
-t.insert( { _id : 2 , x : 2 } );
-t.ensureIndex( { x : 1 } , { unique : 1 , sparse : 1 } );
-assert.eq( 1 , t.getIndexes().length , "F1" );
+// test that we can't create a unique index without sparse
+assert.commandFailed(t.ensureIndex({x: 1}, {unique: 1}));
+assert.eq(1, t.getIndexes().length, "D2");
+t.ensureIndex({x: 1}, {unique: 1, sparse: 1});
+assert.eq(2, t.getIndexes().length, "E1");
+t.dropIndex({x: 1});
+assert.eq(1, t.getIndexes().length, "E3");
+t.insert({_id: 2, x: 2});
+t.ensureIndex({x: 1}, {unique: 1, sparse: 1});
+assert.eq(1, t.getIndexes().length, "F1");
diff --git a/jstests/core/index_sparse2.js b/jstests/core/index_sparse2.js
index 4d5c5b84cd9..702bb8cd5f8 100644
--- a/jstests/core/index_sparse2.js
+++ b/jstests/core/index_sparse2.js
@@ -1,23 +1,20 @@
t = db.index_sparse2;
t.drop();
-t.insert( { _id : 1 , x : 1 , y : 1 } );
-t.insert( { _id : 2 , x : 2 } );
-t.insert( { _id : 3 } );
-
-t.ensureIndex( { x : 1 , y : 1 } );
-assert.eq( 2 , t.getIndexes().length , "A1" );
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "A2 count()" );
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "A2 itcount()" );
-t.dropIndex( { x : 1 , y : 1 } );
-assert.eq( 1 , t.getIndexes().length , "A3" );
-
-t.ensureIndex( { x : 1 , y : 1 } , { sparse : 1 } );
-assert.eq( 2 , t.getIndexes().length , "B1" );
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).count() , "B2 count()" );
-assert.eq( 3 , t.find().sort( { x : 1 , y : 1 } ).itcount() , "B2 itcount()" );
-t.dropIndex( { x : 1 , y : 1 } );
-assert.eq( 1 , t.getIndexes().length , "B3" );
-
+t.insert({_id: 1, x: 1, y: 1});
+t.insert({_id: 2, x: 2});
+t.insert({_id: 3});
+t.ensureIndex({x: 1, y: 1});
+assert.eq(2, t.getIndexes().length, "A1");
+assert.eq(3, t.find().sort({x: 1, y: 1}).count(), "A2 count()");
+assert.eq(3, t.find().sort({x: 1, y: 1}).itcount(), "A2 itcount()");
+t.dropIndex({x: 1, y: 1});
+assert.eq(1, t.getIndexes().length, "A3");
+t.ensureIndex({x: 1, y: 1}, {sparse: 1});
+assert.eq(2, t.getIndexes().length, "B1");
+assert.eq(3, t.find().sort({x: 1, y: 1}).count(), "B2 count()");
+assert.eq(3, t.find().sort({x: 1, y: 1}).itcount(), "B2 itcount()");
+t.dropIndex({x: 1, y: 1});
+assert.eq(1, t.getIndexes().length, "B3");
diff --git a/jstests/core/index_stats.js b/jstests/core/index_stats.js
index fc4aca2a77f..7db4559210c 100644
--- a/jstests/core/index_stats.js
+++ b/jstests/core/index_stats.js
@@ -78,10 +78,8 @@
//
// Confirm index stats tick on findAndModify() update.
//
- var res = db.runCommand({findAndModify: colName,
- query: {a: 1},
- update: {$set: {d: 1}},
- 'new': true});
+ var res = db.runCommand(
+ {findAndModify: colName, query: {a: 1}, update: {$set: {d: 1}}, 'new': true});
assert.commandWorked(res);
countA++;
assert.eq(countA, getUsageCount("a_1"));
@@ -89,9 +87,7 @@
//
// Confirm index stats tick on findAndModify() delete.
//
- res = db.runCommand({findAndModify: colName,
- query: {a: 2},
- remove: true});
+ res = db.runCommand({findAndModify: colName, query: {a: 2}, remove: true});
assert.commandWorked(res);
countA++;
assert.eq(countA, getUsageCount("a_1"));
@@ -112,8 +108,7 @@
var name = indexNameList[i];
if (name === "a_1") {
countA++;
- }
- else {
+ } else {
assert(name === "b_1_c_1");
countB++;
}
@@ -137,11 +132,15 @@
//
// Confirm index stats tick on group().
//
- res = db.runCommand({group: {ns: colName,
- key: {b: 1, c: 1},
- cond: {b: {$gt: 0}},
- $reduce: function(curr, result) {},
- initial: {}}});
+ res = db.runCommand({
+ group: {
+ ns: colName,
+ key: {b: 1, c: 1},
+ cond: {b: {$gt: 0}},
+ $reduce: function(curr, result) {},
+ initial: {}
+ }
+ });
assert.commandWorked(res);
countB++;
assert.eq(countB, getUsageCount("b_1_c_1"));
@@ -149,8 +148,7 @@
//
// Confirm index stats tick on aggregate w/ match.
//
- res = db.runCommand({aggregate: colName,
- pipeline: [{$match: {b: 1}}]});
+ res = db.runCommand({aggregate: colName, pipeline: [{$match: {b: 1}}]});
assert.commandWorked(res);
countB++;
assert.eq(countB, getUsageCount("b_1_c_1"));
@@ -158,11 +156,17 @@
//
// Confirm index stats tick on mapReduce with query.
//
- res = db.runCommand({mapReduce: colName,
- map: function() {emit(this.b, this.c);},
- reduce: function(key, val) {return val;},
- query: {b: 2},
- out: {inline: true}});
+ res = db.runCommand({
+ mapReduce: colName,
+ map: function() {
+ emit(this.b, this.c);
+ },
+ reduce: function(key, val) {
+ return val;
+ },
+ query: {b: 2},
+ out: {inline: true}
+ });
assert.commandWorked(res);
countB++;
assert.eq(countB, getUsageCount("b_1_c_1"));
@@ -206,5 +210,7 @@
//
// Confirm that retrieval fails if $indexStats is not in the first pipeline position.
//
- assert.throws(function() { col.aggregate([{$match: {}}, {$indexStats: {}}]); });
+ assert.throws(function() {
+ col.aggregate([{$match: {}}, {$indexStats: {}}]);
+ });
})();
diff --git a/jstests/core/indexa.js b/jstests/core/indexa.js
index 73da14fc8bc..be86c9acf54 100644
--- a/jstests/core/indexa.js
+++ b/jstests/core/indexa.js
@@ -1,22 +1,23 @@
-// unique index constraint test for updates
+// unique index constraint test for updates
// case where object doesn't grow tested here
t = db.indexa;
t.drop();
-t.ensureIndex( { x:1 }, true );
+t.ensureIndex({x: 1}, true);
-t.insert( { 'x':'A' } );
-t.insert( { 'x':'B' } );
-t.insert( { 'x':'A' } );
+t.insert({'x': 'A'});
+t.insert({'x': 'B'});
+t.insert({'x': 'A'});
-assert.eq( 2 , t.count() , "indexa 1" );
+assert.eq(2, t.count(), "indexa 1");
-t.update( {x:'B'}, { x:'A' } );
+t.update({x: 'B'}, {x: 'A'});
a = t.find().toArray();
-u = Array.unique( a.map( function(z){ return z.x; } ) );
-assert.eq( 2 , t.count() , "indexa 2" );
-
-assert( a.length == u.length , "unique index update is broken" );
+u = Array.unique(a.map(function(z) {
+ return z.x;
+}));
+assert.eq(2, t.count(), "indexa 2");
+assert(a.length == u.length, "unique index update is broken");
diff --git a/jstests/core/indexapi.js b/jstests/core/indexapi.js
index 911e58e980c..2df9709171d 100644
--- a/jstests/core/indexapi.js
+++ b/jstests/core/indexapi.js
@@ -2,41 +2,45 @@
t = db.indexapi;
t.drop();
-key = { x : 1 };
+key = {
+ x: 1
+};
-c = { ns : t._fullName , key : key , name : t._genIndexName( key ) };
-assert.eq( c , t._indexSpec( { x : 1 } ) , "A" );
+c = {
+ ns: t._fullName,
+ key: key,
+ name: t._genIndexName(key)
+};
+assert.eq(c, t._indexSpec({x: 1}), "A");
c.name = "bob";
-assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" );
+assert.eq(c, t._indexSpec({x: 1}, "bob"), "B");
-c.name = t._genIndexName( key );
-assert.eq( c , t._indexSpec( { x : 1 } ) , "C" );
+c.name = t._genIndexName(key);
+assert.eq(c, t._indexSpec({x: 1}), "C");
c.unique = true;
-assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" );
-assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" );
-assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" );
+assert.eq(c, t._indexSpec({x: 1}, true), "D");
+assert.eq(c, t._indexSpec({x: 1}, [true]), "E");
+assert.eq(c, t._indexSpec({x: 1}, {unique: true}), "F");
c.dropDups = true;
-assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" );
-assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "F" );
+assert.eq(c, t._indexSpec({x: 1}, [true, true]), "G");
+assert.eq(c, t._indexSpec({x: 1}, {unique: true, dropDups: true}), "F");
-t.ensureIndex( { x : 1 } , { unique : true } );
+t.ensureIndex({x: 1}, {unique: true});
idx = t.getIndexes();
-assert.eq( 2 , idx.length , "M1" );
-assert.eq( key , idx[1].key , "M2" );
-assert( idx[1].unique , "M3" );
+assert.eq(2, idx.length, "M1");
+assert.eq(key, idx[1].key, "M2");
+assert(idx[1].unique, "M3");
t.drop();
-t.ensureIndex( { x : 1 } , { unique : 1 } );
+t.ensureIndex({x: 1}, {unique: 1});
idx = t.getIndexes();
-assert.eq( 2 , idx.length , "M1" );
-assert.eq( key , idx[1].key , "M2" );
-assert( idx[1].unique , "M3" );
-//printjson( idx );
+assert.eq(2, idx.length, "M1");
+assert.eq(key, idx[1].key, "M2");
+assert(idx[1].unique, "M3");
+// printjson( idx );
// Test that attempting to create index in an invalid namespace fails.
-assert.writeError(db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } ));
-
-
+assert.writeError(db.system.indexes.insert({ns: "test", key: {x: 1}, name: "x"}));
diff --git a/jstests/core/indexb.js b/jstests/core/indexb.js
index d7d2e8c9f05..1262c621ba9 100644
--- a/jstests/core/indexb.js
+++ b/jstests/core/indexb.js
@@ -1,29 +1,28 @@
// unique index test for a case where the object grows
// and must move
-// see indexa.js for the test case for an update with dup id check
+// see indexa.js for the test case for an update with dup id check
// when it doesn't move
-
t = db.indexb;
t.drop();
-t.ensureIndex({a:1},true);
+t.ensureIndex({a: 1}, true);
-t.insert({a:1});
+t.insert({a: 1});
-x = { a : 2 };
+x = {
+ a: 2
+};
t.save(x);
{
+ assert(t.count() == 2, "count wrong B");
- assert( t.count() == 2, "count wrong B");
-
- x.a = 1;
- x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
- t.save(x); // should fail, not unique.
-
- assert( t.count() == 2,"count wrong" );
- assert( t.find({a:1}).count() == 1,"bfail1" );
- assert( t.find({a:2}).count() == 1,"bfail2" );
+ x.a = 1;
+ x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ t.save(x); // should fail, not unique.
+ assert(t.count() == 2, "count wrong");
+ assert(t.find({a: 1}).count() == 1, "bfail1");
+ assert(t.find({a: 2}).count() == 1, "bfail2");
}
diff --git a/jstests/core/indexc.js b/jstests/core/indexc.js
index b099e2d2823..8a4591bbd9a 100644
--- a/jstests/core/indexc.js
+++ b/jstests/core/indexc.js
@@ -2,19 +2,19 @@
t = db.indexc;
t.drop();
-for ( var i=1; i<100; i++ ){
- var d = new Date( ( new Date() ).getTime() + i );
- t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } );
- if ( i == 51 )
+for (var i = 1; i < 100; i++) {
+ var d = new Date((new Date()).getTime() + i);
+ t.save({a: i, ts: d, cats: [i, i + 1, i + 2]});
+ if (i == 51)
mid = d;
}
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" );
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" );
+assert.eq(50, t.find({ts: {$lt: mid}}).itcount(), "A");
+assert.eq(50, t.find({ts: {$lt: mid}}).sort({ts: 1}).itcount(), "B");
-t.ensureIndex( { ts : 1 , cats : 1 } );
-t.ensureIndex( { cats : 1 } );
+t.ensureIndex({ts: 1, cats: 1});
+t.ensureIndex({cats: 1});
// multi-key bug was firing here (related to getsetdup()):
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" );
-assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" );
+assert.eq(50, t.find({ts: {$lt: mid}}).itcount(), "C");
+assert.eq(50, t.find({ts: {$lt: mid}}).sort({ts: 1}).itcount(), "D");
diff --git a/jstests/core/indexd.js b/jstests/core/indexd.js
index 33246ad9812..31281de7ffa 100644
--- a/jstests/core/indexd.js
+++ b/jstests/core/indexd.js
@@ -2,9 +2,11 @@
t = db.indexd;
t.drop();
-t.save( { a : 1 } );
-t.ensureIndex( { a : 1 } );
-assert.throws( function(){ db.indexd.$_id_.drop(); } );
-assert( t.drop() );
+t.save({a: 1});
+t.ensureIndex({a: 1});
+assert.throws(function() {
+ db.indexd.$_id_.drop();
+});
+assert(t.drop());
-//db.indexd.$_id_.remove({});
+// db.indexd.$_id_.remove({});
diff --git a/jstests/core/indexe.js b/jstests/core/indexe.js
index e84322c6510..55e256b9df9 100644
--- a/jstests/core/indexe.js
+++ b/jstests/core/indexe.js
@@ -4,19 +4,19 @@ t.drop();
var num = 1000;
-for ( i=0; i<num; i++){
- t.insert( { a : "b" } );
+for (i = 0; i < num; i++) {
+ t.insert({a: "b"});
}
-assert.eq( num , t.find().count() ,"A1" );
-assert.eq( num , t.find( { a : "b" } ).count() , "B1" );
-assert.eq( num , t.find( { a : "b" } ).itcount() , "C1" );
+assert.eq(num, t.find().count(), "A1");
+assert.eq(num, t.find({a: "b"}).count(), "B1");
+assert.eq(num, t.find({a: "b"}).itcount(), "C1");
-t.ensureIndex( { a : 1 } );
+t.ensureIndex({a: 1});
-assert.eq( num , t.find().count() ,"A2" );
-assert.eq( num , t.find().sort( { a : 1 } ).count() , "A2a" );
-assert.eq( num , t.find( { a : "b" } ).count() , "B2" );
-assert.eq( num , t.find( { a : "b" } ).itcount() , "C3" );
+assert.eq(num, t.find().count(), "A2");
+assert.eq(num, t.find().sort({a: 1}).count(), "A2a");
+assert.eq(num, t.find({a: "b"}).count(), "B2");
+assert.eq(num, t.find({a: "b"}).itcount(), "C3");
t.drop();
diff --git a/jstests/core/indexes_on_indexes.js b/jstests/core/indexes_on_indexes.js
index df42c5161e5..3ebbf1d2f9c 100644
--- a/jstests/core/indexes_on_indexes.js
+++ b/jstests/core/indexes_on_indexes.js
@@ -8,20 +8,18 @@
assert.eq(t.system.indexes.getIndexes().length, 0);
print("trying via ensureIndex");
- assert.commandFailed(t.system.indexes.ensureIndex({_id:1}));
+ assert.commandFailed(t.system.indexes.ensureIndex({_id: 1}));
printjson(t.system.indexes.getIndexes());
assert.eq(t.system.indexes.getIndexes().length, 0);
print("trying via createIndex");
- assert.throws(t.system.indexes.createIndex({_id:1}));
+ assert.throws(t.system.indexes.createIndex({_id: 1}));
printjson(t.system.indexes.getIndexes());
assert.eq(t.system.indexes.getIndexes().length, 0);
print("trying via direct insertion");
- assert.throws(t.system.indexes.insert({ v:1,
- key:{_id:1},
- ns: "indexes_on_indexes.system.indexes",
- name:"wontwork"}));
+ assert.throws(t.system.indexes.insert(
+ {v: 1, key: {_id: 1}, ns: "indexes_on_indexes.system.indexes", name: "wontwork"}));
printjson(t.system.indexes.getIndexes());
assert.eq(t.system.indexes.getIndexes().length, 0);
}());
diff --git a/jstests/core/indexf.js b/jstests/core/indexf.js
index 3ad222bcbb8..5a126b3e22f 100644
--- a/jstests/core/indexf.js
+++ b/jstests/core/indexf.js
@@ -2,12 +2,12 @@
t = db.indexf;
t.drop();
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-t.save( { x : 2 } );
-t.save( { y : 3 } );
-t.save( { x : 4 } );
+t.save({x: 2});
+t.save({y: 3});
+t.save({x: 4});
-assert.eq( 2 , t.findOne( { x : 2 } ).x , "A1" );
-assert.eq( 3 , t.findOne( { x : null } ).y , "A2" );
-assert.eq( 4 , t.findOne( { x : 4 } ).x , "A3" );
+assert.eq(2, t.findOne({x: 2}).x, "A1");
+assert.eq(3, t.findOne({x: null}).y, "A2");
+assert.eq(4, t.findOne({x: 4}).x, "A3");
diff --git a/jstests/core/indexg.js b/jstests/core/indexg.js
index f4b2e0cada1..3cafede4cff 100644
--- a/jstests/core/indexg.js
+++ b/jstests/core/indexg.js
@@ -1,13 +1,13 @@
f = db.jstests_indexg;
f.drop();
-f.save( { list: [1, 2] } );
-f.save( { list: [1, 3] } );
+f.save({list: [1, 2]});
+f.save({list: [1, 3]});
doit = function() {
- assert.eq( 1, f.count( { list: { $in: [1], $ne: 3 } } ) );
- assert.eq( 1, f.count( { list: { $in: [1], $not:{$in: [3] } } } ) );
+ assert.eq(1, f.count({list: {$in: [1], $ne: 3}}));
+ assert.eq(1, f.count({list: {$in: [1], $not: {$in: [3]}}}));
};
doit();
-f.ensureIndex( { list: 1 } );
+f.ensureIndex({list: 1});
doit();
\ No newline at end of file
diff --git a/jstests/core/indexj.js b/jstests/core/indexj.js
index 7208abfdea6..01754466a3a 100644
--- a/jstests/core/indexj.js
+++ b/jstests/core/indexj.js
@@ -14,42 +14,42 @@ function keysExamined(query, hint, sort) {
return explain.executionStats.totalKeysExamined;
}
-t.ensureIndex( {a:1} );
-t.save( {a:5} );
-assert.eq( 0, keysExamined( { a: { $gt:4, $lt:5 } } ), "A" );
+t.ensureIndex({a: 1});
+t.save({a: 5});
+assert.eq(0, keysExamined({a: {$gt: 4, $lt: 5}}), "A");
t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:4} );
-assert.eq( 0, keysExamined( { a: { $gt:4, $lt:5 } } ), "B" );
+t.ensureIndex({a: 1});
+t.save({a: 4});
+assert.eq(0, keysExamined({a: {$gt: 4, $lt: 5}}), "B");
-t.save( {a:5} );
-assert.eq( 0, keysExamined( { a: { $gt:4, $lt:5 } } ), "D" );
+t.save({a: 5});
+assert.eq(0, keysExamined({a: {$gt: 4, $lt: 5}}), "D");
-t.save( {a:4} );
-assert.eq( 0, keysExamined( { a: { $gt:4, $lt:5 } } ), "C" );
+t.save({a: 4});
+assert.eq(0, keysExamined({a: {$gt: 4, $lt: 5}}), "C");
-t.save( {a:5} );
-assert.eq( 0, keysExamined( { a: { $gt:4, $lt:5 } } ), "D" );
+t.save({a: 5});
+assert.eq(0, keysExamined({a: {$gt: 4, $lt: 5}}), "D");
t.drop();
-t.ensureIndex( {a:1,b:1} );
-t.save( { a:1,b:1 } );
-t.save( { a:1,b:2 } );
-t.save( { a:2,b:1 } );
-t.save( { a:2,b:2 } );
-
-assert.eq( 3, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1} ) );
-assert.eq( 3, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1}, {a:-1,b:-1} ) );
-
-t.save( {a:1,b:1} );
-t.save( {a:1,b:1} );
-assert.eq( 3, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1} ) );
-assert.eq( 3, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1} ) );
-assert.eq( 3, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1}, {a:-1,b:-1} ) );
-
-assert.eq( 2, keysExamined( { a:{$in:[1,1.9]}, b:{$gt:1,$lt:2} }, {a:1,b:1} ) );
-assert.eq( 2, keysExamined( { a:{$in:[1.1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1}, {a:-1,b:-1} ) );
-
-t.save( { a:1,b:1.5} );
-assert.eq( 4, keysExamined( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} }, {a:1,b:1} ), "F" );
+t.ensureIndex({a: 1, b: 1});
+t.save({a: 1, b: 1});
+t.save({a: 1, b: 2});
+t.save({a: 2, b: 1});
+t.save({a: 2, b: 2});
+
+assert.eq(3, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}));
+assert.eq(3, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}, {a: -1, b: -1}));
+
+t.save({a: 1, b: 1});
+t.save({a: 1, b: 1});
+assert.eq(3, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}));
+assert.eq(3, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}));
+assert.eq(3, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}, {a: -1, b: -1}));
+
+assert.eq(2, keysExamined({a: {$in: [1, 1.9]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}));
+assert.eq(2, keysExamined({a: {$in: [1.1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}, {a: -1, b: -1}));
+
+t.save({a: 1, b: 1.5});
+assert.eq(4, keysExamined({a: {$in: [1, 2]}, b: {$gt: 1, $lt: 2}}, {a: 1, b: 1}), "F");
diff --git a/jstests/core/indexl.js b/jstests/core/indexl.js
index 3a5d0275887..c83638607d4 100644
--- a/jstests/core/indexl.js
+++ b/jstests/core/indexl.js
@@ -3,25 +3,25 @@
t = db.jstests_indexl;
function test(t) {
- t.save( {a:[1,2]} );
- assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
- assert.eq( 1, t.count( {a:{$all:[2],$in:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[2],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[1],$all:[2]}} ) );
- assert.eq( 1, t.count( {a:{$all:[1],$in:[2]}} ) );
- t.save({a:[3,4]});
- t.save({a:[2,3]});
- t.save({a:[1,2,3,4]});
- assert.eq( 2, t.count( {a:{$in:[2],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[3],$all:[1,2]}} ) );
- assert.eq( 1, t.count( {a:{$in:[1],$all:[3]}} ) );
- assert.eq( 2, t.count( {a:{$in:[2,3],$all:[1]}} ) );
- assert.eq( 1, t.count( {a:{$in:[4],$all:[2,3]}} ) );
- assert.eq( 3, t.count( {a:{$in:[1,3],$all:[2]}} ) );
+ t.save({a: [1, 2]});
+ assert.eq(1, t.count({a: {$all: [1], $in: [2]}}));
+ assert.eq(1, t.count({a: {$all: [2], $in: [1]}}));
+ assert.eq(1, t.count({a: {$in: [2], $all: [1]}}));
+ assert.eq(1, t.count({a: {$in: [1], $all: [2]}}));
+ assert.eq(1, t.count({a: {$all: [1], $in: [2]}}));
+ t.save({a: [3, 4]});
+ t.save({a: [2, 3]});
+ t.save({a: [1, 2, 3, 4]});
+ assert.eq(2, t.count({a: {$in: [2], $all: [1]}}));
+ assert.eq(1, t.count({a: {$in: [3], $all: [1, 2]}}));
+ assert.eq(1, t.count({a: {$in: [1], $all: [3]}}));
+ assert.eq(2, t.count({a: {$in: [2, 3], $all: [1]}}));
+ assert.eq(1, t.count({a: {$in: [4], $all: [2, 3]}}));
+ assert.eq(3, t.count({a: {$in: [1, 3], $all: [2]}}));
}
t.drop();
test(t);
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
test(t);
\ No newline at end of file
diff --git a/jstests/core/indexm.js b/jstests/core/indexm.js
index 1c6f1c7baac..820bedd7baa 100644
--- a/jstests/core/indexm.js
+++ b/jstests/core/indexm.js
@@ -3,27 +3,22 @@
t = db.jstests_indexm;
t.drop();
-t.save( { a : [ { x : 1 } , { x : 2 } , { x : 3 } , { x : 4 } ] } );
+t.save({a: [{x: 1}, {x: 2}, {x: 3}, {x: 4}]});
-function test(){
- assert.eq( 1, t.count(
- {
- a : { x : 1 } ,
- "$or" : [ { a : { x : 2 } } , { a : { x : 3 } } ]
- }
- ) );
-}
+function test() {
+ assert.eq(1, t.count({a: {x: 1}, "$or": [{a: {x: 2}}, {a: {x: 3}}]}));
+}
// The first find will return a result since there isn't an index.
-test();
+test();
// Now create an index.
-t.ensureIndex({"a":1});
+t.ensureIndex({"a": 1});
test();
// Now create a different index.
t.dropIndexes();
-t.ensureIndex({"a.x":1});
+t.ensureIndex({"a.x": 1});
test();
// Drop the indexes.
diff --git a/jstests/core/indexn.js b/jstests/core/indexn.js
index 66a45d88836..416f0ec31b2 100644
--- a/jstests/core/indexn.js
+++ b/jstests/core/indexn.js
@@ -4,26 +4,26 @@
t = db.jstests_indexn;
t.drop();
-t.save( {a:1,b:[1,2]} );
+t.save({a: 1, b: [1, 2]});
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
// {a:1} is a single key index, so no matches are possible for this query
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0}} ) );
+assert.eq(0, t.count({a: {$gt: 5, $lt: 0}}));
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:2} ) );
+assert.eq(0, t.count({a: {$gt: 5, $lt: 0}, b: 2}));
-assert.eq( 0, t.count( {a:{$gt:5,$lt:0},b:{$gt:0,$lt:5}} ) );
+assert.eq(0, t.count({a: {$gt: 5, $lt: 0}, b: {$gt: 0, $lt: 5}}));
// One clause of an $or is an "impossible match"
-printjson( t.find( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ).explain() );
-assert.eq( 1, t.count( {$or:[{a:{$gt:5,$lt:0}},{a:1}]} ) );
+printjson(t.find({$or: [{a: {$gt: 5, $lt: 0}}, {a: 1}]}).explain());
+assert.eq(1, t.count({$or: [{a: {$gt: 5, $lt: 0}}, {a: 1}]}));
// One clause of an $or is an "impossible match"; original order of the $or
// does not matter.
-printjson( t.find( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ).explain() );
-assert.eq( 1, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}}]} ) );
+printjson(t.find({$or: [{a: 1}, {a: {$gt: 5, $lt: 0}}]}).explain());
+assert.eq(1, t.count({$or: [{a: 1}, {a: {$gt: 5, $lt: 0}}]}));
-t.save( {a:2} );
-assert.eq( 2, t.count( {$or:[{a:1},{a:{$gt:5,$lt:0}},{a:2}]} ) );
+t.save({a: 2});
+assert.eq(2, t.count({$or: [{a: 1}, {a: {$gt: 5, $lt: 0}}, {a: 2}]}));
diff --git a/jstests/core/indexp.js b/jstests/core/indexp.js
index c2a6866fc12..0111f0cca35 100644
--- a/jstests/core/indexp.js
+++ b/jstests/core/indexp.js
@@ -6,18 +6,18 @@
var coll = db.jstests_indexp;
// Empty field checks.
-assert.commandFailed(coll.ensureIndex({ 'a..b': 1 }));
-assert.commandFailed(coll.ensureIndex({ '.a': 1 }));
-assert.commandFailed(coll.ensureIndex({ 'a.': 1 }));
-assert.commandFailed(coll.ensureIndex({ '.': 1 }));
-assert.commandFailed(coll.ensureIndex({ '': 1 }));
-assert.commandWorked(coll.ensureIndex({ 'a.b': 1 }));
+assert.commandFailed(coll.ensureIndex({'a..b': 1}));
+assert.commandFailed(coll.ensureIndex({'.a': 1}));
+assert.commandFailed(coll.ensureIndex({'a.': 1}));
+assert.commandFailed(coll.ensureIndex({'.': 1}));
+assert.commandFailed(coll.ensureIndex({'': 1}));
+assert.commandWorked(coll.ensureIndex({'a.b': 1}));
// '$'-prefixed field checks.
-assert.commandFailed(coll.ensureIndex({ '$a': 1 }));
-assert.commandFailed(coll.ensureIndex({ 'a.$b': 1 }));
-assert.commandFailed(coll.ensureIndex({ '$db': 1 }));
-assert.commandWorked(coll.ensureIndex({ 'a$ap': 1 })); // $ in middle is ok
-assert.commandWorked(coll.ensureIndex({ 'a.$id': 1 })); // $id/$db/$ref are execptions
+assert.commandFailed(coll.ensureIndex({'$a': 1}));
+assert.commandFailed(coll.ensureIndex({'a.$b': 1}));
+assert.commandFailed(coll.ensureIndex({'$db': 1}));
+assert.commandWorked(coll.ensureIndex({'a$ap': 1})); // $ in middle is ok
+assert.commandWorked(coll.ensureIndex({'a.$id': 1}));  // $id/$db/$ref are exceptions
coll.dropIndexes();
diff --git a/jstests/core/indexr.js b/jstests/core/indexr.js
index 1f7b75bbcf7..d242ad87316 100644
--- a/jstests/core/indexr.js
+++ b/jstests/core/indexr.js
@@ -4,32 +4,32 @@ t = db.jstests_indexr;
t.drop();
// Check without indexes.
-t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+t.save({a: [{b: 3, c: 6}, {b: 1, c: 1}]});
+assert.eq(1, t.count({'a.b': {$gt: 2}, 'a.c': {$lt: 4}}));
+assert.eq(1, t.count({a: {b: 3, c: 6}, 'a.c': {$lt: 4}}));
// Check with single key indexes.
t.remove({});
-t.ensureIndex( {'a.b':1,'a.c':1} );
-t.ensureIndex( {a:1,'a.c':1} );
-assert.eq( 0, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 0, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+t.ensureIndex({'a.b': 1, 'a.c': 1});
+t.ensureIndex({a: 1, 'a.c': 1});
+assert.eq(0, t.count({'a.b': {$gt: 2}, 'a.c': {$lt: 4}}));
+assert.eq(0, t.count({a: {b: 3, c: 6}, 'a.c': {$lt: 4}}));
-t.save( { a: { b: 3, c: 3 } } );
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:3 }, 'a.c': { $lt:4 } } ) );
+t.save({a: {b: 3, c: 3}});
+assert.eq(1, t.count({'a.b': {$gt: 2}, 'a.c': {$lt: 4}}));
+assert.eq(1, t.count({a: {b: 3, c: 3}, 'a.c': {$lt: 4}}));
// Check with multikey indexes.
t.remove({});
-t.save( { a: [ { b: 3, c: 6 }, { b: 1, c: 1 } ] } );
+t.save({a: [{b: 3, c: 6}, {b: 1, c: 1}]});
-assert.eq( 1, t.count( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ) );
-assert.eq( 1, t.count( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ) );
+assert.eq(1, t.count({'a.b': {$gt: 2}, 'a.c': {$lt: 4}}));
+assert.eq(1, t.count({a: {b: 3, c: 6}, 'a.c': {$lt: 4}}));
// Check reverse direction.
-assert.eq( 1, t.find( { 'a.b':{ $gt:2 }, 'a.c': { $lt:4 } } ).sort( {'a.b':-1} ).itcount() );
-assert.eq( 1, t.find( { a:{ b:3, c:6 }, 'a.c': { $lt:4 } } ).sort( {a:-1} ).itcount() );
+assert.eq(1, t.find({'a.b': {$gt: 2}, 'a.c': {$lt: 4}}).sort({'a.b': -1}).itcount());
+assert.eq(1, t.find({a: {b: 3, c: 6}, 'a.c': {$lt: 4}}).sort({a: -1}).itcount());
// Check second field is constrained if first is not.
-assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {'a.b':1,'a.c':1} ).itcount() );
-assert.eq( 1, t.find( { 'a.c': { $lt:4 } } ).hint( {a:1,'a.c':1} ).itcount() );
+assert.eq(1, t.find({'a.c': {$lt: 4}}).hint({'a.b': 1, 'a.c': 1}).itcount());
+assert.eq(1, t.find({'a.c': {$lt: 4}}).hint({a: 1, 'a.c': 1}).itcount());
diff --git a/jstests/core/indexs.js b/jstests/core/indexs.js
index 0b7bfe412c4..2fc9724a590 100644
--- a/jstests/core/indexs.js
+++ b/jstests/core/indexs.js
@@ -1,18 +1,19 @@
-// Test index key generation issue with parent and nested fields in same index and array containing subobject SERVER-3005.
+// Test index key generation issue with parent and nested fields in same index and array containing
+// subobject SERVER-3005.
t = db.jstests_indexs;
t.drop();
-t.ensureIndex( {a:1} );
-t.save( { a: [ { b: 3 } ] } );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
+t.ensureIndex({a: 1});
+t.save({a: [{b: 3}]});
+assert.eq(1, t.count({a: {b: 3}}));
t.drop();
-t.ensureIndex( {a:1,'a.b':1} );
-t.save( { a: { b: 3 } } );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
+t.ensureIndex({a: 1, 'a.b': 1});
+t.save({a: {b: 3}});
+assert.eq(1, t.count({a: {b: 3}}));
t.drop();
-t.ensureIndex( {a:1,'a.b':1} );
-t.save( { a: [ { b: 3 } ] } );
-assert.eq( 1, t.count( { a:{ b:3 } } ) );
+t.ensureIndex({a: 1, 'a.b': 1});
+t.save({a: [{b: 3}]});
+assert.eq(1, t.count({a: {b: 3}}));
diff --git a/jstests/core/indext.js b/jstests/core/indext.js
index 134e81acdeb..163a3e4a8a3 100644
--- a/jstests/core/indext.js
+++ b/jstests/core/indext.js
@@ -3,16 +3,16 @@
t = db.jstests_indext;
t.drop();
-t.ensureIndex( {'a.b':1}, {sparse:true} );
-t.save( {a:[]} );
-t.save( {a:1} );
-assert.eq( 0, t.find().hint( {'a.b':1} ).itcount() );
+t.ensureIndex({'a.b': 1}, {sparse: true});
+t.save({a: []});
+t.save({a: 1});
+assert.eq(0, t.find().hint({'a.b': 1}).itcount());
-t.ensureIndex( {'a.b':1,'a.c':1}, {sparse:true} );
-t.save( {a:[]} );
-t.save( {a:1} );
-assert.eq( 0, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
+t.ensureIndex({'a.b': 1, 'a.c': 1}, {sparse: true});
+t.save({a: []});
+t.save({a: 1});
+assert.eq(0, t.find().hint({'a.b': 1, 'a.c': 1}).itcount());
-t.save( {a:[{b:1}]} );
-t.save( {a:1} );
-assert.eq( 1, t.find().hint( {'a.b':1,'a.c':1} ).itcount() );
+t.save({a: [{b: 1}]});
+t.save({a: 1});
+assert.eq(1, t.find().hint({'a.b': 1, 'a.c': 1}).itcount());
diff --git a/jstests/core/indexu.js b/jstests/core/indexu.js
index d1ef13d0468..923356bf79b 100644
--- a/jstests/core/indexu.js
+++ b/jstests/core/indexu.js
@@ -4,105 +4,112 @@
t = db.jstests_indexu;
t.drop();
-var dupDoc = {a:[{'0':1}]}; // There are two 'a.0' fields in this doc.
-var dupDoc2 = {a:[{'1':1},'c']};
-var noDupDoc = {a:[{'1':1}]};
+var dupDoc = {
+ a: [{'0': 1}]
+}; // There are two 'a.0' fields in this doc.
+var dupDoc2 = {
+ a: [{'1': 1}, 'c']
+};
+var noDupDoc = {
+ a: [{'1': 1}]
+};
// Test that we can't index dupDoc.
-assert.writeOK( t.save( dupDoc ));
-assert.commandFailed(t.ensureIndex( {'a.0':1} ));
+assert.writeOK(t.save(dupDoc));
+assert.commandFailed(t.ensureIndex({'a.0': 1}));
t.remove({});
-assert.commandWorked(t.ensureIndex( {'a.0':1} ));
-assert.writeError( t.save( dupDoc ));
+assert.commandWorked(t.ensureIndex({'a.0': 1}));
+assert.writeError(t.save(dupDoc));
// Test that we can't index dupDoc2.
t.drop();
-assert.writeOK(t.save( dupDoc2 ));
-assert.commandFailed(t.ensureIndex( {'a.1':1} ));
+assert.writeOK(t.save(dupDoc2));
+assert.commandFailed(t.ensureIndex({'a.1': 1}));
t.remove({});
-assert.commandWorked(t.ensureIndex( {'a.1':1} ));
-assert.writeError(t.save( dupDoc2 ));
+assert.commandWorked(t.ensureIndex({'a.1': 1}));
+assert.writeError(t.save(dupDoc2));
// Test that we can index dupDoc with a different index.
t.drop();
-t.ensureIndex( {'a.b':1} );
-assert.writeOK(t.save( dupDoc ));
+t.ensureIndex({'a.b': 1});
+assert.writeOK(t.save(dupDoc));
// Test number field starting with hyphen.
t.drop();
-t.ensureIndex( {'a.-1':1} );
-assert.writeOK(t.save( {a:[{'-1':1}]} ));
+t.ensureIndex({'a.-1': 1});
+assert.writeOK(t.save({a: [{'-1': 1}]}));
// Test number field starting with zero.
t.drop();
-t.ensureIndex( {'a.00':1} );
-assert.writeOK( t.save( {a:[{'00':1}]} ));
+t.ensureIndex({'a.00': 1});
+assert.writeOK(t.save({a: [{'00': 1}]}));
// Test multiple array indexes
t.drop();
-t.ensureIndex( {'a.0':1,'a.1':1} );
-assert.writeOK( t.save( {a:[{'1':1}]} ));
-assert.writeError( t.save( {a:[{'1':1},4]} ));
+t.ensureIndex({'a.0': 1, 'a.1': 1});
+assert.writeOK(t.save({a: [{'1': 1}]}));
+assert.writeError(t.save({a: [{'1': 1}, 4]}));
// Test that we can index noDupDoc.
t.drop();
-t.save( noDupDoc );
-assert.commandWorked(t.ensureIndex( {'a.0':1} ));
-assert.commandWorked(t.ensureIndex( {'a.1':1} ));
+t.save(noDupDoc);
+assert.commandWorked(t.ensureIndex({'a.0': 1}));
+assert.commandWorked(t.ensureIndex({'a.1': 1}));
t.drop();
-t.ensureIndex( {'a.0':1} );
-t.ensureIndex( {'a.1':1} );
-assert.writeOK(t.save( noDupDoc ));
+t.ensureIndex({'a.0': 1});
+t.ensureIndex({'a.1': 1});
+assert.writeOK(t.save(noDupDoc));
// Test that we can query noDupDoc.
-assert.eq( 1, t.find( {'a.1':1} ).hint( {'a.1':1} ).itcount() );
-assert.eq( 1, t.find( {'a.1':1} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {'a.0':1} ).itcount() );
-assert.eq( 1, t.find( {'a.0':{'1':1}} ).hint( {$natural:1} ).itcount() );
+assert.eq(1, t.find({'a.1': 1}).hint({'a.1': 1}).itcount());
+assert.eq(1, t.find({'a.1': 1}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({'a.0': {'1': 1}}).hint({'a.0': 1}).itcount());
+assert.eq(1, t.find({'a.0': {'1': 1}}).hint({$natural: 1}).itcount());
// Check multiple nested array fields.
t.drop();
-t.save( {a:[[1]]} );
-assert.commandWorked(t.ensureIndex( {'a.0.0':1} ));
-assert.eq( 1, t.find( {'a.0.0':1} ).hint( {$natural:1} ).itcount() );
-assert.eq( 1, t.find( {'a.0.0':1} ).hint( {'a.0.0':1} ).itcount() );
+t.save({a: [[1]]});
+assert.commandWorked(t.ensureIndex({'a.0.0': 1}));
+assert.eq(1, t.find({'a.0.0': 1}).hint({$natural: 1}).itcount());
+assert.eq(1, t.find({'a.0.0': 1}).hint({'a.0.0': 1}).itcount());
-// Check where there is a duplicate for a partially addressed field but not for a fully addressed field.
+// Check where there is a duplicate for a partially addressed field but not for a fully addressed
+// field.
t.drop();
-t.save( {a:[[1],{'0':1}]} );
-assert.commandFailed(t.ensureIndex( {'a.0.0':1} ));
+t.save({a: [[1], {'0': 1}]});
+assert.commandFailed(t.ensureIndex({'a.0.0': 1}));
// Check where there is a duplicate for a fully addressed field.
t.drop();
-assert.writeOK( t.save( {a:[[1],{'0':[1]}]} ));
-assert.commandFailed(t.ensureIndex( {'a.0.0':1} ));
+assert.writeOK(t.save({a: [[1], {'0': [1]}]}));
+assert.commandFailed(t.ensureIndex({'a.0.0': 1}));
// Two ways of addressing parse to an array.
t.drop();
-t.save( {a:[{'0':1}]} );
-assert.commandFailed(t.ensureIndex( {'a.0.0':1} ));
+t.save({a: [{'0': 1}]});
+assert.commandFailed(t.ensureIndex({'a.0.0': 1}));
// Test several key depths - with same arrays being found.
t.drop();
-t.save( {a:[{'0':[{'0':1}]}]} );
-assert.commandFailed(t.ensureIndex( {'a.0.0.0.0.0.0':1} ));
-assert.commandFailed(t.ensureIndex( {'a.0.0.0.0.0':1} ));
-assert.commandFailed(t.ensureIndex( {'a.0.0.0.0':1} ));
-assert.commandFailed(t.ensureIndex( {'a.0.0.0':1} ));
-assert.commandFailed(t.ensureIndex( {'a.0.0':1} ));
-assert.commandFailed(t.ensureIndex( {'a.0':1} ));
-assert.commandWorked(t.ensureIndex( {'a':1} ));
+t.save({a: [{'0': [{'0': 1}]}]});
+assert.commandFailed(t.ensureIndex({'a.0.0.0.0.0.0': 1}));
+assert.commandFailed(t.ensureIndex({'a.0.0.0.0.0': 1}));
+assert.commandFailed(t.ensureIndex({'a.0.0.0.0': 1}));
+assert.commandFailed(t.ensureIndex({'a.0.0.0': 1}));
+assert.commandFailed(t.ensureIndex({'a.0.0': 1}));
+assert.commandFailed(t.ensureIndex({'a.0': 1}));
+assert.commandWorked(t.ensureIndex({'a': 1}));
// Two prefixes extract docs, but one terminates extraction before array.
t.drop();
-t.save( {a:[{'0':{'c':[]}}]} );
-assert.commandFailed(t.ensureIndex( {'a.0.c':1} ));
+t.save({a: [{'0': {'c': []}}]});
+assert.commandFailed(t.ensureIndex({'a.0.c': 1}));
t.drop();
-t.save( {a:[[{'b':1}]]} );
-assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
-t.ensureIndex( {'a.0.b':1} );
-assert.eq( 1, t.find( {'a.0.b':1} ).itcount() );
+t.save({a: [[{'b': 1}]]});
+assert.eq(1, t.find({'a.0.b': 1}).itcount());
+t.ensureIndex({'a.0.b': 1});
+assert.eq(1, t.find({'a.0.b': 1}).itcount());
diff --git a/jstests/core/indexv.js b/jstests/core/indexv.js
index 95074da7333..638de158aa4 100644
--- a/jstests/core/indexv.js
+++ b/jstests/core/indexv.js
@@ -3,16 +3,16 @@
t = db.jstests_indexv;
t.drop();
-t.ensureIndex( {'a.b':1} );
+t.ensureIndex({'a.b': 1});
-t.save( {a:[{},{b:1}]} );
-var e = t.find( {'a.b':null} ).explain("executionStats");
-assert.eq( 1, e.executionStats.nReturned );
-assert.eq( 1, e.executionStats.totalKeysExamined );
+t.save({a: [{}, {b: 1}]});
+var e = t.find({'a.b': null}).explain("executionStats");
+assert.eq(1, e.executionStats.nReturned);
+assert.eq(1, e.executionStats.totalKeysExamined);
t.drop();
-t.ensureIndex( {'a.b.c':1} );
-t.save( {a:[{b:[]},{b:{c:1}}]} );
-var e = t.find( {'a.b.c':null} ).explain("executionStats");
-assert.eq( 0, e.executionStats.nReturned );
-assert.eq( 1, e.executionStats.totalKeysExamined );
+t.ensureIndex({'a.b.c': 1});
+t.save({a: [{b: []}, {b: {c: 1}}]});
+var e = t.find({'a.b.c': null}).explain("executionStats");
+assert.eq(0, e.executionStats.nReturned);
+assert.eq(1, e.executionStats.totalKeysExamined);
diff --git a/jstests/core/insert1.js b/jstests/core/insert1.js
index ff53e2b2bd7..0f4f6977a1a 100644
--- a/jstests/core/insert1.js
+++ b/jstests/core/insert1.js
@@ -1,14 +1,19 @@
t = db.insert1;
t.drop();
-var o = {a:1};
+var o = {
+ a: 1
+};
t.insert(o);
var doc = t.findOne();
assert.eq(1, doc.a);
assert(doc._id != null, tojson(doc));
t.drop();
-o = {a:2, _id:new ObjectId()};
+o = {
+ a: 2,
+ _id: new ObjectId()
+};
var id = o._id;
t.insert(o);
doc = t.findOne();
@@ -16,7 +21,10 @@ assert.eq(2, doc.a);
assert.eq(id, doc._id);
t.drop();
-o = {a:3, _id:"asdf"};
+o = {
+ a: 3,
+ _id: "asdf"
+};
id = o._id;
t.insert(o);
doc = t.findOne();
@@ -24,7 +32,10 @@ assert.eq(3, doc.a);
assert.eq(id, doc._id);
t.drop();
-o = {a:4, _id:null};
+o = {
+ a: 4,
+ _id: null
+};
t.insert(o);
doc = t.findOne();
assert.eq(4, doc.a);
@@ -33,8 +44,10 @@ assert.eq(null, doc._id, tojson(doc));
t.drop();
var toInsert = [];
var count = 100 * 1000;
-for (i = 0; i < count; ++i) { toInsert.push({_id: i, a: 5}); }
+for (i = 0; i < count; ++i) {
+ toInsert.push({_id: i, a: 5});
+}
assert.writeOK(t.insert(toInsert));
-doc = t.findOne({_id:1});
+doc = t.findOne({_id: 1});
assert.eq(5, doc.a);
assert.eq(count, t.count(), "bad count");
diff --git a/jstests/core/insert2.js b/jstests/core/insert2.js
index 4d5de35bb36..f01fd153d0c 100644
--- a/jstests/core/insert2.js
+++ b/jstests/core/insert2.js
@@ -7,8 +7,8 @@ conn.forceWriteMode(db.getMongo().writeMode());
t = conn.getDB(db.getName()).insert2;
t.drop();
-assert.isnull( t.findOne() , "A" );
-assert.writeError(t.insert( { z : 1 , $inc : { x : 1 } } , 0, true ));
-assert.isnull( t.findOne() , "B" );
+assert.isnull(t.findOne(), "A");
+assert.writeError(t.insert({z: 1, $inc: {x: 1}}, 0, true));
+assert.isnull(t.findOne(), "B");
// Collection should not exist
-assert.commandFailed( t.stats() );
+assert.commandFailed(t.stats());
diff --git a/jstests/core/insert_id_undefined.js b/jstests/core/insert_id_undefined.js
index d6b9008fbdf..874d6c0228c 100644
--- a/jstests/core/insert_id_undefined.js
+++ b/jstests/core/insert_id_undefined.js
@@ -1,7 +1,7 @@
// ensure a document with _id undefined cannot be saved
t = db.insert_id_undefined;
t.drop();
-t.insert({_id:undefined});
+t.insert({_id: undefined});
assert.eq(t.count(), 0);
// Make sure the collection was not created
-assert.commandFailed( t.stats() );
+assert.commandFailed(t.stats());
diff --git a/jstests/core/insert_illegal_doc.js b/jstests/core/insert_illegal_doc.js
index 0d92653d3a0..97edeba4f17 100644
--- a/jstests/core/insert_illegal_doc.js
+++ b/jstests/core/insert_illegal_doc.js
@@ -20,7 +20,7 @@ assert.eq(0, coll.find().itcount(), "should not be a doc");
// test update
res = coll.insert({_id: 1});
assert.writeOK(res, "insert failed");
-res = coll.update({_id: 1}, {$set : { a : [1, 2, 3], b: [4, 5, 6]}});
+res = coll.update({_id: 1}, {$set: {a: [1, 2, 3], b: [4, 5, 6]}});
assert.writeError(res);
assert.eq(res.getWriteError().code, 10088);
assert.eq(undefined, coll.findOne().a, "update should have failed");
diff --git a/jstests/core/insert_long_index_key.js b/jstests/core/insert_long_index_key.js
index 6379c36fb4a..934b51b0369 100644
--- a/jstests/core/insert_long_index_key.js
+++ b/jstests/core/insert_long_index_key.js
@@ -2,9 +2,9 @@ t = db.insert_long_index_key;
t.drop();
var s = new Array(2000).toString();
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-t.insert({ x: 1 });
-t.insert({ x: s });
+t.insert({x: 1});
+t.insert({x: s});
-assert.eq( 1, t.count() );
+assert.eq(1, t.count());
diff --git a/jstests/core/invalid_db_name.js b/jstests/core/invalid_db_name.js
index 58cabab7c00..4b9fb7ba895 100644
--- a/jstests/core/invalid_db_name.js
+++ b/jstests/core/invalid_db_name.js
@@ -5,20 +5,21 @@
// This is a hack to bypass invalid database name checking by the DB constructor
invalidDB._name = "Invalid DB Name";
- var doWrite = function() { return invalidDB.coll.insert({x: 1}); };
+ var doWrite = function() {
+ return invalidDB.coll.insert({x: 1});
+ };
// This will return a $err style error message if we use OP_INSERT, but a
// {ok: 0, errmsg: "...", code: ...} style response if we use write commands.
if (db.getMongo().writeMode() == "compatibility") {
assert.throws(doWrite);
- }
- else {
+ } else {
assert.writeError(doWrite());
}
// Ensure that no database was created
- var dbList = db.getSiblingDB('admin').runCommand({ listDatabases : 1 }).databases;
- dbList.forEach(function (dbInfo) {
+ var dbList = db.getSiblingDB('admin').runCommand({listDatabases: 1}).databases;
+ dbList.forEach(function(dbInfo) {
assert.neq('Invalid DB Name', dbInfo.name, 'database with invalid name was created');
});
}());
diff --git a/jstests/core/ismaster.js b/jstests/core/ismaster.js
index cae8c848044..d920e20383b 100644
--- a/jstests/core/ismaster.js
+++ b/jstests/core/ismaster.js
@@ -1,23 +1,32 @@
var res = db.isMaster();
// check that the fields that should be there are there and have proper values
-assert( res.maxBsonObjectSize &&
- isNumber(res.maxBsonObjectSize) &&
- res.maxBsonObjectSize > 0, "maxBsonObjectSize possibly missing:" + tojson(res));
-assert( res.maxMessageSizeBytes &&
- isNumber(res.maxMessageSizeBytes) &&
- res.maxBsonObjectSize > 0, "maxMessageSizeBytes possibly missing:" + tojson(res));
-assert( res.maxWriteBatchSize &&
- isNumber(res.maxWriteBatchSize) &&
- res.maxWriteBatchSize > 0, "maxWriteBatchSize possibly missing:" + tojson(res));
+assert(res.maxBsonObjectSize && isNumber(res.maxBsonObjectSize) && res.maxBsonObjectSize > 0,
+ "maxBsonObjectSize possibly missing:" + tojson(res));
+assert(res.maxMessageSizeBytes && isNumber(res.maxMessageSizeBytes) && res.maxMessageSizeBytes > 0,
+ "maxMessageSizeBytes possibly missing:" + tojson(res));
+assert(res.maxWriteBatchSize && isNumber(res.maxWriteBatchSize) && res.maxWriteBatchSize > 0,
+ "maxWriteBatchSize possibly missing:" + tojson(res));
assert(res.ismaster, "ismaster missing or false:" + tojson(res));
assert(res.localTime, "localTime possibly missing:" + tojson(res));
-
if (!testingReplication) {
var badFields = [];
- var unwantedReplSetFields = ["setName", "setVersion", "secondary", "hosts", "passives",
- "arbiters", "primary", "aribterOnly", "passive",
- "slaveDelay", "hidden", "tags", "buildIndexes", "me"];
+ var unwantedReplSetFields = [
+ "setName",
+ "setVersion",
+ "secondary",
+ "hosts",
+ "passives",
+ "arbiters",
+ "primary",
+        "arbiterOnly",
+ "passive",
+ "slaveDelay",
+ "hidden",
+ "tags",
+ "buildIndexes",
+ "me"
+ ];
// check that the fields that shouldn't be there are not there
for (field in res) {
if (!res.hasOwnProperty(field)) {
@@ -27,6 +36,6 @@ if (!testingReplication) {
badFields.push(field);
}
}
- assert(badFields.length === 0, "\nthe result:\n" + tojson(res)
- + "\ncontained fields it shouldn't have: " + badFields);
+ assert(badFields.length === 0,
+ "\nthe result:\n" + tojson(res) + "\ncontained fields it shouldn't have: " + badFields);
}
diff --git a/jstests/core/js1.js b/jstests/core/js1.js
index 240d9f82fbb..89910f4bd23 100644
--- a/jstests/core/js1.js
+++ b/jstests/core/js1.js
@@ -1,12 +1,22 @@
t = db.jstests_js1;
-t.remove( {} );
+t.remove({});
-t.save( { z : 1 } );
-t.save( { z : 2 } );
-assert( 2 == t.find().length() );
-assert( 2 == t.find( { $where : function(){ return 1; } } ).length() );
-assert( 1 == t.find( { $where : function(){ return obj.z == 2; } } ).length() );
+t.save({z: 1});
+t.save({z: 2});
+assert(2 == t.find().length());
+assert(2 ==
+ t.find({
+ $where: function() {
+ return 1;
+ }
+ }).length());
+assert(1 ==
+ t.find({
+ $where: function() {
+ return obj.z == 2;
+ }
+ }).length());
assert(t.validate().valid);
diff --git a/jstests/core/js2.js b/jstests/core/js2.js
index 54c919ac1ba..9dfb5c0b091 100644
--- a/jstests/core/js2.js
+++ b/jstests/core/js2.js
@@ -1,23 +1,23 @@
t = db.jstests_js2;
-t.remove( {} );
+t.remove({});
t2 = db.jstests_js2_2;
-t2.remove( {} );
+t2.remove({});
-assert.eq( 0 , t2.find().length() , "A" );
+assert.eq(0, t2.find().length(), "A");
-t.save( { z : 1 } );
-t.save( { z : 2 } );
-assert.throws( function(){
- t.find( { $where :
- function(){
- db.jstests_js2_2.save( { y : 1 } );
- return 1;
- }
- } ).forEach( printjson );
-} , null , "can't save from $where" );
+t.save({z: 1});
+t.save({z: 2});
+assert.throws(function() {
+ t.find({
+ $where: function() {
+ db.jstests_js2_2.save({y: 1});
+ return 1;
+ }
+ }).forEach(printjson);
+}, null, "can't save from $where");
-assert.eq( 0 , t2.find().length() , "B" );
+assert.eq(0, t2.find().length(), "B");
-assert(t.validate().valid , "E");
+assert(t.validate().valid, "E");
diff --git a/jstests/core/js3.js b/jstests/core/js3.js
index 97ed0bfb834..36d16051135 100644
--- a/jstests/core/js3.js
+++ b/jstests/core/js3.js
@@ -1,76 +1,88 @@
t = db.jstests_js3;
-debug = function( s ){
- //printjson( s );
+debug = function(s) {
+ // printjson( s );
};
-for( z = 0; z < 2; z++ ) {
+for (z = 0; z < 2; z++) {
debug(z);
-
+
t.drop();
-
- if( z > 0 ) {
- t.ensureIndex({_id:1});
- t.ensureIndex({i:1});
+
+ if (z > 0) {
+ t.ensureIndex({_id: 1});
+ t.ensureIndex({i: 1});
}
-
- for( i = 0; i < 1000; i++ )
- t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- assert( 33 == db.dbEval(function() { return 33; } ) );
-
- db.dbEval( function() { db.jstests_js3.save({i:-1, z:"server side"}); } );
-
- assert( t.findOne({i:-1}) );
-
- assert( 2 == t.find( { $where :
- function(){
- return obj.i == 7 || obj.i == 8;
- }
- } ).length() );
-
-
+
+ for (i = 0; i < 1000; i++)
+ t.save({
+ i: i,
+ z:
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ });
+
+ assert(33 ==
+ db.dbEval(function() {
+ return 33;
+ }));
+
+ db.dbEval(function() {
+ db.jstests_js3.save({i: -1, z: "server side"});
+ });
+
+ assert(t.findOne({i: -1}));
+
+ assert(2 ==
+ t.find({
+ $where: function() {
+ return obj.i == 7 || obj.i == 8;
+ }
+ }).length());
+
// NPE test
var ok = false;
try {
- var x = t.find( { $where :
- function(){
- asdf.asdf.f.s.s();
- }
- } );
- debug( x.length() );
- debug( tojson( x ) );
- }
- catch(e) {
- ok = true;
+ var x = t.find({
+ $where: function() {
+ asdf.asdf.f.s.s();
+ }
+ });
+ debug(x.length());
+ debug(tojson(x));
+ } catch (e) {
+ ok = true;
}
- debug( ok );
+ debug(ok);
assert(ok);
-
- t.ensureIndex({z:1});
- t.ensureIndex({q:1});
-
- debug( "before indexed find" );
-
- arr = t.find( { $where :
- function(){
- return obj.i == 7 || obj.i == 8;
- }
- } ).toArray();
- debug( arr );
- assert.eq( 2, arr.length );
-
- debug( "after indexed find" );
-
- for( i = 1000; i < 2000; i++ )
- t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
-
- assert( t.find().count() == 2001 );
-
- assert( t.validate().valid );
-
- debug( "done iter" );
+
+ t.ensureIndex({z: 1});
+ t.ensureIndex({q: 1});
+
+ debug("before indexed find");
+
+ arr = t.find({
+ $where: function() {
+ return obj.i == 7 || obj.i == 8;
+ }
+ }).toArray();
+ debug(arr);
+ assert.eq(2, arr.length);
+
+ debug("after indexed find");
+
+ for (i = 1000; i < 2000; i++)
+ t.save({
+ i: i,
+ z:
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ });
+
+ assert(t.find().count() == 2001);
+
+ assert(t.validate().valid);
+
+ debug("done iter");
}
t.drop();
\ No newline at end of file
diff --git a/jstests/core/js4.js b/jstests/core/js4.js
index 3afa4720dd6..51a85182866 100644
--- a/jstests/core/js4.js
+++ b/jstests/core/js4.js
@@ -1,49 +1,50 @@
t = db.jstests_js4;
t.drop();
-real = { a : 1 ,
- b : "abc" ,
- c : /abc/i ,
- d : new Date(111911100111) ,
- e : null ,
- f : true
- };
-
-t.save( real );
-
-assert.eq( "/abc/i" , real.c.toString() , "regex 1" );
-
-var cursor = t.find( { $where :
- function(){
- fullObject;
- assert.eq( 7 , Object.keySet( obj ).length , "A" );
- assert.eq( 1 , obj.a , "B" );
- assert.eq( "abc" , obj.b , "C" );
- assert.eq( "/abc/i" , obj.c.toString() , "D" );
- assert.eq( 111911100111 , obj.d.getTime() , "E" );
- assert( obj.f , "F" );
- assert( ! obj.e , "G" );
-
- return true;
- }
- } );
-assert.eq( 1 , cursor.toArray().length );
-assert.eq( "abc" , cursor[0].b );
+real = {
+ a: 1,
+ b: "abc",
+ c: /abc/i,
+ d: new Date(111911100111),
+ e: null,
+ f: true
+};
+
+t.save(real);
+
+assert.eq("/abc/i", real.c.toString(), "regex 1");
+
+var cursor = t.find({
+ $where: function() {
+ fullObject;
+ assert.eq(7, Object.keySet(obj).length, "A");
+ assert.eq(1, obj.a, "B");
+ assert.eq("abc", obj.b, "C");
+ assert.eq("/abc/i", obj.c.toString(), "D");
+ assert.eq(111911100111, obj.d.getTime(), "E");
+ assert(obj.f, "F");
+ assert(!obj.e, "G");
+
+ return true;
+ }
+});
+assert.eq(1, cursor.toArray().length);
+assert.eq("abc", cursor[0].b);
// ---
t.drop();
-t.save( { a : 2 , b : { c : 7 , d : "d is good" } } );
-var cursor = t.find( { $where :
- function(){
- fullObject;
- assert.eq( 3 , Object.keySet( obj ).length );
- assert.eq( 2 , obj.a );
- assert.eq( 7 , obj.b.c );
- assert.eq( "d is good" , obj.b.d );
- return true;
- }
- } );
-assert.eq( 1 , cursor.toArray().length );
+t.save({a: 2, b: {c: 7, d: "d is good"}});
+var cursor = t.find({
+ $where: function() {
+ fullObject;
+ assert.eq(3, Object.keySet(obj).length);
+ assert.eq(2, obj.a);
+ assert.eq(7, obj.b.c);
+ assert.eq("d is good", obj.b.d);
+ return true;
+ }
+});
+assert.eq(1, cursor.toArray().length);
assert(t.validate().valid);
diff --git a/jstests/core/js5.js b/jstests/core/js5.js
index 8fa45a0afe3..c02b451de42 100644
--- a/jstests/core/js5.js
+++ b/jstests/core/js5.js
@@ -2,9 +2,9 @@
t = db.jstests_js5;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
+t.save({a: 1});
+t.save({a: 2});
-assert.eq( 2 , t.find( { "$where" : "this.a" } ).count() , "A" );
-assert.eq( 0 , t.find( { "$where" : "this.b" } ).count() , "B" );
-assert.eq( 0 , t.find( { "$where" : "this.b > 45" } ).count() , "C" );
+assert.eq(2, t.find({"$where": "this.a"}).count(), "A");
+assert.eq(0, t.find({"$where": "this.b"}).count(), "B");
+assert.eq(0, t.find({"$where": "this.b > 45"}).count(), "C");
diff --git a/jstests/core/js7.js b/jstests/core/js7.js
index d12e207379e..aeaec66ff47 100644
--- a/jstests/core/js7.js
+++ b/jstests/core/js7.js
@@ -1,5 +1,7 @@
t = db.jstests_js7;
t.drop();
-assert.eq( 17 , db.eval( function( foo ){ return foo; } , 17 ) );
-
+assert.eq(17,
+ db.eval(function(foo) {
+ return foo;
+ }, 17));
diff --git a/jstests/core/js8.js b/jstests/core/js8.js
index da2dcc619cd..15b7ff7d7af 100644
--- a/jstests/core/js8.js
+++ b/jstests/core/js8.js
@@ -1,14 +1,35 @@
t = db.jstests_js8;
t.drop();
-t.save( { a : 1 , b : [ 2 , 3 , 4 ] } );
-
-assert.eq( 1 , t.find().length() , "A" );
-assert.eq( 1 , t.find( function(){ return this.a == 1; } ).length() , "B" );
-assert.eq( 1 , t.find( function(){ if ( ! this.b.length ) return true; return this.b.length == 3; } ).length() , "B2" );
-assert.eq( 1 , t.find( function(){ return this.b[0] == 2; } ).length() , "C" );
-assert.eq( 0 , t.find( function(){ return this.b[0] == 3; } ).length() , "D" );
-assert.eq( 1 , t.find( function(){ return this.b[1] == 3; } ).length() , "E" );
+t.save({a: 1, b: [2, 3, 4]});
+assert.eq(1, t.find().length(), "A");
+assert.eq(1,
+ t.find(function() {
+ return this.a == 1;
+ }).length(),
+ "B");
+assert.eq(1,
+ t.find(function() {
+ if (!this.b.length)
+ return true;
+ return this.b.length == 3;
+ }).length(),
+ "B2");
+assert.eq(1,
+ t.find(function() {
+ return this.b[0] == 2;
+ }).length(),
+ "C");
+assert.eq(0,
+ t.find(function() {
+ return this.b[0] == 3;
+ }).length(),
+ "D");
+assert.eq(1,
+ t.find(function() {
+ return this.b[1] == 3;
+ }).length(),
+ "E");
assert(t.validate().valid);
diff --git a/jstests/core/js9.js b/jstests/core/js9.js
index 286adb9a1a4..b29a31afdc4 100644
--- a/jstests/core/js9.js
+++ b/jstests/core/js9.js
@@ -1,24 +1,17 @@
c = db.jstests_js9;
c.drop();
-c.save( { a : 1 } );
-c.save( { a : 2 } );
+c.save({a: 1});
+c.save({a: 2});
+assert.eq(2, c.find().length());
+assert.eq(2, c.find().count());
-assert.eq( 2 , c.find().length() );
-assert.eq( 2 , c.find().count() );
-
-
-assert.eq( 2 ,
- db.eval(
- function(){
- num = 0;
- db.jstests_js9.find().forEach(
- function(z){
- num++;
- }
- );
- return num;
- }
- )
- );
+assert.eq(2,
+ db.eval(function() {
+ num = 0;
+ db.jstests_js9.find().forEach(function(z) {
+ num++;
+ });
+ return num;
+ }));
diff --git a/jstests/core/json1.js b/jstests/core/json1.js
index d4bb8435086..d502a683e06 100644
--- a/jstests/core/json1.js
+++ b/jstests/core/json1.js
@@ -1,28 +1,57 @@
-x = { quotes:"a\"b" , nulls:null };
-eval( "y = " + tojson( x ) );
-assert.eq( tojson( x ) , tojson( y ) , "A" );
-assert.eq( typeof( x.nulls ) , typeof( y.nulls ) , "B" );
+x = {
+ quotes: "a\"b",
+ nulls: null
+};
+eval("y = " + tojson(x));
+assert.eq(tojson(x), tojson(y), "A");
+assert.eq(typeof(x.nulls), typeof(y.nulls), "B");
// each type is parsed properly
-x = {"x" : null, "y" : true, "z" : 123, "w" : "foo", "a": undefined};
-assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo",\n\t"a" : undefined\n}' , "C" );
+x = {
+ "x": null,
+ "y": true,
+ "z": 123,
+ "w": "foo",
+ "a": undefined
+};
+assert.eq(tojson(x, "", false),
+ '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo",\n\t"a" : undefined\n}',
+ "C");
-x = {"x" : [], "y" : {}};
-assert.eq(tojson(x,"",false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}' , "D" );
+x = {
+ "x": [],
+ "y": {}
+};
+assert.eq(tojson(x, "", false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}', "D");
// nested
-x = {"x" : [{"x" : [1,2,[]], "z" : "ok", "y" : [[]]}, {"foo" : "bar"}], "y" : null};
-assert.eq(tojson(x), '{\n\t"x" : [\n\t\t{\n\t\t\t"x" : [\n\t\t\t\t1,\n\t\t\t\t2,\n\t\t\t\t[ ]\n\t\t\t],\n\t\t\t"z" : "ok",\n\t\t\t"y" : [\n\t\t\t\t[ ]\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\t"foo" : "bar"\n\t\t}\n\t],\n\t"y" : null\n}' , "E" );
+x = {
+ "x": [{"x": [1, 2, []], "z": "ok", "y": [[]]}, {"foo": "bar"}],
+ "y": null
+};
+assert.eq(
+ tojson(x),
+ '{\n\t"x" : [\n\t\t{\n\t\t\t"x" : [\n\t\t\t\t1,\n\t\t\t\t2,\n\t\t\t\t[ ]\n\t\t\t],\n\t\t\t"z" : "ok",\n\t\t\t"y" : [\n\t\t\t\t[ ]\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\t"foo" : "bar"\n\t\t}\n\t],\n\t"y" : null\n}',
+ "E");
// special types
-x = {"x" : ObjectId("4ad35a73d2e34eb4fc43579a"), 'z' : /xd?/ig};
-assert.eq(tojson(x,"",false), '{\n\t"x" : ObjectId("4ad35a73d2e34eb4fc43579a"),\n\t"z" : /xd?/gi\n}' , "F" );
+x = {
+ "x": ObjectId("4ad35a73d2e34eb4fc43579a"),
+ 'z': /xd?/ig
+};
+assert.eq(tojson(x, "", false),
+ '{\n\t"x" : ObjectId("4ad35a73d2e34eb4fc43579a"),\n\t"z" : /xd?/gi\n}',
+ "F");
// Timestamp type
-x = {"x" : Timestamp()};
-assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(0, 0)\n}' , "G");
+x = {
+ "x": Timestamp()
+};
+assert.eq(tojson(x, "", false), '{\n\t"x" : Timestamp(0, 0)\n}', "G");
// Timestamp type, second
-x = {"x" : Timestamp(10,2)};
-assert.eq(tojson(x,"",false), '{\n\t"x" : Timestamp(10, 2)\n}' , "H");
+x = {
+ "x": Timestamp(10, 2)
+};
+assert.eq(tojson(x, "", false), '{\n\t"x" : Timestamp(10, 2)\n}', "H");
diff --git a/jstests/core/kill_cursors.js b/jstests/core/kill_cursors.js
index 60dc5e2d3e4..0433e29609b 100644
--- a/jstests/core/kill_cursors.js
+++ b/jstests/core/kill_cursors.js
@@ -14,31 +14,22 @@
}
// killCursors command should fail if the collection name is not a string.
- cmdRes = db.runCommand({
- killCursors: {foo: "bad collection param"},
- cursors: [NumberLong(123), NumberLong(456)]
- });
+ cmdRes = db.runCommand(
+ {killCursors: {foo: "bad collection param"}, cursors: [NumberLong(123), NumberLong(456)]});
assert.commandFailedWithCode(cmdRes, ErrorCodes.FailedToParse);
// killCursors command should fail if the cursors parameter is not an array.
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: {a: NumberLong(123), b: NumberLong(456)}
- });
+ cmdRes = db.runCommand(
+ {killCursors: coll.getName(), cursors: {a: NumberLong(123), b: NumberLong(456)}});
assert.commandFailedWithCode(cmdRes, ErrorCodes.FailedToParse);
// killCursors command should fail if the cursors parameter is an empty array.
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: []
- });
+ cmdRes = db.runCommand({killCursors: coll.getName(), cursors: []});
assert.commandFailedWithCode(cmdRes, ErrorCodes.BadValue);
// killCursors command should report cursors as not found if the collection does not exist.
- cmdRes = db.runCommand({
- killCursors: "non-existent-collection",
- cursors: [NumberLong(123), NumberLong(456)]
- });
+ cmdRes = db.runCommand(
+ {killCursors: "non-existent-collection", cursors: [NumberLong(123), NumberLong(456)]});
assert.commandWorked(cmdRes);
assert.eq(cmdRes.cursorsKilled, []);
assert.eq(cmdRes.cursorsNotFound, [NumberLong(123), NumberLong(456)]);
@@ -46,10 +37,8 @@
assert.eq(cmdRes.cursorsUnknown, []);
// killCursors command should report non-existent cursors as "not found".
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: [NumberLong(123), NumberLong(456)]
- });
+ cmdRes =
+ db.runCommand({killCursors: coll.getName(), cursors: [NumberLong(123), NumberLong(456)]});
assert.commandWorked(cmdRes);
assert.eq(cmdRes.cursorsKilled, []);
assert.eq(cmdRes.cursorsNotFound, [NumberLong(123), NumberLong(456)]);
@@ -62,10 +51,7 @@
cursorId = cmdRes.cursor.id;
assert.neq(cursorId, NumberLong(0));
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: [NumberLong(123), cursorId]
- });
+ cmdRes = db.runCommand({killCursors: coll.getName(), cursors: [NumberLong(123), cursorId]});
assert.commandWorked(cmdRes);
assert.eq(cmdRes.cursorsKilled, [cursorId]);
assert.eq(cmdRes.cursorsNotFound, [NumberLong(123)]);
@@ -78,10 +64,7 @@
cursorId = cmdRes.cursor.id;
assert.neq(cursorId, NumberLong(0));
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: [NumberLong(123), cursorId]
- });
+ cmdRes = db.runCommand({killCursors: coll.getName(), cursors: [NumberLong(123), cursorId]});
assert.commandWorked(cmdRes);
assert.eq(cmdRes.cursorsKilled, [cursorId]);
assert.eq(cmdRes.cursorsNotFound, [NumberLong(123)]);
@@ -95,10 +78,8 @@
var cleanup;
try {
// Enable a failpoint to ensure that the cursor remains pinned.
- assert.commandWorked(db.adminCommand({
- configureFailPoint: failpointName,
- mode: "alwaysOn"
- }));
+ assert.commandWorked(
+ db.adminCommand({configureFailPoint: failpointName, mode: "alwaysOn"}));
cmdRes = db.runCommand({find: coll.getName(), batchSize: 2});
assert.commandWorked(cmdRes);
@@ -110,8 +91,8 @@
var isMongos = (cmdRes.msg === "isdbgrid");
// Pin the cursor during a getMore.
- var code = 'db.runCommand({getMore: ' + cursorId.toString() +
- ', collection: "' + coll.getName() + '"});';
+ var code = 'db.runCommand({getMore: ' + cursorId.toString() + ', collection: "' +
+ coll.getName() + '"});';
cleanup = startParallelShell(code);
// Sleep to make it more likely that the cursor will be pinned.
@@ -122,10 +103,7 @@
//
// Currently, pinned cursors that are targeted by a killCursors operation are kept alive on
// mongod but are killed on mongos (see SERVER-21710).
- cmdRes = db.runCommand({
- killCursors: coll.getName(),
- cursors: [NumberLong(123), cursorId]
- });
+ cmdRes = db.runCommand({killCursors: coll.getName(), cursors: [NumberLong(123), cursorId]});
assert.commandWorked(cmdRes);
assert.eq(cmdRes.cursorsNotFound, [NumberLong(123)]);
assert.eq(cmdRes.cursorsUnknown, []);
@@ -133,15 +111,13 @@
if (isMongos) {
assert.eq(cmdRes.cursorsKilled, [cursorId]);
assert.eq(cmdRes.cursorsAlive, []);
- }
- else {
+ } else {
// If the cursor has already been pinned it will be left alive; otherwise it will be
// killed.
if (cmdRes.cursorsAlive.length === 1) {
assert.eq(cmdRes.cursorsKilled, []);
assert.eq(cmdRes.cursorsAlive, [cursorId]);
- }
- else {
+ } else {
assert.eq(cmdRes.cursorsKilled, [cursorId]);
assert.eq(cmdRes.cursorsAlive, []);
}
diff --git a/jstests/core/killop.js b/jstests/core/killop.js
index f367d3dec33..66476ec10f4 100644
--- a/jstests/core/killop.js
+++ b/jstests/core/killop.js
@@ -15,7 +15,7 @@
t = db.jstests_killop;
t.drop();
-t.save( {x:1} );
+t.save({x: 1});
/**
* This function filters for the operations that we're looking for, based on their state and
@@ -24,42 +24,53 @@ t.save( {x:1} );
function ops() {
p = db.currentOp().inprog;
ids = [];
- for ( var i in p ) {
- var o = p[ i ];
- // We *can't* check for ns, b/c it's not guaranteed to be there unless the query is active, which
- // it may not be in our polling cycle - particularly b/c we sleep every second in both the query and
+ for (var i in p) {
+ var o = p[i];
+        // We *can't* check for ns, b/c it's not guaranteed to be there unless the query is active,
+        // which it may not be in our polling cycle - particularly b/c we sleep every second in
+        // both the query and
// the assert
- if ( ( o.active || o.waitingForLock ) && o.query && o.query.query && o.query.query.$where && o.query.count == "jstests_killop" ) {
- ids.push( o.opid );
+ if ((o.active || o.waitingForLock) && o.query && o.query.query && o.query.query.$where &&
+ o.query.count == "jstests_killop") {
+ ids.push(o.opid);
}
}
return ids;
}
jsTestLog("Starting long-running $where operation");
-var s1 = startParallelShell(
- "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
-var s2 = startParallelShell(
- "db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )" );
+var s1 =
+ startParallelShell("db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )");
+var s2 =
+ startParallelShell("db.jstests_killop.count( { $where: function() { while( 1 ) { ; } } } )");
jsTestLog("Finding ops in currentOp() output");
o = [];
-assert.soon(function() { o = ops(); return o.length == 2; },
- { toString: function () { return tojson(db.currentOp().inprog); } },
- 10000);
+assert.soon(
+ function() {
+ o = ops();
+ return o.length == 2;
+ },
+ {
+ toString: function() {
+ return tojson(db.currentOp().inprog);
+ }
+ },
+ 10000);
start = new Date();
jsTestLog("Killing ops");
-db.killOp( o[ 0 ] );
-db.killOp( o[ 1 ] );
+db.killOp(o[0]);
+db.killOp(o[1]);
jsTestLog("Waiting for ops to terminate");
[s1, s2].forEach(function(awaitShell) {
var exitCode = awaitShell({checkExitSuccess: false});
- assert.neq(0, exitCode,
- "expected shell to exit abnormally due to JS execution being terminated");
+ assert.neq(
+ 0, exitCode, "expected shell to exit abnormally due to JS execution being terminated");
});
// don't want to pass if timeout killed the js function.
var end = new Date();
var diff = end - start;
-assert.lt( diff, 30000, "Start: " + start + "; end: " + end + "; diff: " + diff);
\ No newline at end of file
+assert.lt(diff, 30000, "Start: " + start + "; end: " + end + "; diff: " + diff);
\ No newline at end of file
diff --git a/jstests/core/list_collections1.js b/jstests/core/list_collections1.js
index aaf47b50099..04acb82290b 100644
--- a/jstests/core/list_collections1.js
+++ b/jstests/core/list_collections1.js
@@ -6,7 +6,7 @@
// listCollections output, but rather tests for existence or absence of particular collections in
// listCollections output.
-(function () {
+(function() {
"use strict";
var mydb = db.getSiblingDB("list_collections1");
@@ -25,7 +25,9 @@
assert.eq('object', typeof(res.cursor));
assert.eq(0, res.cursor.id);
assert.eq('string', typeof(res.cursor.ns));
- collObj = res.cursor.firstBatch.filter(function(c) { return c.name === "foo"; })[0];
+ collObj = res.cursor.firstBatch.filter(function(c) {
+ return c.name === "foo";
+ })[0];
assert(collObj);
assert.eq('object', typeof(collObj.options));
@@ -34,9 +36,8 @@
//
var getListCollectionsCursor = function(options, subsequentBatchSize) {
- return new DBCommandCursor(mydb.getMongo(),
- mydb.runCommand("listCollections", options),
- subsequentBatchSize);
+ return new DBCommandCursor(
+ mydb.getMongo(), mydb.runCommand("listCollections", options), subsequentBatchSize);
};
var cursorCountMatching = function(cursor, pred) {
@@ -45,8 +46,11 @@
assert.commandWorked(mydb.dropDatabase());
assert.commandWorked(mydb.createCollection("foo"));
- assert.eq(1, cursorCountMatching(getListCollectionsCursor(),
- function(c) { return c.name === "foo"; }));
+ assert.eq(1,
+ cursorCountMatching(getListCollectionsCursor(),
+ function(c) {
+ return c.name === "foo";
+ }));
//
// Test that the collection metadata object is returned correctly.
@@ -55,12 +59,16 @@
assert.commandWorked(mydb.dropDatabase());
assert.commandWorked(mydb.createCollection("foo"));
assert.commandWorked(mydb.createCollection("bar", {temp: true}));
- assert.eq(1, cursorCountMatching(getListCollectionsCursor(),
- function(c) { return c.name === "foo" &&
- c.options.temp === undefined; }));
- assert.eq(1, cursorCountMatching(getListCollectionsCursor(),
- function(c) { return c.name === "bar" &&
- c.options.temp === true; }));
+ assert.eq(1,
+ cursorCountMatching(getListCollectionsCursor(),
+ function(c) {
+ return c.name === "foo" && c.options.temp === undefined;
+ }));
+ assert.eq(1,
+ cursorCountMatching(getListCollectionsCursor(),
+ function(c) {
+ return c.name === "bar" && c.options.temp === true;
+ }));
//
// Test basic usage of "filter" option.
@@ -69,23 +77,29 @@
assert.commandWorked(mydb.dropDatabase());
assert.commandWorked(mydb.createCollection("foo"));
assert.commandWorked(mydb.createCollection("bar", {temp: true}));
- assert.eq(2, cursorCountMatching(getListCollectionsCursor({filter: {}}),
- function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(getListCollectionsCursor({filter: {}}),
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
assert.eq(2, getListCollectionsCursor({filter: {name: {$in: ["foo", "bar"]}}}).itcount());
assert.eq(1, getListCollectionsCursor({filter: {name: /^foo$/}}).itcount());
assert.eq(1, getListCollectionsCursor({filter: {"options.temp": true}}).itcount());
mydb.foo.drop();
- assert.eq(1, cursorCountMatching(getListCollectionsCursor({filter: {}}),
- function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(1,
+ cursorCountMatching(getListCollectionsCursor({filter: {}}),
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
assert.eq(1, getListCollectionsCursor({filter: {name: {$in: ["foo", "bar"]}}}).itcount());
assert.eq(0, getListCollectionsCursor({filter: {name: /^foo$/}}).itcount());
assert.eq(1, getListCollectionsCursor({filter: {"options.temp": true}}).itcount());
mydb.bar.drop();
- assert.eq(0, cursorCountMatching(getListCollectionsCursor({filter: {}}),
- function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(0,
+ cursorCountMatching(getListCollectionsCursor({filter: {}}),
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
assert.eq(0, getListCollectionsCursor({filter: {name: {$in: ["foo", "bar"]}}}).itcount());
assert.eq(0, getListCollectionsCursor({filter: {name: /^foo$/}}).itcount());
assert.eq(0, getListCollectionsCursor({filter: {"options.temp": true}}).itcount());
@@ -94,10 +108,18 @@
// Test for invalid values of "filter".
//
- assert.throws(function() { getListCollectionsCursor({filter: {$invalid: 1}}); });
- assert.throws(function() { getListCollectionsCursor({filter: 0}); });
- assert.throws(function() { getListCollectionsCursor({filter: 'x'}); });
- assert.throws(function() { getListCollectionsCursor({filter: []}); });
+ assert.throws(function() {
+ getListCollectionsCursor({filter: {$invalid: 1}});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({filter: 0});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({filter: 'x'});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({filter: []});
+ });
//
// Test basic usage of "cursor.batchSize" option.
@@ -108,32 +130,50 @@
assert.commandWorked(mydb.createCollection("bar"));
cursor = getListCollectionsCursor({cursor: {batchSize: 2}});
assert.eq(2, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
cursor = getListCollectionsCursor({cursor: {batchSize: 1}});
assert.eq(1, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
cursor = getListCollectionsCursor({cursor: {batchSize: 0}});
assert.eq(0, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
cursor = getListCollectionsCursor({cursor: {batchSize: NumberInt(2)}});
assert.eq(2, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
cursor = getListCollectionsCursor({cursor: {batchSize: NumberLong(2)}});
assert.eq(2, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
// Test a large batch size, and assert that at least 2 results are returned in the initial
// batch.
cursor = getListCollectionsCursor({cursor: {batchSize: Math.pow(2, 62)}});
assert.lte(2, cursor.objsLeftInBatch());
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
// Ensure that the server accepts an empty object for "cursor". This is equivalent to not
// specifying "cursor" at all.
@@ -141,21 +181,40 @@
// We do not test for objsLeftInBatch() here, since the default batch size for this command
// is not specified.
cursor = getListCollectionsCursor({cursor: {}});
- assert.eq(2, cursorCountMatching(cursor, function(c) { return c.name === "foo" ||
- c.name === "bar"; }));
+ assert.eq(2,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo" || c.name === "bar";
+ }));
//
// Test for invalid values of "cursor" and "cursor.batchSize".
//
- assert.throws(function() { getListCollectionsCursor({cursor: 0}); });
- assert.throws(function() { getListCollectionsCursor({cursor: 'x'}); });
- assert.throws(function() { getListCollectionsCursor({cursor: []}); });
- assert.throws(function() { getListCollectionsCursor({cursor: {foo: 1}}); });
- assert.throws(function() { getListCollectionsCursor({cursor: {batchSize: -1}}); });
- assert.throws(function() { getListCollectionsCursor({cursor: {batchSize: 'x'}}); });
- assert.throws(function() { getListCollectionsCursor({cursor: {batchSize: {}}}); });
- assert.throws(function() { getListCollectionsCursor({cursor: {batchSize: 2, foo: 1}}); });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: 0});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: 'x'});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: []});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: {foo: 1}});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: {batchSize: -1}});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: {batchSize: 'x'}});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: {batchSize: {}}});
+ });
+ assert.throws(function() {
+ getListCollectionsCursor({cursor: {batchSize: 2, foo: 1}});
+ });
//
// Test more than 2 batches of results.
@@ -186,7 +245,11 @@
assert.commandWorked(mydb.dropDatabase());
cursor = getListCollectionsCursor();
- assert.eq(0, cursorCountMatching(cursor, function(c) { return c.name === "foo"; }));
+ assert.eq(0,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo";
+ }));
//
// Test on empty database.
@@ -196,7 +259,11 @@
assert.commandWorked(mydb.createCollection("foo"));
mydb.foo.drop();
cursor = getListCollectionsCursor();
- assert.eq(0, cursorCountMatching(cursor, function(c) { return c.name === "foo"; }));
+ assert.eq(0,
+ cursorCountMatching(cursor,
+ function(c) {
+ return c.name === "foo";
+ }));
//
// Test killCursors against a listCollections cursor.
@@ -211,7 +278,9 @@
res = mydb.runCommand("listCollections", {cursor: {batchSize: 0}});
cursor = new DBCommandCursor(mydb.getMongo(), res, 2);
cursor = null;
- gc(); // Shell will send a killCursors message when cleaning up underlying cursor.
+ gc(); // Shell will send a killCursors message when cleaning up underlying cursor.
cursor = new DBCommandCursor(mydb.getMongo(), res, 2);
- assert.throws(function() { cursor.hasNext(); });
+ assert.throws(function() {
+ cursor.hasNext();
+ });
}());
diff --git a/jstests/core/list_collections_filter.js b/jstests/core/list_collections_filter.js
index 39dd6da235c..4b5c42bbc78 100644
--- a/jstests/core/list_collections_filter.js
+++ b/jstests/core/list_collections_filter.js
@@ -67,16 +67,10 @@
// Filter with $and and $in.
testListCollections({name: {$in: ["lists", /.*_sets$/]}, options: {}},
["lists", "ordered_sets", "unordered_sets"]);
- testListCollections({$and: [
- {name: {$in: ["lists", /.*_sets$/]}},
- {name: "lists"},
- {options: {}},
- ]},
- ["lists"]);
- testListCollections({$and: [
- {name: {$in: ["lists", /.*_sets$/]}},
- {name: "non-existent"},
- {options: {}},
- ]},
- []);
+ testListCollections(
+        {$and: [{name: {$in: ["lists", /.*_sets$/]}}, {name: "lists"}, {options: {}}]},
+ ["lists"]);
+ testListCollections(
+        {$and: [{name: {$in: ["lists", /.*_sets$/]}}, {name: "non-existent"}, {options: {}}]},
+ []);
}());
diff --git a/jstests/core/list_indexes.js b/jstests/core/list_indexes.js
index db3f895bc20..520406be59f 100644
--- a/jstests/core/list_indexes.js
+++ b/jstests/core/list_indexes.js
@@ -1,6 +1,6 @@
// Basic functional tests for the listIndexes command.
-(function () {
+(function() {
"use strict";
var coll = db.list_indexes1;
@@ -27,17 +27,20 @@
//
var getListIndexesCursor = function(coll, options, subsequentBatchSize) {
- return new DBCommandCursor(coll.getDB().getMongo(),
- coll.runCommand("listIndexes", options),
- subsequentBatchSize);
+ return new DBCommandCursor(
+ coll.getDB().getMongo(), coll.runCommand("listIndexes", options), subsequentBatchSize);
};
var cursorGetIndexSpecs = function(cursor) {
- return cursor.toArray().sort(function(a, b) { return a.name > b.name; });
+ return cursor.toArray().sort(function(a, b) {
+ return a.name > b.name;
+ });
};
var cursorGetIndexNames = function(cursor) {
- return cursorGetIndexSpecs(cursor).map(function(spec) { return spec.name; });
+ return cursorGetIndexSpecs(cursor).map(function(spec) {
+ return spec.name;
+ });
};
coll.drop();
@@ -162,7 +165,9 @@
res = coll.runCommand("listIndexes", {cursor: {batchSize: 0}});
cursor = new DBCommandCursor(coll.getDB().getMongo(), res, 2);
cursor = null;
- gc(); // Shell will send a killCursors message when cleaning up underlying cursor.
+ gc(); // Shell will send a killCursors message when cleaning up underlying cursor.
cursor = new DBCommandCursor(coll.getDB().getMongo(), res, 2);
- assert.throws(function() { cursor.hasNext(); });
+ assert.throws(function() {
+ cursor.hasNext();
+ });
}());
diff --git a/jstests/core/list_indexes_invalid.js b/jstests/core/list_indexes_invalid.js
index 5db1077aecb..0e9c5ffa88c 100644
--- a/jstests/core/list_indexes_invalid.js
+++ b/jstests/core/list_indexes_invalid.js
@@ -6,11 +6,27 @@ coll.drop();
assert.commandWorked(coll.getDB().createCollection(coll.getName()));
assert.commandWorked(coll.ensureIndex({a: 1}, {unique: true}));
-assert.throws(function() { getListIndexesCursor(coll, {cursor: 0}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: 'x'}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: []}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: {foo: 1}}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: {batchSize: -1}}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: {batchSize: 'x'}}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: {batchSize: {}}}); });
-assert.throws(function() { getListIndexesCursor(coll, {cursor: {batchSize: 2, foo: 1}}); });
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: 0});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: 'x'});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: []});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: {foo: 1}});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: {batchSize: -1}});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: {batchSize: 'x'}});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: {batchSize: {}}});
+});
+assert.throws(function() {
+ getListIndexesCursor(coll, {cursor: {batchSize: 2, foo: 1}});
+});
diff --git a/jstests/core/loadserverscripts.js b/jstests/core/loadserverscripts.js
index 4288ae973a0..daf87b2475b 100644
--- a/jstests/core/loadserverscripts.js
+++ b/jstests/core/loadserverscripts.js
@@ -15,48 +15,50 @@ x = testdb.system.js.findOne();
assert.isnull(x, "Test for empty collection");
// User functions should not be defined yet
-assert.eq( typeof myfunc, "undefined", "Checking that myfunc() is undefined" );
-assert.eq( typeof myfunc2, "undefined", "Checking that myfunc2() is undefined" );
+assert.eq(typeof myfunc, "undefined", "Checking that myfunc() is undefined");
+assert.eq(typeof myfunc2, "undefined", "Checking that myfunc2() is undefined");
// Insert a function in the context of this process: make sure it's in the collection
-testdb.system.js.insert( { _id: "myfunc", "value": function(){ return "myfunc"; } } );
-testdb.system.js.insert( { _id: "mystring", "value": "var root = this;" } );
-testdb.system.js.insert( { _id: "changeme", "value": false });
+testdb.system.js.insert({
+ _id: "myfunc",
+ "value": function() {
+ return "myfunc";
+ }
+});
+testdb.system.js.insert({_id: "mystring", "value": "var root = this;"});
+testdb.system.js.insert({_id: "changeme", "value": false});
x = testdb.system.js.count();
-assert.eq( x, 3, "Should now be one function in the system.js collection");
+assert.eq(x, 3, "Should now be three documents in the system.js collection");
// Set a global variable that will be over-written
var changeme = true;
// Load that function
testdb.loadServerScripts();
-assert.eq( typeof myfunc, "function", "Checking that myfunc() loaded correctly" );
-assert.eq( typeof mystring, "string", "Checking that mystring round-tripped correctly" );
-assert.eq( changeme, false, "Checking that global var was overwritten" );
+assert.eq(typeof myfunc, "function", "Checking that myfunc() loaded correctly");
+assert.eq(typeof mystring, "string", "Checking that mystring round-tripped correctly");
+assert.eq(changeme, false, "Checking that global var was overwritten");
// Make sure it works
x = myfunc();
assert.eq(x, "myfunc", "Checking that myfunc() returns the correct value");
// Insert value into collection from another process
-var coproc = startParallelShell(
- 'db.getSisterDB("loadserverscripts").system.js.insert' +
- ' ( {_id: "myfunc2", "value": function(){ return "myfunc2"; } } );'
- );
+var coproc =
+ startParallelShell('db.getSisterDB("loadserverscripts").system.js.insert' +
+ ' ( {_id: "myfunc2", "value": function(){ return "myfunc2"; } } );');
// wait for results
coproc();
// Make sure the collection's been updated
x = testdb.system.js.count();
-assert.eq( x, 4, "Should now be two functions in the system.js collection");
-
+assert.eq(x, 4, "Should now be four documents in the system.js collection");
// Load the new functions: test them as above
testdb.loadServerScripts();
-assert.eq( typeof myfunc2, "function", "Checking that myfunc2() loaded correctly" );
+assert.eq(typeof myfunc2, "function", "Checking that myfunc2() loaded correctly");
x = myfunc2();
assert.eq(x, "myfunc2", "Checking that myfunc2() returns the correct value");
jsTest.log("completed test of db.loadServerScripts()");
-
diff --git a/jstests/core/loglong.js b/jstests/core/loglong.js
index 0a8889c2b25..0447d915d10 100644
--- a/jstests/core/loglong.js
+++ b/jstests/core/loglong.js
@@ -4,26 +4,28 @@
t = db.loglong;
t.drop();
-t.insert( { x : 1 } );
+t.insert({x: 1});
n = 0;
-query = { x : [] };
-while ( Object.bsonsize( query ) < 30000 ) {
- query.x.push( n++ );
+query = {
+ x: []
+};
+while (Object.bsonsize(query) < 30000) {
+ query.x.push(n++);
}
-before = db.adminCommand( { setParameter : 1 , logLevel : 1 } );
+before = db.adminCommand({setParameter: 1, logLevel: 1});
-t.findOne( query );
+t.findOne(query);
-x = db.adminCommand( { setParameter : 1 , logLevel : before.was } );
-assert.eq( 1 , x.was , tojson( x ) );
+x = db.adminCommand({setParameter: 1, logLevel: before.was});
+assert.eq(1, x.was, tojson(x));
-log = db.adminCommand( { getLog : "global" } ).log;
+log = db.adminCommand({getLog: "global"}).log;
found = false;
-for ( i=log.length - 1; i>= 0; i-- ) {
- if ( log[i].indexOf( "warning: log line attempted (16kB)" ) >= 0 ) {
+for (i = log.length - 1; i >= 0; i--) {
+ if (log[i].indexOf("warning: log line attempted (16kB)") >= 0) {
found = true;
break;
}
diff --git a/jstests/core/logprocessdetails.js b/jstests/core/logprocessdetails.js
index c53655843e1..1ff4fff1112 100644
--- a/jstests/core/logprocessdetails.js
+++ b/jstests/core/logprocessdetails.js
@@ -7,9 +7,9 @@
* Returns true if regex matches a string in the array
*/
doesLogMatchRegex = function(logArray, regex) {
- for (var i = (logArray.length - 1); i >= 0; i--){
+ for (var i = (logArray.length - 1); i >= 0; i--) {
var regexInLine = regex.exec(logArray[i]);
- if (regexInLine != null){
+ if (regexInLine != null) {
return true;
}
}
@@ -17,18 +17,18 @@ doesLogMatchRegex = function(logArray, regex) {
};
doTest = function() {
- var log = db.adminCommand({ getLog: 'global'});
- //this regex will need to change if output changes
+ var log = db.adminCommand({getLog: 'global'});
+ // this regex will need to change if output changes
var re = new RegExp(".*conn.*options.*");
assert.neq(null, log);
var lineCount = log.totalLinesWritten;
assert.neq(0, lineCount);
- var result = db.adminCommand({ logRotate: 1});
+ var result = db.adminCommand({logRotate: 1});
assert.eq(1, result.ok);
- var log2 = db.adminCommand({ getLog: 'global'});
+ var log2 = db.adminCommand({getLog: 'global'});
assert.neq(null, log2);
assert.gte(log2.totalLinesWritten, lineCount);
diff --git a/jstests/core/long_index_rename.js b/jstests/core/long_index_rename.js
index 27517ac8e5a..df3397bbb46 100644
--- a/jstests/core/long_index_rename.js
+++ b/jstests/core/long_index_rename.js
@@ -7,11 +7,15 @@ t = db.long_index_rename;
t.drop();
for (i = 1; i < 10; i++) {
- t.save({a:i});
+ t.save({a: i});
}
-t.createIndex({a:1}, {name: "aaa"});
-var result = t.createIndex({a:1}, {name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
- "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
-assert( !result.ok );
-assert( result.errmsg.indexOf( "too long" ) >= 0 );
+t.createIndex({a: 1}, {name: "aaa"});
+var result = t.createIndex(
+ {a: 1},
+ {
+ name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ });
+assert(!result.ok);
+assert(result.errmsg.indexOf("too long") >= 0);
diff --git a/jstests/core/map1.js b/jstests/core/map1.js
index 5697e41f32c..ea2dec5db69 100644
--- a/jstests/core/map1.js
+++ b/jstests/core/map1.js
@@ -1,24 +1,22 @@
-function basic1( key , lookup , shouldFail){
+function basic1(key, lookup, shouldFail) {
var m = new Map();
- m.put( key , 17 );
-
- var out = m.get( lookup || key );
+ m.put(key, 17);
- if ( ! shouldFail ){
- assert.eq( 17 , out , "basic1 missing: " + tojson( key ) );
- }
- else {
- assert.isnull( out , "basic1 not missing: " + tojson( key ) );
- }
+ var out = m.get(lookup || key);
+ if (!shouldFail) {
+ assert.eq(17, out, "basic1 missing: " + tojson(key));
+ } else {
+ assert.isnull(out, "basic1 not missing: " + tojson(key));
+ }
}
-basic1( 6 );
-basic1( new Date() );
-basic1( "eliot" );
-basic1( { a : 1 } );
-basic1( { a : 1 , b : 1 } );
-basic1( { a : 1 } , { b : 1 } , true );
-basic1( { a : 1 , b : 1 } , { b : 1 , a : 1 } , true );
-basic1( { a : 1 } , { a : 2 } , true );
+basic1(6);
+basic1(new Date());
+basic1("eliot");
+basic1({a: 1});
+basic1({a: 1, b: 1});
+basic1({a: 1}, {b: 1}, true);
+basic1({a: 1, b: 1}, {b: 1, a: 1}, true);
+basic1({a: 1}, {a: 2}, true);
diff --git a/jstests/core/max_doc_size.js b/jstests/core/max_doc_size.js
index 509d0b4b2ea..03deeafb307 100644
--- a/jstests/core/max_doc_size.js
+++ b/jstests/core/max_doc_size.js
@@ -1,36 +1,32 @@
var maxBsonObjectSize = db.isMaster().maxBsonObjectSize;
-var docOverhead = Object.bsonsize({ _id: new ObjectId(), x: '' });
+var docOverhead = Object.bsonsize({_id: new ObjectId(), x: ''});
var maxStrSize = maxBsonObjectSize - docOverhead;
-
var maxStr = 'a';
-while (maxStr.length < maxStrSize) maxStr += 'a';
+while (maxStr.length < maxStrSize)
+ maxStr += 'a';
var coll = db.max_doc_size;
coll.drop();
-var res = db.runCommand({ insert: coll.getName(),
- documents: [{ _id: new ObjectId(), x: maxStr }] });
+var res = db.runCommand({insert: coll.getName(), documents: [{_id: new ObjectId(), x: maxStr}]});
assert(res.ok);
assert.eq(null, res.writeErrors);
coll.drop();
-res = db.runCommand({ update: coll.getName(),
- ordered: true,
- updates: [{ q: { a: 1 },
- u: { _id: new ObjectId(), x: maxStr },
- upsert: true
- }]});
+res = db.runCommand({
+ update: coll.getName(),
+ ordered: true,
+ updates: [{q: {a: 1}, u: {_id: new ObjectId(), x: maxStr}, upsert: true}]
+});
assert(res.ok);
assert.eq(null, res.writeErrors);
coll.drop();
var id = new ObjectId();
-coll.insert({ _id: id });
-res = db.runCommand({ update: coll.getName(),
- ordered: true,
- updates: [{ q: { _id: id },
- u: { $set: { x: maxStr }}}] });
+coll.insert({_id: id});
+res = db.runCommand(
+ {update: coll.getName(), ordered: true, updates: [{q: {_id: id}, u: {$set: {x: maxStr}}}]});
assert(res.ok);
assert.eq(null, res.writeErrors);
@@ -41,28 +37,26 @@ assert.eq(null, res.writeErrors);
var overBigStr = maxStr + 'a';
coll.drop();
-res = db.runCommand({ insert: coll.getName(),
- documents: [{ _id: new ObjectId(), x: overBigStr }] });
+res = db.runCommand({insert: coll.getName(), documents: [{_id: new ObjectId(), x: overBigStr}]});
assert(res.ok);
assert.neq(null, res.writeErrors);
coll.drop();
-res = db.runCommand({ update: coll.getName(),
- ordered: true,
- updates: [{ q: { a: 1 },
- u: { _id: new ObjectId(), x: overBigStr },
- upsert: true
- }]});
+res = db.runCommand({
+ update: coll.getName(),
+ ordered: true,
+ updates: [{q: {a: 1}, u: {_id: new ObjectId(), x: overBigStr}, upsert: true}]
+});
assert(res.ok);
assert.neq(null, res.writeErrors);
coll.drop();
id = new ObjectId();
-coll.insert({ _id: id });
-res = db.runCommand({ update: coll.getName(),
- ordered: true,
- updates: [{ q: { _id: id },
- u: { $set: { x: overBigStr }}}] });
+coll.insert({_id: id});
+res = db.runCommand({
+ update: coll.getName(),
+ ordered: true,
+ updates: [{q: {_id: id}, u: {$set: {x: overBigStr}}}]
+});
assert(res.ok);
assert.neq(null, res.writeErrors);
-
diff --git a/jstests/core/max_time_ms.js b/jstests/core/max_time_ms.js
index 9c2797ca12b..e70ae8cb0fe 100644
--- a/jstests/core/max_time_ms.js
+++ b/jstests/core/max_time_ms.js
@@ -1,7 +1,7 @@
// Tests query/command option $maxTimeMS.
var t = db.max_time_ms;
-var exceededTimeLimit = 50; // ErrorCodes::ExceededTimeLimit
+var exceededTimeLimit = 50; // ErrorCodes::ExceededTimeLimit
var cursor;
var res;
@@ -10,10 +10,17 @@ var res;
//
t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
+t.insert([{}, {}, {}]);
+cursor = t.find({
+ $where: function() {
+ sleep(100);
+ return true;
+ }
+});
cursor.maxTimeMS(100);
-assert.throws(function() { cursor.itcount(); }, [], "expected query to abort due to time limit");
+assert.throws(function() {
+ cursor.itcount();
+}, [], "expected query to abort due to time limit");
//
// Simple negative test for query: a ~300ms query with a 10s time limit should not hit the time
@@ -21,12 +28,17 @@ assert.throws(function() { cursor.itcount(); }, [], "expected query to abort due
//
t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
-cursor.maxTimeMS(10*1000);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected query to not hit the time limit");
+t.insert([{}, {}, {}]);
+cursor = t.find({
+ $where: function() {
+ sleep(100);
+ return true;
+ }
+});
+cursor.maxTimeMS(10 * 1000);
+assert.doesNotThrow(function() {
+ cursor.itcount();
+}, [], "expected query to not hit the time limit");
//
// Simple positive test for getmore:
@@ -36,22 +48,28 @@ assert.doesNotThrow(function() { cursor.itcount(); },
//
t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(5*1000);
+t.insert([{}, {}, {}]); // fast batch
+t.insert([{slow: true}, {slow: true}, {slow: true}]); // slow batch
+cursor = t.find({
+ $where: function() {
+ if (this.slow) {
+ sleep(5 * 1000);
+ }
+ return true;
}
- return true;
-}});
+});
cursor.batchSize(3);
cursor.maxTimeMS(1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.throws(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to abort due to time limit");
+assert.doesNotThrow(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 1 (query) to not hit the time limit");
+assert.throws(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 2 (getmore) to abort due to time limit");
//
// Simple negative test for getmore:
@@ -61,22 +79,28 @@ assert.throws(function() { cursor.next(); cursor.next(); cursor.next(); },
//
t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{},{},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
+t.insert([{}, {}, {}]); // fast batch
+t.insert([{}, {}, {slow: true}]); // slow batch
+cursor = t.find({
+ $where: function() {
+ if (this.slow) {
+ sleep(2 * 1000);
+ }
+ return true;
}
- return true;
-}});
+});
cursor.batchSize(3);
-cursor.maxTimeMS(10*1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to not hit the time limit");
+cursor.maxTimeMS(10 * 1000);
+assert.doesNotThrow(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 1 (query) to not hit the time limit");
+assert.doesNotThrow(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 2 (getmore) to not hit the time limit");
//
// Many-batch positive test for getmore:
@@ -85,18 +109,22 @@ assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
//
t.drop();
-for (var i=0; i<5; i++) {
- t.insert([{},{},{slow:true}]);
+for (var i = 0; i < 5; i++) {
+ t.insert([{}, {}, {slow: true}]);
}
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
+cursor = t.find({
+ $where: function() {
+ if (this.slow) {
+ sleep(2 * 1000);
+ }
+ return true;
}
- return true;
-}});
+});
cursor.batchSize(3);
-cursor.maxTimeMS(6*1000);
-assert.throws(function() { cursor.itcount(); }, [], "expected find() to abort due to time limit");
+cursor.maxTimeMS(6 * 1000);
+assert.throws(function() {
+ cursor.itcount();
+}, [], "expected find() to abort due to time limit");
//
// Many-batch negative test for getmore:
@@ -105,20 +133,22 @@ assert.throws(function() { cursor.itcount(); }, [], "expected find() to abort du
//
t.drop();
-for (var i=0; i<5; i++) {
- t.insert([{},{},{slow:true}]);
+for (var i = 0; i < 5; i++) {
+ t.insert([{}, {}, {slow: true}]);
}
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
+cursor = t.find({
+ $where: function() {
+ if (this.slow) {
+ sleep(2 * 1000);
+ }
+ return true;
}
- return true;
-}});
+});
cursor.batchSize(3);
-cursor.maxTimeMS(20*1000);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected find() to not hit the time limit");
+cursor.maxTimeMS(20 * 1000);
+assert.doesNotThrow(function() {
+ cursor.itcount();
+}, [], "expected find() to not hit the time limit");
//
// Simple positive test for commands: a ~300ms command with a 100ms time limit should be aborted.
@@ -135,7 +165,7 @@ assert(res.ok == 0 && res.code == exceededTimeLimit,
//
t.drop();
-res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 10*1000});
+res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 10 * 1000});
assert(res.ok == 1,
"expected sleep command to not hit the time limit, ok=" + res.ok + ", code=" + res.code);
@@ -148,45 +178,81 @@ t.insert({});
// Verify lower boundary for acceptable input (0 is acceptable, 1 isn't).
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(0).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(0)).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(0)).itcount(); });
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(0).itcount();
+});
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(NumberInt(0)).itcount();
+});
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(NumberLong(0)).itcount();
+});
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: 0}).ok);
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(0)}).ok);
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(0)}).ok);
-assert.throws.automsg(function() { t.find().maxTimeMS(-1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(-1)).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(-1)).itcount(); });
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(-1).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(NumberInt(-1)).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(NumberLong(-1)).itcount();
+});
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -1}).ok);
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(-1)}).ok);
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(-1)}).ok);
// Verify upper boundary for acceptable input (2^31-1 is acceptable, 2^31 isn't).
-var maxValue = Math.pow(2,31)-1;
+var maxValue = Math.pow(2, 31) - 1;
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(maxValue).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue)).itcount(); });
-assert.doesNotThrow.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue)).itcount(); });
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(maxValue).itcount();
+});
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(NumberInt(maxValue)).itcount();
+});
+assert.doesNotThrow.automsg(function() {
+ t.find().maxTimeMS(NumberLong(maxValue)).itcount();
+});
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue}).ok);
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue)}).ok);
assert.eq(1, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue)}).ok);
-assert.throws.automsg(function() { t.find().maxTimeMS(maxValue+1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberInt(maxValue+1)).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(NumberLong(maxValue+1)).itcount(); });
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue+1}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue+1)}).ok);
-assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue+1)}).ok);
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(maxValue + 1).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(NumberInt(maxValue + 1)).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(NumberLong(maxValue + 1)).itcount();
+});
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: maxValue + 1}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberInt(maxValue + 1)}).ok);
+assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: NumberLong(maxValue + 1)}).ok);
// Verify invalid values are rejected.
-assert.throws.automsg(function() { t.find().maxTimeMS(0.1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(-0.1).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS().itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS("").itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS(true).itcount(); });
-assert.throws.automsg(function() { t.find().maxTimeMS({}).itcount(); });
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(0.1).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(-0.1).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS().itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS("").itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS(true).itcount();
+});
+assert.throws.automsg(function() {
+ t.find().maxTimeMS({}).itcount();
+});
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: 0.1}).ok);
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: -0.1}).ok);
assert.eq(0, t.getDB().runCommand({ping: 1, maxTimeMS: undefined}).ok);
@@ -210,9 +276,9 @@ assert.eq(0, cursor.next().ok);
// TODO: rewrite to use runCommandWithMetadata when we have a shell helper so that
// we can test server side validation.
assert.throws(function() {
- cursor = t.getDB().$cmd.find({ping: 1}).limit(-1).maxTimeMS(0);
- cursor._ensureSpecial();
- cursor.next();
+ cursor = t.getDB().$cmd.find({ping: 1}).limit(-1).maxTimeMS(0);
+ cursor._ensureSpecial();
+ cursor.next();
});
//
@@ -221,77 +287,91 @@ assert.throws(function() {
// maxTimeAlwaysTimeOut positive test for command.
t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-res = t.getDB().runCommand({ping: 1, maxTimeMS: 10*1000});
+assert.eq(
+ 1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "alwaysOn"}).ok);
+res = t.getDB().runCommand({ping: 1, maxTimeMS: 10 * 1000});
assert(res.ok == 0 && res.code == exceededTimeLimit,
- "expected command to trigger maxTimeAlwaysTimeOut fail point, ok=" + res.ok + ", code="
- + res.code);
+ "expected command to trigger maxTimeAlwaysTimeOut fail point, ok=" + res.ok + ", code=" +
+ res.code);
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
// maxTimeNeverTimeOut positive test for command.
t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
+assert.eq(1,
+ t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "alwaysOn"}).ok);
res = t.getDB().adminCommand({sleep: 1, millis: 300, maxTimeMS: 100});
assert(res.ok == 1,
- "expected command to trigger maxTimeNeverTimeOut fail point, ok=" + res.ok + ", code="
- + res.code);
+ "expected command to trigger maxTimeNeverTimeOut fail point, ok=" + res.ok + ", code=" +
+ res.code);
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
// maxTimeAlwaysTimeOut positive test for query.
t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-assert.throws(function() { t.find().maxTimeMS(10*1000).itcount(); },
- [],
- "expected query to trigger maxTimeAlwaysTimeOut fail point");
+assert.eq(
+ 1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "alwaysOn"}).ok);
+assert.throws(function() {
+ t.find().maxTimeMS(10 * 1000).itcount();
+}, [], "expected query to trigger maxTimeAlwaysTimeOut fail point");
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
// maxTimeNeverTimeOut positive test for query.
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
+assert.eq(1,
+ t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "alwaysOn"}).ok);
t.drop();
-t.insert([{},{},{}]);
-cursor = t.find({$where: function() { sleep(100); return true; }});
+t.insert([{}, {}, {}]);
+cursor = t.find({
+ $where: function() {
+ sleep(100);
+ return true;
+ }
+});
cursor.maxTimeMS(100);
-assert.doesNotThrow(function() { cursor.itcount(); },
- [],
- "expected query to trigger maxTimeNeverTimeOut fail point");
+assert.doesNotThrow(function() {
+ cursor.itcount();
+}, [], "expected query to trigger maxTimeNeverTimeOut fail point");
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
// maxTimeAlwaysTimeOut positive test for getmore.
t.drop();
-t.insert([{},{},{}]);
-cursor = t.find().maxTimeMS(10*1000).batchSize(2);
-assert.doesNotThrow.automsg(function() { cursor.next(); cursor.next(); });
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"}).ok);
-assert.throws(function() { cursor.next(); },
- [],
- "expected getmore to trigger maxTimeAlwaysTimeOut fail point");
+t.insert([{}, {}, {}]);
+cursor = t.find().maxTimeMS(10 * 1000).batchSize(2);
+assert.doesNotThrow.automsg(function() {
+ cursor.next();
+ cursor.next();
+});
+assert.eq(
+ 1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "alwaysOn"}).ok);
+assert.throws(function() {
+ cursor.next();
+}, [], "expected getmore to trigger maxTimeAlwaysTimeOut fail point");
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}).ok);
// maxTimeNeverTimeOut positive test for getmore.
t.drop();
-t.insert([{},{},{}]); // fast batch
-t.insert([{slow: true},{slow: true},{slow: true}]); // slow batch
-cursor = t.find({$where: function() {
- if (this.slow) {
- sleep(2*1000);
+t.insert([{}, {}, {}]); // fast batch
+t.insert([{slow: true}, {slow: true}, {slow: true}]); // slow batch
+cursor = t.find({
+ $where: function() {
+ if (this.slow) {
+ sleep(2 * 1000);
+ }
+ return true;
}
- return true;
-}});
+});
cursor.batchSize(3);
-cursor.maxTimeMS(2*1000);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 1 (query) to not hit the time limit");
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
-assert.doesNotThrow(function() { cursor.next(); cursor.next(); cursor.next(); },
- [],
- "expected batch 2 (getmore) to trigger maxTimeNeverTimeOut fail point");
+cursor.maxTimeMS(2 * 1000);
+assert.doesNotThrow(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 1 (query) to not hit the time limit");
+assert.eq(1,
+ t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "alwaysOn"}).ok);
+assert.doesNotThrow(function() {
+ cursor.next();
+ cursor.next();
+ cursor.next();
+}, [], "expected batch 2 (getmore) to trigger maxTimeNeverTimeOut fail point");
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
//
@@ -299,33 +379,36 @@ assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
//
// "aggregate" command.
-res = t.runCommand("aggregate", {pipeline: [], maxTimeMS: 60*1000});
+res = t.runCommand("aggregate", {pipeline: [], maxTimeMS: 60 * 1000});
assert(res.ok == 1,
"expected aggregate with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
// "collMod" command.
-res = t.runCommand("collMod", {usePowerOf2Sizes: true, maxTimeMS: 60*1000});
+res = t.runCommand("collMod", {usePowerOf2Sizes: true, maxTimeMS: 60 * 1000});
assert(res.ok == 1,
"expected collmod with maxtime to succeed, ok=" + res.ok + ", code=" + res.code);
//
// Test maxTimeMS for parallelCollectionScan
//
-res = t.runCommand({parallelCollectionScan: t.getName(), numCursors: 1, maxTimeMS: 60*1000});
+res = t.runCommand({parallelCollectionScan: t.getName(), numCursors: 1, maxTimeMS: 60 * 1000});
assert.commandWorked(res);
-var cursor = new DBCommandCursor( t.getDB().getMongo(), res.cursors[0], 5 );
-assert.commandWorked(t.getDB().adminCommand({
- configureFailPoint: "maxTimeAlwaysTimeOut",
- mode: "alwaysOn"
-}));
-assert.throws(function() { cursor.itcount(); }, [], "expected query to abort due to time limit");
-assert.commandWorked(t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}));
+var cursor = new DBCommandCursor(t.getDB().getMongo(), res.cursors[0], 5);
+assert.commandWorked(
+ t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "alwaysOn"}));
+assert.throws(function() {
+ cursor.itcount();
+}, [], "expected query to abort due to time limit");
+assert.commandWorked(
+ t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", mode: "off"}));
//
// test count shell helper SERVER-13334
//
t.drop();
-assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut",
- mode: "alwaysOn"}).ok);
-assert.doesNotThrow(function() { t.find({}).maxTimeMS(1).count(); });
+assert.eq(1,
+ t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "alwaysOn"}).ok);
+assert.doesNotThrow(function() {
+ t.find({}).maxTimeMS(1).count();
+});
assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeNeverTimeOut", mode: "off"}).ok);
diff --git a/jstests/core/maxscan.js b/jstests/core/maxscan.js
index ab490f51de0..a862ed426a0 100644
--- a/jstests/core/maxscan.js
+++ b/jstests/core/maxscan.js
@@ -3,16 +3,16 @@ t = db.maxscan;
t.drop();
N = 100;
-for ( i=0; i<N; i++ ){
- t.insert( { _id : i , x : i % 10 } );
+for (i = 0; i < N; i++) {
+ t.insert({_id: i, x: i % 10});
}
-assert.eq( N , t.find().itcount() , "A" );
-assert.eq( 50 , t.find().maxScan(50).itcount() , "B" );
+assert.eq(N, t.find().itcount(), "A");
+assert.eq(50, t.find().maxScan(50).itcount(), "B");
-assert.eq( 10 , t.find( { x : 2 } ).itcount() , "C" );
-assert.eq( 5 , t.find( { x : 2 } ).maxScan(50).itcount() , "D" );
+assert.eq(10, t.find({x: 2}).itcount(), "C");
+assert.eq(5, t.find({x: 2}).maxScan(50).itcount(), "D");
t.ensureIndex({x: 1});
-assert.eq( 10, t.find( { x : 2 } ).hint({x:1}).maxScan(N).itcount() , "E" );
-assert.eq( 0, t.find( { x : 2 } ).hint({x:1}).maxScan(1).itcount() , "E" );
+assert.eq(10, t.find({x: 2}).hint({x: 1}).maxScan(N).itcount(), "E");
+assert.eq(0, t.find({x: 2}).hint({x: 1}).maxScan(1).itcount(), "F");
diff --git a/jstests/core/minmax.js b/jstests/core/minmax.js
index d6dc7cc1034..670c7d2f8b2 100644
--- a/jstests/core/minmax.js
+++ b/jstests/core/minmax.js
@@ -1,56 +1,66 @@
// test min / max query parameters
addData = function() {
- t.save( { a: 1, b: 1 } );
- t.save( { a: 1, b: 2 } );
- t.save( { a: 2, b: 1 } );
- t.save( { a: 2, b: 2 } );
+ t.save({a: 1, b: 1});
+ t.save({a: 1, b: 2});
+ t.save({a: 2, b: 1});
+ t.save({a: 2, b: 2});
};
t = db.jstests_minmax;
t.drop();
-t.ensureIndex( { a: 1, b: 1 } );
+t.ensureIndex({a: 1, b: 1});
addData();
-printjson( t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray() );
-assert.eq( 1, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray().length );
-assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1.5 } ).toArray().length );
-assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 2 } ).toArray().length );
+printjson(t.find().min({a: 1, b: 2}).max({a: 2, b: 1}).toArray());
+assert.eq(1, t.find().min({a: 1, b: 2}).max({a: 2, b: 1}).toArray().length);
+assert.eq(2, t.find().min({a: 1, b: 2}).max({a: 2, b: 1.5}).toArray().length);
+assert.eq(2, t.find().min({a: 1, b: 2}).max({a: 2, b: 2}).toArray().length);
// just one bound
-assert.eq( 3, t.find().min( { a: 1, b: 2 } ).toArray().length );
-assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).toArray().length );
-assert.eq( 3, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: 1 } ).toArray().length );
-assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).hint( { a: 1, b: 1 } ).toArray().length );
+assert.eq(3, t.find().min({a: 1, b: 2}).toArray().length);
+assert.eq(3, t.find().max({a: 2, b: 1.5}).toArray().length);
+assert.eq(3, t.find().min({a: 1, b: 2}).hint({a: 1, b: 1}).toArray().length);
+assert.eq(3, t.find().max({a: 2, b: 1.5}).hint({a: 1, b: 1}).toArray().length);
t.drop();
-t.ensureIndex( { a: 1, b: -1 } );
+t.ensureIndex({a: 1, b: -1});
addData();
-assert.eq( 4, t.find().min( { a: 1, b: 2 } ).toArray().length );
-assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).toArray().length );
-assert.eq( 1, t.find().min( { a: 2, b: 1 } ).toArray().length );
-assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).toArray().length );
-assert.eq( 4, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 1, t.find().min( { a: 2, b: 1 } ).hint( { a: 1, b: -1 } ).toArray().length );
-assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq(4, t.find().min({a: 1, b: 2}).toArray().length);
+assert.eq(4, t.find().max({a: 2, b: 0.5}).toArray().length);
+assert.eq(1, t.find().min({a: 2, b: 1}).toArray().length);
+assert.eq(1, t.find().max({a: 1, b: 1.5}).toArray().length);
+assert.eq(4, t.find().min({a: 1, b: 2}).hint({a: 1, b: -1}).toArray().length);
+assert.eq(4, t.find().max({a: 2, b: 0.5}).hint({a: 1, b: -1}).toArray().length);
+assert.eq(1, t.find().min({a: 2, b: 1}).hint({a: 1, b: -1}).toArray().length);
+assert.eq(1, t.find().max({a: 1, b: 1.5}).hint({a: 1, b: -1}).toArray().length);
// hint doesn't match
-assert.throws( function() { t.find().min( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray(); } );
-assert.throws( function() { t.find().min( { a: 1, b: 1 } ).max( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray(); } );
-assert.throws( function() { t.find().min( { b: 1 } ).max( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray(); } );
-assert.throws( function() { t.find().min( { a: 1 } ).hint( { $natural: 1 } ).toArray(); } );
-assert.throws( function() { t.find().max( { a: 1 } ).hint( { $natural: 1 } ).toArray(); } );
+assert.throws(function() {
+ t.find().min({a: 1}).hint({a: 1, b: -1}).toArray();
+});
+assert.throws(function() {
+ t.find().min({a: 1, b: 1}).max({a: 1}).hint({a: 1, b: -1}).toArray();
+});
+assert.throws(function() {
+ t.find().min({b: 1}).max({a: 1, b: 2}).hint({a: 1, b: -1}).toArray();
+});
+assert.throws(function() {
+ t.find().min({a: 1}).hint({$natural: 1}).toArray();
+});
+assert.throws(function() {
+ t.find().max({a: 1}).hint({$natural: 1}).toArray();
+});
// Reverse direction scan of the a:1 index between a:6 (inclusive) and a:3 (exclusive).
t.drop();
-t.ensureIndex( { a:1 } );
-for( i = 0; i < 10; ++i ) {
- t.save( { _id:i, a:i } );
+t.ensureIndex({a: 1});
+for (i = 0; i < 10; ++i) {
+ t.save({_id: i, a: i});
}
-if ( 0 ) { // SERVER-3766
-reverseResult = t.find().min( { a:6 } ).max( { a:3 } ).sort( { a:-1 } ).hint( { a:1 } ).toArray();
-assert.eq( [ { _id:6, a:6 }, { _id:5, a:5 }, { _id:4, a:4 } ], reverseResult );
+if (0) { // SERVER-3766
+ reverseResult = t.find().min({a: 6}).max({a: 3}).sort({a: -1}).hint({a: 1}).toArray();
+ assert.eq([{_id: 6, a: 6}, {_id: 5, a: 5}, {_id: 4, a: 4}], reverseResult);
}
//
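A minimal sketch of the min()/max() cursor bounds this test exercises, assuming a mongo shell with a db handle and a hypothetical collection name; min() is an inclusive lower bound, max() an exclusive upper bound, and both must match the key pattern of an index on the collection:

    var c = db.minmax_sketch;  // hypothetical collection
    c.drop();
    c.ensureIndex({a: 1});
    c.insert({a: 1});
    c.insert({a: 2});
    c.insert({a: 3});
    // Returns a:1 and a:2 only; the max() bound {a: 3} is excluded.
    printjson(c.find().min({a: 1}).max({a: 3}).hint({a: 1}).toArray());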
diff --git a/jstests/core/minmax_edge.js b/jstests/core/minmax_edge.js
index a22367cc2a9..abd39724a80 100644
--- a/jstests/core/minmax_edge.js
+++ b/jstests/core/minmax_edge.js
@@ -10,15 +10,15 @@ var t = db.minmax_edge;
* Results is the cursor toArray, expectedIds is a list of _ids
*/
function verifyResultIds(results, expectedIds) {
- //check they are the same length
+ // check they are the same length
assert.eq(results.length, expectedIds.length);
- function compare(a,b) {
- if (a._id < b._id)
- return -1;
- if (a._id > b._id)
- return 1;
- return 0;
+ function compare(a, b) {
+ if (a._id < b._id)
+ return -1;
+ if (a._id > b._id)
+ return 1;
+ return 0;
}
results.sort(compare);
@@ -62,22 +62,22 @@ reset(t);
assert.commandWorked(t.ensureIndex({a: 1}));
verifyMin({a: Infinity}, []);
-verifyMax({a: Infinity}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: -Infinity}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: -Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: -Infinity}, []);
// NaN < all ints.
-verifyMin({a: NaN}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: NaN}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: NaN}, []);
// {a: 1} > all ints.
verifyMin({a: {a: 1}}, []);
-verifyMax({a: {a: 1}}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: {a: 1}}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
// 'a' > all ints.
verifyMin({a: 'a'}, []);
-verifyMax({a: 'a'}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: 'a'}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyResultIds(t.find().min({a: 4}).max({a: 4}).toArray(), []);
@@ -86,64 +86,64 @@ reset(t);
assert.commandWorked(t.ensureIndex({a: 1, b: -1}));
// Same as single-key index assertions, with b field present.
-verifyMin({a: NaN, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: NaN, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: NaN, b: 1}, []);
verifyMin({a: Infinity, b: 1}, []);
-verifyMax({a: Infinity, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: Infinity, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: -Infinity, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: -Infinity, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: -Infinity, b: 1}, []);
verifyMin({a: {a: 1}, b: 1}, []);
-verifyMax({a: {a: 1}, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: {a: 1}, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMin({a: 'a', b: 1}, []);
-verifyMax({a: 'a', b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: 'a', b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyResultIds(t.find().min({a: 4, b: 1}).max({a: 4, b: 1}).toArray(), []);
// Edge cases on b values
-verifyMin({a: 1, b: Infinity}, [0,1,2,3,4,5,6,7,8]);
-verifyMin({a: 2, b: Infinity}, [3,4,5,6,7,8]);
-verifyMin({a: 3, b: Infinity}, [6,7,8]);
+verifyMin({a: 1, b: Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
+verifyMin({a: 2, b: Infinity}, [3, 4, 5, 6, 7, 8]);
+verifyMin({a: 3, b: Infinity}, [6, 7, 8]);
verifyMax({a: 1, b: Infinity}, []);
-verifyMax({a: 2, b: Infinity}, [0,1,2]);
-verifyMax({a: 3, b: Infinity}, [0,1,2,3,4,5]);
+verifyMax({a: 2, b: Infinity}, [0, 1, 2]);
+verifyMax({a: 3, b: Infinity}, [0, 1, 2, 3, 4, 5]);
-verifyMin({a: 1, b: -Infinity}, [3,4,5,6,7,8]);
-verifyMin({a: 2, b: -Infinity}, [6,7,8]);
+verifyMin({a: 1, b: -Infinity}, [3, 4, 5, 6, 7, 8]);
+verifyMin({a: 2, b: -Infinity}, [6, 7, 8]);
verifyMin({a: 3, b: -Infinity}, []);
-verifyMax({a: 1, b: -Infinity}, [0,1,2]);
-verifyMax({a: 2, b: -Infinity}, [0,1,2,3,4,5]);
-verifyMax({a: 3, b: -Infinity}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: 1, b: -Infinity}, [0, 1, 2]);
+verifyMax({a: 2, b: -Infinity}, [0, 1, 2, 3, 4, 5]);
+verifyMax({a: 3, b: -Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: 2, b: NaN}, [6,7,8]);
-verifyMax({a: 2, b: NaN}, [0,1,2,3,4,5]);
+verifyMin({a: 2, b: NaN}, [6, 7, 8]);
+verifyMax({a: 2, b: NaN}, [0, 1, 2, 3, 4, 5]);
-verifyMin({a: 2, b: {b: 1}}, [3,4,5,6,7,8]);
-verifyMax({a: 2, b: {b: 1}}, [0,1,2]);
+verifyMin({a: 2, b: {b: 1}}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: {b: 1}}, [0, 1, 2]);
-verifyMin({a: 2, b: 'b'}, [3,4,5,6,7,8]);
-verifyMax({a: 2, b: 'b'}, [0,1,2]);
+verifyMin({a: 2, b: 'b'}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: 'b'}, [0, 1, 2]);
// Test descending index.
reset(t);
t.ensureIndex({a: -1});
verifyMin({a: NaN}, []);
-verifyMax({a: NaN}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: NaN}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: Infinity}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: Infinity}, []);
verifyMin({a: -Infinity}, []);
-verifyMax({a: -Infinity}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: -Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: {a: 1}}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: {a: 1}}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: {a: 1}}, []);
-verifyMin({a: 'a'}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: 'a'}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: 'a'}, []);
verifyResultIds(t.find().min({a: 4}).max({a: 4}).toArray(), []);
@@ -154,43 +154,43 @@ t.ensureIndex({a: -1, b: -1});
// Same as single-key index assertions, with b field present.
verifyMin({a: NaN, b: 1}, []);
-verifyMax({a: NaN, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: NaN, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: Infinity, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: Infinity, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: Infinity, b: 1}, []);
verifyMin({a: -Infinity, b: 1}, []);
-verifyMax({a: -Infinity, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMax({a: -Infinity, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
-verifyMin({a: {a: 1}, b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: {a: 1}, b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: {a: 1}, b: 1}, []);
-verifyMin({a: 'a', b: 1}, [0,1,2,3,4,5,6,7,8]);
+verifyMin({a: 'a', b: 1}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
verifyMax({a: 'a', b: 1}, []);
// Edge cases on b values.
-verifyMin({a: 1, b: Infinity}, [0,1,2]);
-verifyMin({a: 2, b: Infinity}, [0,1,2,3,4,5]);
-verifyMin({a: 3, b: Infinity}, [0,1,2,3,4,5,6,7,8]);
-verifyMax({a: 1, b: Infinity}, [3,4,5,6,7,8]);
-verifyMax({a: 2, b: Infinity}, [6,7,8]);
+verifyMin({a: 1, b: Infinity}, [0, 1, 2]);
+verifyMin({a: 2, b: Infinity}, [0, 1, 2, 3, 4, 5]);
+verifyMin({a: 3, b: Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
+verifyMax({a: 1, b: Infinity}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: Infinity}, [6, 7, 8]);
verifyMax({a: 3, b: Infinity}, []);
verifyMin({a: 1, b: -Infinity}, []);
-verifyMin({a: 2, b: -Infinity}, [0,1,2]);
-verifyMin({a: 3, b: -Infinity}, [0,1,2,3,4,5]);
-verifyMax({a: 1, b: -Infinity}, [0,1,2,3,4,5,6,7,8]);
-verifyMax({a: 2, b: -Infinity}, [3,4,5,6,7,8]);
-verifyMax({a: 3, b: -Infinity}, [6,7,8]);
+verifyMin({a: 2, b: -Infinity}, [0, 1, 2]);
+verifyMin({a: 3, b: -Infinity}, [0, 1, 2, 3, 4, 5]);
+verifyMax({a: 1, b: -Infinity}, [0, 1, 2, 3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: -Infinity}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 3, b: -Infinity}, [6, 7, 8]);
-verifyMin({a: 2, b: NaN}, [0,1,2]);
-verifyMax({a: 2, b: NaN}, [3,4,5,6,7,8]);
+verifyMin({a: 2, b: NaN}, [0, 1, 2]);
+verifyMax({a: 2, b: NaN}, [3, 4, 5, 6, 7, 8]);
-verifyMin({a: 2, b: {b: 1}}, [3,4,5,6,7,8]);
-verifyMax({a: 2, b: {b: 1}}, [0,1,2]);
+verifyMin({a: 2, b: {b: 1}}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: {b: 1}}, [0, 1, 2]);
-verifyMin({a: 2, b: 'b'}, [3,4,5,6,7,8]);
-verifyMax({a: 2, b: 'b'}, [0,1,2]);
+verifyMin({a: 2, b: 'b'}, [3, 4, 5, 6, 7, 8]);
+verifyMax({a: 2, b: 'b'}, [0, 1, 2]);
// Now a couple cases with an extra compound index.
t.drop();
@@ -205,27 +205,27 @@ t.insert({_id: 5, a: 2, b: 'b', c: 2});
t.insert({_id: 6, a: 2, b: 'a', c: 1});
t.insert({_id: 7, a: 2, b: 'a', c: 2});
-verifyMin({a: 1, b: 'a', c: 1}, [2,3,4,5,6,7]);
+verifyMin({a: 1, b: 'a', c: 1}, [2, 3, 4, 5, 6, 7]);
verifyMin({a: 2, b: 'a', c: 2}, [7]);
-verifyMax({a: 1, b: 'a', c: 1}, [0,1]);
-verifyMax({a: 2, b: 'a', c: 2}, [0,1,2,3,4,5,6]);
+verifyMax({a: 1, b: 'a', c: 1}, [0, 1]);
+verifyMax({a: 2, b: 'a', c: 2}, [0, 1, 2, 3, 4, 5, 6]);
verifyMin({a: Infinity, b: 'a', c: 2}, []);
-verifyMax({a: Infinity, b: 'a', c: 2}, [0,1,2,3,4,5,6,7]);
+verifyMax({a: Infinity, b: 'a', c: 2}, [0, 1, 2, 3, 4, 5, 6, 7]);
-verifyMin({a: -Infinity, b: 'a', c: 2}, [0,1,2,3,4,5,6,7]);
+verifyMin({a: -Infinity, b: 'a', c: 2}, [0, 1, 2, 3, 4, 5, 6, 7]);
verifyMax({a: -Infinity, b: 'a', c: 2}, []);
// 'a' > Infinity, actually.
-verifyMin({a: 1, b: Infinity, c: 2}, [4,5,6,7]);
-verifyMax({a: 1, b: Infinity, c: 2}, [0,1,2,3]);
+verifyMin({a: 1, b: Infinity, c: 2}, [4, 5, 6, 7]);
+verifyMax({a: 1, b: Infinity, c: 2}, [0, 1, 2, 3]);
// Also, 'a' > -Infinity.
-verifyMin({a: 1, b: -Infinity, c: 2}, [4,5,6,7]);
-verifyMax({a: 1, b: -Infinity, c: 2}, [0,1,2,3]);
+verifyMin({a: 1, b: -Infinity, c: 2}, [4, 5, 6, 7]);
+verifyMax({a: 1, b: -Infinity, c: 2}, [0, 1, 2, 3]);
-verifyMin({a: 1, b: 'a', c: Infinity}, [4,5,6,7]);
-verifyMax({a: 1, b: 'a', c: Infinity}, [0,1,2,3]);
+verifyMin({a: 1, b: 'a', c: Infinity}, [4, 5, 6, 7]);
+verifyMax({a: 1, b: 'a', c: Infinity}, [0, 1, 2, 3]);
-verifyMin({a: 1, b: 'a', c: -Infinity}, [2,3,4,5,6,7]);
-verifyMax({a: 1, b: 'a', c: -Infinity}, [0,1]);
+verifyMin({a: 1, b: 'a', c: -Infinity}, [2, 3, 4, 5, 6, 7]);
+verifyMax({a: 1, b: 'a', c: -Infinity}, [0, 1]);
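The edge cases above follow from the BSON type order used for index keys: NaN sorts below all other numbers, and strings and objects sort above all numbers. A minimal sketch, assuming a mongo shell db handle and a hypothetical collection name:

    var c = db.minmax_edge_sketch;  // hypothetical collection
    c.drop();
    c.ensureIndex({a: 1});
    c.insert({a: NaN});
    c.insert({a: 5});
    c.insert({a: 's'});
    c.insert({a: {x: 1}});
    // Ascending index order: NaN, 5, 's', {x: 1}
    printjson(c.find().hint({a: 1}).toArray());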
diff --git a/jstests/core/mod1.js b/jstests/core/mod1.js
index 834084e9301..11be6b1b293 100644
--- a/jstests/core/mod1.js
+++ b/jstests/core/mod1.js
@@ -2,26 +2,28 @@
t = db.mod1;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 11 } );
-t.save( { a : 20 } );
-t.save( { a : "asd" } );
-t.save( { a : "adasdas" } );
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 11});
+t.save({a: 20});
+t.save({a: "asd"});
+t.save({a: "adasdas"});
-assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "A1" );
-assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "A2" );
-assert.eq( 0 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain("executionStats")
- .executionStats.totalKeysExamined , "A3" );
+assert.eq(2, t.find("this.a % 10 == 1").itcount(), "A1");
+assert.eq(2, t.find({a: {$mod: [10, 1]}}).itcount(), "A2");
+assert.eq(0,
+ t.find({a: {$mod: [10, 1]}}).explain("executionStats").executionStats.totalKeysExamined,
+ "A3");
-t.ensureIndex( { a : 1 } );
+t.ensureIndex({a: 1});
-assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "B1" );
-assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "B2" );
+assert.eq(2, t.find("this.a % 10 == 1").itcount(), "B1");
+assert.eq(2, t.find({a: {$mod: [10, 1]}}).itcount(), "B2");
-assert.eq( 1 , t.find( "this.a % 10 == 0" ).itcount() , "B3" );
-assert.eq( 1 , t.find( { a : { $mod : [ 10 , 0 ] } } ).itcount() , "B4" );
-assert.eq( 4 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain("executionStats")
- .executionStats.totalKeysExamined, "B5" );
+assert.eq(1, t.find("this.a % 10 == 0").itcount(), "B3");
+assert.eq(1, t.find({a: {$mod: [10, 0]}}).itcount(), "B4");
+assert.eq(4,
+ t.find({a: {$mod: [10, 1]}}).explain("executionStats").executionStats.totalKeysExamined,
+ "B5");
-assert.eq( 1, t.find( { a: { $gt: 5, $mod : [ 10, 1 ] } } ).itcount() );
+assert.eq(1, t.find({a: {$gt: 5, $mod: [10, 1]}}).itcount());
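A minimal sketch of the $mod operator covered by this test, assuming a mongo shell db handle and a hypothetical collection name; $mod matches documents where the field value modulo the divisor equals the remainder, and non-numeric values simply do not match:

    var c = db.mod_sketch;  // hypothetical collection
    c.drop();
    c.insert({a: 1});
    c.insert({a: 11});
    c.insert({a: 'asd'});
    // Matches a:1 and a:11; the string value is skipped, not an error.
    assert.eq(2, c.find({a: {$mod: [10, 1]}}).itcount());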
diff --git a/jstests/core/mr1.js b/jstests/core/mr1.js
index 2d64a1375d7..0225ab3cb62 100644
--- a/jstests/core/mr1.js
+++ b/jstests/core/mr1.js
@@ -2,182 +2,199 @@
t = db.mr1;
t.drop();
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
+t.save({x: 1, tags: ["a", "b"]});
+t.save({x: 2, tags: ["b", "c"]});
+t.save({x: 3, tags: ["c", "a"]});
+t.save({x: 4, tags: ["b", "c"]});
emit = printjson;
-function d( x ){
- printjson( x );
+function d(x) {
+ printjson(x);
}
ks = "_id";
-if ( db.version() == "1.1.1" )
+if (db.version() == "1.1.1")
ks = "key";
-
-m = function(){
- this.tags.forEach(
- function(z){
- emit( z , { count : 1 } );
- }
- );
+m = function() {
+ this.tags.forEach(function(z) {
+ emit(z, {count: 1});
+ });
};
-m2 = function(){
- for ( var i=0; i<this.tags.length; i++ ){
- emit( this.tags[i] , 1 );
+m2 = function() {
+ for (var i = 0; i < this.tags.length; i++) {
+ emit(this.tags[i], 1);
}
};
-
-r = function( key , values ){
+r = function(key, values) {
var total = 0;
- for ( var i=0; i<values.length; i++ ){
+ for (var i = 0; i < values.length; i++) {
total += values[i].count;
}
- return { count : total };
+ return {
+ count: total
+ };
};
-r2 = function( key , values ){
+r2 = function(key, values) {
var total = 0;
- for ( var i=0; i<values.length; i++ ){
+ for (var i = 0; i < values.length; i++) {
total += values[i];
}
return total;
};
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-d( res );
-if ( ks == "_id" ) assert( res.ok , "not ok" );
-assert.eq( 4 , res.counts.input , "A" );
+res = db.runCommand({mapreduce: "mr1", map: m, reduce: r, out: "mr1_out"});
+d(res);
+if (ks == "_id")
+ assert(res.ok, "not ok");
+assert.eq(4, res.counts.input, "A");
x = db[res.result];
-assert.eq( 3 , x.find().count() , "B" );
-x.find().forEach( d );
+assert.eq(3, x.find().count(), "B");
+x.find().forEach(d);
z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-d( z );
-assert.eq( 3 , Object.keySet( z ).length , "C" );
-assert.eq( 2 , z.a , "D" );
-assert.eq( 3 , z.b , "E" );
-assert.eq( 3 , z.c , "F" );
+x.find().forEach(function(a) {
+ z[a[ks]] = a.value.count;
+});
+d(z);
+assert.eq(3, Object.keySet(z).length, "C");
+assert.eq(2, z.a, "D");
+assert.eq(3, z.b, "E");
+assert.eq(3, z.c, "F");
x.drop();
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
-d( res );
-assert.eq( 2 , res.counts.input , "B" );
+res = db.runCommand({mapreduce: "mr1", map: m, reduce: r, query: {x: {"$gt": 2}}, out: "mr1_out"});
+d(res);
+assert.eq(2, res.counts.input, "B");
x = db[res.result];
z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-assert.eq( 1 , z.a , "C1" );
-assert.eq( 1 , z.b , "C2" );
-assert.eq( 2 , z.c , "C3" );
+x.find().forEach(function(a) {
+ z[a[ks]] = a.value.count;
+});
+assert.eq(1, z.a, "C1");
+assert.eq(1, z.b, "C2");
+assert.eq(2, z.c, "C3");
x.drop();
-res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , query : { x : { "$gt" : 2 } } , out : "mr1_out" } );
-d( res );
-assert.eq( 2 , res.counts.input , "B" );
+res =
+ db.runCommand({mapreduce: "mr1", map: m2, reduce: r2, query: {x: {"$gt": 2}}, out: "mr1_out"});
+d(res);
+assert.eq(2, res.counts.input, "B");
x = db[res.result];
z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value; } );
-assert.eq( 1 , z.a , "C1z" );
-assert.eq( 1 , z.b , "C2z" );
-assert.eq( 2 , z.c , "C3z" );
+x.find().forEach(function(a) {
+ z[a[ks]] = a.value;
+});
+assert.eq(1, z.a, "C1z");
+assert.eq(1, z.b, "C2z");
+assert.eq(2, z.c, "C3z");
x.drop();
-res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r , query : { x : { "$gt" : 2 } } } );
-d( res );
-assert.eq( 2 , res.counts.input , "B2" );
-assert.eq( "mr1_foo" , res.result , "B2-c" );
+res = db.runCommand({mapreduce: "mr1", out: "mr1_foo", map: m, reduce: r, query: {x: {"$gt": 2}}});
+d(res);
+assert.eq(2, res.counts.input, "B2");
+assert.eq("mr1_foo", res.result, "B2-c");
x = db[res.result];
z = {};
-x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
-assert.eq( 1 , z.a , "C1a" );
-assert.eq( 1 , z.b , "C2a" );
-assert.eq( 2 , z.c , "C3a" );
+x.find().forEach(function(a) {
+ z[a[ks]] = a.value.count;
+});
+assert.eq(1, z.a, "C1a");
+assert.eq(1, z.b, "C2a");
+assert.eq(2, z.c, "C3a");
x.drop();
-for ( i=5; i<1000; i++ ){
- t.save( { x : i , tags : [ "b" , "d" ] } );
+for (i = 5; i < 1000; i++) {
+ t.save({x: i, tags: ["b", "d"]});
}
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-d( res );
-assert.eq( 999 , res.counts.input , "Z1" );
+res = db.runCommand({mapreduce: "mr1", map: m, reduce: r, out: "mr1_out"});
+d(res);
+assert.eq(999, res.counts.input, "Z1");
x = db[res.result];
-x.find().forEach( d );
-assert.eq( 4 , x.find().count() , "Z2" );
-assert.eq( "a,b,c,d" , x.distinct( ks ) , "Z3" );
+x.find().forEach(d);
+assert.eq(4, x.find().count(), "Z2");
+assert.eq("a,b,c,d", x.distinct(ks), "Z3");
-function getk( k ){
+function getk(k) {
var o = {};
o[ks] = k;
- return x.findOne( o );
+ return x.findOne(o);
}
-assert.eq( 2 , getk( "a" ).value.count , "ZA" );
-assert.eq( 998 , getk( "b" ).value.count , "ZB" );
-assert.eq( 3 , getk( "c" ).value.count , "ZC" );
-assert.eq( 995 , getk( "d" ).value.count , "ZD" );
+assert.eq(2, getk("a").value.count, "ZA");
+assert.eq(998, getk("b").value.count, "ZB");
+assert.eq(3, getk("c").value.count, "ZC");
+assert.eq(995, getk("d").value.count, "ZD");
x.drop();
-if ( true ){
- printjson( db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , verbose : true , out : "mr1_out" } ) );
+if (true) {
+ printjson(db.runCommand({mapreduce: "mr1", map: m, reduce: r, verbose: true, out: "mr1_out"}));
}
-print( "t1: " + Date.timeFunc(
- function(){
- var out = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
- if ( ks == "_id" ) assert( out.ok , "XXX : " + tojson( out ) );
- db[out.result].drop();
- } , 10 ) + " (~500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } , 10 ) );
-
-
+print("t1: " +
+ Date.timeFunc(
+ function() {
+ var out = db.runCommand({mapreduce: "mr1", map: m, reduce: r, out: "mr1_out"});
+ if (ks == "_id")
+ assert(out.ok, "XXX : " + tojson(out));
+ db[out.result].drop();
+ },
+ 10) +
+ " (~500 on 2.8ghz) - itcount: " +
+ Date.timeFunc(function() {
+ db.mr1.find().itcount();
+ }, 10));
// test doesn't exist
-res = db.runCommand( { mapreduce : "lasjdlasjdlasjdjasldjalsdj12e" , map : m , reduce : r , out : "mr1_out" } );
-assert( ! res.ok , "should be not ok" );
+res =
+ db.runCommand({mapreduce: "lasjdlasjdlasjdjasldjalsdj12e", map: m, reduce: r, out: "mr1_out"});
+assert(!res.ok, "should be not ok");
-if ( true ){
+if (true) {
correct = {};
-
- for ( i=0; i<20000; i++ ){
+
+ for (i = 0; i < 20000; i++) {
k = "Z" + i % 10000;
- if ( correct[k] )
+ if (correct[k])
correct[k]++;
else
correct[k] = 1;
- t.save( { x : i , tags : [ k ] } );
+ t.save({x: i, tags: [k]});
}
-
- res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r } );
- d( res );
- print( "t2: " + res.timeMillis + " (~3500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } ) );
+
+ res = db.runCommand({mapreduce: "mr1", out: "mr1_foo", map: m, reduce: r});
+ d(res);
+ print("t2: " + res.timeMillis + " (~3500 on 2.8ghz) - itcount: " +
+ Date.timeFunc(function() {
+ db.mr1.find().itcount();
+ }));
x = db[res.result];
z = {};
- x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
- for ( zz in z ){
- if ( zz.indexOf( "Z" ) == 0 ){
- assert.eq( correct[zz] , z[zz] , "ZZ : " + zz );
+ x.find().forEach(function(a) {
+ z[a[ks]] = a.value.count;
+ });
+ for (zz in z) {
+ if (zz.indexOf("Z") == 0) {
+ assert.eq(correct[zz], z[zz], "ZZ : " + zz);
}
}
x.drop();
-
- res = db.runCommand( { mapreduce : "mr1" , out : "mr1_out" , map : m2 , reduce : r2 } );
- d(res);
- print( "t3: " + res.timeMillis + " (~3500 on 2.8ghz)" );
- res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , out : { inline : true } } );
- print( "t4: " + res.timeMillis );
+ res = db.runCommand({mapreduce: "mr1", out: "mr1_out", map: m2, reduce: r2});
+ d(res);
+ print("t3: " + res.timeMillis + " (~3500 on 2.8ghz)");
+ res = db.runCommand({mapreduce: "mr1", map: m2, reduce: r2, out: {inline: true}});
+ print("t4: " + res.timeMillis);
}
-
-res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , out : "mr1_out" } );
-assert( res.ok , "should be ok" );
+res = db.runCommand({mapreduce: "mr1", map: m, reduce: r, out: "mr1_out"});
+assert(res.ok, "should be ok");
t.drop();
t1 = db.mr1_out;
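A minimal sketch of the mapReduce pattern these tests reformat, assuming a mongo shell db handle and hypothetical names; map emits key/value pairs, reduce folds all values emitted for one key, and out names the result collection:

    var c = db.mr_sketch;  // hypothetical collection
    c.drop();
    c.insert({tags: ['a', 'b']});
    c.insert({tags: ['b']});
    var map = function() {
        this.tags.forEach(function(z) {
            emit(z, 1);
        });
    };
    var reduce = function(key, values) {
        return Array.sum(values);
    };
    var res = c.mapReduce(map, reduce, {out: 'mr_sketch_out'});
    printjson(res.convertToSingleObject());  // {a: 1, b: 2}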
diff --git a/jstests/core/mr2.js b/jstests/core/mr2.js
index 21091c591b1..c13ff447970 100644
--- a/jstests/core/mr2.js
+++ b/jstests/core/mr2.js
@@ -3,83 +3,79 @@
t = db.mr2;
t.drop();
-t.save( { comments : [ { who : "a" , txt : "asdasdasd" } ,
- { who : "b" , txt : "asdasdasdasdasdasdas" } ] } );
+t.save({comments: [{who: "a", txt: "asdasdasd"}, {who: "b", txt: "asdasdasdasdasdasdas"}]});
-t.save( { comments : [ { who : "b" , txt : "asdasdasdaaa" } ,
- { who : "c" , txt : "asdasdasdaasdasdas" } ] } );
+t.save({comments: [{who: "b", txt: "asdasdasdaaa"}, {who: "c", txt: "asdasdasdaasdasdas"}]});
-
-
-function m(){
- for ( var i=0; i<this.comments.length; i++ ){
+function m() {
+ for (var i = 0; i < this.comments.length; i++) {
var c = this.comments[i];
- emit( c.who , { totalSize : c.txt.length , num : 1 } );
+ emit(c.who, {totalSize: c.txt.length, num: 1});
}
}
-function r( who , values ){
- var n = { totalSize : 0 , num : 0 };
- for ( var i=0; i<values.length; i++ ){
+function r(who, values) {
+ var n = {
+ totalSize: 0,
+ num: 0
+ };
+ for (var i = 0; i < values.length; i++) {
n.totalSize += values[i].totalSize;
n.num += values[i].num;
}
return n;
}
-function reformat( r ){
+function reformat(r) {
var x = {};
var cursor;
- if ( r.results )
+ if (r.results)
cursor = r.results;
else
cursor = r.find();
- cursor.forEach(
- function(z){
- x[z._id] = z.value;
- }
- );
+ cursor.forEach(function(z) {
+ x[z._id] = z.value;
+ });
return x;
}
-function f( who , res ){
+function f(who, res) {
res.avg = res.totalSize / res.num;
return res;
}
-res = t.mapReduce( m , r , { finalize : f , out : "mr2_out" } );
-printjson( res );
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "A1" );
-assert.eq( 16 , x.b.avg , "A2" );
-assert.eq( 18 , x.c.avg , "A3" );
+res = t.mapReduce(m, r, {finalize: f, out: "mr2_out"});
+printjson(res);
+x = reformat(res);
+assert.eq(9, x.a.avg, "A1");
+assert.eq(16, x.b.avg, "A2");
+assert.eq(18, x.c.avg, "A3");
res.drop();
// inline only needs to exist - so set it to false to make sure the code is just checking for
// existence
-res = t.mapReduce( m , r , { finalize : f , out : { inline : 0 } } );
-printjson( res );
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "B1" );
-assert.eq( 16 , x.b.avg , "B2" );
-assert.eq( 18 , x.c.avg , "B3" );
+res = t.mapReduce(m, r, {finalize: f, out: {inline: 0}});
+printjson(res);
+x = reformat(res);
+assert.eq(9, x.a.avg, "B1");
+assert.eq(16, x.b.avg, "B2");
+assert.eq(18, x.c.avg, "B3");
res.drop();
-assert( ! ( "result" in res ) , "B4" );
+assert(!("result" in res), "B4");
-res = t.mapReduce( m , r , { finalize : f , out : "mr2_out", jsMode: true } );
-printjson( res );
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "A1" );
-assert.eq( 16 , x.b.avg , "A2" );
-assert.eq( 18 , x.c.avg , "A3" );
+res = t.mapReduce(m, r, {finalize: f, out: "mr2_out", jsMode: true});
+printjson(res);
+x = reformat(res);
+assert.eq(9, x.a.avg, "A1");
+assert.eq(16, x.b.avg, "A2");
+assert.eq(18, x.c.avg, "A3");
res.drop();
-res = t.mapReduce( m , r , { finalize : f , out : { inline : 5 }, jsMode: true } );
-printjson( res );
-x = reformat( res );
-assert.eq( 9 , x.a.avg , "B1" );
-assert.eq( 16 , x.b.avg , "B2" );
-assert.eq( 18 , x.c.avg , "B3" );
+res = t.mapReduce(m, r, {finalize: f, out: {inline: 5}, jsMode: true});
+printjson(res);
+x = reformat(res);
+assert.eq(9, x.a.avg, "B1");
+assert.eq(16, x.b.avg, "B2");
+assert.eq(18, x.c.avg, "B3");
res.drop();
-assert( ! ( "result" in res ) , "B4" );
-
+assert(!("result" in res), "B4");
diff --git a/jstests/core/mr3.js b/jstests/core/mr3.js
index 48b38e430f0..a2cee1f2d8f 100644
--- a/jstests/core/mr3.js
+++ b/jstests/core/mr3.js
@@ -2,46 +2,46 @@
t = db.mr3;
t.drop();
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
+t.save({x: 1, tags: ["a", "b"]});
+t.save({x: 2, tags: ["b", "c"]});
+t.save({x: 3, tags: ["c", "a"]});
+t.save({x: 4, tags: ["b", "c"]});
-m = function( n , x ){
+m = function(n, x) {
x = x || 1;
- this.tags.forEach(
- function(z){
- for ( var i=0; i<x; i++ )
- emit( z , { count : n || 1 } );
- }
- );
+ this.tags.forEach(function(z) {
+ for (var i = 0; i < x; i++)
+ emit(z, {count: n || 1});
+ });
};
-r = function( key , values ){
+r = function(key, values) {
var total = 0;
- for ( var i=0; i<values.length; i++ ){
+ for (var i = 0; i < values.length; i++) {
total += values[i].count;
}
- return { count : total };
+ return {
+ count: total
+ };
};
-res = t.mapReduce( m , r , { out : "mr3_out" } );
+res = t.mapReduce(m, r, {out: "mr3_out"});
z = res.convertToSingleObject();
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 2 , z.a.count , "A2" );
-assert.eq( 3 , z.b.count , "A3" );
-assert.eq( 3 , z.c.count , "A4" );
+assert.eq(3, Object.keySet(z).length, "A1");
+assert.eq(2, z.a.count, "A2");
+assert.eq(3, z.b.count, "A3");
+assert.eq(3, z.c.count, "A4");
res.drop();
-res = t.mapReduce( m , r , { out : "mr3_out" , mapparams : [ 2 , 2 ] } );
+res = t.mapReduce(m, r, {out: "mr3_out", mapparams: [2, 2]});
z = res.convertToSingleObject();
-assert.eq( 3 , Object.keySet( z ).length , "B1" );
-assert.eq( 8 , z.a.count , "B2" );
-assert.eq( 12 , z.b.count , "B3" );
-assert.eq( 12 , z.c.count , "B4" );
+assert.eq(3, Object.keySet(z).length, "B1");
+assert.eq(8, z.a.count, "B2");
+assert.eq(12, z.b.count, "B3");
+assert.eq(12, z.c.count, "B4");
res.drop();
@@ -49,25 +49,28 @@ res.drop();
realm = m;
-m = function(){
- emit( this._id , 1 );
+m = function() {
+ emit(this._id, 1);
};
-res = t.mapReduce( m , r , { out : "mr3_out" } );
+res = t.mapReduce(m, r, {out: "mr3_out"});
res.drop();
-m = function(){
- emit( this._id , this.xzz.a );
+m = function() {
+ emit(this._id, this.xzz.a);
};
before = db.getCollectionNames().length;
-assert.throws( function(){ t.mapReduce( m , r , { out : "mr3_out" } ); } );
-assert.eq( before , db.getCollectionNames().length , "after throw crap" );
-
+assert.throws(function() {
+ t.mapReduce(m, r, {out: "mr3_out"});
+});
+assert.eq(before, db.getCollectionNames().length, "after throw crap");
m = realm;
-r = function( k , v ){
+r = function(k, v) {
return v.x.x.x;
};
before = db.getCollectionNames().length;
-assert.throws( function(){ t.mapReduce( m , r , "mr3_out" ); } );
-assert.eq( before , db.getCollectionNames().length , "after throw crap" );
+assert.throws(function() {
+ t.mapReduce(m, r, "mr3_out");
+});
+assert.eq(before, db.getCollectionNames().length, "after throw crap");
diff --git a/jstests/core/mr4.js b/jstests/core/mr4.js
index 2e989c19bff..ae5e11528af 100644
--- a/jstests/core/mr4.js
+++ b/jstests/core/mr4.js
@@ -2,44 +2,43 @@
t = db.mr4;
t.drop();
-t.save( { x : 1 , tags : [ "a" , "b" ] } );
-t.save( { x : 2 , tags : [ "b" , "c" ] } );
-t.save( { x : 3 , tags : [ "c" , "a" ] } );
-t.save( { x : 4 , tags : [ "b" , "c" ] } );
-
-m = function(){
- this.tags.forEach(
- function(z){
- emit( z , { count : xx } );
- }
- );
+t.save({x: 1, tags: ["a", "b"]});
+t.save({x: 2, tags: ["b", "c"]});
+t.save({x: 3, tags: ["c", "a"]});
+t.save({x: 4, tags: ["b", "c"]});
+
+m = function() {
+ this.tags.forEach(function(z) {
+ emit(z, {count: xx});
+ });
};
-r = function( key , values ){
+r = function(key, values) {
var total = 0;
- for ( var i=0; i<values.length; i++ ){
+ for (var i = 0; i < values.length; i++) {
total += values[i].count;
}
- return { count : total };
+ return {
+ count: total
+ };
};
-res = t.mapReduce( m , r , { out : "mr4_out" , scope : { xx : 1 } } );
+res = t.mapReduce(m, r, {out: "mr4_out", scope: {xx: 1}});
z = res.convertToSingleObject();
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 2 , z.a.count , "A2" );
-assert.eq( 3 , z.b.count , "A3" );
-assert.eq( 3 , z.c.count , "A4" );
+assert.eq(3, Object.keySet(z).length, "A1");
+assert.eq(2, z.a.count, "A2");
+assert.eq(3, z.b.count, "A3");
+assert.eq(3, z.c.count, "A4");
res.drop();
-
-res = t.mapReduce( m , r , { scope : { xx : 2 } , out : "mr4_out" } );
+res = t.mapReduce(m, r, {scope: {xx: 2}, out: "mr4_out"});
z = res.convertToSingleObject();
-assert.eq( 3 , Object.keySet( z ).length , "A1" );
-assert.eq( 4 , z.a.count , "A2" );
-assert.eq( 6 , z.b.count , "A3" );
-assert.eq( 6 , z.c.count , "A4" );
+assert.eq(3, Object.keySet(z).length, "A1");
+assert.eq(4, z.a.count, "A2");
+assert.eq(6, z.b.count, "A3");
+assert.eq(6, z.c.count, "A4");
res.drop();
diff --git a/jstests/core/mr5.js b/jstests/core/mr5.js
index 786ef2cb8bf..537625e954b 100644
--- a/jstests/core/mr5.js
+++ b/jstests/core/mr5.js
@@ -2,57 +2,58 @@
t = db.mr5;
t.drop();
-t.save( { "partner" : 1, "visits" : 9 } );
-t.save( { "partner" : 2, "visits" : 9 } );
-t.save( { "partner" : 1, "visits" : 11 } );
-t.save( { "partner" : 1, "visits" : 30 } );
-t.save( { "partner" : 2, "visits" : 41 } );
-t.save( { "partner" : 2, "visits" : 41 } );
-
-m = function(){
- emit( this.partner , { stats : [ this.visits ] } );
+t.save({"partner": 1, "visits": 9});
+t.save({"partner": 2, "visits": 9});
+t.save({"partner": 1, "visits": 11});
+t.save({"partner": 1, "visits": 30});
+t.save({"partner": 2, "visits": 41});
+t.save({"partner": 2, "visits": 41});
+
+m = function() {
+ emit(this.partner, {stats: [this.visits]});
};
-r = function( k , v ){
+r = function(k, v) {
var stats = [];
var total = 0;
- for ( var i=0; i<v.length; i++ ){
- for ( var j in v[i].stats ) {
- stats.push( v[i].stats[j] );
+ for (var i = 0; i < v.length; i++) {
+ for (var j in v[i].stats) {
+ stats.push(v[i].stats[j]);
total += v[i].stats[j];
}
}
- return { stats : stats , total : total };
+ return {
+ stats: stats,
+ total: total
+ };
};
-res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
+res = t.mapReduce(m, r, {out: "mr5_out", scope: {xx: 1}});
+// res.find().forEach( printjson )
z = res.convertToSingleObject();
-assert.eq( 2 , Object.keySet( z ).length , "A1" );
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" );
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" );
-
+assert.eq(2, Object.keySet(z).length, "A1");
+assert.eq([9, 11, 30], z["1"].stats, "A2");
+assert.eq([9, 41, 41], z["2"].stats, "A3");
res.drop();
-m = function(){
+m = function() {
var x = "partner";
var y = "visits";
- emit( this[x] , { stats : [ this[y] ] } );
+    emit(this[x], {stats: [this[y]]});
};
-
-
-res = t.mapReduce( m , r , { out : "mr5_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
+res = t.mapReduce(m, r, {out: "mr5_out", scope: {xx: 1}});
+// res.find().forEach( printjson )
z = res.convertToSingleObject();
-assert.eq( 2 , Object.keySet( z ).length , "B1" );
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" );
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" );
-
+assert.eq(2, Object.keySet(z).length, "B1");
+assert.eq([9, 11, 30], z["1"].stats, "B2");
+assert.eq([9, 41, 41], z["2"].stats, "B3");
res.drop();
-
-
diff --git a/jstests/core/mr_bigobject.js b/jstests/core/mr_bigobject.js
index b7bfed4e4ab..d87b2af4cdc 100644
--- a/jstests/core/mr_bigobject.js
+++ b/jstests/core/mr_bigobject.js
@@ -4,35 +4,36 @@ t.drop();
var large = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
var s = large;
-while ( s.length < ( 6 * 1024 * 1024 ) ){
+while (s.length < (6 * 1024 * 1024)) {
s += large;
}
-for ( i=0; i<5; i++ )
- t.insert( { _id : i , s : s } );
+for (i = 0; i < 5; i++)
+ t.insert({_id: i, s: s});
-m = function(){
- emit( 1 , this.s + this.s );
+m = function() {
+ emit(1, this.s + this.s);
};
-r = function( k , v ){
+r = function(k, v) {
return 1;
};
-assert.throws( function(){ r = t.mapReduce( m , r , "mr_bigobject_out" ); } , null , "emit should fail" );
+assert.throws(function() {
+ r = t.mapReduce(m, r, "mr_bigobject_out");
+}, null, "emit should fail");
-
-m = function(){
- emit( 1 , this.s );
+m = function() {
+ emit(1, this.s);
};
-assert.eq( { 1 : 1 } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" );
+assert.eq({1: 1}, t.mapReduce(m, r, "mr_bigobject_out").convertToSingleObject(), "A1");
-r = function( k , v ){
+r = function(k, v) {
total = 0;
- for ( var i=0; i<v.length; i++ ){
+ for (var i = 0; i < v.length; i++) {
var x = v[i];
- if ( typeof( x ) == "number" )
+ if (typeof(x) == "number")
total += x;
else
total += x.length;
@@ -40,6 +41,8 @@ r = function( k , v ){
return total;
};
-assert.eq( { 1 : t.count() * s.length } , t.mapReduce( m , r , "mr_bigobject_out" ).convertToSingleObject() , "A1" );
+assert.eq({1: t.count() * s.length},
+ t.mapReduce(m, r, "mr_bigobject_out").convertToSingleObject(),
+ "A1");
t.drop();
diff --git a/jstests/core/mr_bigobject_replace.js b/jstests/core/mr_bigobject_replace.js
index dbed4664e7a..28a295c1b2e 100644
--- a/jstests/core/mr_bigobject_replace.js
+++ b/jstests/core/mr_bigobject_replace.js
@@ -14,7 +14,10 @@
// Returns a document of the form { _id: ObjectId(...), value: '...' } with the specified
// 'targetSize' in bytes.
function makeDocWithSize(targetSize) {
- var doc = {_id: new ObjectId(), value: ''};
+ var doc = {
+ _id: new ObjectId(),
+ value: ''
+ };
var size = Object.bsonsize(doc);
assert.gte(targetSize, size);
@@ -38,17 +41,25 @@
// Insert a document so the mapper gets run.
assert.writeOK(db.input.insert({}));
- var res = db.runCommand(Object.extend({
- mapReduce: "input",
- map: mapper,
- out: {replace: "mr_bigobject_replace"},
- }, testOptions));
+ var res = db.runCommand(Object.extend(
+ {
+ mapReduce: "input",
+ map: mapper,
+ out: {replace: "mr_bigobject_replace"},
+ },
+ testOptions));
assert.commandFailed(res, "creating a document larger than 16MB didn't fail");
- assert.lte(0, res.errmsg.indexOf("object to insert too large"),
+ assert.lte(0,
+ res.errmsg.indexOf("object to insert too large"),
"map-reduce command failed for a reason other than inserting a large document");
}
runTest({reduce: createBigDocument});
- runTest({reduce: function() { return 1; }, finalize: createBigDocument});
+ runTest({
+ reduce: function() {
+ return 1;
+ },
+ finalize: createBigDocument
+ });
})();
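A minimal sketch of the limit this test exercises, assuming a mongo shell db handle; a single BSON document, including a map-reduce result document, is capped at 16MB, and Object.bsonsize() reports how much of that budget a document uses:

    var chunk = new Array(1024 * 1024).join('a');    // roughly 1MB of 'a'
    var doc = {_id: new ObjectId(), value: chunk};
    print(Object.bsonsize(doc) + ' bytes of a 16MB document budget');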
diff --git a/jstests/core/mr_comments.js b/jstests/core/mr_comments.js
index 406ddb40a45..503bded9bd0 100644
--- a/jstests/core/mr_comments.js
+++ b/jstests/core/mr_comments.js
@@ -2,27 +2,27 @@
t = db.mr_comments;
t.drop();
-t.insert( { foo : 1 } );
-t.insert( { foo : 1 } );
-t.insert( { foo : 2 } );
+t.insert({foo: 1});
+t.insert({foo: 1});
+t.insert({foo: 2});
-res = db.runCommand(
- { mapreduce : "mr_comments",
- map : "// This will fail\n\n // Emit some stuff\n emit(this.foo, 1)\n",
- reduce : function(key, values){
- return Array.sum(values);
- },
- out: "mr_comments_out"
- });
-assert.eq( 3 , res.counts.emit );
+res = db.runCommand({
+ mapreduce: "mr_comments",
+ map: "// This will fail\n\n // Emit some stuff\n emit(this.foo, 1)\n",
+ reduce: function(key, values) {
+ return Array.sum(values);
+ },
+ out: "mr_comments_out"
+});
+assert.eq(3, res.counts.emit);
-res = db.runCommand(
- { mapreduce : "mr_comments",
- map : "// This will fail\nfunction(){\n // Emit some stuff\n emit(this.foo, 1)\n}\n",
- reduce : function(key, values){
- return Array.sum(values);
- },
- out: "mr_comments_out"
- });
+res = db.runCommand({
+ mapreduce: "mr_comments",
+ map: "// This will fail\nfunction(){\n // Emit some stuff\n emit(this.foo, 1)\n}\n",
+ reduce: function(key, values) {
+ return Array.sum(values);
+ },
+ out: "mr_comments_out"
+});
-assert.eq( 3 , res.counts.emit );
+assert.eq(3, res.counts.emit);
diff --git a/jstests/core/mr_errorhandling.js b/jstests/core/mr_errorhandling.js
index 1bd94bbd56e..280d6e76891 100644
--- a/jstests/core/mr_errorhandling.js
+++ b/jstests/core/mr_errorhandling.js
@@ -2,48 +2,49 @@
t = db.mr_errorhandling;
t.drop();
-t.save( { a : [ 1 , 2 , 3 ] } );
-t.save( { a : [ 2 , 3 , 4 ] } );
+t.save({a: [1, 2, 3]});
+t.save({a: [2, 3, 4]});
-m_good = function(){
- for ( var i=0; i<this.a.length; i++ ){
- emit( this.a[i] , 1 );
+m_good = function() {
+ for (var i = 0; i < this.a.length; i++) {
+ emit(this.a[i], 1);
}
};
-m_bad = function(){
- for ( var i=0; i<this.a.length; i++ ){
- emit( this.a[i] );
+m_bad = function() {
+ for (var i = 0; i < this.a.length; i++) {
+ emit(this.a[i]);
}
};
-r = function( k , v ){
+r = function(k, v) {
var total = 0;
- for ( var i=0; i<v.length; i++ )
+ for (var i = 0; i < v.length; i++)
total += v[i];
return total;
};
-res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
-assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
+res = t.mapReduce(m_good, r, "mr_errorhandling_out");
+assert.eq({1: 1, 2: 2, 3: 2, 4: 1}, res.convertToSingleObject(), "A");
res.drop();
res = null;
theerror = null;
try {
- res = t.mapReduce( m_bad , r , "mr_errorhandling_out" );
-}
-catch ( e ){
+ res = t.mapReduce(m_bad, r, "mr_errorhandling_out");
+} catch (e) {
theerror = e.toString();
}
-assert.isnull( res , "B1" );
-assert( theerror , "B2" );
-assert( theerror.indexOf( "emit" ) >= 0 , "B3" );
+assert.isnull(res, "B1");
+assert(theerror, "B2");
+assert(theerror.indexOf("emit") >= 0, "B3");
// test things are still in an ok state
-res = t.mapReduce( m_good , r , "mr_errorhandling_out" );
-assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
+res = t.mapReduce(m_good, r, "mr_errorhandling_out");
+assert.eq({1: 1, 2: 2, 3: 2, 4: 1}, res.convertToSingleObject(), "A");
res.drop();
-assert.throws( function(){ t.mapReduce( m_good , r , { out : "xxx" , query : "foo" } ); } );
+assert.throws(function() {
+ t.mapReduce(m_good, r, {out: "xxx", query: "foo"});
+});
diff --git a/jstests/core/mr_index.js b/jstests/core/mr_index.js
index fd650852871..796dbe5c562 100644
--- a/jstests/core/mr_index.js
+++ b/jstests/core/mr_index.js
@@ -6,34 +6,34 @@ outName = "mr_index_out";
out = db[outName];
out.drop();
-t.insert( { tags : [ 1 ] } );
-t.insert( { tags : [ 1 , 2 ] } );
-t.insert( { tags : [ 1 , 2 , 3 ] } );
-t.insert( { tags : [ 3 ] } );
-t.insert( { tags : [ 2 , 3 ] } );
-t.insert( { tags : [ 2 , 3 ] } );
-t.insert( { tags : [ 1 , 2 ] } );
-
-m = function(){
- for ( i=0; i<this.tags.length; i++ )
- emit( this.tags[i] , 1 );
+t.insert({tags: [1]});
+t.insert({tags: [1, 2]});
+t.insert({tags: [1, 2, 3]});
+t.insert({tags: [3]});
+t.insert({tags: [2, 3]});
+t.insert({tags: [2, 3]});
+t.insert({tags: [1, 2]});
+
+m = function() {
+ for (i = 0; i < this.tags.length; i++)
+ emit(this.tags[i], 1);
};
-r = function( k , vs ){
- return Array.sum( vs );
+r = function(k, vs) {
+ return Array.sum(vs);
};
-ex = function(){
- return out.find().sort( { value : 1 } ).explain("executionStats");
+ex = function() {
+ return out.find().sort({value: 1}).explain("executionStats");
};
-res = t.mapReduce( m , r , { out : outName } );
+res = t.mapReduce(m, r, {out: outName});
-assert.eq( 3 , ex().executionStats.nReturned , "A1" );
-out.ensureIndex( { value : 1 } );
-assert.eq( 3 , ex().executionStats.nReturned , "A2" );
+assert.eq(3, ex().executionStats.nReturned, "A1");
+out.ensureIndex({value: 1});
+assert.eq(3, ex().executionStats.nReturned, "A2");
-res = t.mapReduce( m , r , { out : outName } );
+res = t.mapReduce(m, r, {out: outName});
-assert.eq( 3 , ex().executionStats.nReturned , "B1" );
+assert.eq(3, ex().executionStats.nReturned, "B1");
res.drop();
diff --git a/jstests/core/mr_index2.js b/jstests/core/mr_index2.js
index 3255f5507f4..99741fedcbd 100644
--- a/jstests/core/mr_index2.js
+++ b/jstests/core/mr_index2.js
@@ -2,21 +2,24 @@
t = db.mr_index2;
t.drop();
-t.save( { arr : [1, 2] } );
+t.save({arr: [1, 2]});
-map = function() { emit(this._id, 1); };
-reduce = function(k,vals) { return Array.sum( vals ); };
+map = function() {
+ emit(this._id, 1);
+};
+reduce = function(k, vals) {
+ return Array.sum(vals);
+};
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : {} });
-assert.eq( 1 ,res.counts.input , "A" );
+res = t.mapReduce(map, reduce, {out: "mr_index2_out", query: {}});
+assert.eq(1, res.counts.input, "A");
res.drop();
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } });
-assert.eq( 1 ,res.counts.input , "B" );
+res = t.mapReduce(map, reduce, {out: "mr_index2_out", query: {arr: {$gte: 0}}});
+assert.eq(1, res.counts.input, "B");
res.drop();
-t.ensureIndex({arr:1});
-res = t.mapReduce(map,reduce, { out : "mr_index2_out" , query : { arr: {$gte:0} } });
-assert.eq( 1 ,res.counts.input , "C" );
+t.ensureIndex({arr: 1});
+res = t.mapReduce(map, reduce, {out: "mr_index2_out", query: {arr: {$gte: 0}}});
+assert.eq(1, res.counts.input, "C");
res.drop();
-
diff --git a/jstests/core/mr_index3.js b/jstests/core/mr_index3.js
index d667a844ec9..bac61cb6bc1 100644
--- a/jstests/core/mr_index3.js
+++ b/jstests/core/mr_index3.js
@@ -1,50 +1,61 @@
t = db.mr_index3;
-t.drop();
-
-t.insert( { _id : 1, name : 'name1', tags : ['dog', 'cat'] } );
-t.insert( { _id : 2, name : 'name2', tags : ['cat'] } );
-t.insert( { _id : 3, name : 'name3', tags : ['mouse', 'cat', 'dog'] } );
-t.insert( { _id : 4, name : 'name4', tags : [] } );
-
-m = function(){
- for ( var i=0; i<this.tags.length; i++ )
- emit( this.tags[i] , 1 );
-};
-
-r = function( key , values ){
- return Array.sum( values );
-};
-
-a1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results;
-a2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results;
-a3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results;
-
-assert.eq( [
- {
- "_id" : "cat",
- "value" : 3
- },
- {
- "_id" : "dog",
- "value" : 2
- },
- {
- "_id" : "mouse",
- "value" : 1
- }
-] , a1 , "A1" );
-assert.eq( [ { "_id" : "cat", "value" : 1 }, { "_id" : "dog", "value" : 1 } ] , a2 , "A2" );
-assert.eq( a1 , a3 , "A3" );
-
-t.ensureIndex({name:1, tags:1});
-
-b1 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r , out : { inline : true } } ).results;
-b2 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : 'name1'} , out : { inline : true }}).results;
-b3 = db.runCommand({ mapreduce : 'mr_index3', map : m, reduce : r, query: {name : {$gt:'name'} } , out : { inline : true }}).results;
-
-assert.eq( a1 , b1 , "AB1" );
-assert.eq( a2 , b2 , "AB2" );
-assert.eq( a3 , b3 , "AB3" );
-
-
+t.drop();
+
+t.insert({_id: 1, name: 'name1', tags: ['dog', 'cat']});
+t.insert({_id: 2, name: 'name2', tags: ['cat']});
+t.insert({_id: 3, name: 'name3', tags: ['mouse', 'cat', 'dog']});
+t.insert({_id: 4, name: 'name4', tags: []});
+
+m = function() {
+ for (var i = 0; i < this.tags.length; i++)
+ emit(this.tags[i], 1);
+};
+
+r = function(key, values) {
+ return Array.sum(values);
+};
+
+a1 = db.runCommand({mapreduce: 'mr_index3', map: m, reduce: r, out: {inline: true}}).results;
+a2 = db.runCommand({
+ mapreduce: 'mr_index3',
+ map: m,
+ reduce: r,
+ query: {name: 'name1'},
+ out: {inline: true}
+}).results;
+a3 = db.runCommand({
+ mapreduce: 'mr_index3',
+ map: m,
+ reduce: r,
+ query: {name: {$gt: 'name'}},
+ out: {inline: true}
+}).results;
+
+assert.eq([{"_id": "cat", "value": 3}, {"_id": "dog", "value": 2}, {"_id": "mouse", "value": 1}],
+ a1,
+ "A1");
+assert.eq([{"_id": "cat", "value": 1}, {"_id": "dog", "value": 1}], a2, "A2");
+assert.eq(a1, a3, "A3");
+
+t.ensureIndex({name: 1, tags: 1});
+
+b1 = db.runCommand({mapreduce: 'mr_index3', map: m, reduce: r, out: {inline: true}}).results;
+b2 = db.runCommand({
+ mapreduce: 'mr_index3',
+ map: m,
+ reduce: r,
+ query: {name: 'name1'},
+ out: {inline: true}
+}).results;
+b3 = db.runCommand({
+ mapreduce: 'mr_index3',
+ map: m,
+ reduce: r,
+ query: {name: {$gt: 'name'}},
+ out: {inline: true}
+}).results;
+
+assert.eq(a1, b1, "AB1");
+assert.eq(a2, b2, "AB2");
+assert.eq(a3, b3, "AB3");
diff --git a/jstests/core/mr_killop.js b/jstests/core/mr_killop.js
index 3f9cf52052d..c4d8b666f11 100644
--- a/jstests/core/mr_killop.js
+++ b/jstests/core/mr_killop.js
@@ -5,32 +5,26 @@ t.drop();
t2 = db.jstests_mr_killop_out;
t2.drop();
-function debug( x ) {
-// printjson( x );
+function debug(x) {
+ // printjson( x );
}
/** @return opid for the map reduce op created by the spawned shell, or that op's child */
-function op( childLoop ) {
+function op(childLoop) {
p = db.currentOp().inprog;
- debug( p );
- for ( var i in p ) {
- var o = p[ i ];
+ debug(p);
+ for (var i in p) {
+ var o = p[i];
// Identify a map/reduce or where distinct operation by its collection, whether or not
// it is currently active.
- if ( childLoop ) {
- if ( ( o.active || o.waitingForLock ) &&
- o.query &&
- o.query.query &&
- o.query.query.$where &&
- o.query.distinct == "jstests_mr_killop" ) {
+ if (childLoop) {
+ if ((o.active || o.waitingForLock) && o.query && o.query.query &&
+ o.query.query.$where && o.query.distinct == "jstests_mr_killop") {
return o.opid;
}
- }
- else {
- if ( ( o.active || o.waitingForLock ) &&
- o.query &&
- o.query.mapreduce &&
- o.query.mapreduce == "jstests_mr_killop" ) {
+ } else {
+ if ((o.active || o.waitingForLock) && o.query && o.query.mapreduce &&
+ o.query.mapreduce == "jstests_mr_killop") {
return o.opid;
}
}
@@ -46,113 +40,139 @@ function op( childLoop ) {
 * This is necessary for a child distinct $where of a map reduce op because child
* ops currently mask parent ops in currentOp.
*/
-function testOne( map, reduce, finalize, scope, childLoop, wait ) {
- debug( "testOne - map = " + tojson( map ) + "; reduce = " + tojson( reduce ) +
- "; finalize = " + tojson( finalize ) + "; scope = " + tojson( scope ) +
- "; childLoop = " + childLoop + "; wait = " + wait );
-
+function testOne(map, reduce, finalize, scope, childLoop, wait) {
+ debug("testOne - map = " + tojson(map) + "; reduce = " + tojson(reduce) + "; finalize = " +
+ tojson(finalize) + "; scope = " + tojson(scope) + "; childLoop = " + childLoop +
+ "; wait = " + wait);
+
t.drop();
t2.drop();
// Ensure we have 2 documents for the reduce to run
- t.save( {a:1} );
- t.save( {a:1} );
+ t.save({a: 1});
+ t.save({a: 1});
spec = {
- mapreduce:"jstests_mr_killop",
- out:"jstests_mr_killop_out",
+ mapreduce: "jstests_mr_killop",
+ out: "jstests_mr_killop_out",
map: map,
reduce: reduce
};
- if ( finalize ) {
- spec[ "finalize" ] = finalize;
+ if (finalize) {
+ spec["finalize"] = finalize;
}
- if ( scope ) {
- spec[ "scope" ] = scope;
+ if (scope) {
+ spec["scope"] = scope;
}
// Windows shell strips all double quotes from command line, so use
// single quotes.
- stringifiedSpec = tojson( spec ).toString().replace( /\n/g, ' ' ).replace( /\"/g, "\'" );
-
+ stringifiedSpec = tojson(spec).toString().replace(/\n/g, ' ').replace(/\"/g, "\'");
+
// The assert below won't be caught by this test script, but it will cause error messages
// to be printed.
- var awaitShell = startParallelShell( "assert.commandWorked( db.runCommand( " +
- stringifiedSpec + " ) );" );
-
- if ( wait ) {
- sleep( 2000 );
+ var awaitShell =
+ startParallelShell("assert.commandWorked( db.runCommand( " + stringifiedSpec + " ) );");
+
+ if (wait) {
+ sleep(2000);
}
-
+
o = null;
- assert.soon( function() { o = op( childLoop ); return o != -1; } );
+ assert.soon(function() {
+ o = op(childLoop);
+ return o != -1;
+ });
+
+ res = db.killOp(o);
+ debug("did kill : " + tojson(res));
- res = db.killOp( o );
- debug( "did kill : " + tojson( res ) );
-
// When the map reduce op is killed, the spawned shell will exit
var exitCode = awaitShell({checkExitSuccess: false});
- assert.neq(0, exitCode,
+ assert.neq(0,
+ exitCode,
"expected shell to exit abnormally due to map-reduce execution being terminated");
- debug( "parallel shell completed" );
-
- assert.eq( -1, op( childLoop ) );
+ debug("parallel shell completed");
+
+ assert.eq(-1, op(childLoop));
}
/** Test using wait and non wait modes */
-function test( map, reduce, finalize, scope, childLoop ) {
- debug( " Non wait mode" );
- testOne( map, reduce, finalize, scope, childLoop, false );
+function test(map, reduce, finalize, scope, childLoop) {
+ debug(" Non wait mode");
+ testOne(map, reduce, finalize, scope, childLoop, false);
- debug( " Wait mode" );
- testOne( map, reduce, finalize, scope, childLoop, true );
+ debug(" Wait mode");
+ testOne(map, reduce, finalize, scope, childLoop, true);
}
/** Test looping in map and reduce functions */
-function runMRTests( loop, childLoop ) {
- debug( " Running MR test - loop map function. no scope " );
- test( loop, // map
- function( k, v ) { return v[ 0 ]; }, // reduce
- null, // finalize
- null, // scope
- childLoop );
-
- debug( " Running MR test - loop reduce function " );
- test( function() { emit( this.a, 1 ); }, // map
- loop, // reduce
- null, // finalize
- null, // scope
- childLoop );
-
- debug( " Running finalization test - loop map function. with scope " );
- test( function() { loop(); }, // map
- function( k, v ) { return v[ 0 ]; }, // reduce
- null, // finalize
- { loop: loop }, // scope
- childLoop );
+function runMRTests(loop, childLoop) {
+ debug(" Running MR test - loop map function. no scope ");
+ test(loop, // map
+ function(k, v) {
+ return v[0];
+ }, // reduce
+ null, // finalize
+ null, // scope
+ childLoop);
+
+ debug(" Running MR test - loop reduce function ");
+ test(
+ function() {
+ emit(this.a, 1);
+ }, // map
+ loop, // reduce
+ null, // finalize
+ null, // scope
+ childLoop);
+
+ debug(" Running finalization test - loop map function. with scope ");
+ test(
+ function() {
+ loop();
+ }, // map
+ function(k, v) {
+ return v[0];
+ }, // reduce
+ null, // finalize
+ {loop: loop}, // scope
+ childLoop);
}
/** Test looping in finalize function */
-function runFinalizeTests( loop, childLoop ) {
- debug( " Running finalization test - no scope " );
- test( function() { emit( this.a, 1 ); }, // map
- function( k, v ) { return v[ 0 ]; }, // reduce
- loop, // finalize
- null, // scope
- childLoop );
-
- debug( " Running finalization test - with scope " );
- test( function() { emit( this.a, 1 ); }, // map
- function( k, v ) { return v[ 0 ]; }, // reduce
- function( a, b ) { loop(); }, // finalize
- { loop: loop }, // scope
- childLoop );
+function runFinalizeTests(loop, childLoop) {
+ debug(" Running finalization test - no scope ");
+ test(
+ function() {
+ emit(this.a, 1);
+ }, // map
+ function(k, v) {
+ return v[0];
+ }, // reduce
+ loop, // finalize
+ null, // scope
+ childLoop);
+
+ debug(" Running finalization test - with scope ");
+ test(
+ function() {
+ emit(this.a, 1);
+ }, // map
+ function(k, v) {
+ return v[0];
+ }, // reduce
+ function(a, b) {
+ loop();
+ }, // finalize
+ {loop: loop}, // scope
+ childLoop);
}
// Run inside server. No access to debug().
var loop = function() {
- while( 1 ) {
- sleep( 1000 );
+ while (1) {
+ sleep(1000);
}
};
-runMRTests( loop, false );
-runFinalizeTests( loop, false );
+runMRTests(loop, false);
+runFinalizeTests(loop, false);
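A minimal sketch of the currentOp()/killOp() pattern this test drives, assuming a mongo shell db handle; the exact fields reported under query vary by server version, so the shape below mirrors the test rather than a stable API:

    db.currentOp().inprog.forEach(function(o) {
        // Kill any in-progress map-reduce on the test collection.
        if (o.query && o.query.mapreduce == 'jstests_mr_killop') {
            printjson(db.killOp(o.opid));
        }
    });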
diff --git a/jstests/core/mr_merge.js b/jstests/core/mr_merge.js
index 83d00f39392..92490cdd6fd 100644
--- a/jstests/core/mr_merge.js
+++ b/jstests/core/mr_merge.js
@@ -2,59 +2,66 @@
t = db.mr_merge;
t.drop();
-t.insert( { a : [ 1 , 2 ] } );
-t.insert( { a : [ 2 , 3 ] } );
-t.insert( { a : [ 3 , 4 ] } );
+t.insert({a: [1, 2]});
+t.insert({a: [2, 3]});
+t.insert({a: [3, 4]});
outName = "mr_merge_out";
out = db[outName];
out.drop();
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); };
-r = function(k,vs){ return Array.sum( vs ); };
+m = function() {
+ for (i = 0; i < this.a.length; i++)
+ emit(this.a[i], 1);
+};
+r = function(k, vs) {
+ return Array.sum(vs);
+};
-function tos( o ){
+function tos(o) {
var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
+ for (var i = 0; i < 100; i++) {
+ if (o[i])
s += i + "_" + o[i];
}
return s;
}
+res = t.mapReduce(m, r, {out: outName});
-res = t.mapReduce( m , r , { out : outName } );
+expected = {
+ "1": 1,
+ "2": 2,
+ "3": 2,
+ "4": 1
+};
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "A");
-
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { a : [ 4 , 5 ] } );
-out.insert( { _id : 10 , value : "5" } );
-res = t.mapReduce( m , r , { out : outName } );
+t.insert({a: [4, 5]});
+out.insert({_id: 10, value: "5"});
+res = t.mapReduce(m, r, {out: outName});
expected["4"]++;
expected["5"] = 1;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "B");
-t.insert( { a : [ 5 , 6 ] } );
-out.insert( { _id : 10 , value : "5" } );
-res = t.mapReduce( m , r , { out : { merge : outName } } );
+t.insert({a: [5, 6]});
+out.insert({_id: 10, value: "5"});
+res = t.mapReduce(m, r, {out: {merge: outName}});
expected["5"]++;
expected["10"] = 5;
expected["6"] = 1;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "C");
// test that the nonAtomic output gives valid result
-t.insert( { a : [ 6 , 7 ] } );
-out.insert( { _id : 20 , value : "10" } );
-res = t.mapReduce( m , r , { out : { merge : outName, nonAtomic: true } } );
+t.insert({a: [6, 7]});
+out.insert({_id: 20, value: "10"});
+res = t.mapReduce(m, r, {out: {merge: outName, nonAtomic: true}});
expected["6"]++;
expected["20"] = 10;
expected["7"] = 1;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "D" );
-
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "D");
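A minimal sketch of the merge output mode covered above, assuming a mongo shell db handle and hypothetical names; a plain string out replaces the output collection, while {merge: ...} keeps documents the current run does not emit:

    var c = db.mr_merge_sketch;  // hypothetical collection
    c.drop();
    c.insert({a: [1, 2]});
    var m = function() {
        this.a.forEach(function(x) {
            emit(x, 1);
        });
    };
    var r = function(k, vs) {
        return Array.sum(vs);
    };
    var out = db.mr_merge_sketch_out;
    out.drop();
    out.insert({_id: 99, value: 7});  // pre-existing result document
    c.mapReduce(m, r, {out: {merge: 'mr_merge_sketch_out'}});
    // _id 99 survives the merge; a replace-style out would have dropped it.
    printjson(out.find().sort({_id: 1}).toArray());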
diff --git a/jstests/core/mr_merge2.js b/jstests/core/mr_merge2.js
index a4835e4397a..e324c124f29 100644
--- a/jstests/core/mr_merge2.js
+++ b/jstests/core/mr_merge2.js
@@ -2,36 +2,46 @@
t = db.mr_merge2;
t.drop();
-t.insert( { a : [ 1 , 2 ] } );
-t.insert( { a : [ 2 , 3 ] } );
-t.insert( { a : [ 3 , 4 ] } );
+t.insert({a: [1, 2]});
+t.insert({a: [2, 3]});
+t.insert({a: [3, 4]});
outName = "mr_merge2_out";
out = db[outName];
out.drop();
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); };
-r = function(k,vs){ return Array.sum( vs ); };
+m = function() {
+ for (i = 0; i < this.a.length; i++)
+ emit(this.a[i], 1);
+};
+r = function(k, vs) {
+ return Array.sum(vs);
+};
-function tos( o ){
+function tos(o) {
var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
+ for (var i = 0; i < 100; i++) {
+ if (o[i])
s += i + "_" + o[i] + "|";
}
return s;
}
-
-outOptions = { out : { merge : outName } };
-
-res = t.mapReduce( m , r , outOptions );
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { a : [ 4 , 5 ] } );
-res = t.mapReduce( m , r , outOptions );
+outOptions = {
+ out: {merge: outName}
+};
+
+res = t.mapReduce(m, r, outOptions);
+expected = {
+ "1": 1,
+ "2": 2,
+ "3": 2,
+ "4": 1
+};
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "A");
+
+t.insert({a: [4, 5]});
+res = t.mapReduce(m, r, outOptions);
expected["4"]++;
expected["5"] = 1;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
-
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "B");
diff --git a/jstests/core/mr_mutable_properties.js b/jstests/core/mr_mutable_properties.js
index e46f35b5079..12c52385275 100644
--- a/jstests/core/mr_mutable_properties.js
+++ b/jstests/core/mr_mutable_properties.js
@@ -4,40 +4,56 @@
var collection = db.mrMutableReceiver;
collection.drop();
-collection.insert({a:1});
+collection.insert({a: 1});
var map = function() {
// set property on receiver
- this.feed = {beef:1};
+ this.feed = {
+ beef: 1
+ };
// modify property on receiever
- this.a = {cake:1};
+ this.a = {
+ cake: 1
+ };
emit(this._id, this.feed);
emit(this._id, this.a);
};
var reduce = function(key, values) {
// set property on receiver
- this.feed = {beat:1};
+ this.feed = {
+ beat: 1
+ };
// set property on key arg
- key.fed = {mochi:1};
+ key.fed = {
+ mochi: 1
+ };
// push properties onto values array arg
values.push(this.feed);
values.push(key.fed);
// modify each value in the (modified) array arg
- values.forEach(function(val) { val.mod = 1; });
- return {food:values};
+ values.forEach(function(val) {
+ val.mod = 1;
+ });
+ return {
+ food: values
+ };
};
var finalize = function(key, values) {
// set property on receiver
- this.feed = {ice:1};
+ this.feed = {
+ ice: 1
+ };
// set property on key arg
- key.fed = {cream:1};
+ key.fed = {
+ cream: 1
+ };
// push properties onto values array arg
printjson(values);
@@ -45,7 +61,9 @@ var finalize = function(key, values) {
values.food.push(key.fed);
// modify each value in the (modified) array arg
- values.food.forEach(function(val) { val.mod = 1; });
+ values.food.forEach(function(val) {
+ val.mod = 1;
+ });
return values;
};
@@ -59,4 +77,6 @@ assert.eq(mr.results[0].value.food[2].beat, 1);
assert.eq(mr.results[0].value.food[3].mochi, 1);
assert.eq(mr.results[0].value.food[4].ice, 1);
assert.eq(mr.results[0].value.food[5].cream, 1);
-mr.results[0].value.food.forEach(function(val) { assert.eq(val.mod, 1); });
+mr.results[0].value.food.forEach(function(val) {
+ assert.eq(val.mod, 1);
+});
diff --git a/jstests/core/mr_optim.js b/jstests/core/mr_optim.js
index 65550d1c841..7437753ca67 100644
--- a/jstests/core/mr_optim.js
+++ b/jstests/core/mr_optim.js
@@ -4,43 +4,41 @@ t = db.mr_optim;
t.drop();
for (var i = 0; i < 1000; ++i) {
- t.save( {a: Math.random(1000), b: Math.random(10000)} );
+ t.save({a: Math.random(1000), b: Math.random(10000)});
}
-function m(){
+function m() {
emit(this._id, 13);
}
-function r( key , values ){
+function r(key, values) {
return "bad";
}
-function reformat( r ){
+function reformat(r) {
var x = {};
var cursor;
- if ( r.results )
+ if (r.results)
cursor = r.results;
else
cursor = r.find();
- cursor.forEach(
- function(z){
- x[z._id] = z.value;
- }
- );
+ cursor.forEach(function(z) {
+ x[z._id] = z.value;
+ });
return x;
}
-res = t.mapReduce( m , r , { out : "mr_optim_out" } );
-printjson( res );
-x = reformat( res );
+res = t.mapReduce(m, r, {out: "mr_optim_out"});
+printjson(res);
+x = reformat(res);
for (var key in x) {
assert.eq(x[key], 13, "value is not equal to original, maybe reduce has run");
}
res.drop();
-res = t.mapReduce( m , r , { out : { inline : 1 } } );
-//printjson( res )
-x2 = reformat( res );
+res = t.mapReduce(m, r, {out: {inline: 1}});
+// printjson( res )
+x2 = reformat(res);
res.drop();
assert.eq(x, x2, "object from inline and collection are not equal");
diff --git a/jstests/core/mr_outreduce.js b/jstests/core/mr_outreduce.js
index d380d169751..d23b0714a51 100644
--- a/jstests/core/mr_outreduce.js
+++ b/jstests/core/mr_outreduce.js
@@ -2,48 +2,55 @@
t = db.mr_outreduce;
t.drop();
-t.insert( { _id : 1 , a : [ 1 , 2 ] } );
-t.insert( { _id : 2 , a : [ 2 , 3 ] } );
-t.insert( { _id : 3 , a : [ 3 , 4 ] } );
+t.insert({_id: 1, a: [1, 2]});
+t.insert({_id: 2, a: [2, 3]});
+t.insert({_id: 3, a: [3, 4]});
outName = "mr_outreduce_out";
out = db[outName];
out.drop();
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); };
-r = function(k,vs){ return Array.sum( vs ); };
+m = function() {
+ for (i = 0; i < this.a.length; i++)
+ emit(this.a[i], 1);
+};
+r = function(k, vs) {
+ return Array.sum(vs);
+};
-function tos( o ){
+function tos(o) {
var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
+ for (var i = 0; i < 100; i++) {
+ if (o[i])
s += i + "_" + o[i] + "|";
}
return s;
}
+res = t.mapReduce(m, r, {out: outName});
-res = t.mapReduce( m , r , { out : outName } );
+expected = {
+ "1": 1,
+ "2": 2,
+ "3": 2,
+ "4": 1
+};
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "A");
-
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "A" );
-
-t.insert( { _id : 4 , a : [ 4 , 5 ] } );
-out.insert( { _id : 10 , value : "5" } ); // this is a sentinal to make sure it wasn't killed
-res = t.mapReduce( m , r , { out : { reduce : outName } , query : { _id : { $gt : 3 } } } );
+t.insert({_id: 4, a: [4, 5]});
+out.insert({_id: 10, value: "5"});  // this is a sentinel to make sure it wasn't killed
+res = t.mapReduce(m, r, {out: {reduce: outName}, query: {_id: {$gt: 3}}});
expected["4"]++;
expected["5"] = 1;
expected["10"] = 5;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "B" );
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "B");
-t.insert( { _id : 5 , a : [ 5 , 6 ] } );
-out.insert( { _id : 20 , value : "10" } ); // this is a sentinal to make sure it wasn't killed
-res = t.mapReduce( m , r , { out : { reduce : outName, nonAtomic: true } , query : { _id : { $gt : 4 } } } );
+t.insert({_id: 5, a: [5, 6]});
+out.insert({_id: 20, value: "10"});  // this is a sentinel to make sure it wasn't killed
+res = t.mapReduce(m, r, {out: {reduce: outName, nonAtomic: true}, query: {_id: {$gt: 4}}});
expected["5"]++;
expected["6"] = 1;
expected["20"] = 10;
-assert.eq( tos( expected ) , tos( res.convertToSingleObject() ) , "C" );
-
+assert.eq(tos(expected), tos(res.convertToSingleObject()), "C");
diff --git a/jstests/core/mr_outreduce2.js b/jstests/core/mr_outreduce2.js
index 45ec9be7ee2..a08b6b51527 100644
--- a/jstests/core/mr_outreduce2.js
+++ b/jstests/core/mr_outreduce2.js
@@ -7,21 +7,24 @@ t.drop();
db[out].drop();
-t.insert( { _id : 1 , x : 1 } );
-t.insert( { _id : 2 , x : 1 } );
-t.insert( { _id : 3 , x : 2 } );
+t.insert({_id: 1, x: 1});
+t.insert({_id: 2, x: 1});
+t.insert({_id: 3, x: 2});
-m = function(){ emit( this.x , 1 ); };
-r = function(k,v){ return Array.sum( v ); };
+m = function() {
+ emit(this.x, 1);
+};
+r = function(k, v) {
+ return Array.sum(v);
+};
-res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 0 } } } );
+res = t.mapReduce(m, r, {out: {reduce: out}, query: {_id: {$gt: 0}}});
-assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "A1" );
-assert.eq( 1 , db[out].findOne( { _id : 2 } ).value , "A2" );
+assert.eq(2, db[out].findOne({_id: 1}).value, "A1");
+assert.eq(1, db[out].findOne({_id: 2}).value, "A2");
+t.insert({_id: 4, x: 2});
+res = t.mapReduce(m, r, {out: {reduce: out}, query: {_id: {$gt: 3}}, finalize: null});
-t.insert( { _id : 4 , x : 2 } );
-res = t.mapReduce( m , r , { out : { reduce : out } , query : { _id : { $gt : 3 } } , finalize : null } );
-
-assert.eq( 2 , db[out].findOne( { _id : 1 } ).value , "B1" );
-assert.eq( 2 , db[out].findOne( { _id : 2 } ).value , "B2" );
+assert.eq(2, db[out].findOne({_id: 1}).value, "B1");
+assert.eq(2, db[out].findOne({_id: 2}).value, "B2");
diff --git a/jstests/core/mr_replaceIntoDB.js b/jstests/core/mr_replaceIntoDB.js
index 34c208255ee..6ffc8546c2d 100644
--- a/jstests/core/mr_replaceIntoDB.js
+++ b/jstests/core/mr_replaceIntoDB.js
@@ -2,44 +2,52 @@
t = db.mr_replace;
t.drop();
-t.insert( { a : [ 1 , 2 ] } );
-t.insert( { a : [ 2 , 3 ] } );
-t.insert( { a : [ 3 , 4 ] } );
+t.insert({a: [1, 2]});
+t.insert({a: [2, 3]});
+t.insert({a: [3, 4]});
outCollStr = "mr_replace_col";
outDbStr = "mr_db";
-m = function(){ for (i=0; i<this.a.length; i++ ) emit( this.a[i] , 1 ); };
-r = function(k,vs){ return Array.sum( vs ); };
+m = function() {
+ for (i = 0; i < this.a.length; i++)
+ emit(this.a[i], 1);
+};
+r = function(k, vs) {
+ return Array.sum(vs);
+};
-function tos( o ){
+function tos(o) {
var s = "";
- for ( var i=0; i<100; i++ ){
- if ( o[i] )
+ for (var i = 0; i < 100; i++) {
+ if (o[i])
s += i + "_" + o[i];
}
return s;
}
print("Testing mr replace into other DB");
-res = t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } );
-printjson( res );
-expected = { "1" : 1 , "2" : 2 , "3" : 2 , "4" : 1 };
+res = t.mapReduce(m, r, {out: {replace: outCollStr, db: outDbStr}});
+printjson(res);
+expected = {
+ "1": 1,
+ "2": 2,
+ "3": 2,
+ "4": 1
+};
outDb = db.getMongo().getDB(outDbStr);
outColl = outDb[outCollStr];
-str = tos( outColl.convertToSingleObject("value") );
+str = tos(outColl.convertToSingleObject("value"));
print("Received result: " + str);
-assert.eq( tos( expected ) , str , "A Received wrong result " + str );
+assert.eq(tos(expected), str, "A Received wrong result " + str);
print("checking result field");
assert.eq(res.result.collection, outCollStr, "B1 Wrong collection " + res.result.collection);
assert.eq(res.result.db, outDbStr, "B2 Wrong db " + res.result.db);
print("Replace again and check");
-outColl.save({_id: "5", value : 1});
-t.mapReduce( m , r , { out : { replace: outCollStr, db: outDbStr } } );
-str = tos( outColl.convertToSingleObject("value") );
+outColl.save({_id: "5", value: 1});
+t.mapReduce(m, r, {out: {replace: outCollStr, db: outDbStr}});
+str = tos(outColl.convertToSingleObject("value"));
print("Received result: " + str);
-assert.eq( tos( expected ) , str , "C1 Received wrong result " + str );
-
-
+assert.eq(tos(expected), str, "C1 Received wrong result " + str);
diff --git a/jstests/core/mr_sort.js b/jstests/core/mr_sort.js
index 8d0ba96ad82..b90ad3f6bf5 100644
--- a/jstests/core/mr_sort.js
+++ b/jstests/core/mr_sort.js
@@ -2,43 +2,38 @@
t = db.mr_sort;
t.drop();
-t.ensureIndex( { x : 1 } );
-
-t.insert( { x : 1 } );
-t.insert( { x : 10 } );
-t.insert( { x : 2 } );
-t.insert( { x : 9 } );
-t.insert( { x : 3 } );
-t.insert( { x : 8 } );
-t.insert( { x : 4 } );
-t.insert( { x : 7 } );
-t.insert( { x : 5 } );
-t.insert( { x : 6 } );
-
-m = function(){
- emit( "a" , this.x );
+t.ensureIndex({x: 1});
+
+t.insert({x: 1});
+t.insert({x: 10});
+t.insert({x: 2});
+t.insert({x: 9});
+t.insert({x: 3});
+t.insert({x: 8});
+t.insert({x: 4});
+t.insert({x: 7});
+t.insert({x: 5});
+t.insert({x: 6});
+
+m = function() {
+ emit("a", this.x);
};
-r = function( k , v ){
- return Array.sum( v );
+r = function(k, v) {
+ return Array.sum(v);
};
-
-res = t.mapReduce( m , r , "mr_sort_out " );
+res = t.mapReduce(m, r, "mr_sort_out ");
x = res.convertToSingleObject();
res.drop();
-assert.eq( { "a" : 55 } , x , "A1" );
+assert.eq({"a": 55}, x, "A1");
-res = t.mapReduce( m , r , { out : "mr_sort_out" , query : { x : { $lt : 3 } } } );
+res = t.mapReduce(m, r, {out: "mr_sort_out", query: {x: {$lt: 3}}});
x = res.convertToSingleObject();
res.drop();
-assert.eq( { "a" : 3 } , x , "A2" );
+assert.eq({"a": 3}, x, "A2");
-res = t.mapReduce( m , r , { out : "mr_sort_out" , sort : { x : 1 } , limit : 2 } );
+res = t.mapReduce(m, r, {out: "mr_sort_out", sort: {x: 1}, limit: 2});
x = res.convertToSingleObject();
res.drop();
-assert.eq( { "a" : 3 } , x , "A3" );
-
-
-
-
+assert.eq({"a": 3}, x, "A3");
diff --git a/jstests/core/mr_stored.js b/jstests/core/mr_stored.js
index 3403411ea70..63fa301e66d 100644
--- a/jstests/core/mr_stored.js
+++ b/jstests/core/mr_stored.js
@@ -2,65 +2,77 @@
t = db.mr_stored;
t.drop();
-t.save( { "partner" : 1, "visits" : 9 } );
-t.save( { "partner" : 2, "visits" : 9 } );
-t.save( { "partner" : 1, "visits" : 11 } );
-t.save( { "partner" : 1, "visits" : 30 } );
-t.save( { "partner" : 2, "visits" : 41 } );
-t.save( { "partner" : 2, "visits" : 41 } );
+t.save({"partner": 1, "visits": 9});
+t.save({"partner": 2, "visits": 9});
+t.save({"partner": 1, "visits": 11});
+t.save({"partner": 1, "visits": 30});
+t.save({"partner": 2, "visits": 41});
+t.save({"partner": 2, "visits": 41});
-m = function(obj){
- emit( obj.partner , { stats : [ obj.visits ] } );
+m = function(obj) {
+ emit(obj.partner, {stats: [obj.visits]});
};
-r = function( k , v ){
+r = function(k, v) {
var stats = [];
var total = 0;
- for ( var i=0; i<v.length; i++ ){
- for ( var j in v[i].stats ) {
- stats.push( v[i].stats[j] );
+ for (var i = 0; i < v.length; i++) {
+ for (var j in v[i].stats) {
+ stats.push(v[i].stats[j]);
total += v[i].stats[j];
}
}
- return { stats : stats , total : total };
+ return {
+ stats: stats,
+ total: total
+ };
};
// Test that map reduce works with stored javascript
-db.system.js.save( { _id : "mr_stored_map" , value : m } );
-db.system.js.save( { _id : "mr_stored_reduce" , value : r } );
+db.system.js.save({_id: "mr_stored_map", value: m});
+db.system.js.save({_id: "mr_stored_reduce", value: r});
-res = t.mapReduce( function () { mr_stored_map(this); } ,
- function ( k , v ) { return mr_stored_reduce( k , v ); } ,
- { out : "mr_stored_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
+res = t.mapReduce(
+ function() {
+ mr_stored_map(this);
+ },
+ function(k, v) {
+ return mr_stored_reduce(k, v);
+ },
+ {out: "mr_stored_out", scope: {xx: 1}});
+// res.find().forEach( printjson )
z = res.convertToSingleObject();
-assert.eq( 2 , Object.keySet( z ).length , "A1" );
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" );
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" );
-
+assert.eq(2, Object.keySet(z).length, "A1");
+assert.eq([9, 11, 30], z["1"].stats, "A2");
+assert.eq([9, 41, 41], z["2"].stats, "A3");
res.drop();
-m = function(obj){
+m = function(obj) {
var x = "partner";
var y = "visits";
- emit( obj[x] , { stats : [ obj[y] ] } );
+ emit(obj[x], {stats: [obj[y]]});
};
-db.system.js.save( { _id : "mr_stored_map" , value : m } );
+db.system.js.save({_id: "mr_stored_map", value: m});
-res = t.mapReduce( function () { mr_stored_map(this); } ,
- function ( k , v ) { return mr_stored_reduce( k , v ); } ,
- { out : "mr_stored_out" , scope : { xx : 1 } } );
-//res.find().forEach( printjson )
+res = t.mapReduce(
+ function() {
+ mr_stored_map(this);
+ },
+ function(k, v) {
+ return mr_stored_reduce(k, v);
+ },
+ {out: "mr_stored_out", scope: {xx: 1}});
+// res.find().forEach( printjson )
z = res.convertToSingleObject();
-assert.eq( 2 , Object.keySet( z ).length , "B1" );
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" );
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" );
+assert.eq(2, Object.keySet(z).length, "B1");
+assert.eq([9, 11, 30], z["1"].stats, "B2");
+assert.eq([9, 41, 41], z["2"].stats, "B3");
-db.system.js.remove( { _id : "mr_stored_map" } );
-db.system.js.remove( { _id : "mr_stored_reduce" } );
+db.system.js.remove({_id: "mr_stored_map"});
+db.system.js.remove({_id: "mr_stored_reduce"});
res.drop();
diff --git a/jstests/core/mr_undef.js b/jstests/core/mr_undef.js
index 1bf89e3acc2..de3b61543d7 100644
--- a/jstests/core/mr_undef.js
+++ b/jstests/core/mr_undef.js
@@ -6,17 +6,23 @@ outname = "mr_undef_out";
out = db[outname];
out.drop();
-t.insert({x : 0});
+t.insert({x: 0});
-var m = function() { emit(this.mod, this.x); };
-var r = function(k,v) { total = 0; for(i in v) { total+= v[i]; } return total; };
+var m = function() {
+ emit(this.mod, this.x);
+};
+var r = function(k, v) {
+ total = 0;
+ for (i in v) {
+ total += v[i];
+ }
+ return total;
+};
-res = t.mapReduce(m, r, {out : outname } );
+res = t.mapReduce(m, r, {out: outname});
-assert.eq( 0 , out.find( { _id : { $type : 6 } } ).itcount() , "A1" );
-assert.eq( 1 , out.find( { _id : { $type : 10 } } ).itcount() , "A2" );
+assert.eq(0, out.find({_id: {$type: 6}}).itcount(), "A1");
+assert.eq(1, out.find({_id: {$type: 10}}).itcount(), "A2");
x = out.findOne();
-assert.eq( x , out.findOne( { _id : x["_id"] } ) , "A3" );
-
-
+assert.eq(x, out.findOne({_id: x["_id"]}), "A3");
diff --git a/jstests/core/multi.js b/jstests/core/multi.js
index eb6cad348cd..c7853b18f25 100644
--- a/jstests/core/multi.js
+++ b/jstests/core/multi.js
@@ -1,24 +1,24 @@
t = db.jstests_multi;
t.drop();
-t.ensureIndex( { a: 1 } );
-t.save( { a: [ 1, 2 ] } );
-assert.eq( 1, t.find( { a: { $gt: 0 } } ).count() , "A" );
-assert.eq( 1, t.find( { a: { $gt: 0 } } ).toArray().length , "B" );
+t.ensureIndex({a: 1});
+t.save({a: [1, 2]});
+assert.eq(1, t.find({a: {$gt: 0}}).count(), "A");
+assert.eq(1, t.find({a: {$gt: 0}}).toArray().length, "B");
t.drop();
-t.save( { a: [ [ [ 1 ] ] ] } );
-assert.eq( 0, t.find( { a:1 } ).count() , "C" );
-assert.eq( 0, t.find( { a: [ 1 ] } ).count() , "D" );
-assert.eq( 1, t.find( { a: [ [ 1 ] ] } ).count() , "E" );
-assert.eq( 1, t.find( { a: [ [ [ 1 ] ] ] } ).count() , "F" );
+t.save({a: [[[1]]]});
+assert.eq(0, t.find({a: 1}).count(), "C");
+assert.eq(0, t.find({a: [1]}).count(), "D");
+assert.eq(1, t.find({a: [[1]]}).count(), "E");
+assert.eq(1, t.find({a: [[[1]]]}).count(), "F");
t.drop();
-t.save( { a: [ 1, 2 ] } );
-assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , "G" );
+t.save({a: [1, 2]});
+assert.eq(0, t.find({a: {$ne: 1}}).count(), "G");
t.drop();
-t.save( { a: [ { b: 1 }, { b: 2 } ] } );
-assert.eq( 0, t.find( { 'a.b': { $ne: 1 } } ).count() , "H" );
+t.save({a: [{b: 1}, {b: 2}]});
+assert.eq(0, t.find({'a.b': {$ne: 1}}).count(), "H");
// TODO - run same tests with an index on a
diff --git a/jstests/core/multi2.js b/jstests/core/multi2.js
index 7c72722fd34..d5111c31913 100644
--- a/jstests/core/multi2.js
+++ b/jstests/core/multi2.js
@@ -2,22 +2,20 @@
t = db.multi2;
t.drop();
-t.save( { x : 1 , a : [ 1 ] } );
-t.save( { x : 1 , a : [] } );
-t.save( { x : 1 , a : null } );
-t.save( {} );
+t.save({x: 1, a: [1]});
+t.save({x: 1, a: []});
+t.save({x: 1, a: null});
+t.save({});
-assert.eq( 3 , t.find( { x : 1 } ).count() , "A" );
-
-t.ensureIndex( { x : 1 } );
-assert.eq( 3 , t.find( { x : 1 } ).count() , "B" );
-assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s1" );
-assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "B2" );
-
-t.dropIndex( { x : 1 } );
-t.ensureIndex( { x : 1 , a : 1 } );
-assert.eq( 3 , t.find( { x : 1 } ).count() , "C" ); // SERVER-279
-assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s2" );
-assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "C2" );
+assert.eq(3, t.find({x: 1}).count(), "A");
+t.ensureIndex({x: 1});
+assert.eq(3, t.find({x: 1}).count(), "B");
+assert.eq(4, t.find().sort({x: 1, a: 1}).count(), "s1");
+assert.eq(1, t.find({x: 1, a: null}).count(), "B2");
+t.dropIndex({x: 1});
+t.ensureIndex({x: 1, a: 1});
+assert.eq(3, t.find({x: 1}).count(), "C"); // SERVER-279
+assert.eq(4, t.find().sort({x: 1, a: 1}).count(), "s2");
+assert.eq(1, t.find({x: 1, a: null}).count(), "C2");
diff --git a/jstests/core/multikey_geonear.js b/jstests/core/multikey_geonear.js
index 7f5bbe3f75f..6d796cb62ff 100644
--- a/jstests/core/multikey_geonear.js
+++ b/jstests/core/multikey_geonear.js
@@ -49,15 +49,15 @@ t.insert({_id: 0, a: [{b: 0}, {c: {type: "Point", coordinates: [0, 0]}}]});
t.insert({_id: 1, a: [{b: 1}, {c: {type: "Point", coordinates: [1, 1]}}]});
t.insert({_id: 2, a: [{b: 2}, {c: {type: "Point", coordinates: [2, 2]}}]});
-cursor = t.find({"a.b": {$gte: 0}, "a.c": {$near:
- {$geometry: {type: "Point", coordinates: [2, 2]}}}});
+cursor =
+ t.find({"a.b": {$gte: 0}, "a.c": {$near: {$geometry: {type: "Point", coordinates: [2, 2]}}}});
checkResults(cursor);
// Double check that we're not intersecting bounds. Doing so should cause us to
// miss the result here.
t.insert({_id: 3, a: [{b: 10}, {b: -1}, {c: {type: "Point", coordinates: [0, 0]}}]});
-cursor = t.find({"a.b": {$lt: 0, $gt: 9}, "a.c": {$near:
- {$geometry: {type: "Point", coordinates: [0, 0]}}}});
+cursor = t.find(
+ {"a.b": {$lt: 0, $gt: 9}, "a.c": {$near: {$geometry: {type: "Point", coordinates: [0, 0]}}}});
assert.eq(3, cursor.next()["_id"]);
assert(!cursor.hasNext());
diff --git a/jstests/core/ne1.js b/jstests/core/ne1.js
index e1c5656b5c8..ba0bf7a3de7 100644
--- a/jstests/core/ne1.js
+++ b/jstests/core/ne1.js
@@ -2,10 +2,10 @@
t = db.ne1;
t.drop();
-t.save( { x : 1 } );
-t.save( { x : 2 } );
-t.save( { x : 3 } );
+t.save({x: 1});
+t.save({x: 2});
+t.save({x: 3});
-assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "A" );
-t.ensureIndex( { x : 1 } );
-assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "B" );
+assert.eq(2, t.find({x: {$ne: 2}}).itcount(), "A");
+t.ensureIndex({x: 1});
+assert.eq(2, t.find({x: {$ne: 2}}).itcount(), "B");
diff --git a/jstests/core/ne2.js b/jstests/core/ne2.js
index b0960d69cfa..8814688a45b 100644
--- a/jstests/core/ne2.js
+++ b/jstests/core/ne2.js
@@ -2,15 +2,15 @@
t = db.jstests_ne2;
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-t.save( { a:-0.5 } );
-t.save( { a:0 } );
-t.save( { a:0 } );
-t.save( { a:0.5 } );
+t.save({a: -0.5});
+t.save({a: 0});
+t.save({a: 0});
+t.save({a: 0.5});
-e = t.find( { a: { $ne: 0 } } ).explain( true );
-assert.eq( 2, e.executionStats.nReturned, 'A' );
+e = t.find({a: {$ne: 0}}).explain(true);
+assert.eq(2, e.executionStats.nReturned, 'A');
-e = t.find( { a: { $gt: -1, $lt: 1, $ne: 0 } } ).explain( true );
-assert.eq( 2, e.executionStats.nReturned, 'B' );
+e = t.find({a: {$gt: -1, $lt: 1, $ne: 0}}).explain(true);
+assert.eq(2, e.executionStats.nReturned, 'B');
diff --git a/jstests/core/ne3.js b/jstests/core/ne3.js
index 3260fd3c40f..5c38858c019 100644
--- a/jstests/core/ne3.js
+++ b/jstests/core/ne3.js
@@ -3,10 +3,20 @@
t = db.jstests_ne3;
t.drop();
-assert.throws( function() { t.findOne( { t: { $ne: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $gt: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $gte: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $lt: /a/ } } ); } );
-assert.throws( function() { t.findOne( { t: { $lte: /a/ } } ); } );
+assert.throws(function() {
+ t.findOne({t: {$ne: /a/}});
+});
+assert.throws(function() {
+ t.findOne({t: {$gt: /a/}});
+});
+assert.throws(function() {
+ t.findOne({t: {$gte: /a/}});
+});
+assert.throws(function() {
+ t.findOne({t: {$lt: /a/}});
+});
+assert.throws(function() {
+ t.findOne({t: {$lte: /a/}});
+});
-assert.eq( 0, t.count( { t: { $in: [ /a/ ] } } ) );
+assert.eq(0, t.count({t: {$in: [/a/]}}));
diff --git a/jstests/core/nestedarr1.js b/jstests/core/nestedarr1.js
index b3bc9b73156..98ddc2193ea 100644
--- a/jstests/core/nestedarr1.js
+++ b/jstests/core/nestedarr1.js
@@ -1,30 +1,33 @@
// make sure that we don't crash on large nested arrays but correctly do not index them
// SERVER-5127, SERVER-5036
-function makeNestArr(depth){
- if(depth == 1){
- return {a : [depth]};
- }
- else{
- return {a : [makeNestArr(depth - 1)] };
+function makeNestArr(depth) {
+ if (depth == 1) {
+ return {
+ a: [depth]
+ };
+ } else {
+ return {
+ a: [makeNestArr(depth - 1)]
+ };
}
}
t = db.arrNestTest;
t.drop();
-t.ensureIndex({a:1});
+t.ensureIndex({a: 1});
n = 1;
-while ( true ) {
+while (true) {
var before = t.count();
- t.insert( { _id : n, a : makeNestArr(n) } );
+ t.insert({_id: n, a: makeNestArr(n)});
var after = t.count();
- if ( before == after )
+ if (before == after)
break;
n++;
}
-assert( n > 30, "not enough n: " + n );
+assert(n > 30, "not enough n: " + n);
-assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
+assert.eq(t.count(), t.find({_id: {$gt: 0}}).hint({a: 1}).itcount());
diff --git a/jstests/core/nestedobj1.js b/jstests/core/nestedobj1.js
index 45ef0c530d4..97b9460da6f 100644
--- a/jstests/core/nestedobj1.js
+++ b/jstests/core/nestedobj1.js
@@ -1,10 +1,14 @@
-//SERVER-5127, SERVER-5036
+// SERVER-5127, SERVER-5036
-function makeNestObj(depth){
- toret = { a : 1};
+function makeNestObj(depth) {
+ toret = {
+ a: 1
+ };
- for(i = 1; i < depth; i++){
- toret = {a : toret};
+ for (i = 1; i < depth; i++) {
+ toret = {
+ a: toret
+ };
}
return toret;
@@ -13,18 +17,18 @@ function makeNestObj(depth){
t = db.objNestTest;
t.drop();
-t.ensureIndex({a:1});
+t.ensureIndex({a: 1});
n = 1;
-while ( true ) {
+while (true) {
var before = t.count();
- t.insert( { _id : n, a : makeNestObj(n) } );
+ t.insert({_id: n, a: makeNestObj(n)});
var after = t.count();
- if ( before == after )
+ if (before == after)
break;
n++;
}
-assert( n > 30, "not enough n: " + n );
+assert(n > 30, "not enough n: " + n);
-assert.eq( t.count(), t.find( { _id : { $gt : 0 } } ).hint( { a : 1 } ).itcount() );
+assert.eq(t.count(), t.find({_id: {$gt: 0}}).hint({a: 1}).itcount());
diff --git a/jstests/core/nin.js b/jstests/core/nin.js
index 7a25afd382e..d6cd78ee7a4 100644
--- a/jstests/core/nin.js
+++ b/jstests/core/nin.js
@@ -1,58 +1,64 @@
t = db.jstests_nin;
t.drop();
-function checkEqual( name , key , value ){
+function checkEqual(name, key, value) {
var o = {};
- o[key] = { $in : [ value ] };
- var i = t.find( o ).count();
- o[key] = { $nin : [ value ] };
- var n = t.find( o ).count();
-
- assert.eq( t.find().count() , i + n ,
- "checkEqual " + name + " $in + $nin != total | " + i + " + " + n + " != " + t.find().count() );
+ o[key] = {
+ $in: [value]
+ };
+ var i = t.find(o).count();
+ o[key] = {
+ $nin: [value]
+ };
+ var n = t.find(o).count();
+
+ assert.eq(t.find().count(),
+ i + n,
+ "checkEqual " + name + " $in + $nin != total | " + i + " + " + n + " != " +
+ t.find().count());
}
-doTest = function( n ) {
-
- t.save( { a:[ 1,2,3 ] } );
- t.save( { a:[ 1,2,4 ] } );
- t.save( { a:[ 1,8,5 ] } );
- t.save( { a:[ 1,8,6 ] } );
- t.save( { a:[ 1,9,7 ] } );
-
- assert.eq( 5, t.find( { a: { $nin: [ 10 ] } } ).count() , n + " A" );
- assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , n + " B" );
- assert.eq( 0, t.find( { a: { $nin: [ 1 ] } } ).count() , n + " C" );
- assert.eq( 0, t.find( { a: { $nin: [ 1, 2 ] } } ).count() , n + " D" );
- assert.eq( 3, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " E" );
- assert.eq( 3, t.find( { a: { $nin: [ 8 ] } } ).count() , n + " F" );
- assert.eq( 4, t.find( { a: { $nin: [ 9 ] } } ).count() , n + " G" );
- assert.eq( 4, t.find( { a: { $nin: [ 3 ] } } ).count() , n + " H" );
- assert.eq( 3, t.find( { a: { $nin: [ 2, 3 ] } } ).count() , n + " I" );
- assert.eq( 1, t.find( { a: { $ne: 8, $nin: [ 2, 3 ] } } ).count() , n + " I2" );
-
- checkEqual( n + " A" , "a" , 5 );
-
- t.save( { a: [ 2, 2 ] } );
- assert.eq( 3, t.find( { a: { $nin: [ 2, 2 ] } } ).count() , n + " J" );
-
- t.save( { a: [ [ 2 ] ] } );
- assert.eq( 4, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " K" );
-
- t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
- checkEqual( n + " B" , "a" , 5 );
- checkEqual( n + " C" , "a.b" , 5 );
-
- assert.eq( 7, t.find( { 'a.b': { $nin: [ 10 ] } } ).count() , n + " L" );
- assert.eq( 7, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
- assert.eq( 7, t.find( { a: { $nin: [ 11 ] } } ).count() , n + " N" );
-
- t.save( { a: { b: [ 20, 30 ] } } );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() , n + " O" );
- assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() , n + " P" );
+doTest = function(n) {
+
+ t.save({a: [1, 2, 3]});
+ t.save({a: [1, 2, 4]});
+ t.save({a: [1, 8, 5]});
+ t.save({a: [1, 8, 6]});
+ t.save({a: [1, 9, 7]});
+
+ assert.eq(5, t.find({a: {$nin: [10]}}).count(), n + " A");
+ assert.eq(0, t.find({a: {$ne: 1}}).count(), n + " B");
+ assert.eq(0, t.find({a: {$nin: [1]}}).count(), n + " C");
+ assert.eq(0, t.find({a: {$nin: [1, 2]}}).count(), n + " D");
+ assert.eq(3, t.find({a: {$nin: [2]}}).count(), n + " E");
+ assert.eq(3, t.find({a: {$nin: [8]}}).count(), n + " F");
+ assert.eq(4, t.find({a: {$nin: [9]}}).count(), n + " G");
+ assert.eq(4, t.find({a: {$nin: [3]}}).count(), n + " H");
+ assert.eq(3, t.find({a: {$nin: [2, 3]}}).count(), n + " I");
+ assert.eq(1, t.find({a: {$ne: 8, $nin: [2, 3]}}).count(), n + " I2");
+
+ checkEqual(n + " A", "a", 5);
+
+ t.save({a: [2, 2]});
+ assert.eq(3, t.find({a: {$nin: [2, 2]}}).count(), n + " J");
+
+ t.save({a: [[2]]});
+ assert.eq(4, t.find({a: {$nin: [2]}}).count(), n + " K");
+
+ t.save({a: [{b: [10, 11]}, 11]});
+ checkEqual(n + " B", "a", 5);
+ checkEqual(n + " C", "a.b", 5);
+
+ assert.eq(7, t.find({'a.b': {$nin: [10]}}).count(), n + " L");
+ assert.eq(7, t.find({'a.b': {$nin: [[10, 11]]}}).count(), n + " M");
+ assert.eq(7, t.find({a: {$nin: [11]}}).count(), n + " N");
+
+ t.save({a: {b: [20, 30]}});
+ assert.eq(1, t.find({'a.b': {$all: [20]}}).count(), n + " O");
+ assert.eq(1, t.find({'a.b': {$all: [20, 30]}}).count(), n + " P");
};
-doTest( "no index" );
+doTest("no index");
t.drop();
-t.ensureIndex( {a:1} );
-doTest( "with index" );
+t.ensureIndex({a: 1});
+doTest("with index");
diff --git a/jstests/core/nin2.js b/jstests/core/nin2.js
index afdbb0494da..41996c1f4bb 100644
--- a/jstests/core/nin2.js
+++ b/jstests/core/nin2.js
@@ -4,64 +4,64 @@ t = db.jstests_nin2;
t.drop();
// Check various operator types.
-function checkOperators( array, inMatches ) {
+function checkOperators(array, inMatches) {
inCount = inMatches ? 1 : 0;
notInCount = 1 - inCount;
- assert.eq( inCount, t.count( {foo:{$in:array}} ) );
- assert.eq( notInCount, t.count( {foo:{$not:{$in:array}}} ) );
- assert.eq( notInCount, t.count( {foo:{$nin:array}} ) );
- assert.eq( inCount, t.count( {foo:{$not:{$nin:array}}} ) );
+ assert.eq(inCount, t.count({foo: {$in: array}}));
+ assert.eq(notInCount, t.count({foo: {$not: {$in: array}}}));
+ assert.eq(notInCount, t.count({foo: {$nin: array}}));
+ assert.eq(inCount, t.count({foo: {$not: {$nin: array}}}));
}
t.save({});
-assert.eq( 1, t.count( {foo:null} ) );
-assert.eq( 0, t.count( {foo:{$ne:null}} ) );
-assert.eq( 0, t.count( {foo:1} ) );
+assert.eq(1, t.count({foo: null}));
+assert.eq(0, t.count({foo: {$ne: null}}));
+assert.eq(0, t.count({foo: 1}));
// Check matching null against missing field.
-checkOperators( [null], true );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
+checkOperators([null], true);
+checkOperators([null, 1], true);
+checkOperators([1, null], true);
t.remove({});
-t.save({foo:null});
+t.save({foo: null});
-assert.eq( 1, t.count( {foo:null} ) );
-assert.eq( 0, t.count( {foo:{$ne:null}} ) );
-assert.eq( 0, t.count( {foo:1} ) );
+assert.eq(1, t.count({foo: null}));
+assert.eq(0, t.count({foo: {$ne: null}}));
+assert.eq(0, t.count({foo: 1}));
// Check matching empty set.
-checkOperators( [], false );
+checkOperators([], false);
// Check matching null against missing null field.
-checkOperators( [null], true );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
+checkOperators([null], true);
+checkOperators([null, 1], true);
+checkOperators([1, null], true);
t.remove({});
-t.save({foo:1});
+t.save({foo: 1});
-assert.eq( 0, t.count( {foo:null} ) );
-assert.eq( 1, t.count( {foo:{$ne:null}} ) );
-assert.eq( 1, t.count( {foo:1} ) );
+assert.eq(0, t.count({foo: null}));
+assert.eq(1, t.count({foo: {$ne: null}}));
+assert.eq(1, t.count({foo: 1}));
// Check matching null against 1.
-checkOperators( [null], false );
-checkOperators( [null,1], true );
-checkOperators( [1,null], true );
+checkOperators([null], false);
+checkOperators([null, 1], true);
+checkOperators([1, null], true);
t.remove({});
-t.save( {foo:[0,1]} );
+t.save({foo: [0, 1]});
// Check exact match of embedded array.
-checkOperators( [[0,1]], true );
+checkOperators([[0, 1]], true);
t.remove({});
-t.save( {foo:[]} );
+t.save({foo: []});
// Check exact match of embedded empty array.
-checkOperators( [[]], true );
+checkOperators([[]], true);
t.remove({});
-t.save( {foo:'foo'} );
+t.save({foo: 'foo'});
// Check regex match.
-checkOperators( [/o/], true );
+checkOperators([/o/], true);
diff --git a/jstests/core/no_db_created.js b/jstests/core/no_db_created.js
index 3c10dbc5772..3491914d470 100644
--- a/jstests/core/no_db_created.js
+++ b/jstests/core/no_db_created.js
@@ -1,15 +1,14 @@
// checks that operations do not create a database
-(function() {
+(function() {
"use strict";
var adminDB = db.getSiblingDB("admin");
var noDB = function(db) {
var dbName = db.getName();
var dbsRes = assert.commandWorked(adminDB.runCommand("listDatabases"));
dbsRes.databases.forEach(function(e) {
- assert.neq(dbName,
- e.name,
- "Found db which shouldn't exist:" + dbName + "; " + tojson(dbsRes));
+ assert.neq(
+ dbName, e.name, "Found db which shouldn't exist:" + dbName + "; " + tojson(dbsRes));
});
};
var mydb = db.getSiblingDB("neverCreated");
@@ -19,7 +18,7 @@
var coll = mydb.fake;
// force:true is for replset passthroughs
- assert.commandFailed(coll.runCommand("compact", {force:true}));
+ assert.commandFailed(coll.runCommand("compact", {force: true}));
noDB(mydb);
assert.writeOK(coll.insert({}));
mydb.dropDatabase();
@@ -29,7 +28,7 @@
assert.writeOK(coll.insert({}));
mydb.dropDatabase();
- assert.commandFailed(coll.runCommand("collMod", {expireAfterSeconds:1}));
+ assert.commandFailed(coll.runCommand("collMod", {expireAfterSeconds: 1}));
noDB(mydb);
assert.writeOK(coll.insert({}));
mydb.dropDatabase();
diff --git a/jstests/core/not1.js b/jstests/core/not1.js
index 6ff509f8d80..576c5817940 100644
--- a/jstests/core/not1.js
+++ b/jstests/core/not1.js
@@ -2,19 +2,18 @@
t = db.not1;
t.drop();
-
-t.insert({a:1});
-t.insert({a:2});
+t.insert({a: 1});
+t.insert({a: 2});
t.insert({});
-function test( name ){
- assert.eq( 3 , t.find().count() , name + "A" );
- assert.eq( 1 , t.find( { a : 1 } ).count() , name + "B" );
- assert.eq( 2 , t.find( { a : { $ne : 1 } } ).count() , name + "C" ); // SERVER-198
- assert.eq( 1 , t.find({a:{$in:[1]}}).count() , name + "D" );
- assert.eq( 2 , t.find({a:{$nin:[1]}}).count() , name + "E" ); // SERVER-198
+function test(name) {
+ assert.eq(3, t.find().count(), name + "A");
+ assert.eq(1, t.find({a: 1}).count(), name + "B");
+ assert.eq(2, t.find({a: {$ne: 1}}).count(), name + "C"); // SERVER-198
+ assert.eq(1, t.find({a: {$in: [1]}}).count(), name + "D");
+ assert.eq(2, t.find({a: {$nin: [1]}}).count(), name + "E"); // SERVER-198
}
-test( "no index" );
-t.ensureIndex( { a : 1 } );
-test( "with index" );
+test("no index");
+t.ensureIndex({a: 1});
+test("with index");
diff --git a/jstests/core/not2.js b/jstests/core/not2.js
index 43dbfb20e3e..98eb19cee6f 100644
--- a/jstests/core/not2.js
+++ b/jstests/core/not2.js
@@ -1,84 +1,84 @@
t = db.jstests_not2;
t.drop();
-check = function( query, expected, size ) {
- if ( size == null ) {
+check = function(query, expected, size) {
+ if (size == null) {
size = 1;
}
- assert.eq( size, t.find( query ).itcount(), tojson( query ) );
- if ( size > 0 ) {
- assert.eq( expected, t.findOne( query ).i, tojson( query ) );
+ assert.eq(size, t.find(query).itcount(), tojson(query));
+ if (size > 0) {
+ assert.eq(expected, t.findOne(query).i, tojson(query));
}
};
-fail = function( query ) {
+fail = function(query) {
try {
- t.find( query ).itcount();
- assert( false, tojson( query ) );
- } catch ( e ) {
+ t.find(query).itcount();
+ assert(false, tojson(query));
+ } catch (e) {
// expected
}
};
doTest = function() {
-t.remove( {} );
-
-t.save( {i:"a"} );
-t.save( {i:"b"} );
+ t.remove({});
-fail( {i:{$not:"a"}} );
-// SERVER-12735: We currently do not handle double negatives
-// during query canonicalization.
-//fail( {i:{$not:{$not:"a"}}} );
-//fail( {i:{$not:{$not:{$gt:"a"}}}} );
-fail( {i:{$not:{$ref:"foo"}}} );
-fail( {i:{$not:{}}} );
-check( {i:{$gt:"a"}}, "b" );
-check( {i:{$not:{$gt:"a"}}}, "a" );
-check( {i:{$not:{$ne:"a"}}}, "a" );
-check( {i:{$not:{$gte:"b"}}}, "a" );
-check( {i:{$exists:true}}, "a", 2 );
-check( {i:{$not:{$exists:true}}}, "", 0 );
-check( {j:{$not:{$exists:false}}}, "", 0 );
-check( {j:{$not:{$exists:true}}}, "a", 2 );
-check( {i:{$not:{$in:["a"]}}}, "b" );
-check( {i:{$not:{$in:["a", "b"]}}}, "", 0 );
-check( {i:{$not:{$in:["g"]}}}, "a", 2 );
-check( {i:{$not:{$nin:["a"]}}}, "a" );
-check( {i:{$not:/a/}}, "b" );
-check( {i:{$not:/(a|b)/}}, "", 0 );
-check( {i:{$not:/a/,$regex:"a"}}, "", 0 );
-check( {i:{$not:/aa/}}, "a", 2 );
-fail( {i:{$not:{$regex:"a"}}} );
-fail( {i:{$not:{$options:"a"}}} );
-check( {i:{$type:2}}, "a", 2 );
-check( {i:{$not:{$type:1}}}, "a", 2 );
-check( {i:{$not:{$type:2}}}, "", 0 );
+ t.save({i: "a"});
+ t.save({i: "b"});
-t.remove( {} );
-t.save( {i:1} );
-check( {i:{$not:{$mod:[5,1]}}}, null, 0 );
-check( {i:{$mod:[5,2]}}, null, 0 );
-check( {i:{$not:{$mod:[5,2]}}}, 1, 1 );
+ fail({i: {$not: "a"}});
+ // SERVER-12735: We currently do not handle double negatives
+ // during query canonicalization.
+ // fail( {i:{$not:{$not:"a"}}} );
+ // fail( {i:{$not:{$not:{$gt:"a"}}}} );
+ fail({i: {$not: {$ref: "foo"}}});
+ fail({i: {$not: {}}});
+ check({i: {$gt: "a"}}, "b");
+ check({i: {$not: {$gt: "a"}}}, "a");
+ check({i: {$not: {$ne: "a"}}}, "a");
+ check({i: {$not: {$gte: "b"}}}, "a");
+ check({i: {$exists: true}}, "a", 2);
+ check({i: {$not: {$exists: true}}}, "", 0);
+ check({j: {$not: {$exists: false}}}, "", 0);
+ check({j: {$not: {$exists: true}}}, "a", 2);
+ check({i: {$not: {$in: ["a"]}}}, "b");
+ check({i: {$not: {$in: ["a", "b"]}}}, "", 0);
+ check({i: {$not: {$in: ["g"]}}}, "a", 2);
+ check({i: {$not: {$nin: ["a"]}}}, "a");
+ check({i: {$not: /a/}}, "b");
+ check({i: {$not: /(a|b)/}}, "", 0);
+ check({i: {$not: /a/, $regex: "a"}}, "", 0);
+ check({i: {$not: /aa/}}, "a", 2);
+ fail({i: {$not: {$regex: "a"}}});
+ fail({i: {$not: {$options: "a"}}});
+ check({i: {$type: 2}}, "a", 2);
+ check({i: {$not: {$type: 1}}}, "a", 2);
+ check({i: {$not: {$type: 2}}}, "", 0);
-t.remove( {} );
-t.save( {i:["a","b"]} );
-check( {i:{$not:{$size:2}}}, null, 0 );
-check( {i:{$not:{$size:3}}}, ["a","b"] );
-check( {i:{$not:{$gt:"a"}}}, null, 0 );
-check( {i:{$not:{$gt:"c"}}}, ["a","b"] );
-check( {i:{$not:{$all:["a","b"]}}}, null, 0 );
-check( {i:{$not:{$all:["c"]}}}, ["a","b"] );
+ t.remove({});
+ t.save({i: 1});
+ check({i: {$not: {$mod: [5, 1]}}}, null, 0);
+ check({i: {$mod: [5, 2]}}, null, 0);
+ check({i: {$not: {$mod: [5, 2]}}}, 1, 1);
-t.remove( {} );
-t.save( {i:[{j:"a"}]} );
-t.save( {i:[{j:"b"}]} );
-check( {i:{$not:{$elemMatch:{j:"a"}}}}, [{j:"b"}] );
-check( {i:{$not:{$elemMatch:{j:"f"}}}}, [{j:"a"}], 2 );
+ t.remove({});
+ t.save({i: ["a", "b"]});
+ check({i: {$not: {$size: 2}}}, null, 0);
+ check({i: {$not: {$size: 3}}}, ["a", "b"]);
+ check({i: {$not: {$gt: "a"}}}, null, 0);
+ check({i: {$not: {$gt: "c"}}}, ["a", "b"]);
+ check({i: {$not: {$all: ["a", "b"]}}}, null, 0);
+ check({i: {$not: {$all: ["c"]}}}, ["a", "b"]);
+
+ t.remove({});
+ t.save({i: [{j: "a"}]});
+ t.save({i: [{j: "b"}]});
+ check({i: {$not: {$elemMatch: {j: "a"}}}}, [{j: "b"}]);
+ check({i: {$not: {$elemMatch: {j: "f"}}}}, [{j: "a"}], 2);
};
doTest();
-t.ensureIndex( {i:1} );
+t.ensureIndex({i: 1});
doTest();
diff --git a/jstests/core/not3.js b/jstests/core/not3.js
index 9f3014f2c1a..9699f3838d1 100644
--- a/jstests/core/not3.js
+++ b/jstests/core/not3.js
@@ -9,11 +9,15 @@ t.save({_id: 0, arr: [1, 2, 3]});
t.save({_id: 1, arr: [10, 11]});
// Case 1: simple $ne over array field.
-var case1 = {arr: {$ne: 3}};
+var case1 = {
+ arr: {$ne: 3}
+};
assert.eq(1, t.find(case1).itcount(), "Case 1: wrong number of results");
assert.eq(1, t.findOne(case1)._id, "Case 1: wrong _id");
// Case 2: simple $not over array field.
-var case2 = {arr: {$not: {$gt: 6}}};
+var case2 = {
+ arr: {$not: {$gt: 6}}
+};
assert.eq(1, t.find(case2).itcount(), "Case 2: wrong number of results");
assert.eq(0, t.findOne(case2)._id, "Case 2: wrong _id");
diff --git a/jstests/core/notablescan.js b/jstests/core/notablescan.js
index f2ca68d2912..80306c08cf2 100644
--- a/jstests/core/notablescan.js
+++ b/jstests/core/notablescan.js
@@ -4,28 +4,38 @@ t = db.test_notablescan;
t.drop();
try {
- assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:true } ) );
+ assert.commandWorked(db._adminCommand({setParameter: 1, notablescan: true}));
// commented lines are SERVER-2222
- if ( 0 ) { // SERVER-2222
- assert.throws( function() { t.find( {a:1} ).toArray(); } );
+ if (0) { // SERVER-2222
+ assert.throws(function() {
+ t.find({a: 1}).toArray();
+ });
}
- t.save( {a:1} );
- if ( 0 ) { // SERVER-2222
- assert.throws( function() { t.count( {a:1} ); } );
- assert.throws( function() { t.find( {} ).toArray(); } );
+ t.save({a: 1});
+ if (0) { // SERVER-2222
+ assert.throws(function() {
+ t.count({a: 1});
+ });
+ assert.throws(function() {
+ t.find({}).toArray();
+ });
}
- assert.eq( 1, t.find( {} ).itcount() ); // SERVER-274
- assert.throws( function() { t.find( {a:1} ).toArray(); } );
- assert.throws( function() { t.find( {a:1} ).hint( {$natural:1} ).toArray(); } );
- t.ensureIndex( {a:1} );
- assert.eq( 0, t.find( {a:1,b:1} ).itcount() );
- assert.eq( 1, t.find( {a:1,b:null} ).itcount() );
+ assert.eq(1, t.find({}).itcount()); // SERVER-274
+ assert.throws(function() {
+ t.find({a: 1}).toArray();
+ });
+ assert.throws(function() {
+ t.find({a: 1}).hint({$natural: 1}).toArray();
+ });
+ t.ensureIndex({a: 1});
+ assert.eq(0, t.find({a: 1, b: 1}).itcount());
+ assert.eq(1, t.find({a: 1, b: null}).itcount());
// SERVER-4327
- assert.eq( 0, t.find( {a:{$in:[]}} ).itcount() );
- assert.eq( 0, t.find( {a:{$in:[]},b:0} ).itcount() );
+ assert.eq(0, t.find({a: {$in: []}}).itcount());
+ assert.eq(0, t.find({a: {$in: []}, b: 0}).itcount());
} finally {
// We assume notablescan was false before this test started and restore that
// expected value.
- assert.commandWorked( db._adminCommand( { setParameter:1, notablescan:false } ) );
+ assert.commandWorked(db._adminCommand({setParameter: 1, notablescan: false}));
}
diff --git a/jstests/core/ns_length.js b/jstests/core/ns_length.js
index 20825818174..17e5cbc0e25 100644
--- a/jstests/core/ns_length.js
+++ b/jstests/core/ns_length.js
@@ -5,14 +5,14 @@ var maxNsLength = 127;
var maxNsCollectionLength = 120;
var myDb = db.getSiblingDB("ns_length");
-myDb.dropDatabase(); // start empty
+myDb.dropDatabase(); // start empty
function mkStr(length) {
s = "";
while (s.length < length) {
s += "x";
}
- return s;
+ return s;
}
function canMakeCollectionWithName(name) {
@@ -36,7 +36,7 @@ function canMakeCollectionWithName(name) {
}
function canMakeIndexWithName(collection, name) {
- var success = collection.ensureIndex({x:1}, {name: name}).ok;
+ var success = collection.ensureIndex({x: 1}, {name: name}).ok;
if (success) {
assert.commandWorked(collection.dropIndex(name));
}
@@ -80,12 +80,11 @@ for (var i = maxCollectionNameLength - 3; i <= maxCollectionNameLength + 3; i++)
}
// test renaming collections with the destination around the name limit due to long index names
-myDb.from.ensureIndex({a:1}, {name: mkStr(100)});
-var indexNsNameOverhead = (myDb.getName() + "..$").length + 100; // index ns name - collection name
+myDb.from.ensureIndex({a: 1}, {name: mkStr(100)});
+var indexNsNameOverhead = (myDb.getName() + "..$").length + 100; // index ns name - collection name
var maxCollectionNameWithIndex = maxNsLength - indexNsNameOverhead;
for (var i = maxCollectionNameWithIndex - 3; i <= maxCollectionNameWithIndex + 3; i++) {
assert.eq(canRenameCollection("from", mkStr(i)),
i <= maxCollectionNameWithIndex,
"index ns name length = " + (indexNsNameOverhead + i));
}
-
diff --git a/jstests/core/null.js b/jstests/core/null.js
index f4bdeb44a4d..b5508c689d9 100644
--- a/jstests/core/null.js
+++ b/jstests/core/null.js
@@ -2,25 +2,25 @@
t = db.null1;
t.drop();
-t.save( { x : 1 } );
-t.save( { x : null } );
+t.save({x: 1});
+t.save({x: null});
-assert.eq( 1 , t.find( { x : null } ).count() , "A" );
-assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "B" );
+assert.eq(1, t.find({x: null}).count(), "A");
+assert.eq(1, t.find({x: {$ne: null}}).count(), "B");
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-assert.eq( 1 , t.find( { x : null } ).count() , "C" );
-assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "D" );
+assert.eq(1, t.find({x: null}).count(), "C");
+assert.eq(1, t.find({x: {$ne: null}}).count(), "D");
// -----
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
+assert.eq(2, t.find({y: null}).count(), "E");
-t.ensureIndex( { y : 1 } );
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
+t.ensureIndex({y: 1});
+assert.eq(2, t.find({y: null}).count(), "E");
-t.dropIndex( { y : 1 } );
+t.dropIndex({y: 1});
-t.ensureIndex( { y : 1 }, { sparse : true } );
-assert.eq( 2, t.find( { y : null } ).count(), "E" );
+t.ensureIndex({y: 1}, {sparse: true});
+assert.eq(2, t.find({y: null}).count(), "E");
diff --git a/jstests/core/null2.js b/jstests/core/null2.js
index 841c26ac22f..0e8173bf874 100644
--- a/jstests/core/null2.js
+++ b/jstests/core/null2.js
@@ -2,44 +2,38 @@
t = db.null2;
t.drop();
-t.insert( { _id : 1, a : [ { b : 5 } ] } );
-t.insert( { _id : 2, a : [ {} ] } );
-t.insert( { _id : 3, a : [] } );
-t.insert( { _id : 4, a : [ {}, { b : 5 } ] } );
-t.insert( { _id : 5, a : [ 5, { b : 5 } ] } );
-
-function doQuery( query ) {
- printjson( query );
- t.find( query ).forEach(
- function(z) {
- print( "\t" + tojson(z) );
- }
- );
- return t.find( query ).count();
+t.insert({_id: 1, a: [{b: 5}]});
+t.insert({_id: 2, a: [{}]});
+t.insert({_id: 3, a: []});
+t.insert({_id: 4, a: [{}, {b: 5}]});
+t.insert({_id: 5, a: [5, {b: 5}]});
+
+function doQuery(query) {
+ printjson(query);
+ t.find(query).forEach(function(z) {
+ print("\t" + tojson(z));
+ });
+ return t.find(query).count();
}
-function getIds( query ) {
+function getIds(query) {
var ids = [];
- t.find( query ).forEach(
- function(z) {
- ids.push( z._id );
- }
- );
+ t.find(query).forEach(function(z) {
+ ids.push(z._id);
+ });
return ids;
}
-theQueries = [ { "a.b" : null }, { "a.b" : { $in : [ null ] } } ];
+theQueries = [{"a.b": null}, {"a.b": {$in: [null]}}];
-for ( var i=0; i < theQueries.length; i++ ) {
- assert.eq( 2, doQuery( theQueries[i] ) );
- assert.eq( [2,4], getIds( theQueries[i] ) );
+for (var i = 0; i < theQueries.length; i++) {
+ assert.eq(2, doQuery(theQueries[i]));
+ assert.eq([2, 4], getIds(theQueries[i]));
}
-t.ensureIndex( { "a.b" : 1 } );
+t.ensureIndex({"a.b": 1});
-for ( var i=0; i < theQueries.length; i++ ) {
- assert.eq( 2, doQuery( theQueries[i] ) );
- assert.eq( [2,4], getIds( theQueries[i] ) );
+for (var i = 0; i < theQueries.length; i++) {
+ assert.eq(2, doQuery(theQueries[i]));
+ assert.eq([2, 4], getIds(theQueries[i]));
}
-
-
diff --git a/jstests/core/null_field_name.js b/jstests/core/null_field_name.js
index 7fa14b0a1bc..f51e2fbff6a 100644
--- a/jstests/core/null_field_name.js
+++ b/jstests/core/null_field_name.js
@@ -1,8 +1,8 @@
// SERVER-10313: Test that null char in field name causes an error when converting to bson
-assert.throws( function () { Object.bsonsize({"a\0":1}); },
- null,
- "null char in field name");
+assert.throws(function() {
+ Object.bsonsize({"a\0": 1});
+}, null, "null char in field name");
-assert.throws( function () { Object.bsonsize({"\0asdf":1}); },
- null,
- "null char in field name"); \ No newline at end of file
+assert.throws(function() {
+ Object.bsonsize({"\0asdf": 1});
+}, null, "null char in field name"); \ No newline at end of file
diff --git a/jstests/core/numberint.js b/jstests/core/numberint.js
index f786a515c29..55c923aea79 100644
--- a/jstests/core/numberint.js
+++ b/jstests/core/numberint.js
@@ -1,77 +1,79 @@
-assert.eq.automsg( "0", "new NumberInt()" );
-
-n = new NumberInt( 4 );
-assert.eq.automsg( "4", "n" );
-assert.eq.automsg( "4", "n.toNumber()" );
-assert.eq.automsg( "8", "n + 4" );
-assert.eq.automsg( "'NumberInt(4)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(4)'", "tojson( n )" );
+assert.eq.automsg("0", "new NumberInt()");
+
+n = new NumberInt(4);
+assert.eq.automsg("4", "n");
+assert.eq.automsg("4", "n.toNumber()");
+assert.eq.automsg("8", "n + 4");
+assert.eq.automsg("'NumberInt(4)'", "n.toString()");
+assert.eq.automsg("'NumberInt(4)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(4) }'", "p" );
-
-assert.eq.automsg( "NumberInt(4 )", "eval( tojson( NumberInt( 4 ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberInt( -4 );
-assert.eq.automsg( "-4", "n" );
-assert.eq.automsg( "-4", "n.toNumber()" );
-assert.eq.automsg( "0", "n + 4" );
-assert.eq.automsg( "'NumberInt(-4)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(-4)'", "tojson( n )" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberInt(4) }'", "p");
+
+assert.eq.automsg("NumberInt(4 )", "eval( tojson( NumberInt( 4 ) ) )");
+assert.eq.automsg("a", "eval( tojson( a ) )");
+
+n = new NumberInt(-4);
+assert.eq.automsg("-4", "n");
+assert.eq.automsg("-4", "n.toNumber()");
+assert.eq.automsg("0", "n + 4");
+assert.eq.automsg("'NumberInt(-4)'", "n.toString()");
+assert.eq.automsg("'NumberInt(-4)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(-4) }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberInt(-4) }'", "p");
-n = new NumberInt( "11111" );
-assert.eq.automsg( "'NumberInt(11111)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(11111)'", "tojson( n )" );
+n = new NumberInt("11111");
+assert.eq.automsg("'NumberInt(11111)'", "n.toString()");
+assert.eq.automsg("'NumberInt(11111)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(11111) }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberInt(11111) }'", "p");
-assert.eq.automsg( "NumberInt('11111' )", "eval( tojson( NumberInt( '11111' ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
+assert.eq.automsg("NumberInt('11111' )", "eval( tojson( NumberInt( '11111' ) ) )");
+assert.eq.automsg("a", "eval( tojson( a ) )");
-n = new NumberInt( "-11111" );
-assert.eq.automsg( "-11111", "n.toNumber()" );
-assert.eq.automsg( "-11107", "n + 4" );
-assert.eq.automsg( "'NumberInt(-11111)'", "n.toString()" );
-assert.eq.automsg( "'NumberInt(-11111)'", "tojson( n )" );
+n = new NumberInt("-11111");
+assert.eq.automsg("-11111", "n.toNumber()");
+assert.eq.automsg("-11107", "n + 4");
+assert.eq.automsg("'NumberInt(-11111)'", "n.toString()");
+assert.eq.automsg("'NumberInt(-11111)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberInt(-11111) }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberInt(-11111) }'", "p");
// parsing: SpiderMonkey evaluates non-numeric strings as 0, which is not bad
-//assert.throws.automsg( function() { new NumberInt( "" ); } );
-//assert.throws.automsg( function() { new NumberInt( "y" ); } );
+// assert.throws.automsg( function() { new NumberInt( "" ); } );
+// assert.throws.automsg( function() { new NumberInt( "y" ); } );
// eq
-assert.eq( { x : 5 } , { x : new NumberInt( "5" ) } );
+assert.eq({x: 5}, {x: new NumberInt("5")});
-assert( 5 == NumberInt( 5 ) , "eq" );
-assert( 5 < NumberInt( 6 ) , "lt" );
-assert( 5 > NumberInt( 4 ) , "lt" );
-assert( NumberInt( 1 ) , "to bool a" );
+assert(5 == NumberInt(5), "eq");
+assert(5 < NumberInt(6), "lt");
+assert(5 > NumberInt(4), "lt");
+assert(NumberInt(1), "to bool a");
// objects are always considered truthy
-//assert( ! NumberInt( 0 ) , "to bool b" );
+// assert( ! NumberInt( 0 ) , "to bool b" );
// create doc with int value in db
-t = db.getCollection( "numberint" );
+t = db.getCollection("numberint");
t.drop();
-o = { a : NumberInt(42) };
-t.save( o );
+o = {
+ a: NumberInt(42)
+};
+t.save(o);
-assert.eq( 42 , t.findOne().a , "save doc 1" );
-assert.eq( 1 , t.find({a: {$type: 16}}).count() , "save doc 2" );
-assert.eq( 0 , t.find({a: {$type: 1}}).count() , "save doc 3" );
+assert.eq(42, t.findOne().a, "save doc 1");
+assert.eq(1, t.find({a: {$type: 16}}).count(), "save doc 2");
+assert.eq(0, t.find({a: {$type: 1}}).count(), "save doc 3");
// roundtripping
mod = t.findOne({a: 42});
@@ -79,14 +81,12 @@ mod.a += 10;
mod.b = "foo";
delete mod._id;
t.save(mod);
-assert.eq( 2 , t.find({a: {$type: 16}}).count() , "roundtrip 1" );
-assert.eq( 0 , t.find({a: {$type: 1}}).count() , "roundtrip 2" );
-assert.eq( 1 , t.find({a: 52}).count() , "roundtrip 3" );
+assert.eq(2, t.find({a: {$type: 16}}).count(), "roundtrip 1");
+assert.eq(0, t.find({a: {$type: 1}}).count(), "roundtrip 2");
+assert.eq(1, t.find({a: 52}).count(), "roundtrip 3");
// save regular number
t.save({a: 42});
-assert.eq( 2 , t.find({a: {$type: 16}}).count() , "normal 1" );
-assert.eq( 1 , t.find({a: {$type: 1}}).count() , "normal 2" );
-assert.eq( 2 , t.find({a: 42}).count() , "normal 3" );
-
-
+assert.eq(2, t.find({a: {$type: 16}}).count(), "normal 1");
+assert.eq(1, t.find({a: {$type: 1}}).count(), "normal 2");
+assert.eq(2, t.find({a: 42}).count(), "normal 3");
diff --git a/jstests/core/numberlong.js b/jstests/core/numberlong.js
index adda61f8acb..884a301440e 100644
--- a/jstests/core/numberlong.js
+++ b/jstests/core/numberlong.js
@@ -1,93 +1,102 @@
-assert.eq.automsg( "0", "new NumberLong()" );
-
-n = new NumberLong( 4 );
-assert.eq.automsg( "4", "n" );
-assert.eq.automsg( "4", "n.toNumber()" );
-assert.eq.automsg( "8", "n + 4" );
-assert.eq.automsg( "'NumberLong(4)'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(4)'", "tojson( n )" );
+assert.eq.automsg("0", "new NumberLong()");
+
+n = new NumberLong(4);
+assert.eq.automsg("4", "n");
+assert.eq.automsg("4", "n.toNumber()");
+assert.eq.automsg("8", "n + 4");
+assert.eq.automsg("'NumberLong(4)'", "n.toString()");
+assert.eq.automsg("'NumberLong(4)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(4) }'", "p" );
-
-assert.eq.automsg( "NumberLong(4 )", "eval( tojson( NumberLong( 4 ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberLong( -4 );
-assert.eq.automsg( "-4", "n" );
-assert.eq.automsg( "-4", "n.toNumber()" );
-assert.eq.automsg( "0", "n + 4" );
-assert.eq.automsg( "'NumberLong(-4)'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(-4)'", "tojson( n )" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberLong(4) }'", "p");
+
+assert.eq.automsg("NumberLong(4 )", "eval( tojson( NumberLong( 4 ) ) )");
+assert.eq.automsg("a", "eval( tojson( a ) )");
+
+n = new NumberLong(-4);
+assert.eq.automsg("-4", "n");
+assert.eq.automsg("-4", "n.toNumber()");
+assert.eq.automsg("0", "n + 4");
+assert.eq.automsg("'NumberLong(-4)'", "n.toString()");
+assert.eq.automsg("'NumberLong(-4)'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(-4) }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberLong(-4) }'", "p");
// double
-n = new NumberLong(4294967296); // 2^32
-assert.eq.automsg( "4294967296", "n" );
-assert.eq.automsg( "4294967296", "n.toNumber()" );
-assert.eq.automsg( "4294967295", "n - 1" );
-assert.eq.automsg( "'NumberLong(\"4294967296\")'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(\"4294967296\")'", "tojson( n )" );
-assert.eq.automsg( "4294967296", "n.floatApprox" );
-assert.eq.automsg( "", "n.top" );
-assert.eq.automsg( "", "n.bottom" );
+n = new NumberLong(4294967296); // 2^32
+assert.eq.automsg("4294967296", "n");
+assert.eq.automsg("4294967296", "n.toNumber()");
+assert.eq.automsg("4294967295", "n - 1");
+assert.eq.automsg("'NumberLong(\"4294967296\")'", "n.toString()");
+assert.eq.automsg("'NumberLong(\"4294967296\")'", "tojson( n )");
+assert.eq.automsg("4294967296", "n.floatApprox");
+assert.eq.automsg("", "n.top");
+assert.eq.automsg("", "n.bottom");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(\"4294967296\") }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberLong(\"4294967296\") }'", "p");
// too big to fit in double
-n = new NumberLong( "11111111111111111" );
-assert.eq.automsg( "11111111111111112", "n.toNumber()" );
-assert.eq.automsg( "11111111111111116", "n + 4" );
-assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(\"11111111111111111\")'", "tojson( n )" );
+n = new NumberLong("11111111111111111");
+assert.eq.automsg("11111111111111112", "n.toNumber()");
+assert.eq.automsg("11111111111111116", "n + 4");
+assert.eq.automsg("'NumberLong(\"11111111111111111\")'", "n.toString()");
+assert.eq.automsg("'NumberLong(\"11111111111111111\")'", "tojson( n )");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(\"11111111111111111\") }'", "p" );
-
-assert.eq.automsg( "NumberLong('11111111111111111' )", "eval( tojson( NumberLong( '11111111111111111' ) ) )" );
-assert.eq.automsg( "a", "eval( tojson( a ) )" );
-
-n = new NumberLong( "-11111111111111111" );
-assert.eq.automsg( "-11111111111111112", "n.toNumber()" );
-assert.eq.automsg( "-11111111111111108", "n + 4" );
-assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "n.toString()" );
-assert.eq.automsg( "'NumberLong(\"-11111111111111111\")'", "tojson( n )" );
-assert.eq.automsg( "-11111111111111112", "n.floatApprox" );
-assert.eq.automsg( "4292380288", "n.top" );
-assert.eq.automsg( "3643379257", "n.bottom" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberLong(\"11111111111111111\") }'", "p");
+
+assert.eq.automsg("NumberLong('11111111111111111' )",
+ "eval( tojson( NumberLong( '11111111111111111' ) ) )");
+assert.eq.automsg("a", "eval( tojson( a ) )");
+
+n = new NumberLong("-11111111111111111");
+assert.eq.automsg("-11111111111111112", "n.toNumber()");
+assert.eq.automsg("-11111111111111108", "n + 4");
+assert.eq.automsg("'NumberLong(\"-11111111111111111\")'", "n.toString()");
+assert.eq.automsg("'NumberLong(\"-11111111111111111\")'", "tojson( n )");
+assert.eq.automsg("-11111111111111112", "n.floatApprox");
+assert.eq.automsg("4292380288", "n.top");
+assert.eq.automsg("3643379257", "n.bottom");
a = {};
a.a = n;
-p = tojson( a );
-assert.eq.automsg( "'{ \"a\" : NumberLong(\"-11111111111111111\") }'", "p" );
+p = tojson(a);
+assert.eq.automsg("'{ \"a\" : NumberLong(\"-11111111111111111\") }'", "p");
-n = new NumberLong( "9223372036854775807" );
-assert.eq.automsg( "9223372036854775807", "n.floatApprox" );
-assert.eq.automsg( "2147483647", "n.top" );
-assert.eq.automsg( "4294967295", "n.bottom" );
+n = new NumberLong("9223372036854775807");
+assert.eq.automsg("9223372036854775807", "n.floatApprox");
+assert.eq.automsg("2147483647", "n.top");
+assert.eq.automsg("4294967295", "n.bottom");
-n = new NumberLong( 9223372036854775807, 2147483647, 4294967295 );
-assert.eq.automsg( "9223372036854775807", "n.floatApprox" );
-assert.eq.automsg( "2147483647", "n.top" );
-assert.eq.automsg( "4294967295", "n.bottom" );
+n = new NumberLong(9223372036854775807, 2147483647, 4294967295);
+assert.eq.automsg("9223372036854775807", "n.floatApprox");
+assert.eq.automsg("2147483647", "n.top");
+assert.eq.automsg("4294967295", "n.bottom");
// parsing
-assert.throws.automsg( function() { new NumberLong( "" ); } );
-assert.throws.automsg( function() { new NumberLong( "y" ); } );
-assert.throws.automsg( function() { new NumberLong( "11111111111111111111" ); } );
+assert.throws.automsg(function() {
+ new NumberLong("");
+});
+assert.throws.automsg(function() {
+ new NumberLong("y");
+});
+assert.throws.automsg(function() {
+ new NumberLong("11111111111111111111");
+});
// create NumberLong from NumberInt (SERVER-9973)
-assert.doesNotThrow.automsg( function() { new NumberLong(NumberInt(1)); } );
+assert.doesNotThrow.automsg(function() {
+ new NumberLong(NumberInt(1));
+});
// check that creating a NumberLong from a NumberLong bigger than a double doesn't
// get a truncated value (SERVER-9973)
-n = new NumberLong(NumberLong( "11111111111111111" ));
+n = new NumberLong(NumberLong("11111111111111111"));
assert.eq.automsg("n.toString()", "'NumberLong(\"11111111111111111\")'");
//
@@ -112,10 +121,24 @@ assert.eq(left.compare(left), 0);
assert.eq(right.compare(right), 0);
// Bad input to .compare().
-assert.throws(function() { NumberLong("0").compare(); });
-assert.throws(function() { NumberLong("0").compare(null); });
-assert.throws(function() { NumberLong("0").compare(undefined); });
-assert.throws(function() { NumberLong("0").compare(3); });
-assert.throws(function() { NumberLong("0").compare("foo"); });
-assert.throws(function() { NumberLong("0").compare(NumberLong("0"), 3); });
-assert.throws(function() { NumberLong("0").compare({'replSet2Members': 6}); });
+assert.throws(function() {
+ NumberLong("0").compare();
+});
+assert.throws(function() {
+ NumberLong("0").compare(null);
+});
+assert.throws(function() {
+ NumberLong("0").compare(undefined);
+});
+assert.throws(function() {
+ NumberLong("0").compare(3);
+});
+assert.throws(function() {
+ NumberLong("0").compare("foo");
+});
+assert.throws(function() {
+ NumberLong("0").compare(NumberLong("0"), 3);
+});
+assert.throws(function() {
+ NumberLong("0").compare({'replSet2Members': 6});
+});
diff --git a/jstests/core/numberlong2.js b/jstests/core/numberlong2.js
index c730345f307..59ca199259f 100644
--- a/jstests/core/numberlong2.js
+++ b/jstests/core/numberlong2.js
@@ -3,13 +3,13 @@
t = db.jstests_numberlong2;
t.drop();
-t.ensureIndex( {x:1} );
+t.ensureIndex({x: 1});
function chk(longNum) {
t.remove({});
- t.save({ x: longNum });
- assert.eq(longNum, t.find().hint({ x: 1 }).next().x);
- assert.eq(longNum, t.find({}, { _id: 0, x: 1 }).hint({ x: 1 }).next().x);
+ t.save({x: longNum});
+ assert.eq(longNum, t.find().hint({x: 1}).next().x);
+ assert.eq(longNum, t.find({}, {_id: 0, x: 1}).hint({x: 1}).next().x);
}
chk(NumberLong("1123539983311657217"));
@@ -21,8 +21,9 @@ chk(NumberLong("4503599627370497"));
t.remove({});
s = "11235399833116571";
-for( i = 99; i >= 0; --i ) {
- t.save( {x:NumberLong( s + i )} );
+for (i = 99; i >= 0; --i) {
+ t.save({x: NumberLong(s + i)});
}
-assert.eq( t.find().sort( {x:1} ).hint( {$natural:1} ).toArray(), t.find().sort( {x:1} ).hint( {x:1} ).toArray() );
+assert.eq(t.find().sort({x: 1}).hint({$natural: 1}).toArray(),
+ t.find().sort({x: 1}).hint({x: 1}).toArray());
diff --git a/jstests/core/numberlong3.js b/jstests/core/numberlong3.js
index b8a8c9c468e..b62d1865ff4 100644
--- a/jstests/core/numberlong3.js
+++ b/jstests/core/numberlong3.js
@@ -4,22 +4,24 @@ t = db.jstests_numberlong3;
t.drop();
s = "11235399833116571";
-for( i = 10; i >= 0; --i ) {
- n = NumberLong( s + i );
- t.save( {x:n} );
- if ( 0 ) { // SERVER-3719
- t.save( {x:n.floatApprox} );
+for (i = 10; i >= 0; --i) {
+ n = NumberLong(s + i);
+ t.save({x: n});
+ if (0) { // SERVER-3719
+ t.save({x: n.floatApprox});
}
}
-ret = t.find().sort({x:1}).toArray().filter( function( x ) { return typeof( x.x.floatApprox ) != 'undefined'; } );
+ret = t.find().sort({x: 1}).toArray().filter(function(x) {
+ return typeof(x.x.floatApprox) != 'undefined';
+});
-//printjson( ret );
+// printjson( ret );
-for( i = 1; i < ret.length; ++i ) {
- first = ret[i-1].x.toString();
+for (i = 1; i < ret.length; ++i) {
+ first = ret[i - 1].x.toString();
second = ret[i].x.toString();
- if ( first.length == second.length ) {
- assert.lte( ret[i-1].x.toString(), ret[i].x.toString() );
+ if (first.length == second.length) {
+ assert.lte(ret[i - 1].x.toString(), ret[i].x.toString());
}
}
diff --git a/jstests/core/numberlong4.js b/jstests/core/numberlong4.js
index 0924931efaf..f81a9599e21 100644
--- a/jstests/core/numberlong4.js
+++ b/jstests/core/numberlong4.js
@@ -1,21 +1,21 @@
-// Test handling of comparison between long longs and their double approximations in btrees - SERVER-3719.
+// Test handling of comparison between long longs and their double approximations in btrees -
+// SERVER-3719.
t = db.jstests_numberlong4;
t.drop();
-if ( 0 ) { // SERVER-3719
+if (0) { // SERVER-3719
-t.ensureIndex({x:1});
+ t.ensureIndex({x: 1});
-Random.setRandomSeed();
+ Random.setRandomSeed();
-s = "11235399833116571";
-for( i = 0; i < 10000; ++i ) {
- n = NumberLong( s + Random.randInt( 10 ) );
- t.insert( { x: ( Random.randInt( 2 ) ? n : n.floatApprox ) } );
-}
-
-// If this does not return, there is a problem with index structure.
-t.find().hint({x:1}).itcount();
+ s = "11235399833116571";
+ for (i = 0; i < 10000; ++i) {
+ n = NumberLong(s + Random.randInt(10));
+ t.insert({x: (Random.randInt(2) ? n : n.floatApprox)});
+ }
+ // If this does not return, there is a problem with index structure.
+ t.find().hint({x: 1}).itcount();
}
diff --git a/jstests/core/objid1.js b/jstests/core/objid1.js
index f1a9fbe0fbd..d08089c26db 100644
--- a/jstests/core/objid1.js
+++ b/jstests/core/objid1.js
@@ -2,15 +2,17 @@ t = db.objid1;
t.drop();
b = new ObjectId();
-assert( b.str , "A" );
+assert(b.str, "A");
-a = new ObjectId( b.str );
-assert.eq( a.str , b.str , "B" );
+a = new ObjectId(b.str);
+assert.eq(a.str, b.str, "B");
-t.save( { a : a } );
-assert( t.findOne().a.isObjectId , "C" );
-assert.eq( a.str , t.findOne().a.str , "D" );
+t.save({a: a});
+assert(t.findOne().a.isObjectId, "C");
+assert.eq(a.str, t.findOne().a.str, "D");
-x = { a : new ObjectId() };
-eval( " y = " + tojson( x ) );
-assert.eq( x.a.str , y.a.str , "E" );
+x = {
+ a: new ObjectId()
+};
+eval(" y = " + tojson(x));
+assert.eq(x.a.str, y.a.str, "E");
diff --git a/jstests/core/objid2.js b/jstests/core/objid2.js
index 0805dffced7..247843b587b 100644
--- a/jstests/core/objid2.js
+++ b/jstests/core/objid2.js
@@ -1,7 +1,7 @@
t = db.objid2;
t.drop();
-t.save( { _id : 517 , a : "hello" } );
+t.save({_id: 517, a: "hello"});
-assert.eq( t.findOne().a , "hello" );
-assert.eq( t.findOne()._id , 517 );
+assert.eq(t.findOne().a, "hello");
+assert.eq(t.findOne()._id, 517);
diff --git a/jstests/core/objid3.js b/jstests/core/objid3.js
index ddf20d9af27..12d45530e52 100644
--- a/jstests/core/objid3.js
+++ b/jstests/core/objid3.js
@@ -1,9 +1,8 @@
t = db.objid3;
t.drop();
-t.save( { a : "bob" , _id : 517 } );
-for ( var k in t.findOne() ){
- assert.eq( k , "_id" , "keys out of order" );
+t.save({a: "bob", _id: 517});
+for (var k in t.findOne()) {
+ assert.eq(k, "_id", "keys out of order");
break;
}
-
diff --git a/jstests/core/objid4.js b/jstests/core/objid4.js
index 1ae55a558f2..7513e077029 100644
--- a/jstests/core/objid4.js
+++ b/jstests/core/objid4.js
@@ -1,16 +1,19 @@
-
o = new ObjectId();
-assert( o.str );
+assert(o.str);
-a = new ObjectId( o.str );
-assert.eq( o.str , a.str );
-assert.eq( a.str , a.str.toString() );
+a = new ObjectId(o.str);
+assert.eq(o.str, a.str);
+assert.eq(a.str, a.str.toString());
-b = ObjectId( o.str );
-assert.eq( o.str , b.str );
-assert.eq( b.str , b.str.toString() );
+b = ObjectId(o.str);
+assert.eq(o.str, b.str);
+assert.eq(b.str, b.str.toString());
-assert.throws( function(z){ return new ObjectId( "a" ); } );
-assert.throws( function(z){ return new ObjectId( "12345678901234567890123z" ); } );
+assert.throws(function(z) {
+ return new ObjectId("a");
+});
+assert.throws(function(z) {
+ return new ObjectId("12345678901234567890123z");
+});
diff --git a/jstests/core/objid5.js b/jstests/core/objid5.js
index 6189032df6e..5b3917727e9 100644
--- a/jstests/core/objid5.js
+++ b/jstests/core/objid5.js
@@ -2,18 +2,18 @@
t = db.objid5;
t.drop();
-t.save( { _id : 5.5 } );
-assert.eq( 18 , Object.bsonsize( t.findOne() ) , "A" );
+t.save({_id: 5.5});
+assert.eq(18, Object.bsonsize(t.findOne()), "A");
-x = db.runCommand( { features : 1 } );
-y = db.runCommand( { features : 1 , oidReset : 1 } );
+x = db.runCommand({features: 1});
+y = db.runCommand({features: 1, oidReset: 1});
-if( !x.ok )
+if (!x.ok)
print("x: " + tojson(x));
-assert( x.oidMachine , "B1" );
-assert.neq( x.oidMachine , y.oidMachine , "B2" );
-assert.eq( x.oidMachine , y.oidMachineOld , "B3" );
+assert(x.oidMachine, "B1");
+assert.neq(x.oidMachine, y.oidMachine, "B2");
+assert.eq(x.oidMachine, y.oidMachineOld, "B3");
-assert.eq( 18 , Object.bsonsize( { _id : 7.7 } ) , "C1" );
-assert.eq( 0 , Object.bsonsize( null ) , "C2" );
+assert.eq(18, Object.bsonsize({_id: 7.7}), "C1");
+assert.eq(0, Object.bsonsize(null), "C2");
diff --git a/jstests/core/objid7.js b/jstests/core/objid7.js
index 520289c8c37..4c3505f8965 100644
--- a/jstests/core/objid7.js
+++ b/jstests/core/objid7.js
@@ -1,13 +1,12 @@
-a = new ObjectId( "4c1a478603eba73620000000" );
-b = new ObjectId( "4c1a478603eba73620000000" );
+a = new ObjectId("4c1a478603eba73620000000");
+b = new ObjectId("4c1a478603eba73620000000");
c = new ObjectId();
-assert.eq( a.toString() , b.toString() , "A" );
-assert.eq( a.toString() , "ObjectId(\"4c1a478603eba73620000000\")" , "B" );
+assert.eq(a.toString(), b.toString(), "A");
+assert.eq(a.toString(), "ObjectId(\"4c1a478603eba73620000000\")", "B");
-assert( a.equals( b ) , "C" );
-
-assert.neq( a.toString() , c.toString() , "D" );
-assert( ! a.equals( c ) , "E" );
+assert(a.equals(b), "C");
+assert.neq(a.toString(), c.toString(), "D");
+assert(!a.equals(c), "E");
diff --git a/jstests/core/opcounters_active.js b/jstests/core/opcounters_active.js
index 56330cbbebc..c184eab3d1d 100644
--- a/jstests/core/opcounters_active.js
+++ b/jstests/core/opcounters_active.js
@@ -2,51 +2,32 @@
(function() {
"use strict";
- //Test the getActiveCommands function
- //Should remove the listCollections section but keep the rest
+ // Test the getActiveCommands function
+ // Should remove the listCollections section but keep the rest
var testInput = {
- "isMaster" : {
- "failed" : NumberLong(0),
- "total" : NumberLong(3)
- },
- "mapreduce" : {
- "shardedfinish" : {
- "failed" : NumberLong(0),
- "total" : NumberLong(1)
- }
- },
- "listCollections" : {
- "failed" : NumberLong(0),
- "total" : NumberLong(0)
- }
+ "isMaster": {"failed": NumberLong(0), "total": NumberLong(3)},
+ "mapreduce": {"shardedfinish": {"failed": NumberLong(0), "total": NumberLong(1)}},
+ "listCollections": {"failed": NumberLong(0), "total": NumberLong(0)}
};
var testExpected = {
- "isMaster" : {
- "failed" : NumberLong(0),
- "total" : NumberLong(3)
- },
- "mapreduce" : {
- "shardedfinish" : {
- "failed" : NumberLong(0),
- "total" : NumberLong(1)
- }
- }
+ "isMaster": {"failed": NumberLong(0), "total": NumberLong(3)},
+ "mapreduce": {"shardedfinish": {"failed": NumberLong(0), "total": NumberLong(1)}}
};
var testResult = getActiveCommands(testInput);
assert.eq(testResult, testExpected, "getActiveCommands did not return the expected result");
- //Test that the serverstatus helper works
+ // Test that the serverstatus helper works
var result = db.serverStatus();
assert.neq(undefined, result, result);
- //Test that the metrics tree returns
+ // Test that the metrics tree returns
assert.neq(undefined, result.metrics, result);
- //Test that the metrics.commands tree returns
+ // Test that the metrics.commands tree returns
assert.neq(undefined, result.metrics.commands, result);
- //Test that the metrics.commands.serverStatus value is non-zero
+ // Test that the metrics.commands.serverStatus value is non-zero
assert.neq(0, result.metrics.commands.serverStatus.total, result);
- //Test that the command returns successfully when no metrics tree is present
- var result = db.serverStatus({"metrics":0});
+ // Test that the command returns successfully when no metrics tree is present
+ var result = db.serverStatus({"metrics": 0});
assert.eq(undefined, result.metrics, result);
}());
\ No newline at end of file
diff --git a/jstests/core/opcounters_write_cmd.js b/jstests/core/opcounters_write_cmd.js
index 660b82bd501..21e42be16b5 100644
--- a/jstests/core/opcounters_write_cmd.js
+++ b/jstests/core/opcounters_write_cmd.js
@@ -27,35 +27,34 @@ t.drop();
// Single insert, no error.
opCounters = newdb.serverStatus().opcounters;
-res = t.insert({_id:0});
+res = t.insert({_id: 0});
assert.writeOK(res);
assert.eq(opCounters.insert + 1, newdb.serverStatus().opcounters.insert);
// Bulk insert, no error.
opCounters = newdb.serverStatus().opcounters;
-res = t.insert([{_id:1},{_id:2}]);
+res = t.insert([{_id: 1}, {_id: 2}]);
assert.writeOK(res);
assert.eq(opCounters.insert + 2, newdb.serverStatus().opcounters.insert);
-
// Test is not run when in compatibility mode as errors are not counted
-if (t.getMongo().writeMode() != "compatibility"){
+if (t.getMongo().writeMode() != "compatibility") {
// Single insert, with error.
opCounters = newdb.serverStatus().opcounters;
- res = t.insert({_id:0});
+ res = t.insert({_id: 0});
assert.writeError(res);
assert.eq(opCounters.insert + 1, newdb.serverStatus().opcounters.insert);
// Bulk insert, with error, ordered.
opCounters = newdb.serverStatus().opcounters;
- res = t.insert([{_id:3},{_id:3},{_id:4}]);
+ res = t.insert([{_id: 3}, {_id: 3}, {_id: 4}]);
assert.writeError(res);
assert.eq(opCounters.insert + 2, newdb.serverStatus().opcounters.insert);
// Bulk insert, with error, unordered.
var continueOnErrorFlag = 1;
opCounters = newdb.serverStatus().opcounters;
- res = t.insert([{_id:5},{_id:5},{_id:6}], continueOnErrorFlag);
+ res = t.insert([{_id: 5}, {_id: 5}, {_id: 6}], continueOnErrorFlag);
assert.writeError(res);
assert.eq(opCounters.insert + 3, newdb.serverStatus().opcounters.insert);
}
@@ -64,17 +63,17 @@ if (t.getMongo().writeMode() != "compatibility"){
//
t.drop();
-t.insert({_id:0});
+t.insert({_id: 0});
// Update, no error.
opCounters = newdb.serverStatus().opcounters;
-res = t.update({_id:0}, {$set:{a:1}});
+res = t.update({_id: 0}, {$set: {a: 1}});
assert.writeOK(res);
assert.eq(opCounters.update + 1, newdb.serverStatus().opcounters.update);
// Update, with error.
opCounters = newdb.serverStatus().opcounters;
-res = t.update({_id:0}, {$set:{_id:1}});
+res = t.update({_id: 0}, {$set: {_id: 1}});
assert.writeError(res);
assert.eq(opCounters.update + 1, newdb.serverStatus().opcounters.update);
@@ -83,17 +82,17 @@ assert.eq(opCounters.update + 1, newdb.serverStatus().opcounters.update);
//
t.drop();
-t.insert([{_id:0},{_id:1}]);
+t.insert([{_id: 0}, {_id: 1}]);
// Delete, no error.
opCounters = newdb.serverStatus().opcounters;
-res = t.remove({_id:0});
+res = t.remove({_id: 0});
assert.writeOK(res);
assert.eq(opCounters.delete + 1, newdb.serverStatus().opcounters.delete);
// Delete, with error.
opCounters = newdb.serverStatus().opcounters;
-res = t.remove({_id:{$invalidOp:1}});
+res = t.remove({_id: {$invalidOp: 1}});
assert.writeError(res);
assert.eq(opCounters.delete + 1, newdb.serverStatus().opcounters.delete);
@@ -104,7 +103,7 @@ assert.eq(opCounters.delete + 1, newdb.serverStatus().opcounters.delete);
//
t.drop();
-t.insert({_id:0});
+t.insert({_id: 0});
// Query, no error.
opCounters = newdb.serverStatus().opcounters;
@@ -113,7 +112,9 @@ assert.eq(opCounters.query + 1, newdb.serverStatus().opcounters.query);
// Query, with error.
opCounters = newdb.serverStatus().opcounters;
-assert.throws(function() { t.findOne({_id:{$invalidOp:1}}); });
+assert.throws(function() {
+ t.findOne({_id: {$invalidOp: 1}});
+});
assert.eq(opCounters.query + 1, newdb.serverStatus().opcounters.query);
//
@@ -123,11 +124,11 @@ assert.eq(opCounters.query + 1, newdb.serverStatus().opcounters.query);
//
t.drop();
-t.insert([{_id:0},{_id:1},{_id:2}]);
+t.insert([{_id: 0}, {_id: 1}, {_id: 2}]);
// Getmore, no error.
opCounters = newdb.serverStatus().opcounters;
-t.find().batchSize(2).toArray(); // 3 documents, batchSize=2 => 1 query + 1 getmore
+t.find().batchSize(2).toArray(); // 3 documents, batchSize=2 => 1 query + 1 getmore
assert.eq(opCounters.query + 1, newdb.serverStatus().opcounters.query);
assert.eq(opCounters.getmore + 1, newdb.serverStatus().opcounters.getmore);
@@ -142,41 +143,46 @@ assert.eq(opCounters.getmore + 1, newdb.serverStatus().opcounters.getmore);
//
t.drop();
-t.insert({_id:0});
+t.insert({_id: 0});
// Command, recognized, no error.
serverStatus = newdb.runCommand({serverStatus: 1});
opCounters = serverStatus.opcounters;
metricsObj = serverStatus.metrics.commands;
-assert.eq(opCounters.command + 1, newdb.serverStatus().opcounters.command); // "serverStatus" counted
+assert.eq(opCounters.command + 1,
+ newdb.serverStatus().opcounters.command); // "serverStatus" counted
// Count this and the last run of "serverStatus"
assert.eq(metricsObj.serverStatus.total + 2,
- newdb.serverStatus().metrics.commands.serverStatus.total,
- "total ServerStatus command counter did not increment"); // "serverStatus" counted
-assert.eq(metricsObj.serverStatus.failed,
- newdb.serverStatus().metrics.commands.serverStatus.failed,
- "failed ServerStatus command counter incremented!"); // "serverStatus" counted
+ newdb.serverStatus().metrics.commands.serverStatus.total,
+ "total ServerStatus command counter did not increment"); // "serverStatus" counted
+assert.eq(metricsObj.serverStatus.failed,
+ newdb.serverStatus().metrics.commands.serverStatus.failed,
+ "failed ServerStatus command counter incremented!"); // "serverStatus" counted
// Command, recognized, with error.
-countVal = { "total" : 0, "failed" : 0 };
+countVal = {
+ "total": 0,
+ "failed": 0
+};
if (metricsObj.count != null) {
countVal = metricsObj.count;
}
-res = t.runCommand("count", {query:{$invalidOp:1}}); // "count command" counted
+res = t.runCommand("count", {query: {$invalidOp: 1}}); // "count command" counted
assert.eq(0, res.ok);
assert.eq(opCounters.command + 5,
- newdb.serverStatus().opcounters.command); // "serverStatus", "count" counted
+ newdb.serverStatus().opcounters.command); // "serverStatus", "count" counted
-assert.eq(countVal.total +1,
- newdb.serverStatus().metrics.commands.count.total,
- "total count command counter did not incremented"); // "serverStatus", "count" counted
-assert.eq(countVal.failed + 1,
- newdb.serverStatus().metrics.commands.count.failed,
- "failed count command counter did not increment"); // "serverStatus", "count" counted
+assert.eq(countVal.total + 1,
+          newdb.serverStatus().metrics.commands.count.total,
+          "total count command counter did not increment"); // "serverStatus", "count" counted
+assert.eq(countVal.failed + 1,
+ newdb.serverStatus().metrics.commands.count.failed,
+ "failed count command counter did not increment"); // "serverStatus", "count" counted
// Command, unrecognized.
res = t.runCommand("invalid");
assert.eq(0, res.ok);
-assert.eq(opCounters.command + 8, newdb.serverStatus().opcounters.command); // "serverStatus" counted
+assert.eq(opCounters.command + 8,
+ newdb.serverStatus().opcounters.command); // "serverStatus" counted
assert.eq(null, newdb.serverStatus().metrics.commands.invalid);
assert.eq(metricsObj['<UNKNOWN>'] + 1, newdb.serverStatus().metrics.commands['<UNKNOWN>']);
diff --git a/jstests/core/or1.js b/jstests/core/or1.js
index cc6d7aa37e8..0552524eb4c 100644
--- a/jstests/core/or1.js
+++ b/jstests/core/or1.js
@@ -1,43 +1,60 @@
t = db.jstests_or1;
t.drop();
-checkArrs = function( a, b ) {
- assert.eq( a.length, b.length );
+checkArrs = function(a, b) {
+ assert.eq(a.length, b.length);
aStr = [];
bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert.neq( -1, bStr.indexOf( aStr[ i ] ) );
+ a.forEach(function(x) {
+ aStr.push(tojson(x));
+ });
+ b.forEach(function(x) {
+ bStr.push(tojson(x));
+ });
+ for (i = 0; i < aStr.length; ++i) {
+ assert.neq(-1, bStr.indexOf(aStr[i]));
}
};
doTest = function() {
-t.save( {_id:0,a:1} );
-t.save( {_id:1,a:2} );
-t.save( {_id:2,b:1} );
-t.save( {_id:3,b:2} );
-t.save( {_id:4,a:1,b:1} );
-t.save( {_id:5,a:1,b:2} );
-t.save( {_id:6,a:2,b:1} );
-t.save( {_id:7,a:2,b:2} );
+ t.save({_id: 0, a: 1});
+ t.save({_id: 1, a: 2});
+ t.save({_id: 2, b: 1});
+ t.save({_id: 3, b: 2});
+ t.save({_id: 4, a: 1, b: 1});
+ t.save({_id: 5, a: 1, b: 2});
+ t.save({_id: 6, a: 2, b: 1});
+ t.save({_id: 7, a: 2, b: 2});
-assert.throws( function() { t.find( { $or:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { $or:[] } ).toArray(); } );
-assert.throws( function() { t.find( { $or:[ "a" ] } ).toArray(); } );
+ assert.throws(function() {
+ t.find({$or: "a"}).toArray();
+ });
+ assert.throws(function() {
+ t.find({$or: []}).toArray();
+ });
+ assert.throws(function() {
+ t.find({$or: ["a"]}).toArray();
+ });
-a1 = t.find( { $or: [ { a : 1 } ] } ).toArray();
-checkArrs( [ { _id:0, a:1 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 } ], a1 );
+ a1 = t.find({$or: [{a: 1}]}).toArray();
+ checkArrs([{_id: 0, a: 1}, {_id: 4, a: 1, b: 1}, {_id: 5, a: 1, b: 2}], a1);
-a1b2 = t.find( { $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
-checkArrs( [ { _id:0, a:1 }, { _id:3, b:2 }, { _id:4, a:1, b:1 }, { _id:5, a:1, b:2 }, { _id:7, a:2, b:2 } ], a1b2 );
+ a1b2 = t.find({$or: [{a: 1}, {b: 2}]}).toArray();
+ checkArrs([
+ {_id: 0, a: 1},
+ {_id: 3, b: 2},
+ {_id: 4, a: 1, b: 1},
+ {_id: 5, a: 1, b: 2},
+ {_id: 7, a: 2, b: 2}
+ ],
+ a1b2);
-t.drop();
-t.save( {a:[0,1],b:[0,1]} );
-assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}} ] } ).toArray().length );
-assert.eq( 1, t.find( { $or: [ { b: {$in:[0,1]}} ] } ).toArray().length );
-assert.eq( 1, t.find( { $or: [ { a: {$in:[0,1]}}, { b: {$in:[0,1]}} ] } ).toArray().length );
+ t.drop();
+ t.save({a: [0, 1], b: [0, 1]});
+ assert.eq(1, t.find({$or: [{a: {$in: [0, 1]}}]}).toArray().length);
+ assert.eq(1, t.find({$or: [{b: {$in: [0, 1]}}]}).toArray().length);
+ assert.eq(1, t.find({$or: [{a: {$in: [0, 1]}}, {b: {$in: [0, 1]}}]}).toArray().length);
};
@@ -45,13 +62,13 @@ doTest();
// not part of SERVER-1003, but good check for subseq. implementations
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
doTest();
t.drop();
-t.ensureIndex( {b:1} );
+t.ensureIndex({b: 1});
doTest();
t.drop();
-t.ensureIndex( {a:1,b:1} );
+t.ensureIndex({a: 1, b: 1});
doTest();
diff --git a/jstests/core/or2.js b/jstests/core/or2.js
index 11cfc44ff7f..2624c213fad 100644
--- a/jstests/core/or2.js
+++ b/jstests/core/or2.js
@@ -4,47 +4,58 @@ t.drop();
// Include helpers for analyzing explain output.
load("jstests/libs/analyze_plan.js");
-checkArrs = function( a, b ) {
- assert.eq( a.length, b.length );
+checkArrs = function(a, b) {
+ assert.eq(a.length, b.length);
aStr = [];
bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert.neq( -1, bStr.indexOf( aStr[ i ] ) );
+ a.forEach(function(x) {
+ aStr.push(tojson(x));
+ });
+ b.forEach(function(x) {
+ bStr.push(tojson(x));
+ });
+ for (i = 0; i < aStr.length; ++i) {
+ assert.neq(-1, bStr.indexOf(aStr[i]));
}
};
-doTest = function( index ) {
- if ( index == null ) {
+doTest = function(index) {
+ if (index == null) {
index = true;
}
- t.save( {_id:0,x:0,a:1} );
- t.save( {_id:1,x:0,a:2} );
- t.save( {_id:2,x:0,b:1} );
- t.save( {_id:3,x:0,b:2} );
- t.save( {_id:4,x:1,a:1,b:1} );
- t.save( {_id:5,x:1,a:1,b:2} );
- t.save( {_id:6,x:1,a:2,b:1} );
- t.save( {_id:7,x:1,a:2,b:2} );
+ t.save({_id: 0, x: 0, a: 1});
+ t.save({_id: 1, x: 0, a: 2});
+ t.save({_id: 2, x: 0, b: 1});
+ t.save({_id: 3, x: 0, b: 2});
+ t.save({_id: 4, x: 1, a: 1, b: 1});
+ t.save({_id: 5, x: 1, a: 1, b: 2});
+ t.save({_id: 6, x: 1, a: 2, b: 1});
+ t.save({_id: 7, x: 1, a: 2, b: 2});
- assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
+ assert.throws(function() {
+ t.find({x: 0, $or: "a"}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: []}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: ["a"]}).toArray();
+ });
- a1 = t.find( { x:0, $or: [ { a : 1 } ] } ).toArray();
- checkArrs( [ { _id:0, x:0, a:1 } ], a1 );
- if ( index ) {
- var explain = t.find( { x:0,$or: [ { a : 1 } ] } ).explain();
- assert( isIxscan(explain.queryPlanner.winningPlan) );
+ a1 = t.find({x: 0, $or: [{a: 1}]}).toArray();
+ checkArrs([{_id: 0, x: 0, a: 1}], a1);
+ if (index) {
+ var explain = t.find({x: 0, $or: [{a: 1}]}).explain();
+ assert(isIxscan(explain.queryPlanner.winningPlan));
}
- a1b2 = t.find( { x:1, $or: [ { a : 1 }, { b : 2 } ] } ).toArray();
- checkArrs( [ { _id:4, x:1, a:1, b:1 }, { _id:5, x:1, a:1, b:2 }, { _id:7, x:1, a:2, b:2 } ], a1b2 );
- if ( index ) {
- var explain = t.find( { x:0,$or: [ { a : 1 } ] } ).explain();
- assert( isIxscan(explain.queryPlanner.winningPlan) );
+ a1b2 = t.find({x: 1, $or: [{a: 1}, {b: 2}]}).toArray();
+ checkArrs([{_id: 4, x: 1, a: 1, b: 1}, {_id: 5, x: 1, a: 1, b: 2}, {_id: 7, x: 1, a: 2, b: 2}],
+ a1b2);
+ if (index) {
+ var explain = t.find({x: 0, $or: [{a: 1}]}).explain();
+ assert(isIxscan(explain.queryPlanner.winningPlan));
}
/*
@@ -56,19 +67,19 @@ doTest = function( index ) {
*/
};
-doTest( false );
+doTest(false);
-t.ensureIndex( { x:1 } );
+t.ensureIndex({x: 1});
doTest();
t.drop();
-t.ensureIndex( { x:1,a:1 } );
+t.ensureIndex({x: 1, a: 1});
doTest();
t.drop();
-t.ensureIndex( {x:1,b:1} );
+t.ensureIndex({x: 1, b: 1});
doTest();
t.drop();
-t.ensureIndex( {x:1,a:1,b:1} );
+t.ensureIndex({x: 1, a: 1, b: 1});
doTest();
diff --git a/jstests/core/or3.js b/jstests/core/or3.js
index 1ca0ac29d80..50434965bae 100644
--- a/jstests/core/or3.js
+++ b/jstests/core/or3.js
@@ -4,63 +4,73 @@ t.drop();
// Include helpers for analyzing explain output.
load("jstests/libs/analyze_plan.js");
-checkArrs = function( a, b ) {
- assert.eq( a.length, b.length );
+checkArrs = function(a, b) {
+ assert.eq(a.length, b.length);
aStr = [];
bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert.neq( -1, bStr.indexOf( aStr[ i ] ) );
+ a.forEach(function(x) {
+ aStr.push(tojson(x));
+ });
+ b.forEach(function(x) {
+ bStr.push(tojson(x));
+ });
+ for (i = 0; i < aStr.length; ++i) {
+ assert.neq(-1, bStr.indexOf(aStr[i]));
}
};
-doTest = function( index ) {
- if ( index == null ) {
+doTest = function(index) {
+ if (index == null) {
index = true;
}
-
- t.save( {_id:0,x:0,a:1} );
- t.save( {_id:1,x:0,a:2} );
- t.save( {_id:2,x:0,b:1} );
- t.save( {_id:3,x:0,b:2} );
- t.save( {_id:4,x:1,a:1,b:1} );
- t.save( {_id:5,x:1,a:1,b:2} );
- t.save( {_id:6,x:1,a:2,b:1} );
- t.save( {_id:7,x:1,a:2,b:2} );
-
- assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
- assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
- an1 = t.find( { $nor: [ { a : 1 } ] } ).toArray();
- checkArrs( t.find( {a:{$ne:1}} ).toArray(), an1 );
-
- an1bn2 = t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).toArray();
- checkArrs( [ { _id:6, x:1, a:2, b:1 } ], an1bn2 );
- checkArrs( t.find( { x:1, a:{$ne:1}, b:{$ne:2} } ).toArray(), an1bn2 );
- if ( index ) {
- var explain = t.find( { x:1, $nor: [ { a : 1 }, { b : 2 } ] } ).explain();
- assert( isIxscan(explain.queryPlanner.winningPlan) );
+ t.save({_id: 0, x: 0, a: 1});
+ t.save({_id: 1, x: 0, a: 2});
+ t.save({_id: 2, x: 0, b: 1});
+ t.save({_id: 3, x: 0, b: 2});
+ t.save({_id: 4, x: 1, a: 1, b: 1});
+ t.save({_id: 5, x: 1, a: 1, b: 2});
+ t.save({_id: 6, x: 1, a: 2, b: 1});
+ t.save({_id: 7, x: 1, a: 2, b: 2});
+
+ assert.throws(function() {
+ t.find({x: 0, $nor: "a"}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: []}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: ["a"]}).toArray();
+ });
+
+ an1 = t.find({$nor: [{a: 1}]}).toArray();
+ checkArrs(t.find({a: {$ne: 1}}).toArray(), an1);
+
+ an1bn2 = t.find({x: 1, $nor: [{a: 1}, {b: 2}]}).toArray();
+ checkArrs([{_id: 6, x: 1, a: 2, b: 1}], an1bn2);
+ checkArrs(t.find({x: 1, a: {$ne: 1}, b: {$ne: 2}}).toArray(), an1bn2);
+ if (index) {
+ var explain = t.find({x: 1, $nor: [{a: 1}, {b: 2}]}).explain();
+ assert(isIxscan(explain.queryPlanner.winningPlan));
}
-
- an1b2 = t.find( { $nor: [ { a : 1 } ], $or: [ { b : 2 } ] } ).toArray();
- checkArrs( t.find( {a:{$ne:1},b:2} ).toArray(), an1b2 );
+
+ an1b2 = t.find({$nor: [{a: 1}], $or: [{b: 2}]}).toArray();
+ checkArrs(t.find({a: {$ne: 1}, b: 2}).toArray(), an1b2);
};
-doTest( false );
+doTest(false);
-t.ensureIndex( { x:1 } );
+t.ensureIndex({x: 1});
doTest();
t.drop();
-t.ensureIndex( { x:1,a:1 } );
+t.ensureIndex({x: 1, a: 1});
doTest();
t.drop();
-t.ensureIndex( {x:1,b:1} );
+t.ensureIndex({x: 1, b: 1});
doTest();
t.drop();
-t.ensureIndex( {x:1,a:1,b:1} );
+t.ensureIndex({x: 1, a: 1, b: 1});
doTest();
diff --git a/jstests/core/or4.js b/jstests/core/or4.js
index a02150ff1d0..6053295a7d1 100644
--- a/jstests/core/or4.js
+++ b/jstests/core/or4.js
@@ -1,82 +1,90 @@
t = db.jstests_or4;
t.drop();
-checkArrs = function( a, b ) {
+checkArrs = function(a, b) {
m = "[" + a + "] != [" + b + "]";
- a = eval( a );
- b = eval( b );
- assert.eq( a.length, b.length, m );
+ a = eval(a);
+ b = eval(b);
+ assert.eq(a.length, b.length, m);
aStr = [];
bStr = [];
- a.forEach( function( x ) { aStr.push( tojson( x ) ); } );
- b.forEach( function( x ) { bStr.push( tojson( x ) ); } );
- for ( i = 0; i < aStr.length; ++i ) {
- assert( -1 != bStr.indexOf( aStr[ i ] ), m );
+ a.forEach(function(x) {
+ aStr.push(tojson(x));
+ });
+ b.forEach(function(x) {
+ bStr.push(tojson(x));
+ });
+ for (i = 0; i < aStr.length; ++i) {
+ assert(-1 != bStr.indexOf(aStr[i]), m);
}
};
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {b:3} );
-t.save( {a:2,b:3} );
+t.save({a: 2});
+t.save({b: 3});
+t.save({b: 3});
+t.save({a: 2, b: 3});
-assert.eq.automsg( "4", "t.count( {$or:[{a:2},{b:3}]} )" );
-assert.eq.automsg( "2", "t.count( {$or:[{a:2},{a:2}]} )" );
+assert.eq.automsg("4", "t.count( {$or:[{a:2},{b:3}]} )");
+assert.eq.automsg("2", "t.count( {$or:[{a:2},{a:2}]} )");
-assert.eq.automsg( "2", "t.find( {} ).skip( 2 ).count( true )" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip( 2 ).count( true )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).skip( 3 ).count( true )" );
+assert.eq.automsg("2", "t.find( {} ).skip( 2 ).count( true )");
+assert.eq.automsg("2", "t.find( {$or:[{a:2},{b:3}]} ).skip( 2 ).count( true )");
+assert.eq.automsg("1", "t.find( {$or:[{a:2},{b:3}]} ).skip( 3 ).count( true )");
-assert.eq.automsg( "2", "t.find( {} ).limit( 2 ).count( true )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{b:3}]} ).limit( 1 ).count( true )" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).limit( 2 ).count( true )" );
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).limit( 3 ).count( true )" );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).limit( 4 ).count( true )" );
+assert.eq.automsg("2", "t.find( {} ).limit( 2 ).count( true )");
+assert.eq.automsg("1", "t.find( {$or:[{a:2},{b:3}]} ).limit( 1 ).count( true )");
+assert.eq.automsg("2", "t.find( {$or:[{a:2},{b:3}]} ).limit( 2 ).count( true )");
+assert.eq.automsg("3", "t.find( {$or:[{a:2},{b:3}]} ).limit( 3 ).count( true )");
+assert.eq.automsg("4", "t.find( {$or:[{a:2},{b:3}]} ).limit( 4 ).count( true )");
-t.remove({ $or: [{ a: 2 }, { b: 3}] });
-assert.eq.automsg( "0", "t.count()" );
+t.remove({$or: [{a: 2}, {b: 3}]});
+assert.eq.automsg("0", "t.count()");
-t.save( {b:3} );
-t.remove({ $or: [{ a: 2 }, { b: 3}] });
-assert.eq.automsg( "0", "t.count()" );
+t.save({b: 3});
+t.remove({$or: [{a: 2}, {b: 3}]});
+assert.eq.automsg("0", "t.count()");
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {a:2,b:3} );
+t.save({a: 2});
+t.save({b: 3});
+t.save({a: 2, b: 3});
-t.update( {$or:[{a:2},{b:3}]}, {$set:{z:1}}, false, true );
-assert.eq.automsg( "3", "t.count( {z:1} )" );
+t.update({$or: [{a: 2}, {b: 3}]}, {$set: {z: 1}}, false, true);
+assert.eq.automsg("3", "t.count( {z:1} )");
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).toArray().length" );
-checkArrs( "t.find().toArray()", "t.find( {$or:[{a:2},{b:3}]} ).toArray()" );
-assert.eq.automsg( "2", "t.find( {$or:[{a:2},{b:3}]} ).skip(1).toArray().length" );
+assert.eq.automsg("3", "t.find( {$or:[{a:2},{b:3}]} ).toArray().length");
+checkArrs("t.find().toArray()", "t.find( {$or:[{a:2},{b:3}]} ).toArray()");
+assert.eq.automsg("2", "t.find( {$or:[{a:2},{b:3}]} ).skip(1).toArray().length");
-assert.eq.automsg( "3", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
+assert.eq.automsg("3", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length");
-t.save( {a:1} );
-t.save( {b:4} );
-t.save( {a:2} );
+t.save({a: 1});
+t.save({b: 4});
+t.save({a: 2});
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length" );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).snapshot().toArray().length" );
+assert.eq.automsg("4", "t.find( {$or:[{a:2},{b:3}]} ).batchSize( 2 ).toArray().length");
+assert.eq.automsg("4", "t.find( {$or:[{a:2},{b:3}]} ).snapshot().toArray().length");
-t.save( {a:1,b:3} );
-assert.eq.automsg( "4", "t.find( {$or:[{a:2},{b:3}]} ).limit(4).toArray().length" );
+t.save({a: 1, b: 3});
+assert.eq.automsg("4", "t.find( {$or:[{a:2},{b:3}]} ).limit(4).toArray().length");
-assert.eq.automsg( "[1,2]", "Array.sort( t.distinct( 'a', {$or:[{a:2},{b:3}]} ) )" );
+assert.eq.automsg("[1,2]", "Array.sort( t.distinct( 'a', {$or:[{a:2},{b:3}]} ) )");
-assert.eq.automsg( "[{a:2},{a:null},{a:1}]", "t.group( {key:{a:1}, cond:{$or:[{a:2},{b:3}]}, reduce:function( x, y ) { }, initial:{} } )" );
-assert.eq.automsg( "5", "t.mapReduce( function() { emit( 'a', this.a ); }, function( key, vals ) { return vals.length; }, {out:{inline:true},query:{$or:[{a:2},{b:3}]}} ).counts.input" );
+assert.eq.automsg(
+ "[{a:2},{a:null},{a:1}]",
+ "t.group( {key:{a:1}, cond:{$or:[{a:2},{b:3}]}, reduce:function( x, y ) { }, initial:{} } )");
+assert.eq.automsg(
+ "5",
+ "t.mapReduce( function() { emit( 'a', this.a ); }, function( key, vals ) { return vals.length; }, {out:{inline:true},query:{$or:[{a:2},{b:3}]}} ).counts.input");
-t.remove( {} );
+t.remove({});
-t.save( {a:[1,2]} );
-assert.eq.automsg( "1", "t.find( {$or:[{a:1},{a:2}]} ).toArray().length" );
-assert.eq.automsg( "1", "t.count( {$or:[{a:1},{a:2}]} )" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:2},{a:1}]} ).toArray().length" );
-assert.eq.automsg( "1", "t.count( {$or:[{a:2},{a:1}]} )" );
+t.save({a: [1, 2]});
+assert.eq.automsg("1", "t.find( {$or:[{a:1},{a:2}]} ).toArray().length");
+assert.eq.automsg("1", "t.count( {$or:[{a:1},{a:2}]} )");
+assert.eq.automsg("1", "t.find( {$or:[{a:2},{a:1}]} ).toArray().length");
+assert.eq.automsg("1", "t.count( {$or:[{a:2},{a:1}]} )");
t.remove({});
diff --git a/jstests/core/or5.js b/jstests/core/or5.js
index 8d9d8802860..dd32c1c3c50 100644
--- a/jstests/core/or5.js
+++ b/jstests/core/or5.js
@@ -1,63 +1,65 @@
t = db.jstests_or5;
t.drop();
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-
-t.ensureIndex( {c:1} );
-
-t.save( {a:2} );
-t.save( {b:3} );
-t.save( {c:4} );
-t.save( {a:2,b:3} );
-t.save( {a:2,c:4} );
-t.save( {b:3,c:4} );
-t.save( {a:2,b:3,c:4} );
-
-assert.eq.automsg( "7", "t.count( {$or:[{a:2},{b:3},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:6},{b:3},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:6},{c:4}]} )" );
-assert.eq.automsg( "6", "t.count( {$or:[{a:2},{b:3},{c:6}]} )" );
-
-assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).toArray().length" );
-
-for( i = 2; i <= 7; ++i ) {
-assert.eq.automsg( "7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).batchSize( i ).toArray().length" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).batchSize( i ).toArray().length" );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+
+t.ensureIndex({c: 1});
+
+t.save({a: 2});
+t.save({b: 3});
+t.save({c: 4});
+t.save({a: 2, b: 3});
+t.save({a: 2, c: 4});
+t.save({b: 3, c: 4});
+t.save({a: 2, b: 3, c: 4});
+
+assert.eq.automsg("7", "t.count( {$or:[{a:2},{b:3},{c:4}]} )");
+assert.eq.automsg("6", "t.count( {$or:[{a:6},{b:3},{c:4}]} )");
+assert.eq.automsg("6", "t.count( {$or:[{a:2},{b:6},{c:4}]} )");
+assert.eq.automsg("6", "t.count( {$or:[{a:2},{b:3},{c:6}]} )");
+
+assert.eq.automsg("7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).toArray().length");
+assert.eq.automsg("6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).toArray().length");
+assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).toArray().length");
+assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).toArray().length");
+
+for (i = 2; i <= 7; ++i) {
+ assert.eq.automsg("7", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( i ).toArray().length");
+ assert.eq.automsg("6", "t.find( {$or:[{a:6},{b:3},{c:4}]} ).batchSize( i ).toArray().length");
+ assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:6},{c:4}]} ).batchSize( i ).toArray().length");
+ assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:3},{c:6}]} ).batchSize( i ).toArray().length");
}
-t.ensureIndex( {z:"2d"} );
+t.ensureIndex({z: "2d"});
-assert.throws.automsg( function() { return t.find( {$or:[{z:{$near:[50,50]}},{a:2}]} ).toArray(); } );
+assert.throws.automsg(function() {
+ return t.find({$or: [{z: {$near: [50, 50]}}, {a: 2}]}).toArray();
+});
function reset() {
t.drop();
-
- t.ensureIndex( {a:1} );
- t.ensureIndex( {b:1} );
- t.ensureIndex( {c:1} );
-
- t.save( {a:2} );
- t.save( {a:2} );
- t.save( {b:3} );
- t.save( {b:3} );
- t.save( {c:4} );
- t.save( {c:4} );
+
+ t.ensureIndex({a: 1});
+ t.ensureIndex({b: 1});
+ t.ensureIndex({c: 1});
+
+ t.save({a: 2});
+ t.save({a: 2});
+ t.save({b: 3});
+ t.save({b: 3});
+ t.save({c: 4});
+ t.save({c: 4});
}
reset();
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 1 ).itcount()" );
-assert.eq.automsg( "6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 2 ).itcount()" );
+assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 1 ).itcount()");
+assert.eq.automsg("6", "t.find( {$or:[{a:2},{b:3},{c:4}]} ).batchSize( 2 ).itcount()");
t.drop();
-t.save( {a:[1,2]} );
-assert.eq.automsg( "1", "t.find( {$or:[{a:[1,2]}]} ).itcount()" );
-assert.eq.automsg( "1", "t.find( {$or:[{a:{$all:[1,2]}}]} ).itcount()" );
-assert.eq.automsg( "0", "t.find( {$or:[{a:{$all:[1,3]}}]} ).itcount()" );
+t.save({a: [1, 2]});
+assert.eq.automsg("1", "t.find( {$or:[{a:[1,2]}]} ).itcount()");
+assert.eq.automsg("1", "t.find( {$or:[{a:{$all:[1,2]}}]} ).itcount()");
+assert.eq.automsg("0", "t.find( {$or:[{a:{$all:[1,3]}}]} ).itcount()");
diff --git a/jstests/core/or7.js b/jstests/core/or7.js
index 916158047d8..49fd936d7eb 100644
--- a/jstests/core/or7.js
+++ b/jstests/core/or7.js
@@ -1,41 +1,41 @@
t = db.jstests_or7;
t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:2} );
+t.ensureIndex({a: 1});
+t.save({a: 2});
-assert.eq.automsg( "1", "t.count( {$or:[{a:{$in:[1,3]}},{a:2}]} )" );
+assert.eq.automsg("1", "t.count( {$or:[{a:{$in:[1,3]}},{a:2}]} )");
-//SERVER-1201 ...
+// SERVER-1201 ...
t.remove({});
-t.save( {a:"aa"} );
-t.save( {a:"ab"} );
-t.save( {a:"ad"} );
+t.save({a: "aa"});
+t.save({a: "ab"});
+t.save({a: "ad"});
-assert.eq.automsg( "3", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+assert.eq.automsg("3", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )");
t.remove({});
-t.save( {a:"aa"} );
-t.save( {a:"ad"} );
+t.save({a: "aa"});
+t.save({a: "ad"});
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+assert.eq.automsg("2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )");
t.remove({});
-t.save( {a:"aa"} );
-t.save( {a:"ac"} );
+t.save({a: "aa"});
+t.save({a: "ac"});
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+assert.eq.automsg("2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )");
-assert.eq.automsg( "2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )" );
+assert.eq.automsg("2", "t.count( {$or:[{a:/^ab/},{a:/^a/}]} )");
-t.save( {a:"ab"} );
-assert.eq.automsg( "3", "t.count( {$or:[{a:{$in:[/^ab/],$gte:'abc'}},{a:/^a/}]} )" );
+t.save({a: "ab"});
+assert.eq.automsg("3", "t.count( {$or:[{a:{$in:[/^ab/],$gte:'abc'}},{a:/^a/}]} )");
t.remove({});
-t.save( {a:"a"} );
-t.save( {a:"b"} );
-assert.eq.automsg( "2", "t.count( {$or:[{a:{$gt:'a',$lt:'b'}},{a:{$gte:'a',$lte:'b'}}]} )" );
+t.save({a: "a"});
+t.save({a: "b"});
+assert.eq.automsg("2", "t.count( {$or:[{a:{$gt:'a',$lt:'b'}},{a:{$gte:'a',$lte:'b'}}]} )");
diff --git a/jstests/core/or8.js b/jstests/core/or8.js
index 40d5b38cede..c778238b96e 100644
--- a/jstests/core/or8.js
+++ b/jstests/core/or8.js
@@ -3,26 +3,26 @@
t = db.jstests_or8;
t.drop();
-t.find({ "$or": [ { "PropA": { "$lt": "b" } }, { "PropA": { "$lt": "b", "$gt": "a" } } ] }).toArray();
+t.find({"$or": [{"PropA": {"$lt": "b"}}, {"PropA": {"$lt": "b", "$gt": "a"}}]}).toArray();
// empty $in
-t.save( {a:1} );
-t.save( {a:3} );
-t.ensureIndex( {a:1} );
-t.find({ $or: [ { a: {$in:[]} } ] } ).toArray();
-assert.eq.automsg( "2", "t.find({ $or: [ { a: {$in:[]} }, {a:1}, {a:3} ] } ).toArray().length" );
-assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, { a: {$in:[]} }, {a:3} ] } ).toArray().length" );
-assert.eq.automsg( "2", "t.find({ $or: [ {a:1}, {a:3}, { a: {$in:[]} } ] } ).toArray().length" );
+t.save({a: 1});
+t.save({a: 3});
+t.ensureIndex({a: 1});
+t.find({$or: [{a: {$in: []}}]}).toArray();
+assert.eq.automsg("2", "t.find({ $or: [ { a: {$in:[]} }, {a:1}, {a:3} ] } ).toArray().length");
+assert.eq.automsg("2", "t.find({ $or: [ {a:1}, { a: {$in:[]} }, {a:3} ] } ).toArray().length");
+assert.eq.automsg("2", "t.find({ $or: [ {a:1}, {a:3}, { a: {$in:[]} } ] } ).toArray().length");
// nested negate field
t.drop();
-t.save( {a:{b:1,c:1}} );
-t.ensureIndex( { 'a.b':1 } );
-t.ensureIndex( { 'a.c':1 } );
-assert.eq( 1, t.find( {$or: [ { 'a.b':1 }, { 'a.c':1 } ] } ).itcount() );
+t.save({a: {b: 1, c: 1}});
+t.ensureIndex({'a.b': 1});
+t.ensureIndex({'a.c': 1});
+assert.eq(1, t.find({$or: [{'a.b': 1}, {'a.c': 1}]}).itcount());
t.remove({});
-t.save( {a:[{b:1,c:1},{b:2,c:1}]} );
-assert.eq( 1, t.find( {$or: [ { 'a.b':2 }, { 'a.c':1 } ] } ).itcount() );
+t.save({a: [{b: 1, c: 1}, {b: 2, c: 1}]});
+assert.eq(1, t.find({$or: [{'a.b': 2}, {'a.c': 1}]}).itcount());
diff --git a/jstests/core/or9.js b/jstests/core/or9.js
index c76c5407b6f..d203d3d5f4d 100644
--- a/jstests/core/or9.js
+++ b/jstests/core/or9.js
@@ -3,56 +3,56 @@
t = db.jstests_or9;
t.drop();
-t.ensureIndex( {a:1,b:1} );
+t.ensureIndex({a: 1, b: 1});
-t.save( {a:2,b:2} );
+t.save({a: 2, b: 2});
-function check( a, q ) {
+function check(a, q) {
count = a;
query = q;
- assert.eq.automsg( "count", "t.count( query )" );
+ assert.eq.automsg("count", "t.count( query )");
}
// SERVER-12594: there are two clauses in this case, because we do
// not yet collapse OR of ANDs to a single ixscan.
-check( 1, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2 } ] } );
+check(1, {$or: [{a: {$gte: 1, $lte: 3}}, {a: 2}]});
-check( 1, { $or: [ { a: { $gt:2,$lte:3 } }, { a: 2 } ] } );
+check(1, {$or: [{a: {$gt: 2, $lte: 3}}, {a: 2}]});
-check( 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2 } ] } );
-check( 1, { $or: [ { b: { $gte:2,$lte:3 } }, { b: 2 } ] } );
-check( 1, { $or: [ { b: { $gt:2,$lte:3 } }, { b: 2 } ] } );
+check(1, {$or: [{b: {$gte: 1, $lte: 3}}, {b: 2}]});
+check(1, {$or: [{b: {$gte: 2, $lte: 3}}, {b: 2}]});
+check(1, {$or: [{b: {$gt: 2, $lte: 3}}, {b: 2}]});
// SERVER-12594: there are two clauses in this case, because we do
// not yet collapse OR of ANDs to a single ixscan.
-check( 1, { $or: [ { a: { $gte:1,$lte:3 } }, { a: 2, b: 2 } ] } );
+check(1, {$or: [{a: {$gte: 1, $lte: 3}}, {a: 2, b: 2}]});
-check( 1, { $or: [ { a: { $gte:1,$lte:3 }, b:3 }, { a: 2 } ] } );
+check(1, {$or: [{a: {$gte: 1, $lte: 3}, b: 3}, {a: 2}]});
-check( 1, { $or: [ { b: { $gte:1,$lte:3 } }, { b: 2, a: 2 } ] } );
+check(1, {$or: [{b: {$gte: 1, $lte: 3}}, {b: 2, a: 2}]});
-check( 1, { $or: [ { b: { $gte:1,$lte:3 }, a:3 }, { b: 2 } ] } );
+check(1, {$or: [{b: {$gte: 1, $lte: 3}, a: 3}, {b: 2}]});
-check( 1, { $or: [ { a: { $gte:1,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
-check( 1, { $or: [ { a: { $gte:2,$lte:3 }, b: 3 }, { a: 2, b: 2 } ] } );
+check(1, {$or: [{a: {$gte: 1, $lte: 3}, b: 3}, {a: 2, b: 2}]});
+check(1, {$or: [{a: {$gte: 2, $lte: 3}, b: 3}, {a: 2, b: 2}]});
// SERVER-12594: there are two clauses in this case, because we do
// not yet collapse OR of ANDs to a single ixscan.
-check( 1, { $or: [ { a: { $gte:1,$lte:3 }, b: 2 }, { a: 2, b: 2 } ] } );
+check(1, {$or: [{a: {$gte: 1, $lte: 3}, b: 2}, {a: 2, b: 2}]});
-check( 1, { $or: [ { b: { $gte:1,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
-check( 1, { $or: [ { b: { $gte:2,$lte:3 }, a: 3 }, { a: 2, b: 2 } ] } );
+check(1, {$or: [{b: {$gte: 1, $lte: 3}, a: 3}, {a: 2, b: 2}]});
+check(1, {$or: [{b: {$gte: 2, $lte: 3}, a: 3}, {a: 2, b: 2}]});
// SERVER-12594: there are two clauses in this case, because we do
// not yet collapse OR of ANDs to a single ixscan.
-check( 1, { $or: [ { b: { $gte:1,$lte:3 }, a: 2 }, { a: 2, b: 2 } ] } );
+check(1, {$or: [{b: {$gte: 1, $lte: 3}, a: 2}, {a: 2, b: 2}]});
t.remove({});
-t.save( {a:1,b:5} );
-t.save( {a:5,b:1} );
+t.save({a: 1, b: 5});
+t.save({a: 5, b: 1});
// SERVER-12594: there are two clauses in the case below, because we do
// not yet collapse OR of ANDs to a single ixscan.
-check( 2, { $or: [ { a: { $in:[1,5] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
+check(2, {$or: [{a: {$in: [1, 5]}, b: {$in: [1, 5]}}, {a: {$in: [1, 5]}, b: {$in: [1, 5]}}]});
-check( 2, { $or: [ { a: { $in:[1] }, b: { $in:[1,5] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
-check( 2, { $or: [ { a: { $in:[1] }, b: { $in:[1] } }, { a: { $in:[1,5] }, b: { $in:[1,5] } } ] } );
+check(2, {$or: [{a: {$in: [1]}, b: {$in: [1, 5]}}, {a: {$in: [1, 5]}, b: {$in: [1, 5]}}]});
+check(2, {$or: [{a: {$in: [1]}, b: {$in: [1]}}, {a: {$in: [1, 5]}, b: {$in: [1, 5]}}]});
diff --git a/jstests/core/or_inexact.js b/jstests/core/or_inexact.js
index 722d47ee05a..8c9db1cc7ba 100644
--- a/jstests/core/or_inexact.js
+++ b/jstests/core/or_inexact.js
@@ -38,8 +38,8 @@ t.insert({_id: 0, names: ["thomas", "alexandra"]});
t.insert({_id: 1, names: "frank"});
t.insert({_id: 2, names: "alice"});
t.insert({_id: 3, names: ["dave"]});
-cursor = t.find({$or: [{names: "frank"}, {names: /^al(ice|ex)/},
- {names: {$elemMatch: {$eq: "thomas"}}}]});
+cursor = t.find(
+ {$or: [{names: "frank"}, {names: /^al(ice|ex)/}, {names: {$elemMatch: {$eq: "thomas"}}}]});
assert.eq(3, cursor.itcount(), "case 3");
// Case 4: Two INEXACT_FETCH.
@@ -48,8 +48,8 @@ t.ensureIndex({names: 1});
t.insert({_id: 0, names: ["thomas", "alexandra"]});
t.insert({_id: 1, names: ["frank", "alice"]});
t.insert({_id: 2, names: "frank"});
-cursor = t.find({$or: [{names: {$elemMatch: {$eq: "alexandra"}}},
- {names: {$elemMatch: {$eq: "frank"}}}]});
+cursor = t.find(
+ {$or: [{names: {$elemMatch: {$eq: "alexandra"}}}, {names: {$elemMatch: {$eq: "frank"}}}]});
assert.eq(2, cursor.itcount(), "case 4");
// Case 5: Two indices. One has EXACT and INEXACT_COVERED. The other
@@ -62,8 +62,8 @@ t.insert({_id: 1, first: "john", last: "doe"});
t.insert({_id: 2, first: "dave", last: "st"});
t.insert({_id: 3, first: ["dave", "david"], last: "pasette"});
t.insert({_id: 4, first: "joanna", last: ["smith", "doe"]});
-cursor = t.find({$or: [{first: "frank"}, {last: {$elemMatch: {$eq: "doe"}}},
- {first: /david/}, {last: "st"}]});
+cursor = t.find(
+ {$or: [{first: "frank"}, {last: {$elemMatch: {$eq: "doe"}}}, {first: /david/}, {last: "st"}]});
assert.eq(4, cursor.itcount(), "case 5");
// Case 6: Multikey with only EXACT predicates.
@@ -116,12 +116,32 @@ t.drop();
t.ensureIndex({pre: 1, loc: "2dsphere"});
t.insert({_id: 0, pre: 3, loc: {type: "Point", coordinates: [40, 5]}});
t.insert({_id: 1, pre: 4, loc: {type: "Point", coordinates: [0, 0]}});
-cursor = t.find({$or: [{pre: 3, loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[39,4], [41,4], [41,6], [39,6], [39,4]]]}}}},
- {pre: 4, loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[-1,-1], [1,-1], [1,1], [-1,1], [-1,-1]]]}}}}]});
+cursor = t.find({
+ $or: [
+ {
+ pre: 3,
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[39, 4], [41, 4], [41, 6], [39, 6], [39, 4]]]
+ }
+ }
+ }
+ },
+ {
+ pre: 4,
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[-1, -1], [1, -1], [1, 1], [-1, 1], [-1, -1]]]
+ }
+ }
+ }
+ }
+ ]
+});
assert.eq(2, cursor.itcount(), "case 11");
// Case 12: GEO with non-geo, same index, 2d.
@@ -129,12 +149,32 @@ t.drop();
t.ensureIndex({pre: 1, loc: "2d"});
t.insert({_id: 0, pre: 3, loc: {type: "Point", coordinates: [40, 5]}});
t.insert({_id: 1, pre: 4, loc: {type: "Point", coordinates: [0, 0]}});
-cursor = t.find({$or: [{pre: 3, loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[39,4], [41,4], [41,6], [39,6], [39,4]]]}}}},
- {pre: 4, loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[-1,-1], [1,-1], [1,1], [-1,1], [-1,-1]]]}}}}]});
+cursor = t.find({
+ $or: [
+ {
+ pre: 3,
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[39, 4], [41, 4], [41, 6], [39, 6], [39, 4]]]
+ }
+ }
+ }
+ },
+ {
+ pre: 4,
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[-1, -1], [1, -1], [1, 1], [-1, 1], [-1, -1]]]
+ }
+ }
+ }
+ }
+ ]
+});
assert.eq(2, cursor.itcount(), "case 12");
// Case 13: $elemMatch object.
@@ -142,8 +182,7 @@ t.drop();
t.ensureIndex({"a.b": 1});
t.insert({_id: 0, a: [{b: 1}, {b: 2}]});
t.insert({_id: 1, a: [{b: 3}, {b: 4}]});
-cursor = t.find({$or: [{a: {$elemMatch: {b: {$lte: 1}}}},
- {a: {$elemMatch: {b: {$gte: 4}}}}]});
+cursor = t.find({$or: [{a: {$elemMatch: {b: {$lte: 1}}}}, {a: {$elemMatch: {b: {$gte: 4}}}}]});
assert.eq(2, cursor.itcount(), "case 13");
// Case 14: $elemMatch object, below an AND.
@@ -151,8 +190,8 @@ t.drop();
t.ensureIndex({"a.b": 1});
t.insert({_id: 0, a: [{b: 1}, {b: 2}]});
t.insert({_id: 1, a: [{b: 2}, {b: 4}]});
-cursor = t.find({"a.b": 2, $or: [{a: {$elemMatch: {b: {$lte: 1}}}},
- {a: {$elemMatch: {b: {$gte: 4}}}}]});
+cursor = t.find(
+ {"a.b": 2, $or: [{a: {$elemMatch: {b: {$lte: 1}}}}, {a: {$elemMatch: {b: {$gte: 4}}}}]});
assert.eq(2, cursor.itcount(), "case 14");
// Case 15: $or below $elemMatch.
@@ -196,8 +235,7 @@ t.ensureIndex({name: 1});
t.insert({_id: 0, name: "thomas"});
t.insert({_id: 1, name: "alexandra"});
t.insert({_id: 2});
-cursor = t.find({$or: [{name: {$in: ["thomas", /^alexand(er|ra)/]}},
- {name: {$exists: false}}]});
+cursor = t.find({$or: [{name: {$in: ["thomas", /^alexand(er|ra)/]}}, {name: {$exists: false}}]});
assert.eq(3, cursor.itcount(), "case 19");
// Case 20: $in with EXACT, INEXACT_COVERED, and INEXACT_FETCH, two indices.
@@ -209,8 +247,8 @@ t.insert({_id: 1, a: "z", b: "z"});
t.insert({_id: 2});
t.insert({_id: 3, a: "w", b: "x"});
t.insert({_id: 4, a: "l", b: "p"});
-cursor = t.find({$or: [{a: {$in: [/z/, /x/]}}, {a: "w"},
- {b: {$exists: false}}, {b: {$in: ["p"]}}]});
+cursor =
+ t.find({$or: [{a: {$in: [/z/, /x/]}}, {a: "w"}, {b: {$exists: false}}, {b: {$in: ["p"]}}]});
assert.eq(5, cursor.itcount(), "case 19");
// Case 21: two $geoWithin that collapse to a single GEO index scan.
@@ -218,10 +256,28 @@ t.drop();
t.ensureIndex({loc: "2dsphere"});
t.insert({_id: 0, loc: {type: "Point", coordinates: [40, 5]}});
t.insert({_id: 1, loc: {type: "Point", coordinates: [0, 0]}});
-cursor = t.find({$or: [{loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[39,4], [41,4], [41,6], [39,6], [39,4]]]}}}},
- {loc: {$geoWithin: {$geometry:
- {type: "Polygon",
- coordinates: [[[-1,-1], [1,-1], [1,1], [-1,1], [-1,-1]]]}}}}]});
+cursor = t.find({
+ $or: [
+ {
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[39, 4], [41, 4], [41, 6], [39, 6], [39, 4]]]
+ }
+ }
+ }
+ },
+ {
+ loc: {
+ $geoWithin: {
+ $geometry: {
+ type: "Polygon",
+ coordinates: [[[-1, -1], [1, -1], [1, 1], [-1, 1], [-1, -1]]]
+ }
+ }
+ }
+ }
+ ]
+});
assert.eq(2, cursor.itcount(), "case 21");
diff --git a/jstests/core/ora.js b/jstests/core/ora.js
index 67af4c191ec..f50f0d13027 100644
--- a/jstests/core/ora.js
+++ b/jstests/core/ora.js
@@ -6,12 +6,16 @@ for (var i = 0; i < 10; i += 1) {
t.save({x: i, y: 10 - i});
}
assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}]}).count()");
-assert.eq.automsg("2", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 2'}]}).count()");
-assert.eq.automsg("1", "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 8'}]}).count()");
+assert.eq.automsg("2",
+ "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 2'}]}).count()");
+assert.eq.automsg("1",
+ "t.find({$or: [{$where: 'this.x === 2'}, {$where: 'this.y === 8'}]}).count()");
assert.eq.automsg("10", "t.find({$or: [{$where: 'this.x === 2'}, {x: {$ne: 2}}]}).count()");
// geo
t.drop();
t.ensureIndex({loc: "2d"});
-assert.throws(function () {t.find({$or: [{loc: {$near: [11, 11]}}]}).limit(1).next()['_id'];});
+assert.throws(function() {
+ t.find({$or: [{loc: {$near: [11, 11]}}]}).limit(1).next()['_id'];
+});
diff --git a/jstests/core/orb.js b/jstests/core/orb.js
index a4abdeecabf..345ac92d26e 100644
--- a/jstests/core/orb.js
+++ b/jstests/core/orb.js
@@ -3,15 +3,16 @@
var t = db.jstests_orb;
t.drop();
-t.save( {a:1} );
-t.ensureIndex( {a:-1} );
+t.save({a: 1});
+t.ensureIndex({a: -1});
-assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
+assert.eq.automsg("1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )");
t.drop();
-t.save( {a:1,b:1} );
-t.ensureIndex( {a:1,b:-1} );
+t.save({a: 1, b: 1});
+t.ensureIndex({a: 1, b: -1});
-assert.eq.automsg( "1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )" );
-assert.eq.automsg( "1", "t.count( {$or: [ { a:1, b: { $gt:0,$lt:2 } }, { a:1, b: { $gt:-1,$lt:3 } } ] } )" ); \ No newline at end of file
+assert.eq.automsg("1", "t.count( {$or: [ { a: { $gt:0,$lt:2 } }, { a: { $gt:-1,$lt:3 } } ] } )");
+assert.eq.automsg(
+ "1", "t.count( {$or: [ { a:1, b: { $gt:0,$lt:2 } }, { a:1, b: { $gt:-1,$lt:3 } } ] } )"); \ No newline at end of file
diff --git a/jstests/core/orc.js b/jstests/core/orc.js
index dec6a7b920d..7d686972898 100644
--- a/jstests/core/orc.js
+++ b/jstests/core/orc.js
@@ -2,28 +2,50 @@
t = db.jstests_orc;
t.drop();
-// The goal here will be to ensure the full range of valid values is scanned for each or clause, in order to ensure that
-// duplicates are eliminated properly in the cases below when field range elimination is not employed. The deduplication
-// of interest will occur on field a. The range specifications for fields b and c are such that (in the current
-// implementation) field range elimination will not occur between the or clauses, meaning that the full range of valid values
+// The goal here will be to ensure the full range of valid values is scanned for each or clause,
+// in order to ensure that duplicates are eliminated properly in the cases below when field range
+// elimination is not employed. The deduplication of interest will occur on field a. The range
+// specifications for fields b and c are such that (in the current implementation) field range
+// elimination will not occur between the or clauses, meaning that the full range of valid values
// will be scanned for each clause and deduplication will be forced.
-// NOTE This test uses some tricks to avoid or range elimination, but in future implementations these tricks may not apply.
-// Perhaps it would be worthwhile to create a mode where range elimination is disabled so it will be possible to write a more
+// NOTE This test uses some tricks to avoid or range elimination, but in future implementations
+// these tricks may not apply. Perhaps it would be worthwhile to create a mode where range
+// elimination is disabled so it will be possible to write a more
// robust test.
-t.ensureIndex( {a:-1,b:1,c:1} );
+t.ensureIndex({a: -1, b: 1, c: 1});
// sanity test
-t.save( {a:null,b:4,c:4} );
-assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
+t.save({a: null, b: 4, c: 4});
+assert.eq(1,
+ t.count({
+ $or: [
+ {a: null, b: {$gte: 0, $lte: 5}, c: {$gte: 0, $lte: 5}},
+ {a: null, b: {$gte: 3, $lte: 8}, c: {$gte: 3, $lte: 8}}
+ ]
+ }));
// from here on is SERVER-2245
t.remove({});
-t.save( {b:4,c:4} );
-assert.eq( 1, t.count( {$or:[{a:null,b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:null,b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
+t.save({b: 4, c: 4});
+assert.eq(1,
+ t.count({
+ $or: [
+ {a: null, b: {$gte: 0, $lte: 5}, c: {$gte: 0, $lte: 5}},
+ {a: null, b: {$gte: 3, $lte: 8}, c: {$gte: 3, $lte: 8}}
+ ]
+ }));
-//t.remove({});
-//t.save( {a:[],b:4,c:4} );
-//printjson( t.find( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ).explain() );
-//assert.eq( 1, t.count( {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} ) );
+// t.remove({});
+// t.save( {a:[],b:4,c:4} );
+// printjson( t.find(
+// {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]}
+// ).explain() );
+// assert.eq( 1, t.count(
+// {$or:[{a:[],b:{$gte:0,$lte:5},c:{$gte:0,$lte:5}},{a:[],b:{$gte:3,$lte:8},c:{$gte:3,$lte:8}}]} )
+// );
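To make the comment above concrete: a minimal mongo shell sketch of the deduplication it describes, assuming the same helpers (save, ensureIndex, count) these tests already use; the collection name jstests_orc_sketch is made up.
var dedupColl = db.jstests_orc_sketch;
dedupColl.drop();
dedupColl.ensureIndex({a: -1, b: 1, c: 1});
dedupColl.save({a: null, b: 4, c: 4});  // satisfies both $or clauses below
// Each clause scans the full range of valid values, so the document is seen twice
// but must be counted only once.
assert.eq(1, dedupColl.count({
    $or: [
        {a: null, b: {$gte: 0, $lte: 5}, c: {$gte: 0, $lte: 5}},
        {a: null, b: {$gte: 3, $lte: 8}, c: {$gte: 3, $lte: 8}}
    ]
}));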
diff --git a/jstests/core/ord.js b/jstests/core/ord.js
index df47c405146..46607b539e8 100644
--- a/jstests/core/ord.js
+++ b/jstests/core/ord.js
@@ -1,6 +1,6 @@
// check that we don't crash if an index used by an earlier or clause is dropped
-// Dropping an index kills all cursors on the indexed namespace, not just those
+// Dropping an index kills all cursors on the indexed namespace, not just those
// cursors using the dropped index. This test is to serve as a reminder that
// the $or implementation may need minor adjustments (memory ownership) if this
// behavior is changed.
@@ -8,27 +8,27 @@
t = db.jstests_ord;
t.drop();
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
-for( i = 0; i < 80; ++i ) {
- t.save( {a:1} );
+for (i = 0; i < 80; ++i) {
+ t.save({a: 1});
}
-for( i = 0; i < 100; ++i ) {
- t.save( {b:1} );
+for (i = 0; i < 100; ++i) {
+ t.save({b: 1});
}
-c = t.find( { $or: [ {a:1}, {b:1} ] } ).batchSize( 100 );
-for( i = 0; i < 90; ++i ) {
+c = t.find({$or: [{a: 1}, {b: 1}]}).batchSize(100);
+for (i = 0; i < 90; ++i) {
c.next();
}
// At this point, our initial query has ended and there is a client cursor waiting
// to read additional documents from index {b:1}. Deduping is performed against
// the index key {a:1}.
-t.dropIndex( {a:1} );
+t.dropIndex({a: 1});
-// Dropping an index kills all cursors on the indexed namespace, not just those
+// Dropping an index kills all cursors on the indexed namespace, not just those
// cursors using the dropped index.
-assert.throws( c.next() );
+assert.throws(c.next());
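A hedged sketch of the invariant ord.js leans on (dropping any index kills all cursors on the namespace); the collection name is made up, and the exact point where the error surfaces can vary with batching and server version.
var killColl = db.jstests_ord_sketch;
killColl.drop();
killColl.ensureIndex({a: 1});
for (var i = 0; i < 300; ++i) {
    killColl.save({a: 1});
}
var cur = killColl.find({a: 1}).batchSize(100);  // first batch of 100 is buffered in the shell
cur.next();
killColl.dropIndex({a: 1});  // kills every open cursor on the namespace, not just those using {a: 1}
assert.throws(function() {
    cur.itcount();  // the getMore for the next batch hits the killed cursor
});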
diff --git a/jstests/core/ore.js b/jstests/core/ore.js
index 93538f8b46b..959f43f5101 100644
--- a/jstests/core/ore.js
+++ b/jstests/core/ore.js
@@ -4,10 +4,10 @@
t = db.jstests_ore;
t.drop();
-t.ensureIndex( {a:-1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: -1});
+t.ensureIndex({b: 1});
-t.save( {a:1,b:1} );
-t.save( {a:2,b:1} );
+t.save({a: 1, b: 1});
+t.save({a: 2, b: 1});
-assert.eq( 2, t.count( {$or:[{a:{$in:[1,2]}},{b:1}]} ) );
+assert.eq(2, t.count({$or: [{a: {$in: [1, 2]}}, {b: 1}]}));
diff --git a/jstests/core/orf.js b/jstests/core/orf.js
index bae8c61f89a..5d58e59c74f 100644
--- a/jstests/core/orf.js
+++ b/jstests/core/orf.js
@@ -5,18 +5,20 @@ t.drop();
var a = [];
var expectBounds = [];
-for( var i = 0; i < 200; ++i ) {
- a.push( {_id:i} );
+for (var i = 0; i < 200; ++i) {
+ a.push({_id: i});
expectBounds.push([i, i]);
}
-a.forEach( function( x ) { t.save( x ); } );
+a.forEach(function(x) {
+ t.save(x);
+});
// This $or query is answered as an index scan over
// a series of _id index point intervals.
-explain = t.find( {$or:a} ).hint( {_id: 1} ).explain( true );
-printjson( explain );
-assert.eq( 200, explain.executionStats.nReturned, 'n' );
-assert.eq( 200, explain.executionStats.totalKeysExamined, 'keys examined' );
-assert.eq( 200, explain.executionStats.totalDocsExamined, 'docs examined' );
+explain = t.find({$or: a}).hint({_id: 1}).explain(true);
+printjson(explain);
+assert.eq(200, explain.executionStats.nReturned, 'n');
+assert.eq(200, explain.executionStats.totalKeysExamined, 'keys examined');
+assert.eq(200, explain.executionStats.totalDocsExamined, 'docs examined');
-assert.eq( 200, t.count( {$or:a} ) );
+assert.eq(200, t.count({$or: a}));
diff --git a/jstests/core/org.js b/jstests/core/org.js
index 19239f96c10..4bc4a813b4c 100644
--- a/jstests/core/org.js
+++ b/jstests/core/org.js
@@ -3,17 +3,17 @@
t = db.jstests_org;
t.drop();
-t.ensureIndex( {a:1}, {sparse:true} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1}, {sparse: true});
+t.ensureIndex({b: 1});
t.remove({});
-t.save( {a:1,b:2} );
-assert.eq( 1, t.count( {$or:[{a:1},{b:2}]} ) );
+t.save({a: 1, b: 2});
+assert.eq(1, t.count({$or: [{a: 1}, {b: 2}]}));
t.remove({});
-t.save( {a:null,b:2} );
-assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
+t.save({a: null, b: 2});
+assert.eq(1, t.count({$or: [{a: null}, {b: 2}]}));
t.remove({});
-t.save( {b:2} );
-assert.eq( 1, t.count( {$or:[{a:null},{b:2}]} ) );
+t.save({b: 2});
+assert.eq(1, t.count({$or: [{a: null}, {b: 2}]}));
diff --git a/jstests/core/orh.js b/jstests/core/orh.js
index 5fb845fd01c..357bb1ea5c3 100644
--- a/jstests/core/orh.js
+++ b/jstests/core/orh.js
@@ -3,15 +3,15 @@
t = db.jstests_orh;
t.drop();
-t.ensureIndex( {a:1}, {sparse:true} );
-t.ensureIndex( {b:1,a:1} );
+t.ensureIndex({a: 1}, {sparse: true});
+t.ensureIndex({b: 1, a: 1});
t.remove({});
-t.save( {b:2} );
-assert.eq( 1, t.count( {a:null} ) );
-assert.eq( 1, t.count( {b:2,a:null} ) );
+t.save({b: 2});
+assert.eq(1, t.count({a: null}));
+assert.eq(1, t.count({b: 2, a: null}));
-assert.eq( 1, t.count( {$or:[{b:2,a:null},{a:null}]} ) );
+assert.eq(1, t.count({$or: [{b: 2, a: null}, {a: null}]}));
// Is this desired?
-assert.eq( 1, t.count( {$or:[{a:null},{b:2,a:null}]} ) );
+assert.eq(1, t.count({$or: [{a: null}, {b: 2, a: null}]}));
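The "Is this desired?" question above hinges on {a: null} matching documents that lack the field even though the sparse index has no entry for them; a minimal sketch with a made-up collection name.
var sparseColl = db.jstests_orh_sketch;
sparseColl.drop();
sparseColl.ensureIndex({a: 1}, {sparse: true});
sparseColl.save({b: 2});  // no 'a' field, so no key in the sparse index
// {a: null} matches missing fields, so the sparse index alone cannot answer this predicate.
assert.eq(1, sparseColl.count({a: null}));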
diff --git a/jstests/core/orj.js b/jstests/core/orj.js
index fa234f36cb5..683f45253a8 100644
--- a/jstests/core/orj.js
+++ b/jstests/core/orj.js
@@ -3,119 +3,159 @@
t = db.jstests_orj;
t.drop();
-t.save( {a:1,b:2} );
+t.save({a: 1, b: 2});
function check() {
-
-assert.throws( function() { t.find( { x:0,$or:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[ "a" ] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$or:[{$or:"a"}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[{$or:[]}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$or:[{$or:[ "a" ]}] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$nor:"a" } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[ "a" ] } ).toArray(); } );
-
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:"a"}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:[]}] } ).toArray(); } );
-assert.throws( function() { t.find( { x:0,$nor:[{$nor:[ "a" ]}] } ).toArray(); } );
-
-assert.eq( 1, t.find( {a:1,b:2} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).itcount() );
-assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).itcount() );
-assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).itcount() );
-
-assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).itcount() );
-
-assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).itcount() );
-assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).itcount() );
-
+ assert.throws(function() {
+ t.find({x: 0, $or: "a"}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: []}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: ["a"]}).toArray();
+ });
+
+ assert.throws(function() {
+ t.find({x: 0, $or: [{$or: "a"}]}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: [{$or: []}]}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $or: [{$or: ["a"]}]}).toArray();
+ });
+
+ assert.throws(function() {
+ t.find({x: 0, $nor: "a"}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: []}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: ["a"]}).toArray();
+ });
+
+ assert.throws(function() {
+ t.find({x: 0, $nor: [{$nor: "a"}]}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: [{$nor: []}]}).toArray();
+ });
+ assert.throws(function() {
+ t.find({x: 0, $nor: [{$nor: ["a"]}]}).toArray();
+ });
+
+ assert.eq(1, t.find({a: 1, b: 2}).itcount());
+
+ assert.eq(1, t.find({a: 1, $or: [{b: 2}]}).itcount());
+ assert.eq(0, t.find({a: 1, $or: [{b: 3}]}).itcount());
+
+ assert.eq(1, t.find({a: 1, $or: [{$or: [{b: 2}]}]}).itcount());
+ assert.eq(1, t.find({a: 1, $or: [{$or: [{b: 2}]}]}).itcount());
+ assert.eq(0, t.find({a: 1, $or: [{$or: [{b: 3}]}]}).itcount());
+
+ assert.eq(1, t.find({$or: [{$or: [{a: 2}, {b: 2}]}]}).itcount());
+ assert.eq(1, t.find({$or: [{a: 2}, {$or: [{b: 2}]}]}).itcount());
+ assert.eq(1, t.find({$or: [{a: 1}, {$or: [{b: 3}]}]}).itcount());
+
+ assert.eq(1, t.find({$or: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 3}, {b: 4}]}]}).itcount());
+ assert.eq(1, t.find({$or: [{$or: [{a: 0}, {a: 2}]}, {$or: [{b: 2}, {b: 4}]}]}).itcount());
+ assert.eq(0, t.find({$or: [{$or: [{a: 0}, {a: 2}]}, {$or: [{b: 3}, {b: 4}]}]}).itcount());
+
+ assert.eq(1, t.find({a: 1, $and: [{$or: [{$or: [{b: 2}]}]}]}).itcount());
+ assert.eq(0, t.find({a: 1, $and: [{$or: [{$or: [{b: 3}]}]}]}).itcount());
+
+ assert.eq(1, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 1}, {b: 2}]}]}).itcount());
+ assert.eq(0, t.find({$and: [{$or: [{a: 3}, {a: 2}]}, {$or: [{b: 1}, {b: 2}]}]}).itcount());
+ assert.eq(0, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 3}, {b: 1}]}]}).itcount());
+
+ assert.eq(0, t.find({$and: [{$nor: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).itcount());
+ assert.eq(0, t.find({$and: [{$nor: [{a: 3}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).itcount());
+ assert.eq(1, t.find({$and: [{$nor: [{a: 3}, {a: 2}]}, {$nor: [{b: 3}, {b: 1}]}]}).itcount());
+
+ assert.eq(1, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 3}]}]}).itcount());
+ assert.eq(0, t.find({$and: [{$or: [{a: 3}, {a: 2}]}, {$nor: [{b: 1}, {b: 3}]}]}).itcount());
+ assert.eq(0, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).itcount());
}
check();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
check();
t.dropIndexes();
-t.ensureIndex( {b:1} );
+t.ensureIndex({b: 1});
check();
t.dropIndexes();
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
check();
t.dropIndexes();
-t.ensureIndex( {a:1,b:1} );
+t.ensureIndex({a: 1, b: 1});
check();
t.dropIndexes();
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-t.ensureIndex( {a:1,b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+t.ensureIndex({a: 1, b: 1});
check();
-function checkHinted( hint ) {
- assert.eq( 1, t.find( {a:1,b:2} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$or:[{b:2}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$or:[{b:3}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {a:1,$or:[{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$or:[{$or:[{b:3}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$or:[{$or:[{a:2},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{a:2},{$or:[{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{a:1},{$or:[{b:3}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$or:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:2},{b:4}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$or:[{$or:[{a:0},{a:2}]},{$or:[{b:3},{b:4}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {a:1,$and:[{$or:[{$or:[{b:2}]}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {a:1,$and:[{$or:[{$or:[{b:3}]}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$or:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$or:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 0, t.find( {$and:[{$nor:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
- assert.eq( 1, t.find( {$and:[{$nor:[{a:3},{a:2}]},{$nor:[{b:3},{b:1}]}]} ).hint( hint ).itcount() );
-
- assert.eq( 1, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:3},{a:2}]},{$nor:[{b:1},{b:3}]}]} ).hint( hint ).itcount() );
- assert.eq( 0, t.find( {$and:[{$or:[{a:1},{a:2}]},{$nor:[{b:1},{b:2}]}]} ).hint( hint ).itcount() );
+function checkHinted(hint) {
+ assert.eq(1, t.find({a: 1, b: 2}).hint(hint).itcount());
+
+ assert.eq(1, t.find({a: 1, $or: [{b: 2}]}).hint(hint).itcount());
+ assert.eq(0, t.find({a: 1, $or: [{b: 3}]}).hint(hint).itcount());
+
+ assert.eq(1, t.find({a: 1, $or: [{$or: [{b: 2}]}]}).hint(hint).itcount());
+ assert.eq(1, t.find({a: 1, $or: [{$or: [{b: 2}]}]}).hint(hint).itcount());
+ assert.eq(0, t.find({a: 1, $or: [{$or: [{b: 3}]}]}).hint(hint).itcount());
+
+ assert.eq(1, t.find({$or: [{$or: [{a: 2}, {b: 2}]}]}).hint(hint).itcount());
+ assert.eq(1, t.find({$or: [{a: 2}, {$or: [{b: 2}]}]}).hint(hint).itcount());
+ assert.eq(1, t.find({$or: [{a: 1}, {$or: [{b: 3}]}]}).hint(hint).itcount());
+
+ assert.eq(
+ 1, t.find({$or: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 3}, {b: 4}]}]}).hint(hint).itcount());
+ assert.eq(
+ 1, t.find({$or: [{$or: [{a: 0}, {a: 2}]}, {$or: [{b: 2}, {b: 4}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0, t.find({$or: [{$or: [{a: 0}, {a: 2}]}, {$or: [{b: 3}, {b: 4}]}]}).hint(hint).itcount());
+
+ assert.eq(1, t.find({a: 1, $and: [{$or: [{$or: [{b: 2}]}]}]}).hint(hint).itcount());
+ assert.eq(0, t.find({a: 1, $and: [{$or: [{$or: [{b: 3}]}]}]}).hint(hint).itcount());
+
+ assert.eq(
+ 1, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 1}, {b: 2}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0, t.find({$and: [{$or: [{a: 3}, {a: 2}]}, {$or: [{b: 1}, {b: 2}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0, t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$or: [{b: 3}, {b: 1}]}]}).hint(hint).itcount());
+
+ assert.eq(
+ 0,
+ t.find({$and: [{$nor: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0,
+ t.find({$and: [{$nor: [{a: 3}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).hint(hint).itcount());
+ assert.eq(
+ 1,
+ t.find({$and: [{$nor: [{a: 3}, {a: 2}]}, {$nor: [{b: 3}, {b: 1}]}]}).hint(hint).itcount());
+
+ assert.eq(
+ 1,
+ t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 3}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0,
+ t.find({$and: [{$or: [{a: 3}, {a: 2}]}, {$nor: [{b: 1}, {b: 3}]}]}).hint(hint).itcount());
+ assert.eq(
+ 0,
+ t.find({$and: [{$or: [{a: 1}, {a: 2}]}, {$nor: [{b: 1}, {b: 2}]}]}).hint(hint).itcount());
}
-checkHinted( {$natural:1} );
-checkHinted( {a:1} );
-checkHinted( {b:1} );
-checkHinted( {a:1,b:1} ); \ No newline at end of file
+checkHinted({$natural: 1});
+checkHinted({a: 1});
+checkHinted({b: 1});
+checkHinted({a: 1, b: 1}); \ No newline at end of file
diff --git a/jstests/core/ork.js b/jstests/core/ork.js
index d6d40161e69..f367b6b4bad 100644
--- a/jstests/core/ork.js
+++ b/jstests/core/ork.js
@@ -3,9 +3,21 @@
t = db.jstests_ork;
t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:[1,2],b:5} );
-t.save( {a:[2,4],b:5} );
+t.ensureIndex({a: 1});
+t.save({a: [1, 2], b: 5});
+t.save({a: [2, 4], b: 5});
-assert.eq( 2, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:5}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
-assert.eq( 1, t.find( {$or:[{a:1,$and:[{$or:[{a:2},{a:3}]},{$or:[{b:6}]}]},{a:2,$or:[{a:3},{a:4}]}]} ).itcount() );
+assert.eq(2,
+ t.find({
+ $or: [
+ {a: 1, $and: [{$or: [{a: 2}, {a: 3}]}, {$or: [{b: 5}]}]},
+ {a: 2, $or: [{a: 3}, {a: 4}]}
+ ]
+ }).itcount());
+assert.eq(1,
+ t.find({
+ $or: [
+ {a: 1, $and: [{$or: [{a: 2}, {a: 3}]}, {$or: [{b: 6}]}]},
+ {a: 2, $or: [{a: 3}, {a: 4}]}
+ ]
+ }).itcount());
diff --git a/jstests/core/oro.js b/jstests/core/oro.js
index d93bfa6dd09..be8a99e7e35 100644
--- a/jstests/core/oro.js
+++ b/jstests/core/oro.js
@@ -4,24 +4,25 @@ t = db.jstests_oro;
t.drop();
orClauses = [];
-for( idxKey = 'a'; idxKey <= 'aaaaaaaaaa'; idxKey += 'a' ) {
+for (idxKey = 'a'; idxKey <= 'aaaaaaaaaa'; idxKey += 'a') {
idx = {};
- idx[ idxKey ] = 1;
- t.ensureIndex( idx );
- for( i = 0; i < 200; ++i ) {
- t.insert( idx );
+ idx[idxKey] = 1;
+ t.ensureIndex(idx);
+ for (i = 0; i < 200; ++i) {
+ t.insert(idx);
}
- orClauses.push( idx );
+ orClauses.push(idx);
}
-printjson( t.find({$or:orClauses}).explain() );
-c = t.find({$or:orClauses}).batchSize( 100 );
+printjson(t.find({$or: orClauses}).explain());
+c = t.find({$or: orClauses}).batchSize(100);
count = 0;
-while( c.hasNext() ) {
- for( i = 0; i < 50 && c.hasNext(); ++i, c.next(), ++count );
+while (c.hasNext()) {
+ for (i = 0; i < 50 && c.hasNext(); ++i, c.next(), ++count)
+ ;
// Interleave with another operation.
t.stats();
}
-assert.eq( 10 * 200, count );
+assert.eq(10 * 200, count);
diff --git a/jstests/core/orp.js b/jstests/core/orp.js
index 18abdfbc63a..a706d6f4c1a 100644
--- a/jstests/core/orp.js
+++ b/jstests/core/orp.js
@@ -4,40 +4,39 @@
t = db.jstests_orp;
t.drop();
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-t.ensureIndex( { c:1 } );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+t.ensureIndex({c: 1});
-for( i = 0; i < 200; ++i ) {
- t.save( { a:1, b:1 } );
+for (i = 0; i < 200; ++i) {
+ t.save({a: 1, b: 1});
}
// Deduping results from the previous clause.
-assert.eq( 200, t.count( { $or:[ { a:1 }, { b:1 } ] } ) );
+assert.eq(200, t.count({$or: [{a: 1}, {b: 1}]}));
// Deduping results from a prior clause.
-assert.eq( 200, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
-t.save( { c:1 } );
-assert.eq( 201, t.count( { $or:[ { a:1 }, { c:1 }, { b:1 } ] } ) );
+assert.eq(200, t.count({$or: [{a: 1}, {c: 1}, {b: 1}]}));
+t.save({c: 1});
+assert.eq(201, t.count({$or: [{a: 1}, {c: 1}, {b: 1}]}));
// Deduping results that would normally be index only matches on overlapping and double scanned $or
// field regions.
t.drop();
-t.ensureIndex( { a:1, b:1 } );
-for( i = 0; i < 16; ++i ) {
- for( j = 0; j < 16; ++j ) {
- t.save( { a:i, b:j } );
+t.ensureIndex({a: 1, b: 1});
+for (i = 0; i < 16; ++i) {
+ for (j = 0; j < 16; ++j) {
+ t.save({a: i, b: j});
}
}
-assert.eq( 16 * 16,
- t.count( { $or:[ { a:{ $gte:0 }, b:{ $gte:0 } }, { a:{ $lte:16 }, b:{ $lte:16 } } ] } ) );
+assert.eq(16 * 16, t.count({$or: [{a: {$gte: 0}, b: {$gte: 0}}, {a: {$lte: 16}, b: {$lte: 16}}]}));
// Deduping results from a clause that completed before the multi cursor takeover.
t.drop();
-t.ensureIndex( { a:1 } );
-t.ensureIndex( { b:1 } );
-t.save( { a:1,b:200 } );
-for( i = 0; i < 200; ++i ) {
- t.save( { b:i } );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+t.save({a: 1, b: 200});
+for (i = 0; i < 200; ++i) {
+ t.save({b: i});
}
-assert.eq( 201, t.count( { $or:[ { a:1 }, { b:{ $gte:0 } } ] } ) );
+assert.eq(201, t.count({$or: [{a: 1}, {b: {$gte: 0}}]}));
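The dedup paths orp.js exercises all reduce to the same observable rule: a document matching several $or clauses is returned once. A minimal sketch, with a made-up collection name.
var dupColl = db.jstests_orp_sketch;
dupColl.drop();
dupColl.ensureIndex({a: 1});
dupColl.ensureIndex({b: 1});
for (var i = 0; i < 10; ++i) {
    dupColl.save({a: 1, b: 1});  // every document matches both clauses
}
assert.eq(10, dupColl.count({$or: [{a: 1}, {b: 1}]}));  // 10, not 20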
diff --git a/jstests/core/plan_cache_clear.js b/jstests/core/plan_cache_clear.js
index 264c2885f48..8f9cf0ea302 100644
--- a/jstests/core/plan_cache_clear.js
+++ b/jstests/core/plan_cache_clear.js
@@ -23,7 +23,7 @@ t.save({a: 2, b: 2});
// We need two indices so that the MultiPlanRunner is executed.
t.ensureIndex({a: 1});
-t.ensureIndex({a: 1, b:1});
+t.ensureIndex({a: 1, b: 1});
// Run a query so that an entry is inserted into the cache.
assert.eq(1, t.find({a: 1, b: 1}).itcount(), 'unexpected document count');
diff --git a/jstests/core/plan_cache_list_plans.js b/jstests/core/plan_cache_list_plans.js
index 038bcd949eb..b4be4ad46c4 100644
--- a/jstests/core/plan_cache_list_plans.js
+++ b/jstests/core/plan_cache_list_plans.js
@@ -5,11 +5,15 @@ t.drop();
// Utility function to list plans for a query.
function getPlans(query, sort, projection) {
- var key = {query: query, sort: sort, projection: projection};
+ var key = {
+ query: query,
+ sort: sort,
+ projection: projection
+ };
var res = t.runCommand('planCacheListPlans', key);
assert.commandWorked(res, 'planCacheListPlans(' + tojson(key, '', true) + ' failed');
- assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
- tojson(key, '', true) + ') result');
+ assert(res.hasOwnProperty('plans'),
+ 'plans missing from planCacheListPlans(' + tojson(key, '', true) + ') result');
return res.plans;
}
@@ -20,14 +24,16 @@ t.save({a: 2, b: 2});
// We need two indices so that the MultiPlanRunner is executed.
t.ensureIndex({a: 1});
-t.ensureIndex({a: 1, b:1});
+t.ensureIndex({a: 1, b: 1});
// Invalid key should be an error.
-assert.eq(0, getPlans({unknownfield: 1}, {}, {}),
+assert.eq(0,
+ getPlans({unknownfield: 1}, {}, {}),
'planCacheListPlans should return empty results on unknown query shape');
// Create a cache entry.
-assert.eq(1, t.find({a: 1, b: 1}, {_id: 0, a: 1}).sort({a: -1}).itcount(),
+assert.eq(1,
+ t.find({a: 1, b: 1}, {_id: 0, a: 1}).sort({a: -1}).itcount(),
'unexpected document count');
// Retrieve plans for valid cache entry.
@@ -65,10 +71,10 @@ for (var i = 0; i < plans.length; i++) {
print('plan ' + i + ': ' + tojson(plans[i]));
assert.gt(plans[i].reason.score, 0, 'plan ' + i + ' score is invalid');
if (i > 0) {
- assert.lte(plans[i].reason.score, plans[i-1].reason.score,
+ assert.lte(plans[i].reason.score,
+ plans[i - 1].reason.score,
'plans not sorted by score in descending order. ' +
- 'plan ' + i + ' has a score that is greater than that of the previous plan');
+ 'plan ' + i + ' has a score that is greater than that of the previous plan');
}
assert(plans[i].reason.stats.hasOwnProperty('stage'), 'no stats inserted for plan ' + i);
}
-
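For readers following the plan cache tests, this is roughly how a cache entry is created and then listed; the collection name is made up, and the command key mirrors the {query, sort, projection} shape used by getPlans() above.
var pcColl = db.jstests_plan_cache_sketch;
pcColl.drop();
pcColl.save({a: 1, b: 1});
pcColl.ensureIndex({a: 1});
pcColl.ensureIndex({a: 1, b: 1});  // two candidate indexes, so the MultiPlanRunner executes
assert.eq(1, pcColl.find({a: 1, b: 1}).itcount());  // running the query creates a cache entry
var res = pcColl.runCommand('planCacheListPlans', {query: {a: 1, b: 1}, sort: {}, projection: {}});
assert.commandWorked(res);
assert(res.plans.length > 0, 'expected at least one cached plan');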
diff --git a/jstests/core/plan_cache_list_shapes.js b/jstests/core/plan_cache_list_shapes.js
index c22d7caa2e4..4711940870d 100644
--- a/jstests/core/plan_cache_list_shapes.js
+++ b/jstests/core/plan_cache_list_shapes.js
@@ -20,7 +20,8 @@ function getShapes(collection) {
// and should return an empty array of query shapes.
var missingCollection = db.jstests_query_cache_missing;
missingCollection.drop();
-assert.eq(0, getShapes(missingCollection).length,
+assert.eq(0,
+ getShapes(missingCollection).length,
'planCacheListQueryShapes should return empty array on non-existent collection');
t.save({a: 1, b: 1});
@@ -33,7 +34,8 @@ t.ensureIndex({a: 1});
t.ensureIndex({a: 1, b: 1});
// Run a query.
-assert.eq(1, t.find({a: 1, b: 1}, {_id: 1, a: 1}).sort({a: -1}).itcount(),
+assert.eq(1,
+ t.find({a: 1, b: 1}, {_id: 1, a: 1}).sort({a: -1}).itcount(),
'unexpected document count');
// We now expect the two indices to be compared and a cache entry to exist.
@@ -41,7 +43,8 @@ assert.eq(1, t.find({a: 1, b: 1}, {_id: 1, a: 1}).sort({a: -1}).itcount(),
// Number of shapes should match queries executed by multi-plan runner.
var shapes = getShapes();
assert.eq(1, shapes.length, 'unexpected number of shapes in planCacheListQueryShapes result');
-assert.eq({query: {a: 1, b: 1}, sort: {a: -1}, projection: {_id: 1, a: 1}}, shapes[0],
+assert.eq({query: {a: 1, b: 1}, sort: {a: -1}, projection: {_id: 1, a: 1}},
+ shapes[0],
'unexpected query shape returned from planCacheListQueryShapes');
// Running a different query shape should cause another entry to be cached.
diff --git a/jstests/core/plan_cache_shell_helpers.js b/jstests/core/plan_cache_shell_helpers.js
index c22e0e451eb..a61421afc7b 100644
--- a/jstests/core/plan_cache_shell_helpers.js
+++ b/jstests/core/plan_cache_shell_helpers.js
@@ -16,26 +16,38 @@ function getShapes(collection) {
}
// Utility function to list plans for a query.
function getPlans(query, sort, projection) {
- var key = {query: query, sort: sort, projection: projection};
+ var key = {
+ query: query,
+ sort: sort,
+ projection: projection
+ };
var res = t.runCommand('planCacheListPlans', key);
assert.commandWorked(res, 'planCacheListPlans(' + tojson(key, '', true) + ' failed');
- assert(res.hasOwnProperty('plans'), 'plans missing from planCacheListPlans(' +
- tojson(key, '', true) + ') result');
+ assert(res.hasOwnProperty('plans'),
+ 'plans missing from planCacheListPlans(' + tojson(key, '', true) + ') result');
return res.plans;
}
// Add data and indices.
var n = 200;
for (var i = 0; i < n; i++) {
- t.save({a:i, b: -1, c: 1});
+ t.save({a: i, b: -1, c: 1});
}
t.ensureIndex({a: 1});
t.ensureIndex({b: 1});
// Populate plan cache.
-var queryB = {a: {$gte: 199}, b: -1};
-var projectionB = {_id: 0, b: 1};
-var sortC = {c: -1};
+var queryB = {
+ a: {$gte: 199},
+ b: -1
+};
+var projectionB = {
+ _id: 0,
+ b: 1
+};
+var sortC = {
+ c: -1
+};
assert.eq(1, t.find(queryB, projectionB).sort(sortC).itcount(), 'unexpected document count');
assert.eq(1, t.find(queryB, projectionB).itcount(), 'unexpected document count');
assert.eq(1, t.find(queryB).sort(sortC).itcount(), 'unexpected document count');
@@ -68,10 +80,12 @@ print(planCache);
var missingCollection = db.jstests_plan_cache_missing;
missingCollection.drop();
// should return empty array on non-existent collection.
-assert.eq(0, missingCollection.getPlanCache().listQueryShapes().length,
+assert.eq(0,
+ missingCollection.getPlanCache().listQueryShapes().length,
'collection.getPlanCache().listQueryShapes() should return empty results ' +
- 'on non-existent collection');
-assert.eq(getShapes(), planCache.listQueryShapes(),
+ 'on non-existent collection');
+assert.eq(getShapes(),
+ planCache.listQueryShapes(),
'unexpected collection.getPlanCache().listQueryShapes() shell helper result');
//
@@ -79,21 +93,27 @@ assert.eq(getShapes(), planCache.listQueryShapes(),
//
// should return empty array on non-existent query shape.
-assert.eq(0, planCache.getPlansByQuery({unknownfield: 1}).length,
+assert.eq(0,
+ planCache.getPlansByQuery({unknownfield: 1}).length,
'collection.getPlanCache().getPlansByQuery() should return empty results ' +
- 'on non-existent collection');
+ 'on non-existent collection');
// should error on missing required field query.
-assert.throws(function() { planCache.getPlansByQuery(); });
+assert.throws(function() {
+ planCache.getPlansByQuery();
+});
// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
-assert.eq(getPlans(queryB, sortC, projectionB), planCache.getPlansByQuery(queryB, projectionB,
- sortC),
+assert.eq(getPlans(queryB, sortC, projectionB),
+ planCache.getPlansByQuery(queryB, projectionB, sortC),
'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, {}, projectionB), planCache.getPlansByQuery(queryB, projectionB),
+assert.eq(getPlans(queryB, {}, projectionB),
+ planCache.getPlansByQuery(queryB, projectionB),
'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, sortC, {}), planCache.getPlansByQuery(queryB, undefined, sortC),
+assert.eq(getPlans(queryB, sortC, {}),
+ planCache.getPlansByQuery(queryB, undefined, sortC),
'plans from collection.getPlanCache().getPlansByQuery() different from command result');
-assert.eq(getPlans(queryB, {}, {}), planCache.getPlansByQuery(queryB),
+assert.eq(getPlans(queryB, {}, {}),
+ planCache.getPlansByQuery(queryB),
'plans from collection.getPlanCache().getPlansByQuery() different from command result');
// getPlansByQuery() will also accept a single argument with the query shape object
@@ -104,7 +124,11 @@ assert.eq(getPlans(queryB, {}, {}), planCache.getPlansByQuery(queryB),
// projection: <projection>,
// sort: <sort>
// }
-var shapeB = {query: queryB, projection: projectionB, sort: sortC};
+var shapeB = {
+ query: queryB,
+ projection: projectionB,
+ sort: sortC
+};
assert.eq(getPlans(queryB, sortC, projectionB),
planCache.getPlansByQuery(shapeB),
'collection.getPlanCache().getPlansByQuery() did not accept query shape object');
@@ -113,16 +137,16 @@ assert.eq(getPlans(queryB, sortC, projectionB),
// The entire invalid query shape object will be passed to the command
// as the 'query' component which will result in the server returning an empty
// array of plans.
-assert.eq(0, planCache.getPlansByQuery({query: queryB}).length,
+assert.eq(0,
+ planCache.getPlansByQuery({query: queryB}).length,
'collection.getPlanCache.getPlansByQuery should return empty results on ' +
- 'incomplete query shape');
-assert.eq(0, planCache.getPlansByQuery({query: queryB, sort: sortC,
- projection: projectionB,
- unknown_field: 1}).length,
+ 'incomplete query shape');
+assert.eq(0,
+ planCache.getPlansByQuery(
+ {query: queryB, sort: sortC, projection: projectionB, unknown_field: 1})
+ .length,
'collection.getPlanCache.getPlansByQuery should return empty results on ' +
- 'invalid query shape');
-
-
+ 'invalid query shape');
//
// collection.getPlanCache().clearPlansByQuery
@@ -131,19 +155,24 @@ assert.eq(0, planCache.getPlansByQuery({query: queryB, sort: sortC,
// should not error on non-existent query shape.
planCache.clearPlansByQuery({unknownfield: 1});
// should error on missing required field query.
-assert.throws(function() { planCache.clearPlansByQuery(); });
+assert.throws(function() {
+ planCache.clearPlansByQuery();
+});
// Invoke with various permutations of required (query) and optional (projection, sort) arguments.
planCache.clearPlansByQuery(queryB, projectionB);
-assert.eq(3, getShapes().length,
+assert.eq(3,
+ getShapes().length,
'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
planCache.clearPlansByQuery(queryB, undefined, sortC);
-assert.eq(2, getShapes().length,
+assert.eq(2,
+ getShapes().length,
'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
planCache.clearPlansByQuery(queryB);
-assert.eq(1, getShapes().length,
+assert.eq(1,
+ getShapes().length,
'query shape not dropped after running collection.getPlanCache().clearPlansByQuery()');
planCache.clear();
@@ -163,15 +192,14 @@ assert.eq(1, t.find(queryB).sort(sortC).itcount(), 'unexpected document count');
// Clear using query shape object.
planCache.clearPlansByQuery({query: queryB, projection: {}, sort: sortC});
-assert.eq(0, getShapes().length,
+assert.eq(0,
+ getShapes().length,
'collection.getPlanCache().clearPlansByQuery() did not accept query shape object');
// Should not error on missing or extra fields in query shape object.
planCache.clearPlansByQuery({query: queryB});
-planCache.clearPlansByQuery({query: queryB, sort: sortC, projection: projectionB,
- unknown_field: 1});
-
-
+planCache.clearPlansByQuery(
+ {query: queryB, sort: sortC, projection: projectionB, unknown_field: 1});
//
// collection.getPlanCache().clear
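The single-argument query-shape form of getPlansByQuery exercised above can be summarized as follows; names are made up and the assertions are kept loose, since plan details are not guaranteed to be identical across calls.
var helperColl = db.jstests_plan_cache_helper_sketch;
helperColl.drop();
helperColl.save({a: 1, b: -1});
helperColl.ensureIndex({a: 1});
helperColl.ensureIndex({b: 1});
assert.eq(1, helperColl.find({a: 1, b: -1}).itcount());  // create a cache entry
var cache = helperColl.getPlanCache();
// Either calling convention addresses the same cached shape.
var byArgs = cache.getPlansByQuery({a: 1, b: -1});
var byShape = cache.getPlansByQuery({query: {a: 1, b: -1}, projection: {}, sort: {}});
assert(byShape.length > 0, 'expected cached plans for the shape');
assert.eq(byArgs.length, byShape.length, 'both calling conventions should address the same entry');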
diff --git a/jstests/core/pop_server_13516.js b/jstests/core/pop_server_13516.js
index 231889b7a7d..8d0bacbb3e5 100644
--- a/jstests/core/pop_server_13516.js
+++ b/jstests/core/pop_server_13516.js
@@ -5,8 +5,8 @@ t.drop();
var id = NumberInt(0);
var object = {
- _id : id,
- data : []
+ _id: id,
+ data: []
};
for (var i = 0; i < 4096; i++) {
@@ -14,7 +14,7 @@ for (var i = 0; i < 4096; i++) {
}
t.insert(object);
-t.update({ _id : id}, { $pop : { data : -1 } });
+t.update({_id: id}, {$pop: {data: -1}});
var modified = t.findOne();
assert.eq(4095, modified.data.length);
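pop_server_13516.js relies on $pop: {field: -1} removing the first array element and $pop: {field: 1} the last; a short illustration with a made-up collection name.
var popColl = db.jstests_pop_sketch;
popColl.drop();
popColl.insert({_id: 0, data: [1, 2, 3]});
popColl.update({_id: 0}, {$pop: {data: -1}});  // -1 removes the first element
assert.eq([2, 3], popColl.findOne().data);
popColl.update({_id: 0}, {$pop: {data: 1}});   // 1 removes the last element
assert.eq([2], popColl.findOne().data);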
diff --git a/jstests/core/profile1.js b/jstests/core/profile1.js
index 4fe9993116f..67f4a53d2b9 100644
--- a/jstests/core/profile1.js
+++ b/jstests/core/profile1.js
@@ -2,13 +2,13 @@
"use strict";
function profileCursor(query) {
query = query || {};
- Object.extend(query, {user:username + "@" + db.getName()});
+ Object.extend(query, {user: username + "@" + db.getName()});
return db.system.profile.find(query);
}
function getProfileAString() {
var s = "\n";
- profileCursor().forEach(function(z){
+ profileCursor().forEach(function(z) {
s += tojson(z) + " ,\n";
});
return s;
@@ -17,7 +17,7 @@
function resetProfile(level, slowms) {
db.setProfilingLevel(0);
db.system.profile.drop();
- db.setProfilingLevel(level,slowms);
+ db.setProfilingLevel(level, slowms);
}
// special db so that it can be run in parallel tests
@@ -29,7 +29,6 @@
db.dropDatabase();
try {
-
db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
db.auth(username, "password");
@@ -63,7 +62,7 @@
assert.eq(2, profileItems.length, "E2 -- " + msg);
// Make sure we can't drop if profiling is still on
- assert.throws(function(z){
+ assert.throws(function(z) {
db.getCollection("system.profile").drop();
});
@@ -88,8 +87,12 @@
resetProfile(2);
db.profile1.drop();
- var q = {_id: 5};
- var u = {$inc: {x: 1}};
+ var q = {
+ _id: 5
+ };
+ var u = {
+ $inc: {x: 1}
+ };
db.profile1.update(q, u);
var r = profileCursor({ns: db.profile1.getFullName()}).sort({$natural: -1})[0];
assert.eq(q, r.query, "Y1: " + tojson(r));
diff --git a/jstests/core/profile2.js b/jstests/core/profile2.js
index 836fbce8f11..bb1605abd1e 100644
--- a/jstests/core/profile2.js
+++ b/jstests/core/profile2.js
@@ -10,7 +10,7 @@ assert.commandWorked(coll.getDB().runCommand({profile: 2}));
var str = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
var hugeStr = str;
-while (hugeStr.length < 2*1024*1024){
+while (hugeStr.length < 2 * 1024 * 1024) {
hugeStr += str;
}
@@ -39,7 +39,7 @@ assert(result.hasOwnProperty('ns'));
assert(result.hasOwnProperty('millis'));
assert(result.hasOwnProperty('query'));
assert.eq('string', typeof(result.query));
-assert(result.query.match(/^{ a: "a+\.\.\." }$/)); // String value is truncated.
+assert(result.query.match(/^{ a: "a+\.\.\." }$/)); // String value is truncated.
assert.commandWorked(coll.getDB().runCommand({profile: 0}));
coll.getDB().system.profile.drop();
@@ -54,7 +54,7 @@ assert(result.hasOwnProperty('ns'));
assert(result.hasOwnProperty('millis'));
assert(result.hasOwnProperty('updateobj'));
assert.eq('string', typeof(result.updateobj));
-assert(result.updateobj.match(/^{ a: "a+\.\.\." }$/)); // String value is truncated.
+assert(result.updateobj.match(/^{ a: "a+\.\.\." }$/)); // String value is truncated.
assert.commandWorked(coll.getDB().runCommand({profile: 0}));
coll.getDB().system.profile.drop();
diff --git a/jstests/core/profile3.js b/jstests/core/profile3.js
index 96b2314ab87..3a22bd1f952 100644
--- a/jstests/core/profile3.js
+++ b/jstests/core/profile3.js
@@ -7,50 +7,47 @@ db.dropAllUsers();
t = db.profile3;
t.drop();
-profileCursor = function( query ) {
- print( "----" );
+profileCursor = function(query) {
+ print("----");
query = query || {};
- Object.extend( query, { user: username + "@" + db.getName() } );
- return db.system.profile.find( query );
+ Object.extend(query, {user: username + "@" + db.getName()});
+ return db.system.profile.find(query);
};
try {
username = "jstests_profile3_user";
db.createUser({user: username, pwd: "password", roles: jsTest.basicUserRoles});
- db.auth( username, "password" );
-
+ db.auth(username, "password");
+
db.setProfilingLevel(0);
db.system.profile.drop();
- assert.eq( 0 , profileCursor().count() );
-
+ assert.eq(0, profileCursor().count());
+
db.setProfilingLevel(2);
-
+
db.createCollection(t.getName());
- t.insert( { x : 1 } );
- t.findOne( { x : 1 } );
- t.find( { x : 1 } ).count();
- t.update( { x : 1 }, {$inc:{a:1}, $set: {big: Array(128).toString()}} );
- t.update( { x : 1 }, {$inc:{a:1}} );
- t.update( { x : 0 }, {$inc:{a:1}} );
-
- profileCursor().forEach( printjson );
+ t.insert({x: 1});
+ t.findOne({x: 1});
+ t.find({x: 1}).count();
+ t.update({x: 1}, {$inc: {a: 1}, $set: {big: Array(128).toString()}});
+ t.update({x: 1}, {$inc: {a: 1}});
+ t.update({x: 0}, {$inc: {a: 1}});
- db.setProfilingLevel(0);
+ profileCursor().forEach(printjson);
+ db.setProfilingLevel(0);
- assert.eq(profileCursor({nMatched: {$exists:1}}).count(), 3);
+ assert.eq(profileCursor({nMatched: {$exists: 1}}).count(), 3);
assert.eq(profileCursor({nMatched: 1}).count(), 2);
assert.eq(profileCursor({nMatched: 0}).count(), 1);
- if ( db.serverStatus().storageEngine.name == "mmapv1" ) {
- assert.eq(profileCursor({nmoved: 1}).count(), 1 );
+ if (db.serverStatus().storageEngine.name == "mmapv1") {
+ assert.eq(profileCursor({nmoved: 1}).count(), 1);
}
db.system.profile.drop();
-}
-finally {
+} finally {
db.setProfilingLevel(0);
db = stddb;
}
-
diff --git a/jstests/core/profile4.js b/jstests/core/profile4.js
index 8dd722f4406..05c96260a88 100644
--- a/jstests/core/profile4.js
+++ b/jstests/core/profile4.js
@@ -30,7 +30,7 @@ try {
// Clear the profiling collection.
db.setProfilingLevel(0);
db.system.profile.drop();
- assert.eq(0 , profileCursor().count());
+ assert.eq(0, profileCursor().count());
// Enable profiling. It will be disabled again at the end of the test, or if the test fails.
db.setProfilingLevel(2);
@@ -93,7 +93,9 @@ try {
// For queries with a lot of stats data, the execution stats in the profile is replaced by
// the plan summary.
var orClauses = 32;
- var bigOrQuery = { $or: [] };
+ var bigOrQuery = {
+ $or: []
+ };
for (var i = 0; i < orClauses; ++i) {
var indexSpec = {};
indexSpec["a" + i] = 1;
@@ -107,15 +109,15 @@ try {
// Confirm "cursorExhausted" not set when cursor is open.
coll.drop();
coll.insert([{_id: 0}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: 4}]);
- coll.find().batchSize(2).next(); // Query performed leaving open cursor
+ coll.find().batchSize(2).next(); // Query performed leaving open cursor
lastOp = getLastOp();
assert.eq(lastOp.op, "query");
assert(!("cursorExhausted" in lastOp));
var cursor = coll.find().batchSize(2);
- cursor.next(); // Perform initial query and consume first of 2 docs returned.
- cursor.next(); // Consume second of 2 docs from initial query.
- cursor.next(); // getMore performed, leaving open cursor.
+ cursor.next(); // Perform initial query and consume first of 2 docs returned.
+ cursor.next(); // Consume second of 2 docs from initial query.
+ cursor.next(); // getMore performed, leaving open cursor.
lastOp = getLastOp();
assert.eq(lastOp.op, "getmore");
assert(!("cursorExhausted" in lastOp));
@@ -235,12 +237,9 @@ try {
assert.eq(lastOp.ndeleted, 1);
// Update with {upsert: true} as findAndModify.
- assert.eq({_id: 2, a: 2, b: 1}, coll.findAndModify({
- query: {_id: 2, a: 2},
- update: {$inc: {b: 1}},
- upsert: true,
- new: true
- }));
+ assert.eq({_id: 2, a: 2, b: 1},
+ coll.findAndModify(
+ {query: {_id: 2, a: 2}, update: {$inc: {b: 1}}, upsert: true, new: true}));
lastOp = getLastOp();
assert.eq(lastOp.op, "command");
assert.eq(lastOp.ns, coll.getFullName());
@@ -256,10 +255,7 @@ try {
assert.eq(lastOp.upsert, true);
// Idhack update as findAndModify.
- assert.eq({_id: 2, a: 2, b: 1}, coll.findAndModify({
- query: {_id: 2},
- update: {$inc: {b: 1}}
- }));
+ assert.eq({_id: 2, a: 2, b: 1}, coll.findAndModify({query: {_id: 2}, update: {$inc: {b: 1}}}));
lastOp = getLastOp();
assert.eq(lastOp.keysExamined, 1);
assert.eq(lastOp.docsExamined, 1);
@@ -267,11 +263,8 @@ try {
assert.eq(lastOp.nModified, 1);
// Update as findAndModify with projection.
- assert.eq({a: 2}, coll.findAndModify({
- query: {a: 2},
- update: {$inc: {b: 1}},
- fields: {_id: 0, a: 1}
- }));
+ assert.eq({a: 2},
+ coll.findAndModify({query: {a: 2}, update: {$inc: {b: 1}}, fields: {_id: 0, a: 1}}));
lastOp = getLastOp();
assert.eq(lastOp.op, "command");
assert.eq(lastOp.ns, coll.getFullName());
@@ -285,11 +278,7 @@ try {
assert.eq(lastOp.nModified, 1);
// Delete as findAndModify with projection.
- assert.eq({a: 2}, coll.findAndModify({
- query: {a: 2},
- remove: true,
- fields: {_id: 0, a: 1}
- }));
+ assert.eq({a: 2}, coll.findAndModify({query: {a: 2}, remove: true, fields: {_id: 0, a: 1}}));
lastOp = getLastOp();
assert.eq(lastOp.op, "command");
assert.eq(lastOp.ns, coll.getFullName());
@@ -308,7 +297,8 @@ try {
// Update
coll.update({a: 2}, {$inc: {b: 1}});
lastOp = getLastOp();
- assert.eq(lastOp.op, "update"); assert.eq(lastOp.ns, coll.getFullName());
+ assert.eq(lastOp.op, "update");
+ assert.eq(lastOp.ns, coll.getFullName());
assert.eq(lastOp.query, {a: 2});
assert.eq(lastOp.updateobj, {$inc: {b: 1}});
assert.eq(lastOp.keysExamined, 0);
@@ -331,8 +321,7 @@ try {
db.setProfilingLevel(0);
db.system.profile.drop();
-}
-finally {
+} finally {
db.setProfilingLevel(0);
db = stddb;
}
diff --git a/jstests/core/profile5.js b/jstests/core/profile5.js
index 11b4bbe7d59..d507b864906 100644
--- a/jstests/core/profile5.js
+++ b/jstests/core/profile5.js
@@ -20,7 +20,8 @@ t.update({x: {$gt: 3}}, {$set: {y: true}}, {multi: true});
printjson(t.find().toArray());
-assert.eq(1, db.system.profile.count({op: "update"}),
+assert.eq(1,
+ db.system.profile.count({op: "update"}),
"expected exactly one update op in system.profile");
var prof = db.system.profile.findOne({op: "update"});
printjson(prof);
diff --git a/jstests/core/profile_no_such_db.js b/jstests/core/profile_no_such_db.js
index e11d93ca66c..51e70f4dc5d 100644
--- a/jstests/core/profile_no_such_db.js
+++ b/jstests/core/profile_no_such_db.js
@@ -1,38 +1,40 @@
// Test that reading the profiling level doesn't create databases, but setting it does.
-(function (db) {
-'use strict';
+(function(db) {
+ 'use strict';
-function dbExists() {
- return Array.contains(db.getMongo().getDBNames(), db.getName());
-}
+ function dbExists() {
+ return Array.contains(db.getMongo().getDBNames(), db.getName());
+ }
-db = db.getSiblingDB('profile_no_such_db'); // Note: changes db argument not global var.
-assert.commandWorked(db.dropDatabase());
-assert(!dbExists());
-
-// Reading the profiling level shouldn't create the database.
-var defaultProfilingLevel = db.getProfilingLevel();
-assert(!dbExists());
-
-// This test assumes that the default profiling level hasn't been changed.
-assert.eq(defaultProfilingLevel, 0);
-
-[0,1,2].forEach(function(level) {
- jsTest.log('Testing profiling level ' + level);
-
- // Setting the profiling level creates the database.
- // Note: in storage engines other than MMAPv1 setting the profiling level to 0 puts the database
- // in a weird state where it exists internally, but doesn't show up in listDatabases, and won't
- // exist if you restart the server.
- var res = db.setProfilingLevel(level);
- assert.eq(res.was, defaultProfilingLevel);
- assert(dbExists() || level == 0);
- assert.eq(db.getProfilingLevel(), level);
-
- // Dropping the db reverts the profiling level to the default.
+ db = db.getSiblingDB('profile_no_such_db'); // Note: changes db argument not global var.
assert.commandWorked(db.dropDatabase());
- assert.eq(db.getProfilingLevel(), defaultProfilingLevel);
assert(!dbExists());
-});
+
+ // Reading the profiling level shouldn't create the database.
+ var defaultProfilingLevel = db.getProfilingLevel();
+ assert(!dbExists());
+
+ // This test assumes that the default profiling level hasn't been changed.
+ assert.eq(defaultProfilingLevel, 0);
+
+ [0, 1, 2].forEach(function(level) {
+ jsTest.log('Testing profiling level ' + level);
+
+ // Setting the profiling level creates the database.
+        // Note: in storage engines other than MMAPv1, setting the profiling level to 0 puts the
+        // database in a weird state where it exists internally, but doesn't show up in
+        // listDatabases, and won't exist if you restart the server.
+ var res = db.setProfilingLevel(level);
+ assert.eq(res.was, defaultProfilingLevel);
+ assert(dbExists() || level == 0);
+ assert.eq(db.getProfilingLevel(), level);
+
+ // Dropping the db reverts the profiling level to the default.
+ assert.commandWorked(db.dropDatabase());
+ assert.eq(db.getProfilingLevel(), defaultProfilingLevel);
+ assert(!dbExists());
+ });
}(db));
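A condensed sketch of the invariant the test above spells out, using a made-up database name.
var probe = db.getSiblingDB('jstests_profile_probe_sketch');
probe.dropDatabase();
probe.getProfilingLevel();  // reading the level must not create the database
assert(!Array.contains(probe.getMongo().getDBNames(), probe.getName()));
probe.setProfilingLevel(1);  // setting a non-zero level does create it
assert(Array.contains(probe.getMongo().getDBNames(), probe.getName()));
probe.setProfilingLevel(0);
probe.dropDatabase();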
diff --git a/jstests/core/proj_key1.js b/jstests/core/proj_key1.js
index 264c941192f..5ef6be3f51b 100644
--- a/jstests/core/proj_key1.js
+++ b/jstests/core/proj_key1.js
@@ -4,14 +4,15 @@ t.drop();
as = [];
-for ( i=0; i<10; i++ ){
- as.push( { a : i } );
- t.insert( { a : i , b : i } );
+for (i = 0; i < 10; i++) {
+ as.push({a: i});
+ t.insert({a: i, b: i});
}
-t.ensureIndex( { a : 1 } );
+t.ensureIndex({a: 1});
-// assert( t.find( {} , { a : 1 , _id : 0 } ).explain().indexOnly , "A4" ); // TODO: need to modify query optimier SERVER-2109
+// assert( t.find( {} , { a : 1 , _id : 0 } ).explain().indexOnly , "A4" ); // TODO: need to modify
+// query optimizer SERVER-2109
-assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).toArray() , "B1" );
-assert.eq( as , t.find( { a : { $gte : 0 } } , { a : 1 , _id : 0 } ).batchSize(2).toArray() , "B1" );
+assert.eq(as, t.find({a: {$gte: 0}}, {a: 1, _id: 0}).toArray(), "B1");
+assert.eq(as, t.find({a: {$gte: 0}}, {a: 1, _id: 0}).batchSize(2).toArray(), "B1");
diff --git a/jstests/core/pull.js b/jstests/core/pull.js
index 392d8bec227..d2d03e85144 100644
--- a/jstests/core/pull.js
+++ b/jstests/core/pull.js
@@ -1,33 +1,33 @@
t = db.jstests_pull;
t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 6 } } );
-assert.eq( [ 1, 3 ], t.findOne().a );
+t.save({a: [1, 2, 3]});
+t.update({}, {$pull: {a: 2}});
+t.update({}, {$pull: {a: 6}});
+assert.eq([1, 3], t.findOne().a);
t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 2 } } );
-assert.eq( [ 1, 3 ], t.findOne().a );
+t.save({a: [1, 2, 3]});
+t.update({}, {$pull: {a: 2}});
+t.update({}, {$pull: {a: 2}});
+assert.eq([1, 3], t.findOne().a);
t.drop();
-t.save( { a: [ 2 ] } );
-t.update( {}, { $pull: { a: 2 } } );
-t.update( {}, { $pull: { a: 6 } } );
-assert.eq( [], t.findOne().a );
+t.save({a: [2]});
+t.update({}, {$pull: {a: 2}});
+t.update({}, {$pull: {a: 6}});
+assert.eq([], t.findOne().a);
// SERVER-6047: $pull creates empty nested docs for dotted fields
// that don't exist.
t.drop();
-t.save({ m : 1 } );
-t.update( { m : 1 }, { $pull : { 'a.b' : [ 1 ] } } );
-assert( ('a' in t.findOne()) == false );
+t.save({m: 1});
+t.update({m: 1}, {$pull: {'a.b': [1]}});
+assert(('a' in t.findOne()) == false);
// Non-obvious bit: the implementation of non-in-place update
// might do different things depending on whether the "new" field
// comes before or after existing fields in the document.
// So for now it's worth testing that too. Sorry, future; blame the past.
-t.update( { m : 1 }, { $pull : { 'x.y' : [ 1 ] } } );
-assert( ('z' in t.findOne()) == false );
-// End SERVER-6047
+t.update({m: 1}, {$pull: {'x.y': [1]}});
+assert(('z' in t.findOne()) == false);
+// End SERVER-6047
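The SERVER-6047 checks in pull.js (and in pullall.js below) reduce to the rule that pulling from a nonexistent dotted path must not materialize empty parent documents; a minimal sketch with a made-up collection name.
var pullColl = db.jstests_pull_sketch;
pullColl.drop();
pullColl.save({m: 1});
pullColl.update({m: 1}, {$pull: {'a.b': [1]}});  // 'a' does not exist in the document
assert(!('a' in pullColl.findOne()));            // and must not be created as an empty subdocument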
diff --git a/jstests/core/pull2.js b/jstests/core/pull2.js
index b5a4f8f9870..861d5164c03 100644
--- a/jstests/core/pull2.js
+++ b/jstests/core/pull2.js
@@ -2,30 +2,32 @@
t = db.pull2;
t.drop();
-t.save( { a : [ { x : 1 } , { x : 1 , b : 2 } ] } );
-assert.eq( 2 , t.findOne().a.length , "A" );
+t.save({a: [{x: 1}, {x: 1, b: 2}]});
+assert.eq(2, t.findOne().a.length, "A");
-t.update( {} , { $pull : { a : { x : 1 } } } );
-assert.eq( 0 , t.findOne().a.length , "B" );
+t.update({}, {$pull: {a: {x: 1}}});
+assert.eq(0, t.findOne().a.length, "B");
-assert.eq( 1 , t.find().count() , "C1" );
+assert.eq(1, t.find().count(), "C1");
-t.update( {} , { $push : { a : { x : 1 } } } );
-t.update( {} , { $push : { a : { x : 1 , b : 2 } } } );
-assert.eq( 2 , t.findOne().a.length , "C" );
+t.update({}, {$push: {a: {x: 1}}});
+t.update({}, {$push: {a: {x: 1, b: 2}}});
+assert.eq(2, t.findOne().a.length, "C");
-t.update( {} , { $pullAll : { a : [ { x : 1 } ] } } );
-assert.eq( 1 , t.findOne().a.length , "D" );
+t.update({}, {$pullAll: {a: [{x: 1}]}});
+assert.eq(1, t.findOne().a.length, "D");
-t.update( {} , { $push : { a : { x : 2 , b : 2 } } } );
-t.update( {} , { $push : { a : { x : 3 , b : 2 } } } );
-t.update( {} , { $push : { a : { x : 4 , b : 2 } } } );
-assert.eq( 4 , t.findOne().a.length , "E" );
+t.update({}, {$push: {a: {x: 2, b: 2}}});
+t.update({}, {$push: {a: {x: 3, b: 2}}});
+t.update({}, {$push: {a: {x: 4, b: 2}}});
+assert.eq(4, t.findOne().a.length, "E");
-assert.eq( 1 , t.find().count() , "C2" );
-
-
-t.update( {} , { $pull : { a : { x : { $lt : 3 } } } } );
-assert.eq( 2 , t.findOne().a.length , "F" );
-assert.eq( [ 3 , 4 ] , t.findOne().a.map( function(z){ return z.x; } ) , "G" );
+assert.eq(1, t.find().count(), "C2");
+t.update({}, {$pull: {a: {x: {$lt: 3}}}});
+assert.eq(2, t.findOne().a.length, "F");
+assert.eq([3, 4],
+ t.findOne().a.map(function(z) {
+ return z.x;
+ }),
+ "G");
diff --git a/jstests/core/pull_or.js b/jstests/core/pull_or.js
index 905c7a87060..a91d88c2c18 100644
--- a/jstests/core/pull_or.js
+++ b/jstests/core/pull_or.js
@@ -2,20 +2,19 @@
t = db.pull_or;
t.drop();
-doc = { _id : 1 , a : { b : [ { x : 1 },
- { y : 'y' },
- { x : 2 },
- { z : 'z' } ] } };
+doc = {
+ _id: 1,
+ a: {b: [{x: 1}, {y: 'y'}, {x: 2}, {z: 'z'}]}
+};
-t.insert( doc );
+t.insert(doc);
-t.update({}, { $pull : { 'a.b' : { 'y' : { $exists : true } } } } );
+t.update({}, {$pull: {'a.b': {'y': {$exists: true}}}});
-assert.eq( [ { x : 1 }, { x : 2 }, { z : 'z' } ], t.findOne().a.b );
+assert.eq([{x: 1}, {x: 2}, {z: 'z'}], t.findOne().a.b);
t.drop();
-t.insert( doc );
-t.update({}, { $pull : { 'a.b' : { $or : [ { 'y' : { $exists : true } },
- { 'z' : { $exists : true } } ] } } } );
+t.insert(doc);
+t.update({}, {$pull: {'a.b': {$or: [{'y': {$exists: true}}, {'z': {$exists: true}}]}}});
-assert.eq( [ { x : 1 }, { x : 2 } ], t.findOne().a.b );
+assert.eq([{x: 1}, {x: 2}], t.findOne().a.b);
diff --git a/jstests/core/pull_remove1.js b/jstests/core/pull_remove1.js
index 90460eb2d6e..926dcbf9575 100644
--- a/jstests/core/pull_remove1.js
+++ b/jstests/core/pull_remove1.js
@@ -2,13 +2,17 @@
t = db.pull_remove1;
t.drop();
-o = { _id : 1 , a : [ 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 ] };
-t.insert( o );
+o = {
+ _id: 1,
+ a: [1, 2, 3, 4, 5, 6, 7, 8]
+};
+t.insert(o);
-assert.eq( o , t.findOne() , "A1" );
+assert.eq(o, t.findOne(), "A1");
-o.a = o.a.filter( function(z){ return z >= 6; } );
-t.update( {} , { $pull : { a : { $lt : 6 } } } );
-
-assert.eq( o.a , t.findOne().a , "A2" );
+o.a = o.a.filter(function(z) {
+ return z >= 6;
+});
+t.update({}, {$pull: {a: {$lt: 6}}});
+assert.eq(o.a, t.findOne().a, "A2");
diff --git a/jstests/core/pullall.js b/jstests/core/pullall.js
index e66e8b10d35..2925a45623f 100644
--- a/jstests/core/pullall.js
+++ b/jstests/core/pullall.js
@@ -1,31 +1,31 @@
t = db.jstests_pullall;
t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pullAll: { a: [ 3 ] } } );
-assert.eq( [ 1, 2 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [ 3 ] } } );
-assert.eq( [ 1, 2 ], t.findOne().a );
+t.save({a: [1, 2, 3]});
+t.update({}, {$pullAll: {a: [3]}});
+assert.eq([1, 2], t.findOne().a);
+t.update({}, {$pullAll: {a: [3]}});
+assert.eq([1, 2], t.findOne().a);
t.drop();
-t.save( { a: [ 1, 2, 3 ] } );
-t.update( {}, { $pullAll: { a: [ 2, 3 ] } } );
-assert.eq( [ 1 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [] } } );
-assert.eq( [ 1 ], t.findOne().a );
-t.update( {}, { $pullAll: { a: [ 1, 5 ] } } );
-assert.eq( [], t.findOne().a );
+t.save({a: [1, 2, 3]});
+t.update({}, {$pullAll: {a: [2, 3]}});
+assert.eq([1], t.findOne().a);
+t.update({}, {$pullAll: {a: []}});
+assert.eq([1], t.findOne().a);
+t.update({}, {$pullAll: {a: [1, 5]}});
+assert.eq([], t.findOne().a);
// SERVER-6047: $pullAll creates empty nested docs for dotted fields
// that don't exist.
t.drop();
-t.save({ m : 1 } );
-t.update( { m : 1 }, { $pullAll : { 'a.b' : [ 1 ] } } );
-assert( ('a' in t.findOne()) == false );
+t.save({m: 1});
+t.update({m: 1}, {$pullAll: {'a.b': [1]}});
+assert(('a' in t.findOne()) == false);
// Non-obvious bit: the implementation of non-in-place update
// might do different things depending on whether the "new" field
// comes before or after existing fields in the document.
// So for now it's worth testing that too. Sorry, future; blame the past.
-t.update( { m : 1 }, { $pullAll : { 'x.y' : [ 1 ] } } );
-assert( ('z' in t.findOne()) == false );
+t.update({m: 1}, {$pullAll: {'x.y': [1]}});
+assert(('z' in t.findOne()) == false);
// End SERVER-6047
diff --git a/jstests/core/pullall2.js b/jstests/core/pullall2.js
index c97c4e43aba..92d835c0dab 100644
--- a/jstests/core/pullall2.js
+++ b/jstests/core/pullall2.js
@@ -2,19 +2,28 @@
t = db.pullall2;
t.drop();
-o = { _id : 1 , a : [] };
-for ( i=0; i<5; i++ )
- o.a.push( { x : i , y : i } );
+o = {
+ _id: 1,
+ a: []
+};
+for (i = 0; i < 5; i++)
+ o.a.push({x: i, y: i});
-t.insert( o );
+t.insert(o);
-assert.eq( o , t.findOne() , "A" );
+assert.eq(o, t.findOne(), "A");
-t.update( {} , { $pull : { a : { x : 3 } } } );
-o.a = o.a.filter( function(z){ return z.x != 3; } );
-assert.eq( o , t.findOne() , "B" );
+t.update({}, {$pull: {a: {x: 3}}});
+o.a = o.a.filter(function(z) {
+ return z.x != 3;
+});
+assert.eq(o, t.findOne(), "B");
-t.update( {} , { $pull : { a : { x : { $in : [ 1 , 4 ] } } } } );
-o.a = o.a.filter( function(z){ return z.x != 1; } );
-o.a = o.a.filter( function(z){ return z.x != 4; } );
-assert.eq( o , t.findOne() , "C" );
+t.update({}, {$pull: {a: {x: {$in: [1, 4]}}}});
+o.a = o.a.filter(function(z) {
+ return z.x != 1;
+});
+o.a = o.a.filter(function(z) {
+ return z.x != 4;
+});
+assert.eq(o, t.findOne(), "C");
diff --git a/jstests/core/push.js b/jstests/core/push.js
index cefccbb79d4..50ff92cc2cd 100644
--- a/jstests/core/push.js
+++ b/jstests/core/push.js
@@ -3,53 +3,50 @@ var res;
t = db.push;
t.drop();
-t.save( { _id : 2 , a : [ 1 ] } );
-t.update( { _id : 2 } , { $push : { a : 2 } } );
-assert.eq( "1,2" , t.findOne().a.toString() , "A" );
-t.update( { _id : 2 } , { $push : { a : 3 } } );
-assert.eq( "1,2,3" , t.findOne().a.toString() , "B" );
+t.save({_id: 2, a: [1]});
+t.update({_id: 2}, {$push: {a: 2}});
+assert.eq("1,2", t.findOne().a.toString(), "A");
+t.update({_id: 2}, {$push: {a: 3}});
+assert.eq("1,2,3", t.findOne().a.toString(), "B");
-t.update( { _id : 2 } , { $pop : { a : 1 } } );
-assert.eq( "1,2" , t.findOne().a.toString() , "C" );
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "2" , t.findOne().a.toString() , "D" );
+t.update({_id: 2}, {$pop: {a: 1}});
+assert.eq("1,2", t.findOne().a.toString(), "C");
+t.update({_id: 2}, {$pop: {a: -1}});
+assert.eq("2", t.findOne().a.toString(), "D");
+t.update({_id: 2}, {$push: {a: 3}});
+t.update({_id: 2}, {$push: {a: 4}});
+t.update({_id: 2}, {$push: {a: 5}});
+assert.eq("2,3,4,5", t.findOne().a.toString(), "E1");
-t.update( { _id : 2 } , { $push : { a : 3 } } );
-t.update( { _id : 2 } , { $push : { a : 4 } } );
-t.update( { _id : 2 } , { $push : { a : 5 } } );
-assert.eq( "2,3,4,5" , t.findOne().a.toString() , "E1" );
+t.update({_id: 2}, {$pop: {a: -1}});
+assert.eq("3,4,5", t.findOne().a.toString(), "E2");
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "3,4,5" , t.findOne().a.toString() , "E2" );
+t.update({_id: 2}, {$pop: {a: -1}});
+assert.eq("4,5", t.findOne().a.toString(), "E3");
-t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.eq( "4,5" , t.findOne().a.toString() , "E3" );
+res = t.update({_id: 2}, {$pop: {a: -1}});
+assert.writeOK(res);
+assert.eq("5", t.findOne().a.toString(), "E4");
-res = t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.writeOK( res );
-assert.eq( "5" , t.findOne().a.toString() , "E4" );
+res = t.update({_id: 2}, {$pop: {a: -1}});
+assert.writeOK(res);
+assert.eq("", t.findOne().a.toString(), "E5");
+res = t.update({_id: 2}, {$pop: {a: -1}});
+assert.writeOK(res);
+assert.eq("", t.findOne().a.toString(), "E6");
-res = t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.writeOK( res );
-assert.eq( "" , t.findOne().a.toString() , "E5" );
+res = t.update({_id: 2}, {$pop: {a: -1}});
+assert.writeOK(res);
+assert.eq("", t.findOne().a.toString(), "E7");
-res = t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.writeOK( res );
-assert.eq( "" , t.findOne().a.toString() , "E6" );
+res = t.update({_id: 2}, {$pop: {a: 1}});
+assert.writeOK(res);
+assert.eq("", t.findOne().a.toString(), "E8");
-res = t.update( { _id : 2 } , { $pop : { a : -1 } } );
-assert.writeOK( res );
-assert.eq( "" , t.findOne().a.toString() , "E7" );
-
-res = t.update( { _id : 2 } , { $pop : { a : 1 } } );
-assert.writeOK( res );
-assert.eq( "" , t.findOne().a.toString() , "E8" );
-
-res = t.update( { _id : 2 } , { $pop : { b : -1 } } );
-assert.writeOK( res );
-
-res = t.update( { _id : 2 } , { $pop : { b : 1 } } );
-assert.writeOK( res );
+res = t.update({_id: 2}, {$pop: {b: -1}});
+assert.writeOK(res);
+res = t.update({_id: 2}, {$pop: {b: 1}});
+assert.writeOK(res);
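A short sketch of the $push/$pop direction semantics the push.js assertions above rely on (shell db assumed, collection name invented):

var c3 = db.push_pop_sketch;
c3.drop();
c3.save({_id: 1, a: [1, 2, 3]});
c3.update({_id: 1}, {$push: {a: 4}});  // append: a becomes [1, 2, 3, 4]
c3.update({_id: 1}, {$pop: {a: 1}});   // 1 drops the last element:  [1, 2, 3]
c3.update({_id: 1}, {$pop: {a: -1}});  // -1 drops the first element: [2, 3]
assert.eq([2, 3], c3.findOne().a);
assert.writeOK(c3.update({_id: 1}, {$pop: {b: 1}}));  // popping a nonexistent field still reports success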
diff --git a/jstests/core/push2.js b/jstests/core/push2.js
index 5161cc373b6..431e86f596a 100644
--- a/jstests/core/push2.js
+++ b/jstests/core/push2.js
@@ -2,20 +2,20 @@
t = db.push2;
t.drop();
-t.save( { _id : 1 , a : [] } );
+t.save({_id: 1, a: []});
s = new Array(700000).toString();
gotError = null;
-for ( x=0; x<100; x++ ){
- print (x + " pushes");
- var res = t.update( {} , { $push : { a : s } } );
+for (x = 0; x < 100; x++) {
+ print(x + " pushes");
+ var res = t.update({}, {$push: {a: s}});
gotError = res.hasWriteError();
- if ( gotError )
+ if (gotError)
break;
}
-assert( gotError , "should have gotten error" );
+assert(gotError, "should have gotten error");
t.drop();
diff --git a/jstests/core/push_sort.js b/jstests/core/push_sort.js
index 9bbf5447ae6..0e407d969ba 100644
--- a/jstests/core/push_sort.js
+++ b/jstests/core/push_sort.js
@@ -11,93 +11,94 @@ t.drop();
//
// $slice amount is too large to kick in.
-t.save( { _id: 1, x: [ {a:1}, {a:2} ] } );
-t.update( {_id:1}, { $push: { x: { $each: [ {a:3} ], $slice:-5, $sort: {a:1} } } } );
-assert.eq( [{a:1}, {a:2}, {a:3}] , t.findOne( {_id:1} ).x );
+t.save({_id: 1, x: [{a: 1}, {a: 2}]});
+t.update({_id: 1}, {$push: {x: {$each: [{a: 3}], $slice: -5, $sort: {a: 1}}}});
+assert.eq([{a: 1}, {a: 2}, {a: 3}], t.findOne({_id: 1}).x);
// $slice amount kicks in using values of both the base doc and of the $each clause.
-t.save({ _id: 2, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:2}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:1} } } } );
-assert.eq( [{a:2}, {a:3}], t.findOne( {_id:2} ).x );
+t.save({_id: 2, x: [{a: 1}, {a: 3}]});
+t.update({_id: 2}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {a: 1}}}});
+assert.eq([{a: 2}, {a: 3}], t.findOne({_id: 2}).x);
// $sort is descending and $slice is too large to kick in.
-t.save({ _id: 3, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:3}, { $push: { x: { $each: [ {a:2} ], $slice:-5, $sort: {a:-1} } } } );
-assert.eq( [{a:3}, {a:2}, {a:1}], t.findOne( {_id:3} ).x );
+t.save({_id: 3, x: [{a: 1}, {a: 3}]});
+t.update({_id: 3}, {$push: {x: {$each: [{a: 2}], $slice: -5, $sort: {a: -1}}}});
+assert.eq([{a: 3}, {a: 2}, {a: 1}], t.findOne({_id: 3}).x);
// $sort is descending and $slice kicks in using values of both the base doc and of
// the $each clause.
-t.save({ _id: 4, x: [ {a:1}, {a:3} ] } );
-t.update( {_id:4}, { $push: { x: { $each: [ {a:2} ], $slice:-2, $sort: {a:-1} } } } );
-assert.eq( [{a:2}, {a:1}], t.findOne( {_id:4} ).x );
+t.save({_id: 4, x: [{a: 1}, {a: 3}]});
+t.update({_id: 4}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {a: -1}}}});
+assert.eq([{a: 2}, {a: 1}], t.findOne({_id: 4}).x);
// $sort keyed on only a portion of the array's element objects and $slice kicking in
// using values of both the base doc and of the $each clause.
-t.save({ _id: 5, x: [ {a:1,b:2}, {a:3,b:1} ] } );
-t.update( {_id:5}, { $push: { x: { $each: [ {a:2,b:3} ], $slice:-2, $sort: {b:1} } } } );
-assert.eq( [{a:1, b:2}, {a:2,b:3}], t.findOne( {_id:5} ).x );
+t.save({_id: 5, x: [{a: 1, b: 2}, {a: 3, b: 1}]});
+t.update({_id: 5}, {$push: {x: {$each: [{a: 2, b: 3}], $slice: -2, $sort: {b: 1}}}});
+assert.eq([{a: 1, b: 2}, {a: 2, b: 3}], t.findOne({_id: 5}).x);
// $sort over an array of nested objects and $slice too large to kick in.
-t.save({ _id: 6, x: [ {a:{b:2}}, {a:{b:1}} ] } );
-t.update( {_id:6}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-5, $sort: {'a.b':1} } } } );
-assert.eq( [{a:{b:1}}, {a:{b:2}}, {a:{b:3}}], t.findOne( {_id:6} ).x );
+t.save({_id: 6, x: [{a: {b: 2}}, {a: {b: 1}}]});
+t.update({_id: 6}, {$push: {x: {$each: [{a: {b: 3}}], $slice: -5, $sort: {'a.b': 1}}}});
+assert.eq([{a: {b: 1}}, {a: {b: 2}}, {a: {b: 3}}], t.findOne({_id: 6}).x);
// $sort over an array of nested objects and $slice kicking in using values of both the
// base doc and of the $each clause.
-t.save({ _id: 7, x: [ {a:{b:2}}, {a:{b:1}} ] } );
-t.update( {_id:7}, { $push: { x: { $each: [ {a:{b:3}} ], $slice:-2, $sort: {'a.b':1} } } } );
-assert.eq( [{a:{b:2}}, {a:{b:3}}], t.findOne( {_id:7} ).x );
+t.save({_id: 7, x: [{a: {b: 2}}, {a: {b: 1}}]});
+t.update({_id: 7}, {$push: {x: {$each: [{a: {b: 3}}], $slice: -2, $sort: {'a.b': 1}}}});
+assert.eq([{a: {b: 2}}, {a: {b: 3}}], t.findOne({_id: 7}).x);
//
// Invalid Cases
//
// $push with $sort should not push a "$sort" field
-var doc8 = {_id: 8, x: [{a:1}, {a:2}]};
+var doc8 = {
+ _id: 8,
+ x: [{a: 1}, {a: 2}]
+};
t.save(doc8);
var res = t.update({_id: 8}, {$push: {x: {$sort: {a: -1}}}});
assert.writeError(res);
-assert.docEq(t.findOne({_id:8}), doc8); //ensure doc was not changed
+assert.docEq(t.findOne({_id: 8}), doc8); // ensure doc was not changed
-t.save({ _id: 100, x: [ {a:1} ] } );
+t.save({_id: 100, x: [{a: 1}]});
// For now, elements of the $each vector need to be objects. In here, '2' is an invalid $each.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [ 2 ], $slice:-2, $sort:{a:1} } } } ) );
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [2], $slice: -2, $sort: {a: 1}}}}));
// For the same reason as above, '1' is an invalid $each element.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2},1], $slice:-2, $sort:{a:1} } } }));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}, 1], $slice: -2, $sort: {a: 1}}}}));
// The sort key pattern cannot be empty.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort:{} } } } ) );
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {}}}}));
// For now, we do not support positive $slice values (i.e., trimming from the array's front).
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:2, $sort: {a:1} } } }));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: 2, $sort: {a: 1}}}}));
// A $slice cannot be a fractional value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2.1, $sort: {a:1} } }}));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2.1, $sort: {a: 1}}}}));
// The sort key pattern's value must be either 1 or -1. In here, {a:-2} is an invalid value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:-2} } } }));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {a: -2}}}}));
// For now, we are not supporting sorting of basic elements (non-object, non-arrays). In here,
// the $sort clause would need to have a key pattern value rather than 1.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: 1 } } } ) );
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: 1}}}));
// The key pattern 'a.' is an invalid value for $sort.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'a.':1} }}}));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {'a.': 1}}}}));
// An empty key pattern is not a valid $sort value.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {'':1} } } }));
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {'': 1}}}}));
// If a $slice is used, the only other clause accepted alongside $each is $sort. In here, $xxx
// is not a valid clause.
-assert.throws( t.update( {_id:100}, { $push: { x: { $each: [{a:2}], $slice:-2, $xxx: {s:1} } } } ) );
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $xxx: {s: 1}}}}));
t.remove({});
// Ensure that existing values are validated in the array as objects during a $sort with $each,
// not only the elements in the $each array.
-t.save({ _id: 100, x: [ 1, "foo" ] } );
-assert.throws(t.update(
- {_id: 100},
- { $push: { x: { $each: [{a:2}], $slice:-2, $sort: {a:1} } } } ) );
+t.save({_id: 100, x: [1, "foo"]});
+assert.throws(t.update({_id: 100}, {$push: {x: {$each: [{a: 2}], $slice: -2, $sort: {a: 1}}}}));
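The valid cases above all follow the same pipeline, summarized in this sketch (shell db assumed, collection name invented): the $each elements are appended, the combined array is sorted by the $sort pattern, and a negative $slice keeps only the trailing elements.

var c4 = db.push_sort_sketch;
c4.drop();
c4.save({_id: 1, x: [{a: 1}, {a: 3}]});
c4.update({_id: 1}, {$push: {x: {$each: [{a: 2}], $sort: {a: 1}, $slice: -2}}});
// sorted by 'a' to [{a:1},{a:2},{a:3}], then trimmed to the last 2 elements
assert.eq([{a: 2}, {a: 3}], c4.findOne({_id: 1}).x);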
diff --git a/jstests/core/pushall.js b/jstests/core/pushall.js
index 736d3a3fc14..e9d388e6f6f 100644
--- a/jstests/core/pushall.js
+++ b/jstests/core/pushall.js
@@ -1,18 +1,18 @@
t = db.jstests_pushall;
t.drop();
-t.save( { _id: 1, a: [ 1, 2, 3 ] } );
-t.update( { _id: 1 }, { $pushAll: { a: [ 4 ] } } );
-assert.eq( [ 1, 2, 3, 4 ], t.findOne( { _id: 1 } ).a );
-t.update( {}, { $pushAll: { a: [ 4 ] } } );
-assert.eq( [ 1, 2, 3, 4, 4 ], t.findOne( { _id: 1 } ).a );
+t.save({_id: 1, a: [1, 2, 3]});
+t.update({_id: 1}, {$pushAll: {a: [4]}});
+assert.eq([1, 2, 3, 4], t.findOne({_id: 1}).a);
+t.update({}, {$pushAll: {a: [4]}});
+assert.eq([1, 2, 3, 4, 4], t.findOne({_id: 1}).a);
-t.save( { _id: 2, a: [ 1, 2, 3 ] } );
-t.update( { _id: 2 }, { $pushAll: { a: [ 4, 5 ] } } );
-assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne( { _id: 2 } ).a );
-t.update( { _id: 2 }, { $pushAll: { a: [] } } );
-assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne( { _id: 2 } ).a );
+t.save({_id: 2, a: [1, 2, 3]});
+t.update({_id: 2}, {$pushAll: {a: [4, 5]}});
+assert.eq([1, 2, 3, 4, 5], t.findOne({_id: 2}).a);
+t.update({_id: 2}, {$pushAll: {a: []}});
+assert.eq([1, 2, 3, 4, 5], t.findOne({_id: 2}).a);
-t.save( { _id: 3 } );
-t.update( { _id: 3 }, { $pushAll: { a: [ 1, 2 ] } } );
-assert.eq( [ 1, 2 ], t.findOne( { _id: 3 } ).a );
+t.save({_id: 3});
+t.update({_id: 3}, {$pushAll: {a: [1, 2]}});
+assert.eq([1, 2], t.findOne({_id: 3}).a);
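A quick sketch of $pushAll, the older spelling of $push with $each that pushall.js covers (shell db assumed, collection name invented):

var c5 = db.pushall_sketch;
c5.drop();
c5.save({_id: 1, a: [1]});
c5.update({_id: 1}, {$pushAll: {a: [2, 3]}});  // appends each element: [1, 2, 3]
c5.update({_id: 1}, {$pushAll: {a: []}});      // an empty list is a no-op
assert.eq([1, 2, 3], c5.findOne().a);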
diff --git a/jstests/core/query1.js b/jstests/core/query1.js
index 78d70eb55fb..9936ce18e92 100644
--- a/jstests/core/query1.js
+++ b/jstests/core/query1.js
@@ -2,25 +2,23 @@
t = db.query1;
t.drop();
-t.save( { num : 1 } );
-t.save( { num : 3 } );
-t.save( { num : 4 } );
+t.save({num: 1});
+t.save({num: 3});
+t.save({num: 4});
num = 0;
total = 0;
-t.find().forEach(
- function(z){
- num++;
- total += z.num;
- }
-);
+t.find().forEach(function(z) {
+ num++;
+ total += z.num;
+});
-assert.eq( num , 3 , "num" );
-assert.eq( total , 8 , "total" );
+assert.eq(num, 3, "num");
+assert.eq(total, 8, "total");
-assert.eq( 3 , t.find().comment("this is a test").itcount() , "B1" );
-assert.eq( 3 , t.find().comment("this is a test").count() , "B2" );
+assert.eq(3, t.find().comment("this is a test").itcount(), "B1");
+assert.eq(3, t.find().comment("this is a test").count(), "B2");
-assert.eq( 3 , t.find().comment("yo ho ho").itcount() , "C1" );
-assert.eq( 3 , t.find().comment("this is a test").count() , "C2" );
+assert.eq(3, t.find().comment("yo ho ho").itcount(), "C1");
+assert.eq(3, t.find().comment("this is a test").count(), "C2");
diff --git a/jstests/core/queryoptimizer3.js b/jstests/core/queryoptimizer3.js
index 8cace47dc66..4bc3754ff7a 100644
--- a/jstests/core/queryoptimizer3.js
+++ b/jstests/core/queryoptimizer3.js
@@ -3,35 +3,31 @@
t = db.jstests_queryoptimizer3;
t.drop();
-p = startParallelShell( 'for( i = 0; i < 400; ++i ) { sleep( 50 ); db.jstests_queryoptimizer3.drop(); }' );
+p = startParallelShell(
+ 'for( i = 0; i < 400; ++i ) { sleep( 50 ); db.jstests_queryoptimizer3.drop(); }');
-for( i = 0; i < 100; ++i ) {
+for (i = 0; i < 100; ++i) {
t.drop();
- t.ensureIndex({a:1});
- t.ensureIndex({b:1});
- for( j = 0; j < 100; ++j ) {
- t.save({a:j,b:j});
+ t.ensureIndex({a: 1});
+ t.ensureIndex({b: 1});
+ for (j = 0; j < 100; ++j) {
+ t.save({a: j, b: j});
}
try {
m = i % 5;
- if ( m == 0 ) {
- t.count({a:{$gte:0},b:{$gte:0}});
+ if (m == 0) {
+ t.count({a: {$gte: 0}, b: {$gte: 0}});
+ } else if (m == 1) {
+ t.find({a: {$gte: 0}, b: {$gte: 0}}).itcount();
+ } else if (m == 2) {
+ t.remove({a: {$gte: 0}, b: {$gte: 0}});
+ } else if (m == 3) {
+ t.update({a: {$gte: 0}, b: {$gte: 0}}, {});
+ } else if (m == 4) {
+ t.distinct('x', {a: {$gte: 0}, b: {$gte: 0}});
}
- else if ( m == 1 ) {
- t.find({a:{$gte:0},b:{$gte:0}}).itcount();
- }
- else if ( m == 2 ) {
- t.remove({a:{$gte:0},b:{$gte:0}});
- }
- else if ( m == 3 ) {
- t.update({a:{$gte:0},b:{$gte:0}},{});
- }
- else if ( m == 4 ) {
- t.distinct('x',{a:{$gte:0},b:{$gte:0}});
- }
- }
- catch (e) {
+ } catch (e) {
print("Op killed during yield: " + e.message);
}
}
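The test above leans on the shell helper startParallelShell; a minimal sketch of how that helper is used, with an invented collection name:

// startParallelShell runs the given code in a second shell and returns a join function
// that blocks until that shell exits.
var join = startParallelShell('db.qo3_sketch.insert({x: 1});');
join();
assert.eq(1, db.qo3_sketch.count());
db.qo3_sketch.drop();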
diff --git a/jstests/core/queryoptimizer6.js b/jstests/core/queryoptimizer6.js
index 32efccbdb0b..8e00772aa4e 100644
--- a/jstests/core/queryoptimizer6.js
+++ b/jstests/core/queryoptimizer6.js
@@ -3,7 +3,7 @@
t = db.jstests_queryoptimizer6;
t.drop();
-t.save( {a:1} );
+t.save({a: 1});
// There is a bug in the 2.4.x indexing where the first query below returns 0 results with this
// index, but 1 result without it.
@@ -11,6 +11,6 @@ t.save( {a:1} );
// t.ensureIndex( {b:1}, {sparse:true} );
// The sparse index will be used, and recorded for this query pattern.
-assert.eq( 1, t.find( {a:1,b:{$ne:1}} ).itcount() );
+assert.eq(1, t.find({a: 1, b: {$ne: 1}}).itcount());
// The query pattern should be different, and the sparse index should not be used.
-assert.eq( 1, t.find( {a:1} ).itcount() );
+assert.eq(1, t.find({a: 1}).itcount());
diff --git a/jstests/core/queryoptimizera.js b/jstests/core/queryoptimizera.js
index d705bf580ea..80036cd78c0 100644
--- a/jstests/core/queryoptimizera.js
+++ b/jstests/core/queryoptimizera.js
@@ -2,13 +2,13 @@
// constraint is printed at appropriate times. SERVER-5353
function numWarnings() {
- logs = db.adminCommand( { getLog:"global" } ).log;
+ logs = db.adminCommand({getLog: "global"}).log;
ret = 0;
- logs.forEach( function( x ) {
- if ( x.match( warningMatchRegexp ) ) {
- ++ret;
- }
- } );
+ logs.forEach(function(x) {
+ if (x.match(warningMatchRegexp)) {
+ ++ret;
+ }
+ });
return ret;
}
@@ -17,30 +17,30 @@ collectionNameIndex = 0;
// Generate a collection name not already present in the log.
do {
testCollectionName = 'jstests_queryoptimizera__' + collectionNameIndex++;
- warningMatchString = 'unindexed _id query on capped collection.*collection: test.' +
- testCollectionName;
- warningMatchRegexp = new RegExp( warningMatchString );
-
-} while( numWarnings() > 0 );
+ warningMatchString =
+ 'unindexed _id query on capped collection.*collection: test.' + testCollectionName;
+ warningMatchRegexp = new RegExp(warningMatchString);
-t = db[ testCollectionName ];
+} while (numWarnings() > 0);
+
+t = db[testCollectionName];
t.drop();
notCappedCollectionName = testCollectionName + '_notCapped';
-notCapped = db[ notCappedCollectionName ];
+notCapped = db[notCappedCollectionName];
notCapped.drop();
-db.createCollection( testCollectionName, { capped:true, size:1000 } );
-db.createCollection( notCappedCollectionName, { autoIndexId:false } );
+db.createCollection(testCollectionName, {capped: true, size: 1000});
+db.createCollection(notCappedCollectionName, {autoIndexId: false});
-t.insert( {} );
-notCapped.insert( {} );
+t.insert({});
+notCapped.insert({});
oldNumWarnings = 0;
function assertNoNewWarnings() {
- assert.eq( oldNumWarnings, numWarnings() );
+ assert.eq(oldNumWarnings, numWarnings());
}
function assertNewWarning() {
@@ -48,45 +48,45 @@ function assertNewWarning() {
// Ensure that newNumWarnings > oldNumWarnings. It's not safe to test that oldNumWarnings + 1
// == newNumWarnings, because a (simulated) page fault exception may cause multiple messages to
// be logged instead of only one.
- assert.lt( oldNumWarnings, newNumWarnings );
+ assert.lt(oldNumWarnings, newNumWarnings);
oldNumWarnings = newNumWarnings;
}
// Simple _id query
-t.find( { _id:0 } ).itcount();
+t.find({_id: 0}).itcount();
assertNoNewWarnings();
// Simple _id query without an _id index, on a non capped collection.
-notCapped.find( { _id:0 } ).itcount();
+notCapped.find({_id: 0}).itcount();
assertNoNewWarnings();
// A multi field query, including _id.
-t.find( { _id:0, a:0 } ).itcount();
+t.find({_id: 0, a: 0}).itcount();
assertNoNewWarnings();
// An unsatisfiable query.
-t.find( { _id:0, a:{$in:[]} } ).itcount();
+t.find({_id: 0, a: {$in: []}}).itcount();
assertNoNewWarnings();
// An hinted query.
-t.find( { _id:0 } ).hint( { $natural:1 } ).itcount();
+t.find({_id: 0}).hint({$natural: 1}).itcount();
assertNoNewWarnings();
// Retry a multi field query.
-t.find( { _id:0, a:0 } ).itcount();
+t.find({_id: 0, a: 0}).itcount();
assertNoNewWarnings();
// Warnings should not be printed when an index is added on _id.
-t.ensureIndex( { _id:1 } );
+t.ensureIndex({_id: 1});
-t.find( { _id:0 } ).itcount();
+t.find({_id: 0}).itcount();
assertNoNewWarnings();
-t.find( { _id:0, a:0 } ).itcount();
+t.find({_id: 0, a: 0}).itcount();
assertNoNewWarnings();
-t.find( { _id:0, a:0 } ).itcount();
+t.find({_id: 0, a: 0}).itcount();
assertNoNewWarnings();
-t.drop(); // cleanup
+t.drop(); // cleanup
notCapped.drop(); \ No newline at end of file
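A condensed sketch of the log-scanning pattern used by numWarnings() above, assuming access to the admin getLog command:

var logLines = db.adminCommand({getLog: "global"}).log;  // in-memory log as an array of strings
var hits = 0;
logLines.forEach(function(line) {
    if (/unindexed _id query on capped collection/.test(line)) {
        ++hits;
    }
});
print("capped-collection warnings so far: " + hits);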
diff --git a/jstests/core/read_after_optime.js b/jstests/core/read_after_optime.js
index 4f8d601facf..08275791bde 100644
--- a/jstests/core/read_after_optime.js
+++ b/jstests/core/read_after_optime.js
@@ -1,22 +1,16 @@
// Test that attempting to read after optime fails if replication is not enabled.
(function() {
-"use strict";
+ "use strict";
-var currentTime = new Date();
+ var currentTime = new Date();
-var futureOpTime = new Timestamp((currentTime / 1000 + 3600), 0);
+ var futureOpTime = new Timestamp((currentTime / 1000 + 3600), 0);
-var res = assert.commandFailed(db.runCommand({
- find: 'user',
- filter: { x: 1 },
- readConcern: {
- afterOpTime: { ts: futureOpTime, t: 0 }
- }
-}));
+ var res = assert.commandFailed(db.runCommand(
+ {find: 'user', filter: {x: 1}, readConcern: {afterOpTime: {ts: futureOpTime, t: 0}}}));
-assert.eq(123, res.code); // ErrorCodes::NotAReplicaSet
-assert.eq(null, res.waitedMS);
+ assert.eq(123, res.code); // ErrorCodes::NotAReplicaSet
+ assert.eq(null, res.waitedMS);
})();
-
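A sketch of what the test above asserts, assuming a standalone mongod with no replication: a readConcern containing afterOpTime cannot be satisfied, so the command fails with code 123.

var future = new Timestamp(Math.floor(Date.now() / 1000) + 3600, 0);
var r = db.runCommand({find: 'user', filter: {x: 1}, readConcern: {afterOpTime: {ts: future, t: 0}}});
assert.eq(0, r.ok);
assert.eq(123, r.code);  // ErrorCodes::NotAReplicaSet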
diff --git a/jstests/core/recursion.js b/jstests/core/recursion.js
index dc7b51be845..926250be20d 100644
--- a/jstests/core/recursion.js
+++ b/jstests/core/recursion.js
@@ -1,7 +1,7 @@
// Basic tests for a form of stack recursion that's been shown to cause C++
// side stack overflows in the past. See SERVER-19614.
-(function () {
+(function() {
"use strict";
db.recursion.drop();
@@ -14,7 +14,7 @@
// Make sure db.eval doesn't blow up
function dbEvalRecursion() {
- db.eval(function () {
+ db.eval(function() {
function recursion() {
recursion.apply();
}
@@ -25,17 +25,17 @@
// Make sure mapReduce doesn't blow up
function mapReduceRecursion() {
- db.recursion.mapReduce(function(){
- (function recursion(){
- recursion.apply();
- })();
- }, function(){
- }, {
- out: 'inline'
- });
+ db.recursion.mapReduce(
+ function() {
+ (function recursion() {
+ recursion.apply();
+ })();
+ },
+ function() {},
+ {out: 'inline'});
}
db.recursion.insert({});
- assert.commandFailedWithCode(
- assert.throws(mapReduceRecursion), ErrorCodes.JSInterpreterFailure);
+ assert.commandFailedWithCode(assert.throws(mapReduceRecursion),
+ ErrorCodes.JSInterpreterFailure);
}());
diff --git a/jstests/core/ref.js b/jstests/core/ref.js
index f519d796bad..02c4cb92a07 100644
--- a/jstests/core/ref.js
+++ b/jstests/core/ref.js
@@ -1,29 +1,32 @@
-// to run:
+// to run:
// ./mongo jstests/ref.js
db.otherthings.drop();
db.things.drop();
-var other = { s : "other thing", n : 1};
+var other = {
+ s: "other thing",
+ n: 1
+};
db.otherthings.save(other);
-db.things.save( { name : "abc" } );
+db.things.save({name: "abc"});
x = db.things.findOne();
-x.o = new DBPointer( "otherthings" , other._id );
+x.o = new DBPointer("otherthings", other._id);
db.things.save(x);
-assert( db.things.findOne().o.fetch().n == 1, "dbref broken 2" );
+assert(db.things.findOne().o.fetch().n == 1, "dbref broken 2");
other.n++;
db.otherthings.save(other);
-assert( db.things.findOne().o.fetch().n == 2, "dbrefs broken" );
+assert(db.things.findOne().o.fetch().n == 2, "dbrefs broken");
db.getSiblingDB("otherdb").dropDatabase();
var objid = new ObjectId();
-db.getSiblingDB("otherdb").getCollection("othercoll").insert({_id:objid, field:"value"});
-var subdoc = db.getSiblingDB("otherdb").getCollection("othercoll").findOne({_id:objid});
+db.getSiblingDB("otherdb").getCollection("othercoll").insert({_id: objid, field: "value"});
+var subdoc = db.getSiblingDB("otherdb").getCollection("othercoll").findOne({_id: objid});
db.mycoll.drop();
-db.mycoll.insert({_id:"asdf", asdf:new DBRef("othercoll", objid, "otherdb")});
-var doc = db.mycoll.findOne({_id:"asdf"}, {_id:0, asdf:1});
+db.mycoll.insert({_id: "asdf", asdf: new DBRef("othercoll", objid, "otherdb")});
+var doc = db.mycoll.findOne({_id: "asdf"}, {_id: 0, asdf: 1});
assert.eq(tojson(doc.asdf.fetch()), tojson(subdoc), "otherdb dbref");
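A self-contained sketch of the DBRef resolution exercised above (shell db assumed, collection names invented); fetch() is the shell-side helper that issues the second query:

var targets = db.ref_sketch_targets;
targets.drop();
targets.save({_id: 1, n: 42});
db.ref_sketch.drop();
db.ref_sketch.insert({link: new DBRef("ref_sketch_targets", 1)});
assert.eq(42, db.ref_sketch.findOne().link.fetch().n);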
diff --git a/jstests/core/ref2.js b/jstests/core/ref2.js
index 29640cd5da0..d0c21da257f 100644
--- a/jstests/core/ref2.js
+++ b/jstests/core/ref2.js
@@ -2,13 +2,18 @@
t = db.ref2;
t.drop();
-a = { $ref : "foo" , $id : 1 };
-b = { $ref : "foo" , $id : 2 };
+a = {
+ $ref: "foo",
+ $id: 1
+};
+b = {
+ $ref: "foo",
+ $id: 2
+};
+t.save({name: "a", r: a});
+t.save({name: "b", r: b});
-t.save( { name : "a" , r : a } );
-t.save( { name : "b" , r : b } );
-
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { r : a } ).count() , "B" );
-assert.eq( 1 , t.find( { r : b } ).count() , "C" );
+assert.eq(2, t.find().count(), "A");
+assert.eq(1, t.find({r: a}).count(), "B");
+assert.eq(1, t.find({r: b}).count(), "C");
diff --git a/jstests/core/ref3.js b/jstests/core/ref3.js
index 14037ee4cc8..929e4152daf 100644
--- a/jstests/core/ref3.js
+++ b/jstests/core/ref3.js
@@ -1,19 +1,22 @@
-// to run:
+// to run:
// ./mongo jstests/ref3.js
db.otherthings3.drop();
db.things3.drop();
-var other = { s : "other thing", n : 1};
+var other = {
+ s: "other thing",
+ n: 1
+};
db.otherthings3.save(other);
-db.things3.save( { name : "abc" } );
+db.things3.save({name: "abc"});
x = db.things3.findOne();
-x.o = new DBRef( "otherthings3" , other._id );
+x.o = new DBRef("otherthings3", other._id);
db.things3.save(x);
-assert( db.things3.findOne().o.fetch().n == 1, "dbref broken 2" );
+assert(db.things3.findOne().o.fetch().n == 1, "dbref broken 2");
other.n++;
db.otherthings3.save(other);
-assert( db.things3.findOne().o.fetch().n == 2, "dbrefs broken" );
+assert(db.things3.findOne().o.fetch().n == 2, "dbrefs broken");
diff --git a/jstests/core/ref4.js b/jstests/core/ref4.js
index 1c105ef2795..07796d1e96a 100644
--- a/jstests/core/ref4.js
+++ b/jstests/core/ref4.js
@@ -5,16 +5,18 @@ b = db.ref4b;
a.drop();
b.drop();
-var other = { s : "other thing", n : 17 };
+var other = {
+ s: "other thing",
+ n: 17
+};
b.save(other);
-a.save( { name : "abc" , others : [ new DBRef( "ref4b" , other._id ) , new DBPointer( "ref4b" , other._id ) ] } );
-assert( a.findOne().others[0].fetch().n == 17 , "dbref broken 1" );
+a.save({name: "abc", others: [new DBRef("ref4b", other._id), new DBPointer("ref4b", other._id)]});
+assert(a.findOne().others[0].fetch().n == 17, "dbref broken 1");
-x = Array.fetchRefs( a.findOne().others );
-assert.eq( 2 , x.length , "A" );
-assert.eq( 17 , x[0].n , "B" );
-assert.eq( 17 , x[1].n , "C" );
+x = Array.fetchRefs(a.findOne().others);
+assert.eq(2, x.length, "A");
+assert.eq(17, x[0].n, "B");
+assert.eq(17, x[1].n, "C");
-
-assert.eq( 0 , Array.fetchRefs( a.findOne().others , "z" ).length , "D" );
+assert.eq(0, Array.fetchRefs(a.findOne().others, "z").length, "D");
diff --git a/jstests/core/regex.js b/jstests/core/regex.js
index f431d506ea6..235c1936885 100644
--- a/jstests/core/regex.js
+++ b/jstests/core/regex.js
@@ -1,24 +1,24 @@
t = db.jstests_regex;
t.drop();
-t.save( { a: "bcd" } );
-assert.eq( 1, t.count( { a: /b/ } ) , "A" );
-assert.eq( 1, t.count( { a: /bc/ } ) , "B" );
-assert.eq( 1, t.count( { a: /bcd/ } ) , "C" );
-assert.eq( 0, t.count( { a: /bcde/ } ) , "D" );
+t.save({a: "bcd"});
+assert.eq(1, t.count({a: /b/}), "A");
+assert.eq(1, t.count({a: /bc/}), "B");
+assert.eq(1, t.count({a: /bcd/}), "C");
+assert.eq(0, t.count({a: /bcde/}), "D");
t.drop();
-t.save( { a: { b: "cde" } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "E" );
+t.save({a: {b: "cde"}});
+assert.eq(1, t.count({'a.b': /de/}), "E");
t.drop();
-t.save( { a: { b: [ "cde" ] } } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "F" );
+t.save({a: {b: ["cde"]}});
+assert.eq(1, t.count({'a.b': /de/}), "F");
t.drop();
-t.save( { a: [ { b: "cde" } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "G" );
+t.save({a: [{b: "cde"}]});
+assert.eq(1, t.count({'a.b': /de/}), "G");
t.drop();
-t.save( { a: [ { b: [ "cde" ] } ] } );
-assert.eq( 1, t.count( { 'a.b': /de/ } ) , "H" );
+t.save({a: [{b: ["cde"]}]});
+assert.eq(1, t.count({'a.b': /de/}), "H");
diff --git a/jstests/core/regex2.js b/jstests/core/regex2.js
index 51e24ae420a..80dec55f184 100644
--- a/jstests/core/regex2.js
+++ b/jstests/core/regex2.js
@@ -2,69 +2,68 @@
t = db.regex2;
t.drop();
-t.save( { a : "test" } );
-t.save( { a : "Test" } );
-
-assert.eq( 2 , t.find().count() , "A" );
-assert.eq( 1 , t.find( { a : "Test" } ).count() , "B" );
-assert.eq( 1 , t.find( { a : "test" } ).count() , "C" );
-assert.eq( 1 , t.find( { a : /Test/ } ).count() , "D" );
-assert.eq( 1 , t.find( { a : /test/ } ).count() , "E" );
-assert.eq( 2 , t.find( { a : /test/i } ).count() , "F" );
+t.save({a: "test"});
+t.save({a: "Test"});
+assert.eq(2, t.find().count(), "A");
+assert.eq(1, t.find({a: "Test"}).count(), "B");
+assert.eq(1, t.find({a: "test"}).count(), "C");
+assert.eq(1, t.find({a: /Test/}).count(), "D");
+assert.eq(1, t.find({a: /test/}).count(), "E");
+assert.eq(2, t.find({a: /test/i}).count(), "F");
t.drop();
a = "\u0442\u0435\u0441\u0442";
b = "\u0422\u0435\u0441\u0442";
-assert( ( new RegExp( a ) ).test( a ) , "B 1" );
-assert( ! ( new RegExp( a ) ).test( b ) , "B 2" );
-assert( ( new RegExp( a , "i" ) ).test( b ) , "B 3 " );
-
-t.save( { a : a } );
-t.save( { a : b } );
+assert((new RegExp(a)).test(a), "B 1");
+assert(!(new RegExp(a)).test(b), "B 2");
+assert((new RegExp(a, "i")).test(b), "B 3 ");
+t.save({a: a});
+t.save({a: b});
-assert.eq( 2 , t.find().count() , "C A" );
-assert.eq( 1 , t.find( { a : a } ).count() , "C B" );
-assert.eq( 1 , t.find( { a : b } ).count() , "C C" );
-assert.eq( 1 , t.find( { a : new RegExp( a ) } ).count() , "C D" );
-assert.eq( 1 , t.find( { a : new RegExp( b ) } ).count() , "C E" );
-assert.eq( 2 , t.find( { a : new RegExp( a , "i" ) } ).count() , "C F is spidermonkey built with UTF-8 support?" );
-
+assert.eq(2, t.find().count(), "C A");
+assert.eq(1, t.find({a: a}).count(), "C B");
+assert.eq(1, t.find({a: b}).count(), "C C");
+assert.eq(1, t.find({a: new RegExp(a)}).count(), "C D");
+assert.eq(1, t.find({a: new RegExp(b)}).count(), "C E");
+assert.eq(2,
+ t.find({a: new RegExp(a, "i")}).count(),
+ "C F is spidermonkey built with UTF-8 support?");
// same tests as above but using {$regex: "a|b", $options: "imx"} syntax.
t.drop();
-t.save( { a : "test" } );
-t.save( { a : "Test" } );
-
-assert.eq( 2 , t.find().count() , "obj A" );
-assert.eq( 1 , t.find( { a : {$regex:"Test"} } ).count() , "obj D" );
-assert.eq( 1 , t.find( { a : {$regex:"test"} } ).count() , "obj E" );
-assert.eq( 2 , t.find( { a : {$regex:"test", $options:"i"} } ).count() , "obj F" );
-assert.eq( 2 , t.find( { a : {$options:"i", $regex:"test"} } ).count() , "obj F rev" ); // both orders should work
+t.save({a: "test"});
+t.save({a: "Test"});
+assert.eq(2, t.find().count(), "obj A");
+assert.eq(1, t.find({a: {$regex: "Test"}}).count(), "obj D");
+assert.eq(1, t.find({a: {$regex: "test"}}).count(), "obj E");
+assert.eq(2, t.find({a: {$regex: "test", $options: "i"}}).count(), "obj F");
+assert.eq(2,
+ t.find({a: {$options: "i", $regex: "test"}}).count(),
+ "obj F rev"); // both orders should work
t.drop();
a = "\u0442\u0435\u0441\u0442";
b = "\u0422\u0435\u0441\u0442";
-t.save( { a : a } );
-t.save( { a : b } );
+t.save({a: a});
+t.save({a: b});
-
-assert.eq( 1 , t.find( { a : {$regex: a} } ).count() , "obj C D" );
-assert.eq( 1 , t.find( { a : {$regex: b} } ).count() , "obj C E" );
-assert.eq( 2 , t.find( { a : {$regex: a , $options: "i" } } ).count() , "obj C F is spidermonkey built with UTF-8 support?" );
+assert.eq(1, t.find({a: {$regex: a}}).count(), "obj C D");
+assert.eq(1, t.find({a: {$regex: b}}).count(), "obj C E");
+assert.eq(2,
+ t.find({a: {$regex: a, $options: "i"}}).count(),
+ "obj C F is spidermonkey built with UTF-8 support?");
// Test s (DOT_ALL) option. Not supported with /regex/opts syntax
t.drop();
-t.save({a:'1 2'});
-t.save({a:'1\n2'});
-assert.eq( 1 , t.find( { a : {$regex: '1.*2'} } ).count() );
-assert.eq( 2 , t.find( { a : {$regex: '1.*2', $options: 's'} } ).count() );
-
-
+t.save({a: '1 2'});
+t.save({a: '1\n2'});
+assert.eq(1, t.find({a: {$regex: '1.*2'}}).count());
+assert.eq(2, t.find({a: {$regex: '1.*2', $options: 's'}}).count());
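A small sketch of the two equivalent regex query spellings compared throughout regex2.js (shell db assumed, collection name invented):

var rc = db.regex_sketch;
rc.drop();
rc.save({a: "Test"});
assert.eq(1, rc.count({a: /test/i}));                          // native regex literal with flags
assert.eq(1, rc.count({a: {$regex: "test", $options: "i"}}));  // equivalent $regex/$options form
assert.eq(0, rc.count({a: /test/}));                           // without "i" the case must match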
diff --git a/jstests/core/regex3.js b/jstests/core/regex3.js
index 747fbf4d8a8..bc1623cecea 100644
--- a/jstests/core/regex3.js
+++ b/jstests/core/regex3.js
@@ -2,38 +2,39 @@
t = db.regex3;
t.drop();
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
-
-assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "no index count" );
-assert.eq( 4 , t.find( { name : /^e.*/ } ).explain(true).executionStats.totalDocsExamined ,
- "no index explain" );
-t.ensureIndex( { name : 1 } );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).itcount() , "index count" );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain" ); // SERVER-239
+t.save({name: "eliot"});
+t.save({name: "emily"});
+t.save({name: "bob"});
+t.save({name: "aaron"});
+
+assert.eq(2, t.find({name: /^e.*/}).itcount(), "no index count");
+assert.eq(4,
+ t.find({name: /^e.*/}).explain(true).executionStats.totalDocsExamined,
+ "no index explain");
+t.ensureIndex({name: 1});
+assert.eq(2, t.find({name: /^e.*/}).itcount(), "index count");
+assert.eq(2,
+ t.find({name: /^e.*/}).explain(true).executionStats.totalKeysExamined,
+ "index explain"); // SERVER-239
t.drop();
-t.save( { name : "aa" } );
-t.save( { name : "ab" } );
-t.save( { name : "ac" } );
-t.save( { name : "c" } );
+t.save({name: "aa"});
+t.save({name: "ab"});
+t.save({name: "ac"});
+t.save({name: "c"});
-assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B ni" );
-t.ensureIndex( { name : 1 } );
-assert.eq( 3 , t.find( { name : /^aa*/ } ).itcount() , "B i 1" );
-assert.eq( 4 , t.find( { name : /^aa*/ } ).explain(true).executionStats.totalKeysExamined ,
- "B i 1 e" );
+assert.eq(3, t.find({name: /^aa*/}).itcount(), "B ni");
+t.ensureIndex({name: 1});
+assert.eq(3, t.find({name: /^aa*/}).itcount(), "B i 1");
+assert.eq(4, t.find({name: /^aa*/}).explain(true).executionStats.totalKeysExamined, "B i 1 e");
-assert.eq( 2 , t.find( { name : /^a[ab]/ } ).itcount() , "B i 2" );
-assert.eq( 2 , t.find( { name : /^a[bc]/ } ).itcount() , "B i 3" );
+assert.eq(2, t.find({name: /^a[ab]/}).itcount(), "B i 2");
+assert.eq(2, t.find({name: /^a[bc]/}).itcount(), "B i 3");
t.drop();
-t.save( { name: "" } );
-assert.eq( 1, t.find( { name: /^a?/ } ).itcount() , "C 1" );
-t.ensureIndex( { name: 1 } );
-assert.eq( 1, t.find( { name: /^a?/ } ).itcount(), "C 2");
+t.save({name: ""});
+assert.eq(1, t.find({name: /^a?/}).itcount(), "C 1");
+t.ensureIndex({name: 1});
+assert.eq(1, t.find({name: /^a?/}).itcount(), "C 2");
diff --git a/jstests/core/regex4.js b/jstests/core/regex4.js
index ed5e76331e0..112375e2e09 100644
--- a/jstests/core/regex4.js
+++ b/jstests/core/regex4.js
@@ -2,19 +2,22 @@
t = db.regex4;
t.drop();
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
+t.save({name: "eliot"});
+t.save({name: "emily"});
+t.save({name: "bob"});
+t.save({name: "aaron"});
-assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "no index count" );
-assert.eq( 4 , t.find( { name : /^e.*/ } ).explain(true).executionStats.totalDocsExamined ,
- "no index explain" );
-//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "no index count ne" ); // SERVER-251
+assert.eq(2, t.find({name: /^e.*/}).count(), "no index count");
+assert.eq(4,
+ t.find({name: /^e.*/}).explain(true).executionStats.totalDocsExamined,
+ "no index explain");
+// assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "no index count ne" ); //
+// SERVER-251
-t.ensureIndex( { name : 1 } );
+t.ensureIndex({name: 1});
-assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "index count" );
-assert.eq( 2 , t.find( { name : /^e.*/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain" ); // SERVER-239
-//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "index count ne" ); // SERVER-251
+assert.eq(2, t.find({name: /^e.*/}).count(), "index count");
+assert.eq(2,
+ t.find({name: /^e.*/}).explain(true).executionStats.totalKeysExamined,
+ "index explain"); // SERVER-239
+// assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "index count ne" ); // SERVER-251
diff --git a/jstests/core/regex5.js b/jstests/core/regex5.js
index 5e3b7d0a6d3..36274f8b3ca 100644
--- a/jstests/core/regex5.js
+++ b/jstests/core/regex5.js
@@ -8,34 +8,34 @@ for (var i = 0; i < 10; i++) {
t.save({filler: "filler"});
}
-t.save( { x : [ "abc" , "xyz1" ] } );
-t.save( { x : [ "ac" , "xyz2" ] } );
+t.save({x: ["abc", "xyz1"]});
+t.save({x: ["ac", "xyz2"]});
a = /.*b.*c/;
x = /.*y.*/;
doit = function() {
-
- assert.eq( 1 , t.find( { x : a } ).count() , "A" );
- assert.eq( 2 , t.find( { x : x } ).count() , "B" );
- assert.eq( 2 , t.find( { x : { $in: [ x ] } } ).count() , "C" ); // SERVER-322
- assert.eq( 1 , t.find( { x : { $in: [ a, "xyz1" ] } } ).count() , "D" ); // SERVER-322
- assert.eq( 2 , t.find( { x : { $in: [ a, "xyz2" ] } } ).count() , "E" ); // SERVER-322
- assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "F" ); // SERVER-505
- assert.eq( 1 , t.find( { x : { $all : [ a , "abc" ] } } ).count() , "G" ); // SERVER-505
- assert.eq( 0 , t.find( { x : { $all : [ a , "ac" ] } } ).count() , "H" ); // SERVER-505
- assert.eq( 10 , t.find( { x : { $nin: [ x ] } } ).count() , "I" ); // SERVER-322
- assert.eq( 11 , t.find( { x : { $nin: [ a, "xyz1" ] } } ).count() , "J" ); // SERVER-322
- assert.eq( 10 , t.find( { x : { $nin: [ a, "xyz2" ] } } ).count() , "K" ); // SERVER-322
- assert.eq( 2 , t.find( { x : { $not: { $nin: [ x ] } } } ).count() , "L" ); // SERVER-322
- assert.eq( 11 , t.find( { x : { $nin: [ /^a.c/ ] } } ).count() , "M" ); // SERVER-322
+
+ assert.eq(1, t.find({x: a}).count(), "A");
+ assert.eq(2, t.find({x: x}).count(), "B");
+ assert.eq(2, t.find({x: {$in: [x]}}).count(), "C"); // SERVER-322
+ assert.eq(1, t.find({x: {$in: [a, "xyz1"]}}).count(), "D"); // SERVER-322
+ assert.eq(2, t.find({x: {$in: [a, "xyz2"]}}).count(), "E"); // SERVER-322
+ assert.eq(1, t.find({x: {$all: [a, x]}}).count(), "F"); // SERVER-505
+ assert.eq(1, t.find({x: {$all: [a, "abc"]}}).count(), "G"); // SERVER-505
+ assert.eq(0, t.find({x: {$all: [a, "ac"]}}).count(), "H"); // SERVER-505
+ assert.eq(10, t.find({x: {$nin: [x]}}).count(), "I"); // SERVER-322
+ assert.eq(11, t.find({x: {$nin: [a, "xyz1"]}}).count(), "J"); // SERVER-322
+ assert.eq(10, t.find({x: {$nin: [a, "xyz2"]}}).count(), "K"); // SERVER-322
+ assert.eq(2, t.find({x: {$not: {$nin: [x]}}}).count(), "L"); // SERVER-322
+ assert.eq(11, t.find({x: {$nin: [/^a.c/]}}).count(), "M"); // SERVER-322
};
doit();
-t.ensureIndex( {x:1} );
-print( "now indexed" );
+t.ensureIndex({x: 1});
+print("now indexed");
doit();
// SERVER-505
-assert.eq( 0, t.find( { x : { $all: [ "a", /^a/ ] } } ).itcount());
-assert.eq( 2, t.find( { x : { $all: [ /^a/ ] } } ).itcount());
+assert.eq(0, t.find({x: {$all: ["a", /^a/]}}).itcount());
+assert.eq(2, t.find({x: {$all: [/^a/]}}).itcount());
diff --git a/jstests/core/regex6.js b/jstests/core/regex6.js
index 4380ab1ab6b..7b9ed1910ed 100644
--- a/jstests/core/regex6.js
+++ b/jstests/core/regex6.js
@@ -2,40 +2,54 @@
t = db.regex6;
t.drop();
-t.save( { name : "eliot" } );
-t.save( { name : "emily" } );
-t.save( { name : "bob" } );
-t.save( { name : "aaron" } );
-t.save( { name : "[with]some?symbols" } );
+t.save({name: "eliot"});
+t.save({name: "emily"});
+t.save({name: "bob"});
+t.save({name: "aaron"});
+t.save({name: "[with]some?symbols"});
-t.ensureIndex( { name : 1 } );
+t.ensureIndex({name: 1});
-assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
-assert.eq( 1 , t.find( { name : /^\// } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 1" );
-assert.eq( 0 , t.find( { name : /^é/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 2" );
-assert.eq( 0 , t.find( { name : /^\é/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 3" );
-assert.eq( 1 , t.find( { name : /^\./ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 4" );
-assert.eq( 5 , t.find( { name : /^./ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 5" );
+assert.eq(0, t.find({name: /^\//}).count(), "index count");
+assert.eq(1,
+ t.find({name: /^\//}).explain(true).executionStats.totalKeysExamined,
+ "index explain 1");
+assert.eq(0,
+ t.find({name: /^é/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 2");
+assert.eq(0,
+ t.find({name: /^\é/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 3");
+assert.eq(1,
+ t.find({name: /^\./}).explain(true).executionStats.totalKeysExamined,
+ "index explain 4");
+assert.eq(5,
+ t.find({name: /^./}).explain(true).executionStats.totalKeysExamined,
+ "index explain 5");
// SERVER-2862
-assert.eq( 0 , t.find( { name : /^\Qblah\E/ } ).count() , "index explain 6" );
-assert.eq( 1 , t.find( { name : /^\Qblah\E/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 6" );
-assert.eq( 1 , t.find( { name : /^blah/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 6" );
-assert.eq( 1 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).count() , "index count 2" );
-assert.eq( 2 , t.find( { name : /^\Q[\Ewi\Qth]some?s\Eym/ } ).explain(true)
- .executionStats.totalKeysExamined ,
- "index explain 6" );
-assert.eq( 2 , t.find( { name : /^bob/ } ).explain(true).executionStats.totalKeysExamined ,
- "index explain 6" ); // proof executionStats.totalKeysExamined == count+1
+assert.eq(0, t.find({name: /^\Qblah\E/}).count(), "index explain 6");
+assert.eq(1,
+ t.find({name: /^\Qblah\E/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 6");
+assert.eq(1,
+ t.find({name: /^blah/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 6");
+assert.eq(1, t.find({name: /^\Q[\Ewi\Qth]some?s\Eym/}).count(), "index count 2");
+assert.eq(2,
+ t.find({name: /^\Q[\Ewi\Qth]some?s\Eym/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 6");
+assert.eq(2,
+ t.find({name: /^bob/}).explain(true).executionStats.totalKeysExamined,
+ "index explain 6"); // proof executionStats.totalKeysExamined == count+1
-assert.eq( 1, t.find( { name : { $regex : "^e", $gte: "emily" } } ).explain(true)
- .executionStats.totalKeysExamined , "ie7" );
-assert.eq( 1, t.find( { name : { $gt : "a", $regex: "^emily" } } ).explain(true)
- .executionStats.totalKeysExamined , "ie7" );
+assert.eq(1,
+ t.find({name: {$regex: "^e", $gte: "emily"}})
+ .explain(true)
+ .executionStats.totalKeysExamined,
+ "ie7");
+assert.eq(1,
+ t.find({name: {$gt: "a", $regex: "^emily"}})
+ .explain(true)
+ .executionStats.totalKeysExamined,
+ "ie7");
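The totalKeysExamined assertions above hinge on anchored regexes being convertible to index bounds; a rough sketch of that effect, with an invented collection name:

var rx = db.regex_bounds_sketch;
rx.drop();
rx.ensureIndex({name: 1});
rx.save({name: "eliot"});
rx.save({name: "emily"});
rx.save({name: "bob"});
var stats = rx.find({name: /^e/}).explain(true).executionStats;
// The /^e/ prefix becomes a bounded index range, so keys examined stays near the number of
// matches rather than the size of the whole index.
print("keys examined for /^e/: " + stats.totalKeysExamined);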
diff --git a/jstests/core/regex7.js b/jstests/core/regex7.js
index 8f32ed603ad..90949ec3723 100644
--- a/jstests/core/regex7.js
+++ b/jstests/core/regex7.js
@@ -1,26 +1,26 @@
t = db.regex_matches_self;
t.drop();
-t.insert({r:/^a/});
-t.insert({r:/^a/i});
-t.insert({r:/^b/});
+t.insert({r: /^a/});
+t.insert({r: /^a/i});
+t.insert({r: /^b/});
// no index
-assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 a');
-assert.eq( 1, t.count({r:/^a/}), '1 2');
-assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 a');
-assert.eq( 1, t.count({r:/^a/i}), '2 2 a');
-assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 a');
-assert.eq( 1, t.count({r:/^b/}), '3 2 a');
+assert.eq(/^a/, t.findOne({r: /^a/}).r, '1 1 a');
+assert.eq(1, t.count({r: /^a/}), '1 2');
+assert.eq(/^a/i, t.findOne({r: /^a/i}).r, '2 1 a');
+assert.eq(1, t.count({r: /^a/i}), '2 2 a');
+assert.eq(/^b/, t.findOne({r: /^b/}).r, '3 1 a');
+assert.eq(1, t.count({r: /^b/}), '3 2 a');
// with index
-t.ensureIndex({r:1});
-assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 b');
-assert.eq( 1, t.count({r:/^a/}), '1 2 b');
-assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 b');
-assert.eq( 1, t.count({r:/^a/i}), '2 2 b');
-assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 b');
-assert.eq( 1, t.count({r:/^b/}), '3 2 b');
+t.ensureIndex({r: 1});
+assert.eq(/^a/, t.findOne({r: /^a/}).r, '1 1 b');
+assert.eq(1, t.count({r: /^a/}), '1 2 b');
+assert.eq(/^a/i, t.findOne({r: /^a/i}).r, '2 1 b');
+assert.eq(1, t.count({r: /^a/i}), '2 2 b');
+assert.eq(/^b/, t.findOne({r: /^b/}).r, '3 1 b');
+assert.eq(1, t.count({r: /^b/}), '3 2 b');
-t.insert( {r:"a"} );
-assert.eq( 2, t.count({r:/^a/}), 'c' ); \ No newline at end of file
+t.insert({r: "a"});
+assert.eq(2, t.count({r: /^a/}), 'c'); \ No newline at end of file
diff --git a/jstests/core/regex8.js b/jstests/core/regex8.js
index 3bb598831a7..3b731c11edd 100644
--- a/jstests/core/regex8.js
+++ b/jstests/core/regex8.js
@@ -2,18 +2,18 @@
t = db.regex8;
t.drop();
-t.insert( { _id : 1 , a : "abc" } );
-t.insert( { _ud : 2 , a : "abc" } );
-t.insert( { _id : 3 , a : "bdc" } );
+t.insert({_id: 1, a: "abc"});
+t.insert({_ud: 2, a: "abc"});
+t.insert({_id: 3, a: "bdc"});
-function test( msg ){
- assert.eq( 3 , t.find().itcount() , msg + "1" );
- assert.eq( 2 , t.find( { a : /a.*/ } ).itcount() , msg + "2" );
- assert.eq( 3 , t.find( { a : /[ab].*/ } ).itcount() , msg + "3" );
- assert.eq( 3 , t.find( { a : /[a|b].*/ } ).itcount() , msg + "4" );
+function test(msg) {
+ assert.eq(3, t.find().itcount(), msg + "1");
+ assert.eq(2, t.find({a: /a.*/}).itcount(), msg + "2");
+ assert.eq(3, t.find({a: /[ab].*/}).itcount(), msg + "3");
+ assert.eq(3, t.find({a: /[a|b].*/}).itcount(), msg + "4");
}
-test( "A" );
+test("A");
-t.ensureIndex( { a : 1 } );
-test( "B" );
+t.ensureIndex({a: 1});
+test("B");
diff --git a/jstests/core/regex9.js b/jstests/core/regex9.js
index e12310ee848..96188d689dc 100644
--- a/jstests/core/regex9.js
+++ b/jstests/core/regex9.js
@@ -2,10 +2,10 @@
t = db.regex9;
t.drop();
-t.insert( { _id : 1 , a : [ "a" , "b" , "c" ] } );
-t.insert( { _id : 2 , a : [ "a" , "b" , "c" , "d" ] } );
-t.insert( { _id : 3 , a : [ "b" , "c" , "d" ] } );
+t.insert({_id: 1, a: ["a", "b", "c"]});
+t.insert({_id: 2, a: ["a", "b", "c", "d"]});
+t.insert({_id: 3, a: ["b", "c", "d"]});
-assert.eq( 2 , t.find( { a : /a/ } ).itcount() , "A1" );
-assert.eq( 2 , t.find( { a : { $regex : "a" } } ).itcount() , "A2" );
-assert.eq( 2 , t.find( { a : { $regex : /a/ } } ).itcount() , "A3" );
+assert.eq(2, t.find({a: /a/}).itcount(), "A1");
+assert.eq(2, t.find({a: {$regex: "a"}}).itcount(), "A2");
+assert.eq(2, t.find({a: {$regex: /a/}}).itcount(), "A3");
diff --git a/jstests/core/regex_embed1.js b/jstests/core/regex_embed1.js
index 7e08ca66e77..6a47a7919ed 100644
--- a/jstests/core/regex_embed1.js
+++ b/jstests/core/regex_embed1.js
@@ -3,23 +3,20 @@ t = db.regex_embed1;
t.drop();
-t.insert( { _id : 1 , a : [ { x : "abc" } , { x : "def" } ] } );
-t.insert( { _id : 2 , a : [ { x : "ab" } , { x : "de" } ] } );
-t.insert( { _id : 3 , a : [ { x : "ab" } , { x : "de" } , { x : "abc" } ] } );
+t.insert({_id: 1, a: [{x: "abc"}, {x: "def"}]});
+t.insert({_id: 2, a: [{x: "ab"}, {x: "de"}]});
+t.insert({_id: 3, a: [{x: "ab"}, {x: "de"}, {x: "abc"}]});
-function test( m ){
- assert.eq( 3 , t.find().itcount() , m + "1" );
- assert.eq( 2 , t.find( { "a.x" : "abc" } ).itcount() , m + "2" );
- assert.eq( 2 , t.find( { "a.x" : /.*abc.*/ } ).itcount() , m + "3" );
-
- assert.eq( 1 , t.find( { "a.0.x" : "abc" } ).itcount() , m + "4" );
- assert.eq( 1 , t.find( { "a.0.x" : /abc/ } ).itcount() , m + "5" );
-}
-
-test( "A" );
-
-t.ensureIndex( { "a.x" : 1 } );
-test( "B" );
+function test(m) {
+ assert.eq(3, t.find().itcount(), m + "1");
+ assert.eq(2, t.find({"a.x": "abc"}).itcount(), m + "2");
+ assert.eq(2, t.find({"a.x": /.*abc.*/}).itcount(), m + "3");
+ assert.eq(1, t.find({"a.0.x": "abc"}).itcount(), m + "4");
+ assert.eq(1, t.find({"a.0.x": /abc/}).itcount(), m + "5");
+}
+test("A");
+t.ensureIndex({"a.x": 1});
+test("B");
diff --git a/jstests/core/regex_limit.js b/jstests/core/regex_limit.js
index e05dae8ab8b..0a8b3e08593 100644
--- a/jstests/core/regex_limit.js
+++ b/jstests/core/regex_limit.js
@@ -1,22 +1,21 @@
var t = db.regex_limit;
t.drop();
-var repeatStr = function(str, n){
- return new Array(n + 1).join(str);
+var repeatStr = function(str, n) {
+ return new Array(n + 1).join(str);
};
-t.insert({ z: repeatStr('c', 100000) });
+t.insert({z: repeatStr('c', 100000)});
var maxOkStrLen = repeatStr('c', 32764);
var strTooLong = maxOkStrLen + 'c';
-assert(t.findOne({ z: { $regex: maxOkStrLen }}) != null);
+assert(t.findOne({z: {$regex: maxOkStrLen}}) != null);
assert.throws(function() {
- t.findOne({ z: { $regex: strTooLong }});
+ t.findOne({z: {$regex: strTooLong}});
});
-assert(t.findOne({ z: { $in: [ new RegExp(maxOkStrLen) ]}}) != null);
+assert(t.findOne({z: {$in: [new RegExp(maxOkStrLen)]}}) != null);
assert.throws(function() {
- t.findOne({ z: { $in: [ new RegExp(strTooLong) ]}});
+ t.findOne({z: {$in: [new RegExp(strTooLong)]}});
});
-
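A sketch of the limit regex_limit.js probes above: pattern strings past the accepted length (32764 characters in the test) make the query throw rather than silently match nothing. Collection name invented:

var tooLong = new Array(32766).join('c');  // 32765 characters, one past the accepted length
assert.throws(function() {
    db.regex_limit_sketch.findOne({z: {$regex: tooLong}});
});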
diff --git a/jstests/core/regex_not_id.js b/jstests/core/regex_not_id.js
index b5d0f1b01a6..1f15250f240 100644
--- a/jstests/core/regex_not_id.js
+++ b/jstests/core/regex_not_id.js
@@ -3,10 +3,10 @@
var testColl = db.regex_not_id;
testColl.drop();
-assert.writeOK(testColl.insert({ _id: "ABCDEF1" }, {writeConcern:{w:1}}));
+assert.writeOK(testColl.insert({_id: "ABCDEF1"}, {writeConcern: {w: 1}}));
// Should be an error.
-assert.writeError(testColl.insert({ _id: /^A/ }, {writeConcern:{w:1}}));
+assert.writeError(testColl.insert({_id: /^A/}, {writeConcern: {w: 1}}));
// _id doesn't have to be first; still disallowed
-assert.writeError(testColl.insert({ xxx: "ABCDEF", _id: /ABCDEF/ }, {writeConcern:{w:1}})); \ No newline at end of file
+assert.writeError(testColl.insert({xxx: "ABCDEF", _id: /ABCDEF/}, {writeConcern: {w: 1}})); \ No newline at end of file
diff --git a/jstests/core/regex_options.js b/jstests/core/regex_options.js
index 3febe2575ab..f661e4812a8 100644
--- a/jstests/core/regex_options.js
+++ b/jstests/core/regex_options.js
@@ -1,7 +1,7 @@
t = db.jstests_regex_options;
t.drop();
-t.save( { a: "foo" } );
-assert.eq( 1, t.count( { a: { "$regex": /O/i } } ) );
-assert.eq( 1, t.count( { a: /O/i } ) );
-assert.eq( 1, t.count( { a: { "$regex": "O", "$options": "i" } } ) );
+t.save({a: "foo"});
+assert.eq(1, t.count({a: {"$regex": /O/i}}));
+assert.eq(1, t.count({a: /O/i}));
+assert.eq(1, t.count({a: {"$regex": "O", "$options": "i"}}));
diff --git a/jstests/core/regex_util.js b/jstests/core/regex_util.js
index 86ba8036516..b0c7791b6c1 100644
--- a/jstests/core/regex_util.js
+++ b/jstests/core/regex_util.js
@@ -1,27 +1,26 @@
// Tests for RegExp.escape
(function() {
- var TEST_STRINGS = [
- "[db]",
- "{ab}",
- "<c2>",
- "(abc)",
- "^first^",
- "&addr",
- "k@10gen.com",
- "#4",
- "!b",
- "<>3",
- "****word+",
- "\t| |\n\r",
- "Mongo-db",
- "[{(<>)}]!@#%^&*+\\"
- ];
+ var TEST_STRINGS = [
+ "[db]",
+ "{ab}",
+ "<c2>",
+ "(abc)",
+ "^first^",
+ "&addr",
+ "k@10gen.com",
+ "#4",
+ "!b",
+ "<>3",
+ "****word+",
+ "\t| |\n\r",
+ "Mongo-db",
+ "[{(<>)}]!@#%^&*+\\"
+ ];
- TEST_STRINGS.forEach(function (str) {
- var escaped = RegExp.escape(str);
- var regex = new RegExp(escaped);
- assert(regex.test(str), "Wrong escape for " + str);
- });
+ TEST_STRINGS.forEach(function(str) {
+ var escaped = RegExp.escape(str);
+ var regex = new RegExp(escaped);
+ assert(regex.test(str), "Wrong escape for " + str);
+ });
})();
-
diff --git a/jstests/core/regexa.js b/jstests/core/regexa.js
index b0d47190e77..b56e2cf405e 100644
--- a/jstests/core/regexa.js
+++ b/jstests/core/regexa.js
@@ -4,16 +4,16 @@ t = db.jstests_regexa;
t.drop();
function check() {
- assert.eq( 1, t.count( {a:/^(z|.)/} ) );
- assert.eq( 1, t.count( {a:/^z|./} ) );
- assert.eq( 0, t.count( {a:/^z(z|.)/} ) );
- assert.eq( 1, t.count( {a:/^zz|./} ) );
+ assert.eq(1, t.count({a: /^(z|.)/}));
+ assert.eq(1, t.count({a: /^z|./}));
+ assert.eq(0, t.count({a: /^z(z|.)/}));
+ assert.eq(1, t.count({a: /^zz|./}));
}
-t.save( {a:'a'} );
+t.save({a: 'a'});
check();
-t.ensureIndex( {a:1} );
-if ( 1 ) { // SERVER-3298
-check();
+t.ensureIndex({a: 1});
+if (1) { // SERVER-3298
+ check();
}
diff --git a/jstests/core/regexb.js b/jstests/core/regexb.js
index 169841239c8..09e3518728b 100644
--- a/jstests/core/regexb.js
+++ b/jstests/core/regexb.js
@@ -3,12 +3,7 @@
t = db.jstests_regexb;
t.drop();
-t.save( {a:'a',b:'b',c:'c',d:'d',e:'e'} );
-
-assert.eq( 1, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/e/} ) );
-assert.eq( 0, t.count( {a:/a/,b:/b/,c:/c/,d:/d/,e:/barf/} ) );
-
-
-
-
+t.save({a: 'a', b: 'b', c: 'c', d: 'd', e: 'e'});
+assert.eq(1, t.count({a: /a/, b: /b/, c: /c/, d: /d/, e: /e/}));
+assert.eq(0, t.count({a: /a/, b: /b/, c: /c/, d: /d/, e: /barf/}));
diff --git a/jstests/core/regexc.js b/jstests/core/regexc.js
index f7690c96496..43ad7fd860c 100644
--- a/jstests/core/regexc.js
+++ b/jstests/core/regexc.js
@@ -8,7 +8,7 @@ t.ensureIndex({a: 1});
t.save({a: "0"});
t.save({a: "1"});
t.save({a: "10"});
-assert.eq( 1, t.find({$and: [{a: /0/}, {a: /1/}]}).itcount() );
+assert.eq(1, t.find({$and: [{a: /0/}, {a: /1/}]}).itcount());
// implicit $and using compound index twice
t.drop();
@@ -16,7 +16,7 @@ t.ensureIndex({a: 1, b: 1});
t.save({a: "0", b: "1"});
t.save({a: "10", b: "10"});
t.save({a: "10", b: "2"});
-assert.eq( 2, t.find({a: /0/, b: /1/}).itcount() );
+assert.eq(2, t.find({a: /0/, b: /1/}).itcount());
// $or using same index twice
t.drop();
@@ -25,4 +25,4 @@ t.save({a: "0"});
t.save({a: "1"});
t.save({a: "2"});
t.save({a: "10"});
-assert.eq( 3, t.find({$or: [{a: /0/}, {a: /1/}]}).itcount() );
+assert.eq(3, t.find({$or: [{a: /0/}, {a: /1/}]}).itcount());
diff --git a/jstests/core/remove.js b/jstests/core/remove.js
index 9a71767c3c7..c4afd07c99c 100644
--- a/jstests/core/remove.js
+++ b/jstests/core/remove.js
@@ -3,17 +3,18 @@
t = db.removetest;
-function f(n,dir) {
- t.ensureIndex({x:dir||1});
- for( i = 0; i < n; i++ ) t.save( { x:3, z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+function f(n, dir) {
+ t.ensureIndex({x: dir || 1});
+ for (i = 0; i < n; i++)
+ t.save({x: 3, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
- assert.eq( n , t.find().count() );
- t.remove({x:3});
+ assert.eq(n, t.find().count());
+ t.remove({x: 3});
- assert.eq( 0 , t.find().count() );
-
- assert( t.findOne() == null , "A:" + tojson( t.findOne() ) );
- assert( t.validate().valid , "B" );
+ assert.eq(0, t.find().count());
+
+ assert(t.findOne() == null, "A:" + tojson(t.findOne()));
+ assert(t.validate().valid, "B");
}
t.drop();
@@ -21,7 +22,9 @@ f(300, 1);
f(500, -1);
-assert(t.validate().valid , "C" );
+assert(t.validate().valid, "C");
// no query for remove() throws starting in 2.6
-assert.throws(function() { db.t.remove(); });
+assert.throws(function() {
+ db.t.remove();
+});
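A tiny sketch of the remove() calling convention asserted above (shell db assumed, collection name invented): an explicit query document is required, even if empty.

var rs = db.remove_sketch;
rs.drop();
rs.insert({x: 1});
rs.remove({});  // empty query deletes every document
assert.eq(0, rs.count());
assert.throws(function() {
    rs.remove();  // omitting the query throws in the shell as of 2.6
});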
diff --git a/jstests/core/remove2.js b/jstests/core/remove2.js
index 6605d83e269..81d377c3dfa 100644
--- a/jstests/core/remove2.js
+++ b/jstests/core/remove2.js
@@ -4,33 +4,36 @@
t = db.removetest2;
function f() {
- t.save( { x:[3,3,3,3,3,3,3,3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
- t.save( { x: 9 } );
- t.save( { x: 1 } );
+ t.save({
+ x: [3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 6],
+ z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ });
+ t.save({x: 9});
+ t.save({x: 1});
- t.remove({x:3});
+ t.remove({x: 3});
- assert( t.findOne({x:3}) == null );
- assert( t.validate().valid );
+ assert(t.findOne({x: 3}) == null);
+ assert(t.validate().valid);
}
x = 0;
function g() {
- t.save( { x:[3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
- t.save( { x:[7,8,9], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+ t.save({x: [3, 4, 5, 6], z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
+ t.save({x: [7, 8, 9], z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"});
var res;
- res = t.remove( {x : {$gte:3}, $atomic:x++ } );
+ res = t.remove({x: {$gte: 3}, $atomic: x++});
- assert.writeOK( res );
+ assert.writeOK(res);
// $atomic within $and is not allowed.
- //res = t.remove( {x : {$gte:3}, $and:[{$atomic:true}] } );
- //assert.writeError( res );
+ // res = t.remove( {x : {$gte:3}, $and:[{$atomic:true}] } );
+ // assert.writeError( res );
- assert( t.findOne({x:3}) == null );
- assert( t.findOne({x:8}) == null );
- assert( t.validate().valid );
+ assert(t.findOne({x: 3}) == null);
+ assert(t.findOne({x: 8}) == null);
+ assert(t.validate().valid);
}
t.drop();
@@ -38,10 +41,9 @@ f();
t.drop();
g();
-t.ensureIndex({x:1});
+t.ensureIndex({x: 1});
t.remove({});
f();
t.drop();
-t.ensureIndex({x:1});
+t.ensureIndex({x: 1});
g();
-
diff --git a/jstests/core/remove3.js b/jstests/core/remove3.js
index 2a51a6e0fd4..75e95860cb1 100644
--- a/jstests/core/remove3.js
+++ b/jstests/core/remove3.js
@@ -2,17 +2,17 @@
t = db.remove3;
t.drop();
-for ( i=1; i<=8; i++){
- t.save( { _id : i , x : i } );
+for (i = 1; i <= 8; i++) {
+ t.save({_id: i, x: i});
}
-assert.eq( 8 , t.count() , "A" );
+assert.eq(8, t.count(), "A");
-t.remove( { x : { $lt : 5 } } );
-assert.eq( 4 , t.count() , "B" );
+t.remove({x: {$lt: 5}});
+assert.eq(4, t.count(), "B");
-t.remove( { _id : 5 } );
-assert.eq( 3 , t.count() , "C" );
+t.remove({_id: 5});
+assert.eq(3, t.count(), "C");
-t.remove( { _id : { $lt : 8 } } );
-assert.eq( 1 , t.count() , "D" );
+t.remove({_id: {$lt: 8}});
+assert.eq(1, t.count(), "D");
diff --git a/jstests/core/remove4.js b/jstests/core/remove4.js
index bd007ed4d27..dde28517031 100644
--- a/jstests/core/remove4.js
+++ b/jstests/core/remove4.js
@@ -1,10 +1,10 @@
t = db.remove4;
t.drop();
-t.save ( { a : 1 , b : 1 } );
-t.save ( { a : 2 , b : 1 } );
-t.save ( { a : 3 , b : 1 } );
+t.save({a: 1, b: 1});
+t.save({a: 2, b: 1});
+t.save({a: 3, b: 1});
-assert.eq( 3 , t.find().length() );
-t.remove( { b : 1 } );
-assert.eq( 0 , t.find().length() );
+assert.eq(3, t.find().length());
+t.remove({b: 1});
+assert.eq(0, t.find().length());
diff --git a/jstests/core/remove6.js b/jstests/core/remove6.js
index 83f5a5e6733..61cc39b6e3e 100644
--- a/jstests/core/remove6.js
+++ b/jstests/core/remove6.js
@@ -4,37 +4,36 @@ t.drop();
N = 1000;
-function pop(){
+function pop() {
t.drop();
var arr = [];
- for ( var i=0; i<N; i++ ){
- arr.push( { x : 1 , tags : [ "a" , "b" , "c" ] } );
+ for (var i = 0; i < N; i++) {
+ arr.push({x: 1, tags: ["a", "b", "c"]});
}
- t.insert( arr );
- assert.eq( t.count(), N );
+ t.insert(arr);
+ assert.eq(t.count(), N);
}
-function del(){
- return t.remove( { tags : { $in : [ "a" , "c" ] } } );
+function del() {
+ return t.remove({tags: {$in: ["a", "c"]}});
}
-function test( n , idx ){
+function test(n, idx) {
pop();
- assert.eq( N , t.count() , n + " A " + idx );
- if ( idx )
- t.ensureIndex( idx );
+ assert.eq(N, t.count(), n + " A " + idx);
+ if (idx)
+ t.ensureIndex(idx);
var res = del();
- assert( !res.hasWriteError() , "error deleting: " + res.toString() );
- assert.eq( 0 , t.count() , n + " B " + idx );
+ assert(!res.hasWriteError(), "error deleting: " + res.toString());
+ assert.eq(0, t.count(), n + " B " + idx);
}
-test( "a" );
-test( "b" , { x : 1 } );
-test( "c" , { tags : 1 } );
+test("a");
+test("b", {x: 1});
+test("c", {tags: 1});
N = 5000;
-test( "a2" );
-test( "b2" , { x : 1 } );
-test( "c2" , { tags : 1 } );
-
+test("a2");
+test("b2", {x: 1});
+test("c2", {tags: 1});
diff --git a/jstests/core/remove7.js b/jstests/core/remove7.js
index f50419b3bd0..ef5500fa1fa 100644
--- a/jstests/core/remove7.js
+++ b/jstests/core/remove7.js
@@ -2,33 +2,32 @@
t = db.remove7;
t.drop();
-
-
-function getTags( n ){
+function getTags(n) {
n = n || 5;
var a = [];
- for ( var i=0; i<n; i++ ){
- var v = Math.ceil( 20 * Math.random() );
- a.push( v );
+ for (var i = 0; i < n; i++) {
+ var v = Math.ceil(20 * Math.random());
+ a.push(v);
}
return a;
}
-for ( i=0; i<1000; i++ ){
- t.save( { tags : getTags() } );
+for (i = 0; i < 1000; i++) {
+ t.save({tags: getTags()});
}
-t.ensureIndex( { tags : 1 } );
-
-for ( i=0; i<200; i++ ){
- for ( var j=0; j<10; j++ )
- t.save( { tags : getTags( 100 ) } );
- var q = { tags : { $in : getTags( 10 ) } };
- var before = t.find( q ).count();
- var res = t.remove( q );
- var after = t.find( q ).count();
- assert.eq( 0 , after , "not zero after!" );
- assert.writeOK( res );
+t.ensureIndex({tags: 1});
+
+for (i = 0; i < 200; i++) {
+ for (var j = 0; j < 10; j++)
+ t.save({tags: getTags(100)});
+ var q = {
+ tags: {$in: getTags(10)}
+ };
+ var before = t.find(q).count();
+ var res = t.remove(q);
+ var after = t.find(q).count();
+ assert.eq(0, after, "not zero after!");
+ assert.writeOK(res);
}
-
diff --git a/jstests/core/remove8.js b/jstests/core/remove8.js
index 32d8270e9c0..563e4708cf9 100644
--- a/jstests/core/remove8.js
+++ b/jstests/core/remove8.js
@@ -4,18 +4,20 @@ t.drop();
N = 1000;
-function fill(){
- for ( var i=0; i<N; i++ ){
- t.save( { x : i } );
+function fill() {
+ for (var i = 0; i < N; i++) {
+ t.save({x: i});
}
}
fill();
-assert.eq( N , t.count() , "A" );
-t.remove( {} );
-assert.eq( 0 , t.count() , "B" );
+assert.eq(N, t.count(), "A");
+t.remove({});
+assert.eq(0, t.count(), "B");
fill();
-assert.eq( N , t.count() , "C" );
-db.eval( function(){ db.remove8.remove( {} ); } );
-assert.eq( 0 , t.count() , "D" );
+assert.eq(N, t.count(), "C");
+db.eval(function() {
+ db.remove8.remove({});
+});
+assert.eq(0, t.count(), "D");
diff --git a/jstests/core/remove9.js b/jstests/core/remove9.js
index 85e9002d108..1c82d7b4fce 100644
--- a/jstests/core/remove9.js
+++ b/jstests/core/remove9.js
@@ -2,15 +2,16 @@
t = db.jstests_remove9;
t.drop();
-t.ensureIndex( {i:1} );
-for( i = 0; i < 1000; ++i ) {
- t.save( {i:i} );
+t.ensureIndex({i: 1});
+for (i = 0; i < 1000; ++i) {
+ t.save({i: i});
}
-s = startParallelShell( 't = db.jstests_remove9; Random.setRandomSeed(); for( j = 0; j < 5000; ++j ) { i = Random.randInt( 499 ) * 2; t.update( {i:i}, {$set:{i:2000}} ); t.remove( {i:2000} ); t.save( {i:i} ); }' );
+s = startParallelShell(
+ 't = db.jstests_remove9; Random.setRandomSeed(); for( j = 0; j < 5000; ++j ) { i = Random.randInt( 499 ) * 2; t.update( {i:i}, {$set:{i:2000}} ); t.remove( {i:2000} ); t.save( {i:i} ); }');
-for( i = 0; i < 1000; ++i ) {
- assert.eq( 500, t.find( {i:{$gte:0,$mod:[2,1]}} ).hint( {i:1} ).itcount() );
+for (i = 0; i < 1000; ++i) {
+ assert.eq(500, t.find({i: {$gte: 0, $mod: [2, 1]}}).hint({i: 1}).itcount());
}
s();
diff --git a/jstests/core/remove_justone.js b/jstests/core/remove_justone.js
index 19bfd6d1a03..04813149dfd 100644
--- a/jstests/core/remove_justone.js
+++ b/jstests/core/remove_justone.js
@@ -2,15 +2,15 @@
t = db.remove_justone;
t.drop();
-t.insert( { x : 1 } );
-t.insert( { x : 1 } );
-t.insert( { x : 1 } );
-t.insert( { x : 1 } );
+t.insert({x: 1});
+t.insert({x: 1});
+t.insert({x: 1});
+t.insert({x: 1});
-assert.eq( 4 , t.count() );
+assert.eq(4, t.count());
-t.remove( { x : 1 } , true );
-assert.eq( 3 , t.count() );
+t.remove({x: 1}, true);
+assert.eq(3, t.count());
-t.remove( { x : 1 } );
-assert.eq( 0 , t.count() );
+t.remove({x: 1});
+assert.eq(0, t.count());
diff --git a/jstests/core/remove_undefined.js b/jstests/core/remove_undefined.js
index 51df72c6bbf..0505e494f55 100644
--- a/jstests/core/remove_undefined.js
+++ b/jstests/core/remove_undefined.js
@@ -1,28 +1,35 @@
t = db.drop_undefined.js;
-t.insert( { _id : 1 } );
-t.insert( { _id : 2 } );
-t.insert( { _id : null } );
-
-z = { foo : 1 , x : null };
-
-t.remove( { x : z.bar } );
-assert.eq( 3 , t.count() , "A1" );
-
-t.remove( { x : undefined } );
-assert.eq( 3 , t.count() , "A2" );
-
-assert.throws( function(){ t.remove( { _id : z.bar } ); } , null , "B1" );
-assert.throws( function(){ t.remove( { _id : undefined } ); } , null , "B2" );
-
-
-t.remove( { _id : z.x } );
-assert.eq( 2 , t.count() , "C1" );
-
-t.insert( { _id : null } );
-assert.eq( 3 , t.count() , "C2" );
-
-assert.throws( function(){ t.remove( { _id : undefined } ); } , null, "C3" );
-assert.eq( 3 , t.count() , "C4" );
-
+t.insert({_id: 1});
+t.insert({_id: 2});
+t.insert({_id: null});
+
+z = {
+ foo: 1,
+ x: null
+};
+
+t.remove({x: z.bar});
+assert.eq(3, t.count(), "A1");
+
+t.remove({x: undefined});
+assert.eq(3, t.count(), "A2");
+
+assert.throws(function() {
+ t.remove({_id: z.bar});
+}, null, "B1");
+assert.throws(function() {
+ t.remove({_id: undefined});
+}, null, "B2");
+
+t.remove({_id: z.x});
+assert.eq(2, t.count(), "C1");
+
+t.insert({_id: null});
+assert.eq(3, t.count(), "C2");
+
+assert.throws(function() {
+ t.remove({_id: undefined});
+}, null, "C3");
+assert.eq(3, t.count(), "C4");
diff --git a/jstests/core/removea.js b/jstests/core/removea.js
index 40ee0e6a186..9b51080910b 100644
--- a/jstests/core/removea.js
+++ b/jstests/core/removea.js
@@ -5,27 +5,27 @@ t = db.jstests_removea;
Random.setRandomSeed();
-for( v = 0; v < 2; ++v ) { // Try each index version.
+for (v = 0; v < 2; ++v) { // Try each index version.
t.drop();
- t.ensureIndex( { a:1 }, { v:v } );
+ t.ensureIndex({a: 1}, {v: v});
S = 100;
B = 100;
- for ( var x = 0; x < S; x++ ) {
+ for (var x = 0; x < S; x++) {
var batch = [];
- for ( var y = 0; y < B; y++ ) {
- var i = y + ( B * x );
- batch.push( { a : i } );
+ for (var y = 0; y < B; y++) {
+ var i = y + (B * x);
+ batch.push({a: i});
}
- t.insert( batch );
+ t.insert(batch);
}
- assert.eq( t.count(), S * B );
+ assert.eq(t.count(), S * B);
toDrop = [];
- for( i = 0; i < S * B ; ++i ) {
- toDrop.push( Random.randInt( 10000 ) ); // Dups in the query will be ignored.
+ for (i = 0; i < S * B; ++i) {
+ toDrop.push(Random.randInt(10000)); // Dups in the query will be ignored.
}
// Remove many of the documents; $atomic prevents use of a ClientCursor, which would invoke a
// different bucket deallocation procedure than the one to be tested (see SERVER-4575).
- var res = t.remove( { a:{ $in:toDrop }, $atomic:true } );
- assert.writeOK( res );
+ var res = t.remove({a: {$in: toDrop}, $atomic: true});
+ assert.writeOK(res);
}
diff --git a/jstests/core/removeb.js b/jstests/core/removeb.js
index 1e6658bd7a9..2141e138254 100644
--- a/jstests/core/removeb.js
+++ b/jstests/core/removeb.js
@@ -3,34 +3,33 @@
t = db.jstests_removeb;
t.drop();
-t.ensureIndex( { a:1 } );
+t.ensureIndex({a: 1});
// Make the index multikey to trigger cursor dedup checking.
-t.insert( { a:[ -1, -2 ] } );
+t.insert({a: [-1, -2]});
t.remove({});
// Insert some data.
-for( i = 0; i < 20000; ++i ) {
- t.insert( { a:i } );
+for (i = 0; i < 20000; ++i) {
+ t.insert({a: i});
}
p = startParallelShell(
- // Wait until the remove operation (below) begins running.
- 'while( db.jstests_removeb.count() == 20000 );' +
- // Insert documents with increasing 'a' values. These inserted documents may
- // reuse Records freed by the remove operation in progress and will be
- // visited by the remove operation if it has not completed.
- 'for( i = 20000; i < 40000; ++i ) {' +
- ' db.jstests_removeb.insert( { a:i } );' +
- ' if (i % 1000 == 0) {' +
- ' print( i-20000 + \" of 20000 documents inserted\" );' +
- ' }' +
- '}'
- );
+ // Wait until the remove operation (below) begins running.
+ 'while( db.jstests_removeb.count() == 20000 );' +
+ // Insert documents with increasing 'a' values. These inserted documents may
+ // reuse Records freed by the remove operation in progress and will be
+ // visited by the remove operation if it has not completed.
+ 'for( i = 20000; i < 40000; ++i ) {' +
+ ' db.jstests_removeb.insert( { a:i } );' +
+ ' if (i % 1000 == 0) {' +
+ ' print( i-20000 + \" of 20000 documents inserted\" );' +
+ ' }' +
+ '}');
// Remove using the a:1 index in ascending direction.
-var res = t.remove( { a:{ $gte:0 } } );
-assert( !res.hasWriteError(), 'The remove operation failed.' );
+var res = t.remove({a: {$gte: 0}});
+assert(!res.hasWriteError(), 'The remove operation failed.');
p();
diff --git a/jstests/core/removec.js b/jstests/core/removec.js
index b062399bdb5..f2c4e29e4fc 100644
--- a/jstests/core/removec.js
+++ b/jstests/core/removec.js
@@ -2,37 +2,35 @@
t = db.jstests_removec;
t.drop();
-t.ensureIndex( { a:1 } );
+t.ensureIndex({a: 1});
/** @return an array containing a sequence of numbers from i to i + 10. */
-function runStartingWith( i ) {
+function runStartingWith(i) {
ret = [];
- for( j = 0; j < 11; ++j ) {
- ret.push( i + j );
+ for (j = 0; j < 11; ++j) {
+ ret.push(i + j);
}
return ret;
}
// Insert some documents with adjacent index keys.
-for( i = 0; i < 1100; i += 11 ) {
- t.save( { a:runStartingWith( i ) } );
+for (i = 0; i < 1100; i += 11) {
+ t.save({a: runStartingWith(i)});
}
// Remove and then reinsert random documents in the background.
-s = startParallelShell(
- 't = db.jstests_removec;' +
+s = startParallelShell('t = db.jstests_removec;' +
'Random.setRandomSeed();' +
'for( j = 0; j < 1000; ++j ) {' +
' o = t.findOne( { a:Random.randInt( 1100 ) } );' +
' t.remove( { _id:o._id } );' +
' t.insert( o );' +
- '}'
- );
+ '}');
// Find operations are error free. Note that the cursor throws if it detects the $err
// field in the returned document.
-for( i = 0; i < 200; ++i ) {
- t.find( { a:{ $gte:0 } } ).hint( { a:1 } ).itcount();
+for (i = 0; i < 200; ++i) {
+ t.find({a: {$gte: 0}}).hint({a: 1}).itcount();
}
s();
diff --git a/jstests/core/rename.js b/jstests/core/rename.js
index b2695d95e0f..3287159f850 100644
--- a/jstests/core/rename.js
+++ b/jstests/core/rename.js
@@ -1,4 +1,4 @@
-admin = db.getMongo().getDB( "admin" );
+admin = db.getMongo().getDB("admin");
a = db.jstests_rename_a;
b = db.jstests_rename_b;
@@ -8,22 +8,24 @@ a.drop();
b.drop();
c.drop();
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.ensureIndex( {a:1} );
-a.ensureIndex( {b:1} );
+a.save({a: 1});
+a.save({a: 2});
+a.ensureIndex({a: 1});
+a.ensureIndex({b: 1});
-c.save( {a: 100} );
-assert.commandFailed( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_c"} ) );
+c.save({a: 100});
+assert.commandFailed(
+ admin.runCommand({renameCollection: "test.jstests_rename_a", to: "test.jstests_rename_c"}));
-assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
-assert.eq( 0, a.find().count() );
+assert.commandWorked(
+ admin.runCommand({renameCollection: "test.jstests_rename_a", to: "test.jstests_rename_b"}));
+assert.eq(0, a.find().count());
-assert.eq( 2, b.find().count() );
-assert( db.getCollectionNames().indexOf( "jstests_rename_b" ) >= 0 );
-assert( db.getCollectionNames().indexOf( "jstests_rename_a" ) < 0 );
-assert.eq( 3, db.jstests_rename_b.getIndexes().length );
-assert.eq( 0, db.jstests_rename_a.getIndexes().length );
+assert.eq(2, b.find().count());
+assert(db.getCollectionNames().indexOf("jstests_rename_b") >= 0);
+assert(db.getCollectionNames().indexOf("jstests_rename_a") < 0);
+assert.eq(3, db.jstests_rename_b.getIndexes().length);
+assert.eq(0, db.jstests_rename_a.getIndexes().length);
// now try renaming a capped collection
@@ -33,25 +35,26 @@ c.drop();
// TODO: too many numbers hard coded here
// this test depends precisely on record size and hence may not be very reliable
-// note we use floats to make sure numbers are represented as doubles for SpiderMonkey, since test relies on record size
-db.createCollection( "jstests_rename_a", {capped:true,size:10000} );
-for( i = 0.1; i < 10; ++i ) {
- a.save( { i: i } );
+// note we use floats to make sure numbers are represented as doubles for SpiderMonkey, since the test
+// relies on record size
+db.createCollection("jstests_rename_a", {capped: true, size: 10000});
+for (i = 0.1; i < 10; ++i) {
+ a.save({i: i});
}
-assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
-assert.eq( 1, b.count( {i:9.1} ) );
-printjson( b.stats() );
-for( i = 10.1; i < 1000; ++i ) {
- b.save( { i: i } );
+assert.commandWorked(
+ admin.runCommand({renameCollection: "test.jstests_rename_a", to: "test.jstests_rename_b"}));
+assert.eq(1, b.count({i: 9.1}));
+printjson(b.stats());
+for (i = 10.1; i < 1000; ++i) {
+ b.save({i: i});
}
-printjson( b.stats() );
-//res = b.find().sort({i:1});
-//while (res.hasNext()) printjson(res.next());
+printjson(b.stats());
+// res = b.find().sort({i:1});
+// while (res.hasNext()) printjson(res.next());
-assert.eq( 1, b.count( {i:i-1} ) ); // make sure last is there
-assert.eq( 0, b.count( {i:9.1} ) ); // make sure early one is gone
+assert.eq(1, b.count({i: i - 1})); // make sure last is there
+assert.eq(0, b.count({i: 9.1})); // make sure early one is gone
-
-assert( db.getCollectionNames().indexOf( "jstests_rename_b" ) >= 0 );
-assert( db.getCollectionNames().indexOf( "jstests_rename_a" ) < 0 );
-assert( db.jstests_rename_b.stats().capped );
+assert(db.getCollectionNames().indexOf("jstests_rename_b") >= 0);
+assert(db.getCollectionNames().indexOf("jstests_rename_a") < 0);
+assert(db.jstests_rename_b.stats().capped);
diff --git a/jstests/core/rename2.js b/jstests/core/rename2.js
index c913bcbe66e..efbc943ce4f 100644
--- a/jstests/core/rename2.js
+++ b/jstests/core/rename2.js
@@ -6,14 +6,14 @@ b = db.rename2b;
a.drop();
b.drop();
-a.save( { x : 1 } );
-a.save( { x : 2 } );
-a.save( { x : 3 } );
+a.save({x: 1});
+a.save({x: 2});
+a.save({x: 3});
-assert.eq( 3 , a.count() , "A" );
-assert.eq( 0 , b.count() , "B" );
+assert.eq(3, a.count(), "A");
+assert.eq(0, b.count(), "B");
-assert( a.renameCollection( "rename2b" ) , "the command" );
+assert(a.renameCollection("rename2b"), "the command");
-assert.eq( 0 , a.count() , "C" );
-assert.eq( 3 , b.count() , "D" );
+assert.eq(0, a.count(), "C");
+assert.eq(3, b.count(), "D");
diff --git a/jstests/core/rename3.js b/jstests/core/rename3.js
index 31a91772a75..aa1bd986b47 100644
--- a/jstests/core/rename3.js
+++ b/jstests/core/rename3.js
@@ -6,20 +6,20 @@ b = db.rename3b;
a.drop();
b.drop();
-a.save( { x : 1 } );
-b.save( { x : 2 } );
+a.save({x: 1});
+b.save({x: 2});
-assert.eq( 1 , a.findOne().x , "before 1a" );
-assert.eq( 2 , b.findOne().x , "before 2a" );
+assert.eq(1, a.findOne().x, "before 1a");
+assert.eq(2, b.findOne().x, "before 2a");
-res = b.renameCollection( a._shortName );
-assert.eq( 0 , res.ok , "should fail: " + tojson( res ) );
+res = b.renameCollection(a._shortName);
+assert.eq(0, res.ok, "should fail: " + tojson(res));
-assert.eq( 1 , a.findOne().x , "before 1b" );
-assert.eq( 2 , b.findOne().x , "before 2b" );
+assert.eq(1, a.findOne().x, "before 1b");
+assert.eq(2, b.findOne().x, "before 2b");
-res = b.renameCollection( a._shortName , true );
-assert.eq( 1 , res.ok , "should succeed:" + tojson( res ) );
+res = b.renameCollection(a._shortName, true);
+assert.eq(1, res.ok, "should succeed:" + tojson(res));
-assert.eq( 2 , a.findOne().x , "after 1" );
-assert.isnull( b.findOne() , "after 2" );
+assert.eq(2, a.findOne().x, "after 1");
+assert.isnull(b.findOne(), "after 2");
diff --git a/jstests/core/rename4.js b/jstests/core/rename4.js
index 92a26ef6ac0..904709175f9 100644
--- a/jstests/core/rename4.js
+++ b/jstests/core/rename4.js
@@ -1,113 +1,114 @@
t = db.jstests_rename4;
t.drop();
-function bad( f ) {
+function bad(f) {
var docsBeforeUpdate = t.find().toArray();
- var res = eval( f );
+ var res = eval(f);
- //Ensure error
+ // Ensure error
if (!res.hasWriteError()) {
print("Error:" + res.toString());
print("Existing docs (before)");
printjson(docsBeforeUpdate);
print("Existing docs (after)");
printjson(t.find().toArray());
- assert( false, "Expected error but didn't get one for: " + f );
+ assert(false, "Expected error but didn't get one for: " + f);
}
}
-bad( "t.update( {}, {$rename:{'a':'a'}} )" );
-bad( "t.update( {}, {$rename:{'':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a':''}} )" );
-bad( "t.update( {}, {$rename:{'.a':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'.b'}} )" );
-bad( "t.update( {}, {$rename:{'a.':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.'}} )" );
-bad( "t.update( {}, {$rename:{'a.b':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a.$':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.$'}} )" );
+bad("t.update( {}, {$rename:{'a':'a'}} )");
+bad("t.update( {}, {$rename:{'':'a'}} )");
+bad("t.update( {}, {$rename:{'a':''}} )");
+bad("t.update( {}, {$rename:{'.a':'b'}} )");
+bad("t.update( {}, {$rename:{'a':'.b'}} )");
+bad("t.update( {}, {$rename:{'a.':'b'}} )");
+bad("t.update( {}, {$rename:{'a':'b.'}} )");
+bad("t.update( {}, {$rename:{'a.b':'a'}} )");
+bad("t.update( {}, {$rename:{'a.$':'b'}} )");
+bad("t.update( {}, {$rename:{'a':'b.$'}} )");
// Only bad if input doc has field resulting in conflict
-t.save( {_id:1, a:2} );
-bad( "t.update( {}, {$rename:{'_id':'a'}} )" );
-bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{b:1}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{a:1}} )" );
-bad( "t.update( {}, {$set:{'b.c':1},$rename:{'a':'b'}} )" );
-bad( "t.update( {}, {$set:{b:1},$rename:{'a':'b.c'}} )" );
-bad( "t.update( {}, {$rename:{'a':'b'},$set:{'b.c':1}} )" );
-bad( "t.update( {}, {$rename:{'a':'b.c'},$set:{b:1}} )" );
-
+t.save({_id: 1, a: 2});
+bad("t.update( {}, {$rename:{'_id':'a'}} )");
+bad("t.update( {}, {$set:{b:1},$rename:{'a':'b'}} )");
+bad("t.update( {}, {$rename:{'a':'b'},$set:{b:1}} )");
+bad("t.update( {}, {$rename:{'a':'b'},$set:{a:1}} )");
+bad("t.update( {}, {$set:{'b.c':1},$rename:{'a':'b'}} )");
+bad("t.update( {}, {$set:{b:1},$rename:{'a':'b.c'}} )");
+bad("t.update( {}, {$rename:{'a':'b'},$set:{'b.c':1}} )");
+bad("t.update( {}, {$rename:{'a':'b.c'},$set:{b:1}} )");
t.remove({});
-t.save( {a:[1],b:{c:[2]},d:[{e:3}],f:4} );
-bad( "t.update( {}, {$rename:{'a.0':'f'}} )" );
-bad( "t.update( {}, {$rename:{'a.0':'g'}} )" );
-bad( "t.update( {}, {$rename:{'f':'a.0'}} )" );
-bad( "t.update( {}, {$rename:{'b.c.0':'f'}} )" );
-bad( "t.update( {}, {$rename:{'f':'b.c.0'}} )" );
-bad( "t.update( {}, {$rename:{'d.e':'d.f'}} )" );
-bad( "t.update( {}, {$rename:{'d.e':'f'}} )" );
-bad( "t.update( {}, {$rename:{'d.f':'d.e'}} )" );
-bad( "t.update( {}, {$rename:{'f':'d.e'}} )" );
-bad( "t.update( {}, {$rename:{'d.0.e':'d.f'}} )" );
-bad( "t.update( {}, {$rename:{'d.0.e':'f'}} )" );
-bad( "t.update( {}, {$rename:{'d.f':'d.0.e'}} )" );
-bad( "t.update( {}, {$rename:{'f':'d.0.e'}} )" );
-bad( "t.update( {}, {$rename:{'f.g':'a'}} )" );
-bad( "t.update( {}, {$rename:{'a':'f.g'}} )" );
+t.save({a: [1], b: {c: [2]}, d: [{e: 3}], f: 4});
+bad("t.update( {}, {$rename:{'a.0':'f'}} )");
+bad("t.update( {}, {$rename:{'a.0':'g'}} )");
+bad("t.update( {}, {$rename:{'f':'a.0'}} )");
+bad("t.update( {}, {$rename:{'b.c.0':'f'}} )");
+bad("t.update( {}, {$rename:{'f':'b.c.0'}} )");
+bad("t.update( {}, {$rename:{'d.e':'d.f'}} )");
+bad("t.update( {}, {$rename:{'d.e':'f'}} )");
+bad("t.update( {}, {$rename:{'d.f':'d.e'}} )");
+bad("t.update( {}, {$rename:{'f':'d.e'}} )");
+bad("t.update( {}, {$rename:{'d.0.e':'d.f'}} )");
+bad("t.update( {}, {$rename:{'d.0.e':'f'}} )");
+bad("t.update( {}, {$rename:{'d.f':'d.0.e'}} )");
+bad("t.update( {}, {$rename:{'f':'d.0.e'}} )");
+bad("t.update( {}, {$rename:{'f.g':'a'}} )");
+bad("t.update( {}, {$rename:{'a':'f.g'}} )");
-function good( start, mod, expected ) {
+function good(start, mod, expected) {
t.remove({});
- t.save( start );
- var res = t.update( {}, mod );
- assert.writeOK( res );
+ t.save(start);
+ var res = t.update({}, mod);
+ assert.writeOK(res);
var got = t.findOne();
delete got._id;
- assert.docEq( expected, got );
+ assert.docEq(expected, got);
}
-good( {a:1}, {$rename:{a:'b'}}, {b:1} );
-good( {a:1}, {$rename:{a:'bb'}}, {bb:1} );
-good( {b:1}, {$rename:{b:'a'}}, {a:1} );
-good( {bb:1}, {$rename:{bb:'a'}}, {a:1} );
-good( {a:{y:1}}, {$rename:{'a.y':'a.z'}}, {a:{z:1}} );
-good( {a:{yy:1}}, {$rename:{'a.yy':'a.z'}}, {a:{z:1}} );
-good( {a:{z:1}}, {$rename:{'a.z':'a.y'}}, {a:{y:1}} );
-good( {a:{zz:1}}, {$rename:{'a.zz':'a.y'}}, {a:{y:1}} );
-good( {a:{c:1}}, {$rename:{a:'b'}}, {b:{c:1}} );
-good( {aa:{c:1}}, {$rename:{aa:'b'}}, {b:{c:1}} );
-good( {a:1,b:2}, {$rename:{a:'b'}}, {b:1} );
-good( {aa:1,b:2}, {$rename:{aa:'b'}}, {b:1} );
-good( {a:1,bb:2}, {$rename:{a:'bb'}}, {bb:1} );
-good( {a:1}, {$rename:{a:'b.c'}}, {b:{c:1}} );
-good( {aa:1}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
-good( {a:1,b:{}}, {$rename:{a:'b.c'}}, {b:{c:1}} );
-good( {aa:1,b:{}}, {$rename:{aa:'b.c'}}, {b:{c:1}} );
-good( {a:1}, {$rename:{b:'c'}}, {a:1} );
-good( {aa:1}, {$rename:{b:'c'}}, {aa:1} );
-good( {}, {$rename:{b:'c'}}, {} );
-good( {a:{b:1,c:2}}, {$rename:{'a.b':'d'}}, {a:{c:2},d:1} );
-good( {a:{bb:1,c:2}}, {$rename:{'a.bb':'d'}}, {a:{c:2},d:1} );
-good( {a:{b:1}}, {$rename:{'a.b':'d'}}, {a:{},d:1} );
-good( {a:[5]}, {$rename:{a:'b'}}, {b:[5]} );
-good( {aa:[5]}, {$rename:{aa:'b'}}, {b:[5]} );
-good( {'0':1}, {$rename:{'0':'5'}}, {'5':1} );
-good( {a:1,b:2}, {$rename:{a:'c'},$set:{b:5}}, {b:5,c:1} );
-good( {aa:1,b:2}, {$rename:{aa:'c'},$set:{b:5}}, {b:5,c:1} );
-good( {a:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {a:1,b:5} );
-good( {aa:1,b:2}, {$rename:{z:'c'},$set:{b:5}}, {aa:1,b:5} );
+good({a: 1}, {$rename: {a: 'b'}}, {b: 1});
+good({a: 1}, {$rename: {a: 'bb'}}, {bb: 1});
+good({b: 1}, {$rename: {b: 'a'}}, {a: 1});
+good({bb: 1}, {$rename: {bb: 'a'}}, {a: 1});
+good({a: {y: 1}}, {$rename: {'a.y': 'a.z'}}, {a: {z: 1}});
+good({a: {yy: 1}}, {$rename: {'a.yy': 'a.z'}}, {a: {z: 1}});
+good({a: {z: 1}}, {$rename: {'a.z': 'a.y'}}, {a: {y: 1}});
+good({a: {zz: 1}}, {$rename: {'a.zz': 'a.y'}}, {a: {y: 1}});
+good({a: {c: 1}}, {$rename: {a: 'b'}}, {b: {c: 1}});
+good({aa: {c: 1}}, {$rename: {aa: 'b'}}, {b: {c: 1}});
+good({a: 1, b: 2}, {$rename: {a: 'b'}}, {b: 1});
+good({aa: 1, b: 2}, {$rename: {aa: 'b'}}, {b: 1});
+good({a: 1, bb: 2}, {$rename: {a: 'bb'}}, {bb: 1});
+good({a: 1}, {$rename: {a: 'b.c'}}, {b: {c: 1}});
+good({aa: 1}, {$rename: {aa: 'b.c'}}, {b: {c: 1}});
+good({a: 1, b: {}}, {$rename: {a: 'b.c'}}, {b: {c: 1}});
+good({aa: 1, b: {}}, {$rename: {aa: 'b.c'}}, {b: {c: 1}});
+good({a: 1}, {$rename: {b: 'c'}}, {a: 1});
+good({aa: 1}, {$rename: {b: 'c'}}, {aa: 1});
+good({}, {$rename: {b: 'c'}}, {});
+good({a: {b: 1, c: 2}}, {$rename: {'a.b': 'd'}}, {a: {c: 2}, d: 1});
+good({a: {bb: 1, c: 2}}, {$rename: {'a.bb': 'd'}}, {a: {c: 2}, d: 1});
+good({a: {b: 1}}, {$rename: {'a.b': 'd'}}, {a: {}, d: 1});
+good({a: [5]}, {$rename: {a: 'b'}}, {b: [5]});
+good({aa: [5]}, {$rename: {aa: 'b'}}, {b: [5]});
+good({'0': 1}, {$rename: {'0': '5'}}, {'5': 1});
+good({a: 1, b: 2}, {$rename: {a: 'c'}, $set: {b: 5}}, {b: 5, c: 1});
+good({aa: 1, b: 2}, {$rename: {aa: 'c'}, $set: {b: 5}}, {b: 5, c: 1});
+good({a: 1, b: 2}, {$rename: {z: 'c'}, $set: {b: 5}}, {a: 1, b: 5});
+good({aa: 1, b: 2}, {$rename: {z: 'c'}, $set: {b: 5}}, {aa: 1, b: 5});
// (formerly) rewriting single field
-good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
-good( {a:{z:1,tomato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
-good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'a.c'}}, {a:{c:1,z:1}} );
-good( {a:{z:1,tomato:1,potato:1}}, {$rename:{'a.tomato':'a.potato'}}, {a:{potato:1,z:1}} );
-good( {a:{z:1,b:1}}, {$rename:{'a.b':'a.cc'}}, {a:{cc:1,z:1}} );
-good( {a:{z:1,b:1,c:1}}, {$rename:{'a.b':'aa.c'}}, {a:{c:1,z:1},aa:{c:1}} );
+good({a: {z: 1, b: 1}}, {$rename: {'a.b': 'a.c'}}, {a: {c: 1, z: 1}});
+good({a: {z: 1, tomato: 1}}, {$rename: {'a.tomato': 'a.potato'}}, {a: {potato: 1, z: 1}});
+good({a: {z: 1, b: 1, c: 1}}, {$rename: {'a.b': 'a.c'}}, {a: {c: 1, z: 1}});
+good({a: {z: 1, tomato: 1, potato: 1}},
+ {$rename: {'a.tomato': 'a.potato'}},
+ {a: {potato: 1, z: 1}});
+good({a: {z: 1, b: 1}}, {$rename: {'a.b': 'a.cc'}}, {a: {cc: 1, z: 1}});
+good({a: {z: 1, b: 1, c: 1}}, {$rename: {'a.b': 'aa.c'}}, {a: {c: 1, z: 1}, aa: {c: 1}});
// invalid target, but missing source
-good( {a:1,c:4}, {$rename:{b:'c.d'}}, {a:1,c:4} );
+good({a: 1, c: 4}, {$rename: {b: 'c.d'}}, {a: 1, c: 4});
// TODO: This should be supported, and it is with the new update framework, but not with the
// old, and we currently don't have a good way to check which mode we are in. When we do have
@@ -119,19 +120,19 @@ good( {a:1,c:4}, {$rename:{b:'c.d'}}, {a:1,c:4} );
// check index
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-function l( start, mod, query, expected ) {
+function l(start, mod, query, expected) {
t.remove({});
- t.save( start );
- var res = t.update( {}, mod );
- assert.writeOK( res );
- var got = t.find( query ).hint( {a:1} ).next();
+ t.save(start);
+ var res = t.update({}, mod);
+ assert.writeOK(res);
+ var got = t.find(query).hint({a: 1}).next();
delete got._id;
- assert.docEq( expected, got );
+ assert.docEq(expected, got);
}
-l( {a:1}, {$rename:{a:'b'}}, {a:null}, {b:1} );
-l( {a:1}, {$rename:{a:'bb'}}, {a:null}, {bb:1} );
-l( {b:1}, {$rename:{b:'a'}}, {a:1}, {a:1} );
-l( {bb:1}, {$rename:{bb:'a'}}, {a:1}, {a:1} );
+l({a: 1}, {$rename: {a: 'b'}}, {a: null}, {b: 1});
+l({a: 1}, {$rename: {a: 'bb'}}, {a: null}, {bb: 1});
+l({b: 1}, {$rename: {b: 'a'}}, {a: 1}, {a: 1});
+l({bb: 1}, {$rename: {bb: 'a'}}, {a: 1}, {a: 1});
diff --git a/jstests/core/rename5.js b/jstests/core/rename5.js
index 927c767b981..313b520ed7d 100644
--- a/jstests/core/rename5.js
+++ b/jstests/core/rename5.js
@@ -3,44 +3,44 @@
t = db.jstests_rename5;
t.drop();
-t.ensureIndex( { a:1 } );
-t.save( { b:1 } );
+t.ensureIndex({a: 1});
+t.save({b: 1});
-t.update( {}, { $rename:{ a:'b' } } );
-assert.eq( 1, t.findOne().b );
+t.update({}, {$rename: {a: 'b'}});
+assert.eq(1, t.findOne().b);
// Test with another modifier.
-t.update( {}, { $rename:{ a:'b' }, $set:{ x:1 } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().x );
+t.update({}, {$rename: {a: 'b'}, $set: {x: 1}});
+assert.eq(1, t.findOne().b);
+assert.eq(1, t.findOne().x);
// Test with an in place modifier.
-t.update( {}, { $rename:{ a:'b' }, $inc:{ x:1 } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 2, t.findOne().x );
+t.update({}, {$rename: {a: 'b'}, $inc: {x: 1}});
+assert.eq(1, t.findOne().b);
+assert.eq(2, t.findOne().x);
// Check similar cases with upserts.
t.drop();
t.remove({});
-t.update( { b:1 }, { $rename:{ a:'b' } }, true );
-assert.eq( 1, t.findOne().b );
+t.update({b: 1}, {$rename: {a: 'b'}}, true);
+assert.eq(1, t.findOne().b);
t.remove({});
-t.update( { b:1 }, { $rename:{ a:'b' }, $set:{ c:1 } }, true );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().c );
+t.update({b: 1}, {$rename: {a: 'b'}, $set: {c: 1}}, true);
+assert.eq(1, t.findOne().b);
+assert.eq(1, t.findOne().c);
t.remove({});
-t.update( { b:1, c:2 }, { $rename:{ a:'b' }, $inc:{ c:1 } }, true );
-assert.eq( 1, t.findOne().b );
-assert.eq( 3, t.findOne().c );
+t.update({b: 1, c: 2}, {$rename: {a: 'b'}, $inc: {c: 1}}, true);
+assert.eq(1, t.findOne().b);
+assert.eq(3, t.findOne().c);
// Check a similar case with multiple renames of an unindexed document.
t.drop();
-t.save( { b:1, x:1 } );
-t.update( {}, { $rename: { a:'b', x:'y' } } );
-assert.eq( 1, t.findOne().b );
-assert.eq( 1, t.findOne().y );
-assert( !t.findOne().x );
+t.save({b: 1, x: 1});
+t.update({}, {$rename: {a: 'b', x: 'y'}});
+assert.eq(1, t.findOne().b);
+assert.eq(1, t.findOne().y);
+assert(!t.findOne().x);
diff --git a/jstests/core/rename6.js b/jstests/core/rename6.js
index 5e77b4c45a6..159e0e7d1b5 100644
--- a/jstests/core/rename6.js
+++ b/jstests/core/rename6.js
@@ -8,17 +8,19 @@ c = "rename2c";
dbc = testDB.getCollection(c);
d = "dest4567890123456789012345678901234567890123456789012345678901234567890";
dbd = testDB.getCollection(d);
-dbc.ensureIndex({ "name" : 1,
- "date" : 1,
- "time" : 1,
- "renameCollection" : 1,
- "mongodb" : 1,
- "testing" : 1,
- "data" : 1});
-//Checking for the newly created index and the _id index in original collection
+dbc.ensureIndex({
+ "name": 1,
+ "date": 1,
+ "time": 1,
+ "renameCollection": 1,
+ "mongodb": 1,
+ "testing": 1,
+ "data": 1
+});
+// Checking for the newly created index and the _id index in original collection
assert.eq(2, dbc.getIndexes().length, "Long Rename Init");
-//Should fail to rename collection as the index namespace is too long
-assert.commandFailed( dbc.renameCollection( dbd ) , "Long Rename Exec" );
-//Since we failed we should have the 2 indexes unmoved and no indexes under the new collection name
+// Should fail to rename collection as the index namespace is too long
+assert.commandFailed(dbc.renameCollection(dbd), "Long Rename Exec");
+// Since we failed, we should have the 2 indexes unmoved and no indexes under the new collection name
assert.eq(2, dbc.getIndexes().length, "Long Rename Result 1");
assert.eq(0, dbd.getIndexes().length, "Long Rename Result 2");
diff --git a/jstests/core/rename7.js b/jstests/core/rename7.js
index 4b9258fd671..85b48c64ce7 100644
--- a/jstests/core/rename7.js
+++ b/jstests/core/rename7.js
@@ -4,9 +4,9 @@
// ***************************************************************
// Set up namespaces a and b.
-var admin = db.getMongo().getDB( "admin" );
-var db_a = db.getMongo().getDB( "db_a" );
-var db_b = db.getMongo().getDB( "db_b" );
+var admin = db.getMongo().getDB("admin");
+var db_a = db.getMongo().getDB("db_a");
+var db_b = db.getMongo().getDB("db_b");
var a = db_a.rename7;
var b = db_b.rename7;
@@ -19,54 +19,54 @@ a.drop();
b.drop();
// Put some documents and indexes in a.
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.save( {a: 3} );
-a.ensureIndex( {a: 1} );
-a.ensureIndex( {b: 1} );
+a.save({a: 1});
+a.save({a: 2});
+a.save({a: 3});
+a.ensureIndex({a: 1});
+a.ensureIndex({b: 1});
-assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7", to: "db_b.rename7"} ) );
+assert.commandWorked(admin.runCommand({renameCollection: "db_a.rename7", to: "db_b.rename7"}));
-assert.eq( 0, a.find().count() );
-assert( db_a.getCollectionNames().indexOf( "rename7" ) < 0 );
+assert.eq(0, a.find().count());
+assert(db_a.getCollectionNames().indexOf("rename7") < 0);
-assert.eq( 3, b.find().count() );
-assert( db_b.getCollectionNames().indexOf( "rename7" ) >= 0 );
+assert.eq(3, b.find().count());
+assert(db_b.getCollectionNames().indexOf("rename7") >= 0);
a.drop();
b.drop();
// Test that the dropTarget option works when renaming across databases.
-a.save( {} );
-b.save( {} );
-assert.commandFailed( admin.runCommand( {renameCollection: "db_a.rename7", to: "db_b.rename7"} ) );
-assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7",
- to: "db_b.rename7", dropTarget: true} ) );
+a.save({});
+b.save({});
+assert.commandFailed(admin.runCommand({renameCollection: "db_a.rename7", to: "db_b.rename7"}));
+assert.commandWorked(
+ admin.runCommand({renameCollection: "db_a.rename7", to: "db_b.rename7", dropTarget: true}));
a.drop();
b.drop();
// Capped collection testing.
-db_a.createCollection( "rename7_capped", {capped:true, size:10000} );
+db_a.createCollection("rename7_capped", {capped: true, size: 10000});
a = db_a.rename7_capped;
b = db_b.rename7_capped;
-a.save( {a: 1} );
-a.save( {a: 2} );
-a.save( {a: 3} );
+a.save({a: 1});
+a.save({a: 2});
+a.save({a: 3});
previousMaxSize = a.stats().maxSize;
-assert.commandWorked( admin.runCommand( {renameCollection: "db_a.rename7_capped",
- to: "db_b.rename7_capped"} ) );
+assert.commandWorked(
+ admin.runCommand({renameCollection: "db_a.rename7_capped", to: "db_b.rename7_capped"}));
-assert.eq( 0, a.find().count() );
-assert( db_a.getCollectionNames().indexOf( "rename7_capped" ) < 0 );
+assert.eq(0, a.find().count());
+assert(db_a.getCollectionNames().indexOf("rename7_capped") < 0);
-assert.eq( 3, b.find().count() );
-assert( db_b.getCollectionNames().indexOf( "rename7_capped" ) >= 0 );
-printjson( db_b.rename7_capped.stats() );
-assert( db_b.rename7_capped.stats().capped );
-assert.eq( previousMaxSize, b.stats().maxSize );
+assert.eq(3, b.find().count());
+assert(db_b.getCollectionNames().indexOf("rename7_capped") >= 0);
+printjson(db_b.rename7_capped.stats());
+assert(db_b.rename7_capped.stats().capped);
+assert.eq(previousMaxSize, b.stats().maxSize);
a.drop();
b.drop();
diff --git a/jstests/core/rename8.js b/jstests/core/rename8.js
index 8b955824ea8..af332e30239 100644
--- a/jstests/core/rename8.js
+++ b/jstests/core/rename8.js
@@ -1,6 +1,7 @@
// SERVER-12591: prevent renaming to arbitrary system collections.
-var testdb = db.getSiblingDB("rename8"); // to avoid breaking other tests when we touch system.users
+var testdb =
+ db.getSiblingDB("rename8"); // to avoid breaking other tests when we touch system.users
var coll = testdb.rename8;
var systemNamespaces = testdb.system.namespaces;
var systemFoo = testdb.system.foo;
@@ -17,7 +18,7 @@ assert.commandFailed(systemFoo.renameCollection(coll.getName()));
// same with system.namespaces, even though it does exist
assert.commandFailed(coll.renameCollection(systemNamespaces.getName()));
-assert.commandFailed(coll.renameCollection(systemNamespaces.getName(), /*dropTarget*/true));
+assert.commandFailed(coll.renameCollection(systemNamespaces.getName(), /*dropTarget*/ true));
assert.commandFailed(systemNamespaces.renameCollection(coll.getName()));
// system.users is whitelisted so these should work
diff --git a/jstests/core/rename_stayTemp.js b/jstests/core/rename_stayTemp.js
index ccada6abf39..d8451af2d2d 100644
--- a/jstests/core/rename_stayTemp.js
+++ b/jstests/core/rename_stayTemp.js
@@ -4,17 +4,19 @@ dest = 'rename_stayTemp_dest';
db[orig].drop();
db[dest].drop();
-function ns(coll){ return db[coll].getFullName(); }
+function ns(coll) {
+ return db[coll].getFullName();
+}
-function istemp( name ) {
- var result = db.runCommand( "listCollections", { filter : { name : name } } );
- assert( result.ok );
- var collections = new DBCommandCursor( db.getMongo(), result ).toArray();
- assert.eq( 1, collections.length );
+function istemp(name) {
+ var result = db.runCommand("listCollections", {filter: {name: name}});
+ assert(result.ok);
+ var collections = new DBCommandCursor(db.getMongo(), result).toArray();
+ assert.eq(1, collections.length);
return collections[0].options.temp ? true : false;
}
-db.runCommand({create: orig, temp:1});
+db.runCommand({create: orig, temp: 1});
assert(istemp(orig));
db.adminCommand({renameCollection: ns(orig), to: ns(dest)});
@@ -22,11 +24,8 @@ assert(!istemp(dest));
db[dest].drop();
-db.runCommand({create: orig, temp:1});
-assert( istemp(orig) );
+db.runCommand({create: orig, temp: 1});
+assert(istemp(orig));
db.adminCommand({renameCollection: ns(orig), to: ns(dest), stayTemp: true});
-assert( istemp(dest) );
-
-
-
+assert(istemp(dest));
diff --git a/jstests/core/repair_database.js b/jstests/core/repair_database.js
index c7ac82320c7..45f936b022a 100644
--- a/jstests/core/repair_database.js
+++ b/jstests/core/repair_database.js
@@ -7,23 +7,26 @@
*/
// 1. Drop db
-var mydb = db.getSisterDB( "repairDB" );
+var mydb = db.getSisterDB("repairDB");
mydb.dropDatabase();
var myColl = mydb.a;
// 2
-var doc = {_id:1, a:"hello world"};
+var doc = {
+ _id: 1,
+ a: "hello world"
+};
myColl.insert(doc);
-myColl.ensureIndex({a:1});
+myColl.ensureIndex({a: 1});
mydb.repairDatabase();
var foundDoc = myColl.findOne();
assert.neq(null, foundDoc);
assert.eq(1, foundDoc._id);
-assert.docEq(doc, myColl.findOne({a:doc.a}));
-assert.docEq(doc, myColl.findOne({_id:1}));
+assert.docEq(doc, myColl.findOne({a: doc.a}));
+assert.docEq(doc, myColl.findOne({_id: 1}));
// 3
var myColl2 = mydb.b;
@@ -35,5 +38,5 @@ var myColl2 = mydb.b;
myColl.insert(doc);
myColl2.insert(doc);
mydb.repairDatabase();
-assert.docEq(doc, myColl.findOne({a:doc.a}));
-assert.docEq(doc, myColl2.findOne({a:doc.a}));
+assert.docEq(doc, myColl.findOne({a: doc.a}));
+assert.docEq(doc, myColl2.findOne({a: doc.a}));
diff --git a/jstests/core/repair_server12955.js b/jstests/core/repair_server12955.js
index 65c9b5f241a..ce0ffa9d11e 100644
--- a/jstests/core/repair_server12955.js
+++ b/jstests/core/repair_server12955.js
@@ -1,9 +1,9 @@
-mydb = db.getSisterDB( "repair_server12955" );
+mydb = db.getSisterDB("repair_server12955");
mydb.dropDatabase();
-mydb.foo.ensureIndex({a:"text"});
-mydb.foo.insert({a:"hello world"});
+mydb.foo.ensureIndex({a: "text"});
+mydb.foo.insert({a: "hello world"});
before = mydb.stats().dataFileVersion;
@@ -11,5 +11,5 @@ mydb.repairDatabase();
after = mydb.stats().dataFileVersion;
-assert.eq( before, after );
+assert.eq(before, after);
mydb.dropDatabase();
diff --git a/jstests/core/return_key.js b/jstests/core/return_key.js
index cf02357759d..b39764846d4 100644
--- a/jstests/core/return_key.js
+++ b/jstests/core/return_key.js
@@ -58,19 +58,25 @@ load("jstests/libs/analyze_plan.js");
assert(isIndexOnly(explain.queryPlanner.winningPlan));
// Unlike other projections, sortKey meta-projection can co-exist with returnKey.
- results = coll.find({}, {c: {$meta: 'sortKey'}})
- .hint({a: 1}).sort({a: -1}).returnKey().toArray();
+ results =
+ coll.find({}, {c: {$meta: 'sortKey'}}).hint({a: 1}).sort({a: -1}).returnKey().toArray();
assert.eq(results, [{a: 3, c: {'': 3}}, {a: 2, c: {'': 2}}, {a: 1, c: {'': 1}}]);
// returnKey with sortKey $meta where there is an in-memory sort.
- results = coll.find({}, {c: {$meta: 'sortKey'}})
- .hint({a: 1}).sort({b: 1}).returnKey().toArray();
+ results =
+ coll.find({}, {c: {$meta: 'sortKey'}}).hint({a: 1}).sort({b: 1}).returnKey().toArray();
assert.eq(results, [{a: 3, c: {'': 1}}, {a: 2, c: {'': 2}}, {a: 1, c: {'': 3}}]);
// returnKey with multiple sortKey $meta projections.
results = coll.find({}, {c: {$meta: 'sortKey'}, d: {$meta: 'sortKey'}})
- .hint({a: 1}).sort({b: 1}).returnKey().toArray();
- assert.eq(results, [{a: 3, c: {'': 1}, d: {'': 1}},
- {a: 2, c: {'': 2}, d: {'': 2}},
- {a: 1, c: {'': 3}, d: {'': 3}}]);
+ .hint({a: 1})
+ .sort({b: 1})
+ .returnKey()
+ .toArray();
+ assert.eq(results,
+ [
+ {a: 3, c: {'': 1}, d: {'': 1}},
+ {a: 2, c: {'': 2}, d: {'': 2}},
+ {a: 1, c: {'': 3}, d: {'': 3}}
+ ]);
})();
diff --git a/jstests/core/role_management_helpers.js b/jstests/core/role_management_helpers.js
index 9c24f552f84..fa25d8a2d57 100644
--- a/jstests/core/role_management_helpers.js
+++ b/jstests/core/role_management_helpers.js
@@ -26,112 +26,126 @@ function assertHasPrivilege(privilegeArray, privilege) {
return;
}
}
- assert(false, "Privilege " + tojson(privilege) + " not found in privilege array: " +
- tojson(privilegeArray));
+ assert(false,
+ "Privilege " + tojson(privilege) + " not found in privilege array: " +
+ tojson(privilegeArray));
}
(function(db) {
- var db = db.getSiblingDB("role_management_helpers");
- db.dropDatabase();
- db.dropAllRoles();
-
- db.createRole({role:'roleA',
- roles: [],
- privileges: [{resource: {db:db.getName(), collection: "foo"},
- actions: ['find']}]});
- db.createRole({role:'roleB', privileges: [], roles: ["roleA"]});
- db.createRole({role:'roleC', privileges: [], roles: []});
-
- // Test getRole
- var roleObj = db.getRole("roleA");
- assert.eq(0, roleObj.roles.length);
- assert.eq(null, roleObj.privileges);
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(1, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(1, roleObj.inheritedPrivileges.length); // inherited from roleA
- assertHasPrivilege(roleObj.inheritedPrivileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleA", db.getName());
-
- // Test getRoles
- var roles = db.getRoles();
- assert.eq(3, roles.length);
- printjson(roles);
- assert(roles[0].role == 'roleA' || roles[1].role == 'roleA' || roles[2].role == 'roleA');
- assert(roles[0].role == 'roleB' || roles[1].role == 'roleB' || roles[2].role == 'roleB');
- assert(roles[0].role == 'roleC' || roles[1].role == 'roleC' || roles[2].role == 'roleC');
- assert.eq(null, roles[0].inheritedPrivileges);
- var roles = db.getRoles({showPrivileges: true, showBuiltinRoles: true});
- assert.eq(9, roles.length);
- assert.neq(null, roles[0].inheritedPrivileges);
-
-
- // Granting roles to nonexistent role fails
- assert.throws(function() { db.grantRolesToRole("fakeRole", ['dbAdmin']); });
- // Granting roles to built-in role fails
- assert.throws(function() { db.grantRolesToRole("readWrite", ['dbAdmin']); });
- // Granting non-existant role fails
- assert.throws(function() { db.grantRolesToRole("roleB", ['dbAdmin', 'fakeRole']); });
-
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(1, roleObj.inheritedPrivileges.length);
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleA", db.getName());
-
- // Granting a role you already have is no problem
- db.grantRolesToRole("roleB", ['readWrite', 'roleC']);
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.gt(roleObj.inheritedPrivileges.length, 1); // Got privileges from readWrite role
- assert.eq(3, roleObj.roles.length);
- assertHasRole(roleObj.roles, "readWrite", db.getName());
- assertHasRole(roleObj.roles, "roleA", db.getName());
- assertHasRole(roleObj.roles, "roleC", db.getName());
-
- // Revoking roles the role doesn't have is fine
- db.revokeRolesFromRole("roleB", ['roleA', 'readWrite', 'dbAdmin']);
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(0, roleObj.inheritedPrivileges.length);
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleC", db.getName());
-
- // Privileges on the same resource get collapsed
- db.grantPrivilegesToRole("roleA",
- [{resource: {db:db.getName(), collection:""}, actions:['dropDatabase']},
- {resource: {db:db.getName(), collection:"foo"}, actions:['insert']}]);
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(0, roleObj.roles.length);
- assert.eq(2, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find', 'insert']});
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:""}, actions:['dropDatabase']});
-
- // Update role
- db.updateRole("roleA", {roles:['roleB'],
- privileges:[{resource: {db: db.getName(), collection:"foo"},
- actions:['find']}]});
- roleObj = db.getRole("roleA", {showPrivileges: true});
- assert.eq(1, roleObj.roles.length);
- assertHasRole(roleObj.roles, "roleB", db.getName());
- assert.eq(1, roleObj.privileges.length);
- assertHasPrivilege(roleObj.privileges,
- {resource: {db:db.getName(), collection:"foo"}, actions:['find']});
-
- // Test dropRole
- db.dropRole('roleC');
- assert.eq(null, db.getRole('roleC'));
- roleObj = db.getRole("roleB", {showPrivileges: true});
- assert.eq(0, roleObj.privileges.length);
- assert.eq(0, roleObj.roles.length);
-
- // Test dropAllRoles
- db.dropAllRoles();
- assert.eq(null, db.getRole('roleA'));
- assert.eq(null, db.getRole('roleB'));
- assert.eq(null, db.getRole('roleC'));
+ var db = db.getSiblingDB("role_management_helpers");
+ db.dropDatabase();
+ db.dropAllRoles();
+
+ db.createRole({
+ role: 'roleA',
+ roles: [],
+ privileges: [{resource: {db: db.getName(), collection: "foo"}, actions: ['find']}]
+ });
+ db.createRole({role: 'roleB', privileges: [], roles: ["roleA"]});
+ db.createRole({role: 'roleC', privileges: [], roles: []});
+
+ // Test getRole
+ var roleObj = db.getRole("roleA");
+ assert.eq(0, roleObj.roles.length);
+ assert.eq(null, roleObj.privileges);
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(1, roleObj.privileges.length);
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db: db.getName(), collection: "foo"}, actions: ['find']});
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(1, roleObj.inheritedPrivileges.length); // inherited from roleA
+ assertHasPrivilege(roleObj.inheritedPrivileges,
+ {resource: {db: db.getName(), collection: "foo"}, actions: ['find']});
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+
+ // Test getRoles
+ var roles = db.getRoles();
+ assert.eq(3, roles.length);
+ printjson(roles);
+ assert(roles[0].role == 'roleA' || roles[1].role == 'roleA' || roles[2].role == 'roleA');
+ assert(roles[0].role == 'roleB' || roles[1].role == 'roleB' || roles[2].role == 'roleB');
+ assert(roles[0].role == 'roleC' || roles[1].role == 'roleC' || roles[2].role == 'roleC');
+ assert.eq(null, roles[0].inheritedPrivileges);
+ var roles = db.getRoles({showPrivileges: true, showBuiltinRoles: true});
+ assert.eq(9, roles.length);
+ assert.neq(null, roles[0].inheritedPrivileges);
+
+ // Granting roles to nonexistent role fails
+ assert.throws(function() {
+ db.grantRolesToRole("fakeRole", ['dbAdmin']);
+ });
+ // Granting roles to built-in role fails
+ assert.throws(function() {
+ db.grantRolesToRole("readWrite", ['dbAdmin']);
+ });
+    // Granting a nonexistent role fails
+ assert.throws(function() {
+ db.grantRolesToRole("roleB", ['dbAdmin', 'fakeRole']);
+ });
+
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(1, roleObj.inheritedPrivileges.length);
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+
+ // Granting a role you already have is no problem
+ db.grantRolesToRole("roleB", ['readWrite', 'roleC']);
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.gt(roleObj.inheritedPrivileges.length, 1); // Got privileges from readWrite role
+ assert.eq(3, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "readWrite", db.getName());
+ assertHasRole(roleObj.roles, "roleA", db.getName());
+ assertHasRole(roleObj.roles, "roleC", db.getName());
+
+ // Revoking roles the role doesn't have is fine
+ db.revokeRolesFromRole("roleB", ['roleA', 'readWrite', 'dbAdmin']);
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(0, roleObj.inheritedPrivileges.length);
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleC", db.getName());
+
+ // Privileges on the same resource get collapsed
+ db.grantPrivilegesToRole(
+ "roleA",
+ [
+ {resource: {db: db.getName(), collection: ""}, actions: ['dropDatabase']},
+ {resource: {db: db.getName(), collection: "foo"}, actions: ['insert']}
+ ]);
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(0, roleObj.roles.length);
+ assert.eq(2, roleObj.privileges.length);
+ assertHasPrivilege(
+ roleObj.privileges,
+ {resource: {db: db.getName(), collection: "foo"}, actions: ['find', 'insert']});
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db: db.getName(), collection: ""}, actions: ['dropDatabase']});
+
+ // Update role
+ db.updateRole(
+ "roleA",
+ {
+ roles: ['roleB'],
+ privileges: [{resource: {db: db.getName(), collection: "foo"}, actions: ['find']}]
+ });
+ roleObj = db.getRole("roleA", {showPrivileges: true});
+ assert.eq(1, roleObj.roles.length);
+ assertHasRole(roleObj.roles, "roleB", db.getName());
+ assert.eq(1, roleObj.privileges.length);
+ assertHasPrivilege(roleObj.privileges,
+ {resource: {db: db.getName(), collection: "foo"}, actions: ['find']});
+
+ // Test dropRole
+ db.dropRole('roleC');
+ assert.eq(null, db.getRole('roleC'));
+ roleObj = db.getRole("roleB", {showPrivileges: true});
+ assert.eq(0, roleObj.privileges.length);
+ assert.eq(0, roleObj.roles.length);
+
+ // Test dropAllRoles
+ db.dropAllRoles();
+ assert.eq(null, db.getRole('roleA'));
+ assert.eq(null, db.getRole('roleB'));
+ assert.eq(null, db.getRole('roleC'));
}(db));
diff --git a/jstests/core/run_program1.js b/jstests/core/run_program1.js
index 871b53c8ddd..e5f320b0bf4 100644
--- a/jstests/core/run_program1.js
+++ b/jstests/core/run_program1.js
@@ -1,19 +1,19 @@
-if ( ! _isWindows() ) {
-
+if (!_isWindows()) {
// note that normal program exit returns 0
- assert.eq (0, runProgram('true'));
+ assert.eq(0, runProgram('true'));
assert.neq(0, runProgram('false'));
assert.neq(0, runProgram('this_program_doesnt_exit'));
- //verify output visually
+ // verify output visually
runProgram('echo', 'Hello', 'World.', 'How are you?');
- runProgram('bash', '-c', 'echo Hello World. "How are you?"'); // only one space is printed between Hello and World
+ runProgram('bash', '-c', 'echo Hello World. "How are you?"'); // only one space is
+ // printed between Hello
+ // and World
// numbers can be passed as numbers or strings
runProgram('sleep', 0.5);
runProgram('sleep', '0.5');
} else {
-
runProgram('cmd', '/c', 'echo hello windows');
}
diff --git a/jstests/core/server1470.js b/jstests/core/server1470.js
index 3ab39bb3c38..42587961665 100644
--- a/jstests/core/server1470.js
+++ b/jstests/core/server1470.js
@@ -2,19 +2,17 @@
t = db.server1470;
t.drop();
-q = { "name" : "first" , "pic" : { "$ref" : "foo", "$id" : ObjectId("4c48d04cd33a5a92628c9af6") } };
-t.update( q , {$set:{ x : 1 } } , true, true );
+q = {
+ "name": "first",
+ "pic": {"$ref": "foo", "$id": ObjectId("4c48d04cd33a5a92628c9af6")}
+};
+t.update(q, {$set: {x: 1}}, true, true);
ref = t.findOne().pic;
-assert.eq( "object", typeof( ref ) );
-assert.eq( q.pic["$ref"] , ref["$ref"] );
-assert.eq( q.pic["$id"] , ref["$id"] );
+assert.eq("object", typeof(ref));
+assert.eq(q.pic["$ref"], ref["$ref"]);
+assert.eq(q.pic["$id"], ref["$id"]);
// just make sure we haven't broken other update operators
t.drop();
-t.update( { _id : 1 , x : { $gt : 5 } } , { $set : { y : 1 } } , true );
-assert.eq( { _id : 1 , y : 1 } , t.findOne() );
-
-
-
-
-
+t.update({_id: 1, x: {$gt: 5}}, {$set: {y: 1}}, true);
+assert.eq({_id: 1, y: 1}, t.findOne());
diff --git a/jstests/core/server14753.js b/jstests/core/server14753.js
index 81d865996db..cd6ea309399 100644
--- a/jstests/core/server14753.js
+++ b/jstests/core/server14753.js
@@ -9,7 +9,11 @@
t.drop();
t.ensureIndex({a: 1});
t.ensureIndex({b: 1});
- for (var i = 0; i < 20; i++) { t.insert({b: i}); }
- for (var i = 0; i < 20; i++) { t.find({b: 1}).sort({a: 1}).next(); }
+ for (var i = 0; i < 20; i++) {
+ t.insert({b: i});
+ }
+ for (var i = 0; i < 20; i++) {
+ t.find({b: 1}).sort({a: 1}).next();
+ }
}());
diff --git a/jstests/core/server5346.js b/jstests/core/server5346.js
index f627d0e68bd..18f2f019e5e 100644
--- a/jstests/core/server5346.js
+++ b/jstests/core/server5346.js
@@ -2,14 +2,16 @@
t = db.server5346;
t.drop();
-x = { _id : 1 , versions : {} };
-t.insert( x );
+x = {
+ _id: 1,
+ versions: {}
+};
+t.insert(x);
-t.update({ _id : 1 }, { $inc : { "versions.2_01" : 1 } } );
-t.update({ _id : 1 }, { $inc : { "versions.2_1" : 2 } } );
-t.update({ _id : 1 }, { $inc : { "versions.01" : 3 } } );
-t.update({ _id : 1 }, { $inc : { "versions.1" : 4 } } );
+t.update({_id: 1}, {$inc: {"versions.2_01": 1}});
+t.update({_id: 1}, {$inc: {"versions.2_1": 2}});
+t.update({_id: 1}, {$inc: {"versions.01": 3}});
+t.update({_id: 1}, {$inc: {"versions.1": 4}});
// Make sure the correct fields are set, without duplicates.
-assert.docEq( { "_id" : 1, "versions" : { "01" : 3, "1" : 4, "2_01" : 1, "2_1" : 2 } },
- t.findOne());
+assert.docEq({"_id": 1, "versions": {"01": 3, "1": 4, "2_01": 1, "2_1": 2}}, t.findOne());
diff --git a/jstests/core/server7756.js b/jstests/core/server7756.js
index 5a7177ebcc9..844c3a40d4d 100644
--- a/jstests/core/server7756.js
+++ b/jstests/core/server7756.js
@@ -2,11 +2,10 @@
t = db.server7756;
t.drop();
-t.save( { a:[ { 1:'x' }, 'y' ] } );
+t.save({a: [{1: 'x'}, 'y']});
-assert.eq( 1, t.count( { 'a.1':'x' } ) );
-assert.eq( 1, t.count( { 'a.1':'y' } ) );
-
-assert.eq( 1, t.count( { 'a.1':/x/ } ) );
-assert.eq( 1, t.count( { 'a.1':/y/ } ) );
+assert.eq(1, t.count({'a.1': 'x'}));
+assert.eq(1, t.count({'a.1': 'y'}));
+assert.eq(1, t.count({'a.1': /x/}));
+assert.eq(1, t.count({'a.1': /y/}));
diff --git a/jstests/core/server9385.js b/jstests/core/server9385.js
index ee86891ce2a..5e2a82a7ef2 100644
--- a/jstests/core/server9385.js
+++ b/jstests/core/server9385.js
@@ -2,15 +2,15 @@
t = db.server9385;
t.drop();
-t.insert( { _id : 1, x : 1 } );
+t.insert({_id: 1, x: 1});
x = t.findOne();
x._id = 2;
-t.save( x );
+t.save(x);
-t.find().forEach( printjson );
+t.find().forEach(printjson);
-assert.eq( 2, t.find().count() );
-assert.eq( 2, t.find().itcount() );
+assert.eq(2, t.find().count());
+assert.eq(2, t.find().itcount());
-assert( t.findOne( { _id : 1 } ), "original insert missing" );
-assert( t.findOne( { _id : 2 } ), "save didn't work?" );
+assert(t.findOne({_id: 1}), "original insert missing");
+assert(t.findOne({_id: 2}), "save didn't work?");
diff --git a/jstests/core/server9547.js b/jstests/core/server9547.js
index 67cacfc22a7..9717893cbfb 100644
--- a/jstests/core/server9547.js
+++ b/jstests/core/server9547.js
@@ -4,7 +4,7 @@
var t = db.server9547;
t.drop();
-for (var i=0; i<10; i++) {
+for (var i = 0; i < 10; i++) {
t.save({a: i});
}
diff --git a/jstests/core/set1.js b/jstests/core/set1.js
index d741387af58..33840e3f431 100644
--- a/jstests/core/set1.js
+++ b/jstests/core/set1.js
@@ -2,8 +2,6 @@
t = db.set1;
t.drop();
-t.insert( { _id : 1, emb : {} });
-t.update( { _id : 1 }, { $set : { emb : { 'a.dot' : 'data'} }});
-assert.eq( { _id : 1 , emb : {} } , t.findOne() , "A" );
-
-
+t.insert({_id: 1, emb: {}});
+t.update({_id: 1}, {$set: {emb: {'a.dot': 'data'}}});
+assert.eq({_id: 1, emb: {}}, t.findOne(), "A");
diff --git a/jstests/core/set2.js b/jstests/core/set2.js
index 221ee407759..c5b6e1c9553 100644
--- a/jstests/core/set2.js
+++ b/jstests/core/set2.js
@@ -2,17 +2,16 @@
t = db.set2;
t.drop();
-t.save( { _id : 1 , x : true , y : { x : true } } );
-assert.eq( true , t.findOne().x );
+t.save({_id: 1, x: true, y: {x: true}});
+assert.eq(true, t.findOne().x);
-t.update( { _id : 1 } , { $set : { x : 17 } } );
-assert.eq( 17 , t.findOne().x );
+t.update({_id: 1}, {$set: {x: 17}});
+assert.eq(17, t.findOne().x);
-assert.eq( true , t.findOne().y.x );
-t.update( { _id : 1 } , { $set : { "y.x" : 17 } } );
-assert.eq( 17 , t.findOne().y.x );
-
-t.update( { _id : 1 } , { $set : { a : 2 , b : 3 } } );
-assert.eq( 2 , t.findOne().a );
-assert.eq( 3 , t.findOne().b );
+assert.eq(true, t.findOne().y.x);
+t.update({_id: 1}, {$set: {"y.x": 17}});
+assert.eq(17, t.findOne().y.x);
+t.update({_id: 1}, {$set: {a: 2, b: 3}});
+assert.eq(2, t.findOne().a);
+assert.eq(3, t.findOne().b);
diff --git a/jstests/core/set3.js b/jstests/core/set3.js
index f654ab64889..8f7d78d894f 100644
--- a/jstests/core/set3.js
+++ b/jstests/core/set3.js
@@ -2,10 +2,9 @@
t = db.set3;
t.drop();
-t.insert( { "test1" : { "test2" : { "abcdefghijklmnopqrstu" : {"id":1} } } } );
-t.update( {}, {"$set":{"test1.test2.abcdefghijklmnopqrstuvwxyz":{"id":2}}});
+t.insert({"test1": {"test2": {"abcdefghijklmnopqrstu": {"id": 1}}}});
+t.update({}, {"$set": {"test1.test2.abcdefghijklmnopqrstuvwxyz": {"id": 2}}});
x = t.findOne();
-assert.eq( 1 , x.test1.test2.abcdefghijklmnopqrstu.id , "A" );
-assert.eq( 2 , x.test1.test2.abcdefghijklmnopqrstuvwxyz.id , "B" );
-
+assert.eq(1, x.test1.test2.abcdefghijklmnopqrstu.id, "A");
+assert.eq(2, x.test1.test2.abcdefghijklmnopqrstuvwxyz.id, "B");
diff --git a/jstests/core/set4.js b/jstests/core/set4.js
index d26a241f322..989cf82b223 100644
--- a/jstests/core/set4.js
+++ b/jstests/core/set4.js
@@ -2,14 +2,20 @@
t = db.set4;
t.drop();
-orig = { _id:1 , a : [ { x : 1 } ]};
-t.insert( orig );
+orig = {
+ _id: 1,
+ a: [{x: 1}]
+};
+t.insert(orig);
-t.update( {}, { $set : { 'a.0.x' : 2, 'foo.bar' : 3 } } );
-orig.a[0].x = 2; orig.foo = { bar : 3 };
-assert.eq( orig , t.findOne() , "A" );
-
-t.update( {}, { $set : { 'a.0.x' : 4, 'foo.bar' : 5 } } );
-orig.a[0].x = 4; orig.foo.bar = 5;
-assert.eq( orig , t.findOne() , "B" );
+t.update({}, {$set: {'a.0.x': 2, 'foo.bar': 3}});
+orig.a[0].x = 2;
+orig.foo = {
+ bar: 3
+};
+assert.eq(orig, t.findOne(), "A");
+t.update({}, {$set: {'a.0.x': 4, 'foo.bar': 5}});
+orig.a[0].x = 4;
+orig.foo.bar = 5;
+assert.eq(orig, t.findOne(), "B");
diff --git a/jstests/core/set5.js b/jstests/core/set5.js
index e24c4fdf6a7..dfa28b1e3e8 100644
--- a/jstests/core/set5.js
+++ b/jstests/core/set5.js
@@ -2,16 +2,16 @@
t = db.set5;
t.drop();
-function check( want , err ){
+function check(want, err) {
var x = t.findOne();
delete x._id;
- assert.docEq( want , x , err );
+ assert.docEq(want, x, err);
}
-t.update( { a : 5 } , { $set : { a : 6 , b : null } } , true );
-check( { a : 6 , b : null } , "A" );
+t.update({a: 5}, {$set: {a: 6, b: null}}, true);
+check({a: 6, b: null}, "A");
t.drop();
-t.update( { z : 5 } , { $set : { z : 6 , b : null } } , true );
-check( { b : null , z : 6 } , "B" );
+t.update({z: 5}, {$set: {z: 6, b: null}}, true);
+check({b: null, z: 6}, "B");
diff --git a/jstests/core/set6.js b/jstests/core/set6.js
index 87a8100d232..bf0ece0dbf8 100644
--- a/jstests/core/set6.js
+++ b/jstests/core/set6.js
@@ -2,19 +2,21 @@
t = db.set6;
t.drop();
-x = { _id : 1 , r : new DBRef( "foo" , new ObjectId() ) };
-t.insert( x );
-assert.eq( x , t.findOne() , "A" );
+x = {
+ _id: 1,
+ r: new DBRef("foo", new ObjectId())
+};
+t.insert(x);
+assert.eq(x, t.findOne(), "A");
x.r.$id = new ObjectId();
-t.update({}, { $set : { r : x.r } } );
-assert.eq( x , t.findOne() , "B");
+t.update({}, {$set: {r: x.r}});
+assert.eq(x, t.findOne(), "B");
-x.r2 = new DBRef( "foo2" , 5 );
-t.update( {} , { $set : { "r2" : x.r2 } } );
-assert.eq( x , t.findOne() , "C" );
+x.r2 = new DBRef("foo2", 5);
+t.update({}, {$set: {"r2": x.r2}});
+assert.eq(x, t.findOne(), "C");
x.r.$id = 2;
-t.update( {} , { $set : { "r.$id" : 2 } } );
-assert.eq( x.r.$id , t.findOne().r.$id , "D");
-
+t.update({}, {$set: {"r.$id": 2}});
+assert.eq(x.r.$id, t.findOne().r.$id, "D");
diff --git a/jstests/core/set7.js b/jstests/core/set7.js
index bf9870eb015..8aba33aee94 100644
--- a/jstests/core/set7.js
+++ b/jstests/core/set7.js
@@ -5,64 +5,64 @@ var res;
t.drop();
-t.save( {a:[0,1,2,3]} );
-t.update( {}, {$set:{"a.0":2}} );
-assert.eq( [2,1,2,3], t.findOne().a );
+t.save({a: [0, 1, 2, 3]});
+t.update({}, {$set: {"a.0": 2}});
+assert.eq([2, 1, 2, 3], t.findOne().a);
-t.update( {}, {$set:{"a.4":5}} );
-assert.eq( [2,1,2,3,5], t.findOne().a );
+t.update({}, {$set: {"a.4": 5}});
+assert.eq([2, 1, 2, 3, 5], t.findOne().a);
-t.update( {}, {$set:{"a.9":9}} );
-assert.eq( [2,1,2,3,5,null,null,null,null,9], t.findOne().a );
+t.update({}, {$set: {"a.9": 9}});
+assert.eq([2, 1, 2, 3, 5, null, null, null, null, 9], t.findOne().a);
t.drop();
-t.save( {a:[0,1,2,3]} );
-t.update( {}, {$set:{"a.9":9,"a.7":7}} );
-assert.eq( [0,1,2,3,null,null,null,7,null,9], t.findOne().a );
+t.save({a: [0, 1, 2, 3]});
+t.update({}, {$set: {"a.9": 9, "a.7": 7}});
+assert.eq([0, 1, 2, 3, null, null, null, 7, null, 9], t.findOne().a);
t.drop();
-t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
-t.update( {}, {$set:{"a.11":11} } );
-assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+t.save({a: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]});
+t.update({}, {$set: {"a.11": 11}});
+assert.eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], t.findOne().a);
t.drop();
-t.save( {} );
-t.update( {}, {$set:{"a.0":4}} );
-assert.eq( {"0":4}, t.findOne().a );
+t.save({});
+t.update({}, {$set: {"a.0": 4}});
+assert.eq({"0": 4}, t.findOne().a);
t.drop();
-t.update( {"a.0":4}, {$set:{b:1}}, true );
-assert.eq( {"0":4}, t.findOne().a );
+t.update({"a.0": 4}, {$set: {b: 1}}, true);
+assert.eq({"0": 4}, t.findOne().a);
t.drop();
-t.save( {a:[]} );
-res = t.update( {}, {$set:{"a.f":1}} );
-assert.writeError( res );
-assert.eq( [], t.findOne().a );
+t.save({a: []});
+res = t.update({}, {$set: {"a.f": 1}});
+assert.writeError(res);
+assert.eq([], t.findOne().a);
// Test requiring proper ordering of multiple mods.
t.drop();
-t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
-t.update( {}, {$set:{"a.11":11,"a.2":-2}} );
-assert.eq( [0,1,-2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+t.save({a: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]});
+t.update({}, {$set: {"a.11": 11, "a.2": -2}});
+assert.eq([0, 1, -2, 3, 4, 5, 6, 7, 8, 9, 10, 11], t.findOne().a);
// Test upsert case
t.drop();
-t.update( {a:[0,1,2,3,4,5,6,7,8,9,10]}, {$set:{"a.11":11} }, true );
-assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+t.update({a: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}, {$set: {"a.11": 11}}, true);
+assert.eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], t.findOne().a);
// SERVER-3750
t.drop();
-t.save( {a:[]} );
-res = t.update( {}, {$set:{"a.1500000":1}} ); // current limit
-assert.writeOK( res );
+t.save({a: []});
+res = t.update({}, {$set: {"a.1500000": 1}}); // current limit
+assert.writeOK(res);
t.drop();
-t.save( {a:[]} );
-res = t.update( {}, {$set:{"a.1500001":1}} ); // 1 over limit
-assert.writeError( res );
+t.save({a: []});
+res = t.update({}, {$set: {"a.1500001": 1}}); // 1 over limit
+assert.writeError(res);
t.drop();
-t.save( {a:[]} );
-res = t.update( {}, {$set:{"a.1000000000":1}} ); // way over limit
-assert.writeError( res );
+t.save({a: []});
+res = t.update({}, {$set: {"a.1000000000": 1}}); // way over limit
+assert.writeError(res);
diff --git a/jstests/core/set_param1.js b/jstests/core/set_param1.js
index 3b31d2d8caa..2df37442518 100644
--- a/jstests/core/set_param1.js
+++ b/jstests/core/set_param1.js
@@ -1,14 +1,14 @@
// Tests for accessing logLevel server parameter using getParameter/setParameter commands
// and shell helpers.
-old = db.adminCommand( { "getParameter" : "*" } );
-tmp1 = db.adminCommand( { "setParameter" : 1 , "logLevel" : 5 } );
-tmp2 = db.adminCommand( { "setParameter" : 1 , "logLevel" : old.logLevel } );
-now = db.adminCommand( { "getParameter" : "*" } );
+old = db.adminCommand({"getParameter": "*"});
+tmp1 = db.adminCommand({"setParameter": 1, "logLevel": 5});
+tmp2 = db.adminCommand({"setParameter": 1, "logLevel": old.logLevel});
+now = db.adminCommand({"getParameter": "*"});
-assert.eq( old , now , "A" );
-assert.eq( old.logLevel , tmp1.was , "B" );
-assert.eq( 5 , tmp2.was , "C" );
+assert.eq(old, now, "A");
+assert.eq(old.logLevel, tmp1.was, "B");
+assert.eq(5, tmp2.was, "C");
//
// component verbosity
@@ -16,111 +16,103 @@ assert.eq( 5 , tmp2.was , "C" );
// verbosity for log component hierarchy
printjson(old.logComponentVerbosity);
-assert.neq( undefined, old.logComponentVerbosity, "log component verbosity not available" );
-assert.eq( old.logLevel, old.logComponentVerbosity.verbosity,
- "default component verbosity should match logLevel" );
-assert.neq( undefined, old.logComponentVerbosity.storage.journal.verbosity,
- "journal verbosity not available" );
+assert.neq(undefined, old.logComponentVerbosity, "log component verbosity not available");
+assert.eq(old.logLevel,
+ old.logComponentVerbosity.verbosity,
+ "default component verbosity should match logLevel");
+assert.neq(undefined,
+ old.logComponentVerbosity.storage.journal.verbosity,
+ "journal verbosity not available");
// Non-object log component verbosity should be rejected.
-assert.commandFailed(db.adminCommand( { "setParameter" : 1 ,
- logComponentVerbosity : "not an object" } ) );
+assert.commandFailed(db.adminCommand({"setParameter": 1, logComponentVerbosity: "not an object"}));
// Non-numeric verbosity for component should be rejected.
-assert.commandFailed( db.adminCommand( { "setParameter" : 1 ,
- logComponentVerbosity :
- { storage : { journal : {
- verbosity : "not a number" } } } } ) );
+assert.commandFailed(db.adminCommand({
+ "setParameter": 1,
+ logComponentVerbosity: {storage: {journal: {verbosity: "not a number"}}}
+}));
// An invalid component shall be rejected.
-assert.commandFailed( db.adminCommand( { "setParameter" : 1 ,
- logComponentVerbosity :
- { NoSuchComponent : { verbosity : 2 } } } ) );
-
+assert.commandFailed(db.adminCommand(
+ {"setParameter": 1, logComponentVerbosity: {NoSuchComponent: {verbosity: 2}}}));
// Set multiple component log levels at once.
-(function () {
- assert.commandWorked( db.adminCommand( {
- "setParameter" : 1 ,
- logComponentVerbosity : {
- verbosity : 2,
- accessControl : { verbosity : 0 },
- storage : {
- verbosity : 3,
- journal : { verbosity : 5 }
- } } } ) );
-
- var result = assert.commandWorked( db.adminCommand(
- { "getParameter": 1, logComponentVerbosity : 1} )).logComponentVerbosity;
-
- assert.eq( 2, result.verbosity );
- assert.eq( 0, result.accessControl.verbosity );
- assert.eq( 3, result.storage.verbosity );
- assert.eq( 5, result.storage.journal.verbosity );
+(function() {
+ assert.commandWorked(db.adminCommand({
+ "setParameter": 1,
+ logComponentVerbosity: {
+ verbosity: 2,
+ accessControl: {verbosity: 0},
+ storage: {verbosity: 3, journal: {verbosity: 5}}
+ }
+ }));
+
+ var result =
+ assert.commandWorked(db.adminCommand({"getParameter": 1, logComponentVerbosity: 1}))
+ .logComponentVerbosity;
+
+ assert.eq(2, result.verbosity);
+ assert.eq(0, result.accessControl.verbosity);
+ assert.eq(3, result.storage.verbosity);
+ assert.eq(5, result.storage.journal.verbosity);
})();
-
// Set multiple component log levels at once.
// Unrecognized field names not mapping to a log component shall be rejected.
// No changes shall apply.
-(function () {
- assert.commandFailed( db.adminCommand( {
- "setParameter" : 1 ,
- logComponentVerbosity : {
- verbosity : 6,
- accessControl : { verbosity : 5 },
- storage : {
- verbosity : 4,
- journal : { verbosity : 6 }
- },
- NoSuchComponent : { verbosity : 2 },
- extraField : 123 } } ) );
-
- var result = assert.commandWorked( db.adminCommand(
- { "getParameter": 1, logComponentVerbosity : 1} )).logComponentVerbosity;
-
- assert.eq( 2, result.verbosity );
- assert.eq( 0, result.accessControl.verbosity );
- assert.eq( 3, result.storage.verbosity );
- assert.eq( 5, result.storage.journal.verbosity );
+(function() {
+ assert.commandFailed(db.adminCommand({
+ "setParameter": 1,
+ logComponentVerbosity: {
+ verbosity: 6,
+ accessControl: {verbosity: 5},
+ storage: {verbosity: 4, journal: {verbosity: 6}},
+ NoSuchComponent: {verbosity: 2},
+ extraField: 123
+ }
+ }));
+
+ var result =
+ assert.commandWorked(db.adminCommand({"getParameter": 1, logComponentVerbosity: 1}))
+ .logComponentVerbosity;
+
+ assert.eq(2, result.verbosity);
+ assert.eq(0, result.accessControl.verbosity);
+ assert.eq(3, result.storage.verbosity);
+ assert.eq(5, result.storage.journal.verbosity);
})();
-
// Clear verbosity for default and journal.
-(function () {
- assert.commandWorked( db.adminCommand( {
- "setParameter" : 1 ,
- logComponentVerbosity : {
- verbosity: -1,
- storage : {
- journal : { verbosity : -1 } } } } ) );
-
- var result = assert.commandWorked( db.adminCommand(
- { "getParameter": 1, logComponentVerbosity : 1} )).logComponentVerbosity;
-
- assert.eq( 0, result.verbosity );
- assert.eq( 0, result.accessControl.verbosity );
- assert.eq( 3, result.storage.verbosity );
- assert.eq( -1, result.storage.journal.verbosity );
+(function() {
+ assert.commandWorked(db.adminCommand({
+ "setParameter": 1,
+ logComponentVerbosity: {verbosity: -1, storage: {journal: {verbosity: -1}}}
+ }));
+
+ var result =
+ assert.commandWorked(db.adminCommand({"getParameter": 1, logComponentVerbosity: 1}))
+ .logComponentVerbosity;
+
+ assert.eq(0, result.verbosity);
+ assert.eq(0, result.accessControl.verbosity);
+ assert.eq(3, result.storage.verbosity);
+ assert.eq(-1, result.storage.journal.verbosity);
})();
-
// Set accessControl verbosity using numerical level instead of
// subdocument with 'verbosity' field.
-(function () {
- assert.commandWorked( db.adminCommand( {
- "setParameter" : 1,
- logComponentVerbosity : {
- accessControl : 5 } } ) );
+(function() {
+ assert.commandWorked(
+ db.adminCommand({"setParameter": 1, logComponentVerbosity: {accessControl: 5}}));
- var result = assert.commandWorked( db.adminCommand(
- { "getParameter": 1, logComponentVerbosity : 1} )).logComponentVerbosity;
+ var result =
+ assert.commandWorked(db.adminCommand({"getParameter": 1, logComponentVerbosity: 1}))
+ .logComponentVerbosity;
- assert.eq( 5, result.accessControl.verbosity );
+ assert.eq(5, result.accessControl.verbosity);
})();
-
// Restore old verbosity values.
-assert.commandWorked( db.adminCommand( {
- "setParameter" : 1 ,
- logComponentVerbosity : old.logComponentVerbosity } ) );
+assert.commandWorked(
+ db.adminCommand({"setParameter": 1, logComponentVerbosity: old.logComponentVerbosity}));
diff --git a/jstests/core/shell1.js b/jstests/core/shell1.js
index 6d200e7a5b1..7ea23f8d3a5 100644
--- a/jstests/core/shell1.js
+++ b/jstests/core/shell1.js
@@ -1,8 +1,8 @@
x = 1;
-shellHelper( "show", "tables;" );
-shellHelper( "show", "tables" );
-shellHelper( "show", "tables ;" );
+shellHelper("show", "tables;");
+shellHelper("show", "tables");
+shellHelper("show", "tables ;");
// test slaveOk levels
assert(!db.getSlaveOk() && !db.test.getSlaveOk() && !db.getMongo().getSlaveOk(), "slaveOk 1");
@@ -12,4 +12,3 @@ db.setSlaveOk(false);
assert(!db.getSlaveOk() && !db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 3");
db.test.setSlaveOk(true);
assert(!db.getSlaveOk() && db.test.getSlaveOk() && db.getMongo().getSlaveOk(), "slaveOk 4");
-
diff --git a/jstests/core/shell_writeconcern.js b/jstests/core/shell_writeconcern.js
index 272a84c0a2c..f3f190061cf 100644
--- a/jstests/core/shell_writeconcern.js
+++ b/jstests/core/shell_writeconcern.js
@@ -9,11 +9,11 @@ collA.drop();
collB.drop();
// test inheritance
-db.setWriteConcern({w:1});
+db.setWriteConcern({w: 1});
assert.eq(1, db.getWriteConcern().toJSON().w);
assert.eq(1, collB.getWriteConcern().toJSON().w);
-collA.setWriteConcern({w:2});
+collA.setWriteConcern({w: 2});
assert.eq(2, collA.getWriteConcern().toJSON().w);
collA.unsetWriteConcern();
assert.eq(1, collA.getWriteConcern().toJSON().w);
@@ -24,55 +24,57 @@ assert.eq(undefined, collB.getWriteConcern());
assert.eq(undefined, db.getWriteConcern());
// test methods, by generating an error
-var res = assert.writeOK(collA.save({_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.save({_id: 1}, {writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(1, res.n, tojson(res));
assert.eq(1, res.upserted, tojson(res));
} else {
assert.eq(1, res.nUpserted, tojson(res));
}
-var res = assert.writeOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.update({_id: 1}, {_id: 1}, {writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(1, res.n, tojson(res));
} else {
assert.eq(1, res.nMatched, tojson(res));
}
-var res = assert.writeOK(collA.update({_id:1}, {_id:1}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.update({_id: 1}, {_id: 1}, {writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(1, res.n, tojson(res));
} else {
assert.eq(1, res.nMatched, tojson(res));
}
-var res = assert.writeOK(collA.insert({_id:2}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.insert({_id: 2}, {writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(0, res.n, tojson(res));
} else {
assert.eq(1, res.nInserted, tojson(res));
}
-var res = assert.writeOK(collA.remove({_id:3}, {writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.remove({_id: 3}, {writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(0, res.n, tojson(res));
} else {
assert.eq(0, res.nRemoved, tojson(res));
}
-var res = assert.writeOK(collA.remove({}, {justOne:true, writeConcern:{w:1}}));
-if (!db.getMongo().useWriteCommands() ) {
+var res = assert.writeOK(collA.remove({}, {justOne: true, writeConcern: {w: 1}}));
+if (!db.getMongo().useWriteCommands()) {
assert.eq(1, res.n, tojson(res));
} else {
assert.eq(1, res.nRemoved, tojson(res));
}
// Test ordered write concern, and that the write concern isn't run or reported as an error.
-assert.writeOK(collA.insert({_id:1}));
+assert.writeOK(collA.insert({_id: 1}));
-var res = assert.writeError(collA.insert([{_id:1}, {_id:1}], {ordered:true, writeConcern:{w:1}}));
+var res =
+ assert.writeError(collA.insert([{_id: 1}, {_id: 1}], {ordered: true, writeConcern: {w: 1}}));
assert.eq(1, res.getWriteErrors().length, tojson(res));
assert.eq(undefined, res.writeConcernErrors, tojson(res));
-var res = assert.writeError(collA.insert([{_id:1}, {_id:1}], {ordered:false, writeConcern:{w:1}}));
+var res =
+ assert.writeError(collA.insert([{_id: 1}, {_id: 1}], {ordered: false, writeConcern: {w: 1}}));
assert.eq(2, res.getWriteErrors().length, tojson(res));
assert.eq(undefined, res.writeConcernErrors, tojson(res));
diff --git a/jstests/core/shellkillop.js b/jstests/core/shellkillop.js
index b54ff74e237..0ac2ad681ed 100644
--- a/jstests/core/shellkillop.js
+++ b/jstests/core/shellkillop.js
@@ -1,61 +1,64 @@
baseName = "jstests_shellkillop";
-// 'retry' should be set to true in contexts where an exception should cause the test to be retried rather than to fail.
+// 'retry' should be set to true in contexts where an exception should cause the test to be retried
+// rather than to fail.
retry = false;
function testShellAutokillop() {
+ if (true) { // toggle to disable test
+ db[baseName].drop();
-if (true) { // toggle to disable test
- db[baseName].drop();
+ print("shellkillop.js insert data");
+ for (i = 0; i < 100000; ++i) {
+ db[baseName].insert({i: 1});
+ }
+ assert.eq(100000, db[baseName].count());
- print("shellkillop.js insert data");
- for (i = 0; i < 100000; ++i) {
- db[baseName].insert({ i: 1 });
- }
- assert.eq(100000, db[baseName].count());
+ // mongo --autokillop suppressed the ctrl-c "do you want to kill current operation" message
+ // it's just for testing purposes and thus not in the shell help
+ var evalStr = "print('SKO subtask started'); db." + baseName +
+ ".update( {}, {$set:{i:'abcdefghijkl'}}, false, true ); db." + baseName + ".count();";
+ print("shellkillop.js evalStr:" + evalStr);
+ spawn = startMongoProgramNoConnect(
+ "mongo", "--autokillop", "--port", myPort(), "--eval", evalStr);
- // mongo --autokillop suppressed the ctrl-c "do you want to kill current operation" message
- // it's just for testing purposes and thus not in the shell help
- var evalStr = "print('SKO subtask started'); db." + baseName + ".update( {}, {$set:{i:'abcdefghijkl'}}, false, true ); db." + baseName + ".count();";
- print("shellkillop.js evalStr:" + evalStr);
- spawn = startMongoProgramNoConnect("mongo", "--autokillop", "--port", myPort(), "--eval", evalStr);
+ sleep(100);
+ retry = true;
+ assert(db[baseName].find({i: 'abcdefghijkl'}).count() < 100000,
+ "update ran too fast, test won't be valid");
+ retry = false;
- sleep(100);
- retry = true;
- assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test won't be valid");
- retry = false;
+ stopMongoProgramByPid(spawn);
- stopMongoProgramByPid(spawn);
+ sleep(100);
- sleep(100);
+ print("count abcdefghijkl:" + db[baseName].find({i: 'abcdefghijkl'}).count());
- print("count abcdefghijkl:" + db[baseName].find({ i: 'abcdefghijkl' }).count());
+ var inprog = db.currentOp().inprog;
+ for (i in inprog) {
+ if (inprog[i].ns == "test." + baseName)
+ throw Error("shellkillop.js op is still running: " + tojson(inprog[i]));
+ }
- var inprog = db.currentOp().inprog;
- for (i in inprog) {
- if (inprog[i].ns == "test." + baseName)
- throw Error( "shellkillop.js op is still running: " + tojson( inprog[i] ) );
+ retry = true;
+ assert(db[baseName].find({i: 'abcdefghijkl'}).count() < 100000,
+ "update ran too fast, test was not valid");
+ retry = false;
}
-
- retry = true;
- assert(db[baseName].find({ i: 'abcdefghijkl' }).count() < 100000, "update ran too fast, test was not valid");
- retry = false;
-}
-
}
-for( var nTries = 0; nTries < 10 && retry; ++nTries ) {
+for (var nTries = 0; nTries < 10 && retry; ++nTries) {
try {
testShellAutokillop();
- } catch (e) {
- if ( !retry ) {
+ } catch (e) {
+ if (!retry) {
throw e;
}
- printjson( e );
- print( "retrying..." );
+ printjson(e);
+ print("retrying...");
}
}
-assert( !retry, "retried too many times" );
+assert(!retry, "retried too many times");
print("shellkillop.js SUCCESS");
diff --git a/jstests/core/shelltypes.js b/jstests/core/shelltypes.js
index e39c63272aa..d04b1b026bc 100644
--- a/jstests/core/shelltypes.js
+++ b/jstests/core/shelltypes.js
@@ -26,21 +26,25 @@ b = Timestamp(a.t, a.i);
printjson(a);
assert.eq(tojson(a), tojson(b), "timestamp");
-assert.throws(function() { Timestamp(-2, 3); }, null,
- "Timestamp time must not accept negative time" );
-assert.throws(function() { Timestamp(0, -1); }, null,
- "Timestamp increment must not accept negative time" );
-assert.throws(function() { Timestamp(0x10000 * 0x10000, 0); }, null,
- "Timestamp time must not accept values larger than 2**32 - 1");
-assert.throws(function() { Timestamp(0, 0x10000 * 0x10000); }, null,
- "Timestamp increment must not accept values larger than 2**32 - 1");
+assert.throws(function() {
+ Timestamp(-2, 3);
+}, null, "Timestamp time must not accept negative time");
+assert.throws(function() {
+ Timestamp(0, -1);
+}, null, "Timestamp increment must not accept negative time");
+assert.throws(function() {
+ Timestamp(0x10000 * 0x10000, 0);
+}, null, "Timestamp time must not accept values larger than 2**32 - 1");
+assert.throws(function() {
+ Timestamp(0, 0x10000 * 0x10000);
+}, null, "Timestamp increment must not accept values larger than 2**32 - 1");
a = new Timestamp(0x80008000, 0x80008000 + 0.5);
b = Timestamp(a.t, Math.round(a.i));
printjson(a);
assert.eq(tojson(a), tojson(b), "timestamp");
-a = new BinData(3,"VQ6EAOKbQdSnFkRmVUQAAA==");
+a = new BinData(3, "VQ6EAOKbQdSnFkRmVUQAAA==");
b = BinData(a.type, a.base64());
printjson(a);
assert.eq(tojson(a), tojson(b), "bindata");
@@ -77,36 +81,38 @@ var timestampA = a.getTimestamp();
var dateA = new Date(timestampA.getTime());
// ObjectId.fromDate - invalid input types
-assert.throws(function() { ObjectId.fromDate(undefined); }, null,
- "ObjectId.fromDate should error on undefined date" );
+assert.throws(function() {
+ ObjectId.fromDate(undefined);
+}, null, "ObjectId.fromDate should error on undefined date");
-assert.throws(function() { ObjectId.fromDate(12345); }, null,
- "ObjectId.fromDate should error on numerical value" );
+assert.throws(function() {
+ ObjectId.fromDate(12345);
+}, null, "ObjectId.fromDate should error on numerical value");
-assert.throws(function() { ObjectId.fromDate(dateA.toISOString()); }, null,
- "ObjectId.fromDate should error on string value" );
+assert.throws(function() {
+ ObjectId.fromDate(dateA.toISOString());
+}, null, "ObjectId.fromDate should error on string value");
// SERVER-14623 dates less than or equal to 1978-07-04T21:24:15Z fail
var checkFromDate = function(millis, expected, comment) {
var oid = ObjectId.fromDate(new Date(millis));
assert.eq(oid.valueOf(), expected, comment);
};
-checkFromDate(Math.pow(2,28) * 1000, "100000000000000000000000", "1978-07-04T21:24:16Z");
-checkFromDate((Math.pow(2,28) * 1000) - 1 , "0fffffff0000000000000000", "1978-07-04T21:24:15Z");
+checkFromDate(Math.pow(2, 28) * 1000, "100000000000000000000000", "1978-07-04T21:24:16Z");
+checkFromDate((Math.pow(2, 28) * 1000) - 1, "0fffffff0000000000000000", "1978-07-04T21:24:15Z");
checkFromDate(0, "000000000000000000000000", "start of epoch");
// test date upper limit
-checkFromDate((Math.pow(2,32) * 1000) - 1, "ffffffff0000000000000000", "last valid date");
-assert.throws(function() { ObjectId.fromDate(new Date(Math.pow(2,32) * 1000)); }, null,
- "ObjectId limited to 4 bytes for seconds" );
+checkFromDate((Math.pow(2, 32) * 1000) - 1, "ffffffff0000000000000000", "last valid date");
+assert.throws(function() {
+ ObjectId.fromDate(new Date(Math.pow(2, 32) * 1000));
+}, null, "ObjectId limited to 4 bytes for seconds");
// ObjectId.fromDate - Date
b = ObjectId.fromDate(dateA);
printjson(a);
assert.eq(tojson(a.getTimestamp()), tojson(b.getTimestamp()), "ObjectId.fromDate - Date");
-
-
// tojsonObject
// Empty object
@@ -131,4 +137,3 @@ assert.eq('{ "a" : 1, "b" : { "bb" : 2, "cc" : 3 } }',
tojsonObject({a: 1, b: {bb: 2, cc: 3}}, '', true));
assert.eq('{\n\t\t\t"a" : 1,\n\t\t\t"b" : {\n\t\t\t\t"bb" : 2,\n\t\t\t\t"cc" : 3\n\t\t\t}\n\t\t}',
tojsonObject({a: 1, b: {bb: 2, cc: 3}}, '\t\t'));
-
diff --git a/jstests/core/show_record_id.js b/jstests/core/show_record_id.js
index 566bdfff9a9..32fab6828a9 100644
--- a/jstests/core/show_record_id.js
+++ b/jstests/core/show_record_id.js
@@ -3,28 +3,28 @@
var t = db.show_record_id;
t.drop();
-function checkResults( arr ) {
- for( i in arr ) {
- a = arr[ i ];
- assert( a['$recordId'] );
+function checkResults(arr) {
+ for (i in arr) {
+ a = arr[i];
+ assert(a['$recordId']);
}
}
// Check query.
-t.save( {} );
-checkResults( t.find().showRecordId().toArray() );
+t.save({});
+checkResults(t.find().showRecordId().toArray());
// Check query and get more.
-t.save( {} );
-t.save( {} );
-checkResults( t.find().batchSize( 2 ).showRecordId().toArray() );
+t.save({});
+t.save({});
+checkResults(t.find().batchSize(2).showRecordId().toArray());
// Check with a covered index.
-t.ensureIndex( { a:1 } );
-checkResults( t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).showRecordId().toArray() );
-checkResults( t.find( {}, { _id:0, a:1 } ).hint( { a:1 } ).showRecordId().toArray() );
+t.ensureIndex({a: 1});
+checkResults(t.find({}, {_id: 0, a: 1}).hint({a: 1}).showRecordId().toArray());
+checkResults(t.find({}, {_id: 0, a: 1}).hint({a: 1}).showRecordId().toArray());
// Check with an idhack query.
t.drop();
t.save({_id: 0, a: 1});
-checkResults( t.find( { _id: 0 } ).showRecordId().toArray() );
+checkResults(t.find({_id: 0}).showRecordId().toArray());
diff --git a/jstests/core/skip1.js b/jstests/core/skip1.js
index c856e92cf72..ae8f589d616 100644
--- a/jstests/core/skip1.js
+++ b/jstests/core/skip1.js
@@ -3,16 +3,16 @@
var t = db.jstests_skip1;
-if ( 0 ) { // SERVER-2845
-t.drop();
+if (0) { // SERVER-2845
+ t.drop();
-t.ensureIndex( {a:1} );
-t.save( {a:5} );
-t.save( {a:5} );
-t.save( {a:5} );
+ t.ensureIndex({a: 1});
+ t.save({a: 5});
+ t.save({a: 5});
+ t.save({a: 5});
-assert.eq( 3, t.find( {a:5} ).skip( 2 ).explain().nscanned );
-assert.eq( 1, t.find( {a:5} ).skip( 2 ).explain().nscannedObjects );
+ assert.eq(3, t.find({a: 5}).skip(2).explain().nscanned);
+ assert.eq(1, t.find({a: 5}).skip(2).explain().nscannedObjects);
}
// SERVER-13537: Ensure that combinations of skip and limit don't crash
@@ -21,16 +21,16 @@ t.drop();
for (var i = 0; i < 10; i++) {
t.save({a: i});
}
-assert.eq( 9, t.find().sort({a: 1}).limit(2147483647).skip(1).itcount() );
-assert.eq( 0, t.find().sort({a: 1}).skip(2147483647).limit(1).itcount() );
+assert.eq(9, t.find().sort({a: 1}).limit(2147483647).skip(1).itcount());
+assert.eq(0, t.find().sort({a: 1}).skip(2147483647).limit(1).itcount());
if (!db.getMongo().useReadCommands()) {
// If we're using OP_QUERY/OP_GET_MORE reads rather than find/getMore command, then the skip and
// limit fields must fit inside a 32-bit signed integer.
- assert.throws( function() {
- assert.eq( 0, t.find().sort({a: 1}).skip(2147483648).itcount() );
+ assert.throws(function() {
+ assert.eq(0, t.find().sort({a: 1}).skip(2147483648).itcount());
});
- assert.throws( function() {
- assert.eq( 0, t.find().sort({a: 1}).limit(2147483648).itcount() );
+ assert.throws(function() {
+ assert.eq(0, t.find().sort({a: 1}).limit(2147483648).itcount());
});
}
diff --git a/jstests/core/slice1.js b/jstests/core/slice1.js
index 6037fe0eb1a..d723219af10 100644
--- a/jstests/core/slice1.js
+++ b/jstests/core/slice1.js
@@ -1,68 +1,68 @@
t = db.slice1;
t.drop();
-t.insert({_id:1, a:[0,1,2,3,4,5,-5,-4,-3,-2,-1], b:1, c:1});
+t.insert({_id: 1, a: [0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1], b: 1, c: 1});
// first three
-out = t.findOne({}, {a:{$slice:3}});
-assert.eq(out.a , [0,1,2], '1');
+out = t.findOne({}, {a: {$slice: 3}});
+assert.eq(out.a, [0, 1, 2], '1');
// last three
-out = t.findOne({}, {a:{$slice:-3}});
-assert.eq(out.a , [-3, -2, -1], '2');
+out = t.findOne({}, {a: {$slice: -3}});
+assert.eq(out.a, [-3, -2, -1], '2');
// skip 2, limit 3
-out = t.findOne({}, {a:{$slice:[2, 3]}});
-assert.eq(out.a , [2,3,4], '3');
+out = t.findOne({}, {a: {$slice: [2, 3]}});
+assert.eq(out.a, [2, 3, 4], '3');
// skip to fifth from last, limit 4
-out = t.findOne({}, {a:{$slice:[-5, 4]}});
-assert.eq(out.a , [-5, -4, -3, -2], '4');
+out = t.findOne({}, {a: {$slice: [-5, 4]}});
+assert.eq(out.a, [-5, -4, -3, -2], '4');
// skip to fifth from last, limit 10
-out = t.findOne({}, {a:{$slice:[-5, 10]}});
-assert.eq(out.a , [-5, -4, -3, -2, -1], '5');
-
+out = t.findOne({}, {a: {$slice: [-5, 10]}});
+assert.eq(out.a, [-5, -4, -3, -2, -1], '5');
// interaction with other fields
-out = t.findOne({}, {a:{$slice:3}});
-assert.eq(out.a , [0,1,2], 'A 1');
-assert.eq(out.b , 1, 'A 2');
-assert.eq(out.c , 1, 'A 3');
+out = t.findOne({}, {a: {$slice: 3}});
+assert.eq(out.a, [0, 1, 2], 'A 1');
+assert.eq(out.b, 1, 'A 2');
+assert.eq(out.c, 1, 'A 3');
-out = t.findOne({}, {a:{$slice:3}, b:true});
-assert.eq(out.a , [0,1,2], 'B 1');
-assert.eq(out.b , 1, 'B 2');
-assert.eq(out.c , undefined);
+out = t.findOne({}, {a: {$slice: 3}, b: true});
+assert.eq(out.a, [0, 1, 2], 'B 1');
+assert.eq(out.b, 1, 'B 2');
+assert.eq(out.c, undefined);
-out = t.findOne({}, {a:{$slice:3}, b:false});
-assert.eq(out.a , [0,1,2]);
-assert.eq(out.b , undefined);
-assert.eq(out.c , 1);
+out = t.findOne({}, {a: {$slice: 3}, b: false});
+assert.eq(out.a, [0, 1, 2]);
+assert.eq(out.b, undefined);
+assert.eq(out.c, 1);
t.drop();
-t.insert({comments: [{id:0, text:'a'},{id:1, text:'b'},{id:2, text:'c'},{id:3, text:'d'}], title:'foo'});
-
+t.insert({
+ comments: [{id: 0, text: 'a'}, {id: 1, text: 'b'}, {id: 2, text: 'c'}, {id: 3, text: 'd'}],
+ title: 'foo'
+});
-out = t.findOne({}, {comments:{$slice:2}, 'comments.id':true});
-assert.eq(out.comments , [{id:0}, {id:1}]);
-assert.eq(out.title , undefined);
+out = t.findOne({}, {comments: {$slice: 2}, 'comments.id': true});
+assert.eq(out.comments, [{id: 0}, {id: 1}]);
+assert.eq(out.title, undefined);
-out = t.findOne({}, {comments:{$slice:2}, 'comments.id':false});
-assert.eq(out.comments , [{text: 'a'}, {text: 'b'}]);
-assert.eq(out.title , 'foo');
+out = t.findOne({}, {comments: {$slice: 2}, 'comments.id': false});
+assert.eq(out.comments, [{text: 'a'}, {text: 'b'}]);
+assert.eq(out.title, 'foo');
-//nested arrays
+// nested arrays
t.drop();
-t.insert({_id:1, a:[[1,1,1], [2,2,2], [3,3,3]], b:1, c:1});
-
-out = t.findOne({}, {a:{$slice:1}});
-assert.eq(out.a , [[1,1,1]], 'n 1');
+t.insert({_id: 1, a: [[1, 1, 1], [2, 2, 2], [3, 3, 3]], b: 1, c: 1});
-out = t.findOne({}, {a:{$slice:-1}});
-assert.eq(out.a , [[3,3,3]], 'n 2');
+out = t.findOne({}, {a: {$slice: 1}});
+assert.eq(out.a, [[1, 1, 1]], 'n 1');
-out = t.findOne({}, {a:{$slice:[0,2]}});
-assert.eq(out.a , [[1,1,1],[2,2,2]], 'n 2');
+out = t.findOne({}, {a: {$slice: -1}});
+assert.eq(out.a, [[3, 3, 3]], 'n 2');
+out = t.findOne({}, {a: {$slice: [0, 2]}});
+assert.eq(out.a, [[1, 1, 1], [2, 2, 2]], 'n 2');
diff --git a/jstests/core/snapshot_queries.js b/jstests/core/snapshot_queries.js
index 684a4b9459e..e4aec435b10 100644
--- a/jstests/core/snapshot_queries.js
+++ b/jstests/core/snapshot_queries.js
@@ -20,7 +20,7 @@
assert.eq(1, cursor.next()["_id"]);
// Force a document move (on MMAP) while the query is yielded for a getMore.
- var bigString = Array(1024*1024).toString();
+ var bigString = Array(1024 * 1024).toString();
assert.writeOK(coll.update({_id: 1}, {$set: {padding: bigString}}));
assert.eq(2, cursor.next()["_id"]);
diff --git a/jstests/core/sort1.js b/jstests/core/sort1.js
index ce530872e8c..9bf92601bd1 100644
--- a/jstests/core/sort1.js
+++ b/jstests/core/sort1.js
@@ -1,48 +1,47 @@
-debug = function( s ){
- //print( s );
+debug = function(s) {
+ // print( s );
};
t = db.sort1;
t.drop();
-t.save({x:3,z:33});
-t.save({x:5,z:33});
-t.save({x:2,z:33});
-t.save({x:3,z:33});
-t.save({x:1,z:33});
-
-debug( "a" );
-for( var pass = 0; pass < 2; pass++ ) {
- assert( t.find().sort({x:1})[0].x == 1 );
- assert( t.find().sort({x:1}).skip(1)[0].x == 2 );
- assert( t.find().sort({x:-1})[0].x == 5 );
- assert( t.find().sort({x:-1})[1].x == 3 );
- assert.eq( t.find().sort({x:-1}).skip(0)[0].x , 5 );
- assert.eq( t.find().sort({x:-1}).skip(1)[0].x , 3 );
- t.ensureIndex({x:1});
-
+t.save({x: 3, z: 33});
+t.save({x: 5, z: 33});
+t.save({x: 2, z: 33});
+t.save({x: 3, z: 33});
+t.save({x: 1, z: 33});
+
+debug("a");
+for (var pass = 0; pass < 2; pass++) {
+ assert(t.find().sort({x: 1})[0].x == 1);
+ assert(t.find().sort({x: 1}).skip(1)[0].x == 2);
+ assert(t.find().sort({x: -1})[0].x == 5);
+ assert(t.find().sort({x: -1})[1].x == 3);
+ assert.eq(t.find().sort({x: -1}).skip(0)[0].x, 5);
+ assert.eq(t.find().sort({x: -1}).skip(1)[0].x, 3);
+ t.ensureIndex({x: 1});
}
-debug( "b" );
+debug("b");
assert(t.validate().valid);
t.drop();
-t.save({x:'a'});
-t.save({x:'aba'});
-t.save({x:'zed'});
-t.save({x:'foo'});
-
-debug( "c" );
-
-for( var pass = 0; pass < 2; pass++ ) {
- debug( tojson( t.find().sort( { "x" : 1 } ).limit(1).next() ) );
- assert.eq( "a" , t.find().sort({'x': 1}).limit(1).next().x , "c.1" );
- assert.eq( "a" , t.find().sort({'x': 1}).next().x , "c.2" );
- assert.eq( "zed" , t.find().sort({'x': -1}).limit(1).next().x , "c.3" );
- assert.eq( "zed" , t.find().sort({'x': -1}).next().x , "c.4" );
- t.ensureIndex({x:1});
+t.save({x: 'a'});
+t.save({x: 'aba'});
+t.save({x: 'zed'});
+t.save({x: 'foo'});
+
+debug("c");
+
+for (var pass = 0; pass < 2; pass++) {
+ debug(tojson(t.find().sort({"x": 1}).limit(1).next()));
+ assert.eq("a", t.find().sort({'x': 1}).limit(1).next().x, "c.1");
+ assert.eq("a", t.find().sort({'x': 1}).next().x, "c.2");
+ assert.eq("zed", t.find().sort({'x': -1}).limit(1).next().x, "c.3");
+ assert.eq("zed", t.find().sort({'x': -1}).next().x, "c.4");
+ t.ensureIndex({x: 1});
}
-debug( "d" );
+debug("d");
assert(t.validate().valid);
diff --git a/jstests/core/sort10.js b/jstests/core/sort10.js
index 657da665499..207be0226fa 100644
--- a/jstests/core/sort10.js
+++ b/jstests/core/sort10.js
@@ -3,23 +3,21 @@ t = db.sort10;
function checkSorting1(opts) {
t.drop();
- t.insert({ x: new Date(50000) });
- t.insert({ x: new Date(-50) });
+ t.insert({x: new Date(50000)});
+ t.insert({x: new Date(-50)});
var d = new Date(-50);
for (var pass = 0; pass < 2; pass++) {
- assert(t.find().sort({x:1})[0].x.valueOf() == d.valueOf());
- t.ensureIndex({ x: 1 }, opts);
- t.insert({ x: new Date() });
+ assert(t.find().sort({x: 1})[0].x.valueOf() == d.valueOf());
+ t.ensureIndex({x: 1}, opts);
+ t.insert({x: new Date()});
}
}
checkSorting1({});
-checkSorting1({"background":true});
-
-
+checkSorting1({"background": true});
function checkSorting2(dates, sortOrder) {
- cur = t.find().sort({x:sortOrder});
+ cur = t.find().sort({x: sortOrder});
assert.eq(dates.length, cur.count(), "Incorrect number of results returned");
index = 0;
while (cur.hasNext()) {
@@ -32,17 +30,19 @@ function checkSorting2(dates, sortOrder) {
t.drop();
dates = [new Date(-5000000000000), new Date(5000000000000), new Date(0), new Date(5), new Date(-5)];
for (var i = 0; i < dates.length; i++) {
- t.insert({x:dates[i]});
+ t.insert({x: dates[i]});
}
-dates.sort(function(a,b){return a - b;});
+dates.sort(function(a, b) {
+ return a - b;
+});
reverseDates = dates.slice(0).reverse();
checkSorting2(dates, 1);
checkSorting2(reverseDates, -1);
-t.ensureIndex({x:1});
+t.ensureIndex({x: 1});
checkSorting2(dates, 1);
checkSorting2(reverseDates, -1);
t.dropIndexes();
-t.ensureIndex({x:-1});
+t.ensureIndex({x: -1});
checkSorting2(dates, 1);
checkSorting2(reverseDates, -1);
diff --git a/jstests/core/sort2.js b/jstests/core/sort2.js
index 2528751cfc1..2cfb6baafc3 100644
--- a/jstests/core/sort2.js
+++ b/jstests/core/sort2.js
@@ -3,30 +3,30 @@
t = db.sort2;
t.drop();
-t.save({x:1, y:{a:5,b:4}});
-t.save({x:1, y:{a:7,b:3}});
-t.save({x:1, y:{a:2,b:3}});
-t.save({x:1, y:{a:9,b:3}});
-for( var pass = 0; pass < 2; pass++ ) {
- var res = t.find().sort({'y.a':1}).toArray();
- assert( res[0].y.a == 2 );
- assert( res[1].y.a == 5 );
- assert( res.length == 4 );
- t.ensureIndex({"y.a":1});
+t.save({x: 1, y: {a: 5, b: 4}});
+t.save({x: 1, y: {a: 7, b: 3}});
+t.save({x: 1, y: {a: 2, b: 3}});
+t.save({x: 1, y: {a: 9, b: 3}});
+for (var pass = 0; pass < 2; pass++) {
+ var res = t.find().sort({'y.a': 1}).toArray();
+ assert(res[0].y.a == 2);
+ assert(res[1].y.a == 5);
+ assert(res.length == 4);
+ t.ensureIndex({"y.a": 1});
}
assert(t.validate().valid);
t.drop();
-t.insert({ x: 1 });
-t.insert({ x: 5000000000 });
-t.insert({ x: NaN });
-t.insert({ x: Infinity });
-t.insert({ x: -Infinity });
+t.insert({x: 1});
+t.insert({x: 5000000000});
+t.insert({x: NaN});
+t.insert({x: Infinity});
+t.insert({x: -Infinity});
var good = [NaN, -Infinity, 1, 5000000000, Infinity];
for (var pass = 0; pass < 2; pass++) {
- var res = t.find({}, { _id: 0 }).sort({ x: 1 }).toArray();
+ var res = t.find({}, {_id: 0}).sort({x: 1}).toArray();
for (var i = 0; i < good.length; i++) {
assert(good[i].toString() == res[i].x.toString());
}
- t.ensureIndex({ x : 1 });
+ t.ensureIndex({x: 1});
}
diff --git a/jstests/core/sort3.js b/jstests/core/sort3.js
index f65b7445903..bfc1ee5134c 100644
--- a/jstests/core/sort3.js
+++ b/jstests/core/sort3.js
@@ -1,11 +1,20 @@
t = db.sort3;
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 5 } );
-t.save( { a : 3 } );
+t.save({a: 1});
+t.save({a: 5});
+t.save({a: 3});
-assert.eq( "1,5,3" , t.find().toArray().map( function(z){ return z.a; } ) );
+assert.eq("1,5,3",
+ t.find().toArray().map(function(z) {
+ return z.a;
+ }));
-assert.eq( "1,3,5" , t.find().sort( { a : 1 } ).toArray().map( function(z){ return z.a; } ) );
-assert.eq( "5,3,1" , t.find().sort( { a : -1 } ).toArray().map( function(z){ return z.a; } ) );
+assert.eq("1,3,5",
+ t.find().sort({a: 1}).toArray().map(function(z) {
+ return z.a;
+ }));
+assert.eq("5,3,1",
+ t.find().sort({a: -1}).toArray().map(function(z) {
+ return z.a;
+ }));
diff --git a/jstests/core/sort4.js b/jstests/core/sort4.js
index 9e4076b1c71..41b4e25fe11 100644
--- a/jstests/core/sort4.js
+++ b/jstests/core/sort4.js
@@ -1,22 +1,19 @@
t = db.sort4;
t.drop();
-
-function nice( sort , correct , extra ){
- var c = t.find().sort( sort );
+function nice(sort, correct, extra) {
+ var c = t.find().sort(sort);
var s = "";
- c.forEach(
- function(z){
- if ( s.length )
- s += ",";
- s += z.name;
- if ( z.prename )
- s += z.prename;
- }
- );
- print( tojson( sort ) + "\t" + s );
- if ( correct )
- assert.eq( correct , s , tojson( sort ) + "(" + extra + ")" );
+ c.forEach(function(z) {
+ if (s.length)
+ s += ",";
+ s += z.name;
+ if (z.prename)
+ s += z.prename;
+ });
+ print(tojson(sort) + "\t" + s);
+ if (correct)
+ assert.eq(correct, s, tojson(sort) + "(" + extra + ")");
return s;
}
@@ -25,19 +22,19 @@ t.save({name: 'A', prename: 'C'});
t.save({name: 'B', prename: 'B'});
t.save({name: 'B', prename: 'D'});
-nice( { name:1 } , "AB,AC,BB,BD" , "s1" );
-nice( { prename : 1 } , "AB,BB,AC,BD" , "s2" );
-nice( {name:1, prename:1} , "AB,AC,BB,BD" , "s3" );
+nice({name: 1}, "AB,AC,BB,BD", "s1");
+nice({prename: 1}, "AB,BB,AC,BD", "s2");
+nice({name: 1, prename: 1}, "AB,AC,BB,BD", "s3");
-t.save({name: 'A'});
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD" , "e1" );
+t.save({name: 'A'});
+nice({name: 1, prename: 1}, "A,AB,AC,BB,BD", "e1");
-t.save({name: 'C'});
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2" ); // SERVER-282
+t.save({name: 'C'});
+nice({name: 1, prename: 1}, "A,AB,AC,BB,BD,C", "e2"); // SERVER-282
-t.ensureIndex( { name : 1 , prename : 1 } );
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ia" ); // SERVER-282
+t.ensureIndex({name: 1, prename: 1});
+nice({name: 1, prename: 1}, "A,AB,AC,BB,BD,C", "e2ia"); // SERVER-282
t.dropIndexes();
-t.ensureIndex( { name : 1 } );
-nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ib" ); // SERVER-282
+t.ensureIndex({name: 1});
+nice({name: 1, prename: 1}, "A,AB,AC,BB,BD,C", "e2ib"); // SERVER-282
diff --git a/jstests/core/sort5.js b/jstests/core/sort5.js
index b90256ef79d..399c9fb4e28 100644
--- a/jstests/core/sort5.js
+++ b/jstests/core/sort5.js
@@ -8,14 +8,38 @@ t.save({_id: 9, x: 4, y: {a: 9, b: 3}});
// test compound sorting
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A no index" );
+assert.eq([4, 2, 3, 1],
+ t.find()
+ .sort({"y.b": 1, "y.a": -1})
+ .map(function(z) {
+ return z.x;
+ }),
+ "A no index");
t.ensureIndex({"y.b": 1, "y.a": -1});
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A index" );
+assert.eq([4, 2, 3, 1],
+ t.find()
+ .sort({"y.b": 1, "y.a": -1})
+ .map(function(z) {
+ return z.x;
+ }),
+ "A index");
assert(t.validate().valid, "A valid");
// test sorting on compound key involving _id
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
+assert.eq([4, 2, 3, 1],
+ t.find()
+ .sort({"y.b": 1, _id: -1})
+ .map(function(z) {
+ return z.x;
+ }),
+ "B no index");
t.ensureIndex({"y.b": 1, "_id": -1});
-assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
+assert.eq([4, 2, 3, 1],
+ t.find()
+ .sort({"y.b": 1, _id: -1})
+ .map(function(z) {
+ return z.x;
+ }),
+ "B index");
assert(t.validate().valid, "B valid");
diff --git a/jstests/core/sort6.js b/jstests/core/sort6.js
index 323fb92a335..03a1b559831 100644
--- a/jstests/core/sort6.js
+++ b/jstests/core/sort6.js
@@ -1,38 +1,38 @@
t = db.sort6;
-function get( x ){
- return t.find().sort( { c : x } ).map( function(z){ return z._id; } );
+function get(x) {
+ return t.find().sort({c: x}).map(function(z) {
+ return z._id;
+ });
}
// part 1
t.drop();
-t.insert({_id:1,c:null});
-t.insert({_id:2,c:1});
-t.insert({_id:3,c:2});
+t.insert({_id: 1, c: null});
+t.insert({_id: 2, c: 1});
+t.insert({_id: 3, c: 2});
+assert.eq([3, 2, 1], get(-1), "A1"); // SERVER-635
+assert.eq([1, 2, 3], get(1), "A2");
-assert.eq( [3,2,1] , get( -1 ) , "A1" ); // SERVER-635
-assert.eq( [1,2,3] , get( 1 ) , "A2" );
-
-t.ensureIndex( { c : 1 } );
-
-assert.eq( [3,2,1] , get( -1 ) , "B1" );
-assert.eq( [1,2,3] , get( 1 ) , "B2" );
+t.ensureIndex({c: 1});
+assert.eq([3, 2, 1], get(-1), "B1");
+assert.eq([1, 2, 3], get(1), "B2");
// part 2
t.drop();
-t.insert({_id:1});
-t.insert({_id:2,c:1});
-t.insert({_id:3,c:2});
+t.insert({_id: 1});
+t.insert({_id: 2, c: 1});
+t.insert({_id: 3, c: 2});
-assert.eq( [3,2,1] , get( -1 ) , "C1" ); // SERVER-635
-assert.eq( [1,2,3] , get( 1 ) , "C2" );
+assert.eq([3, 2, 1], get(-1), "C1"); // SERVER-635
+assert.eq([1, 2, 3], get(1), "C2");
-t.ensureIndex( { c : 1 } );
+t.ensureIndex({c: 1});
-assert.eq( [3,2,1] , get( -1 ) , "D1" );
-assert.eq( [1,2,3] , get( 1 ) , "X2" );
+assert.eq([3, 2, 1], get(-1), "D1");
+assert.eq([1, 2, 3], get(1), "X2");
diff --git a/jstests/core/sort7.js b/jstests/core/sort7.js
index 0b98734e5ff..4377cd5d17b 100644
--- a/jstests/core/sort7.js
+++ b/jstests/core/sort7.js
@@ -5,21 +5,21 @@ t.drop();
// Compare indexed and unindexed sort order for an array embedded field.
-t.save( { a : [ { x : 2 } ] } );
-t.save( { a : [ { x : 1 } ] } );
-t.save( { a : [ { x : 3 } ] } );
-unindexed = t.find().sort( {"a.x":1} ).toArray();
-t.ensureIndex( { "a.x" : 1 } );
-indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
-assert.eq( unindexed, indexed );
+t.save({a: [{x: 2}]});
+t.save({a: [{x: 1}]});
+t.save({a: [{x: 3}]});
+unindexed = t.find().sort({"a.x": 1}).toArray();
+t.ensureIndex({"a.x": 1});
+indexed = t.find().sort({"a.x": 1}).hint({"a.x": 1}).toArray();
+assert.eq(unindexed, indexed);
// Now check when there are two objects in the array.
t.remove({});
-t.save( { a : [ { x : 2 }, { x : 3 } ] } );
-t.save( { a : [ { x : 1 }, { x : 4 } ] } );
-t.save( { a : [ { x : 3 }, { x : 2 } ] } );
-unindexed = t.find().sort( {"a.x":1} ).toArray();
-t.ensureIndex( { "a.x" : 1 } );
-indexed = t.find().sort( {"a.x":1} ).hint( {"a.x":1} ).toArray();
-assert.eq( unindexed, indexed );
+t.save({a: [{x: 2}, {x: 3}]});
+t.save({a: [{x: 1}, {x: 4}]});
+t.save({a: [{x: 3}, {x: 2}]});
+unindexed = t.find().sort({"a.x": 1}).toArray();
+t.ensureIndex({"a.x": 1});
+indexed = t.find().sort({"a.x": 1}).hint({"a.x": 1}).toArray();
+assert.eq(unindexed, indexed);
diff --git a/jstests/core/sort8.js b/jstests/core/sort8.js
index 916075502d7..72e5ce54d23 100644
--- a/jstests/core/sort8.js
+++ b/jstests/core/sort8.js
@@ -3,28 +3,28 @@
t = db.jstests_sort8;
t.drop();
-t.save( {a:[1,10]} );
-t.save( {a:5} );
-unindexedForward = t.find().sort( {a:1} ).toArray();
-unindexedReverse = t.find().sort( {a:-1} ).toArray();
-t.ensureIndex( {a:1} );
-indexedForward = t.find().sort( {a:1} ).hint( {a:1} ).toArray();
-indexedReverse = t.find().sort( {a:-1} ).hint( {a:1} ).toArray();
+t.save({a: [1, 10]});
+t.save({a: 5});
+unindexedForward = t.find().sort({a: 1}).toArray();
+unindexedReverse = t.find().sort({a: -1}).toArray();
+t.ensureIndex({a: 1});
+indexedForward = t.find().sort({a: 1}).hint({a: 1}).toArray();
+indexedReverse = t.find().sort({a: -1}).hint({a: 1}).toArray();
-assert.eq( unindexedForward, indexedForward );
-assert.eq( unindexedReverse, indexedReverse );
+assert.eq(unindexedForward, indexedForward);
+assert.eq(unindexedReverse, indexedReverse);
// Sorting is based on array members, not the array itself.
-assert.eq( [1,10], unindexedForward[ 0 ].a );
-assert.eq( [1,10], unindexedReverse[ 0 ].a );
+assert.eq([1, 10], unindexedForward[0].a);
+assert.eq([1, 10], unindexedReverse[0].a);
// Now try with a bounds constraint.
t.dropIndexes();
-unindexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).toArray();
-unindexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).toArray();
-t.ensureIndex( {a:1} );
-indexedForward = t.find({a:{$gte:5}}).sort( {a:1} ).hint( {a:1} ).toArray();
-indexedReverse = t.find({a:{$lte:5}}).sort( {a:-1} ).hint( {a:1} ).toArray();
+unindexedForward = t.find({a: {$gte: 5}}).sort({a: 1}).toArray();
+unindexedReverse = t.find({a: {$lte: 5}}).sort({a: -1}).toArray();
+t.ensureIndex({a: 1});
+indexedForward = t.find({a: {$gte: 5}}).sort({a: 1}).hint({a: 1}).toArray();
+indexedReverse = t.find({a: {$lte: 5}}).sort({a: -1}).hint({a: 1}).toArray();
-assert.eq( unindexedForward, indexedForward );
-assert.eq( unindexedReverse, indexedReverse );
+assert.eq(unindexedForward, indexedForward);
+assert.eq(unindexedReverse, indexedReverse);
diff --git a/jstests/core/sort9.js b/jstests/core/sort9.js
index 62407d6e96d..57496b40da1 100644
--- a/jstests/core/sort9.js
+++ b/jstests/core/sort9.js
@@ -3,24 +3,24 @@
t = db.jstests_sort9;
t.drop();
-t.save( {a:[]} );
-t.save( {a:[[]]} );
-assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 2, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
+t.save({a: []});
+t.save({a: [[]]});
+assert.eq(2, t.find({a: {$ne: 4}}).sort({a: 1}).itcount());
+assert.eq(2, t.find({'a.b': {$ne: 4}}).sort({'a.b': 1}).itcount());
+assert.eq(2, t.find({a: {$ne: 4}}).sort({'a.b': 1}).itcount());
t.drop();
-t.save( {} );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {a:{$exists:0}} ).sort( {'a.b':1} ).itcount() );
+t.save({});
+assert.eq(1, t.find({a: {$ne: 4}}).sort({a: 1}).itcount());
+assert.eq(1, t.find({'a.b': {$ne: 4}}).sort({'a.b': 1}).itcount());
+assert.eq(1, t.find({a: {$ne: 4}}).sort({'a.b': 1}).itcount());
+assert.eq(1, t.find({a: {$exists: 0}}).sort({a: 1}).itcount());
+assert.eq(1, t.find({a: {$exists: 0}}).sort({'a.b': 1}).itcount());
t.drop();
-t.save( {a:{}} );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {a:{$ne:4}} ).sort( {'a.b':1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 1, t.find( {'a.b':{$exists:0}} ).sort( {'a.b':1} ).itcount() );
+t.save({a: {}});
+assert.eq(1, t.find({a: {$ne: 4}}).sort({a: 1}).itcount());
+assert.eq(1, t.find({'a.b': {$ne: 4}}).sort({'a.b': 1}).itcount());
+assert.eq(1, t.find({a: {$ne: 4}}).sort({'a.b': 1}).itcount());
+assert.eq(1, t.find({'a.b': {$exists: 0}}).sort({a: 1}).itcount());
+assert.eq(1, t.find({'a.b': {$exists: 0}}).sort({'a.b': 1}).itcount());
diff --git a/jstests/core/sort_numeric.js b/jstests/core/sort_numeric.js
index 807f23dfe8d..df4e914b87b 100644
--- a/jstests/core/sort_numeric.js
+++ b/jstests/core/sort_numeric.js
@@ -5,31 +5,29 @@ t.drop();
// there are two numeric types in the db; make sure it handles them right
// for comparisons.
-t.save( { a : 3 } );
-t.save( { a : 3.1 } );
-t.save( { a : 2.9 } );
-t.save( { a : 1 } );
-t.save( { a : 1.9 } );
-t.save( { a : 5 } );
-t.save( { a : 4.9 } );
-t.save( { a : 2.91 } );
-
-for( var pass = 0; pass < 2; pass++ ) {
-
- var c = t.find().sort({a:1});
+t.save({a: 3});
+t.save({a: 3.1});
+t.save({a: 2.9});
+t.save({a: 1});
+t.save({a: 1.9});
+t.save({a: 5});
+t.save({a: 4.9});
+t.save({a: 2.91});
+
+for (var pass = 0; pass < 2; pass++) {
+ var c = t.find().sort({a: 1});
var last = 0;
- while( c.hasNext() ) {
+ while (c.hasNext()) {
current = c.next();
- assert( current.a > last );
+ assert(current.a > last);
last = current.a;
}
- assert( t.find({a:3}).count() == 1 );
- assert( t.find({a:3.0}).count() == 1 );
- assert( t.find({a:3.0}).length() == 1 );
+ assert(t.find({a: 3}).count() == 1);
+ assert(t.find({a: 3.0}).count() == 1);
+ assert(t.find({a: 3.0}).length() == 1);
- t.ensureIndex({a:1});
+ t.ensureIndex({a: 1});
}
assert(t.validate().valid);
-
diff --git a/jstests/core/sortb.js b/jstests/core/sortb.js
index e16c7d650e6..e4feea7ecfc 100644
--- a/jstests/core/sortb.js
+++ b/jstests/core/sortb.js
@@ -4,24 +4,28 @@
t = db.jstests_sortb;
t.drop();
-t.ensureIndex({b:1});
+t.ensureIndex({b: 1});
-for( i = 0; i < 100; ++i ) {
- t.save( {a:i,b:i} );
+for (i = 0; i < 100; ++i) {
+ t.save({a: i, b: i});
}
// These large documents will not be part of the initial set of "top 100" matches, and they will
// not be part of the final set of "top 100" matches returned to the client. However, they are an
// intermediate set of "top 100" matches and should trigger an in memory sort capacity exception.
-big = new Array( 1024 * 1024 ).toString();
-for( i = 100; i < 200; ++i ) {
- t.save( {a:i,b:i,big:big} );
+big = new Array(1024 * 1024).toString();
+for (i = 100; i < 200; ++i) {
+ t.save({a: i, b: i, big: big});
}
-for( i = 200; i < 300; ++i ) {
- t.save( {a:i,b:i} );
+for (i = 200; i < 300; ++i) {
+ t.save({a: i, b: i});
}
-assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).limit( 100 ).itcount(); } );
-assert.throws( function() { t.find().sort( {a:-1} ).hint( {b:1} ).showDiskLoc().limit( 100 ).itcount(); } );
+assert.throws(function() {
+ t.find().sort({a: -1}).hint({b: 1}).limit(100).itcount();
+});
+assert.throws(function() {
+ t.find().sort({a: -1}).hint({b: 1}).showDiskLoc().limit(100).itcount();
+});
t.drop();
\ No newline at end of file
diff --git a/jstests/core/sortc.js b/jstests/core/sortc.js
index f9aa202508b..e2443599955 100644
--- a/jstests/core/sortc.js
+++ b/jstests/core/sortc.js
@@ -3,35 +3,35 @@
t = db.jstests_sortc;
t.drop();
-t.save( {a:1} );
-t.save( {a:2} );
+t.save({a: 1});
+t.save({a: 2});
-function checkA( a, sort, skip, query ) {
+function checkA(a, sort, skip, query) {
query = query || {};
- assert.eq( a, t.find( query ).sort( sort ).skip( skip )[ 0 ].a );
+ assert.eq(a, t.find(query).sort(sort).skip(skip)[0].a);
}
function checkSortAndSkip() {
- checkA( 1, {a:1}, 0 );
- checkA( 2, {a:1}, 1 );
+ checkA(1, {a: 1}, 0);
+ checkA(2, {a: 1}, 1);
- checkA( 1, {a:1}, 0, {a:{$gt:0},b:null} );
- checkA( 2, {a:1}, 1, {a:{$gt:0},b:null} );
+ checkA(1, {a: 1}, 0, {a: {$gt: 0}, b: null});
+ checkA(2, {a: 1}, 1, {a: {$gt: 0}, b: null});
- checkA( 2, {a:-1}, 0 );
- checkA( 1, {a:-1}, 1 );
+ checkA(2, {a: -1}, 0);
+ checkA(1, {a: -1}, 1);
- checkA( 2, {a:-1}, 0, {a:{$gt:0},b:null} );
- checkA( 1, {a:-1}, 1, {a:{$gt:0},b:null} );
+ checkA(2, {a: -1}, 0, {a: {$gt: 0}, b: null});
+ checkA(1, {a: -1}, 1, {a: {$gt: 0}, b: null});
- checkA( 1, {$natural:1}, 0 );
- checkA( 2, {$natural:1}, 1 );
+ checkA(1, {$natural: 1}, 0);
+ checkA(2, {$natural: 1}, 1);
- checkA( 2, {$natural:-1}, 0 );
- checkA( 1, {$natural:-1}, 1 );
+ checkA(2, {$natural: -1}, 0);
+ checkA(1, {$natural: -1}, 1);
}
checkSortAndSkip();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
checkSortAndSkip();
diff --git a/jstests/core/sortd.js b/jstests/core/sortd.js
index 963d32b0ca4..7012915f3ca 100644
--- a/jstests/core/sortd.js
+++ b/jstests/core/sortd.js
@@ -2,69 +2,68 @@
t = db.jstests_sortd;
-function checkNumSorted( n, query ) {
+function checkNumSorted(n, query) {
docs = query.toArray();
- assert.eq( n, docs.length );
- for( i = 1; i < docs.length; ++i ) {
- assert.lte( docs[ i-1 ].a, docs[ i ].a );
+ assert.eq(n, docs.length);
+ for (i = 1; i < docs.length; ++i) {
+ assert.lte(docs[i - 1].a, docs[i].a);
}
}
-
// Test results added by ordered and unordered plans, unordered plan finishes.
t.drop();
-t.save( {a:[1,2,3,4,5]} );
-t.save( {a:10} );
-t.ensureIndex( {a:1} );
+t.save({a: [1, 2, 3, 4, 5]});
+t.save({a: 10});
+t.ensureIndex({a: 1});
-assert.eq( 2, t.find( {a:{$gt:0}} ).sort( {a:1} ).itcount() );
-assert.eq( 2, t.find( {a:{$gt:0},b:null} ).sort( {a:1} ).itcount() );
+assert.eq(2, t.find({a: {$gt: 0}}).sort({a: 1}).itcount());
+assert.eq(2, t.find({a: {$gt: 0}, b: null}).sort({a: 1}).itcount());
// Test results added by ordered and unordered plans, ordered plan finishes.
t.drop();
-t.save( {a:1} );
-t.save( {a:10} );
-for( i = 2; i <= 9; ++i ) {
- t.save( {a:i} );
+t.save({a: 1});
+t.save({a: 10});
+for (i = 2; i <= 9; ++i) {
+ t.save({a: i});
}
-for( i = 0; i < 30; ++i ) {
- t.save( {a:100} );
+for (i = 0; i < 30; ++i) {
+ t.save({a: 100});
}
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10}} ).sort( {a:1} ) );
-checkNumSorted( 10, t.find( {a:{$gte:0,$lte:10},b:null} ).sort( {a:1} ) );
+checkNumSorted(10, t.find({a: {$gte: 0, $lte: 10}}).sort({a: 1}));
+checkNumSorted(10, t.find({a: {$gte: 0, $lte: 10}, b: null}).sort({a: 1}));
-// Test results added by ordered and unordered plans, ordered plan finishes and continues with getmore.
+// Test results added by ordered and unordered plans, ordered plan finishes and continues with
+// getmore.
t.drop();
-t.save( {a:1} );
-t.save( {a:200} );
-for( i = 2; i <= 199; ++i ) {
- t.save( {a:i} );
+t.save({a: 1});
+t.save({a: 200});
+for (i = 2; i <= 199; ++i) {
+ t.save({a: i});
}
-for( i = 0; i < 30; ++i ) {
- t.save( {a:2000} );
+for (i = 0; i < 30; ++i) {
+ t.save({a: 2000});
}
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
-checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200}} ).sort( {a:1} ) );
-checkNumSorted( 200, t.find( {a:{$gte:0,$lte:200},b:null} ).sort( {a:1} ) );
+checkNumSorted(200, t.find({a: {$gte: 0, $lte: 200}}).sort({a: 1}));
+checkNumSorted(200, t.find({a: {$gte: 0, $lte: 200}, b: null}).sort({a: 1}));
// Test results added by ordered and unordered plans, with unordered results excluded during
// getmore.
t.drop();
-for( i = 399; i >= 0; --i ) {
- t.save( {a:i} );
+for (i = 399; i >= 0; --i) {
+ t.save({a: i});
}
-t.ensureIndex( {a:1} );
-
-checkNumSorted( 400, t.find( {a:{$gte:0,$lte:400},b:null} ).batchSize( 50 ).sort( {a:1} ) );
+t.ensureIndex({a: 1});
+checkNumSorted(400, t.find({a: {$gte: 0, $lte: 400}, b: null}).batchSize(50).sort({a: 1}));
diff --git a/jstests/core/sortf.js b/jstests/core/sortf.js
index 615791e25a5..1cd3449aa4f 100644
--- a/jstests/core/sortf.js
+++ b/jstests/core/sortf.js
@@ -4,17 +4,17 @@
t = db.jstests_sortf;
t.drop();
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
-for( i = 0; i < 100; ++i ) {
- t.save( {a:0,b:0} );
+for (i = 0; i < 100; ++i) {
+ t.save({a: 0, b: 0});
}
-big = new Array( 10 * 1000 * 1000 ).toString();
-for( i = 0; i < 5; ++i ) {
- t.save( {a:1,b:1,big:big} );
+big = new Array(10 * 1000 * 1000).toString();
+for (i = 0; i < 5; ++i) {
+ t.save({a: 1, b: 1, big: big});
}
-assert.eq( 5, t.find( {a:1} ).sort( {b:1} ).itcount() );
+assert.eq(5, t.find({a: 1}).sort({b: 1}).itcount());
t.drop();
\ No newline at end of file
diff --git a/jstests/core/sortg.js b/jstests/core/sortg.js
index ec69d8fc772..726fe9184a6 100644
--- a/jstests/core/sortg.js
+++ b/jstests/core/sortg.js
@@ -3,57 +3,57 @@
t = db.jstests_sortg;
t.drop();
-big = new Array( 1000000 ).toString();
+big = new Array(1000000).toString();
-for( i = 0; i < 100; ++i ) {
- t.save( {b:0} );
+for (i = 0; i < 100; ++i) {
+ t.save({b: 0});
}
-for( i = 0; i < 40; ++i ) {
- t.save( {a:0,x:big} );
+for (i = 0; i < 40; ++i) {
+ t.save({a: 0, x: big});
}
-function memoryException( sortSpec, querySpec ) {
+function memoryException(sortSpec, querySpec) {
querySpec = querySpec || {};
- var ex = assert.throws( function() {
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount();
- } );
- assert( ex.toString().match( /Sort/ ) );
+ var ex = assert.throws(function() {
+ t.find(querySpec).sort(sortSpec).batchSize(1000).itcount();
+ });
+ assert(ex.toString().match(/Sort/));
}
-function noMemoryException( sortSpec, querySpec ) {
+function noMemoryException(sortSpec, querySpec) {
querySpec = querySpec || {};
- t.find( querySpec ).sort( sortSpec ).batchSize( 1000 ).itcount();
+ t.find(querySpec).sort(sortSpec).batchSize(1000).itcount();
}
// Unindexed sorts.
-memoryException( {a:1} );
-memoryException( {b:1} );
+memoryException({a: 1});
+memoryException({b: 1});
// Indexed sorts.
-noMemoryException( {_id:1} );
-noMemoryException( {$natural:1} );
+noMemoryException({_id: 1});
+noMemoryException({$natural: 1});
-assert.eq( 1, t.getIndexes().length );
+assert.eq(1, t.getIndexes().length);
-t.ensureIndex( {a:1} );
-t.ensureIndex( {b:1} );
-t.ensureIndex( {c:1} );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
+t.ensureIndex({c: 1});
-assert.eq( 4, t.getIndexes().length );
+assert.eq(4, t.getIndexes().length);
// These sorts are now indexed.
-noMemoryException( {a:1} );
-noMemoryException( {b:1} );
+noMemoryException({a: 1});
+noMemoryException({b: 1});
// A memory exception is triggered for an unindexed sort involving multiple plans.
-memoryException( {d:1}, {b:null,c:null} );
+memoryException({d: 1}, {b: null, c: null});
// With an indexed plan on _id:1 and an unindexed plan on b:1, the indexed plan
// should succeed even if the unindexed one would exhaust its memory limit.
-noMemoryException( {_id:1}, {b:null} );
+noMemoryException({_id: 1}, {b: null});
// With an unindexed plan on b:1 recorded for a query, the query should be
// retried when the unindexed plan exhausts its memory limit.
-noMemoryException( {_id:1}, {b:null} );
+noMemoryException({_id: 1}, {b: null});
t.drop();
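
For reference, the behavior sortg.js exercises can be reproduced directly in the mongo shell. The sketch below is a minimal, hedged example (the collection name sortg_demo is illustrative): an unindexed blocking sort over oversized documents is expected to exceed the in-memory sort limit and throw, while the same sort succeeds once an index supplies the order.

// Minimal sketch, assuming a 'test' database; the collection name is illustrative.
var demo = db.sortg_demo;
demo.drop();
var pad = new Array(1000000).toString();  // ~1MB of filler per document, as in the test
for (var i = 0; i < 40; ++i) {
    demo.save({a: i, x: pad});
}
// Unindexed sort: expected to exhaust the in-memory sort limit and throw.
assert.throws(function() {
    demo.find().sort({a: 1}).itcount();
});
// Indexed sort: the index provides the order, so no blocking sort is needed.
demo.ensureIndex({a: 1});
assert.eq(40, demo.find().sort({a: 1}).itcount());
demo.drop();
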
diff --git a/jstests/core/sorth.js b/jstests/core/sorth.js
index e520ee50454..afad8b0cca8 100644
--- a/jstests/core/sorth.js
+++ b/jstests/core/sorth.js
@@ -1,140 +1,136 @@
// Tests for the $in/sort/limit optimization combined with inequality bounds. SERVER-5777
-
t = db.jstests_sorth;
t.drop();
/** Assert that the 'a' and 'b' fields of the documents match. */
-function assertMatch( expectedMatch, match ) {
+function assertMatch(expectedMatch, match) {
if (undefined !== expectedMatch.a) {
- assert.eq( expectedMatch.a, match.a );
+ assert.eq(expectedMatch.a, match.a);
}
if (undefined !== expectedMatch.b) {
- assert.eq( expectedMatch.b, match.b );
+ assert.eq(expectedMatch.b, match.b);
}
}
/** Assert an expected document or array of documents matches the 'matches' array. */
-function assertMatches( expectedMatches, matches ) {
- if ( expectedMatches.length == null ) {
- assertMatch( expectedMatches, matches[ 0 ] );
+function assertMatches(expectedMatches, matches) {
+ if (expectedMatches.length == null) {
+ assertMatch(expectedMatches, matches[0]);
}
- for( i = 0; i < expectedMatches.length; ++i ) {
- assertMatch( expectedMatches[ i ], matches[ i ] );
+ for (i = 0; i < expectedMatches.length; ++i) {
+ assertMatch(expectedMatches[i], matches[i]);
}
}
/** Generate a cursor using global parameters. */
-function find( query ) {
- return t.find( query ).sort( _sort ).limit( _limit ).hint( _hint );
+function find(query) {
+ return t.find(query).sort(_sort).limit(_limit).hint(_hint);
}
/** Check the expected matches for a query. */
-function checkMatches( expectedMatch, query ) {
- result = find( query ).toArray();
- assertMatches( expectedMatch, result );
- var count = find( query ).itcount();
- assert.eq( expectedMatch.length || 1, count );
+function checkMatches(expectedMatch, query) {
+ result = find(query).toArray();
+ assertMatches(expectedMatch, result);
+ var count = find(query).itcount();
+ assert.eq(expectedMatch.length || 1, count);
}
/** Reset data, index, and _sort and _hint globals. */
-function reset( sort, index ) {
+function reset(sort, index) {
t.drop();
- t.save( { a:1, b:1 } );
- t.save( { a:1, b:2 } );
- t.save( { a:1, b:3 } );
- t.save( { a:2, b:0 } );
- t.save( { a:2, b:3 } );
- t.save( { a:2, b:5 } );
- t.ensureIndex( index );
+ t.save({a: 1, b: 1});
+ t.save({a: 1, b: 2});
+ t.save({a: 1, b: 3});
+ t.save({a: 2, b: 0});
+ t.save({a: 2, b: 3});
+ t.save({a: 2, b: 5});
+ t.ensureIndex(index);
_sort = sort;
_hint = index;
}
-function checkForwardDirection( sort, index ) {
- reset( sort, index );
+function checkForwardDirection(sort, index) {
+ reset(sort, index);
_limit = -1;
// Lower bound checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:3 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:3 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:4 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$gte: 0}});
+ checkMatches({a: 1, b: 1}, {a: {$in: [1, 2]}, b: {$gt: 0}});
+ checkMatches({a: 1, b: 1}, {a: {$in: [1, 2]}, b: {$gte: 1}});
+ checkMatches({a: 1, b: 2}, {a: {$in: [1, 2]}, b: {$gt: 1}});
+ checkMatches({a: 1, b: 2}, {a: {$in: [1, 2]}, b: {$gte: 2}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$gt: 2}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$gte: 3}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gt: 3}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gte: 4}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gt: 4}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gte: 5}});
// Upper bound checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:0 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$lte: 0}});
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$lt: 1}});
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$lte: 1}});
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$lt: 3}});
// Lower and upper bounds checks.
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:0 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lt:1 } } );
- checkMatches( { a:2, b:0 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0, $lte:1 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:0, $lte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2, $lt:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:2.5, $lte:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:2.5, $lte:3 } } );
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$gte: 0, $lte: 0}});
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$gte: 0, $lt: 1}});
+ checkMatches({a: 2, b: 0}, {a: {$in: [1, 2]}, b: {$gte: 0, $lte: 1}});
+ checkMatches({a: 1, b: 1}, {a: {$in: [1, 2]}, b: {$gt: 0, $lte: 1}});
+ checkMatches({a: 1, b: 2}, {a: {$in: [1, 2]}, b: {$gte: 2, $lt: 3}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$gte: 2.5, $lte: 3}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$gt: 2.5, $lte: 3}});
// Limit is -2.
_limit = -2;
- checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+ checkMatches([{a: 2, b: 0}, {a: 1, b: 1}], {a: {$in: [1, 2]}, b: {$gte: 0}});
// We omit 'a' here because it's not defined whether or not we will see
// {a:2, b:3} or {a:1, b:3} first as our sort is over 'b'.
- checkMatches( [ { a:1, b:2 }, { b:3 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gt:1 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gt:4 } } );
+ checkMatches([{a: 1, b: 2}, {b: 3}], {a: {$in: [1, 2]}, b: {$gt: 1}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gt: 4}});
// With an additional document between the $in values.
- t.save( { a:1.5, b:3 } );
- checkMatches( [ { a:2, b:0 }, { a:1, b:1 } ],
- { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
+ t.save({a: 1.5, b: 3});
+ checkMatches([{a: 2, b: 0}, {a: 1, b: 1}], {a: {$in: [1, 2]}, b: {$gte: 0}});
}
// Basic test with an index suffix order.
-checkForwardDirection( { b:1 }, { a:1, b:1 } );
+checkForwardDirection({b: 1}, {a: 1, b: 1});
// With an additional index field.
-checkForwardDirection( { b:1 }, { a:1, b:1, c:1 } );
+checkForwardDirection({b: 1}, {a: 1, b: 1, c: 1});
// With an additional reverse direction index field.
-checkForwardDirection( { b:1 }, { a:1, b:1, c:-1 } );
+checkForwardDirection({b: 1}, {a: 1, b: 1, c: -1});
// With an additional ordered index field.
-checkForwardDirection( { b:1, c:1 }, { a:1, b:1, c:1 } );
+checkForwardDirection({b: 1, c: 1}, {a: 1, b: 1, c: 1});
// With an additional reverse direction ordered index field.
-checkForwardDirection( { b:1, c:-1 }, { a:1, b:1, c:-1 } );
+checkForwardDirection({b: 1, c: -1}, {a: 1, b: 1, c: -1});
-function checkReverseDirection( sort, index ) {
- reset( sort, index );
+function checkReverseDirection(sort, index) {
+ reset(sort, index);
_limit = -1;
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:0 } } );
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $gte:5 } } );
-
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:5 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.1 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3 } } );
-
- checkMatches( { a:2, b:5 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:5, $gte:5 } } );
- checkMatches( { a:1, b:1 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:2, $gte:1 } } );
- checkMatches( { a:1, b:2 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3, $gt:1 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lt:3.5, $gte:3 } } );
- checkMatches( { a:1, b:3 }, { a:{ $in:[ 1, 2 ] }, b:{ $lte:3, $gt:0 } } );
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gte: 0}});
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$gte: 5}});
+
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$lte: 5}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lt: 5}});
+ checkMatches({a: 1, b: 2}, {a: {$in: [1, 2]}, b: {$lt: 3}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lt: 3.1}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lt: 3.5}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lte: 3}});
+
+ checkMatches({a: 2, b: 5}, {a: {$in: [1, 2]}, b: {$lte: 5, $gte: 5}});
+ checkMatches({a: 1, b: 1}, {a: {$in: [1, 2]}, b: {$lt: 2, $gte: 1}});
+ checkMatches({a: 1, b: 2}, {a: {$in: [1, 2]}, b: {$lt: 3, $gt: 1}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lt: 3.5, $gte: 3}});
+ checkMatches({a: 1, b: 3}, {a: {$in: [1, 2]}, b: {$lte: 3, $gt: 0}});
}
// With a descending order index.
-checkReverseDirection( { b:-1 }, { a:1, b:-1 } );
-checkReverseDirection( { b:-1 }, { a:1, b:-1, c:1 } );
-checkReverseDirection( { b:-1 }, { a:1, b:-1, c:-1 } );
-checkReverseDirection( { b:-1, c:1 }, { a:1, b:-1, c:1 } );
-checkReverseDirection( { b:-1, c:-1 }, { a:1, b:-1, c:-1 } );
+checkReverseDirection({b: -1}, {a: 1, b: -1});
+checkReverseDirection({b: -1}, {a: 1, b: -1, c: 1});
+checkReverseDirection({b: -1}, {a: 1, b: -1, c: -1});
+checkReverseDirection({b: -1, c: 1}, {a: 1, b: -1, c: 1});
+checkReverseDirection({b: -1, c: -1}, {a: 1, b: -1, c: -1});
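
The queries in sorth.js all share one shape: a $in over the index prefix, a range on the suffix field, and a sort on that suffix with limit(-1), which asks for a single result. A hedged, self-contained sketch of that pattern (collection name illustrative):

// Minimal sketch of the $in/sort/limit pattern; the collection name is illustrative.
var demo = db.sorth_demo;
demo.drop();
demo.save({a: 1, b: 1});
demo.save({a: 2, b: 0});
demo.ensureIndex({a: 1, b: 1});
// With limit(-1) the cursor returns at most one document: the smallest 'b'
// across both $in intervals, here {a: 2, b: 0}.
var first = demo.find({a: {$in: [1, 2]}, b: {$gte: 0}})
                .sort({b: 1})
                .limit(-1)
                .hint({a: 1, b: 1})[0];
assert.eq(0, first.b);
demo.drop();
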
diff --git a/jstests/core/sorti.js b/jstests/core/sorti.js
index 2e5cfe110d7..b6518818683 100644
--- a/jstests/core/sorti.js
+++ b/jstests/core/sorti.js
@@ -3,23 +3,23 @@
t = db.jstests_sorti;
t.drop();
-t.save( { a:1, b:0 } );
-t.save( { a:3, b:1 } );
-t.save( { a:2, b:2 } );
-t.save( { a:4, b:3 } );
+t.save({a: 1, b: 0});
+t.save({a: 3, b: 1});
+t.save({a: 2, b: 2});
+t.save({a: 4, b: 3});
-function checkBOrder( query ) {
+function checkBOrder(query) {
arr = query.toArray();
order = [];
- for( i in arr ) {
- a = arr[ i ];
- order.push( a.b );
+ for (i in arr) {
+ a = arr[i];
+ order.push(a.b);
}
- assert.eq( [ 0, 2, 1, 3 ], order );
+ assert.eq([0, 2, 1, 3], order);
}
-checkBOrder( t.find().sort( { a:1 } ) );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
-t.ensureIndex( { b:1 } );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ) );
-checkBOrder( t.find( {}, { _id:0, b:1 } ).sort( { a:1 } ).hint( { b:1 } ) );
+checkBOrder(t.find().sort({a: 1}));
+checkBOrder(t.find({}, {_id: 0, b: 1}).sort({a: 1}));
+t.ensureIndex({b: 1});
+checkBOrder(t.find({}, {_id: 0, b: 1}).sort({a: 1}));
+checkBOrder(t.find({}, {_id: 0, b: 1}).sort({a: 1}).hint({b: 1}));
diff --git a/jstests/core/sortj.js b/jstests/core/sortj.js
index 7a73829b94e..4d8baa47e8f 100644
--- a/jstests/core/sortj.js
+++ b/jstests/core/sortj.js
@@ -4,14 +4,14 @@
t = db.jstests_sortj;
t.drop();
-t.ensureIndex( { a:1 } );
+t.ensureIndex({a: 1});
-big = new Array( 100000 ).toString();
-for( i = 0; i < 1000; ++i ) {
- t.save( { a:1, b:big } );
+big = new Array(100000).toString();
+for (i = 0; i < 1000; ++i) {
+ t.save({a: 1, b: big});
}
-assert.throws( function() {
- t.find( { a:{ $gte:0 }, c:null } ).sort( { d:1 } ).itcount();
- } );
+assert.throws(function() {
+ t.find({a: {$gte: 0}, c: null}).sort({d: 1}).itcount();
+});
t.drop();
\ No newline at end of file
diff --git a/jstests/core/sortk.js b/jstests/core/sortk.js
index da00fe80ba5..7ecb86fa6df 100644
--- a/jstests/core/sortk.js
+++ b/jstests/core/sortk.js
@@ -5,136 +5,146 @@ t.drop();
function resetCollection() {
t.drop();
- t.save( { a:1, b:1 } );
- t.save( { a:1, b:2 } );
- t.save( { a:1, b:3 } );
- t.save( { a:2, b:4 } );
- t.save( { a:2, b:5 } );
- t.save( { a:2, b:0 } );
+ t.save({a: 1, b: 1});
+ t.save({a: 1, b: 2});
+ t.save({a: 1, b: 3});
+ t.save({a: 2, b: 4});
+ t.save({a: 2, b: 5});
+ t.save({a: 2, b: 0});
}
resetCollection();
-t.ensureIndex( { a:1, b:1 } );
-
-function simpleQuery( extraFields, sort, hint ) {
- query = { a:{ $in:[ 1, 2 ] } };
- Object.extend( query, extraFields );
- sort = sort || { b:1 };
- hint = hint || { a:1, b:1 };
- return t.find( query ).sort( sort ).hint( hint );
+t.ensureIndex({a: 1, b: 1});
+
+function simpleQuery(extraFields, sort, hint) {
+ query = {
+ a: {$in: [1, 2]}
+ };
+ Object.extend(query, extraFields);
+ sort = sort || {
+ b: 1
+ };
+ hint = hint || {
+ a: 1,
+ b: 1
+ };
+ return t.find(query).sort(sort).hint(hint);
}
-function simpleQueryWithLimit( limit ) {
- return simpleQuery().limit( limit );
+function simpleQueryWithLimit(limit) {
+ return simpleQuery().limit(limit);
}
// The limit is -1.
-assert.eq( 0, simpleQueryWithLimit( -1 )[ 0 ].b );
+assert.eq(0, simpleQueryWithLimit(-1)[0].b);
// The limit is -2.
-assert.eq( 0, simpleQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 1, simpleQueryWithLimit( -2 )[ 1 ].b );
+assert.eq(0, simpleQueryWithLimit(-2)[0].b);
+assert.eq(1, simpleQueryWithLimit(-2)[1].b);
// A skip is applied.
-assert.eq( 1, simpleQueryWithLimit( -1 ).skip( 1 )[ 0 ].b );
+assert.eq(1, simpleQueryWithLimit(-1).skip(1)[0].b);
// No limit is applied.
-assert.eq( 6, simpleQueryWithLimit( 0 ).itcount() );
-assert.eq( 6, simpleQueryWithLimit( 0 ).explain( true ).executionStats.totalKeysExamined );
-assert.eq( 5, simpleQueryWithLimit( 0 ).skip( 1 ).itcount() );
+assert.eq(6, simpleQueryWithLimit(0).itcount());
+assert.eq(6, simpleQueryWithLimit(0).explain(true).executionStats.totalKeysExamined);
+assert.eq(5, simpleQueryWithLimit(0).skip(1).itcount());
// The query has additional constraints, preventing limit optimization.
-assert.eq( 2, simpleQuery( { $where:'this.b>=2' } ).limit( -1 )[ 0 ].b );
+assert.eq(2, simpleQuery({$where: 'this.b>=2'}).limit(-1)[0].b);
// The sort order is the reverse of the index order.
-assert.eq( 5, simpleQuery( {}, { b:-1 } ).limit( -1 )[ 0 ].b );
+assert.eq(5, simpleQuery({}, {b: -1}).limit(-1)[0].b);
// The sort order is the reverse of the index order on a constrained field.
-assert.eq( 0, simpleQuery( {}, { a:-1, b:1 } ).limit( -1 )[ 0 ].b );
+assert.eq(0, simpleQuery({}, {a: -1, b: 1}).limit(-1)[0].b);
// Without a hint, multiple cursors are attempted.
-assert.eq( 0, t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 )[ 0 ].b );
-explain = t.find( { a:{ $in:[ 1, 2 ] } } ).sort( { b:1 } ).limit( -1 ).explain( true );
-assert.eq( 1, explain.executionStats.nReturned );
+assert.eq(0, t.find({a: {$in: [1, 2]}}).sort({b: 1}).limit(-1)[0].b);
+explain = t.find({a: {$in: [1, 2]}}).sort({b: 1}).limit(-1).explain(true);
+assert.eq(1, explain.executionStats.nReturned);
// The expected first result now comes from the first interval.
-t.remove( { b:0 } );
-assert.eq( 1, simpleQueryWithLimit( -1 )[ 0 ].b );
+t.remove({b: 0});
+assert.eq(1, simpleQueryWithLimit(-1)[0].b);
// With three intervals.
-function inThreeIntervalQueryWithLimit( limit ) {
- return t.find( { a:{ $in: [ 1, 2, 3 ] } } ).sort( { b:1 } ).hint( { a:1, b:1 } ).limit( limit );
+function inThreeIntervalQueryWithLimit(limit) {
+ return t.find({a: {$in: [1, 2, 3]}}).sort({b: 1}).hint({a: 1, b: 1}).limit(limit);
}
-assert.eq( 1, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
-assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 2, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
-t.save( { a:3, b:0 } );
-assert.eq( 0, inThreeIntervalQueryWithLimit( -1 )[ 0 ].b );
-assert.eq( 0, inThreeIntervalQueryWithLimit( -2 )[ 0 ].b );
-assert.eq( 1, inThreeIntervalQueryWithLimit( -2 )[ 1 ].b );
+assert.eq(1, inThreeIntervalQueryWithLimit(-1)[0].b);
+assert.eq(1, inThreeIntervalQueryWithLimit(-2)[0].b);
+assert.eq(2, inThreeIntervalQueryWithLimit(-2)[1].b);
+t.save({a: 3, b: 0});
+assert.eq(0, inThreeIntervalQueryWithLimit(-1)[0].b);
+assert.eq(0, inThreeIntervalQueryWithLimit(-2)[0].b);
+assert.eq(1, inThreeIntervalQueryWithLimit(-2)[1].b);
// The index is multikey.
t.remove({});
-t.save( { a:1, b:[ 0, 1, 2 ] } );
-t.save( { a:2, b:[ 0, 1, 2 ] } );
-t.save( { a:1, b:5 } );
-assert.eq( 3, simpleQueryWithLimit( -3 ).itcount() );
+t.save({a: 1, b: [0, 1, 2]});
+t.save({a: 2, b: [0, 1, 2]});
+t.save({a: 1, b: 5});
+assert.eq(3, simpleQueryWithLimit(-3).itcount());
// The index ordering is reversed.
resetCollection();
-t.ensureIndex( { a:1, b:-1 } );
+t.ensureIndex({a: 1, b: -1});
// The sort order is consistent with the index order.
-assert.eq( 5, simpleQuery( {}, { b:-1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
+assert.eq(5, simpleQuery({}, {b: -1}, {a: 1, b: -1}).limit(-1)[0].b);
// The sort order is the reverse of the index order.
-assert.eq( 0, simpleQuery( {}, { b:1 }, { a:1, b:-1 } ).limit( -1 )[ 0 ].b );
+assert.eq(0, simpleQuery({}, {b: 1}, {a: 1, b: -1}).limit(-1)[0].b);
// An equality constraint precedes the $in constraint.
t.drop();
-t.ensureIndex( { a:1, b:1, c:1 } );
-t.save( { a:0, b:0, c:-1 } );
-t.save( { a:0, b:2, c:1 } );
-t.save( { a:1, b:1, c:1 } );
-t.save( { a:1, b:1, c:2 } );
-t.save( { a:1, b:1, c:3 } );
-t.save( { a:1, b:2, c:4 } );
-t.save( { a:1, b:2, c:5 } );
-t.save( { a:1, b:2, c:0 } );
-
-function eqInQueryWithLimit( limit ) {
- return t.find( { a:1, b:{ $in:[ 1, 2 ] } } ).sort( { c: 1 } ).hint( { a:1, b:1, c:1 } ).
- limit( limit );
+t.ensureIndex({a: 1, b: 1, c: 1});
+t.save({a: 0, b: 0, c: -1});
+t.save({a: 0, b: 2, c: 1});
+t.save({a: 1, b: 1, c: 1});
+t.save({a: 1, b: 1, c: 2});
+t.save({a: 1, b: 1, c: 3});
+t.save({a: 1, b: 2, c: 4});
+t.save({a: 1, b: 2, c: 5});
+t.save({a: 1, b: 2, c: 0});
+
+function eqInQueryWithLimit(limit) {
+ return t.find({a: 1, b: {$in: [1, 2]}}).sort({c: 1}).hint({a: 1, b: 1, c: 1}).limit(limit);
}
-function andEqInQueryWithLimit( limit ) {
- return t.find( { $and:[ { a:1 }, { b:{ $in:[ 1, 2 ] } } ] } ).sort( { c: 1 } ).
- hint( { a:1, b:1, c:1 } ).limit( limit );
+function andEqInQueryWithLimit(limit) {
+ return t.find({$and: [{a: 1}, {b: {$in: [1, 2]}}]})
+ .sort({c: 1})
+ .hint({a: 1, b: 1, c: 1})
+ .limit(limit);
}
// The limit is -1.
-assert.eq( 0, eqInQueryWithLimit( -1 )[ 0 ].c );
-assert.eq( 0, andEqInQueryWithLimit( -1 )[ 0 ].c );
+assert.eq(0, eqInQueryWithLimit(-1)[0].c);
+assert.eq(0, andEqInQueryWithLimit(-1)[0].c);
// The limit is -2.
-assert.eq( 0, eqInQueryWithLimit( -2 )[ 0 ].c );
-assert.eq( 1, eqInQueryWithLimit( -2 )[ 1 ].c );
-assert.eq( 0, andEqInQueryWithLimit( -2 )[ 0 ].c );
-assert.eq( 1, andEqInQueryWithLimit( -2 )[ 1 ].c );
-
-function inQueryWithLimit( limit, sort ) {
- sort = sort || { b:1 };
- return t.find( { a:{ $in:[ 0, 1 ] } } ).sort( sort ).hint( { a:1, b:1, c:1 } ).limit( limit );
+assert.eq(0, eqInQueryWithLimit(-2)[0].c);
+assert.eq(1, eqInQueryWithLimit(-2)[1].c);
+assert.eq(0, andEqInQueryWithLimit(-2)[0].c);
+assert.eq(1, andEqInQueryWithLimit(-2)[1].c);
+
+function inQueryWithLimit(limit, sort) {
+ sort = sort || {
+ b: 1
+ };
+ return t.find({a: {$in: [0, 1]}}).sort(sort).hint({a: 1, b: 1, c: 1}).limit(limit);
}
// The index has two suffix fields unconstrained by the query.
-assert.eq( 0, inQueryWithLimit( -1 )[ 0 ].b );
+assert.eq(0, inQueryWithLimit(-1)[0].b);
// The index has two ordered suffix fields unconstrained by the query.
-assert.eq( 0, inQueryWithLimit( -1, { b:1, c:1 } )[ 0 ].b );
+assert.eq(0, inQueryWithLimit(-1, {b: 1, c: 1})[0].b);
// The index has two ordered suffix fields unconstrained by the query and the limit is -2.
-assert.eq( 0, inQueryWithLimit( -2, { b:1, c:1 } )[ 0 ].b );
-assert.eq( 1, inQueryWithLimit( -2, { b:1, c:1 } )[ 1 ].b );
+assert.eq(0, inQueryWithLimit(-2, {b: 1, c: 1})[0].b);
+assert.eq(1, inQueryWithLimit(-2, {b: 1, c: 1})[1].b);
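
sortk.js also relies on explain(true) to read execution statistics. The short sketch below shows that pattern in isolation (collection name illustrative; the exact totalKeysExamined value depends on the plan chosen).

// Minimal sketch of reading executionStats from explain(true); names are illustrative.
var demo = db.sortk_demo;
demo.drop();
demo.save({a: 1, b: 1});
demo.save({a: 2, b: 0});
demo.ensureIndex({a: 1, b: 1});
var explain = demo.find({a: {$in: [1, 2]}}).sort({b: 1}).limit(-1).explain(true);
// limit(-1) means at most one document is returned.
assert.eq(1, explain.executionStats.nReturned);
print("keys examined: " + explain.executionStats.totalKeysExamined);
demo.drop();
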
diff --git a/jstests/core/sortl.js b/jstests/core/sortl.js
index b7cf9b34958..247a175a6f0 100644
--- a/jstests/core/sortl.js
+++ b/jstests/core/sortl.js
@@ -14,18 +14,22 @@
assert.eq(res.next(), {_id: 1, a: 2, b: {"": 2}});
assert.eq(res.hasNext(), false);
- res = db.runCommand({findAndModify: coll.getName(),
- query: {_id: 1},
- update: {$set: {b: 1}},
- sort: {a: 1},
- fields: {c: {$meta: "sortKey"}}});
+ res = db.runCommand({
+ findAndModify: coll.getName(),
+ query: {_id: 1},
+ update: {$set: {b: 1}},
+ sort: {a: 1},
+ fields: {c: {$meta: "sortKey"}}
+ });
assert.commandFailedWithCode(res, ErrorCodes.BadValue, "$meta sortKey update");
- res = db.runCommand({findAndModify: coll.getName(),
- query: {_id: 1},
- remove: true,
- sort: {b: 1},
- fields: {c: {$meta: "sortKey"}}});
+ res = db.runCommand({
+ findAndModify: coll.getName(),
+ query: {_id: 1},
+ remove: true,
+ sort: {b: 1},
+ fields: {c: {$meta: "sortKey"}}
+ });
assert.commandFailedWithCode(res, ErrorCodes.BadValue, "$meta sortKey delete");
coll.drop();
diff --git a/jstests/core/splitvector.js b/jstests/core/splitvector.js
index 5306bd61ff4..233911d29c6 100644
--- a/jstests/core/splitvector.js
+++ b/jstests/core/splitvector.js
@@ -7,27 +7,26 @@
// collection in 'maxChunkSize' approximately-sized chunks. Its asserts fail otherwise.
// @param splitVec: an array with keys for field 'x'
// e.g. [ { x : 1927 }, { x : 3855 }, ...
-// @param numDocs: domain of 'x' field
+// @param numDocs: domain of 'x' field
// e.g. 20000
// @param maxChunkSize is in MBs.
//
-assertChunkSizes = function ( splitVec , numDocs , maxChunkSize , msg ){
- splitVec = [{ x: -1 }].concat( splitVec );
- splitVec.push( { x: numDocs+1 } );
- for ( i=0; i<splitVec.length-1; i++) {
+assertChunkSizes = function(splitVec, numDocs, maxChunkSize, msg) {
+ splitVec = [{x: -1}].concat(splitVec);
+ splitVec.push({x: numDocs + 1});
+ for (i = 0; i < splitVec.length - 1; i++) {
min = splitVec[i];
- max = splitVec[i+1];
+ max = splitVec[i + 1];
var avgObjSize = db.jstests_splitvector.stats().avgObjSize;
- size = db.runCommand( { datasize: "test.jstests_splitvector" , min: min , max: max } ).size;
-
+ size = db.runCommand({datasize: "test.jstests_splitvector", min: min, max: max}).size;
+
// It is okay for the last chunk to be smaller. A collection's size does not
// need to be exactly a multiple of maxChunkSize.
- if ( i < splitVec.length - 2 ) {
+ if (i < splitVec.length - 2) {
// We are within one object of the correct chunk size.
- assert.lt( Math.abs(maxChunkSize - size), avgObjSize , "A"+i );
- }
- else {
- assert.gt( maxChunkSize , size , "A"+i , msg + "b" );
+ assert.lt(Math.abs(maxChunkSize - size), avgObjSize, "A" + i);
+ } else {
+ assert.gt(maxChunkSize, size, "A" + i, msg + "b");
}
}
};
@@ -36,15 +35,15 @@ assertChunkSizes = function ( splitVec , numDocs , maxChunkSize , msg ){
// This is useful for checking that splitPoints have the same format as the original key pattern,
// even when sharding on a prefix key.
// Not very efficient, so only call when # of field names is small
-var assertFieldNamesMatch = function( splitPoint , keyPattern ){
- for ( var p in splitPoint ) {
- if( splitPoint.hasOwnProperty( p ) ) {
- assert( keyPattern.hasOwnProperty( p ) , "property " + p + " not in keyPattern" );
+var assertFieldNamesMatch = function(splitPoint, keyPattern) {
+ for (var p in splitPoint) {
+ if (splitPoint.hasOwnProperty(p)) {
+ assert(keyPattern.hasOwnProperty(p), "property " + p + " not in keyPattern");
}
}
- for ( var p in keyPattern ) {
- if( keyPattern.hasOwnProperty( p ) ){
- assert( splitPoint.hasOwnProperty( p ) , "property " + p + " not in splitPoint" );
+ for (var p in keyPattern) {
+ if (keyPattern.hasOwnProperty(p)) {
+ assert(splitPoint.hasOwnProperty(p), "property " + p + " not in splitPoint");
}
}
};
@@ -60,54 +59,63 @@ f = db.jstests_splitvector;
resetCollection();
// -------------------------
-// Case 1: missing parameters
-
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" } ).ok , "1a" );
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , maxChunkSize: 1} ).ok , "1b" );
+// Case 1: missing parameters
+assert.eq(false, db.runCommand({splitVector: "test.jstests_splitvector"}).ok, "1a");
+assert.eq(false,
+ db.runCommand({splitVector: "test.jstests_splitvector", maxChunkSize: 1}).ok,
+ "1b");
// -------------------------
// Case 2: missing index
-assert.eq( false, db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).ok , "2");
-
+assert.eq(false,
+ db.runCommand(
+ {splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, maxChunkSize: 1}).ok,
+ "2");
// -------------------------
// Case 3: empty collection
-f.ensureIndex( { x: 1} );
-assert.eq( [], db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } ).splitKeys , "3");
-
+f.ensureIndex({x: 1});
+assert.eq(
+ [],
+ db.runCommand({splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, maxChunkSize: 1})
+ .splitKeys,
+ "3");
// -------------------------
// Case 4: uniform collection
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case4 = function() {
// Get baseline document size
filler = "";
- while( filler.length < 500 ) filler += "a";
- f.save( { x: 0, y: filler } );
- docSize = db.runCommand( { datasize: "test.jstests_splitvector" } ).size;
- assert.gt( docSize, 500 , "4a" );
+ while (filler.length < 500)
+ filler += "a";
+ f.save({x: 0, y: filler});
+ docSize = db.runCommand({datasize: "test.jstests_splitvector"}).size;
+ assert.gt(docSize, 500, "4a");
// Fill collection and get split vector for 1MB maxChunkSize
numDocs = 4500;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
+ for (i = 1; i < numDocs; i++) {
+ f.save({x: i, y: filler});
}
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+ res = db.runCommand(
+ {splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, maxChunkSize: 1});
// splitVector aims at getting half-full chunks after split
- factor = 0.5;
-
- assert.eq( true , res.ok , "4b" );
- assert.close( numDocs*docSize / ((1<<20) * factor), res.splitKeys.length , "num split keys" , -1 );
- assertChunkSizes( res.splitKeys , numDocs, (1<<20) * factor , "4d" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ factor = 0.5;
+
+ assert.eq(true, res.ok, "4b");
+ assert.close(
+ numDocs * docSize / ((1 << 20) * factor), res.splitKeys.length, "num split keys", -1);
+ assertChunkSizes(res.splitKeys, numDocs, (1 << 20) * factor, "4d");
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
};
case4();
@@ -116,20 +124,25 @@ case4();
// Case 5: limit number of split points
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case5 = function() {
// Fill collection and get split vector for 1MB maxChunkSize
numDocs = 4500;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
+ for (i = 1; i < numDocs; i++) {
+ f.save({x: i, y: filler});
}
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxSplitPoints: 1} );
-
- assert.eq( true , res.ok , "5a" );
- assert.eq( 1 , res.splitKeys.length , "5b" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ res = db.runCommand({
+ splitVector: "test.jstests_splitvector",
+ keyPattern: {x: 1},
+ maxChunkSize: 1,
+ maxSplitPoints: 1
+ });
+
+ assert.eq(true, res.ok, "5a");
+ assert.eq(1, res.splitKeys.length, "5b");
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
};
case5();
@@ -138,20 +151,25 @@ case5();
// Case 6: limit number of objects in a chunk
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case6 = function() {
// Fill collection and get split vector for 1MB maxChunkSize
numDocs = 2000;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: i, y: filler } );
+ for (i = 1; i < numDocs; i++) {
+ f.save({x: i, y: filler});
}
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 , maxChunkObjects: 500} );
-
- assert.eq( true , res.ok , "6a" );
- assert.eq( 3 , res.splitKeys.length , "6b" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ res = db.runCommand({
+ splitVector: "test.jstests_splitvector",
+ keyPattern: {x: 1},
+ maxChunkSize: 1,
+ maxChunkObjects: 500
+ });
+
+ assert.eq(true, res.ok, "6a");
+ assert.eq(3, res.splitKeys.length, "6b");
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
};
case6();
@@ -161,24 +179,25 @@ case6();
// [1111111111111111,2,3)
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case7 = function() {
// Fill collection and get split vector for 1MB maxChunkSize
numDocs = 2100;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: 1, y: filler } );
+ for (i = 1; i < numDocs; i++) {
+ f.save({x: 1, y: filler});
}
- for( i=1; i<10; i++ ){
- f.save( { x: 2, y: filler } );
+ for (i = 1; i < 10; i++) {
+ f.save({x: 2, y: filler});
}
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+ res = db.runCommand(
+ {splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, maxChunkSize: 1});
- assert.eq( true , res.ok , "7a" );
- assert.eq( 2 , res.splitKeys[0].x, "7b");
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ assert.eq(true, res.ok, "7a");
+ assert.eq(2, res.splitKeys[0].x, "7b");
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
};
case7();
@@ -188,30 +207,31 @@ case7();
// [1, 22222222222222, 3)
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case8 = function() {
- for( i=1; i<10; i++ ){
- f.save( { x: 1, y: filler } );
+ for (i = 1; i < 10; i++) {
+ f.save({x: 1, y: filler});
}
numDocs = 2100;
- for( i=1; i<numDocs; i++ ){
- f.save( { x: 2, y: filler } );
+ for (i = 1; i < numDocs; i++) {
+ f.save({x: 2, y: filler});
}
- for( i=1; i<10; i++ ){
- f.save( { x: 3, y: filler } );
+ for (i = 1; i < 10; i++) {
+ f.save({x: 3, y: filler});
}
- res = db.runCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , maxChunkSize: 1 } );
+ res = db.runCommand(
+ {splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, maxChunkSize: 1});
- assert.eq( true , res.ok , "8a" );
- assert.eq( 2 , res.splitKeys.length , "8b" );
- assert.eq( 2 , res.splitKeys[0].x , "8c" );
- assert.eq( 3 , res.splitKeys[1].x , "8d" );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ assert.eq(true, res.ok, "8a");
+ assert.eq(2, res.splitKeys.length, "8b");
+ assert.eq(2, res.splitKeys[0].x, "8c");
+ assert.eq(3, res.splitKeys[1].x, "8d");
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
};
case8();
@@ -221,30 +241,31 @@ case8();
//
resetCollection();
-f.ensureIndex( { x: 1 } );
+f.ensureIndex({x: 1});
var case9 = function() {
- f.save( { x: 1 } );
- f.save( { x: 2 } );
- f.save( { x: 3 } );
-
- assert.eq( 3 , f.count() );
- print( f.getFullName() );
-
- res = db.runCommand( { splitVector: f.getFullName() , keyPattern: {x:1} , force : true } );
-
- assert.eq( true , res.ok , "9a" );
- assert.eq( 1 , res.splitKeys.length , "9b" );
- assert.eq( 2 , res.splitKeys[0].x , "9c" );
-
- if ( db.runCommand( "isMaster" ).msg != "isdbgrid" ) {
- res = db.adminCommand( { splitVector: "test.jstests_splitvector" , keyPattern: {x:1} , force : true } );
-
- assert.eq( true , res.ok , "9a: " + tojson(res) );
- assert.eq( 1 , res.splitKeys.length , "9b: " + tojson(res) );
- assert.eq( 2 , res.splitKeys[0].x , "9c: " + tojson(res) );
- for( i=0; i < res.splitKeys.length; i++ ){
- assertFieldNamesMatch( res.splitKeys[i] , {x : 1} );
+ f.save({x: 1});
+ f.save({x: 2});
+ f.save({x: 3});
+
+ assert.eq(3, f.count());
+ print(f.getFullName());
+
+ res = db.runCommand({splitVector: f.getFullName(), keyPattern: {x: 1}, force: true});
+
+ assert.eq(true, res.ok, "9a");
+ assert.eq(1, res.splitKeys.length, "9b");
+ assert.eq(2, res.splitKeys[0].x, "9c");
+
+ if (db.runCommand("isMaster").msg != "isdbgrid") {
+ res = db.adminCommand(
+ {splitVector: "test.jstests_splitvector", keyPattern: {x: 1}, force: true});
+
+ assert.eq(true, res.ok, "9a: " + tojson(res));
+ assert.eq(1, res.splitKeys.length, "9b: " + tojson(res));
+ assert.eq(2, res.splitKeys[0].x, "9c: " + tojson(res));
+ for (i = 0; i < res.splitKeys.length; i++) {
+ assertFieldNamesMatch(res.splitKeys[i], {x: 1});
}
}
};
@@ -255,51 +276,51 @@ case9();
//
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case4();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case4();
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case5();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case5();
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case6();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case6();
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case7();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case7();
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case8();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case8();
resetCollection();
-f.ensureIndex( { x: 1, y: 1 } );
+f.ensureIndex({x: 1, y: 1});
case9();
resetCollection();
-f.ensureIndex( { x: 1, y: -1 , z : 1 } );
+f.ensureIndex({x: 1, y: -1, z: 1});
case9();
print("PASSED");
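
Every splitVector invocation in this file follows the same shape; a hedged standalone sketch (collection name and filler size are illustrative, run against a mongod where the splitVector command is available):

// Minimal sketch of a splitVector call as used throughout splitvector.js.
var coll = db.splitvector_demo;
coll.drop();
coll.ensureIndex({x: 1});            // splitVector requires an index on the key pattern
var pad = new Array(513).join("a");  // ~512 bytes of filler per document
for (var i = 0; i < 4500; ++i) {
    coll.save({x: i, y: pad});
}
var res = db.runCommand(
    {splitVector: coll.getFullName(), keyPattern: {x: 1}, maxChunkSize: 1});
assert.eq(true, res.ok);
printjson(res.splitKeys);            // split points over the 'x' domain, e.g. [{x: ...}, ...]
coll.drop();
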
diff --git a/jstests/core/stages_and_hash.js b/jstests/core/stages_and_hash.js
index 3bf1bde4951..8dcc8cf1345 100644
--- a/jstests/core/stages_and_hash.js
+++ b/jstests/core/stages_and_hash.js
@@ -13,25 +13,49 @@ t.ensureIndex({bar: 1});
t.ensureIndex({baz: 1});
// Scan foo <= 20
-ixscan1 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{foo: 1},
- startKey: {"": 20}, endKey: {},
- endKeyInclusive: true, direction: -1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ name: "stages_and_hashed",
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: -1
+ }
+ }
+};
// Scan bar >= 40
-ixscan2 = {ixscan: {args:{name: "stages_and_hashed", keyPattern:{bar: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
+ixscan2 = {
+ ixscan: {
+ args: {
+ name: "stages_and_hashed",
+ keyPattern: {bar: 1},
+ startKey: {"": 40},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// bar = 50 - foo
// Intersection is (foo=0 bar=50, foo=1 bar=49, ..., foo=10 bar=40)
-andix1ix2 = {andHash: {args: { nodes: [ixscan1, ixscan2]}}};
+andix1ix2 = {
+ andHash: {args: {nodes: [ixscan1, ixscan2]}}
+};
res = db.runCommand({stageDebug: {plan: andix1ix2, collection: collname}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 11);
// Filter predicates from 2 indices. Tests that we union the idx info.
-andix1ix2filter = {fetch: {filter: {bar: {$in: [45, 46, 48]}, foo: {$in: [4,5,6]}},
- args: {node: {andHash: {args: {nodes: [ixscan1, ixscan2]}}}}}};
+andix1ix2filter = {
+ fetch: {
+ filter: {bar: {$in: [45, 46, 48]}, foo: {$in: [4, 5, 6]}},
+ args: {node: {andHash: {args: {nodes: [ixscan1, ixscan2]}}}}
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: andix1ix2filter}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 2);
diff --git a/jstests/core/stages_and_sorted.js b/jstests/core/stages_and_sorted.js
index e6e493de5f7..c29db7ce2eb 100644
--- a/jstests/core/stages_and_sorted.js
+++ b/jstests/core/stages_and_sorted.js
@@ -10,9 +10,9 @@ for (var i = 0; i < N; ++i) {
t.insert({baz: 12});
t.insert({bar: 1});
// This is the only document that should be output by the AND stage.
- t.insert({foo: 1, bar:1, baz: 12});
+ t.insert({foo: 1, bar: 1, baz: 12});
t.insert({bar: 1});
- t.insert({bar:1, baz: 12});
+ t.insert({bar: 1, baz: 12});
t.insert({baz: 12});
t.insert({foo: 1, baz: 12});
t.insert({baz: 12});
@@ -23,29 +23,60 @@ t.ensureIndex({bar: 1});
t.ensureIndex({baz: 1});
// Scan foo == 1
-ixscan1 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{foo: 1},
- startKey: {"": 1}, endKey: {"": 1},
- endKeyInclusive: true, direction: 1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ name: "stages_and_sorted",
+ keyPattern: {foo: 1},
+ startKey: {"": 1},
+ endKey: {"": 1},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// Scan bar == 1
-ixscan2 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{bar: 1},
- startKey: {"": 1}, endKey: {"": 1},
- endKeyInclusive: true, direction: 1}}};
+ixscan2 = {
+ ixscan: {
+ args: {
+ name: "stages_and_sorted",
+ keyPattern: {bar: 1},
+ startKey: {"": 1},
+ endKey: {"": 1},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// Scan baz == 12
-ixscan3 = {ixscan: {args:{name: "stages_and_sorted", keyPattern:{baz: 1},
- startKey: {"": 12}, endKey: {"": 12},
- endKeyInclusive: true, direction: 1}}};
+ixscan3 = {
+ ixscan: {
+ args: {
+ name: "stages_and_sorted",
+ keyPattern: {baz: 1},
+ startKey: {"": 12},
+ endKey: {"": 12},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// Intersect foo==1 with bar==1 with baz==12.
-andix1ix2 = {andSorted: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
+andix1ix2 = {
+ andSorted: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: andix1ix2}});
printjson(res);
assert.eq(res.ok, 1);
assert.eq(res.results.length, N);
// Might as well make sure that hashed does the same thing.
-andix1ix2hash = {andHash: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}};
+andix1ix2hash = {
+ andHash: {args: {nodes: [ixscan1, ixscan2, ixscan3]}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: andix1ix2hash}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, N);
diff --git a/jstests/core/stages_collection_scan.js b/jstests/core/stages_collection_scan.js
index fddd22f624a..47fb6edbdde 100644
--- a/jstests/core/stages_collection_scan.js
+++ b/jstests/core/stages_collection_scan.js
@@ -8,7 +8,9 @@ for (var i = 0; i < N; ++i) {
t.insert({foo: i});
}
-forward = {cscan: {args: {direction: 1}}};
+forward = {
+ cscan: {args: {direction: 1}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: forward}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, N);
@@ -16,23 +18,27 @@ assert.eq(res.results[0].foo, 0);
assert.eq(res.results[49].foo, 49);
// And, backwards.
-backward = {cscan: {args: {direction: -1}}};
+backward = {
+ cscan: {args: {direction: -1}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: backward}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, N);
assert.eq(res.results[0].foo, 49);
assert.eq(res.results[49].foo, 0);
-forwardFiltered = {cscan: {args: {direction: 1},
- filter: {foo: {$lt: 25}}}};
+forwardFiltered = {
+ cscan: {args: {direction: 1}, filter: {foo: {$lt: 25}}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: forwardFiltered}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 25);
assert.eq(res.results[0].foo, 0);
assert.eq(res.results[24].foo, 24);
-backwardFiltered = {cscan: {args: {direction: -1},
- filter: {foo: {$lt: 25}}}};
+backwardFiltered = {
+ cscan: {args: {direction: -1}, filter: {foo: {$lt: 25}}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: backwardFiltered}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 25);
diff --git a/jstests/core/stages_delete.js b/jstests/core/stages_delete.js
index ddfff2c2ea5..1624b1fcc6a 100644
--- a/jstests/core/stages_delete.js
+++ b/jstests/core/stages_delete.js
@@ -1,6 +1,8 @@
// Test basic delete stage functionality.
var coll = db.stages_delete;
-var collScanStage = {cscan: {args: {direction: 1}, filter: {deleteMe: true}}};
+var collScanStage = {
+ cscan: {args: {direction: 1}, filter: {deleteMe: true}}
+};
var deleteStage;
// Test delete stage with isMulti: true.
@@ -8,7 +10,9 @@ coll.drop();
assert.writeOK(coll.insert({deleteMe: true}));
assert.writeOK(coll.insert({deleteMe: true}));
assert.writeOK(coll.insert({deleteMe: false}));
-deleteStage = {delete: {args: {node: collScanStage, isMulti: true}}};
+deleteStage = {
+ delete: {args: {node: collScanStage, isMulti: true}}
+};
assert.eq(coll.count(), 3);
assert.commandWorked(db.runCommand({stageDebug: {collection: coll.getName(), plan: deleteStage}}));
assert.eq(coll.count(), 1);
@@ -19,7 +23,9 @@ coll.drop();
assert.writeOK(coll.insert({deleteMe: true}));
assert.writeOK(coll.insert({deleteMe: true}));
assert.writeOK(coll.insert({deleteMe: false}));
-deleteStage = {delete: {args: {node: collScanStage, isMulti: false}}};
+deleteStage = {
+ delete: {args: {node: collScanStage, isMulti: false}}
+};
assert.eq(coll.count(), 3);
assert.commandWorked(db.runCommand({stageDebug: {collection: coll.getName(), plan: deleteStage}}));
assert.eq(coll.count(), 2);
diff --git a/jstests/core/stages_fetch.js b/jstests/core/stages_fetch.js
index 2bff065a5d7..7adc52c67c5 100644
--- a/jstests/core/stages_fetch.js
+++ b/jstests/core/stages_fetch.js
@@ -12,20 +12,36 @@ t.ensureIndex({foo: 1});
// 20 <= foo <= 30
// bar == 25 (not covered, should error.)
-ixscan1 = {ixscan: {args:{keyPattern:{foo:1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1},
- filter: {bar: 25}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {"": 30},
+ endKeyInclusive: true,
+ direction: 1
+ },
+ filter: {bar: 25}
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 0);
// Now, add a fetch. We should be able to filter on the non-covered field since we fetched the obj.
-ixscan2 = {ixscan: {args:{keyPattern:{foo:1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1}}};
-fetch = {fetch: {args: {node: ixscan2}, filter: {bar: 25}}};
+ixscan2 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {"": 30},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
+fetch = {
+ fetch: {args: {node: ixscan2}, filter: {bar: 25}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: fetch}});
printjson(res);
assert.eq(res.ok, 1);
diff --git a/jstests/core/stages_ixscan.js b/jstests/core/stages_ixscan.js
index 3f920f70241..d6b8a7b1aee 100644
--- a/jstests/core/stages_ixscan.js
+++ b/jstests/core/stages_ixscan.js
@@ -12,60 +12,102 @@ t.ensureIndex({foo: 1});
t.ensureIndex({foo: 1, baz: 1});
// foo <= 20
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: -1
+ }
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 21);
// 20 <= foo < 30
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: false,
- direction: 1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {"": 30},
+ endKeyInclusive: false,
+ direction: 1
+ }
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 10);
// 20 <= foo <= 30
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {"": 30},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 11);
// 20 <= foo <= 30
// foo == 25
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {"" : 30}, endKeyInclusive: true,
- direction: 1},
- filter: {foo: 25}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {"": 30},
+ endKeyInclusive: true,
+ direction: 1
+ },
+ filter: {foo: 25}
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 1);
// 20 <= foo <= 30
// baz == 25 (in index so we can match against it.)
-ixscan1 = {ixscan: {args:{keyPattern:{foo:1, baz: 1},
- startKey: {foo: 20, baz: MinKey},
- endKey: {foo: 30, baz: MaxKey}, endKeyInclusive: true,
- direction: 1},
- filter: {baz: 25}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1, baz: 1},
+ startKey: {foo: 20, baz: MinKey},
+ endKey: {foo: 30, baz: MaxKey},
+ endKeyInclusive: true,
+ direction: 1
+ },
+ filter: {baz: 25}
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 1);
// 20 <= foo <= 30
// bar == 25 (not covered, should error.)
-ixscan1 = {ixscan: {args:{keyPattern:{foo:1, baz: 1},
- startKey: {foo: 20, baz: MinKey},
- endKey: {foo: 30, baz: MaxKey}, endKeyInclusive: true,
- direction: 1},
- filter: {bar: 25}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1, baz: 1},
+ startKey: {foo: 20, baz: MinKey},
+ endKey: {foo: 30, baz: MaxKey},
+ endKeyInclusive: true,
+ direction: 1
+ },
+ filter: {bar: 25}
+ }
+};
res = db.runCommand({stageDebug: {collection: collname, plan: ixscan1}});
assert.eq(res.ok, 0);
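
The stages_*.js files all build plans for the internal stageDebug command out of the same building blocks; the sketch below, mirroring stages_ixscan.js, shows the basic shape (collection name illustrative).

// Minimal sketch of a stageDebug ixscan plan; the collection name is illustrative.
var coll = db.stages_demo;
coll.drop();
for (var i = 0; i < 50; ++i) {
    coll.insert({foo: i});
}
coll.ensureIndex({foo: 1});
// Scan 20 <= foo <= 30 on the {foo: 1} index.
var scan = {
    ixscan: {
        args: {
            keyPattern: {foo: 1},
            startKey: {"": 20},
            endKey: {"": 30},
            endKeyInclusive: true,
            direction: 1
        }
    }
};
// Composite stages (fetch, andHash, or, limit, ...) wrap other stages via args.node / args.nodes.
var res = db.runCommand({stageDebug: {collection: coll.getName(), plan: scan}});
assert.eq(1, res.ok);
assert.eq(11, res.results.length);  // keys 20..30 inclusive
coll.drop();
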
diff --git a/jstests/core/stages_limit_skip.js b/jstests/core/stages_limit_skip.js
index 7ca7b89b180..c582cb6b1e4 100644
--- a/jstests/core/stages_limit_skip.js
+++ b/jstests/core/stages_limit_skip.js
@@ -12,11 +12,20 @@ t.ensureIndex({foo: 1});
// foo <= 20, decreasing
// Limit of 5 results.
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
-limit1 = {limit: {args: {node: ixscan1, num: 5}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: -1
+ }
+ }
+};
+limit1 = {
+ limit: {args: {node: ixscan1, num: 5}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: limit1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 5);
@@ -25,7 +34,9 @@ assert.eq(res.results[4].foo, 16);
// foo <= 20, decreasing
// Skip 5 results.
-skip1 = {skip: {args: {node: ixscan1, num: 5}}};
+skip1 = {
+ skip: {args: {node: ixscan1, num: 5}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: skip1}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 16);
diff --git a/jstests/core/stages_mergesort.js b/jstests/core/stages_mergesort.js
index 18c52bef40c..5156743078c 100644
--- a/jstests/core/stages_mergesort.js
+++ b/jstests/core/stages_mergesort.js
@@ -9,22 +9,38 @@ for (var i = 0; i < N; ++i) {
t.insert({baz: 1, bar: i});
}
-t.ensureIndex({foo: 1, bar:1});
-t.ensureIndex({baz: 1, bar:1});
+t.ensureIndex({foo: 1, bar: 1});
+t.ensureIndex({baz: 1, bar: 1});
// foo == 1
// We would (internally) use "": MinKey and "": MaxKey for the bar index bounds.
-ixscan1 = {ixscan: {args:{keyPattern:{foo: 1, bar:1},
- startKey: {foo: 1, bar: 0},
- endKey: {foo: 1, bar: 100000}, endKeyInclusive: true,
- direction: 1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1, bar: 1},
+ startKey: {foo: 1, bar: 0},
+ endKey: {foo: 1, bar: 100000},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// baz == 1
-ixscan2 = {ixscan: {args:{keyPattern:{baz: 1, bar:1},
- startKey: {baz: 1, bar: 0},
- endKey: {baz: 1, bar: 100000}, endKeyInclusive: true,
- direction: 1}}};
+ixscan2 = {
+ ixscan: {
+ args: {
+ keyPattern: {baz: 1, bar: 1},
+ startKey: {baz: 1, bar: 0},
+ endKey: {baz: 1, bar: 100000},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
-mergesort = {mergeSort: {args: {nodes: [ixscan1, ixscan2], pattern: {bar: 1}}}};
+mergesort = {
+ mergeSort: {args: {nodes: [ixscan1, ixscan2], pattern: {bar: 1}}}
+};
res = db.runCommand({stageDebug: {plan: mergesort, collection: collname}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 2 * N);
diff --git a/jstests/core/stages_or.js b/jstests/core/stages_or.js
index 0d4a47c5e06..6ea73efd1ed 100644
--- a/jstests/core/stages_or.js
+++ b/jstests/core/stages_or.js
@@ -13,22 +13,42 @@ t.ensureIndex({bar: 1});
t.ensureIndex({baz: 1});
// baz >= 40
-ixscan1 = {ixscan: {args:{keyPattern:{baz: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
+ixscan1 = {
+ ixscan: {
+ args: {
+ keyPattern: {baz: 1},
+ startKey: {"": 40},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// foo >= 40
-ixscan2 = {ixscan: {args:{keyPattern:{foo: 1},
- startKey: {"": 40}, endKey: {},
- endKeyInclusive: true, direction: 1}}};
+ixscan2 = {
+ ixscan: {
+ args: {
+ keyPattern: {foo: 1},
+ startKey: {"": 40},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: 1
+ }
+ }
+};
// OR of baz and foo. Baz == foo and we dedup.
-orix1ix2 = {or: {args: {nodes: [ixscan1, ixscan2], dedup:true}}};
+orix1ix2 = {
+ or: {args: {nodes: [ixscan1, ixscan2], dedup: true}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: orix1ix2}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 10);
// No deduping, 2x the results.
-orix1ix2nodd = {or: {args: {nodes: [ixscan1, ixscan2], dedup:false}}};
+orix1ix2nodd = {
+ or: {args: {nodes: [ixscan1, ixscan2], dedup: false}}
+};
res = db.runCommand({stageDebug: {collection: collname, plan: orix1ix2nodd}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 20);
diff --git a/jstests/core/stages_sort.js b/jstests/core/stages_sort.js
index 89b71a28f8b..b6cb5a456af 100644
--- a/jstests/core/stages_sort.js
+++ b/jstests/core/stages_sort.js
@@ -11,13 +11,23 @@ if (false) {
t.ensureIndex({foo: 1});
// Foo <= 20, descending.
- ixscan1 = {ixscan: {args:{name: "stages_sort", keyPattern:{foo: 1},
- startKey: {"": 20},
- endKey: {}, endKeyInclusive: true,
- direction: -1}}};
+ ixscan1 = {
+ ixscan: {
+ args: {
+ name: "stages_sort",
+ keyPattern: {foo: 1},
+ startKey: {"": 20},
+ endKey: {},
+ endKeyInclusive: true,
+ direction: -1
+ }
+ }
+ };
// Sort with foo ascending.
- sort1 = {sort: {args: {node: ixscan1, pattern: {foo: 1}}}};
+ sort1 = {
+ sort: {args: {node: ixscan1, pattern: {foo: 1}}}
+ };
res = db.runCommand({stageDebug: sort1});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 21);
@@ -25,10 +35,10 @@ if (false) {
assert.eq(res.results[20].foo, 20);
// Sort with a limit.
- //sort2 = {sort: {args: {node: ixscan1, pattern: {foo: 1}, limit: 2}}};
- //res = db.runCommand({stageDebug: sort2});
- //assert.eq(res.ok, 1);
- //assert.eq(res.results.length, 2);
- //assert.eq(res.results[0].foo, 0);
- //assert.eq(res.results[1].foo, 1);
+ // sort2 = {sort: {args: {node: ixscan1, pattern: {foo: 1}, limit: 2}}};
+ // res = db.runCommand({stageDebug: sort2});
+ // assert.eq(res.ok, 1);
+ // assert.eq(res.results.length, 2);
+ // assert.eq(res.results[0].foo, 0);
+ // assert.eq(res.results[1].foo, 1);
}
diff --git a/jstests/core/stages_text.js b/jstests/core/stages_text.js
index 6598d135b9f..d38ef316663 100644
--- a/jstests/core/stages_text.js
+++ b/jstests/core/stages_text.js
@@ -9,13 +9,11 @@ t.save({x: "az b x"});
t.ensureIndex({x: "text"});
// We expect to retrieve 'b'
-res = db.runCommand({stageDebug: {collection: collname,
- plan: {text: {args: {search: "b"}}}}});
+res = db.runCommand({stageDebug: {collection: collname, plan: {text: {args: {search: "b"}}}}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 1);
// I have not been indexed yet.
-res = db.runCommand({stageDebug: {collection: collname,
- plan: {text: {args: {search: "hari"}}}}});
+res = db.runCommand({stageDebug: {collection: collname, plan: {text: {args: {search: "hari"}}}}});
assert.eq(res.ok, 1);
assert.eq(res.results.length, 0);
diff --git a/jstests/core/startup_log.js b/jstests/core/startup_log.js
index e1a62991981..3b0cbe3464d 100644
--- a/jstests/core/startup_log.js
+++ b/jstests/core/startup_log.js
@@ -1,76 +1,101 @@
load('jstests/aggregation/extras/utils.js');
(function() {
-'use strict';
+ 'use strict';
-// Check that smallArray is entirely contained by largeArray
-// returns false if a member of smallArray is not in largeArray
-function arrayIsSubset(smallArray, largeArray) {
- for(var i = 0; i < smallArray.length; i++) {
- if(!Array.contains(largeArray, smallArray[i])) {
- print("Could not find " + smallArray[i] + " in largeArray");
- return false;
+ // Check that smallArray is entirely contained by largeArray
+ // returns false if a member of smallArray is not in largeArray
+ function arrayIsSubset(smallArray, largeArray) {
+ for (var i = 0; i < smallArray.length; i++) {
+ if (!Array.contains(largeArray, smallArray[i])) {
+ print("Could not find " + smallArray[i] + " in largeArray");
+ return false;
+ }
}
- }
- return true;
-}
+ return true;
+ }
-// Test startup_log
-var stats = db.getSisterDB( "local" ).startup_log.stats();
-assert(stats.capped);
+ // Test startup_log
+ var stats = db.getSisterDB("local").startup_log.stats();
+ assert(stats.capped);
-var latestStartUpLog = db.getSisterDB( "local" ).startup_log.find().sort( { $natural: -1 } ).limit(1).next();
-var serverStatus = db._adminCommand( "serverStatus" );
-var cmdLine = db._adminCommand( "getCmdLineOpts" ).parsed;
+ var latestStartUpLog =
+ db.getSisterDB("local").startup_log.find().sort({$natural: -1}).limit(1).next();
+ var serverStatus = db._adminCommand("serverStatus");
+ var cmdLine = db._adminCommand("getCmdLineOpts").parsed;
-// Test that the startup log has the expected keys
-var verbose = false;
-var expectedKeys = ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"];
-var keys = Object.keySet(latestStartUpLog);
-assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed');
+ // Test that the startup log has the expected keys
+ var verbose = false;
+ var expectedKeys =
+ ["_id", "hostname", "startTime", "startTimeLocal", "cmdLine", "pid", "buildinfo"];
+ var keys = Object.keySet(latestStartUpLog);
+ assert(arrayEq(expectedKeys, keys, verbose), 'startup_log keys failed');
-// Tests _id implicitly - should be comprised of host-timestamp
-// Setup expected startTime and startTimeLocal from the supplied timestamp
-var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp
-var _idUptime = _id.pop();
-var _idHost = _id.join('-');
-var uptimeSinceEpochRounded = Math.floor(_idUptime/1000) * 1000;
-var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime
+ // Tests _id implicitly - should be comprised of host-timestamp
+ // Setup expected startTime and startTimeLocal from the supplied timestamp
+ var _id = latestStartUpLog._id.split('-'); // _id should consist of host-timestamp
+ var _idUptime = _id.pop();
+ var _idHost = _id.join('-');
+ var uptimeSinceEpochRounded = Math.floor(_idUptime / 1000) * 1000;
+ var startTime = new Date(uptimeSinceEpochRounded); // Expected startTime
-assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id");
-assert.eq(serverStatus.host.split(':')[0], latestStartUpLog.hostname, "Hostname doesn't match one in server status");
-assert.closeWithinMS(startTime, latestStartUpLog.startTime,
- "StartTime doesn't match one from _id", 2000); // Expect less than 2 sec delta
-assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts");
-assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus");
+ assert.eq(_idHost, latestStartUpLog.hostname, "Hostname doesn't match one from _id");
+ assert.eq(serverStatus.host.split(':')[0],
+ latestStartUpLog.hostname,
+ "Hostname doesn't match one in server status");
+ assert.closeWithinMS(startTime,
+ latestStartUpLog.startTime,
+ "StartTime doesn't match one from _id",
+ 2000); // Expect less than 2 sec delta
+ assert.eq(cmdLine, latestStartUpLog.cmdLine, "cmdLine doesn't match that from getCmdLineOpts");
+ assert.eq(serverStatus.pid, latestStartUpLog.pid, "pid doesn't match that from serverStatus");
-// Test buildinfo
-var buildinfo = db.runCommand( "buildinfo" );
-delete buildinfo.ok; // Delete extra meta info not in startup_log
-var isMaster = db._adminCommand( "ismaster" );
+ // Test buildinfo
+ var buildinfo = db.runCommand("buildinfo");
+ delete buildinfo.ok; // Delete extra meta info not in startup_log
+ var isMaster = db._adminCommand("ismaster");
-// Test buildinfo has the expected keys
-var expectedKeys = ["version", "gitVersion", "allocator", "versionArray", "javascriptEngine",
- "openssl", "buildEnvironment", "debug", "maxBsonObjectSize", "bits", "modules" ];
+ // Test buildinfo has the expected keys
+ var expectedKeys = [
+ "version",
+ "gitVersion",
+ "allocator",
+ "versionArray",
+ "javascriptEngine",
+ "openssl",
+ "buildEnvironment",
+ "debug",
+ "maxBsonObjectSize",
+ "bits",
+ "modules"
+ ];
-var keys = Object.keySet(latestStartUpLog.buildinfo);
-// Disabled to check
-assert(arrayIsSubset(expectedKeys, keys), "buildinfo keys failed! \n expected:\t" + expectedKeys + "\n actual:\t" + keys);
-assert.eq(buildinfo, latestStartUpLog.buildinfo, "buildinfo doesn't match that from buildinfo command");
+ var keys = Object.keySet(latestStartUpLog.buildinfo);
+ // Disabled to check
+ assert(arrayIsSubset(expectedKeys, keys),
+ "buildinfo keys failed! \n expected:\t" + expectedKeys + "\n actual:\t" + keys);
+ assert.eq(buildinfo,
+ latestStartUpLog.buildinfo,
+ "buildinfo doesn't match that from buildinfo command");
-// Test version and version Array
-var version = latestStartUpLog.buildinfo.version.split('-')[0];
-var versionArray = latestStartUpLog.buildinfo.versionArray;
-var versionArrayCleaned = versionArray.slice(0, 3);
-if (versionArray[3] == -100) {
- versionArrayCleaned[2] -= 1;
-}
+ // Test version and version Array
+ var version = latestStartUpLog.buildinfo.version.split('-')[0];
+ var versionArray = latestStartUpLog.buildinfo.versionArray;
+ var versionArrayCleaned = versionArray.slice(0, 3);
+ if (versionArray[3] == -100) {
+ versionArrayCleaned[2] -= 1;
+ }
-assert.eq(serverStatus.version, latestStartUpLog.buildinfo.version, "Mongo version doesn't match that from ServerStatus");
-assert.eq(version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray");
-var jsEngine = latestStartUpLog.buildinfo.javascriptEngine;
-assert((jsEngine == "none") || jsEngine.startsWith("mozjs"));
-assert.eq(isMaster.maxBsonObjectSize, latestStartUpLog.buildinfo.maxBsonObjectSize, "maxBsonObjectSize doesn't match one from ismaster");
+ assert.eq(serverStatus.version,
+ latestStartUpLog.buildinfo.version,
+ "Mongo version doesn't match that from ServerStatus");
+ assert.eq(
+ version, versionArrayCleaned.join('.'), "version doesn't match that from the versionArray");
+ var jsEngine = latestStartUpLog.buildinfo.javascriptEngine;
+ assert((jsEngine == "none") || jsEngine.startsWith("mozjs"));
+ assert.eq(isMaster.maxBsonObjectSize,
+ latestStartUpLog.buildinfo.maxBsonObjectSize,
+ "maxBsonObjectSize doesn't match one from ismaster");
})();
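
Aside (not part of the patch): startup_log.js leans on the convention that each startup_log _id is "<hostname>-<epoch millis>", which is why it splits on '-' and rebuilds the start time. A minimal shell sketch of that parsing, using the same rounding the test uses:

// Sketch only: recover host and start time from a startup_log _id.
var entry = db.getSisterDB("local").startup_log.find().sort({$natural: -1}).limit(1).next();
var parts = entry._id.split('-');
var millis = parseInt(parts.pop(), 10);                  // trailing piece is the start time in ms
var host = parts.join('-');                              // hostname may itself contain '-'
print(host + " started near " + new Date(Math.floor(millis / 1000) * 1000));
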
diff --git a/jstests/core/storageDetailsCommand.js b/jstests/core/storageDetailsCommand.js
index 12baf9c4b92..cfd370cadaf 100644
--- a/jstests/core/storageDetailsCommand.js
+++ b/jstests/core/storageDetailsCommand.js
@@ -12,7 +12,6 @@ for (var i = 0; i < 3000; ++i) {
function test() {
var result = t.diskStorageStats({numberOfSlices: 100});
-
if (result["code"] === COMMAND_NOT_FOUND_CODE) {
print("storageDetails command not available: skipping");
return;
diff --git a/jstests/core/storefunc.js b/jstests/core/storefunc.js
index fae9e58bfa4..8598e9cc62b 100644
--- a/jstests/core/storefunc.js
+++ b/jstests/core/storefunc.js
@@ -4,42 +4,57 @@ var res;
s = testdb.system.js;
s.remove({});
-assert.eq( 0 , s.count() , "setup - A" );
-
-res = s.save( { _id : "x" , value : "3" } );
-assert( !res.hasWriteError() , "setup - B" );
-assert.eq( 1 , s.count() , "setup - C" );
-
-s.remove( { _id : "x" } );
-assert.eq( 0 , s.count() , "setup - D" );
-s.save( { _id : "x" , value : "4" } );
-assert.eq( 1 , s.count() , "setup - E" );
-
-assert.eq( 4 , s.findOne( { _id : "x" } ).value , "E2 " );
-
-assert.eq( 4 , s.findOne().value , "setup - F" );
-s.update( { _id : "x" } , { $set : { value : 5 } } );
-assert.eq( 1 , s.count() , "setup - G" );
-assert.eq( 5 , s.findOne().value , "setup - H" );
-
-assert.eq( 5 , testdb.eval( "return x" ) , "exec - 1 " );
-
-s.update( { _id : "x" } , { $set : { value : 6 } } );
-assert.eq( 1 , s.count() , "setup2 - A" );
-assert.eq( 6 , s.findOne().value , "setup - B" );
-
-assert.eq( 6 , testdb.eval( "return x" ) , "exec - 2 " );
-
-
-
-s.insert( { _id : "bar" , value : function( z ){ return 17 + z; } } );
-assert.eq( 22 , testdb.eval( "return bar(5);" ) , "exec - 3 " );
-
-assert( s.getIndexKeys().length > 0 , "no indexes" );
-assert( s.getIndexKeys()[0]._id , "no _id index" );
-
-assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C1" );
-s.save( { _id : "zzz" , value : 5 } );
-assert.eq( "number" , testdb.eval( function(){ return typeof(zzz); } ) , "C2" );
-s.remove( { _id : "zzz" } );
-assert.eq( "undefined" , testdb.eval( function(){ return typeof(zzz); } ) , "C3" );
+assert.eq(0, s.count(), "setup - A");
+
+res = s.save({_id: "x", value: "3"});
+assert(!res.hasWriteError(), "setup - B");
+assert.eq(1, s.count(), "setup - C");
+
+s.remove({_id: "x"});
+assert.eq(0, s.count(), "setup - D");
+s.save({_id: "x", value: "4"});
+assert.eq(1, s.count(), "setup - E");
+
+assert.eq(4, s.findOne({_id: "x"}).value, "E2 ");
+
+assert.eq(4, s.findOne().value, "setup - F");
+s.update({_id: "x"}, {$set: {value: 5}});
+assert.eq(1, s.count(), "setup - G");
+assert.eq(5, s.findOne().value, "setup - H");
+
+assert.eq(5, testdb.eval("return x"), "exec - 1 ");
+
+s.update({_id: "x"}, {$set: {value: 6}});
+assert.eq(1, s.count(), "setup2 - A");
+assert.eq(6, s.findOne().value, "setup - B");
+
+assert.eq(6, testdb.eval("return x"), "exec - 2 ");
+
+s.insert({
+ _id: "bar",
+ value: function(z) {
+ return 17 + z;
+ }
+});
+assert.eq(22, testdb.eval("return bar(5);"), "exec - 3 ");
+
+assert(s.getIndexKeys().length > 0, "no indexes");
+assert(s.getIndexKeys()[0]._id, "no _id index");
+
+assert.eq("undefined",
+ testdb.eval(function() {
+ return typeof(zzz);
+ }),
+ "C1");
+s.save({_id: "zzz", value: 5});
+assert.eq("number",
+ testdb.eval(function() {
+ return typeof(zzz);
+ }),
+ "C2");
+s.remove({_id: "zzz"});
+assert.eq("undefined",
+ testdb.eval(function() {
+ return typeof(zzz);
+ }),
+ "C3");
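
Aside (not part of the patch): storefunc.js exercises server-side stored JavaScript, where documents saved into system.js become names visible to db.eval(). A hedged sketch of the same pattern against a scratch database; db.eval() is deprecated and removed in newer servers, so that call is left commented out:

// Sketch: store a function in system.js; legacy servers expose it to eval by _id.
var sdb = db.getSisterDB("storefunc_sketch");
sdb.system.js.save({_id: "addSeven", value: function(n) { return n + 7; }});
// assert.eq(12, sdb.eval("return addSeven(5);"));       // only on servers that still support eval
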
diff --git a/jstests/core/string_with_nul_bytes.js b/jstests/core/string_with_nul_bytes.js
index a1f6e395dd2..e72cc0b6dc1 100644
--- a/jstests/core/string_with_nul_bytes.js
+++ b/jstests/core/string_with_nul_bytes.js
@@ -4,6 +4,6 @@ t = db.string_with_nul_bytes.js;
t.drop();
string = "string with a NUL (\0) byte";
-t.insert({str:string});
+t.insert({str: string});
assert.eq(t.findOne().str, string);
-assert.eq(t.findOne().str.length, string.length); // just to be sure
+assert.eq(t.findOne().str.length, string.length); // just to be sure
diff --git a/jstests/core/sub1.js b/jstests/core/sub1.js
index 324b21b75e8..d42677f3266 100644
--- a/jstests/core/sub1.js
+++ b/jstests/core/sub1.js
@@ -3,12 +3,15 @@
t = db.sub1;
t.drop();
-x = { a : 1 , b : { c : { d : 2 } } };
+x = {
+ a: 1,
+ b: {c: {d: 2}}
+};
-t.save( x );
+t.save(x);
y = t.findOne();
-assert.eq( 1 , y.a );
-assert.eq( 2 , y.b.c.d );
-print( tojson( y ) );
+assert.eq(1, y.a);
+assert.eq(2, y.b.c.d);
+print(tojson(y));
diff --git a/jstests/core/system_profile.js b/jstests/core/system_profile.js
index abfa0c98832..73d303a3277 100644
--- a/jstests/core/system_profile.js
+++ b/jstests/core/system_profile.js
@@ -26,21 +26,25 @@ assert.writeError(testDB.system.profile.remove({}));
// Using findAndModify to write to "system.profile" should fail.
assert.commandWorked(testDB.dropDatabase());
assert.commandWorked(testDB.createCollection("system.profile"));
-assert.commandFailed(
- testDB.system.profile.runCommand("findAndModify", {query: {}, update: {a: 1}}));
-assert.commandFailed(
- testDB.system.profile.runCommand("findAndModify", {query: {}, update: {a: 1}, upsert: true}));
+assert.commandFailed(testDB.system.profile.runCommand("findAndModify",
+ {query: {}, update: {a: 1}}));
+assert.commandFailed(testDB.system.profile.runCommand("findAndModify",
+ {query: {}, update: {a: 1}, upsert: true}));
assert.commandFailed(testDB.system.profile.runCommand("findAndModify", {query: {}, remove: true}));
// Using mapReduce to write to "system.profile" should fail.
assert.commandWorked(testDB.dropDatabase());
assert.writeOK(testDB.foo.insert({val: 1}));
assert.commandFailed(testDB.foo.runCommand("mapReduce",
- {map: function() { emit(0, this.val); },
- reduce: function(id, values) {
+ {
+ map: function() {
+ emit(0, this.val);
+ },
+ reduce: function(id, values) {
return Array.sum(values);
- },
- out: "system.profile"}));
+ },
+ out: "system.profile"
+ }));
// Using aggregate to write to "system.profile" should fail.
assert.commandWorked(testDB.dropDatabase());
@@ -50,12 +54,12 @@ assert.commandFailed(testDB.foo.runCommand("aggregate", {pipeline: [{$out: "syst
// Renaming to/from "system.profile" should fail.
assert.commandWorked(testDB.dropDatabase());
assert.commandWorked(testDB.createCollection("system.profile"));
-assert.commandFailed(testDB.adminCommand({renameCollection: testDB.system.profile.getFullName(),
- to: testDB.foo.getFullName()}));
+assert.commandFailed(testDB.adminCommand(
+ {renameCollection: testDB.system.profile.getFullName(), to: testDB.foo.getFullName()}));
assert.commandWorked(testDB.dropDatabase());
assert.commandWorked(testDB.createCollection("foo"));
-assert.commandFailed(testDB.adminCommand({renameCollection: testDB.foo.getFullName(),
- to: testDB.system.profile.getFullName()}));
+assert.commandFailed(testDB.adminCommand(
+ {renameCollection: testDB.foo.getFullName(), to: testDB.system.profile.getFullName()}));
// Copying a database containing "system.profile" should succeed. The "system.profile" collection
// should not be copied.
@@ -63,7 +67,7 @@ assert.commandWorked(testDB.dropDatabase());
assert.commandWorked(testDB.createCollection("foo"));
assert.commandWorked(testDB.createCollection("system.profile"));
assert.commandWorked(testDBCopy.dropDatabase());
-assert.commandWorked(testDB.adminCommand({copydb: 1, fromdb: testDB.getName(),
- todb: testDBCopy.getName()}));
+assert.commandWorked(
+ testDB.adminCommand({copydb: 1, fromdb: testDB.getName(), todb: testDBCopy.getName()}));
assert.commandWorked(testDBCopy.foo.stats());
assert.commandFailed(testDBCopy.system.profile.stats());
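
Aside (not part of the patch): system.profile is meant to be populated only by the server's profiler, which is why every direct write path above must fail. A minimal sketch of the supported flow, with an illustrative database name:

// Sketch: let the profiler populate system.profile, then read it back.
var pdb = db.getSisterDB("profile_sketch");
pdb.setProfilingLevel(2);                                // profile all operations
pdb.foo.insert({x: 1});
printjson(pdb.system.profile.find().sort({ts: -1}).limit(1).next());
pdb.setProfilingLevel(0);
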
diff --git a/jstests/core/tailable_skip_limit.js b/jstests/core/tailable_skip_limit.js
index da2f80d0311..f771621ad83 100644
--- a/jstests/core/tailable_skip_limit.js
+++ b/jstests/core/tailable_skip_limit.js
@@ -55,7 +55,9 @@
assert.eq(7, cursor.next()["_id"]);
// Tailable with negative limit is an error.
- assert.throws(function() { t.find().addOption(2).limit(-100).next(); });
+ assert.throws(function() {
+ t.find().addOption(2).limit(-100).next();
+ });
// Tests that a tailable cursor over an empty capped collection produces a dead cursor, intended
// to be run on both mongod and mongos. For SERVER-20720.
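
Aside (not part of the patch): option flag 2 in the test above is DBQuery.Option.tailable, and tailable cursors are only meaningful on capped collections. A rough sketch under that assumption; names are illustrative and getMore timing on a live tailable cursor can vary:

// Sketch: a tailable cursor on a capped collection can pick up later inserts.
var tdb = db.getSisterDB("tailable_sketch");
tdb.cap.drop();
tdb.createCollection("cap", {capped: true, size: 1024 * 1024});
tdb.cap.insert({_id: 1});
var cur = tdb.cap.find().addOption(DBQuery.Option.tailable);
assert.eq(1, cur.next()._id);                            // existing document first
tdb.cap.insert({_id: 2});
assert.eq(2, cur.next()._id);                            // then the new one
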
diff --git a/jstests/core/temp_cleanup.js b/jstests/core/temp_cleanup.js
index d9dc7fdcccf..895f7c5f8b9 100644
--- a/jstests/core/temp_cleanup.js
+++ b/jstests/core/temp_cleanup.js
@@ -1,16 +1,22 @@
-mydb = db.getSisterDB( "temp_cleanup_test" );
+mydb = db.getSisterDB("temp_cleanup_test");
t = mydb.tempCleanup;
t.drop();
-t.insert( { x : 1 } );
+t.insert({x: 1});
-res = t.mapReduce( function(){ emit(1,1); } , function(){ return 1; } , "xyz" );
-printjson( res );
+res = t.mapReduce(
+ function() {
+ emit(1, 1);
+ },
+ function() {
+ return 1;
+ },
+ "xyz");
+printjson(res);
-assert.eq( 1 , t.count() , "A1" );
-assert.eq( 1 , mydb[res.result].count() , "A2" );
+assert.eq(1, t.count(), "A1");
+assert.eq(1, mydb[res.result].count(), "A2");
mydb.dropDatabase();
-
diff --git a/jstests/core/test_command_line_test_helpers.js b/jstests/core/test_command_line_test_helpers.js
index a66bd713327..6e001075ee0 100644
--- a/jstests/core/test_command_line_test_helpers.js
+++ b/jstests/core/test_command_line_test_helpers.js
@@ -1,6 +1,6 @@
load('jstests/libs/command_line/test_parsed_options.js');
-assert.docEq({ x : 1, y : 1 }, mergeOptions({ x : 1 }, { y : 1 }));
-assert.docEq({ x : 1, y : 1 }, mergeOptions({ x : 1, y : 2 }, { y : 1 }));
-assert.docEq({ x : { z : 1 }, y : 1 }, mergeOptions({ x : { z : 1 } }, { y : 1 }));
-assert.docEq({ x : { z : 1 } }, mergeOptions({ x : { z : 2 } }, { x : { z : 1 } }));
+assert.docEq({x: 1, y: 1}, mergeOptions({x: 1}, {y: 1}));
+assert.docEq({x: 1, y: 1}, mergeOptions({x: 1, y: 2}, {y: 1}));
+assert.docEq({x: {z: 1}, y: 1}, mergeOptions({x: {z: 1}}, {y: 1}));
+assert.docEq({x: {z: 1}}, mergeOptions({x: {z: 2}}, {x: {z: 1}}));
diff --git a/jstests/core/testminmax.js b/jstests/core/testminmax.js
index 803f1b48a0b..5e874397a04 100644
--- a/jstests/core/testminmax.js
+++ b/jstests/core/testminmax.js
@@ -1,14 +1,31 @@
t = db.minmaxtest;
t.drop();
-t.insert({"_id" : "IBM.N|00001264779918428889", "DESCRIPTION" : { "n" : "IBMSTK2", "o" : "IBM STK", "s" : "changed" } });
-t.insert({ "_id" : "VOD.N|00001264779918433344", "COMPANYNAME" : { "n" : "Vodafone Group PLC 2", "o" : "Vodafone Group PLC", "s" : "changed" } });
-t.insert({ "_id" : "IBM.N|00001264779918437075", "DESCRIPTION" : { "n" : "IBMSTK3", "o" : "IBM STK2", "s" : "changed" } });
-t.insert({ "_id" : "VOD.N|00001264779918441426", "COMPANYNAME" : { "n" : "Vodafone Group PLC 3", "o" : "Vodafone Group PLC 2", "s" : "changed" } });
+t.insert({
+ "_id": "IBM.N|00001264779918428889",
+ "DESCRIPTION": {"n": "IBMSTK2", "o": "IBM STK", "s": "changed"}
+});
+t.insert({
+ "_id": "VOD.N|00001264779918433344",
+ "COMPANYNAME": {"n": "Vodafone Group PLC 2", "o": "Vodafone Group PLC", "s": "changed"}
+});
+t.insert({
+ "_id": "IBM.N|00001264779918437075",
+ "DESCRIPTION": {"n": "IBMSTK3", "o": "IBM STK2", "s": "changed"}
+});
+t.insert({
+ "_id": "VOD.N|00001264779918441426",
+ "COMPANYNAME": {"n": "Vodafone Group PLC 3", "o": "Vodafone Group PLC 2", "s": "changed"}
+});
// temp:
-printjson( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).toArray() );
+printjson(t.find()
+ .min({"_id": "IBM.N|00000000000000000000"})
+ .max({"_id": "IBM.N|99999999999999999999"})
+ .toArray());
// this should be 2!! add assertion when fixed
// http://jira.mongodb.org/browse/SERVER-675
-print( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).count() );
-
+print(t.find()
+ .min({"_id": "IBM.N|00000000000000000000"})
+ .max({"_id": "IBM.N|99999999999999999999"})
+ .count());
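
Aside (not part of the patch): min()/max() clamp the bounds of an index scan rather than filtering documents, which is the behaviour SERVER-675 is about. A hedged sketch with made-up data; newer servers also require hint() alongside min/max:

// Sketch: bound an _id index scan to one key prefix.
var mdb = db.getSisterDB("minmax_sketch");
mdb.quotes.drop();
mdb.quotes.insert({_id: "IBM.N|1"});
mdb.quotes.insert({_id: "VOD.N|1"});
printjson(mdb.quotes.find()
              .min({_id: "IBM.N|"})
              .max({_id: "IBM.N|~"})                     // exclusive upper bound past the prefix
              .hint({_id: 1})
              .toArray());
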
diff --git a/jstests/core/top.js b/jstests/core/top.js
index 9dc1aad684e..1aff2a4136b 100644
--- a/jstests/core/top.js
+++ b/jstests/core/top.js
@@ -9,8 +9,8 @@ var testColl = testDB[name + "coll"];
// Ensure an empty collection exists for first top command
testColl.drop();
-testColl.insert({x:0});
-testColl.remove({x:0});
+testColl.insert({x: 0});
+testColl.remove({x: 0});
// get top statistics for the test collection
function getTop() {
@@ -23,22 +23,24 @@ var lastTop = getTop();
// return the number of operations since the last call to diffTop for the specified key
function diffTop(key) {
var thisTop = getTop();
- difference = { time : thisTop[key].time - lastTop[key].time,
- count : thisTop[key].count - lastTop[key].count };
+ difference = {
+ time: thisTop[key].time - lastTop[key].time,
+ count: thisTop[key].count - lastTop[key].count
+ };
lastTop[key] = thisTop[key];
assert.gte(difference.count, 0, "non-decreasing count");
assert.gte(difference.time, 0, "non-decreasing time");
// Time should advance iff operations were performed
- assert.eq(difference.count != 0, difference.time > 0,"non-zero time iff non-zero count");
+ assert.eq(difference.count != 0, difference.time > 0, "non-zero time iff non-zero count");
return difference;
}
var numRecords = 100;
// check stats for specified key are as expected
-var checked = { };
+var checked = {};
function checkStats(key, expected) {
checked[key]++;
var actual = diffTop(key).count;
@@ -46,28 +48,28 @@ function checkStats(key, expected) {
}
// Insert
-for(i = 0; i < numRecords; i++) {
- testColl.insert({_id:i});
+for (i = 0; i < numRecords; i++) {
+ testColl.insert({_id: i});
}
checkStats("insert", numRecords);
checkStats("writeLock", numRecords);
// Update
-for(i = 0; i < numRecords; i++) {
- testColl.update({_id:i},{x:i});
+for (i = 0; i < numRecords; i++) {
+ testColl.update({_id: i}, {x: i});
}
checkStats("update", numRecords);
// Queries
-var query = { };
-for(i = 0; i < numRecords; i++) {
- query[i] = testColl.find({x : {$gte:i}}).batchSize(2);
+var query = {};
+for (i = 0; i < numRecords; i++) {
+ query[i] = testColl.find({x: {$gte: i}}).batchSize(2);
assert.eq(query[i].next()._id, i);
}
-checkStats("queries" ,numRecords);
+checkStats("queries", numRecords);
// Getmore
-for(i = 0; i < numRecords / 2; i++) {
+for (i = 0; i < numRecords / 2; i++) {
assert.eq(query[i].next()._id, i + 1);
assert.eq(query[i].next()._id, i + 2);
assert.eq(query[i].next()._id, i + 3);
@@ -76,28 +78,26 @@ for(i = 0; i < numRecords / 2; i++) {
checkStats("getmore", numRecords);
// Remove
-for(i = 0; i < numRecords; i++) {
- testColl.remove({_id : 1});
+for (i = 0; i < numRecords; i++) {
+ testColl.remove({_id: 1});
}
checkStats("remove", numRecords);
// Upsert, note that these are counted as updates, not inserts
-for(i = 0; i < numRecords; i++) {
- testColl.update({_id:i},{x:i},{upsert:1});
+for (i = 0; i < numRecords; i++) {
+ testColl.update({_id: i}, {x: i}, {upsert: 1});
}
checkStats("update", numRecords);
-
// Commands
-diffTop("commands"); // ignore any commands before this
-for(i = 0; i < numRecords; i++) {
- assert.eq(testDB.runCommand({count:"toptestcoll"}).n, numRecords);
+diffTop("commands"); // ignore any commands before this
+for (i = 0; i < numRecords; i++) {
+ assert.eq(testDB.runCommand({count: "toptestcoll"}).n, numRecords);
}
checkStats("commands", numRecords);
-for(key in lastTop) {
- if (!(key in checked)) {
- printjson({key:key, stats:diffTop(key)});
- }
+for (key in lastTop) {
+ if (!(key in checked)) {
+ printjson({key: key, stats: diffTop(key)});
+ }
}
-
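
Aside (not part of the patch): top is an admin command returning cumulative per-namespace time/count pairs, which is exactly what diffTop() above subtracts between calls. A small usage sketch with an illustrative namespace:

// Sketch: read the raw counters that top.js diffs.
var skColl = db.getSisterDB("top_sketch").things;
skColl.insert({x: 1});
var totals = db.adminCommand("top").totals;
printjson(totals["top_sketch.things"]);                  // {insert: {time: ..., count: ...}, ...}
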
diff --git a/jstests/core/ts1.js b/jstests/core/ts1.js
index 34efa15f981..342ff3215d7 100644
--- a/jstests/core/ts1.js
+++ b/jstests/core/ts1.js
@@ -3,36 +3,35 @@ t.drop();
N = 20;
-for ( i=0; i<N; i++ ){
- t.insert( { _id : i , x : new Timestamp() } );
- sleep( 100 );
+for (i = 0; i < N; i++) {
+ t.insert({_id: i, x: new Timestamp()});
+ sleep(100);
}
-function get(i){
- return t.findOne( { _id : i } ).x;
+function get(i) {
+ return t.findOne({_id: i}).x;
}
-function cmp( a , b ){
- if ( a.t < b.t )
+function cmp(a, b) {
+ if (a.t < b.t)
return -1;
- if ( a.t > b.t )
+ if (a.t > b.t)
return 1;
-
+
return a.i - b.i;
}
-for ( i=0; i<N-1; i++ ){
+for (i = 0; i < N - 1; i++) {
a = get(i);
- b = get(i+1);
- //print( tojson(a) + "\t" + tojson(b) + "\t" + cmp(a,b) );
- assert.gt( 0 , cmp( a , b ) , "cmp " + i );
+ b = get(i + 1);
+ // print( tojson(a) + "\t" + tojson(b) + "\t" + cmp(a,b) );
+ assert.gt(0, cmp(a, b), "cmp " + i);
}
-assert.eq( N , t.find( { x : { $type : 17 } } ).itcount() , "B1" );
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).itcount() , "B2" );
-
-t.insert( { _id : 100 , x : new Timestamp( 123456 , 50 ) } );
-x = t.findOne( { _id : 100 } ).x;
-assert.eq( 123456 , x.t , "C1" );
-assert.eq( 50 , x.i , "C2" );
+assert.eq(N, t.find({x: {$type: 17}}).itcount(), "B1");
+assert.eq(0, t.find({x: {$type: 3}}).itcount(), "B2");
+t.insert({_id: 100, x: new Timestamp(123456, 50)});
+x = t.findOne({_id: 100}).x;
+assert.eq(123456, x.t, "C1");
+assert.eq(50, x.i, "C2");
diff --git a/jstests/core/type1.js b/jstests/core/type1.js
index 7f101a2c027..78c5f9b033c 100644
--- a/jstests/core/type1.js
+++ b/jstests/core/type1.js
@@ -2,21 +2,20 @@
t = db.type1;
t.drop();
-t.save( { x : 1.1 } );
-t.save( { x : "3" } );
-t.save( { x : "asd" } );
-t.save( { x : "foo" } );
+t.save({x: 1.1});
+t.save({x: "3"});
+t.save({x: "asd"});
+t.save({x: "foo"});
-assert.eq( 4 , t.find().count() , "A1" );
-assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "A2" );
-assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "A3" );
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "A4" );
+assert.eq(4, t.find().count(), "A1");
+assert.eq(1, t.find({x: {$type: 1}}).count(), "A2");
+assert.eq(3, t.find({x: {$type: 2}}).count(), "A3");
+assert.eq(0, t.find({x: {$type: 3}}).count(), "A4");
+t.ensureIndex({x: 1});
-t.ensureIndex( { x : 1 } );
-
-assert.eq( 4 , t.find().count() , "B1" );
-assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "B2" );
-assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "B3" );
-assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "B4" );
-assert.eq( 1 , t.find( { x : { $regex:"f", $type : 2 } } ).count() , "B3" );
+assert.eq(4, t.find().count(), "B1");
+assert.eq(1, t.find({x: {$type: 1}}).count(), "B2");
+assert.eq(3, t.find({x: {$type: 2}}).count(), "B3");
+assert.eq(0, t.find({x: {$type: 3}}).count(), "B4");
+assert.eq(1, t.find({x: {$regex: "f", $type: 2}}).count(), "B3");
diff --git a/jstests/core/type2.js b/jstests/core/type2.js
index 820607e0b30..9c6baa37b94 100644
--- a/jstests/core/type2.js
+++ b/jstests/core/type2.js
@@ -3,17 +3,17 @@
t = db.jstests_type2;
t.drop();
-t.save( {a:null} );
-t.save( {} );
-t.save( {a:'a'} );
+t.save({a: null});
+t.save({});
+t.save({a: 'a'});
function test() {
- assert.eq( 2, t.count( {a:null} ) );
- assert.eq( 1, t.count( {a:{$type:10}} ) );
- assert.eq( 2, t.count( {a:{$exists:true}} ) );
- assert.eq( 1, t.count( {a:{$exists:false}} ) );
+ assert.eq(2, t.count({a: null}));
+ assert.eq(1, t.count({a: {$type: 10}}));
+ assert.eq(2, t.count({a: {$exists: true}}));
+ assert.eq(1, t.count({a: {$exists: false}}));
}
test();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
test();
\ No newline at end of file
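
Aside (not part of the patch): type2.js depends on the rule that an equality match on null also matches documents where the field is absent, while $type: 10 matches only an explicit null. A short sketch:

// Sketch: {a: null} is missing-or-null; {$type: 10} is explicit null only.
var ndb = db.getSisterDB("null_sketch");
ndb.vals.drop();
ndb.vals.insert({_id: 1});                               // 'a' missing
ndb.vals.insert({_id: 2, a: null});                      // 'a' explicitly null
assert.eq(2, ndb.vals.find({a: null}).itcount());
assert.eq(1, ndb.vals.find({a: {$type: 10}}).itcount());
assert.eq(1, ndb.vals.find({a: {$exists: false}}).itcount());
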
diff --git a/jstests/core/type3.js b/jstests/core/type3.js
index aad21ca3ecb..fce2b03f6c4 100644
--- a/jstests/core/type3.js
+++ b/jstests/core/type3.js
@@ -3,55 +3,59 @@
t = db.jstests_type3;
t.drop();
-t.ensureIndex( {a:1} );
+t.ensureIndex({a: 1});
// Type Object
-t.save( {a:{'':''}} );
-assert.eq( 1, t.find( {a:{$type:3}} ).hint( {a:1} ).itcount() );
+t.save({a: {'': ''}});
+assert.eq(1, t.find({a: {$type: 3}}).hint({a: 1}).itcount());
// Type Array
t.remove({});
-t.save( {a:[['c']]} );
-assert.eq( 1, t.find( {a:{$type:4}} ).hint( {a:1} ).itcount() );
+t.save({a: [['c']]});
+assert.eq(1, t.find({a: {$type: 4}}).hint({a: 1}).itcount());
// Type RegEx
t.remove({});
-t.save( {a:/r/} );
-assert.eq( 1, t.find( {a:{$type:11}} ).hint( {a:1} ).itcount() );
+t.save({a: /r/});
+assert.eq(1, t.find({a: {$type: 11}}).hint({a: 1}).itcount());
// Type jstNULL
t.remove({});
-t.save( {a:null} );
-assert.eq( 1, t.find( {a:{$type:10}} ).hint( {a:1} ).itcount() );
+t.save({a: null});
+assert.eq(1, t.find({a: {$type: 10}}).hint({a: 1}).itcount());
// Type Undefined
t.remove({});
-t.save( {a:undefined} );
-assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
+t.save({a: undefined});
+assert.eq(1, t.find({a: {$type: 6}}).hint({a: 1}).itcount());
// This one won't be returned.
-t.save( {a:null} );
-assert.eq( 1, t.find( {a:{$type:6}} ).hint( {a:1} ).itcount() );
+t.save({a: null});
+assert.eq(1, t.find({a: {$type: 6}}).hint({a: 1}).itcount());
// Type Code
t.remove({});
-t.save( {a:function(){var a = 0;}} );
-assert.eq( 1, t.find( {a:{$type:13}} ).itcount() );
+t.save({
+ a: function() {
+ var a = 0;
+ }
+});
+assert.eq(1, t.find({a: {$type: 13}}).itcount());
// Type BinData
t.remove({});
-t.save( {a:new BinData(0,'')} );
-assert.eq( 1, t.find( {a:{$type:5}} ).itcount() );
+t.save({a: new BinData(0, '')});
+assert.eq(1, t.find({a: {$type: 5}}).itcount());
// Type Timestamp
t.remove({});
-t.save( {a:new Timestamp()} );
-t.save( {a:new Timestamp(0x80008000, 0)} );
-assert.eq( 2, t.find( {a:{$type:17}} ).itcount() );
-assert.eq( 0, t.find( {a:{$type:9}} ).itcount() );
+t.save({a: new Timestamp()});
+t.save({a: new Timestamp(0x80008000, 0)});
+assert.eq(2, t.find({a: {$type: 17}}).itcount());
+assert.eq(0, t.find({a: {$type: 9}}).itcount());
// Type Date
t.remove({});
-t.save( {a:new Date()} );
-assert.eq( 0, t.find( {a:{$type:17}} ).itcount() );
-assert.eq( 1, t.find( {a:{$type:9}} ).itcount() );
+t.save({a: new Date()});
+assert.eq(0, t.find({a: {$type: 17}}).itcount());
+assert.eq(1, t.find({a: {$type: 9}}).itcount());
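
Aside (not part of the patch): the numeric $type values in type1.js–type3.js are BSON type codes (1 double, 2 string, 3 object, 4 array, 5 binData, 6 undefined, 9 date, 10 null, 11 regex, 13 javascript, 17 timestamp). A minimal sketch; string aliases are an alternative on 3.2+ servers:

// Sketch: timestamps and dates are distinct BSON types.
var ydb = db.getSisterDB("type_sketch");
ydb.vals.drop();
ydb.vals.insert({a: new Timestamp()});
ydb.vals.insert({a: new Date()});
assert.eq(1, ydb.vals.find({a: {$type: 17}}).itcount()); // timestamp
assert.eq(1, ydb.vals.find({a: {$type: 9}}).itcount());  // date
// assert.eq(1, ydb.vals.find({a: {$type: "date"}}).itcount());   // alias form, 3.2+
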
diff --git a/jstests/core/type4.js b/jstests/core/type4.js
index 86e2f32c5d7..82197d4f1e2 100644
--- a/jstests/core/type4.js
+++ b/jstests/core/type4.js
@@ -1,4 +1,4 @@
-(function(){
+(function() {
"use strict";
// Tests for SERVER-20080
@@ -13,16 +13,16 @@
var oldReadMode = db.getMongo().readMode();
- assert.throws(function(){
+ assert.throws(function() {
(new _rand())();
}, [], "invoke constructor on natively injected function");
- assert.throws(function(){
- var doc = db.test.findOne();
- new doc();
+ assert.throws(function() {
+ var doc = db.test.findOne();
+ new doc();
}, [], "invoke constructor on BSON");
- assert.throws(function(){
+ assert.throws(function() {
db.getMongo().forceReadMode("commands");
var cursor = t.find();
cursor.next();
@@ -30,7 +30,7 @@
new cursor._cursor._cursorHandle();
}, [], "invoke constructor on CursorHandle");
- assert.throws(function(){
+ assert.throws(function() {
db.getMongo().forceReadMode("legacy");
var cursor = t.find();
cursor.next();
diff --git a/jstests/core/type5.js b/jstests/core/type5.js
index 414af2be7eb..d4dfc42d9f6 100644
--- a/jstests/core/type5.js
+++ b/jstests/core/type5.js
@@ -1,4 +1,4 @@
-(function(){
+(function() {
"use strict";
// This checks SERVER-20375 - Constrain JS method thisv
@@ -7,14 +7,14 @@
// prototypes of objects that aren't intended to have methods invoked on
// them.
- assert.throws(function(){
+ assert.throws(function() {
HexData(0, "aaaa").hex.apply({});
}, [], "invoke method on object of incorrect type");
- assert.throws(function(){
+ assert.throws(function() {
var x = HexData(0, "aaaa");
x.hex.apply(10);
}, [], "invoke method on incorrect type");
- assert.throws(function(){
+ assert.throws(function() {
var x = HexData(0, "aaaa");
x.hex.apply(x.__proto__);
}, [], "invoke method on prototype of correct type");
diff --git a/jstests/core/type6.js b/jstests/core/type6.js
index f8b29fe217d..39c3e2567bb 100644
--- a/jstests/core/type6.js
+++ b/jstests/core/type6.js
@@ -1,15 +1,17 @@
-(function(){
+(function() {
"use strict";
// SERVER-20319 Min/MaxKey check type of singleton
//
// make sure swapping min/max key's prototype doesn't blow things up
- assert.throws(function(){
- MinKey().__proto__.singleton = 1000; MinKey();
+ assert.throws(function() {
+ MinKey().__proto__.singleton = 1000;
+ MinKey();
}, [], "make sure manipulating MinKey's proto is safe");
- assert.throws(function(){
- MaxKey().__proto__.singleton = 1000; MaxKey();
+ assert.throws(function() {
+ MaxKey().__proto__.singleton = 1000;
+ MaxKey();
}, [], "make sure manipulating MaxKey's proto is safe");
})();
diff --git a/jstests/core/type7.js b/jstests/core/type7.js
index 870e48ad164..1d67922d491 100644
--- a/jstests/core/type7.js
+++ b/jstests/core/type7.js
@@ -1,4 +1,4 @@
-(function(){
+(function() {
"use strict";
// SERVER-20332 make JS NumberLong more robust
diff --git a/jstests/core/type8.js b/jstests/core/type8.js
index 246133b33d9..ceb4993ecb1 100644
--- a/jstests/core/type8.js
+++ b/jstests/core/type8.js
@@ -1,4 +1,4 @@
-(function(){
+(function() {
"use strict";
// SERVER-8246 Min/MaxKey should be comparable
diff --git a/jstests/core/uniqueness.js b/jstests/core/uniqueness.js
index 124748a91f4..8b919b9a6a9 100644
--- a/jstests/core/uniqueness.js
+++ b/jstests/core/uniqueness.js
@@ -6,51 +6,50 @@ t.drop();
// test uniqueness of _id
-res = t.save( { _id : 3 } );
-assert.writeOK( res );
+res = t.save({_id: 3});
+assert.writeOK(res);
// this should yield an error
-res = t.insert( { _id : 3 } );
-assert.writeError( res );
-assert.eq( 1, t.count() );
+res = t.insert({_id: 3});
+assert.writeError(res);
+assert.eq(1, t.count());
-res = t.insert( { _id : 4, x : 99 } );
-assert.writeOK( res );
+res = t.insert({_id: 4, x: 99});
+assert.writeOK(res);
// this should yield an error
-res = t.update( { _id : 4 } , { _id : 3, x : 99 } );
-assert.writeError( res );
-assert( t.findOne( {_id:4} ) );
+res = t.update({_id: 4}, {_id: 3, x: 99});
+assert.writeError(res);
+assert(t.findOne({_id: 4}));
// Check for an error message when we index and there are dups
db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a:3});
-db.jstests_uniqueness2.insert({a:3});
-assert.eq( 2, db.jstests_uniqueness2.count() );
-res = db.jstests_uniqueness2.ensureIndex({a:1}, true);
-assert.commandFailed( res );
-assert( res.errmsg.match( /E11000/ ) );
+db.jstests_uniqueness2.insert({a: 3});
+db.jstests_uniqueness2.insert({a: 3});
+assert.eq(2, db.jstests_uniqueness2.count());
+res = db.jstests_uniqueness2.ensureIndex({a: 1}, true);
+assert.commandFailed(res);
+assert(res.errmsg.match(/E11000/));
// Check for an error message when we index in the background and there are dups
db.jstests_uniqueness2.drop();
-db.jstests_uniqueness2.insert({a:3});
-db.jstests_uniqueness2.insert({a:3});
-assert.eq( 2, db.jstests_uniqueness2.count() );
-res = db.jstests_uniqueness2.ensureIndex({a:1}, {unique:true,background:true});
-assert.commandFailed( res );
-assert( res.errmsg.match( /E11000/ ) );
+db.jstests_uniqueness2.insert({a: 3});
+db.jstests_uniqueness2.insert({a: 3});
+assert.eq(2, db.jstests_uniqueness2.count());
+res = db.jstests_uniqueness2.ensureIndex({a: 1}, {unique: true, background: true});
+assert.commandFailed(res);
+assert(res.errmsg.match(/E11000/));
/* Check that if we update and remove _id, it gets added back by the DB */
/* - test when object grows */
t.drop();
-t.save( { _id : 'Z' } );
-t.update( {}, { k : 2 } );
-assert.eq( 'Z', t.findOne()._id, "uniqueness.js problem with adding back _id" );
+t.save({_id: 'Z'});
+t.update({}, {k: 2});
+assert.eq('Z', t.findOne()._id, "uniqueness.js problem with adding back _id");
/* - test when doesn't grow */
t.drop();
-t.save( { _id : 'Z', k : 3 } );
-t.update( {}, { k : 2 } );
-assert.eq( 'Z', t.findOne()._id, "uniqueness.js problem with adding back _id (2)" );
-
+t.save({_id: 'Z', k: 3});
+t.update({}, {k: 2});
+assert.eq('Z', t.findOne()._id, "uniqueness.js problem with adding back _id (2)");
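
Aside (not part of the patch): the E11000 checks above are the duplicate-key error reported when a unique index build finds pre-existing duplicates. A sketch of the passing case, with illustrative names:

// Sketch: once data is duplicate-free, the unique index builds and then rejects dups.
var udb = db.getSisterDB("unique_sketch");
udb.items.drop();
udb.items.insert({a: 1});
udb.items.insert({a: 2});
assert.commandWorked(udb.items.ensureIndex({a: 1}, {unique: true}));
assert.writeError(udb.items.insert({a: 1}));             // E11000 duplicate key
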
diff --git a/jstests/core/unset.js b/jstests/core/unset.js
index f3cdcf03deb..14e18229723 100644
--- a/jstests/core/unset.js
+++ b/jstests/core/unset.js
@@ -1,19 +1,22 @@
t = db.unset;
t.drop();
-orig = { _id : 1, emb : {} };
+orig = {
+ _id: 1,
+ emb: {}
+};
t.insert(orig);
-t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
-assert.eq( orig , t.findOne() , "A" );
+t.update({_id: 1}, {$unset: {'emb.a': 1}});
+t.update({_id: 1}, {$unset: {'z': 1}});
+assert.eq(orig, t.findOne(), "A");
-t.update( { _id : 1 }, { $set : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $set : { 'z' : 1 }});
+t.update({_id: 1}, {$set: {'emb.a': 1}});
+t.update({_id: 1}, {$set: {'z': 1}});
-t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
-t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
-assert.eq( orig , t.findOne() , "B" ); // note that emb isn't removed
+t.update({_id: 1}, {$unset: {'emb.a': 1}});
+t.update({_id: 1}, {$unset: {'z': 1}});
+assert.eq(orig, t.findOne(), "B"); // note that emb isn't removed
-t.update( { _id : 1 }, { $unset : { 'emb' : 1 }});
-assert.eq( {_id :1} , t.findOne() , "C" );
+t.update({_id: 1}, {$unset: {'emb': 1}});
+assert.eq({_id: 1}, t.findOne(), "C");
diff --git a/jstests/core/unset2.js b/jstests/core/unset2.js
index 501f9f16331..ec2772af507 100644
--- a/jstests/core/unset2.js
+++ b/jstests/core/unset2.js
@@ -3,23 +3,23 @@ var res;
t = db.unset2;
t.drop();
-t.save( {a:["a","b","c","d"]} );
-t.update( {}, {$unset:{"a.3":1}} );
-assert.eq( ["a","b","c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.1":1}} );
-assert.eq( ["a",null,"c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.0":1}} );
-assert.eq( [null,null,"c",null], t.findOne().a );
-t.update( {}, {$unset:{"a.4":1}} );
-assert.eq( [null,null,"c",null], t.findOne().a ); // no change
+t.save({a: ["a", "b", "c", "d"]});
+t.update({}, {$unset: {"a.3": 1}});
+assert.eq(["a", "b", "c", null], t.findOne().a);
+t.update({}, {$unset: {"a.1": 1}});
+assert.eq(["a", null, "c", null], t.findOne().a);
+t.update({}, {$unset: {"a.0": 1}});
+assert.eq([null, null, "c", null], t.findOne().a);
+t.update({}, {$unset: {"a.4": 1}});
+assert.eq([null, null, "c", null], t.findOne().a); // no change
t.drop();
-t.save( {a:["a","b","c","d","e"]} );
-t.update( {}, {$unset:{"a.2":1},$set:{"a.3":3,"a.4":4,"a.5":5}} );
-assert.eq( ["a","b",null,3,4,5], t.findOne().a );
+t.save({a: ["a", "b", "c", "d", "e"]});
+t.update({}, {$unset: {"a.2": 1}, $set: {"a.3": 3, "a.4": 4, "a.5": 5}});
+assert.eq(["a", "b", null, 3, 4, 5], t.findOne().a);
t.drop();
-t.save( {a:["a","b","c","d","e"]} );
-res = t.update( {}, {$unset:{"a.2":1},$set:{"a.2":4}} );
-assert.writeError( res );
-assert.eq( ["a","b","c","d","e"], t.findOne().a );
+t.save({a: ["a", "b", "c", "d", "e"]});
+res = t.update({}, {$unset: {"a.2": 1}, $set: {"a.2": 4}});
+assert.writeError(res);
+assert.eq(["a", "b", "c", "d", "e"], t.findOne().a);
diff --git a/jstests/core/update2.js b/jstests/core/update2.js
index 654914c1f45..15d84c81b41 100644
--- a/jstests/core/update2.js
+++ b/jstests/core/update2.js
@@ -1,18 +1,18 @@
f = db.ed_db_update2;
f.drop();
-f.save( { a: 4 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
+f.save({a: 4});
+f.update({a: 4}, {$inc: {a: 2}});
+assert.eq(6, f.findOne().a);
f.drop();
-f.save( { a: 4 } );
-f.ensureIndex( { a: 1 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
+f.save({a: 4});
+f.ensureIndex({a: 1});
+f.update({a: 4}, {$inc: {a: 2}});
+assert.eq(6, f.findOne().a);
// Verify that drop clears the index
f.drop();
-f.save( { a: 4 } );
-f.update( { a: 4 }, { $inc: { a: 2 } } );
-assert.eq( 6, f.findOne().a );
+f.save({a: 4});
+f.update({a: 4}, {$inc: {a: 2}});
+assert.eq(6, f.findOne().a);
diff --git a/jstests/core/update3.js b/jstests/core/update3.js
index d29d073a40a..79562fe72d0 100644
--- a/jstests/core/update3.js
+++ b/jstests/core/update3.js
@@ -3,26 +3,26 @@
f = db.jstests_update3;
f.drop();
-f.save( { a:1 } );
-f.update( {}, {$inc:{ a:1 }} );
-assert.eq( 2, f.findOne().a , "A" );
+f.save({a: 1});
+f.update({}, {$inc: {a: 1}});
+assert.eq(2, f.findOne().a, "A");
f.drop();
-f.save( { a:{ b: 1 } } );
-f.update( {}, {$inc:{ "a.b":1 }} );
-assert.eq( 2, f.findOne().a.b , "B" );
+f.save({a: {b: 1}});
+f.update({}, {$inc: {"a.b": 1}});
+assert.eq(2, f.findOne().a.b, "B");
f.drop();
-f.save( { a:{ b: 1 } } );
-f.update( {}, {$set:{ "a.b":5 }} );
-assert.eq( 5, f.findOne().a.b , "C" );
+f.save({a: {b: 1}});
+f.update({}, {$set: {"a.b": 5}});
+assert.eq(5, f.findOne().a.b, "C");
f.drop();
-f.save( {'_id':0} );
-f.update( {}, {$set:{'_id':5}} );
-assert.eq( 0, f.findOne()._id , "D" );
+f.save({'_id': 0});
+f.update({}, {$set: {'_id': 5}});
+assert.eq(0, f.findOne()._id, "D");
f.drop();
-f.save({_id:1, a:1});
-f.update({}, {$unset:{"a":1, "b.c":1}});
-assert.docEq(f.findOne(), {_id:1}, "E");
\ No newline at end of file
+f.save({_id: 1, a: 1});
+f.update({}, {$unset: {"a": 1, "b.c": 1}});
+assert.docEq(f.findOne(), {_id: 1}, "E");
\ No newline at end of file
diff --git a/jstests/core/update5.js b/jstests/core/update5.js
index 2728000f2d4..3ee44d2fba0 100644
--- a/jstests/core/update5.js
+++ b/jstests/core/update5.js
@@ -1,41 +1,39 @@
t = db.update5;
-function go( key ){
-
+function go(key) {
t.drop();
- function check( num , name ){
- assert.eq( 1 , t.find().count() , tojson( key ) + " count " + name );
- assert.eq( num , t.findOne().n , tojson( key ) + " value " + name );
+ function check(num, name) {
+ assert.eq(1, t.find().count(), tojson(key) + " count " + name);
+ assert.eq(num, t.findOne().n, tojson(key) + " value " + name);
}
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 1 , "A" );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 2 , "B" );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 3 , "C" );
-
+
+ t.update(key, {$inc: {n: 1}}, true);
+ check(1, "A");
+
+ t.update(key, {$inc: {n: 1}}, true);
+ check(2, "B");
+
+ t.update(key, {$inc: {n: 1}}, true);
+ check(3, "C");
+
var ik = {};
- for ( k in key )
+ for (k in key)
ik[k] = 1;
- t.ensureIndex( ik );
-
- t.update( key , { $inc : { n : 1 } } , true );
- check( 4 , "D" );
-
+ t.ensureIndex(ik);
+
+ t.update(key, {$inc: {n: 1}}, true);
+ check(4, "D");
}
-go( { a : 5 } );
-go( { a : 5 } );
+go({a: 5});
+go({a: 5});
-go( { a : 5 , b : 7 } );
-go( { a : null , b : 7 } );
+go({a: 5, b: 7});
+go({a: null, b: 7});
-go( { referer: 'blah' } );
-go( { referer: 'blah', lame: 'bar' } );
-go( { referer: 'blah', name: 'bar' } );
-go( { date: null, referer: 'blah', name: 'bar' } );
+go({referer: 'blah'});
+go({referer: 'blah', lame: 'bar'});
+go({referer: 'blah', name: 'bar'});
+go({date: null, referer: 'blah', name: 'bar'});
diff --git a/jstests/core/update6.js b/jstests/core/update6.js
index eda470abf45..76b676260f6 100644
--- a/jstests/core/update6.js
+++ b/jstests/core/update6.js
@@ -2,45 +2,42 @@
t = db.update6;
t.drop();
-t.save( { a : 1 , b : { c : 1 , d : 1 } } );
+t.save({a: 1, b: {c: 1, d: 1}});
-t.update( { a : 1 } , { $inc : { "b.c" : 1 } } );
-assert.eq( 2 , t.findOne().b.c , "A" );
-assert.eq( "c,d" , Object.keySet( t.findOne().b ).toString() , "B" );
+t.update({a: 1}, {$inc: {"b.c": 1}});
+assert.eq(2, t.findOne().b.c, "A");
+assert.eq("c,d", Object.keySet(t.findOne().b).toString(), "B");
-t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
-assert.eq( 1 , t.findOne().b["0e"] , "C" );
-assert.docEq( { "c" : 2, "d" : 1, "0e" : 1 }, t.findOne().b, "D" );
+t.update({a: 1}, {$inc: {"b.0e": 1}});
+assert.eq(1, t.findOne().b["0e"], "C");
+assert.docEq({"c": 2, "d": 1, "0e": 1}, t.findOne().b, "D");
// -----
t.drop();
-t.save( {"_id" : 2 ,
- "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3} ,
- //"b323" : {"0720" : 1} ,
- }
- );
-
-
-assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks before" );
-t.update({_id:2},{$inc: { 'b3.0719' : 1}},true);
-assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks after" );
+t.save({
+ "_id": 2,
+ "b3": {"0720": 5, "0721": 12, "0722": 11, "0723": 3},
+ //"b323" : {"0720" : 1} ,
+});
+assert.eq(4, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 1 : ks before");
+t.update({_id: 2}, {$inc: {'b3.0719': 1}}, true);
+assert.eq(5, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 1 : ks after");
// -----
t.drop();
-t.save( {"_id" : 2 ,
- "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3} ,
- "b324" : {"0720" : 1} ,
- }
- );
-
-
-assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks before" );
-printjson( t.find({_id:2},{b3:1})[0].b3 );
-t.update({_id:2},{$inc: { 'b3.0719' : 1}} );
-printjson( t.find({_id:2},{b3:1})[0].b3 );
-assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks after" );
+t.save({
+ "_id": 2,
+ "b3": {"0720": 5, "0721": 12, "0722": 11, "0723": 3},
+ "b324": {"0720": 1},
+});
+
+assert.eq(4, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 2 : ks before");
+printjson(t.find({_id: 2}, {b3: 1})[0].b3);
+t.update({_id: 2}, {$inc: {'b3.0719': 1}});
+printjson(t.find({_id: 2}, {b3: 1})[0].b3);
+assert.eq(5, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 2 : ks after");
diff --git a/jstests/core/update7.js b/jstests/core/update7.js
index 199a331f9b2..9f92c3382ef 100644
--- a/jstests/core/update7.js
+++ b/jstests/core/update7.js
@@ -2,137 +2,136 @@
t = db.update7;
t.drop();
-function s(){
- return t.find().sort( { _id : 1 } ).map( function(z){ return z.x; } );
+function s() {
+ return t.find().sort({_id: 1}).map(function(z) {
+ return z.x;
+ });
}
-t.save( { _id : 1 , x : 1 } );
-t.save( { _id : 2 , x : 5 } );
+t.save({_id: 1, x: 1});
+t.save({_id: 2, x: 5});
-assert.eq( "1,5" , s() , "A" );
+assert.eq("1,5", s(), "A");
-t.update( {} , { $inc : { x : 1 } } );
-assert.eq( "2,5" , s() , "B" );
+t.update({}, {$inc: {x: 1}});
+assert.eq("2,5", s(), "B");
-t.update( { _id : 1 } , { $inc : { x : 1 } } );
-assert.eq( "3,5" , s() , "C" );
+t.update({_id: 1}, {$inc: {x: 1}});
+assert.eq("3,5", s(), "C");
-t.update( { _id : 2 } , { $inc : { x : 1 } } );
-assert.eq( "3,6" , s() , "D" );
+t.update({_id: 2}, {$inc: {x: 1}});
+assert.eq("3,6", s(), "D");
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "E" );
+t.update({}, {$inc: {x: 1}}, false, true);
+assert.eq("4,7", s(), "E");
-t.update( {} , { $set : { x : 2 } } , false , true );
-assert.eq( "2,2" , s() , "F" );
+t.update({}, {$set: {x: 2}}, false, true);
+assert.eq("2,2", s(), "F");
// non-matching in cursor
t.drop();
-t.save( { _id : 1 , x : 1 , a : 1 , b : 1 } );
-t.save( { _id : 2 , x : 5 , a : 1 , b : 2 } );
-assert.eq( "1,5" , s() , "B1" );
+t.save({_id: 1, x: 1, a: 1, b: 1});
+t.save({_id: 2, x: 5, a: 1, b: 2});
+assert.eq("1,5", s(), "B1");
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,6" , s() , "B2" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("2,6", s(), "B2");
-t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "B3" );
+t.update({b: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("3,6", s(), "B3");
-t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "B4" );
+t.update({b: 3}, {$inc: {x: 1}}, false, true);
+assert.eq("3,6", s(), "B4");
-t.ensureIndex( { a : 1 } );
-t.ensureIndex( { b : 1 } );
+t.ensureIndex({a: 1});
+t.ensureIndex({b: 1});
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "B5" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("4,7", s(), "B5");
-t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,7" , s() , "B6" );
+t.update({b: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("5,7", s(), "B6");
-t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,7" , s() , "B7" );
-
-t.update( { b : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "B7" );
+t.update({b: 3}, {$inc: {x: 1}}, false, true);
+assert.eq("5,7", s(), "B7");
+t.update({b: 2}, {$inc: {x: 1}}, false, true);
+assert.eq("5,8", s(), "B7");
// multi-key
t.drop();
-t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
-t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
-assert.eq( "1,5" , s() , "C1" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,5" , s() , "C2" );
+t.save({_id: 1, x: 1, a: [1, 2]});
+t.save({_id: 2, x: 5, a: [2, 3]});
+assert.eq("1,5", s(), "C1");
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,5" , s() , "C3" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("2,5", s(), "C2");
-t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "C4" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("3,5", s(), "C3");
-t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "C5" );
+t.update({a: 3}, {$inc: {x: 1}}, false, true);
+assert.eq("3,6", s(), "C4");
-t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "C6" );
+t.update({a: 2}, {$inc: {x: 1}}, false, true);
+assert.eq("4,7", s(), "C5");
+t.update({a: {$gt: 0}}, {$inc: {x: 1}}, false, true);
+assert.eq("5,8", s(), "C6");
t.drop();
-t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
-t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
-t.ensureIndex( { a : 1 } );
-assert.eq( "1,5" , s() , "D1" );
-
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "2,5" , s() , "D2" );
+t.save({_id: 1, x: 1, a: [1, 2]});
+t.save({_id: 2, x: 5, a: [2, 3]});
+t.ensureIndex({a: 1});
+assert.eq("1,5", s(), "D1");
-t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,5" , s() , "D3" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("2,5", s(), "D2");
-t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "3,6" , s() , "D4" );
+t.update({a: 1}, {$inc: {x: 1}}, false, true);
+assert.eq("3,5", s(), "D3");
-t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
-assert.eq( "4,7" , s() , "D5" );
+t.update({a: 3}, {$inc: {x: 1}}, false, true);
+assert.eq("3,6", s(), "D4");
-t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8" , s() , "D6" );
+t.update({a: 2}, {$inc: {x: 1}}, false, true);
+assert.eq("4,7", s(), "D5");
-t.update( { a : { $lt : 10 } } , { $inc : { x : -1 } } , false , true );
-assert.eq( "4,7" , s() , "D7" );
+t.update({a: {$gt: 0}}, {$inc: {x: 1}}, false, true);
+assert.eq("5,8", s(), "D6");
-// ---
+t.update({a: {$lt: 10}}, {$inc: {x: -1}}, false, true);
+assert.eq("4,7", s(), "D7");
-t.save( { _id : 3 } );
-assert.eq( "4,7," , s() , "E1" );
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "5,8,1" , s() , "E2" );
+// ---
-for ( i = 4; i<8; i++ )
- t.save( { _id : i } );
-t.save( { _id : i , x : 1 } );
-assert.eq( "5,8,1,,,,,1" , s() , "E4" );
-t.update( {} , { $inc : { x : 1 } } , false , true );
-assert.eq( "6,9,2,1,1,1,1,2" , s() , "E5" );
+t.save({_id: 3});
+assert.eq("4,7,", s(), "E1");
+t.update({}, {$inc: {x: 1}}, false, true);
+assert.eq("5,8,1", s(), "E2");
+for (i = 4; i < 8; i++)
+ t.save({_id: i});
+t.save({_id: i, x: 1});
+assert.eq("5,8,1,,,,,1", s(), "E4");
+t.update({}, {$inc: {x: 1}}, false, true);
+assert.eq("6,9,2,1,1,1,1,2", s(), "E5");
// --- $inc indexed field
t.drop();
-t.save( { x : 1 } );
-t.save( { x : 2 } );
-t.save( { x : 3 } );
+t.save({x: 1});
+t.save({x: 2});
+t.save({x: 3});
-t.ensureIndex( { x : 1 } );
+t.ensureIndex({x: 1});
-assert.eq( "1,2,3" , s() , "F1" );
-t.update( { x : { $gt : 0 } } , { $inc : { x : 5 } } , false , true );
-assert.eq( "6,7,8" , s() , "F1" );
+assert.eq("1,2,3", s(), "F1");
+t.update({x: {$gt: 0}}, {$inc: {x: 5}}, false, true);
+assert.eq("6,7,8", s(), "F1");
diff --git a/jstests/core/update8.js b/jstests/core/update8.js
index f59bea15e15..596bc8695dd 100644
--- a/jstests/core/update8.js
+++ b/jstests/core/update8.js
@@ -2,10 +2,10 @@
t = db.update8;
t.drop();
-t.update( { _id : 1 , tags: {"$ne": "a"}}, {"$push": { tags : "a" } } , true );
-assert.eq( { _id : 1 , tags : [ "a" ] } , t.findOne() , "A" );
+t.update({_id: 1, tags: {"$ne": "a"}}, {"$push": {tags: "a"}}, true);
+assert.eq({_id: 1, tags: ["a"]}, t.findOne(), "A");
t.drop();
-//SERVER-390
-//t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true );
-//printjson( t.findOne() );
+// SERVER-390
+// t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true );
+// printjson( t.findOne() );
diff --git a/jstests/core/update9.js b/jstests/core/update9.js
index 0a51d658199..d119681a09e 100644
--- a/jstests/core/update9.js
+++ b/jstests/core/update9.js
@@ -2,18 +2,17 @@
t = db.update9;
t.drop();
-orig = { "_id" : 1 ,
- "question" : "a",
- "choices" : { "1" : { "choice" : "b" },
- "0" : { "choice" : "c" } } ,
-
- };
+orig = {
+ "_id": 1,
+ "question": "a",
+ "choices": {"1": {"choice": "b"}, "0": {"choice": "c"}},
-t.save( orig );
-assert.eq( orig , t.findOne() , "A" );
+};
-t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}});
+t.save(orig);
+assert.eq(orig, t.findOne(), "A");
-orig.choices["0"].votes = [ 1 ] ;
-assert.eq( orig.choices["0"] , t.findOne().choices["0"] , "B" );
+t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}});
+orig.choices["0"].votes = [1];
+assert.eq(orig.choices["0"], t.findOne().choices["0"], "B");
diff --git a/jstests/core/update_addToSet.js b/jstests/core/update_addToSet.js
index c12f029f6ae..05437148b2f 100644
--- a/jstests/core/update_addToSet.js
+++ b/jstests/core/update_addToSet.js
@@ -2,57 +2,66 @@
t = db.update_addToSet1;
t.drop();
-o = { _id : 1 , a : [ 2 , 1 ] };
-t.insert( o );
+o = {
+ _id: 1,
+ a: [2, 1]
+};
+t.insert(o);
-assert.eq( o , t.findOne() , "A1" );
+assert.eq(o, t.findOne(), "A1");
-t.update( {} , { $addToSet : { a : 3 } } );
-o.a.push( 3 );
-assert.eq( o , t.findOne() , "A2" );
+t.update({}, {$addToSet: {a: 3}});
+o.a.push(3);
+assert.eq(o, t.findOne(), "A2");
-t.update( {} , { $addToSet : { a : 3 } } );
-assert.eq( o , t.findOne() , "A3" );
+t.update({}, {$addToSet: {a: 3}});
+assert.eq(o, t.findOne(), "A3");
// SERVER-628
-t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
-o.a.push( 5 );
-o.a.push( 6 );
-assert.eq( o , t.findOne() , "B1" );
+t.update({}, {$addToSet: {a: {$each: [3, 5, 6]}}});
+o.a.push(5);
+o.a.push(6);
+assert.eq(o, t.findOne(), "B1");
t.drop();
-o = { _id : 1 , a : [ 3 , 5 , 6 ] };
-t.insert( o );
-t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
-assert.eq( o , t.findOne() , "B2" );
+o = {
+ _id: 1,
+ a: [3, 5, 6]
+};
+t.insert(o);
+t.update({}, {$addToSet: {a: {$each: [3, 5, 6]}}});
+assert.eq(o, t.findOne(), "B2");
t.drop();
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
-assert.eq( o , t.findOne() , "B3" );
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
-assert.eq( o , t.findOne() , "B4" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [3, 5, 6]}}}, true);
+assert.eq(o, t.findOne(), "B3");
+t.update({_id: 1}, {$addToSet: {a: {$each: [3, 5, 6]}}}, true);
+assert.eq(o, t.findOne(), "B4");
// SERVER-630
t.drop();
-t.update( { _id : 2 } , { $addToSet : { a : 3 } } , true );
-assert.eq( 1 , t.count() , "C1" );
-assert.eq( { _id : 2 , a : [ 3 ] } , t.findOne() , "C2" );
+t.update({_id: 2}, {$addToSet: {a: 3}}, true);
+assert.eq(1, t.count(), "C1");
+assert.eq({_id: 2, a: [3]}, t.findOne(), "C2");
// SERVER-3245
-o = {_id: 1, a: [1,2]};
+o = {
+ _id: 1,
+ a: [1, 2]
+};
t.drop();
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2]}}}, true );
-assert.eq( o, t.findOne(), "D1" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [1, 2]}}}, true);
+assert.eq(o, t.findOne(), "D1");
t.drop();
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,1,2]}}}, true );
-assert.eq( o, t.findOne(), "D2" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [1, 2, 1, 2]}}}, true);
+assert.eq(o, t.findOne(), "D2");
t.drop();
-t.insert( {_id: 1} );
-t.update( {_id: 1}, {$addToSet: {a: {$each: [1,2,2,1]}}} );
-assert.eq( o, t.findOne(), "D3" );
+t.insert({_id: 1});
+t.update({_id: 1}, {$addToSet: {a: {$each: [1, 2, 2, 1]}}});
+assert.eq(o, t.findOne(), "D3");
-t.update( {_id: 1}, {$addToSet: {a: {$each: [3,2,2,3,3]}}} );
-o.a.push( 3 );
-assert.eq( o, t.findOne(), "D4" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [3, 2, 2, 3, 3]}}});
+o.a.push(3);
+assert.eq(o, t.findOne(), "D4");
diff --git a/jstests/core/update_addToSet2.js b/jstests/core/update_addToSet2.js
index dd73a4f3531..44ba8bce671 100644
--- a/jstests/core/update_addToSet2.js
+++ b/jstests/core/update_addToSet2.js
@@ -2,10 +2,12 @@
t = db.update_addToSet2;
t.drop();
-o = { _id : 1 };
-t.insert( { _id : 1 } );
+o = {
+ _id: 1
+};
+t.insert({_id: 1});
-t.update({},{$addToSet : {'kids' :{ 'name' : 'Bob', 'age': '4'}}});
-t.update({},{$addToSet : {'kids' :{ 'name' : 'Dan', 'age': '2'}}});
+t.update({}, {$addToSet: {'kids': {'name': 'Bob', 'age': '4'}}});
+t.update({}, {$addToSet: {'kids': {'name': 'Dan', 'age': '2'}}});
-printjson( t.findOne() );
+printjson(t.findOne());
diff --git a/jstests/core/update_addToSet3.js b/jstests/core/update_addToSet3.js
index fb6df7645f0..b37112042cc 100644
--- a/jstests/core/update_addToSet3.js
+++ b/jstests/core/update_addToSet3.js
@@ -2,17 +2,16 @@
t = db.update_addToSet3;
t.drop();
-t.insert( { _id : 1 } );
+t.insert({_id: 1});
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 6 , 5 , 4 ] } } } );
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 ] } , "A1" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [6, 5, 4]}}});
+assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4]}, "A1");
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 2 , 1 ] } } } );
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 ] } , "A2" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [3, 2, 1]}}});
+assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4, 3, 2, 1]}, "A2");
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 4 , 7 , 9 , 2 ] } } } );
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 ] } , "A3" );
-
-t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 12 , 13 , 12 ] } } } );
-assert.eq( t.findOne() , { _id : 1 , a : [ 6 , 5 , 4 , 3 , 2 , 1 , 7 , 9 , 12 , 13 ] } , "A4" );
+t.update({_id: 1}, {$addToSet: {a: {$each: [4, 7, 9, 2]}}});
+assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4, 3, 2, 1, 7, 9]}, "A3");
+t.update({_id: 1}, {$addToSet: {a: {$each: [12, 13, 12]}}});
+assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4, 3, 2, 1, 7, 9, 12, 13]}, "A4");
diff --git a/jstests/core/update_arraymatch1.js b/jstests/core/update_arraymatch1.js
index 9c1907b63f2..b8d78c3daee 100644
--- a/jstests/core/update_arraymatch1.js
+++ b/jstests/core/update_arraymatch1.js
@@ -2,15 +2,20 @@
t = db.update_arraymatch1;
t.drop();
-o = { _id : 1 , a : [ { x : 1 , y : 1 } , { x : 2 , y : 2 } , { x : 3 , y : 3 } ] };
-t.insert( o );
-assert.eq( o , t.findOne() , "A1" );
+o = {
+ _id: 1,
+ a: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}]
+};
+t.insert(o);
+assert.eq(o, t.findOne(), "A1");
-q = { "a.x" : 2 };
-t.update( q , { $set : { b : 5 } } );
+q = {
+ "a.x": 2
+};
+t.update(q, {$set: {b: 5}});
o.b = 5;
-assert.eq( o , t.findOne() , "A2" );
+assert.eq(o, t.findOne(), "A2");
-t.update( { "a.x" : 2 } , { $inc : { "a.$.y" : 1 } } );
+t.update({"a.x": 2}, {$inc: {"a.$.y": 1}});
o.a[1].y++;
-assert.eq( o , t.findOne() , "A3" );
+assert.eq(o, t.findOne(), "A3");
diff --git a/jstests/core/update_arraymatch2.js b/jstests/core/update_arraymatch2.js
index fc1e2f93fc5..ede1e0ad69a 100644
--- a/jstests/core/update_arraymatch2.js
+++ b/jstests/core/update_arraymatch2.js
@@ -1,16 +1,16 @@
t = db.update_arraymatch2;
t.drop();
-t.insert( { } );
-t.insert( { x : [1,2,3] } );
-t.insert( { x : 99 } );
-t.update( {x : 2}, { $inc : { "x.$" : 1 } } , false, true );
-assert( t.findOne({x:1}).x[1] == 3, "A1" );
+t.insert({});
+t.insert({x: [1, 2, 3]});
+t.insert({x: 99});
+t.update({x: 2}, {$inc: {"x.$": 1}}, false, true);
+assert(t.findOne({x: 1}).x[1] == 3, "A1");
-t.insert( { x : { y : [8,7,6] } } );
-t.update( {'x.y' : 7}, { $inc : { "x.y.$" : 1 } } , false, true );
-assert.eq( 8 , t.findOne({"x.y" : 8}).x.y[1] , "B1" );
+t.insert({x: {y: [8, 7, 6]}});
+t.update({'x.y': 7}, {$inc: {"x.y.$": 1}}, false, true);
+assert.eq(8, t.findOne({"x.y": 8}).x.y[1], "B1");
-t.insert( { x : [90,91,92], y : ['a', 'b', 'c'] } );
-t.update( { x : 92} , { $set : { 'y.$' : 'z' } }, false, true );
-assert.eq( 'z', t.findOne({x:92}).y[2], "B2" );
+t.insert({x: [90, 91, 92], y: ['a', 'b', 'c']});
+t.update({x: 92}, {$set: {'y.$': 'z'}}, false, true);
+assert.eq('z', t.findOne({x: 92}).y[2], "B2");
diff --git a/jstests/core/update_arraymatch3.js b/jstests/core/update_arraymatch3.js
index 96fa0a5cbb5..5fe2c4a1f16 100644
--- a/jstests/core/update_arraymatch3.js
+++ b/jstests/core/update_arraymatch3.js
@@ -2,16 +2,15 @@
t = db.update_arraymatch3;
t.drop();
-o = { _id : 1 ,
- title : "ABC",
- comments : [ { "by" : "joe", "votes" : 3 },
- { "by" : "jane", "votes" : 7 }
- ]
- };
+o = {
+ _id: 1,
+ title: "ABC",
+ comments: [{"by": "joe", "votes": 3}, {"by": "jane", "votes": 7}]
+};
-t.save( o );
-assert.eq( o , t.findOne() , "A1" );
+t.save(o);
+assert.eq(o, t.findOne(), "A1");
-t.update( {'comments.by':'joe'}, {$inc:{'comments.$.votes':1}}, false, true );
+t.update({'comments.by': 'joe'}, {$inc: {'comments.$.votes': 1}}, false, true);
o.comments[0].votes++;
-assert.eq( o , t.findOne() , "A2" );
+assert.eq(o, t.findOne(), "A2");
diff --git a/jstests/core/update_arraymatch4.js b/jstests/core/update_arraymatch4.js
index d445168ca25..fabe07f7337 100644
--- a/jstests/core/update_arraymatch4.js
+++ b/jstests/core/update_arraymatch4.js
@@ -2,17 +2,18 @@
t = db.update_arraymatch4;
t.drop();
-x = { _id : 1 , arr : ["A1","B1","C1"] };
-t.insert( x );
-assert.eq( x , t.findOne() , "A1" );
+x = {
+ _id: 1,
+ arr: ["A1", "B1", "C1"]
+};
+t.insert(x);
+assert.eq(x, t.findOne(), "A1");
x.arr[0] = "A2";
-t.update( { arr : "A1" } , { $set : { "arr.$" : "A2" } } );
-assert.eq( x , t.findOne() , "A2" );
+t.update({arr: "A1"}, {$set: {"arr.$": "A2"}});
+assert.eq(x, t.findOne(), "A2");
-t.ensureIndex( { arr : 1 } );
+t.ensureIndex({arr: 1});
x.arr[0] = "A3";
-t.update( { arr : "A2" } , { $set : { "arr.$" : "A3" } } );
-assert.eq( x , t.findOne() , "A3" ); // SERVER-1055
-
-
+t.update({arr: "A2"}, {$set: {"arr.$": "A3"}});
+assert.eq(x, t.findOne(), "A3"); // SERVER-1055
diff --git a/jstests/core/update_arraymatch5.js b/jstests/core/update_arraymatch5.js
index b468d0113ea..39768c8d2c5 100644
--- a/jstests/core/update_arraymatch5.js
+++ b/jstests/core/update_arraymatch5.js
@@ -2,14 +2,17 @@
t = db.update_arraymatch5;
t.drop();
-t.insert({abc:{visible:true}, testarray:[{foobar_id:316, visible:true, xxx: 1}]});
-t.ensureIndex({'abc.visible':1, 'testarray.visible':1 , 'testarray.xxx': 1});
-assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A1" );
-assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:1}}}) , "A2" );
+t.insert({abc: {visible: true}, testarray: [{foobar_id: 316, visible: true, xxx: 1}]});
+t.ensureIndex({'abc.visible': 1, 'testarray.visible': 1, 'testarray.xxx': 1});
+assert(t.findOne({'abc.visible': true, testarray: {'$elemMatch': {visible: true, xxx: 1}}}), "A1");
+assert(t.findOne({testarray: {'$elemMatch': {visible: true, xxx: 1}}}), "A2");
-t.update({'testarray.foobar_id':316}, {'$set': {'testarray.$.visible': true, 'testarray.$.xxx': 2}}, false, true);
+t.update({'testarray.foobar_id': 316},
+ {'$set': {'testarray.$.visible': true, 'testarray.$.xxx': 2}},
+ false,
+ true);
-assert( t.findOne() , "B1" );
-assert( t.findOne({testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B2" );
-assert( t.findOne({'abc.visible':true, testarray:{'$elemMatch': {visible:true, xxx:2}}}) , "B3" );
-assert.eq( 1 , t.find().count() , "B4" );
+assert(t.findOne(), "B1");
+assert(t.findOne({testarray: {'$elemMatch': {visible: true, xxx: 2}}}), "B2");
+assert(t.findOne({'abc.visible': true, testarray: {'$elemMatch': {visible: true, xxx: 2}}}), "B3");
+assert.eq(1, t.find().count(), "B4");
diff --git a/jstests/core/update_arraymatch6.js b/jstests/core/update_arraymatch6.js
index 71e443fa44f..fe4b09de8a0 100644
--- a/jstests/core/update_arraymatch6.js
+++ b/jstests/core/update_arraymatch6.js
@@ -3,13 +3,13 @@ t = db.jstests_update_arraymatch6;
t.drop();
function doTest() {
- t.save( {a: [{id: 1, x: [5,6,7]}, {id: 2, x: [8,9,10]}]} );
- res = t.update({'a.id': 1}, {$set: {'a.$.x': [1,1,1]}});
- assert.writeOK( res );
- assert.eq.automsg( "1", "t.findOne().a[ 0 ].x[ 0 ]" );
+ t.save({a: [{id: 1, x: [5, 6, 7]}, {id: 2, x: [8, 9, 10]}]});
+ res = t.update({'a.id': 1}, {$set: {'a.$.x': [1, 1, 1]}});
+ assert.writeOK(res);
+ assert.eq.automsg("1", "t.findOne().a[ 0 ].x[ 0 ]");
}
doTest();
t.drop();
-t.ensureIndex( { 'a.id':1 } );
+t.ensureIndex({'a.id': 1});
doTest();
\ No newline at end of file
diff --git a/jstests/core/update_arraymatch7.js b/jstests/core/update_arraymatch7.js
index 5621f60c39e..4c0302dbfec 100644
--- a/jstests/core/update_arraymatch7.js
+++ b/jstests/core/update_arraymatch7.js
@@ -6,14 +6,14 @@ t.drop();
function testPositionalInc() {
t.remove({});
- t.save( { a:[ { b:'match', count:0 } ] } );
- t.update( { 'a.b':'match' }, { $inc:{ 'a.$.count':1 } } );
+ t.save({a: [{b: 'match', count: 0}]});
+ t.update({'a.b': 'match'}, {$inc: {'a.$.count': 1}});
// Check that the positional $inc succeeded.
- assert( t.findOne( { 'a.count':1 } ) );
+ assert(t.findOne({'a.count': 1}));
}
testPositionalInc();
// Now check with a non multikey index.
-t.ensureIndex( { 'a.b' : 1 } );
+t.ensureIndex({'a.b': 1});
testPositionalInc();
diff --git a/jstests/core/update_arraymatch8.js b/jstests/core/update_arraymatch8.js
index 1e8ce377862..7e4eb59f37a 100644
--- a/jstests/core/update_arraymatch8.js
+++ b/jstests/core/update_arraymatch8.js
@@ -4,155 +4,155 @@
// array.$.name
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.$.name': 'new'}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'name': 'old'}]});
+assert(t.findOne({'array.name': 'old'}));
+t.update({'array.name': 'old'}, {$set: {'array.$.name': 'new'}});
+assert(t.findOne({'array.name': 'new'}));
+assert(!t.findOne({'array.name': 'old'}));
// array.$ (failed in 2.2.2)
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.$': {'name':'new'}}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'name': 'old'}]});
+assert(t.findOne({'array.name': 'old'}));
+t.update({'array.name': 'old'}, {$set: {'array.$': {'name': 'new'}}});
+assert(t.findOne({'array.name': 'new'}));
+assert(!t.findOne({'array.name': 'old'}));
// array.0.name
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.0.name': 'new'}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'name': 'old'}]});
+assert(t.findOne({'array.name': 'old'}));
+t.update({'array.name': 'old'}, {$set: {'array.0.name': 'new'}});
+assert(t.findOne({'array.name': 'new'}));
+assert(!t.findOne({'array.name': 'old'}));
// array.0 (failed in 2.2.2)
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'name': 'old'}]} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {'array.name': 'old'}, {$set: {'array.0': {'name':'new'}}} );
-assert( t.findOne({'array.name': 'new'}) );
-assert( !t.findOne({'array.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'name': 'old'}]});
+assert(t.findOne({'array.name': 'old'}));
+t.update({'array.name': 'old'}, {$set: {'array.0': {'name': 'new'}}});
+assert(t.findOne({'array.name': 'new'}));
+assert(!t.findOne({'array.name': 'old'}));
// array.12.name
t = db.jstests_update_arraymatch8;
t.drop();
arr = new Array();
-for (var i=0; i<20; i++) {
+for (var i = 0; i < 20; i++) {
arr.push({'name': 'old'});
}
-t.ensureIndex( {'array.name': 1} );
-t.insert( {_id:0, 'array': arr} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {_id:0}, {$set: {'array.12.name': 'new'}} );
+t.ensureIndex({'array.name': 1});
+t.insert({_id: 0, 'array': arr});
+assert(t.findOne({'array.name': 'old'}));
+t.update({_id: 0}, {$set: {'array.12.name': 'new'}});
// note: both documents now have to be in the array
-assert( t.findOne({'array.name': 'new'}) );
-assert( t.findOne({'array.name': 'old'}) );
+assert(t.findOne({'array.name': 'new'}));
+assert(t.findOne({'array.name': 'old'}));
// array.12 (failed in 2.2.2)
t = db.jstests_update_arraymatch8;
t.drop();
arr = new Array();
-for (var i=0; i<20; i++) {
+for (var i = 0; i < 20; i++) {
arr.push({'name': 'old'});
}
-t.ensureIndex( {'array.name': 1} );
-t.insert( {_id:0, 'array': arr} );
-assert( t.findOne({'array.name': 'old'}) );
-t.update( {_id:0}, {$set: {'array.12': {'name':'new'}}} );
+t.ensureIndex({'array.name': 1});
+t.insert({_id: 0, 'array': arr});
+assert(t.findOne({'array.name': 'old'}));
+t.update({_id: 0}, {$set: {'array.12': {'name': 'new'}}});
// note: both documents now have to be in the array
-assert( t.findOne({'array.name': 'new'}) );
-assert( t.findOne({'array.name': 'old'}) );
+assert(t.findOne({'array.name': 'new'}));
+assert(t.findOne({'array.name': 'old'}));
// array.$.123a.name
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.123a.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a.name': 'new'}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
+t.ensureIndex({'array.123a.name': 1});
+t.insert({'array': [{'123a': {'name': 'old'}}]});
+assert(t.findOne({'array.123a.name': 'old'}));
+t.update({'array.123a.name': 'old'}, {$set: {'array.$.123a.name': 'new'}});
+assert(t.findOne({'array.123a.name': 'new'}));
+assert(!t.findOne({'array.123a.name': 'old'}));
// array.$.123a
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.$.123a': {'name': 'new'}}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'123a': {'name': 'old'}}]});
+assert(t.findOne({'array.123a.name': 'old'}));
+t.update({'array.123a.name': 'old'}, {$set: {'array.$.123a': {'name': 'new'}}});
+assert(t.findOne({'array.123a.name': 'new'}));
+assert(!t.findOne({'array.123a.name': 'old'}));
// array.0.123a.name
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.123a.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a.name': 'new'}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
+t.ensureIndex({'array.123a.name': 1});
+t.insert({'array': [{'123a': {'name': 'old'}}]});
+assert(t.findOne({'array.123a.name': 'old'}));
+t.update({'array.123a.name': 'old'}, {$set: {'array.0.123a.name': 'new'}});
+assert(t.findOne({'array.123a.name': 'new'}));
+assert(!t.findOne({'array.123a.name': 'old'}));
// array.0.123a
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'array.name': 1} );
-t.insert( {'array': [{'123a':{'name': 'old'}}]} );
-assert( t.findOne({'array.123a.name': 'old'}) );
-t.update( {'array.123a.name': 'old'}, {$set: {'array.0.123a': {'name': 'new'}}} );
-assert( t.findOne({'array.123a.name': 'new'}) );
-assert( !t.findOne({'array.123a.name': 'old'}) );
+t.ensureIndex({'array.name': 1});
+t.insert({'array': [{'123a': {'name': 'old'}}]});
+assert(t.findOne({'array.123a.name': 'old'}));
+t.update({'array.123a.name': 'old'}, {$set: {'array.0.123a': {'name': 'new'}}});
+assert(t.findOne({'array.123a.name': 'new'}));
+assert(!t.findOne({'array.123a.name': 'old'}));
// a.0.b
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'a.0.b': 1} );
-t.insert( {'a': [ [ { b:'old' } ] ] } );
-assert( t.findOne({'a.0.0.b': 'old'}) );
-assert( t.findOne({'a.0.b': 'old'}) );
-t.update( {}, {$set: {'a.0.0.b': 'new'}} );
-assert( t.findOne({'a.0.b': 'new'}) );
-assert( !t.findOne({'a.0.b': 'old'}) );
+t.ensureIndex({'a.0.b': 1});
+t.insert({'a': [[{b: 'old'}]]});
+assert(t.findOne({'a.0.0.b': 'old'}));
+assert(t.findOne({'a.0.b': 'old'}));
+t.update({}, {$set: {'a.0.0.b': 'new'}});
+assert(t.findOne({'a.0.b': 'new'}));
+assert(!t.findOne({'a.0.b': 'old'}));
// a.0.b.c
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'a.0.b.c': 1} );
-t.insert( {'a': [ { b:[ { c:'old' } ] } ] } );
-assert( t.findOne({'a.0.b.0.c': 'old'}) );
-assert( t.findOne({'a.b.0.c': 'old'}) );
-assert( t.findOne({'a.0.b.c': 'old'}) );
-assert( t.findOne({'a.b.c': 'old'}) );
-t.update( {}, {$set: {'a.0.b.0.c': 'new'}} );
-assert( t.findOne({'a.0.b.c': 'new'}) );
-assert( !t.findOne({'a.0.b.c': 'old'}) );
+t.ensureIndex({'a.0.b.c': 1});
+t.insert({'a': [{b: [{c: 'old'}]}]});
+assert(t.findOne({'a.0.b.0.c': 'old'}));
+assert(t.findOne({'a.b.0.c': 'old'}));
+assert(t.findOne({'a.0.b.c': 'old'}));
+assert(t.findOne({'a.b.c': 'old'}));
+t.update({}, {$set: {'a.0.b.0.c': 'new'}});
+assert(t.findOne({'a.0.b.c': 'new'}));
+assert(!t.findOne({'a.0.b.c': 'old'}));
// a.b.$ref
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'a.b.$ref': 1} );
-t.insert( {'a': [ { 'b':{ '$ref':'old', '$id':0 } } ] } );
-assert( t.findOne({'a.b.$ref': 'old'}) );
-assert( t.findOne({'a.0.b.$ref': 'old'}) );
-t.update( {}, {$set: {'a.0.b.$ref': 'new'}} );
-assert( t.findOne({'a.b.$ref': 'new'}) );
-assert( !t.findOne({'a.b.$ref': 'old'}) );
+t.ensureIndex({'a.b.$ref': 1});
+t.insert({'a': [{'b': {'$ref': 'old', '$id': 0}}]});
+assert(t.findOne({'a.b.$ref': 'old'}));
+assert(t.findOne({'a.0.b.$ref': 'old'}));
+t.update({}, {$set: {'a.0.b.$ref': 'new'}});
+assert(t.findOne({'a.b.$ref': 'new'}));
+assert(!t.findOne({'a.b.$ref': 'old'}));
// a.b and a-b
t = db.jstests_update_arraymatch8;
t.drop();
-t.ensureIndex( {'a.b': 1} );
-t.ensureIndex( {'a-b': 1} );
-t.insert( {'a':{'b':'old'}} );
-assert( t.findOne({'a.b': 'old'}) );
-t.update( {}, {$set: {'a': {'b': 'new'}}} );
-assert( t.findOne({'a.b': 'new'}) );
-assert( !t.findOne({'a.b': 'old'}) );
+t.ensureIndex({'a.b': 1});
+t.ensureIndex({'a-b': 1});
+t.insert({'a': {'b': 'old'}});
+assert(t.findOne({'a.b': 'old'}));
+t.update({}, {$set: {'a': {'b': 'new'}}});
+assert(t.findOne({'a.b': 'new'}));
+assert(!t.findOne({'a.b': 'old'}));
diff --git a/jstests/core/update_bit_examples.js b/jstests/core/update_bit_examples.js
index 3374b502055..adcf6976572 100644
--- a/jstests/core/update_bit_examples.js
+++ b/jstests/core/update_bit_examples.js
@@ -5,21 +5,21 @@ coll.drop();
// $bit and
coll.remove({});
-coll.save({_id:1, a:NumberInt(2)});
+coll.save({_id: 1, a: NumberInt(2)});
res = coll.update({}, {$bit: {a: {and: NumberInt(4)}}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 0);
// $bit or
coll.remove({});
-coll.save({_id:1, a:NumberInt(2)});
+coll.save({_id: 1, a: NumberInt(2)});
res = coll.update({}, {$bit: {a: {or: NumberInt(4)}}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 6);
// $bit xor
coll.remove({});
-coll.save({_id:1, a:NumberInt(0)});
+coll.save({_id: 1, a: NumberInt(0)});
res = coll.update({}, {$bit: {a: {xor: NumberInt(4)}}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 4);
diff --git a/jstests/core/update_blank1.js b/jstests/core/update_blank1.js
index 8fe58419ddc..0777c68c745 100644
--- a/jstests/core/update_blank1.js
+++ b/jstests/core/update_blank1.js
@@ -2,9 +2,12 @@
t = db.update_blank1;
t.drop();
-orig = { "" : 1 , _id : 2 , "a" : 3 , "b" : 4 };
-t.insert( orig );
-var res = t.update( {} , { $set : { "c" : 5 } } );
-print( res );
+orig = {
+ "": 1,
+ _id: 2, "a": 3, "b": 4
+};
+t.insert(orig);
+var res = t.update({}, {$set: {"c": 5}});
+print(res);
orig["c"] = 5;
-assert.docEq( orig , t.findOne() , "after $set" ); // SERVER-2651
+assert.docEq(orig, t.findOne(), "after $set"); // SERVER-2651
diff --git a/jstests/core/update_currentdate_examples.js b/jstests/core/update_currentdate_examples.js
index 3430c261481..466ce96e5cc 100644
--- a/jstests/core/update_currentdate_examples.js
+++ b/jstests/core/update_currentdate_examples.js
@@ -5,21 +5,21 @@ coll.drop();
// $currentDate default
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$currentDate: {a: true}});
assert.writeOK(res);
assert(coll.findOne().a.constructor == Date);
// $currentDate type = date
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$currentDate: {a: {$type: "date"}}});
assert.writeOK(res);
assert(coll.findOne().a.constructor == Date);
// $currentDate type = timestamp
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$currentDate: {a: {$type: "timestamp"}}});
assert.writeOK(res);
assert(coll.findOne().a.constructor == Timestamp);
diff --git a/jstests/core/update_dbref.js b/jstests/core/update_dbref.js
index d4c9ed7354f..71729c203e5 100644
--- a/jstests/core/update_dbref.js
+++ b/jstests/core/update_dbref.js
@@ -4,37 +4,39 @@ var res;
t = db.jstests_update_dbref;
t.drop();
-res = t.save({_id:1, a: new DBRef("a", "b")});
+res = t.save({_id: 1, a: new DBRef("a", "b")});
assert(!res.hasWriteError(), "failed to save dbref");
-assert.docEq({_id:1, a: new DBRef("a", "b")}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("a", "b")}, t.findOne());
res = t.update({}, {$set: {"a.$id": 2}});
assert(!res.hasWriteError(), "a.$id update");
-assert.docEq({_id:1, a: new DBRef("a", 2)}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("a", 2)}, t.findOne());
res = t.update({}, {$set: {"a.$ref": "b"}});
assert(!res.hasWriteError(), "a.$ref update");
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne());
// Bad updates
res = t.update({}, {$set: {"$id": 3}});
assert.writeError(res);
assert(/\$id/.test(res.getWriteError()), "expected bad update because of $id");
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne());
res = t.update({}, {$set: {"$ref": "foo"}});
assert.writeError(res);
assert(/\$ref/.test(res.getWriteError()), "expected bad update because of $ref");
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne());
res = t.update({}, {$set: {"$db": "aDB"}});
assert.writeError(res);
assert(/\$db/.test(res.getWriteError()), "expected bad update because of $db");
-assert.docEq({_id:1, a: new DBRef("b", 2)}, t.findOne());
+assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne());
res = t.update({}, {$set: {"b.$id": 2}});
-assert(res.hasWriteError(), "b.$id update should fail -- doc:" + tojson(t.findOne()) + " result:" + res.toString());
+assert(res.hasWriteError(),
+ "b.$id update should fail -- doc:" + tojson(t.findOne()) + " result:" + res.toString());
res = t.update({}, {$set: {"b.$ref": 2}});
-assert(res.hasWriteError(), "b.$ref update should fail -- doc:" + tojson(t.findOne()) + " result:" + res.toString());
+assert(res.hasWriteError(),
+ "b.$ref update should fail -- doc:" + tojson(t.findOne()) + " result:" + res.toString());
diff --git a/jstests/core/update_find_and_modify_id.js b/jstests/core/update_find_and_modify_id.js
index 12720be9d84..a75a5595451 100644
--- a/jstests/core/update_find_and_modify_id.js
+++ b/jstests/core/update_find_and_modify_id.js
@@ -2,7 +2,10 @@
// an _id in the update document, as long as the _id will not be modified
var t = db.jstests_server4516;
-var startingDoc = {_id: 1, a: 1};
+var startingDoc = {
+ _id: 1,
+ a: 1
+};
function prepare() {
t.drop();
@@ -32,7 +35,7 @@ function update_fails(updateDoc, qid) {
assert.eq(t.findOne(), startingDoc);
prepare();
- assert.throws(function () {
+ assert.throws(function() {
t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true});
});
assert.eq(t.count(), 1);
diff --git a/jstests/core/update_invalid1.js b/jstests/core/update_invalid1.js
index 46b68f7db63..3fd96d61f38 100644
--- a/jstests/core/update_invalid1.js
+++ b/jstests/core/update_invalid1.js
@@ -2,5 +2,5 @@
t = db.update_invalid1;
t.drop();
-t.update( { _id : 5 } , { $set : { $inc : { x : 5 } } } , true );
-assert.eq( 0 , t.count() , "A1" );
+t.update({_id: 5}, {$set: {$inc: {x: 5}}}, true);
+assert.eq(0, t.count(), "A1");
diff --git a/jstests/core/update_min_max_examples.js b/jstests/core/update_min_max_examples.js
index 7acfd5bb45b..a8a86f22986 100644
--- a/jstests/core/update_min_max_examples.js
+++ b/jstests/core/update_min_max_examples.js
@@ -4,59 +4,65 @@ var coll = db.update_min_max;
coll.drop();
// $min for number
-coll.insert({_id:1, a:2});
-res = coll.update({_id:1}, {$min: {a: 1}});
+coll.insert({_id: 1, a: 2});
+res = coll.update({_id: 1}, {$min: {a: 1}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:1}).a, 1);
+assert.eq(coll.findOne({_id: 1}).a, 1);
// $max for number
-coll.insert({_id:2, a:2});
-res = coll.update({_id:2}, {$max: {a: 1}});
+coll.insert({_id: 2, a: 2});
+res = coll.update({_id: 2}, {$max: {a: 1}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:2}).a, 2);
+assert.eq(coll.findOne({_id: 2}).a, 2);
// $min for Date
-coll.insert({_id:3, a: new Date()});
-var origDoc = coll.findOne({_id:3});
+coll.insert({_id: 3, a: new Date()});
+var origDoc = coll.findOne({_id: 3});
sleep(2);
-res = coll.update({_id:3}, {$min: {a: new Date()}});
+res = coll.update({_id: 3}, {$min: {a: new Date()}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:3}).a, origDoc.a);
+assert.eq(coll.findOne({_id: 3}).a, origDoc.a);
// $max for Date
-coll.insert({_id:4, a: new Date()});
+coll.insert({_id: 4, a: new Date()});
sleep(2);
var newDate = new Date();
-res = coll.update({_id:4}, {$max: {a: newDate}});
+res = coll.update({_id: 4}, {$max: {a: newDate}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:4}).a, newDate);
+assert.eq(coll.findOne({_id: 4}).a, newDate);
// $max for small number
-coll.insert({_id:5, a: 1e-15 });
+coll.insert({_id: 5, a: 1e-15});
// Slightly bigger than 1e-15.
var biggerval = 0.000000000000001000000000000001;
-res = coll.update({_id:5}, {$max: {a : biggerval}});
+res = coll.update({_id: 5}, {$max: {a: biggerval}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:5}).a, biggerval);
+assert.eq(coll.findOne({_id: 5}).a, biggerval);
// $min for a small number
-coll.insert({_id:6, a: biggerval });
-res = coll.update({_id:6}, {$min: {a : 1e-15}});
+coll.insert({_id: 6, a: biggerval});
+res = coll.update({_id: 6}, {$min: {a: 1e-15}});
assert.writeOK(res);
-assert.eq(coll.findOne({_id:6}).a, 1e-15);
+assert.eq(coll.findOne({_id: 6}).a, 1e-15);
// $max with positional operator
-var insertdoc = {_id:7, y: [{a:2}, {a:6}, {a:[9,1,1]}]};
+var insertdoc = {
+ _id: 7,
+ y: [{a: 2}, {a: 6}, {a: [9, 1, 1]}]
+};
coll.insert(insertdoc);
-res = coll.update({_id: 7, "y.a": 6 }, {$max: {"y.$.a" : 7 }});
+res = coll.update({_id: 7, "y.a": 6}, {$max: {"y.$.a": 7}});
assert.writeOK(res);
insertdoc.y[1].a = 7;
-assert.docEq(coll.findOne({_id:7}), insertdoc);
+assert.docEq(coll.findOne({_id: 7}), insertdoc);
// $min with positional operator
-insertdoc = {_id:8, y: [{a:2}, {a:6}, {a:[9,1,1]}]};
+insertdoc = {
+ _id: 8,
+ y: [{a: 2}, {a: 6}, {a: [9, 1, 1]}]
+};
coll.insert(insertdoc);
-res = coll.update({_id: 8, "y.a": 6 }, {$min: {"y.$.a" : 5 }});
+res = coll.update({_id: 8, "y.a": 6}, {$min: {"y.$.a": 5}});
assert.writeOK(res);
insertdoc.y[1].a = 5;
-assert.docEq(coll.findOne({_id:8}), insertdoc);
+assert.docEq(coll.findOne({_id: 8}), insertdoc);
diff --git a/jstests/core/update_mul_examples.js b/jstests/core/update_mul_examples.js
index 36738b93990..390ae57efd8 100644
--- a/jstests/core/update_mul_examples.js
+++ b/jstests/core/update_mul_examples.js
@@ -5,35 +5,35 @@ coll.drop();
// $mul positive
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$mul: {a: 10}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 20);
// $mul negative
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$mul: {a: -10}});
assert.writeOK(res);
assert.eq(coll.findOne().a, -20);
// $mul zero
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$mul: {a: 0}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 0);
// $mul decimal
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$mul: {a: 1.1}});
assert.writeOK(res);
assert.eq(coll.findOne().a, 2.2);
// $mul negative decimal
coll.remove({});
-coll.save({_id:1, a:2});
+coll.save({_id: 1, a: 2});
res = coll.update({}, {$mul: {a: -0.1}});
assert.writeOK(res);
assert.eq(coll.findOne().a, -0.2);
diff --git a/jstests/core/update_multi3.js b/jstests/core/update_multi3.js
index 38341dcd13f..10e639803b5 100644
--- a/jstests/core/update_multi3.js
+++ b/jstests/core/update_multi3.js
@@ -1,25 +1,22 @@
t = db.update_multi3;
-function test( useIndex ){
+function test(useIndex) {
t.drop();
-
- if ( useIndex )
- t.ensureIndex({k:1});
- for (i=0; i<10; i++) {
- t.save({ _id : i , k: 'x', a: []});
+ if (useIndex)
+ t.ensureIndex({k: 1});
+
+ for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: 'x', a: []});
}
-
- t.update({k: 'x'}, {$push: {a: 'y'}}, false, true);
- t.find( { k : "x" } ).forEach(
- function(z){
- assert.eq( [ "y" ] , z.a , "useIndex: " + useIndex );
- }
- );
+ t.update({k: 'x'}, {$push: {a: 'y'}}, false, true);
+ t.find({k: "x"}).forEach(function(z) {
+ assert.eq(["y"], z.a, "useIndex: " + useIndex);
+ });
}
-test( false );
-test( true );
+test(false);
+test(true);
diff --git a/jstests/core/update_multi4.js b/jstests/core/update_multi4.js
index f290894298c..3b5c2f04b29 100644
--- a/jstests/core/update_multi4.js
+++ b/jstests/core/update_multi4.js
@@ -2,17 +2,15 @@
t = db.update_mulit4;
t.drop();
-for(i=0;i<1000;i++){
- t.insert( { _id:i ,
- k:i%12,
- v:"v"+i%12 } );
-}
+for (i = 0; i < 1000; i++) {
+ t.insert({_id: i, k: i % 12, v: "v" + i % 12});
+}
-t.ensureIndex({k:1});
+t.ensureIndex({k: 1});
-assert.eq( 84 , t.count({k:2,v:"v2"} ) , "A0" );
+assert.eq(84, t.count({k: 2, v: "v2"}), "A0");
-t.update({k:2},{$set:{v:"two v2"}},false,true);
+t.update({k: 2}, {$set: {v: "two v2"}}, false, true);
-assert.eq( 0 , t.count({k:2,v:"v2"} ) , "A1" );
-assert.eq( 84 , t.count({k:2,v:"two v2"} ) , "A2" );
+assert.eq(0, t.count({k: 2, v: "v2"}), "A1");
+assert.eq(84, t.count({k: 2, v: "two v2"}), "A2");
diff --git a/jstests/core/update_multi5.js b/jstests/core/update_multi5.js
index c6d51ef0196..e29ad562d8c 100644
--- a/jstests/core/update_multi5.js
+++ b/jstests/core/update_multi5.js
@@ -3,15 +3,11 @@ t = db.update_multi5;
t.drop();
-t.insert({path: 'r1', subscribers: [1,2]});
-t.insert({path: 'r2', subscribers: [3,4]});
+t.insert({path: 'r1', subscribers: [1, 2]});
+t.insert({path: 'r2', subscribers: [3, 4]});
t.update({}, {$addToSet: {subscribers: 5}}, false, true);
-t.find().forEach(
- function(z){
- assert.eq( 3 , z.subscribers.length , z );
- }
-);
-
-
+t.find().forEach(function(z) {
+ assert.eq(3, z.subscribers.length, z);
+});
diff --git a/jstests/core/update_multi6.js b/jstests/core/update_multi6.js
index 39434d3f512..57e8112031c 100644
--- a/jstests/core/update_multi6.js
+++ b/jstests/core/update_multi6.js
@@ -3,9 +3,8 @@ var res;
t = db.update_multi6;
t.drop();
-t.update( { _id : 1 } , { _id : 1 , x : 1 , y : 2 } , true , false );
-assert( t.findOne( { _id : 1 } ) , "A" );
-
-res = t.update( { _id : 2 } , { _id : 2 , x : 1 , y : 2 } , true , true );
-assert.writeError( res );
+t.update({_id: 1}, {_id: 1, x: 1, y: 2}, true, false);
+assert(t.findOne({_id: 1}), "A");
+res = t.update({_id: 2}, {_id: 2, x: 1, y: 2}, true, true);
+assert.writeError(res);
diff --git a/jstests/core/update_replace.js b/jstests/core/update_replace.js
index 54f13f7ded2..44099851ef4 100644
--- a/jstests/core/update_replace.js
+++ b/jstests/core/update_replace.js
@@ -14,19 +14,19 @@ var res;
conn._skipValidation = true;
// Should not allow "." in field names
-res = t.save({_id:1, "a.a":1});
+res = t.save({_id: 1, "a.a": 1});
assert(res.hasWriteError(), "a.a");
// Should not allow "." in field names, embedded
-res = t.save({_id:1, a :{"a.a":1}});
+res = t.save({_id: 1, a: {"a.a": 1}});
assert(res.hasWriteError(), "a: a.a");
// Should not allow "$"-prefixed field names, caught before "." check
-res = t.save({_id:1, $a :{"a.a":1}});
+res = t.save({_id: 1, $a: {"a.a": 1}});
assert(res.hasWriteError(), "$a: a.a");
// Should not allow "$"-prefixed field names
-res = t.save({_id:1, $a: 1});
+res = t.save({_id: 1, $a: 1});
assert(res.hasWriteError(), "$a");
// _id validation checks
@@ -36,7 +36,7 @@ res = t.save({_id: /a/});
assert(res.hasWriteError(), "_id regex");
// Should not allow regex _id, even if not first
-res = t.save({a:2, _id: /a/});
+res = t.save({a: 2, _id: /a/});
assert(res.hasWriteError(), "a _id regex");
// Should not allow array _id
@@ -44,9 +44,9 @@ res = t.save({_id: [9]});
assert(res.hasWriteError(), "_id array");
// This is fine since _id isn't a top level field
-res = t.save({a :{ _id: [9]}});
+res = t.save({a: {_id: [9]}});
assert(!res.hasWriteError(), "embedded _id array");
// This is fine since _id isn't a top level field
-res = t.save({b:1, a :{ _id: [9]}});
+res = t.save({b: 1, a: {_id: [9]}});
assert(!res.hasWriteError(), "b embedded _id array");
diff --git a/jstests/core/update_server-12848.js b/jstests/core/update_server-12848.js
index f5ee9a8f2fa..c33e8dd9f62 100644
--- a/jstests/core/update_server-12848.js
+++ b/jstests/core/update_server-12848.js
@@ -8,13 +8,19 @@ var res;
var t = db.update_server_12848;
t.drop();
-var orig = { "_id" : 1, "a" : [ 1, [ ] ] };
+var orig = {
+ "_id": 1,
+ "a": [1, []]
+};
res = t.insert(orig);
assert.writeOK(res, "insert");
assert.eq(orig, t.findOne());
-res = t.update({ "_id" : 1 }, { $addToSet : { "a.1" : 1 } });
+res = t.update({"_id": 1}, {$addToSet: {"a.1": 1}});
assert.writeOK(res, "update");
-var updated = { "_id" : 1, "a" : [ 1, [ 1 ] ] };
+var updated = {
+ "_id": 1,
+ "a": [1, [1]]
+};
assert.eq(updated, t.findOne());
diff --git a/jstests/core/update_setOnInsert.js b/jstests/core/update_setOnInsert.js
index 4f53d0311b5..430f23ce6dd 100644
--- a/jstests/core/update_setOnInsert.js
+++ b/jstests/core/update_setOnInsert.js
@@ -2,32 +2,32 @@
t = db.update_setOnInsert;
var res;
-function dotest( useIndex ) {
+function dotest(useIndex) {
t.drop();
- if ( useIndex ) {
- t.ensureIndex( { a : 1 } );
+ if (useIndex) {
+ t.ensureIndex({a: 1});
}
- t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
- assert.docEq( { _id : 5, a: 3, x : 2 }, t.findOne() );
+ t.update({_id: 5}, {$inc: {x: 2}, $setOnInsert: {a: 3}}, true);
+ assert.docEq({_id: 5, a: 3, x: 2}, t.findOne());
- t.update( { _id: 5 }, { $set : { a : 4 } }, true );
+ t.update({_id: 5}, {$set: {a: 4}}, true);
- t.update( { _id: 5 }, { $inc : { x: 2 }, $setOnInsert : { a : 3 } }, true );
- assert.docEq( { _id : 5, a: 4, x : 4 }, t.findOne() );
+ t.update({_id: 5}, {$inc: {x: 2}, $setOnInsert: {a: 3}}, true);
+ assert.docEq({_id: 5, a: 4, x: 4}, t.findOne());
}
-dotest( false );
-dotest( true );
+dotest(false);
+dotest(true);
// Cases for SERVER-9958 -- Allow _id $setOnInsert during insert (if upsert:true, and not doc found)
t.drop();
-res = t.update( {_id: 1} , { $setOnInsert: { "_id.a": new Date() } } , true );
-assert.writeError(res, "$setOnInsert _id.a worked" );
+res = t.update({_id: 1}, {$setOnInsert: {"_id.a": new Date()}}, true);
+assert.writeError(res, "$setOnInsert _id.a worked");
-res = t.update( {"_id.a": 4} , { $setOnInsert: { "_id.b": 1 } } , true );
-assert.writeError(res, "$setOnInsert _id.a/b worked" );
+res = t.update({"_id.a": 4}, {$setOnInsert: {"_id.b": 1}}, true);
+assert.writeError(res, "$setOnInsert _id.a/b worked");
-res = t.update( {"_id.a": 4} , { $setOnInsert: { "_id": {a:4, b:1} } } , true );
-assert.writeError(res, "$setOnInsert _id.a/a+b worked" );
+res = t.update({"_id.a": 4}, {$setOnInsert: {"_id": {a: 4, b: 1}}}, true);
+assert.writeError(res, "$setOnInsert _id.a/a+b worked");
diff --git a/jstests/core/updatea.js b/jstests/core/updatea.js
index 13d2dd0acfc..0c7a9e1c504 100644
--- a/jstests/core/updatea.js
+++ b/jstests/core/updatea.js
@@ -3,66 +3,71 @@ var res;
t = db.updatea;
t.drop();
-orig = { _id : 1 , a : [ { x : 1 , y : 2 } , { x : 10 , y : 11 } ] };
+orig = {
+ _id: 1,
+ a: [{x: 1, y: 2}, {x: 10, y: 11}]
+};
-res = t.save( orig );
+res = t.save(orig);
assert.writeOK(res);
// SERVER-181
-res = t.update( {} , { $set : { "a.0.x" : 3 } } );
+res = t.update({}, {$set: {"a.0.x": 3}});
assert.writeOK(res);
orig.a[0].x = 3;
-assert.eq( orig , t.findOne() , "A1" );
+assert.eq(orig, t.findOne(), "A1");
-res = t.update( {} , { $set : { "a.1.z" : 17 } } );
+res = t.update({}, {$set: {"a.1.z": 17}});
assert.writeOK(res);
orig.a[1].z = 17;
-assert.eq( orig , t.findOne() , "A2" );
+assert.eq(orig, t.findOne(), "A2");
// SERVER-273
-res = t.update( {} , { $unset : { "a.1.y" : 1 } } );
+res = t.update({}, {$unset: {"a.1.y": 1}});
assert.writeOK(res);
delete orig.a[1].y;
-assert.eq( orig , t.findOne() , "A3" );
+assert.eq(orig, t.findOne(), "A3");
// SERVER-333
t.drop();
-orig = { _id : 1 , comments : [ { name : "blah" , rate_up : 0 , rate_ups : [] } ] };
-res = t.save( orig );
+orig = {
+ _id: 1,
+ comments: [{name: "blah", rate_up: 0, rate_ups: []}]
+};
+res = t.save(orig);
assert.writeOK(res);
-
-res = t.update( {} , { $inc: { "comments.0.rate_up" : 1 } , $push: { "comments.0.rate_ups" : 99 } } );
+res = t.update({}, {$inc: {"comments.0.rate_up": 1}, $push: {"comments.0.rate_ups": 99}});
assert.writeOK(res);
orig.comments[0].rate_up++;
-orig.comments[0].rate_ups.push( 99 );
-assert.eq( orig , t.findOne() , "B1" );
+orig.comments[0].rate_ups.push(99);
+assert.eq(orig, t.findOne(), "B1");
t.drop();
-orig = { _id : 1 , a : [] };
-for ( i=0; i<12; i++ )
- orig.a.push( i );
-
-
-res = t.save( orig );
+orig = {
+ _id: 1,
+ a: []
+};
+for (i = 0; i < 12; i++)
+ orig.a.push(i);
+
+res = t.save(orig);
assert.writeOK(res);
-assert.eq( orig , t.findOne() , "C1" );
+assert.eq(orig, t.findOne(), "C1");
-res = t.update( {} , { $inc: { "a.0" : 1 } } );
+res = t.update({}, {$inc: {"a.0": 1}});
assert.writeOK(res);
orig.a[0]++;
-assert.eq( orig , t.findOne() , "C2" );
+assert.eq(orig, t.findOne(), "C2");
-res = t.update( {} , { $inc: { "a.10" : 1 } } );
+res = t.update({}, {$inc: {"a.10": 1}});
assert.writeOK(res);
orig.a[10]++;
-
// SERVER-3218
t.drop();
-t.insert({"a":{"c00":1}, 'c':2});
-res = t.update({"c":2}, {'$inc':{'a.c000':1}});
+t.insert({"a": {"c00": 1}, 'c': 2});
+res = t.update({"c": 2}, {'$inc': {'a.c000': 1}});
assert.writeOK(res);
-assert.eq( { "c00" : 1 , "c000" : 1 } , t.findOne().a , "D1" );
-
+assert.eq({"c00": 1, "c000": 1}, t.findOne().a, "D1");
diff --git a/jstests/core/updateb.js b/jstests/core/updateb.js
index 59e6348a47a..f90ac62b6c3 100644
--- a/jstests/core/updateb.js
+++ b/jstests/core/updateb.js
@@ -2,10 +2,12 @@
t = db.updateb;
t.drop();
-t.update( { "x.y" : 2 } , { $inc : { a : 7 } } , true );
+t.update({"x.y": 2}, {$inc: {a: 7}}, true);
-correct = { a : 7 , x : { y : 2 } };
+correct = {
+ a: 7,
+ x: {y: 2}
+};
got = t.findOne();
delete got._id;
-assert.docEq( correct , got , "A" );
-
+assert.docEq(correct, got, "A");
diff --git a/jstests/core/updatec.js b/jstests/core/updatec.js
index 0c77b8b3cda..8ce8cf4ecdd 100644
--- a/jstests/core/updatec.js
+++ b/jstests/core/updatec.js
@@ -2,13 +2,7 @@
t = db.updatec;
t.drop();
-t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
-t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
-
-assert.docEq(
- {
- "_id" : 123,
- "f" : [ 234, 234 ] ,
- "v" : { "i" : 123, "a" : 456 }
- } , t.findOne() );
+t.update({"_id": 123}, {$set: {"v": {"i": 123, "a": 456}}, $push: {"f": 234}}, 1, 0);
+t.update({"_id": 123}, {$set: {"v": {"i": 123, "a": 456}}, $push: {"f": 234}}, 1, 0);
+assert.docEq({"_id": 123, "f": [234, 234], "v": {"i": 123, "a": 456}}, t.findOne());
diff --git a/jstests/core/updated.js b/jstests/core/updated.js
index da314268eb0..1eaaae3b051 100644
--- a/jstests/core/updated.js
+++ b/jstests/core/updated.js
@@ -2,19 +2,25 @@
t = db.updated;
t.drop();
-o = { _id : Math.random() ,
- items:[null,null,null,null]
- };
+o = {
+ _id: Math.random(),
+ items: [null, null, null, null]
+};
-t.insert( o );
-assert.docEq( o , t.findOne() , "A1" );
+t.insert(o);
+assert.docEq(o, t.findOne(), "A1");
-o.items[0] = {amount:9000,itemId:1};
-t.update({},{$set:{"items.0":o.items[0]}});
-assert.docEq( o , t.findOne() , "A2" );
+o.items[0] = {
+ amount: 9000,
+ itemId: 1
+};
+t.update({}, {$set: {"items.0": o.items[0]}});
+assert.docEq(o, t.findOne(), "A2");
o.items[0].amount += 1000;
-o.items[1] = {amount:1,itemId:2};
-t.update({},{$inc:{"items.0.amount":1000},$set:{"items.1":o.items[1]}});
-assert.docEq( o , t.findOne() , "A3" );
-
+o.items[1] = {
+ amount: 1,
+ itemId: 2
+};
+t.update({}, {$inc: {"items.0.amount": 1000}, $set: {"items.1": o.items[1]}});
+assert.docEq(o, t.findOne(), "A3");
diff --git a/jstests/core/updatee.js b/jstests/core/updatee.js
index 85ba37c5c05..e2fac8af287 100644
--- a/jstests/core/updatee.js
+++ b/jstests/core/updatee.js
@@ -3,21 +3,22 @@
t = db.updatee;
t.drop();
-var o = { "_id" : 1,
- "actual" : {
- "key1" : "val1",
- "key2" : "val2",
- "001" : "val3",
- "002" : "val4",
- "0020000000000000000000" : "val5"
- },
- "profile-id" : "test" };
-
-
-t.insert( o );
-assert.eq( o , t.findOne() , "A1" );
-
-t.update({"profile-id" : "test"}, {$set: {"actual.0030000000000000000000": "val6"}});
+var o = {
+ "_id": 1,
+ "actual": {
+ "key1": "val1",
+ "key2": "val2",
+ "001": "val3",
+ "002": "val4",
+ "0020000000000000000000": "val5"
+ },
+ "profile-id": "test"
+};
+
+t.insert(o);
+assert.eq(o, t.findOne(), "A1");
+
+t.update({"profile-id": "test"}, {$set: {"actual.0030000000000000000000": "val6"}});
var q = t.findOne();
@@ -25,47 +26,47 @@ var q = t.findOne();
assert.eq(q.actual["0020000000000000000000"], "val5", "A2");
assert.eq(q.actual["0030000000000000000000"], "val6", "A3");
-t.update({"profile-id" : "test"}, {$set: {"actual.02": "v4"}});
+t.update({"profile-id": "test"}, {$set: {"actual.02": "v4"}});
q = t.findOne();
assert.eq(q.actual["02"], "v4", "A4");
assert.eq(q.actual["002"], "val4", "A5");
-t.update({"_id" : 1}, {$set : {"actual.2139043290148390248219423941.b" : 4}});
+t.update({"_id": 1}, {$set: {"actual.2139043290148390248219423941.b": 4}});
q = t.findOne();
assert.eq(q.actual["2139043290148390248219423941"].b, 4, "A6");
// non-nested
-t.update({"_id" : 1}, {$set : {"7213647182934612837492342341" : 1}});
-t.update({"_id" : 1}, {$set : {"7213647182934612837492342342" : 2}});
+t.update({"_id": 1}, {$set: {"7213647182934612837492342341": 1}});
+t.update({"_id": 1}, {$set: {"7213647182934612837492342342": 2}});
q = t.findOne();
assert.eq(q["7213647182934612837492342341"], 1, "A7 1");
assert.eq(q["7213647182934612837492342342"], 2, "A7 2");
// 0s
-t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
+t.update({"_id": 1}, {$set: {"actual.000": "val000"}});
q = t.findOne();
assert.eq(q.actual["000"], "val000", "A8 zeros");
-t.update({"_id" : 1}, {$set : {"actual.00" : "val00"}});
+t.update({"_id": 1}, {$set: {"actual.00": "val00"}});
q = t.findOne();
assert.eq(q.actual["00"], "val00", "A8 00");
assert.eq(q.actual["000"], "val000", "A9");
-t.update({"_id" : 1}, {$set : {"actual.000" : "val000"}});
+t.update({"_id": 1}, {$set: {"actual.000": "val000"}});
q = t.findOne();
assert.eq(q.actual["000"], "val000", "A9");
assert.eq(q.actual["00"], "val00", "A10");
-t.update({"_id" : 1}, {$set : {"actual.01" : "val01"}});
+t.update({"_id": 1}, {$set: {"actual.01": "val01"}});
q = t.findOne();
assert.eq(q.actual["000"], "val000", "A11");
assert.eq(q.actual["01"], "val01", "A12");
// shouldn't work, but shouldn't do anything too heinous, either
-t.update({"_id" : 1}, {$set : {"0.." : "val01"}});
-t.update({"_id" : 1}, {$set : {"0..0" : "val01"}});
-t.update({"_id" : 1}, {$set : {".0" : "val01"}});
-t.update({"_id" : 1}, {$set : {"..0" : "val01"}});
-t.update({"_id" : 1}, {$set : {"0.0..0" : "val01"}});
+t.update({"_id": 1}, {$set: {"0..": "val01"}});
+t.update({"_id": 1}, {$set: {"0..0": "val01"}});
+t.update({"_id": 1}, {$set: {".0": "val01"}});
+t.update({"_id": 1}, {$set: {"..0": "val01"}});
+t.update({"_id": 1}, {$set: {"0.0..0": "val01"}});
diff --git a/jstests/core/updatef.js b/jstests/core/updatef.js
index e1164f2b939..6bc8df4e0c1 100644
--- a/jstests/core/updatef.js
+++ b/jstests/core/updatef.js
@@ -1,23 +1,24 @@
// Test unsafe management of nsdt on update command yield SERVER-3208
prefixNS = db.jstests_updatef;
-prefixNS.save( {} );
+prefixNS.save({});
t = db.jstests_updatef_actual;
t.drop();
-t.save( {a:0,b:[]} );
-for( i = 0; i < 1000; ++i ) {
- t.save( {a:100} );
+t.save({a: 0, b: []});
+for (i = 0; i < 1000; ++i) {
+ t.save({a: 100});
}
-t.save( {a:0,b:[]} );
+t.save({a: 0, b: []});
// Repeatedly rename jstests_updatef to jstests_updatef_ and back. This will
// invalidate the jstests_updatef_actual NamespaceDetailsTransient object.
-s = startParallelShell( "for( i=0; i < 100; ++i ) { db.jstests_updatef.renameCollection( 'jstests_updatef_' ); db.jstests_updatef_.renameCollection( 'jstests_updatef' ); }" );
+s = startParallelShell(
+ "for( i=0; i < 100; ++i ) { db.jstests_updatef.renameCollection( 'jstests_updatef_' ); db.jstests_updatef_.renameCollection( 'jstests_updatef' ); }");
-for( i=0; i < 20; ++i ) {
- t.update( {a:0}, {$push:{b:i}}, false, true );
+for (i = 0; i < 20; ++i) {
+ t.update({a: 0}, {$push: {b: i}}, false, true);
}
s();
diff --git a/jstests/core/updateg.js b/jstests/core/updateg.js
index f8d452f71b2..908df755376 100644
--- a/jstests/core/updateg.js
+++ b/jstests/core/updateg.js
@@ -3,15 +3,15 @@
t = db.jstests_updateg;
t.drop();
-t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }}, true);
-assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
+t.update({}, {'$inc': {'all.t': 1, 'all-copy.t': 1}}, true);
+assert.eq(1, t.count({all: {t: 1}, 'all-copy': {t: 1}}));
t.drop();
-t.save({ 'all' : {}, 'all-copy' : {}});
-t.update({}, { '$inc' : { 'all.t' : 1, 'all-copy.t' : 1 }});
-assert.eq( 1, t.count( {all:{t:1},'all-copy':{t:1}} ) );
+t.save({'all': {}, 'all-copy': {}});
+t.update({}, {'$inc': {'all.t': 1, 'all-copy.t': 1}});
+assert.eq(1, t.count({all: {t: 1}, 'all-copy': {t: 1}}));
t.drop();
-t.save({ 'all11' : {}, 'all2' : {}});
-t.update({}, { '$inc' : { 'all11.t' : 1, 'all2.t' : 1 }});
-assert.eq( 1, t.count( {all11:{t:1},'all2':{t:1}} ) );
+t.save({'all11': {}, 'all2': {}});
+t.update({}, {'$inc': {'all11.t': 1, 'all2.t': 1}});
+assert.eq(1, t.count({all11: {t: 1}, 'all2': {t: 1}}));
diff --git a/jstests/core/updateh.js b/jstests/core/updateh.js
index c409b49edaf..91c985ac014 100644
--- a/jstests/core/updateh.js
+++ b/jstests/core/updateh.js
@@ -4,82 +4,82 @@ var res;
t = db.jstest_updateh;
t.drop();
-t.insert( {x:1} );
+t.insert({x: 1});
-res = t.update( {x:1}, {$set: {y:1}} ); // ok
-assert.writeOK( res );
+res = t.update({x: 1}, {$set: {y: 1}}); // ok
+assert.writeOK(res);
-res = t.update( {x:1}, {$set: {$z:1}} ); // not ok
-assert.writeError( res );
+res = t.update({x: 1}, {$set: {$z: 1}}); // not ok
+assert.writeError(res);
-res = t.update( {x:1}, {$set: {'a.$b':1}} ); // not ok
-assert.writeError( res );
+res = t.update({x: 1}, {$set: {'a.$b': 1}}); // not ok
+assert.writeError(res);
-res = t.update( {x:1}, {$inc: {$z:1}} ); // not ok
-assert.writeError( res );
+res = t.update({x: 1}, {$inc: {$z: 1}}); // not ok
+assert.writeError(res);
-res = t.update( {x:1}, {$pushAll: {$z:[1,2,3]}} ); // not ok
-assert.writeError( res );
+res = t.update({x: 1}, {$pushAll: {$z: [1, 2, 3]}}); // not ok
+assert.writeError(res);
-//Second section
+// Second section
t.drop();
-t.save( {_id:0, n: 0} );
+t.save({_id: 0, n: 0});
// Test that '$' cannot be the first character in a field.
// SERVER-7150
-res = t.update({ n: 0 }, { $set: { $x: 1 }});
+res = t.update({n: 0}, {$set: {$x: 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { $$$: 1 }});
+res = t.update({n: 0}, {$set: {$$$: 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { "sneaky.$x": 1 }});
+res = t.update({n: 0}, {$set: {"sneaky.$x": 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { "secret.agent$.$x": 1 }});
+res = t.update({n: 0}, {$set: {"secret.agent$.$x": 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { "$secret.agent.x": 1 }});
+res = t.update({n: 0}, {$set: {"$secret.agent.x": 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { "secret.agent$": 1 }});
+res = t.update({n: 0}, {$set: {"secret.agent$": 1}});
assert.writeOK(res);
-t.save( {_id:0, n: 0} );
+t.save({_id: 0, n: 0});
// Test that you cannot update database references into top level fields
// Enable after SERVER-14252 fixed: currently validation does not catch DBRef
// fields at the top level for update and will not cause an error here
-//res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1, $db: "1"}});
-//assert.writeError(res);
+// res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1, $db: "1"}});
+// assert.writeError(res);
-//res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1}});
-//assert.writeError(res);
+// res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1}});
+// assert.writeError(res);
// SERVER-11241: Validation used to allow any DBRef field name as a prefix
// thus allowing things like $idXXX
-res = t.update({ n: 0 }, { $set: { $reffoo: 1 }});
+res = t.update({n: 0}, {$set: {$reffoo: 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { $idbar: 1 }});
+res = t.update({n: 0}, {$set: {$idbar: 1}});
assert.writeError(res);
-res = t.update({ n: 0 }, { $set: { $dbbaz: 1 }});
+res = t.update({n: 0}, {$set: {$dbbaz: 1}});
assert.writeError(res);
// Test that '$id', '$db', and '$ref' are acceptable field names in
// the correct case (subdoc)
// SERVER-3231
-res = t.update({ n: 0 }, { $set: { 'x.$ref': '1', 'x.$id': 1, 'x.$db': '1' }});
+res = t.update({n: 0}, {$set: {'x.$ref': '1', 'x.$id': 1, 'x.$db': '1'}});
assert.writeOK(res);
-t.save( {_id:0, n: 0} );
+t.save({_id: 0, n: 0});
// Test that '$' can occur elsewhere in a field name.
// SERVER-7557
-res = t.update({n: 0 }, { $set: { ke$sha: 1 }});
+res = t.update({n: 0}, {$set: {ke$sha: 1}});
assert.writeOK(res);
-t.save( {_id:0, n: 0} );
+t.save({_id: 0, n: 0});
-res = t.update({n: 0 }, { $set: { more$$moreproblem$: 1 }});
+res = t.update({n: 0}, {$set: {more$$moreproblem$: 1}});
assert.writeOK(res);
-t.save( {_id:0, n: 0} );
+t.save({_id: 0, n: 0});
diff --git a/jstests/core/updatei.js b/jstests/core/updatei.js
index e45b3fde5bb..d5bc3500ab0 100644
--- a/jstests/core/updatei.js
+++ b/jstests/core/updatei.js
@@ -6,81 +6,81 @@ t = db.updatei;
t.drop();
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "x" }, { $push: { a: "y" }}, { multi: true });
-t.find({ k : "x" }).forEach(function(z) {
- assert.eq([ "y" ], z.a, "multi update using object arg");
+t.update({k: "x"}, {$push: {a: "y"}}, {multi: true});
+t.find({k: "x"}).forEach(function(z) {
+ assert.eq(["y"], z.a, "multi update using object arg");
});
t.drop();
// Using a single update
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "x" }, { $push: { a: "y" }}, { multi: false });
-assert.eq(1, t.find({ "a": "y" }).count(), "update using object arg");
+t.update({k: "x"}, {$push: {a: "y"}}, {multi: false});
+assert.eq(1, t.find({"a": "y"}).count(), "update using object arg");
t.drop();
// Using upsert, found
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true });
-assert.eq(1, t.find({ "k": "x", "a": "y" }).count(), "upsert (found) using object arg");
+t.update({k: "x"}, {$push: {a: "y"}}, {upsert: true});
+assert.eq(1, t.find({"k": "x", "a": "y"}).count(), "upsert (found) using object arg");
t.drop();
// Using upsert + multi, found
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: true, multi: true });
-t.find({ k : "x" }).forEach(function(z) {
- assert.eq([ "y" ], z.a, "multi + upsert (found) using object arg");
+t.update({k: "x"}, {$push: {a: "y"}}, {upsert: true, multi: true});
+t.find({k: "x"}).forEach(function(z) {
+ assert.eq(["y"], z.a, "multi + upsert (found) using object arg");
});
t.drop();
// Using upsert, not found
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: true });
-assert.eq(1, t.find({ "k": "y", "a": "y" }).count(), "upsert (not found) using object arg");
+t.update({k: "y"}, {$push: {a: "y"}}, {upsert: true});
+assert.eq(1, t.find({"k": "y", "a": "y"}).count(), "upsert (not found) using object arg");
t.drop();
// Without upsert, found
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "x" }, { $push: { a: "y" }}, { upsert: false });
-assert.eq(1, t.find({ "a": "y" }).count(), "no upsert (found) using object arg");
+t.update({k: "x"}, {$push: {a: "y"}}, {upsert: false});
+assert.eq(1, t.find({"a": "y"}).count(), "no upsert (found) using object arg");
t.drop();
// Without upsert, not found
-for (i=0; i<10; i++) {
- t.save({ _id : i, k: "x", a: [] });
+for (i = 0; i < 10; i++) {
+ t.save({_id: i, k: "x", a: []});
}
-t.update({ k: "y" }, { $push: { a: "y" }}, { upsert: false });
-assert.eq(0, t.find({ "a": "y" }).count(), "no upsert (not found) using object arg");
+t.update({k: "y"}, {$push: {a: "y"}}, {upsert: false});
+assert.eq(0, t.find({"a": "y"}).count(), "no upsert (not found) using object arg");
t.drop();
diff --git a/jstests/core/updatej.js b/jstests/core/updatej.js
index 6a70a4c2d51..378a29e4573 100644
--- a/jstests/core/updatej.js
+++ b/jstests/core/updatej.js
@@ -4,9 +4,9 @@
t = db.jstests_updatej;
t.drop();
-t.save( {a:[]} );
-t.save( {a:1} );
-t.save( {a:[]} );
+t.save({a: []});
+t.save({a: 1});
+t.save({a: []});
-t.update( {}, {$push:{a:2}}, false, true );
-assert.eq( 1, t.count( {a:2} ) );
+t.update({}, {$push: {a: 2}}, false, true);
+assert.eq(1, t.count({a: 2}));
diff --git a/jstests/core/updatek.js b/jstests/core/updatek.js
index b96f3138a81..1af9e6112e7 100644
--- a/jstests/core/updatek.js
+++ b/jstests/core/updatek.js
@@ -3,12 +3,11 @@
t = db.jstests_updatek;
t.drop();
-t.save( { _id:0, '1':{}, '01':{} } );
-t.update( {}, { $set:{ '1.b':1, '1.c':2 } } );
-assert.docEq( { "01" : { }, "1" : { "b" : 1, "c" : 2 }, "_id" : 0 }, t.findOne() );
+t.save({_id: 0, '1': {}, '01': {}});
+t.update({}, {$set: {'1.b': 1, '1.c': 2}});
+assert.docEq({"01": {}, "1": {"b": 1, "c": 2}, "_id": 0}, t.findOne());
t.drop();
-t.save( { _id:0, '1':{}, '01':{} } );
-t.update( {}, { $set:{ '1.b':1, '01.c':2 } } );
-assert.docEq( { "01" : { "c" : 2 }, "1" : { "b" : 1 }, "_id" : 0 }, t.findOne() );
-
+t.save({_id: 0, '1': {}, '01': {}});
+t.update({}, {$set: {'1.b': 1, '01.c': 2}});
+assert.docEq({"01": {"c": 2}, "1": {"b": 1}, "_id": 0}, t.findOne());
diff --git a/jstests/core/updatel.js b/jstests/core/updatel.js
index 6b6c8b20613..69aa997a224 100644
--- a/jstests/core/updatel.js
+++ b/jstests/core/updatel.js
@@ -9,71 +9,62 @@ var res;
t = db.jstests_updatel;
t.drop();
-
-
// The collection is empty, forcing an upsert. In this case the query has no array position match
// to substitute for the positional operator. SERVER-4713
-assert.writeError(t.update( {}, { $set:{ 'a.$.b':1 } }, true ));
-assert.eq( 0, t.count(), "No upsert occurred." );
-
-
+assert.writeError(t.update({}, {$set: {'a.$.b': 1}}, true));
+assert.eq(0, t.count(), "No upsert occurred.");
// Save a document to the collection so it is no longer empty.
-assert.writeOK(t.save( { _id:0 } ));
+assert.writeOK(t.save({_id: 0}));
// Now, with an existing document, trigger an update rather than an upsert. The query has no array
// position match to substitute for the positional operator. SERVER-6669
-assert.writeError(t.update( {}, { $set:{ 'a.$.b':1 } } ));
-assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
-
-
+assert.writeError(t.update({}, {$set: {'a.$.b': 1}}));
+assert.eq([{_id: 0}], t.find().toArray(), "No update occurred.");
// Now, try with an update by _id (without a query array match).
-assert.writeError(t.update( { _id:0 }, { $set:{ 'a.$.b':1 } } ));
-assert.eq( [ { _id:0 } ], t.find().toArray(), "No update occurred." );
-
-
+assert.writeError(t.update({_id: 0}, {$set: {'a.$.b': 1}}));
+assert.eq([{_id: 0}], t.find().toArray(), "No update occurred.");
// Seed the collection with a document suitable for the following check.
assert.writeOK(t.remove({}));
-assert.writeOK(t.save( { _id:0, a:[ { b:{ c:1 } } ] } ));
+assert.writeOK(t.save({_id: 0, a: [{b: {c: 1}}]}));
// Now, attempt to apply an update with two nested positional operators. There is a positional
// query match for the first positional operator but not the second. Note that dollar sign
// substitution for multiple positional operators is not implemented (SERVER-831).
-assert.writeError(t.update( { 'a.b.c':1 }, { $set:{ 'a.$.b.$.c':2 } } ));
-assert.eq( [ { _id:0, a:[ { b:{ c:1 } } ] } ], t.find().toArray(), "No update occurred." );
-
+assert.writeError(t.update({'a.b.c': 1}, {$set: {'a.$.b.$.c': 2}}));
+assert.eq([{_id: 0, a: [{b: {c: 1}}]}], t.find().toArray(), "No update occurred.");
// SERVER-1155 test an update with the positional operator
// that has a regex in the query field
t.drop();
-assert.writeOK(t.insert({_id:1, arr:[{a:"z", b:1}]}));
+assert.writeOK(t.insert({_id: 1, arr: [{a: "z", b: 1}]}));
assert.writeOK(t.update({"arr.a": /^z$/}, {$set: {"arr.$.b": 2}}, false, true));
-assert.eq(t.findOne().arr[0], {a:"z", b:2});
+assert.eq(t.findOne().arr[0], {a: "z", b: 2});
t.drop();
-assert.writeOK(t.insert({_id:1, arr:[{a:"z",b:1}, {a:"abc",b:2}, {a:"lmn",b:3}]}));
+assert.writeOK(t.insert({_id: 1, arr: [{a: "z", b: 1}, {a: "abc", b: 2}, {a: "lmn", b: 3}]}));
assert.writeOK(t.update({"arr.a": /l/}, {$inc: {"arr.$.b": 2}}, false, true));
-assert.eq(t.findOne().arr[2], {a:"lmn", b:5});
+assert.eq(t.findOne().arr[2], {a: "lmn", b: 5});
// Test updates with ambiguous positional operator.
t.drop();
assert.writeOK(t.insert({_id: 0, a: [1, 2]}));
assert.writeError(t.update({$and: [{a: 1}, {a: 2}]}, {$set: {'a.$': 5}}));
-assert.eq( [ {_id: 0, a: [1, 2]} ], t.find().toArray(), "No update occurred." );
+assert.eq([{_id: 0, a: [1, 2]}], t.find().toArray(), "No update occurred.");
t.drop();
assert.writeOK(t.insert({_id: 0, a: [1], b: [2]}));
assert.writeError(t.update({a: 1, b: 2}, {$set: {'a.$': 5}}));
-assert.eq( [ {_id: 0, a: [1], b: [2]} ], t.find().toArray(), "No update occurred." );
+assert.eq([{_id: 0, a: [1], b: [2]}], t.find().toArray(), "No update occurred.");
t.drop();
assert.writeOK(t.insert({_id: 0, a: [1], b: [2]}));
assert.writeError(t.update({a: {$elemMatch: {$lt: 2}}, b: 2}, {$set: {'a.$': 5}}));
-assert.eq( [ {_id: 0, a: [1], b: [2]} ], t.find().toArray(), "No update occurred." );
+assert.eq([{_id: 0, a: [1], b: [2]}], t.find().toArray(), "No update occurred.");
t.drop();
assert.writeOK(t.insert({_id: 0, a: [{b: 1}, {c: 2}]}));
assert.writeError(t.update({'a.b': 1, 'a.c': 2}, {$set: {'a.$': 5}}));
-assert.eq( [ {_id: 0, a: [{b: 1}, {c: 2}]} ], t.find().toArray(), "No update occurred." );
\ No newline at end of file
+assert.eq([{_id: 0, a: [{b: 1}, {c: 2}]}], t.find().toArray(), "No update occurred.");
\ No newline at end of file
diff --git a/jstests/core/updatem.js b/jstests/core/updatem.js
index a636ac99a59..1346a293f66 100644
--- a/jstests/core/updatem.js
+++ b/jstests/core/updatem.js
@@ -4,17 +4,17 @@ t = db.jstests_updatem;
t.drop();
// new _id from insert (upsert:true)
-t.update({a:1}, {$inc:{b:1}}, true);
-var doc = t.findOne({a:1});
+t.update({a: 1}, {$inc: {b: 1}}, true);
+var doc = t.findOne({a: 1});
assert(doc["_id"], "missing _id");
// new _id from insert (upsert:true)
-t.update({a:1}, {$inc:{b:1}}, true);
-var doc = t.findOne({a:1});
+t.update({a: 1}, {$inc: {b: 1}}, true);
+var doc = t.findOne({a: 1});
assert(doc["_id"], "missing _id");
// no _id on existing doc
-t.getDB().runCommand({godinsert:t.getName(), obj:{a:2}});
-t.update({a:2}, {$inc:{b:1}}, true);
-var doc = t.findOne({a:2});
+t.getDB().runCommand({godinsert: t.getName(), obj: {a: 2}});
+t.update({a: 2}, {$inc: {b: 1}}, true);
+var doc = t.findOne({a: 2});
assert(doc["_id"], "missing _id after update");
diff --git a/jstests/core/upsert_and.js b/jstests/core/upsert_and.js
index 744c9a9331d..f99fd995edb 100644
--- a/jstests/core/upsert_and.js
+++ b/jstests/core/upsert_and.js
@@ -3,35 +3,37 @@ var res;
coll = db.upsert4;
coll.drop();
-res = coll.update({_id: 1, $and: [{c: 1}, {d: 1}], a: 12} , {$inc: {y: 1}} , true);
+res = coll.update({_id: 1, $and: [{c: 1}, {d: 1}], a: 12}, {$inc: {y: 1}}, true);
assert.writeOK(res);
assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, a: 12, y: 1});
coll.remove({});
-res = coll.update({$and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update({$and: [{c: 1}, {d: 1}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeOK(res);
assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1});
coll.remove({});
-res = coll.update({$and: [{c: 1}, {d: 1}, {$or: [{x:1}]}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update({$and: [{c: 1}, {d: 1}, {$or: [{x: 1}]}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeOK(res);
-assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, x:1});
+assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1, x: 1});
coll.remove({});
-res = coll.update({$and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update({$and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeOK(res);
assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1});
coll.remove({});
-res = coll.update({r: {$gt: 3}, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update(
+ {r: {$gt: 3}, $and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeOK(res);
assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1});
coll.remove({});
-res = coll.update({r: /s/, $and: [{c: 1}, {d: 1}], $or: [{x:1}, {x:2}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update(
+ {r: /s/, $and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeOK(res);
assert.docEq(coll.findOne(), {_id: 1, c: 1, d: 1});
coll.remove({});
-res = coll.update({c:2, $and: [{c: 1}, {d: 1}]} , {$setOnInsert: {_id: 1}} , true);
+res = coll.update({c: 2, $and: [{c: 1}, {d: 1}]}, {$setOnInsert: {_id: 1}}, true);
assert.writeError(res);
diff --git a/jstests/core/upsert_fields.js b/jstests/core/upsert_fields.js
index c4e322b6910..86f5032f6dd 100644
--- a/jstests/core/upsert_fields.js
+++ b/jstests/core/upsert_fields.js
@@ -7,7 +7,7 @@ coll.drop();
var upsertedResult = function(query, expr) {
coll.drop();
- result = coll.update(query, expr, { upsert : true });
+ result = coll.update(query, expr, {upsert: true});
return result;
};
@@ -34,93 +34,95 @@ var upsertedXVal = function(query, expr) {
assert.neq(null, upsertedId({}, {}));
// _id autogenerated with add'l fields
-assert.neq(null, upsertedId({}, { a : 1 }));
-assert.eq(1, upsertedField({}, { a : 1 }, "a"));
-assert.neq(null, upsertedId({}, { $set : { a : 1 } }, "a"));
-assert.eq(1, upsertedField({}, { $set : { a : 1 } }, "a"));
-assert.neq(null, upsertedId({}, { $setOnInsert : { a : 1 } }, "a"));
-assert.eq(1, upsertedField({}, { $setOnInsert : { a : 1 } }, "a"));
+assert.neq(null, upsertedId({}, {a: 1}));
+assert.eq(1, upsertedField({}, {a: 1}, "a"));
+assert.neq(null, upsertedId({}, {$set: {a: 1}}, "a"));
+assert.eq(1, upsertedField({}, {$set: {a: 1}}, "a"));
+assert.neq(null, upsertedId({}, {$setOnInsert: {a: 1}}, "a"));
+assert.eq(1, upsertedField({}, {$setOnInsert: {a: 1}}, "a"));
// _id not autogenerated
-assert.eq(1, upsertedId({}, { _id : 1 }));
-assert.eq(1, upsertedId({}, { $set : { _id : 1 } }));
-assert.eq(1, upsertedId({}, { $setOnInsert : { _id : 1 } }));
+assert.eq(1, upsertedId({}, {_id: 1}));
+assert.eq(1, upsertedId({}, {$set: {_id: 1}}));
+assert.eq(1, upsertedId({}, {$setOnInsert: {_id: 1}}));
// _id type error
-assert.writeError(upsertedResult({}, { _id : [1, 2] }));
-assert.writeError(upsertedResult({}, { _id : undefined }));
-assert.writeError(upsertedResult({}, { $set : { _id : [1, 2] } }));
+assert.writeError(upsertedResult({}, {_id: [1, 2]}));
+assert.writeError(upsertedResult({}, {_id: undefined}));
+assert.writeError(upsertedResult({}, {$set: {_id: [1, 2]}}));
// Fails in v2.6, no validation
-assert.writeError(upsertedResult({}, { $setOnInsert : { _id : undefined } }));
+assert.writeError(upsertedResult({}, {$setOnInsert: {_id: undefined}}));
// Check things that are pretty much the same for replacement and $op style upserts
for (var i = 0; i < 3; i++) {
-
// replacement style
var expr = {};
// $op style
if (i == 1)
- expr = { $set : { a : 1 } };
+ expr = {
+ $set: {a: 1}
+ };
if (i == 2)
- expr = { $setOnInsert : { a : 1 } };
+ expr = {
+ $setOnInsert: {a: 1}
+ };
var isReplStyle = i == 0;
// _id extracted
- assert.eq(1, upsertedId({ _id : 1 }, expr));
+ assert.eq(1, upsertedId({_id: 1}, expr));
// All below fail in v2.6, no $ops for _id and $and/$or not explored
- assert.eq(1, upsertedId({ _id : { $eq : 1 } }, expr));
- assert.eq(1, upsertedId({ _id : { $all : [1] } }, expr));
- assert.eq(1, upsertedId({ $and : [{ _id : 1 }] }, expr));
- assert.eq(1, upsertedId({ $and : [{ _id : { $eq : 1 } }] }, expr));
- assert.eq(1, upsertedId({ $or : [{ _id : 1 }] }, expr));
- assert.eq(1, upsertedId({ $or : [{ _id : { $eq : 1 } }] }, expr));
+ assert.eq(1, upsertedId({_id: {$eq: 1}}, expr));
+ assert.eq(1, upsertedId({_id: {$all: [1]}}, expr));
+ assert.eq(1, upsertedId({$and: [{_id: 1}]}, expr));
+ assert.eq(1, upsertedId({$and: [{_id: {$eq: 1}}]}, expr));
+ assert.eq(1, upsertedId({$or: [{_id: 1}]}, expr));
+ assert.eq(1, upsertedId({$or: [{_id: {$eq: 1}}]}, expr));
// _id not extracted, autogenerated
- assert.neq(1, upsertedId({ _id : { $gt : 1 } }, expr));
- assert.neq(1, upsertedId({ _id : { $ne : 1 } }, expr));
- assert.neq(1, upsertedId({ _id : { $in : [1] } }, expr));
- assert.neq(1, upsertedId({ _id : { $in : [1, 2] } }, expr));
- assert.neq(1, upsertedId({ _id : { $elemMatch : { $eq : 1 } } }, expr));
- assert.neq(1, upsertedId({ _id : { $exists : true } }, expr));
- assert.neq(1, upsertedId({ _id : { $not : { $eq : 1 } } }, expr));
- assert.neq(1, upsertedId({ $or : [{ _id : 1 }, { _id : 1 }] }, expr));
- assert.neq(1, upsertedId({ $or : [{ _id : { $eq : 1 } }, { _id : 2 }] }, expr));
- assert.neq(1, upsertedId({ $nor : [{ _id : 1 }] }, expr));
- assert.neq(1, upsertedId({ $nor : [{ _id : { $eq : 1 } }] }, expr));
- assert.neq(1, upsertedId({ $nor : [{ _id : { $eq : 1 } }, { _id : 1 }] }, expr));
+ assert.neq(1, upsertedId({_id: {$gt: 1}}, expr));
+ assert.neq(1, upsertedId({_id: {$ne: 1}}, expr));
+ assert.neq(1, upsertedId({_id: {$in: [1]}}, expr));
+ assert.neq(1, upsertedId({_id: {$in: [1, 2]}}, expr));
+ assert.neq(1, upsertedId({_id: {$elemMatch: {$eq: 1}}}, expr));
+ assert.neq(1, upsertedId({_id: {$exists: true}}, expr));
+ assert.neq(1, upsertedId({_id: {$not: {$eq: 1}}}, expr));
+ assert.neq(1, upsertedId({$or: [{_id: 1}, {_id: 1}]}, expr));
+ assert.neq(1, upsertedId({$or: [{_id: {$eq: 1}}, {_id: 2}]}, expr));
+ assert.neq(1, upsertedId({$nor: [{_id: 1}]}, expr));
+ assert.neq(1, upsertedId({$nor: [{_id: {$eq: 1}}]}, expr));
+ assert.neq(1, upsertedId({$nor: [{_id: {$eq: 1}}, {_id: 1}]}, expr));
// _id extraction errors
- assert.writeError(upsertedResult({ _id : [1, 2] }, expr));
- assert.writeError(upsertedResult({ _id : undefined }, expr));
- assert.writeError(upsertedResult({ _id : { $eq : [1, 2] } }, expr));
- assert.writeError(upsertedResult({ _id : { $eq : undefined } }, expr));
- assert.writeError(upsertedResult({ _id : { $all : [ 1, 2 ] } }, expr));
+ assert.writeError(upsertedResult({_id: [1, 2]}, expr));
+ assert.writeError(upsertedResult({_id: undefined}, expr));
+ assert.writeError(upsertedResult({_id: {$eq: [1, 2]}}, expr));
+ assert.writeError(upsertedResult({_id: {$eq: undefined}}, expr));
+ assert.writeError(upsertedResult({_id: {$all: [1, 2]}}, expr));
// All below fail in v2.6, non-_id fields completely ignored
- assert.writeError(upsertedResult({ $and : [{ _id : 1 }, { _id : 1 }] }, expr));
- assert.writeError(upsertedResult({ $and : [{ _id : { $eq : 1 } }, { _id : 2 }] }, expr));
- assert.writeError(upsertedResult({ _id : 1, "_id.x" : 1 }, expr));
- assert.writeError(upsertedResult({ _id : { x : 1 }, "_id.x" : 1 }, expr));
+ assert.writeError(upsertedResult({$and: [{_id: 1}, {_id: 1}]}, expr));
+ assert.writeError(upsertedResult({$and: [{_id: {$eq: 1}}, {_id: 2}]}, expr));
+ assert.writeError(upsertedResult({_id: 1, "_id.x": 1}, expr));
+ assert.writeError(upsertedResult({_id: {x: 1}, "_id.x": 1}, expr));
// Special case - nested _id fields only used on $op-style updates
if (isReplStyle) {
// Fails in v2.6
- assert.writeError(upsertedResult({ "_id.x" : 1, "_id.y" : 2 }, expr));
- }
- else {
+ assert.writeError(upsertedResult({"_id.x": 1, "_id.y": 2}, expr));
+ } else {
// Fails in v2.6
- assert.docEq({ x : 1, y : 2 }, upsertedId({ "_id.x" : 1, "_id.y" : 2 }, expr));
+ assert.docEq({x: 1, y: 2}, upsertedId({"_id.x": 1, "_id.y": 2}, expr));
}
}
// regex _id in expression is an error, no regex ids allowed
-assert.writeError(upsertedResult({}, { _id : /abc/ }));
+assert.writeError(upsertedResult({}, {_id: /abc/}));
// Fails in v2.6, no validation
-assert.writeError(upsertedResult({}, { $set : { _id : /abc/ } }));
+assert.writeError(upsertedResult({}, {$set: {_id: /abc/}}));
// no regex _id extraction from query
-assert.neq(/abc/, upsertedId({ _id : /abc/ }, {}));
+assert.neq(/abc/, upsertedId({_id: /abc/}, {}));
//
// Regular field extraction
@@ -128,102 +130,103 @@ assert.neq(/abc/, upsertedId({ _id : /abc/ }, {}));
// Check things that are pretty much the same for replacement and $op style upserts
for (var i = 0; i < 3; i++) {
-
// replacement style
var expr = {};
// $op style
if (i == 1)
- expr = { $set : { a : 1 } };
+ expr = {
+ $set: {a: 1}
+ };
if (i == 2)
- expr = { $setOnInsert : { a : 1 } };
+ expr = {
+ $setOnInsert: {a: 1}
+ };
var isReplStyle = i == 0;
// field extracted when replacement style
var value = isReplStyle ? undefined : 1;
- assert.eq(value, upsertedXVal({ x : 1 }, expr));
- assert.eq(value, upsertedXVal({ x : { $eq : 1 } }, expr));
- assert.eq(value, upsertedXVal({ x : { $all : [1] } }, expr));
- assert.eq(value, upsertedXVal({ $and : [{ x : 1 }] }, expr));
- assert.eq(value, upsertedXVal({ $and : [{ x : { $eq : 1 } }] }, expr));
- assert.eq(value, upsertedXVal({ $or : [{ x : 1 }] }, expr));
- assert.eq(value, upsertedXVal({ $or : [{ x : { $eq : 1 } }] }, expr));
+ assert.eq(value, upsertedXVal({x: 1}, expr));
+ assert.eq(value, upsertedXVal({x: {$eq: 1}}, expr));
+ assert.eq(value, upsertedXVal({x: {$all: [1]}}, expr));
+ assert.eq(value, upsertedXVal({$and: [{x: 1}]}, expr));
+ assert.eq(value, upsertedXVal({$and: [{x: {$eq: 1}}]}, expr));
+ assert.eq(value, upsertedXVal({$or: [{x: 1}]}, expr));
+ assert.eq(value, upsertedXVal({$or: [{x: {$eq: 1}}]}, expr));
// Special types extracted
- assert.eq(isReplStyle ? undefined : [1, 2], upsertedXVal({ x : [1, 2] }, expr));
+ assert.eq(isReplStyle ? undefined : [1, 2], upsertedXVal({x: [1, 2]}, expr));
// field not extracted
- assert.eq(undefined, upsertedXVal({ x : { $gt : 1 } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $ne : 1 } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $in : [1] } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $in : [1, 2] } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $elemMatch : { $eq : 1 } } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $exists : true } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $not : { $eq : 1 } } }, expr));
- assert.eq(undefined, upsertedXVal({ $or : [{ x : 1 }, { x : 1 }] }, expr));
- assert.eq(undefined, upsertedXVal({ $or : [{ x : { $eq : 1 } }, { x : 2 }] }, expr));
- assert.eq(undefined, upsertedXVal({ $nor : [{ x : 1 }] }, expr));
- assert.eq(undefined, upsertedXVal({ $nor : [{ x : { $eq : 1 } }] }, expr));
- assert.eq(undefined, upsertedXVal({ $nor : [{ x : { $eq : 1 } }, { x : 1 }] }, expr));
+ assert.eq(undefined, upsertedXVal({x: {$gt: 1}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$ne: 1}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$in: [1]}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$in: [1, 2]}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$elemMatch: {$eq: 1}}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$exists: true}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$not: {$eq: 1}}}, expr));
+ assert.eq(undefined, upsertedXVal({$or: [{x: 1}, {x: 1}]}, expr));
+ assert.eq(undefined, upsertedXVal({$or: [{x: {$eq: 1}}, {x: 2}]}, expr));
+ assert.eq(undefined, upsertedXVal({$nor: [{x: 1}]}, expr));
+ assert.eq(undefined, upsertedXVal({$nor: [{x: {$eq: 1}}]}, expr));
+ assert.eq(undefined, upsertedXVal({$nor: [{x: {$eq: 1}}, {x: 1}]}, expr));
// field extraction errors
- assert.writeError(upsertedResult({ x : undefined }, expr));
+ assert.writeError(upsertedResult({x: undefined}, expr));
if (!isReplStyle) {
- assert.writeError(upsertedResult({ x : { 'x.x' : 1 } }, expr));
- assert.writeError(upsertedResult({ x : { $all : [ 1, 2 ] } }, expr));
- assert.writeError(upsertedResult({ $and : [{ x : 1 }, { x : 1 }] }, expr));
- assert.writeError(upsertedResult({ $and : [{ x : { $eq : 1 } }, { x : 2 }] }, expr));
- }
- else {
- assert.eq(undefined, upsertedXVal({ x : { 'x.x' : 1 } }, expr));
- assert.eq(undefined, upsertedXVal({ x : { $all : [ 1, 2 ] } }, expr));
- assert.eq(undefined, upsertedXVal({ $and : [{ x : 1 }, { x : 1 }] }, expr));
- assert.eq(undefined, upsertedXVal({ $and : [{ x : { $eq : 1 } }, { x : 2 }] }, expr));
+ assert.writeError(upsertedResult({x: {'x.x': 1}}, expr));
+ assert.writeError(upsertedResult({x: {$all: [1, 2]}}, expr));
+ assert.writeError(upsertedResult({$and: [{x: 1}, {x: 1}]}, expr));
+ assert.writeError(upsertedResult({$and: [{x: {$eq: 1}}, {x: 2}]}, expr));
+ } else {
+ assert.eq(undefined, upsertedXVal({x: {'x.x': 1}}, expr));
+ assert.eq(undefined, upsertedXVal({x: {$all: [1, 2]}}, expr));
+ assert.eq(undefined, upsertedXVal({$and: [{x: 1}, {x: 1}]}, expr));
+ assert.eq(undefined, upsertedXVal({$and: [{x: {$eq: 1}}, {x: 2}]}, expr));
}
// nested field extraction
- var docValue = isReplStyle ? undefined : { x : 1 };
- assert.docEq(docValue, upsertedXVal({ "x.x" : 1 }, expr));
- assert.docEq(docValue, upsertedXVal({ "x.x" : { $eq : 1 } }, expr));
- assert.docEq(docValue, upsertedXVal({ "x.x" : { $all : [1] } }, expr));
- assert.docEq(docValue, upsertedXVal({ $and : [{ "x.x" : 1 }] }, expr));
- assert.docEq(docValue, upsertedXVal({ $and : [{ "x.x" : { $eq : 1 } }] }, expr));
- assert.docEq(docValue, upsertedXVal({ $or : [{ "x.x" : 1 }] }, expr));
- assert.docEq(docValue, upsertedXVal({ $or : [{ "x.x" : { $eq : 1 } }] }, expr));
+ var docValue = isReplStyle ? undefined : {
+ x: 1
+ };
+ assert.docEq(docValue, upsertedXVal({"x.x": 1}, expr));
+ assert.docEq(docValue, upsertedXVal({"x.x": {$eq: 1}}, expr));
+ assert.docEq(docValue, upsertedXVal({"x.x": {$all: [1]}}, expr));
+ assert.docEq(docValue, upsertedXVal({$and: [{"x.x": 1}]}, expr));
+ assert.docEq(docValue, upsertedXVal({$and: [{"x.x": {$eq: 1}}]}, expr));
+ assert.docEq(docValue, upsertedXVal({$or: [{"x.x": 1}]}, expr));
+ assert.docEq(docValue, upsertedXVal({$or: [{"x.x": {$eq: 1}}]}, expr));
// nested field conflicts
if (!isReplStyle) {
- assert.writeError(upsertedResult({ x : 1, "x.x" : 1 }, expr));
- assert.writeError(upsertedResult({ x : {}, "x.x" : 1 }, expr));
- assert.writeError(upsertedResult({ x : { x : 1 }, "x.x" : 1 }, expr));
- assert.writeError(upsertedResult({ x : { x : 1 }, "x.y" : 1 }, expr));
- assert.writeError(upsertedResult({ x : [1, { x : 1 }], "x.x" : 1 }, expr));
- }
- else {
- assert.eq(undefined, upsertedXVal({ x : 1, "x.x" : 1 }, expr));
- assert.eq(undefined, upsertedXVal({ x : {}, "x.x" : 1 }, expr));
- assert.eq(undefined, upsertedXVal({ x : { x : 1 }, "x.x" : 1 }, expr));
- assert.eq(undefined, upsertedXVal({ x : { x : 1 }, "x.y" : 1 }, expr));
- assert.eq(undefined, upsertedXVal({ x : [1, { x : 1 }], "x.x" : 1 }, expr));
+ assert.writeError(upsertedResult({x: 1, "x.x": 1}, expr));
+ assert.writeError(upsertedResult({x: {}, "x.x": 1}, expr));
+ assert.writeError(upsertedResult({x: {x: 1}, "x.x": 1}, expr));
+ assert.writeError(upsertedResult({x: {x: 1}, "x.y": 1}, expr));
+ assert.writeError(upsertedResult({x: [1, {x: 1}], "x.x": 1}, expr));
+ } else {
+ assert.eq(undefined, upsertedXVal({x: 1, "x.x": 1}, expr));
+ assert.eq(undefined, upsertedXVal({x: {}, "x.x": 1}, expr));
+ assert.eq(undefined, upsertedXVal({x: {x: 1}, "x.x": 1}, expr));
+ assert.eq(undefined, upsertedXVal({x: {x: 1}, "x.y": 1}, expr));
+ assert.eq(undefined, upsertedXVal({x: [1, {x: 1}], "x.x": 1}, expr));
}
-
}
// regex field in expression is a value
-assert.eq(/abc/, upsertedXVal({}, { x : /abc/ }));
-assert.eq(/abc/, upsertedXVal({}, { $set : { x : /abc/ } }));
+assert.eq(/abc/, upsertedXVal({}, {x: /abc/}));
+assert.eq(/abc/, upsertedXVal({}, {$set: {x: /abc/}}));
// no regex field extraction from query unless $eq'd
-assert.eq(/abc/, upsertedXVal({ x : { $eq : /abc/ } }, { $set : { a : 1 } }));
-assert.eq(undefined, upsertedXVal({ x : /abc/ }, { $set : { a : 1 } }));
+assert.eq(/abc/, upsertedXVal({x: {$eq: /abc/}}, {$set: {a: 1}}));
+assert.eq(undefined, upsertedXVal({x: /abc/}, {$set: {a: 1}}));
// replacement-style updates ignore conflicts *except* on _id field
-assert.eq(1, upsertedId({ _id : 1, x : [1, { x : 1 }], "x.x" : 1 }, {}));
+assert.eq(1, upsertedId({_id: 1, x: [1, {x: 1}], "x.x": 1}, {}));
// DBRef special cases
// make sure query doesn't error when creating doc for insert, since it's missing the rest of the
// dbref fields. SERVER-14024
// Fails in 2.6.1->3
-assert.docEq(tojson(DBRef("a", 1)), upsertedXVal({ "x.$id" : 1 },
- { $set : { x : DBRef("a", 1) } }));
+assert.docEq(tojson(DBRef("a", 1)), upsertedXVal({"x.$id": 1}, {$set: {x: DBRef("a", 1)}}));
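The upsert_fields.js hunks above are likewise pure reformatting; what the assertions verify is which query predicates an upsert copies into the newly inserted document. A condensed sketch of the core rule for $op-style updates, assuming a running mongod and an illustrative collection name:

var sketch = db.upsert_fields_sketch;  // illustrative collection
sketch.drop();
// Equality predicates from the query are carried into the inserted document...
sketch.update({x: 1}, {$set: {a: 1}}, {upsert: true});
assert.eq(1, sketch.findOne().x);
sketch.drop();
// ...while non-equality predicates such as $gt are not.
sketch.update({x: {$gt: 1}}, {$set: {a: 1}}, {upsert: true});
assert.eq(undefined, sketch.findOne().x);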
diff --git a/jstests/core/upsert_shell.js b/jstests/core/upsert_shell.js
index a4cf3f4b923..5b7ac501aef 100644
--- a/jstests/core/upsert_shell.js
+++ b/jstests/core/upsert_shell.js
@@ -4,49 +4,49 @@ t = db.upsert1;
t.drop();
// make sure the new _id is returned when $mods are used
-l = t.update( { x : 1 } , { $inc : { y : 1 } } , true );
-assert( l.getUpsertedId() , "A1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id.str , t.findOne()._id.str , "A2" );
+l = t.update({x: 1}, {$inc: {y: 1}}, true);
+assert(l.getUpsertedId(), "A1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id.str, t.findOne()._id.str, "A2");
// make sure the new _id is returned on a replacement (no $mod in update)
-l = t.update( { x : 2 } , { x : 2 , y : 3 } , true );
-assert( l.getUpsertedId() , "B1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id.str , t.findOne( { x : 2 } )._id.str , "B2" );
-assert.eq( 2 , t.find().count() , "B3" );
+l = t.update({x: 2}, {x: 2, y: 3}, true);
+assert(l.getUpsertedId(), "B1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id.str, t.findOne({x: 2})._id.str, "B2");
+assert.eq(2, t.find().count(), "B3");
// use the _id from the query for the insert
-l = t.update({_id:3}, {$set: {a:'123'}}, true);
-assert( l.getUpsertedId() , "C1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id , 3 , "C2 - " + tojson(l) );
+l = t.update({_id: 3}, {$set: {a: '123'}}, true);
+assert(l.getUpsertedId(), "C1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id, 3, "C2 - " + tojson(l));
// test with an embedded doc for the _id field
-l = t.update({_id:{a:1}}, {$set: {a:123}}, true);
-assert( l.getUpsertedId() , "D1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id , {a:1} , "D2 - " + tojson(l) );
+l = t.update({_id: {a: 1}}, {$set: {a: 123}}, true);
+assert(l.getUpsertedId(), "D1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id, {a: 1}, "D2 - " + tojson(l));
// test with a range query
-l = t.update({_id: {$gt:100}}, {$set: {a:123}}, true);
-assert( l.getUpsertedId() , "E1 - " + tojson(l) );
-assert.neq( l.getUpsertedId()._id , 100 , "E2 - " + tojson(l) );
+l = t.update({_id: {$gt: 100}}, {$set: {a: 123}}, true);
+assert(l.getUpsertedId(), "E1 - " + tojson(l));
+assert.neq(l.getUpsertedId()._id, 100, "E2 - " + tojson(l));
// test with an _id query
-l = t.update({_id: 1233}, {$set: {a:123}}, true);
-assert( l.getUpsertedId() , "F1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id , 1233 , "F2 - " + tojson(l) );
+l = t.update({_id: 1233}, {$set: {a: 123}}, true);
+assert(l.getUpsertedId(), "F1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id, 1233, "F2 - " + tojson(l));
// test with an embedded _id query
-l = t.update({_id: {a:1, b:2}}, {$set: {a:123}}, true);
-assert( l.getUpsertedId() , "G1 - " + tojson(l) );
-assert.eq( l.getUpsertedId()._id , {a:1, b:2} , "G2 - " + tojson(l) );
+l = t.update({_id: {a: 1, b: 2}}, {$set: {a: 123}}, true);
+assert(l.getUpsertedId(), "G1 - " + tojson(l));
+assert.eq(l.getUpsertedId()._id, {a: 1, b: 2}, "G2 - " + tojson(l));
// test with no _id inserted
db.no_id.drop();
-db.createCollection("no_id", {autoIndexId:false});
-l = db.no_id.update({foo:1}, {$set:{a:1}}, true);
-assert( l.getUpsertedId() , "H1 - " + tojson(l) );
-assert( !l.hasWriteError(), "H1.5 No error expected - " + tojson(l) );
-assert.eq( 0, db.no_id.getIndexes().length, "H2" );
-assert.eq( 1, db.no_id.count(), "H3" );
-var newDoc = db.no_id.findOne();
+db.createCollection("no_id", {autoIndexId: false});
+l = db.no_id.update({foo: 1}, {$set: {a: 1}}, true);
+assert(l.getUpsertedId(), "H1 - " + tojson(l));
+assert(!l.hasWriteError(), "H1.5 No error expected - " + tojson(l));
+assert.eq(0, db.no_id.getIndexes().length, "H2");
+assert.eq(1, db.no_id.count(), "H3");
+var newDoc = db.no_id.findOne();
delete newDoc["_id"];
-assert.eq( { foo : 1, a : 1 }, newDoc, "H4" );
+assert.eq({foo: 1, a: 1}, newDoc, "H4");
diff --git a/jstests/core/useindexonobjgtlt.js b/jstests/core/useindexonobjgtlt.js
index 026b1802843..c790019af9e 100755..100644
--- a/jstests/core/useindexonobjgtlt.js
+++ b/jstests/core/useindexonobjgtlt.js
@@ -1,14 +1,10 @@
t = db.factories;
t.drop();
-t.insert( { name: "xyz", metro: { city: "New York", state: "NY" } } );
-t.ensureIndex( { metro : 1 } );
+t.insert({name: "xyz", metro: {city: "New York", state: "NY"}});
+t.ensureIndex({metro: 1});
-assert( db.factories.find().count() );
+assert(db.factories.find().count());
-assert.eq( 1, db.factories.find( { metro: { city: "New York", state: "NY" } } )
- .hint({metro: 1})
- .count() );
+assert.eq(1, db.factories.find({metro: {city: "New York", state: "NY"}}).hint({metro: 1}).count());
-assert.eq( 1, db.factories.find( { metro: { $gte : { city: "New York" } } } )
- .hint({metro: 1})
- .count() );
+assert.eq(1, db.factories.find({metro: {$gte: {city: "New York"}}}).hint({metro: 1}).count());
diff --git a/jstests/core/user_management_helpers.js b/jstests/core/user_management_helpers.js
index e06302deba5..4db373a1def 100644
--- a/jstests/core/user_management_helpers.js
+++ b/jstests/core/user_management_helpers.js
@@ -10,87 +10,98 @@ function assertHasRole(rolesArray, roleName, roleDB) {
assert(false, "role " + roleName + "@" + roleDB + " not found in array: " + tojson(rolesArray));
}
-
function runTest(db) {
- var db = db.getSiblingDB("user_management_helpers");
- db.dropDatabase();
- db.dropAllUsers();
-
- db.createUser({user: "spencer", pwd: "password", roles: ['readWrite']});
- db.createUser({user: "andy", pwd: "password", roles: ['readWrite']});
-
- // Test getUser
- var userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Test getUsers
- var users = db.getUsers();
- assert.eq(2, users.length);
- assert(users[0].user == 'spencer' || users[1].user == 'spencer');
- assert(users[0].user == 'andy' || users[1].user == 'andy');
- assert.eq(1, users[0].roles.length);
- assert.eq(1, users[1].roles.length);
- assertHasRole(users[0].roles, "readWrite", db.getName());
- assertHasRole(users[1].roles, "readWrite", db.getName());
-
- // Granting roles to nonexistent user fails
- assert.throws(function() { db.grantRolesToUser("fakeUser", ['dbAdmin']); });
- // Granting non-existent role fails
- assert.throws(function() { db.grantRolesToUser("spencer", ['dbAdmin', 'fakeRole']); });
-
- userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Granting a role you already have is no problem
- db.grantRolesToUser("spencer", ['readWrite', 'dbAdmin']);
- userObj = db.getUser('spencer');
- assert.eq(2, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
- assertHasRole(userObj.roles, "dbAdmin", db.getName());
-
- // Revoking roles the user doesn't have is fine
- db.revokeRolesFromUser("spencer", ['dbAdmin', 'read']);
- userObj = db.getUser('spencer');
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "readWrite", db.getName());
-
- // Update user
- db.updateUser("spencer", {customData: {hello: 'world'}, roles:['read']});
- userObj = db.getUser('spencer');
- assert.eq('world', userObj.customData.hello);
- assert.eq(1, userObj.roles.length);
- assertHasRole(userObj.roles, "read", db.getName());
-
- // Test dropUser
- db.dropUser('andy');
- assert.eq(null, db.getUser('andy'));
-
- // Test dropAllUsers
- db.dropAllUsers();
- assert.eq(0, db.getUsers().length);
-
- // Test password digestion
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: true});});
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], digestPassword: false});});
- assert.throws(function() {
- db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor: 'foo'});});
- db.createUser({user:'user1', pwd:'x', roles:[], passwordDigestor:"server"});
- db.createUser({user:'user2', pwd:'x', roles:[], passwordDigestor:"client"});
- assert(db.auth('user1', 'x'));
- assert(db.auth('user2', 'x'));
-
- assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: true});});
- assert.throws(function() { db.updateUser('user1', {pwd:'y', digestPassword: false});});
- assert.throws(function() { db.updateUser('user1', {pwd:'y', passwordDigestor: 'foo'});});
- db.updateUser('user1', {pwd:'y', passwordDigestor: 'server'});
- db.updateUser('user2', {pwd:'y', passwordDigestor: 'client'});
- assert(db.auth('user1', 'y'));
- assert(db.auth('user2', 'y'));
-
+ var db = db.getSiblingDB("user_management_helpers");
+ db.dropDatabase();
+ db.dropAllUsers();
+
+ db.createUser({user: "spencer", pwd: "password", roles: ['readWrite']});
+ db.createUser({user: "andy", pwd: "password", roles: ['readWrite']});
+
+ // Test getUser
+ var userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Test getUsers
+ var users = db.getUsers();
+ assert.eq(2, users.length);
+ assert(users[0].user == 'spencer' || users[1].user == 'spencer');
+ assert(users[0].user == 'andy' || users[1].user == 'andy');
+ assert.eq(1, users[0].roles.length);
+ assert.eq(1, users[1].roles.length);
+ assertHasRole(users[0].roles, "readWrite", db.getName());
+ assertHasRole(users[1].roles, "readWrite", db.getName());
+
+ // Granting roles to nonexistent user fails
+ assert.throws(function() {
+ db.grantRolesToUser("fakeUser", ['dbAdmin']);
+ });
+ // Granting non-existent role fails
+ assert.throws(function() {
+ db.grantRolesToUser("spencer", ['dbAdmin', 'fakeRole']);
+ });
+
+ userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Granting a role you already have is no problem
+ db.grantRolesToUser("spencer", ['readWrite', 'dbAdmin']);
+ userObj = db.getUser('spencer');
+ assert.eq(2, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+ assertHasRole(userObj.roles, "dbAdmin", db.getName());
+
+ // Revoking roles the user doesn't have is fine
+ db.revokeRolesFromUser("spencer", ['dbAdmin', 'read']);
+ userObj = db.getUser('spencer');
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "readWrite", db.getName());
+
+ // Update user
+ db.updateUser("spencer", {customData: {hello: 'world'}, roles: ['read']});
+ userObj = db.getUser('spencer');
+ assert.eq('world', userObj.customData.hello);
+ assert.eq(1, userObj.roles.length);
+ assertHasRole(userObj.roles, "read", db.getName());
+
+ // Test dropUser
+ db.dropUser('andy');
+ assert.eq(null, db.getUser('andy'));
+
+ // Test dropAllUsers
+ db.dropAllUsers();
+ assert.eq(0, db.getUsers().length);
+
+ // Test password digestion
+ assert.throws(function() {
+ db.createUser({user: 'user1', pwd: 'x', roles: [], digestPassword: true});
+ });
+ assert.throws(function() {
+ db.createUser({user: 'user1', pwd: 'x', roles: [], digestPassword: false});
+ });
+ assert.throws(function() {
+ db.createUser({user: 'user1', pwd: 'x', roles: [], passwordDigestor: 'foo'});
+ });
+ db.createUser({user: 'user1', pwd: 'x', roles: [], passwordDigestor: "server"});
+ db.createUser({user: 'user2', pwd: 'x', roles: [], passwordDigestor: "client"});
+ assert(db.auth('user1', 'x'));
+ assert(db.auth('user2', 'x'));
+
+ assert.throws(function() {
+ db.updateUser('user1', {pwd: 'y', digestPassword: true});
+ });
+ assert.throws(function() {
+ db.updateUser('user1', {pwd: 'y', digestPassword: false});
+ });
+ assert.throws(function() {
+ db.updateUser('user1', {pwd: 'y', passwordDigestor: 'foo'});
+ });
+ db.updateUser('user1', {pwd: 'y', passwordDigestor: 'server'});
+ db.updateUser('user2', {pwd: 'y', passwordDigestor: 'client'});
+ assert(db.auth('user1', 'y'));
+ assert(db.auth('user2', 'y'));
}
try {
diff --git a/jstests/core/validate_cmd_ns.js b/jstests/core/validate_cmd_ns.js
index 4caf038a129..93f4a798a61 100644
--- a/jstests/core/validate_cmd_ns.js
+++ b/jstests/core/validate_cmd_ns.js
@@ -5,21 +5,20 @@
*/
// Note: _exec gives you the raw response from the server.
-var res = db.$cmd.find({ whatsmyuri: 1 })._exec().next();
+var res = db.$cmd.find({whatsmyuri: 1})._exec().next();
assert.commandFailed(res);
assert(res.errmsg.indexOf('bad numberToReturn') > -1);
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(0)._exec().next();
+res = db.$cmd.find({whatsmyuri: 1}).limit(0)._exec().next();
assert.commandFailed(res);
assert(res.errmsg.indexOf('bad numberToReturn') > -1);
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(-2)._exec().next();
+res = db.$cmd.find({whatsmyuri: 1}).limit(-2)._exec().next();
assert.commandFailed(res);
assert(res.errmsg.indexOf('bad numberToReturn') > -1);
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(1).next();
+res = db.$cmd.find({whatsmyuri: 1}).limit(1).next();
assert.commandWorked(res);
-res = db.$cmd.find({ whatsmyuri: 1 }).limit(-1).next();
+res = db.$cmd.find({whatsmyuri: 1}).limit(-1).next();
assert.commandWorked(res);
-
diff --git a/jstests/core/validate_pseudocommand_ns.js b/jstests/core/validate_pseudocommand_ns.js
index bee314d421d..8faf4f802d8 100644
--- a/jstests/core/validate_pseudocommand_ns.js
+++ b/jstests/core/validate_pseudocommand_ns.js
@@ -4,10 +4,20 @@
if (!db.getMongo().useReadCommands()) {
var inprog = db.$cmd.sys.inprog;
// nToReturn must be 1 or -1.
- assert.doesNotThrow(function() { inprog.find().limit(-1).next(); });
- assert.doesNotThrow(function() { inprog.find().limit(1).next(); });
- assert.throws(function() { inprog.find().limit(0).next(); });
- assert.throws(function() { inprog.find().limit(-2).next(); });
- assert.throws(function() { inprog.find().limit(99).next(); });
+ assert.doesNotThrow(function() {
+ inprog.find().limit(-1).next();
+ });
+ assert.doesNotThrow(function() {
+ inprog.find().limit(1).next();
+ });
+ assert.throws(function() {
+ inprog.find().limit(0).next();
+ });
+ assert.throws(function() {
+ inprog.find().limit(-2).next();
+ });
+ assert.throws(function() {
+ inprog.find().limit(99).next();
+ });
}
})();
diff --git a/jstests/core/validate_user_documents.js b/jstests/core/validate_user_documents.js
index 825e1e7de11..9c12e6075a7 100644
--- a/jstests/core/validate_user_documents.js
+++ b/jstests/core/validate_user_documents.js
@@ -1,11 +1,10 @@
// Ensure that inserts and updates of the system.users collection validate the schema of inserted
// documents.
-mydb = db.getSisterDB( "validate_user_documents" );
+mydb = db.getSisterDB("validate_user_documents");
function assertGLEOK(status) {
- assert(status.ok && status.err === null,
- "Expected OK status object; found " + tojson(status));
+ assert(status.ok && status.err === null, "Expected OK status object; found " + tojson(status));
}
function assertGLENotOK(status) {
@@ -21,34 +20,26 @@ mydb.dropAllUsers();
//
// V0 user document; insert should fail.
-assert.commandFailed(mydb.runCommand({ createUser:1,
- user: "spencer",
- pwd: "password",
- readOnly: true }));
+assert.commandFailed(
+ mydb.runCommand({createUser: 1, user: "spencer", pwd: "password", readOnly: true}));
// V1 user document; insert should fail.
-assert.commandFailed(mydb.runCommand({ createUser:1,
- user: "spencer",
- userSource: "test2",
- roles: ["dbAdmin"] }));
+assert.commandFailed(
+ mydb.runCommand({createUser: 1, user: "spencer", userSource: "test2", roles: ["dbAdmin"]}));
// Valid V2 user document; insert should succeed.
-assert.commandWorked(mydb.runCommand({ createUser: "spencer",
- pwd: "password",
- roles: ["dbAdmin"] }));
+assert.commandWorked(mydb.runCommand({createUser: "spencer", pwd: "password", roles: ["dbAdmin"]}));
// Valid V2 user document; insert should succeed.
-assert.commandWorked(mydb.runCommand({ createUser: "andy",
- pwd: "password",
- roles: [{role: "dbAdmin",
- db: "validate_user_documents",
- hasRole: true,
- canDelegate: false}] }));
+assert.commandWorked(mydb.runCommand({
+ createUser: "andy",
+ pwd: "password",
+ roles:
+ [{role: "dbAdmin", db: "validate_user_documents", hasRole: true, canDelegate: false}]
+}));
// Non-existent role; insert should fail
-assert.commandFailed(mydb.runCommand({ createUser: "bob",
- pwd: "password",
- roles: ["fakeRole123"] }));
+assert.commandFailed(mydb.runCommand({createUser: "bob", pwd: "password", roles: ["fakeRole123"]}));
//
// Tests of the update path
diff --git a/jstests/core/verify_update_mods.js b/jstests/core/verify_update_mods.js
index 7806b7804d3..efd0beb80cc 100644
--- a/jstests/core/verify_update_mods.js
+++ b/jstests/core/verify_update_mods.js
@@ -3,81 +3,81 @@ var res;
t = db.update_mods;
t.drop();
-t.save({_id:1});
-res = t.update({}, {$set:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$set: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$unset:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$unset: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$inc:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$inc: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$mul:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$mul: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$push:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$push: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$pushAll:{a:[1]}});
+t.save({_id: 1});
+res = t.update({}, {$pushAll: {a: [1]}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$addToSet:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$addToSet: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$pull:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$pull: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$pop:{a:true}});
+t.save({_id: 1});
+res = t.update({}, {$pop: {a: true}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$rename:{a:"b"}});
+t.save({_id: 1});
+res = t.update({}, {$rename: {a: "b"}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$bit:{a:{and:NumberLong(1)}}});
+t.save({_id: 1});
+res = t.update({}, {$bit: {a: {and: NumberLong(1)}}});
assert.writeOK(res);
t.remove({});
// SERVER-3223 test $bit can do an upsert
-t.update({_id:1}, {$bit:{a:{and:NumberLong(3)}}}, true);
-assert.eq(t.findOne({_id:1}).a, NumberLong(0), "$bit upsert with and");
-t.update({_id:2}, {$bit:{b:{or:NumberLong(3)}}}, true);
-assert.eq(t.findOne({_id:2}).b, NumberLong(3), "$bit upsert with or (long)");
-t.update({_id:3}, {$bit:{"c.d":{or:NumberInt(3)}}}, true);
-assert.eq(t.findOne({_id:3}).c.d, NumberInt(3), "$bit upsert with or (int)");
+t.update({_id: 1}, {$bit: {a: {and: NumberLong(3)}}}, true);
+assert.eq(t.findOne({_id: 1}).a, NumberLong(0), "$bit upsert with and");
+t.update({_id: 2}, {$bit: {b: {or: NumberLong(3)}}}, true);
+assert.eq(t.findOne({_id: 2}).b, NumberLong(3), "$bit upsert with or (long)");
+t.update({_id: 3}, {$bit: {"c.d": {or: NumberInt(3)}}}, true);
+assert.eq(t.findOne({_id: 3}).c.d, NumberInt(3), "$bit upsert with or (int)");
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$currentDate:{a:true}});
+t.save({_id: 1});
+res = t.update({}, {$currentDate: {a: true}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$max:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$max: {a: 1}});
assert.writeOK(res);
t.remove({});
-t.save({_id:1});
-res = t.update({}, {$min:{a:1}});
+t.save({_id: 1});
+res = t.update({}, {$min: {a: 1}});
assert.writeOK(res);
t.remove({});
diff --git a/jstests/core/where1.js b/jstests/core/where1.js
index 6e3d693b996..85466901016 100644
--- a/jstests/core/where1.js
+++ b/jstests/core/where1.js
@@ -1,39 +1,49 @@
-t = db.getCollection( "where1" );
+t = db.getCollection("where1");
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
-assert.eq( 1 , t.find( function(){ return this.a == 2; } ).length() , "A" );
+assert.eq(1,
+ t.find(function() {
+ return this.a == 2;
+ }).length(),
+ "A");
-assert.eq( 1 , t.find( { $where : "return this.a == 2" } ).toArray().length , "B" );
-assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "C" );
+assert.eq(1, t.find({$where: "return this.a == 2"}).toArray().length, "B");
+assert.eq(1, t.find({$where: "this.a == 2"}).toArray().length, "C");
-assert.eq( 1 , t.find( "this.a == 2" ).toArray().length , "D" );
+assert.eq(1, t.find("this.a == 2").toArray().length, "D");
// SERVER-12117
// positional $ projection should fail on a $where query
-assert.throws( function() { t.find( { $where : "return this.a;" }, { 'a.$' : 1 } ).itcount(); } );
+assert.throws(function() {
+ t.find({$where: "return this.a;"}, {'a.$': 1}).itcount();
+});
// SERVER-12439: $where must be top-level
-assert.throws( function() { t.find( { a: 1, b: { $where : "this.a;" } } ).itcount(); } );
-assert.throws( function() { t.find( { a: { $where : "this.a;" } } ).itcount(); } );
-assert.throws( function() {
- t.find( { a: { $elemMatch : { $where : "this.a;" } } } ).itcount();
-} );
-assert.throws( function() {
- t.find( { a: 3, "b.c": { $where : "this.a;" } } ).itcount();
-} );
+assert.throws(function() {
+ t.find({a: 1, b: {$where: "this.a;"}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$where: "this.a;"}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$elemMatch: {$where: "this.a;"}}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: 3, "b.c": {$where: "this.a;"}}).itcount();
+});
// SERVER-13503
-assert.throws( function() {
- t.find( { a: { $elemMatch : { $where : "this.a;", b : 1 } } } ).itcount();
-} );
-assert.throws( function() {
- t.find( { a: { $elemMatch : { b : 1, $where : "this.a;" } } } ).itcount();
-} );
-assert.throws( function() {
- t.find( { a: { $elemMatch : { $and : [ { b : 1 }, { $where : "this.a;" } ] } } } ).itcount();
-} );
+assert.throws(function() {
+ t.find({a: {$elemMatch: {$where: "this.a;", b: 1}}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$elemMatch: {b: 1, $where: "this.a;"}}}).itcount();
+});
+assert.throws(function() {
+ t.find({a: {$elemMatch: {$and: [{b: 1}, {$where: "this.a;"}]}}}).itcount();
+});
diff --git a/jstests/core/where2.js b/jstests/core/where2.js
index 9262b3076b3..8b4314beb88 100644
--- a/jstests/core/where2.js
+++ b/jstests/core/where2.js
@@ -1,10 +1,10 @@
-t = db.getCollection( "where2" );
+t = db.getCollection("where2");
t.drop();
-t.save( { a : 1 } );
-t.save( { a : 2 } );
-t.save( { a : 3 } );
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
-assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "A" );
-assert.eq( 1 , t.find( { $where : "\nthis.a == 2" } ).toArray().length , "B" );
+assert.eq(1, t.find({$where: "this.a == 2"}).toArray().length, "A");
+assert.eq(1, t.find({$where: "\nthis.a == 2"}).toArray().length, "B");
diff --git a/jstests/core/where3.js b/jstests/core/where3.js
index 2eb53f82370..633276489a5 100644
--- a/jstests/core/where3.js
+++ b/jstests/core/where3.js
@@ -2,12 +2,16 @@
t = db.where3;
t.drop();
-t.save( { returned_date : 5 } );
-t.save( { returned_date : 6 } );
+t.save({returned_date: 5});
+t.save({returned_date: 6});
-assert.eq( 1, t.find(function(){ return this.returned_date == 5; }).count(), "A" );
-assert.eq( 1, t.find({ $where : "return this.returned_date == 5;" }).count(), "B" );
-assert.eq( 1, t.find({ $where : "this.returned_date == 5;" }).count(), "C" );
-assert.eq( 1, t.find({ $where : "(this.returned_date == 5);" }).count(), "D" );
-assert.eq( 1, t.find({ $where : "((this.returned_date == 5) && (5 == 5));"}).count(), "E" );
-assert.eq( 1, t.find({ $where : "x=this.returned_date;x == 5;" }).count(), "F" );
+assert.eq(1,
+ t.find(function() {
+ return this.returned_date == 5;
+ }).count(),
+ "A");
+assert.eq(1, t.find({$where: "return this.returned_date == 5;"}).count(), "B");
+assert.eq(1, t.find({$where: "this.returned_date == 5;"}).count(), "C");
+assert.eq(1, t.find({$where: "(this.returned_date == 5);"}).count(), "D");
+assert.eq(1, t.find({$where: "((this.returned_date == 5) && (5 == 5));"}).count(), "E");
+assert.eq(1, t.find({$where: "x=this.returned_date;x == 5;"}).count(), "F");
diff --git a/jstests/core/where4.js b/jstests/core/where4.js
index 685665c982a..28d621374c3 100644
--- a/jstests/core/where4.js
+++ b/jstests/core/where4.js
@@ -1,27 +1,37 @@
db.where4.drop();
-db.system.js.insert( { _id : "w4" , value : "5" } );
+db.system.js.insert({_id: "w4", value: "5"});
-db.where4.insert( { x : 1 , y : 1 } );
-db.where4.insert( { x : 2 , y : 1 } );
+db.where4.insert({x: 1, y: 1});
+db.where4.insert({x: 2, y: 1});
-db.where4.update( { $where : function() { return this.x == 1; } } ,
- { $inc : { y : 1 } } , false , true );
+db.where4.update(
+ {
+ $where: function() {
+ return this.x == 1;
+ }
+ },
+ {$inc: {y: 1}},
+ false,
+ true);
-
-assert.eq( 2 , db.where4.findOne( { x : 1 } ).y );
-assert.eq( 1 , db.where4.findOne( { x : 2 } ).y );
+assert.eq(2, db.where4.findOne({x: 1}).y);
+assert.eq(1, db.where4.findOne({x: 2}).y);
// Test that where queries work with stored javascript
-db.system.js.save( { _id : "where4_addOne" , value : function(x) { return x + 1; } } );
+db.system.js.save({
+ _id: "where4_addOne",
+ value: function(x) {
+ return x + 1;
+ }
+});
-db.where4.update( { $where : "where4_addOne(this.x) == 2" } ,
- { $inc : { y : 1 } } , false , true );
+db.where4.update({$where: "where4_addOne(this.x) == 2"}, {$inc: {y: 1}}, false, true);
-assert.eq( 3 , db.where4.findOne( { x : 1 } ).y );
-assert.eq( 1 , db.where4.findOne( { x : 2 } ).y );
+assert.eq(3, db.where4.findOne({x: 1}).y);
+assert.eq(1, db.where4.findOne({x: 2}).y);
-db.system.js.remove( { _id : "where4_equalsOne" } );
+db.system.js.remove({_id: "where4_equalsOne"});
-db.system.js.remove( { _id : "w4" } );
+db.system.js.remove({_id: "w4"});
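The where4.js rewrite above reflows the same logic; the point of the test is that functions saved in system.js are visible to server-side $where evaluation. A compact sketch under the same assumptions (illustrative names, running mongod):

db.system.js.save({
    _id: "addOneSketch",  // illustrative stored-function name
    value: function(x) {
        return x + 1;
    }
});
db.where4_sketch.drop();
db.where4_sketch.insert({x: 1});
db.where4_sketch.insert({x: 2});
// The stored function can be called from a $where expression:
assert.eq(1, db.where4_sketch.find({$where: "addOneSketch(this.x) == 2"}).count());
db.system.js.remove({_id: "addOneSketch"});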
diff --git a/jstests/core/write_result.js b/jstests/core/write_result.js
index 8cf505f688e..86486089c68 100644
--- a/jstests/core/write_result.js
+++ b/jstests/core/write_result.js
@@ -12,7 +12,7 @@ var result = null;
//
// Basic insert
coll.remove({});
-printjson( result = coll.insert({ foo : "bar" }) );
+printjson(result = coll.insert({foo: "bar"}));
assert.eq(result.nInserted, 1);
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 0);
@@ -28,7 +28,7 @@ assert.eq(coll.count(), 1);
// Basic upsert (using save)
coll.remove({});
var id = new ObjectId();
-printjson( result = coll.save({ _id : id, foo : "bar" }) );
+printjson(result = coll.save({_id: id, foo: "bar"}));
assert.eq(result.nInserted, 0);
assert.eq(result.nUpserted, 1);
assert.eq(result.nMatched, 0);
@@ -43,8 +43,8 @@ assert.eq(coll.count(), 1);
//
// Basic update
coll.remove({});
-coll.insert({ foo : "bar" });
-printjson( result = coll.update({ foo : "bar" }, { $set : { foo : "baz" } }) );
+coll.insert({foo: "bar"});
+printjson(result = coll.update({foo: "bar"}, {$set: {foo: "baz"}}));
assert.eq(result.nInserted, 0);
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 1);
@@ -59,11 +59,9 @@ assert.eq(coll.count(), 1);
//
// Basic multi-update
coll.remove({});
-coll.insert({ foo : "bar" });
-coll.insert({ foo : "bar", set : ['value'] });
-printjson( result = coll.update({ foo : "bar" },
- { $addToSet : { set : 'value' } },
- { multi : true }) );
+coll.insert({foo: "bar"});
+coll.insert({foo: "bar", set: ['value']});
+printjson(result = coll.update({foo: "bar"}, {$addToSet: {set: 'value'}}, {multi: true}));
assert.eq(result.nInserted, 0);
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 2);
@@ -78,8 +76,8 @@ assert.eq(coll.count(), 2);
//
// Basic remove
coll.remove({});
-coll.insert({ foo : "bar" });
-printjson( result = coll.remove({}) );
+coll.insert({foo: "bar"});
+printjson(result = coll.remove({}));
assert.eq(result.nInserted, 0);
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 0);
@@ -95,8 +93,8 @@ assert.eq(coll.count(), 0);
// Insert with error
coll.remove({});
var id = new ObjectId();
-coll.insert({ _id : id, foo : "bar" });
-printjson( result = coll.insert({ _id : id, foo : "baz" }) );
+coll.insert({_id: id, foo: "bar"});
+printjson(result = coll.insert({_id: id, foo: "baz"}));
assert.eq(result.nInserted, 0);
assert(result.getWriteError());
assert(result.getWriteError().errmsg);
@@ -106,8 +104,8 @@ assert.eq(coll.count(), 1);
//
// Update with error
coll.remove({});
-coll.insert({ foo : "bar" });
-printjson( result = coll.update({ foo : "bar" }, { $invalid : "expr" }) );
+coll.insert({foo: "bar"});
+printjson(result = coll.update({foo: "bar"}, {$invalid: "expr"}));
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 0);
if (coll.getMongo().writeMode() == "commands")
@@ -121,13 +119,12 @@ assert.eq(coll.count(), 1);
// Multi-update with error
coll.remove({});
var id = new ObjectId();
-for (var i = 0; i < 10; ++i) coll.insert({ value : NumberInt(i) });
-coll.insert({ value : "not a number" });
+for (var i = 0; i < 10; ++i)
+ coll.insert({value: NumberInt(i)});
+coll.insert({value: "not a number"});
// $bit operator fails when the field is not an integer
// Note that multi-updates do not currently report partial stats if they fail
-printjson( result = coll.update({},
- { $bit : { value : { and : NumberInt(0) } } },
- { multi : true }) );
+printjson(result = coll.update({}, {$bit: {value: {and: NumberInt(0)}}}, {multi: true}));
assert.eq(result.nUpserted, 0);
assert.eq(result.nMatched, 0);
if (coll.getMongo().writeMode() == "commands")
@@ -140,7 +137,7 @@ assert.eq(coll.count(), 11);
//
// Bulk insert
coll.remove({});
-printjson( result = coll.insert([{ foo : "bar" }, { foo : "baz" }]) );
+printjson(result = coll.insert([{foo: "bar"}, {foo: "baz"}]));
assert.eq(result.nInserted, 2);
assert(!result.hasWriteErrors());
assert(!result.hasWriteConcernError());
@@ -151,8 +148,7 @@ assert.eq(coll.count(), 2);
coll.remove({});
var id = new ObjectId();
// Second insert fails with duplicate _id
-printjson( result = coll.insert([{ _id : id, foo : "bar" },
- { _id : id, foo : "baz" }]) );
+printjson(result = coll.insert([{_id: id, foo: "bar"}, {_id: id, foo: "baz"}]));
assert.eq(result.nInserted, 1);
assert(result.hasWriteErrors());
assert(!result.hasWriteConcernError());
@@ -162,8 +158,8 @@ assert.eq(coll.count(), 1);
// Custom write concern
// (More detailed write concern tests require custom/replicated servers)
coll.remove({});
-coll.setWriteConcern({ w : "majority" });
-printjson( result = coll.insert({ foo : "bar" }) );
+coll.setWriteConcern({w: "majority"});
+printjson(result = coll.insert({foo: "bar"}));
assert.eq(result.nInserted, 1);
assert(!result.getWriteError());
assert(!result.getWriteConcernError());
@@ -174,13 +170,10 @@ coll.unsetWriteConcern();
// Write concern error
// NOTE: In a replica set write concern is checked after write
coll.remove({});
-var wRes = assert.writeError( coll.insert({ foo : "bar" }, { writeConcern : { w : "invalid" } }) );
+var wRes = assert.writeError(coll.insert({foo: "bar"}, {writeConcern: {w: "invalid"}}));
var res = assert.commandWorked(db.isMaster());
var replSet = res.hasOwnProperty("setName");
if (!replSet && coll.getMongo().writeMode() == "commands")
assert.eq(coll.count(), 0, "not-replset || command mode");
-else // compatibility,
+else // compatibility,
assert.eq(coll.count(), 1, "replset || non-command mode");
-
-
-