author    clang-format-7.0.1 <adam.martin@10gen.com>        2019-07-26 18:20:35 -0400
committer ADAM David Alan Martin <adam.martin@10gen.com>    2019-07-27 11:02:23 -0400
commit    134a4083953270e8a11430395357fb70a29047ad (patch)
tree      dd428e1230e31d92b20b393dfdc17ffe7fa79cb6 /jstests/aggregation
parent    1e46b5049003f427047e723ea5fab15b5a9253ca (diff)
download  mongo-134a4083953270e8a11430395357fb70a29047ad.tar.gz
SERVER-41772 Apply clang-format 7.0.1 to the codebase
Diffstat (limited to 'jstests/aggregation')
-rw-r--r--  jstests/aggregation/bugs/cond.js | 137
-rw-r--r--  jstests/aggregation/bugs/cursor_timeout.js | 135
-rw-r--r--  jstests/aggregation/bugs/explain_options_helper.js | 30
-rw-r--r--  jstests/aggregation/bugs/firstlast.js | 225
-rw-r--r--  jstests/aggregation/bugs/groupMissing.js | 94
-rw-r--r--  jstests/aggregation/bugs/lookup_unwind_getmore.js | 68
-rw-r--r--  jstests/aggregation/bugs/lookup_unwind_killcursor.js | 62
-rw-r--r--  jstests/aggregation/bugs/match.js | 321
-rw-r--r--  jstests/aggregation/bugs/match_swap_limit.js | 22
-rw-r--r--  jstests/aggregation/bugs/reverseArray.js | 38
-rw-r--r--  jstests/aggregation/bugs/server10176.js | 104
-rw-r--r--  jstests/aggregation/bugs/server11118.js | 299
-rw-r--r--  jstests/aggregation/bugs/server11675.js | 441
-rw-r--r--  jstests/aggregation/bugs/server12015.js | 124
-rw-r--r--  jstests/aggregation/bugs/server14421.js | 74
-rw-r--r--  jstests/aggregation/bugs/server14670.js | 24
-rw-r--r--  jstests/aggregation/bugs/server14691.js | 74
-rw-r--r--  jstests/aggregation/bugs/server14872.js | 48
-rw-r--r--  jstests/aggregation/bugs/server17224.js | 32
-rw-r--r--  jstests/aggregation/bugs/server17943.js | 134
-rw-r--r--  jstests/aggregation/bugs/server18198.js | 114
-rw-r--r--  jstests/aggregation/bugs/server18222.js | 72
-rw-r--r--  jstests/aggregation/bugs/server18427.js | 299
-rw-r--r--  jstests/aggregation/bugs/server20163.js | 334
-rw-r--r--  jstests/aggregation/bugs/server20168.js | 59
-rw-r--r--  jstests/aggregation/bugs/server20169.js | 118
-rw-r--r--  jstests/aggregation/bugs/server21632.js | 145
-rw-r--r--  jstests/aggregation/bugs/server22093.js | 52
-rw-r--r--  jstests/aggregation/bugs/server22580.js | 79
-rw-r--r--  jstests/aggregation/bugs/server25590.js | 24
-rw-r--r--  jstests/aggregation/bugs/server26462.js | 40
-rw-r--r--  jstests/aggregation/bugs/server37750.js | 120
-rw-r--r--  jstests/aggregation/bugs/server4588.js | 99
-rw-r--r--  jstests/aggregation/bugs/server4589.js | 126
-rw-r--r--  jstests/aggregation/bugs/server4638.js | 2
-rw-r--r--  jstests/aggregation/bugs/server5012.js | 14
-rw-r--r--  jstests/aggregation/bugs/server533.js | 56
-rw-r--r--  jstests/aggregation/bugs/server6074.js | 148
-rw-r--r--  jstests/aggregation/bugs/server6125.js | 6
-rw-r--r--  jstests/aggregation/bugs/server6127.js | 28
-rw-r--r--  jstests/aggregation/bugs/server6147.js | 70
-rw-r--r--  jstests/aggregation/bugs/server6179.js | 98
-rw-r--r--  jstests/aggregation/bugs/server6185.js | 20
-rw-r--r--  jstests/aggregation/bugs/server6530.js | 48
-rw-r--r--  jstests/aggregation/bugs/server6779.js | 26
-rw-r--r--  jstests/aggregation/bugs/server7695_isodates.js | 469
-rw-r--r--  jstests/aggregation/bugs/server7781.js | 232
-rw-r--r--  jstests/aggregation/bugs/server8141.js | 72
-rw-r--r--  jstests/aggregation/bugs/server8164.js | 280
-rw-r--r--  jstests/aggregation/bugs/server8568.js | 72
-rw-r--r--  jstests/aggregation/bugs/server8581.js | 64
-rw-r--r--  jstests/aggregation/bugs/server9444.js | 90
-rw-r--r--  jstests/aggregation/bugs/server9625.js | 116
-rw-r--r--  jstests/aggregation/bugs/skip_limit_overflow.js | 193
-rw-r--r--  jstests/aggregation/bugs/sort_arrays.js | 20
-rw-r--r--  jstests/aggregation/bugs/substr.js | 4
-rw-r--r--  jstests/aggregation/explain.js | 33
-rw-r--r--  jstests/aggregation/explain_limit.js | 123
-rw-r--r--  jstests/aggregation/explain_writing_aggs.js | 152
-rw-r--r--  jstests/aggregation/expressions/arrayToObject.js | 128
-rw-r--r--  jstests/aggregation/expressions/collation_expressions.js | 378
-rw-r--r--  jstests/aggregation/expressions/convert.js | 621
-rw-r--r--  jstests/aggregation/expressions/date_expressions_with_timezones.js | 150
-rw-r--r--  jstests/aggregation/expressions/date_from_parts.js | 1794
-rw-r--r--  jstests/aggregation/expressions/date_from_string.js | 1490
-rw-r--r--  jstests/aggregation/expressions/date_from_string_on_error.js | 274
-rw-r--r--  jstests/aggregation/expressions/date_from_string_on_null.js | 100
-rw-r--r--  jstests/aggregation/expressions/date_to_parts.js | 571
-rw-r--r--  jstests/aggregation/expressions/date_to_string.js | 494
-rw-r--r--  jstests/aggregation/expressions/date_to_string_on_null.js | 107
-rw-r--r--  jstests/aggregation/expressions/expression_mod.js | 160
-rw-r--r--  jstests/aggregation/expressions/expression_trigonometric.js | 499
-rw-r--r--  jstests/aggregation/expressions/floor_ceil.js | 62
-rw-r--r--  jstests/aggregation/expressions/in.js | 389
-rw-r--r--  jstests/aggregation/expressions/indexof_array.js | 82
-rw-r--r--  jstests/aggregation/expressions/indexof_bytes.js | 202
-rw-r--r--  jstests/aggregation/expressions/indexof_codepoints.js | 174
-rw-r--r--  jstests/aggregation/expressions/let.js | 209
-rw-r--r--  jstests/aggregation/expressions/merge_objects.js | 291
-rw-r--r--  jstests/aggregation/expressions/objectToArray.js | 179
-rw-r--r--  jstests/aggregation/expressions/object_ids_for_date_expressions.js | 157
-rw-r--r--  jstests/aggregation/expressions/reduce.js | 103
-rw-r--r--  jstests/aggregation/expressions/regex.js | 917
-rw-r--r--  jstests/aggregation/expressions/regex_limits.js | 215
-rw-r--r--  jstests/aggregation/expressions/round_trunc.js | 193
-rw-r--r--  jstests/aggregation/expressions/size.js | 27
-rw-r--r--  jstests/aggregation/expressions/split.js | 118
-rw-r--r--  jstests/aggregation/expressions/switch.js | 262
-rw-r--r--  jstests/aggregation/expressions/switch_errors.js | 96
-rw-r--r--  jstests/aggregation/expressions/trim.js | 157
-rw-r--r--  jstests/aggregation/extras/utils.js | 2
-rw-r--r--  jstests/aggregation/group_conversion_to_distinct_scan.js | 1288
-rw-r--r--  jstests/aggregation/illegal_reference_in_match.js | 48
-rw-r--r--  jstests/aggregation/match_swapping_renamed_fields.js | 320
-rw-r--r--  jstests/aggregation/mongos_merge.js | 801
-rw-r--r--  jstests/aggregation/mongos_slaveok.js | 52
-rw-r--r--  jstests/aggregation/optimize_away_pipeline.js | 550
-rw-r--r--  jstests/aggregation/pipeline_pass_through_from_mongos.js | 258
-rw-r--r--  jstests/aggregation/shard_targeting.js | 707
-rw-r--r--  jstests/aggregation/sharded_agg_cleanup_on_error.js | 263
-rw-r--r--  jstests/aggregation/single_stage_alias_error.js | 67
-rw-r--r--  jstests/aggregation/sources/addFields/use_cases.js | 93
-rw-r--r--  jstests/aggregation/sources/addFields/weather.js | 157
-rw-r--r--  jstests/aggregation/sources/bucket/collation_bucket.js | 162
-rw-r--r--  jstests/aggregation/sources/bucketauto/collation_bucketauto.js | 103
-rw-r--r--  jstests/aggregation/sources/collStats/count.js | 112
-rw-r--r--  jstests/aggregation/sources/collStats/query_exec_stats.js | 132
-rw-r--r--  jstests/aggregation/sources/collStats/shard_host_info.js | 96
-rw-r--r--  jstests/aggregation/sources/facet/inner_graphlookup.js | 60
-rw-r--r--  jstests/aggregation/sources/facet/inner_lookup.js | 51
-rw-r--r--  jstests/aggregation/sources/facet/use_cases.js | 310
-rw-r--r--  jstests/aggregation/sources/geonear/collation_geonear.js | 138
-rw-r--r--  jstests/aggregation/sources/geonear/distancefield_and_includelocs.js | 319
-rw-r--r--  jstests/aggregation/sources/geonear/mindistance_and_maxdistance.js | 191
-rw-r--r--  jstests/aggregation/sources/geonear/requires_geo_index.js | 38
-rw-r--r--  jstests/aggregation/sources/graphLookup/airports.js | 78
-rw-r--r--  jstests/aggregation/sources/graphLookup/basic.js | 133
-rw-r--r--  jstests/aggregation/sources/graphLookup/collation_graphlookup.js | 108
-rw-r--r--  jstests/aggregation/sources/graphLookup/error.js | 253
-rw-r--r--  jstests/aggregation/sources/graphLookup/filter.js | 58
-rw-r--r--  jstests/aggregation/sources/graphLookup/nested_objects.js | 70
-rw-r--r--  jstests/aggregation/sources/graphLookup/socialite.js | 46
-rw-r--r--  jstests/aggregation/sources/graphLookup/variables.js | 31
-rw-r--r--  jstests/aggregation/sources/group/collation_group.js | 128
-rw-r--r--  jstests/aggregation/sources/group/group_by_variable.js | 30
-rw-r--r--  jstests/aggregation/sources/group/numeric_grouping.js | 28
-rw-r--r--  jstests/aggregation/sources/group/text_score_grouping.js | 35
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_absorb_match.js | 46
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_contains_text.js | 91
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_non_correlated.js | 92
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_non_correlated_prefix.js | 122
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_sort_limit.js | 35
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_subpipeline.js | 615
-rw-r--r--  jstests/aggregation/sources/lookup/lookup_subpipeline_geonear.js | 34
-rw-r--r--  jstests/aggregation/sources/lookup/profile_lookup.js | 50
-rw-r--r--  jstests/aggregation/sources/match/collation_match.js | 85
-rw-r--r--  jstests/aggregation/sources/match/expr_match.js | 87
-rw-r--r--  jstests/aggregation/sources/match/text_search_requires_index.js | 34
-rw-r--r--  jstests/aggregation/sources/merge/all_modes.js | 556
-rw-r--r--  jstests/aggregation/sources/merge/batch_writes.js | 112
-rw-r--r--  jstests/aggregation/sources/merge/bypass_doc_validation.js | 366
-rw-r--r--  jstests/aggregation/sources/merge/disallowed_in_lookup.js | 76
-rw-r--r--  jstests/aggregation/sources/merge/exchange_explain.js | 308
-rw-r--r--  jstests/aggregation/sources/merge/merge_to_referenced_collection.js | 181
-rw-r--r--  jstests/aggregation/sources/merge/merge_to_same_collection.js | 22
-rw-r--r--  jstests/aggregation/sources/merge/mode_fail_insert.js | 284
-rw-r--r--  jstests/aggregation/sources/merge/mode_keep_existing_insert.js | 723
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_discard.js | 454
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_fail.js | 191
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_insert.js | 711
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_discard.js | 482
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_fail.js | 158
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_insert.js | 1250
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_discard.js | 387
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_fail.js | 187
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_insert.js | 393
-rw-r--r--  jstests/aggregation/sources/merge/on_fields_validation.js | 258
-rw-r--r--  jstests/aggregation/sources/merge/requires_unique_index.js | 692
-rw-r--r--  jstests/aggregation/sources/merge/use_cases.js | 167
-rw-r--r--  jstests/aggregation/sources/out/out_in_lookup_not_allowed.js | 66
-rw-r--r--  jstests/aggregation/sources/out/replace_collection.js | 102
-rw-r--r--  jstests/aggregation/sources/out/required_last_position.js | 19
-rw-r--r--  jstests/aggregation/sources/project/remove_redundant_projects.js | 285
-rw-r--r--  jstests/aggregation/sources/redact/collation_redact.js | 67
-rw-r--r--  jstests/aggregation/sources/replaceRoot/address.js | 163
-rw-r--r--  jstests/aggregation/sources/replaceRoot/use_cases.js | 33
-rw-r--r--  jstests/aggregation/sources/sort/collation_sort.js | 158
-rw-r--r--  jstests/aggregation/sources/sort/collation_sort_japanese.js | 271
-rw-r--r--  jstests/aggregation/sources/sort/explain_sort.js | 94
-rw-r--r--  jstests/aggregation/sources/unset/unset.js | 55
-rw-r--r--  jstests/aggregation/stages/skip_with_limit.js | 64
-rw-r--r--  jstests/aggregation/testall.js | 1773
-rw-r--r--  jstests/aggregation/testutils.js | 268
-rw-r--r--  jstests/aggregation/use_query_project_and_sort.js | 99
-rw-r--r--  jstests/aggregation/use_query_projection.js | 198
-rw-r--r--  jstests/aggregation/use_query_sort.js | 126
-rw-r--r--  jstests/aggregation/variables/layered_variables.js | 19
-rw-r--r--  jstests/aggregation/variables/remove_system_variable.js | 108
178 files changed, 18612 insertions, 19123 deletions
diff --git a/jstests/aggregation/bugs/cond.js b/jstests/aggregation/bugs/cond.js
index 313316f4418..84831ca11a7 100644
--- a/jstests/aggregation/bugs/cond.js
+++ b/jstests/aggregation/bugs/cond.js
@@ -1,88 +1,87 @@
// $cond returns the evaluated second argument if the first evaluates to true but the evaluated
// third argument if the first evaluates to false.
(function() {
- "use strict";
- load('jstests/aggregation/extras/utils.js');
+"use strict";
+load('jstests/aggregation/extras/utils.js');
- const coll = db.jstests_aggregation_cond;
- coll.drop();
+const coll = db.jstests_aggregation_cond;
+coll.drop();
- coll.save({});
+coll.save({});
- function assertError(expectedErrorCode, condSpec) {
- assertErrorCode(coll, {$project: {a: {$cond: condSpec}}}, expectedErrorCode);
- }
+function assertError(expectedErrorCode, condSpec) {
+ assertErrorCode(coll, {$project: {a: {$cond: condSpec}}}, expectedErrorCode);
+}
- function assertResult(expectedResult, arg) {
- assert.eq(expectedResult, coll.aggregate({$project: {a: {$cond: arg}}}).toArray()[0].a);
- }
+function assertResult(expectedResult, arg) {
+ assert.eq(expectedResult, coll.aggregate({$project: {a: {$cond: arg}}}).toArray()[0].a);
+}
- // Wrong number of args.
- assertError(16020, []);
- assertError(16020, [1]);
- assertError(16020, [false]);
- assertError(16020, [1, 1]);
- assertError(16020, [1, 1, null, 1]);
- assertError(16020, [1, 1, 1, undefined]);
+// Wrong number of args.
+assertError(16020, []);
+assertError(16020, [1]);
+assertError(16020, [false]);
+assertError(16020, [1, 1]);
+assertError(16020, [1, 1, null, 1]);
+assertError(16020, [1, 1, 1, undefined]);
- // Bad object cases.
- assertError(17080, {"else": 1, then: 1});
- assertError(17081, {"if": 1, "else": 1});
- assertError(17082, {"if": 1, then: 1});
- assertError(17083, {asdf: 1, then: 1});
+// Bad object cases.
+assertError(17080, {"else": 1, then: 1});
+assertError(17081, {"if": 1, "else": 1});
+assertError(17082, {"if": 1, then: 1});
+assertError(17083, {asdf: 1, then: 1});
- // Literal expressions.
- assertResult(1, [true, 1, 2]);
- assertResult(2, [false, 1, 2]);
+// Literal expressions.
+assertResult(1, [true, 1, 2]);
+assertResult(2, [false, 1, 2]);
- // Order independence for object case.
- assertResult(1, {"if": true, "then": 1, "else": 2});
- assertResult(1, {"if": true, "else": 2, "then": 1});
- assertResult(1, {"then": 1, "if": true, "else": 2});
- assertResult(1, {"then": 1, "else": 2, "if": true});
- assertResult(1, {"else": 2, "then": 1, "if": true});
- assertResult(1, {"else": 2, "if": true, "then": 1});
+// Order independence for object case.
+assertResult(1, {"if": true, "then": 1, "else": 2});
+assertResult(1, {"if": true, "else": 2, "then": 1});
+assertResult(1, {"then": 1, "if": true, "else": 2});
+assertResult(1, {"then": 1, "else": 2, "if": true});
+assertResult(1, {"else": 2, "then": 1, "if": true});
+assertResult(1, {"else": 2, "if": true, "then": 1});
- // Computed expressions.
- assertResult(1, [{$and: []}, {$add: [1]}, {$add: [1, 1]}]);
- assertResult(2, [{$or: []}, {$add: [1]}, {$add: [1, 1]}]);
+// Computed expressions.
+assertResult(1, [{$and: []}, {$add: [1]}, {$add: [1, 1]}]);
+assertResult(2, [{$or: []}, {$add: [1]}, {$add: [1, 1]}]);
- assert(coll.drop());
- assert.writeOK(coll.insert({t: true, f: false, x: 'foo', y: 'bar'}));
+assert(coll.drop());
+assert.writeOK(coll.insert({t: true, f: false, x: 'foo', y: 'bar'}));
- // Field path expressions.
- assertResult('foo', ['$t', '$x', '$y']);
- assertResult('bar', ['$f', '$x', '$y']);
+// Field path expressions.
+assertResult('foo', ['$t', '$x', '$y']);
+assertResult('bar', ['$f', '$x', '$y']);
- assert(coll.drop());
- assert.writeOK(coll.insert({}));
+assert(coll.drop());
+assert.writeOK(coll.insert({}));
- // Coerce to bool.
- assertResult('a', [1, 'a', 'b']);
- assertResult('a', ['', 'a', 'b']);
- assertResult('b', [0, 'a', 'b']);
+// Coerce to bool.
+assertResult('a', [1, 'a', 'b']);
+assertResult('a', ['', 'a', 'b']);
+assertResult('b', [0, 'a', 'b']);
- // Nested.
- assert(coll.drop());
- assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'no'}));
- assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'yes'}));
- assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'yes'}));
- assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'no'}));
- assert.eq(
- ['breakfast', 'brunch', 'dinner', 'linner'],
- coll.aggregate([
- {
- $project: {
- meal: {
- $cond: [
- {$eq: ['$noonSense', 'am']},
- {$cond: [{$eq: ['$mealCombined', 'yes']}, 'brunch', 'breakfast']},
- {$cond: [{$eq: ['$mealCombined', 'yes']}, 'linner', 'dinner']}
- ]
+// Nested.
+assert(coll.drop());
+assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'no'}));
+assert.writeOK(coll.insert({noonSense: 'am', mealCombined: 'yes'}));
+assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'yes'}));
+assert.writeOK(coll.insert({noonSense: 'pm', mealCombined: 'no'}));
+assert.eq(['breakfast', 'brunch', 'dinner', 'linner'],
+ coll.aggregate([
+ {
+ $project: {
+ meal: {
+ $cond: [
+ {$eq: ['$noonSense', 'am']},
+ {$cond: [{$eq: ['$mealCombined', 'yes']}, 'brunch', 'breakfast']},
+ {$cond: [{$eq: ['$mealCombined', 'yes']}, 'linner', 'dinner']}
+ ]
+ }
}
- }
- },
- {$sort: {meal: 1}}
- ])
- .map(doc => doc.meal));
+ },
+ {$sort: {meal: 1}}
+ ])
+ .map(doc => doc.meal));
}());
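
As a quick reference for the semantics this test pins down, here is a minimal shell sketch; the collection name cond_demo is illustrative rather than part of the commit:

// $cond yields its second argument when the first evaluates to true and its
// third argument otherwise; the object form accepts if/then/else in any order.
const condDemo = db.cond_demo;
condDemo.drop();
condDemo.insert({t: true});
condDemo.aggregate([{
    $project: {
        viaArray: {$cond: ['$t', 'yes', 'no']},
        viaObject: {$cond: {'if': '$t', 'then': 'yes', 'else': 'no'}}
    }
}]).toArray();  // both projected fields evaluate to 'yes'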
diff --git a/jstests/aggregation/bugs/cursor_timeout.js b/jstests/aggregation/bugs/cursor_timeout.js
index f579fba407d..21260074d26 100644
--- a/jstests/aggregation/bugs/cursor_timeout.js
+++ b/jstests/aggregation/bugs/cursor_timeout.js
@@ -7,83 +7,82 @@
* ]
*/
(function() {
- 'use strict';
+'use strict';
- // Cursor timeout on mongod is handled by a single thread/timer that will sleep for
- // "clientCursorMonitorFrequencySecs" and add the sleep value to each operation's duration when
- // it wakes up, timing out those whose "now() - last accessed" time exceeds the timeout. A cursor
- // timeout of 2 seconds with a monitor frequency of 1 second means an effective timeout period
- // of 1 to 2 seconds.
- const cursorTimeoutMs = 2000;
- const cursorMonitorFrequencySecs = 1;
+// Cursor timeout on mongod is handled by a single thread/timer that will sleep for
+// "clientCursorMonitorFrequencySecs" and add the sleep value to each operation's duration when
+// it wakes up, timing out those whose "now() - last accessed" time exceeds the timeout. A cursor
+// timeout of 2 seconds with a monitor frequency of 1 second means an effective timeout period
+// of 1 to 2 seconds.
+const cursorTimeoutMs = 2000;
+const cursorMonitorFrequencySecs = 1;
- const options = {
- setParameter: {
- internalDocumentSourceCursorBatchSizeBytes: 1,
- // We use the "cursorTimeoutMillis" server parameter to decrease how long it takes for a
- // non-exhausted cursor to time out. We use the "clientCursorMonitorFrequencySecs"
- // server parameter to make the ClientCursorMonitor that cleans up the timed out cursors
- // run more often. The combination of these server parameters reduces the amount of time
- // we need to wait within this test.
- cursorTimeoutMillis: cursorTimeoutMs,
- clientCursorMonitorFrequencySecs: cursorMonitorFrequencySecs,
- }
- };
- const conn = MongoRunner.runMongod(options);
- assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
+const options = {
+ setParameter: {
+ internalDocumentSourceCursorBatchSizeBytes: 1,
+ // We use the "cursorTimeoutMillis" server parameter to decrease how long it takes for a
+ // non-exhausted cursor to time out. We use the "clientCursorMonitorFrequencySecs"
+ // server parameter to make the ClientCursorMonitor that cleans up the timed out cursors
+ // run more often. The combination of these server parameters reduces the amount of time
+ // we need to wait within this test.
+ cursorTimeoutMillis: cursorTimeoutMs,
+ clientCursorMonitorFrequencySecs: cursorMonitorFrequencySecs,
+ }
+};
+const conn = MongoRunner.runMongod(options);
+assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
- const testDB = conn.getDB('test');
+const testDB = conn.getDB('test');
- // We use a batch size of 2 to ensure that the mongo shell does not exhaust the cursor on its
- // first batch.
- const batchSize = 2;
- const numMatches = 5;
+// We use a batch size of 2 to ensure that the mongo shell does not exhaust the cursor on its
+// first batch.
+const batchSize = 2;
+const numMatches = 5;
- function assertCursorTimesOut(collName, pipeline) {
- const res = assert.commandWorked(testDB.runCommand({
- aggregate: collName,
- pipeline: pipeline,
- cursor: {
- batchSize: batchSize,
- },
- }));
+function assertCursorTimesOut(collName, pipeline) {
+ const res = assert.commandWorked(testDB.runCommand({
+ aggregate: collName,
+ pipeline: pipeline,
+ cursor: {
+ batchSize: batchSize,
+ },
+ }));
- let serverStatus = assert.commandWorked(testDB.serverStatus());
- const expectedNumTimedOutCursors = serverStatus.metrics.cursor.timedOut + 1;
+ let serverStatus = assert.commandWorked(testDB.serverStatus());
+ const expectedNumTimedOutCursors = serverStatus.metrics.cursor.timedOut + 1;
- const cursor = new DBCommandCursor(testDB, res, batchSize);
+ const cursor = new DBCommandCursor(testDB, res, batchSize);
- // Wait until the idle cursor background job has killed the aggregation cursor.
- assert.soon(
- function() {
- serverStatus = assert.commandWorked(testDB.serverStatus());
- return +serverStatus.metrics.cursor.timedOut === expectedNumTimedOutCursors;
- },
- function() {
- return "aggregation cursor failed to time out: " +
- tojson(serverStatus.metrics.cursor);
- });
+ // Wait until the idle cursor background job has killed the aggregation cursor.
+ assert.soon(
+ function() {
+ serverStatus = assert.commandWorked(testDB.serverStatus());
+ return +serverStatus.metrics.cursor.timedOut === expectedNumTimedOutCursors;
+ },
+ function() {
+ return "aggregation cursor failed to time out: " + tojson(serverStatus.metrics.cursor);
+ });
- assert.eq(0, serverStatus.metrics.cursor.open.total, tojson(serverStatus));
+ assert.eq(0, serverStatus.metrics.cursor.open.total, tojson(serverStatus));
- // We attempt to exhaust the aggregation cursor to verify that sending a getMore returns an
- // error due to the cursor being killed.
- let err = assert.throws(function() {
- cursor.itcount();
- });
- assert.eq(ErrorCodes.CursorNotFound, err.code, tojson(err));
- }
+ // We attempt to exhaust the aggregation cursor to verify that sending a getMore returns an
+ // error due to the cursor being killed.
+ let err = assert.throws(function() {
+ cursor.itcount();
+ });
+ assert.eq(ErrorCodes.CursorNotFound, err.code, tojson(err));
+}
- assert.writeOK(testDB.source.insert({local: 1}));
- for (let i = 0; i < numMatches; ++i) {
- assert.writeOK(testDB.dest.insert({foreign: 1}));
- }
+assert.writeOK(testDB.source.insert({local: 1}));
+for (let i = 0; i < numMatches; ++i) {
+ assert.writeOK(testDB.dest.insert({foreign: 1}));
+}
- // Test that a regular aggregation cursor is killed when the timeout is reached.
- assertCursorTimesOut('dest', []);
+// Test that a regular aggregation cursor is killed when the timeout is reached.
+assertCursorTimesOut('dest', []);
- // Test that an aggregation cursor with a $lookup stage is killed when the timeout is reached.
- assertCursorTimesOut('source', [
+// Test that an aggregation cursor with a $lookup stage is killed when the timeout is reached.
+assertCursorTimesOut('source', [
{
$lookup: {
from: 'dest',
@@ -97,9 +96,9 @@
},
]);
- // Test that an aggregation cursor with nested $lookup stages is killed when the timeout is
- // reached.
- assertCursorTimesOut('source', [
+// Test that an aggregation cursor with nested $lookup stages is killed when the timeout is
+// reached.
+assertCursorTimesOut('source', [
{
$lookup: {
from: 'dest',
@@ -126,5 +125,5 @@
},
]);
- MongoRunner.stopMongod(conn);
+MongoRunner.stopMongod(conn);
})();
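
The timeout window described in the comments above is worth restating concretely. A minimal sketch follows; cursorTimeoutMillis is a documented runtime-settable server parameter, and applying it via adminCommand (rather than at startup, as the test does) is an assumption for illustration:

// The monitor wakes every cursorMonitorFrequencySecs, so a cursor becomes
// eligible for reaping somewhere inside a one-monitor-period window:
// [cursorTimeoutMs - 1000 * cursorMonitorFrequencySecs, cursorTimeoutMs].
const cursorTimeoutMs = 2000;
const cursorMonitorFrequencySecs = 1;
const minObservedTimeoutMs = cursorTimeoutMs - 1000 * cursorMonitorFrequencySecs;  // 1000
db.adminCommand({setParameter: 1, cursorTimeoutMillis: cursorTimeoutMs});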
diff --git a/jstests/aggregation/bugs/explain_options_helper.js b/jstests/aggregation/bugs/explain_options_helper.js
index 17360acab73..0834d56e469 100644
--- a/jstests/aggregation/bugs/explain_options_helper.js
+++ b/jstests/aggregation/bugs/explain_options_helper.js
@@ -2,23 +2,25 @@
// This test was designed to reproduce SERVER-32300.
(function() {
- "use strict";
+"use strict";
- const coll = db.explain_options;
- coll.drop();
+const coll = db.explain_options;
+coll.drop();
- for (let i = 0; i < 10; ++i) {
- assert.writeOK(coll.insert({_id: i}));
- }
+for (let i = 0; i < 10; ++i) {
+ assert.writeOK(coll.insert({_id: i}));
+}
- const collation = {collation: {locale: "zh", backwards: false}};
+const collation = {
+ collation: {locale: "zh", backwards: false}
+};
- const firstResults = coll.aggregate([{$sort: {_id: 1}}], collation).toArray();
- // Issue an explain in order to verify that 'collation' is not modified to include the explain
- // flag.
- assert.commandWorked(coll.explain().aggregate([], collation));
+const firstResults = coll.aggregate([{$sort: {_id: 1}}], collation).toArray();
+// Issue an explain in order to verify that 'collation' is not modified to include the explain
+// flag.
+assert.commandWorked(coll.explain().aggregate([], collation));
- const secondResults = coll.aggregate([{$sort: {_id: 1}}], collation).toArray();
- // Assert that the result didn't change after an explain helper is issued.
- assert.eq(firstResults, secondResults);
+const secondResults = coll.aggregate([{$sort: {_id: 1}}], collation).toArray();
+// Assert that the result didn't change after an explain helper is issued.
+assert.eq(firstResults, secondResults);
}());
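
The invariant guarded here (SERVER-32300) can also be checked on the options object itself; a minimal sketch reusing the test's collation document:

// The explain() helper must not stash an explain flag inside the caller's
// options object, so the object serializes identically before and after.
const opts = {collation: {locale: 'zh', backwards: false}};
const before = tojson(opts);
db.explain_options.explain().aggregate([], opts);
assert.eq(before, tojson(opts));  // options object left untouched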
diff --git a/jstests/aggregation/bugs/firstlast.js b/jstests/aggregation/bugs/firstlast.js
index aa360a25b7e..8ab83fe30b7 100644
--- a/jstests/aggregation/bugs/firstlast.js
+++ b/jstests/aggregation/bugs/firstlast.js
@@ -2,120 +2,119 @@
* Tests the $first and $last accumulators in $group.
*/
(function() {
- 'use strict';
- const coll = db.jstests_aggregation_firstlast;
- coll.drop();
-
- /** Check expected $first and $last result values. */
- function assertFirstLast(expectedFirst, expectedLast, stages, expression) {
- let pipeline = [{$sort: {_id: 1}}];
- if (stages) {
- pipeline = pipeline.concat(stages);
- }
+'use strict';
+const coll = db.jstests_aggregation_firstlast;
+coll.drop();
+
+/** Check expected $first and $last result values. */
+function assertFirstLast(expectedFirst, expectedLast, stages, expression) {
+ let pipeline = [{$sort: {_id: 1}}];
+ if (stages) {
+ pipeline = pipeline.concat(stages);
+ }
- expression = expression || '$b';
- pipeline.push(
- {$group: {_id: '$a', first: {$first: expression}, last: {$last: expression}}});
-
- const result = coll.aggregate(pipeline).toArray();
- for (let i = 0; i < result.length; ++i) {
- if (result[i]._id === 1) {
- // Check results for group _id 1.
- assert.eq(expectedFirst, result[i].first);
- assert.eq(expectedLast, result[i].last);
- return;
- }
+ expression = expression || '$b';
+ pipeline.push({$group: {_id: '$a', first: {$first: expression}, last: {$last: expression}}});
+
+ const result = coll.aggregate(pipeline).toArray();
+ for (let i = 0; i < result.length; ++i) {
+ if (result[i]._id === 1) {
+ // Check results for group _id 1.
+ assert.eq(expectedFirst, result[i].first);
+ assert.eq(expectedLast, result[i].last);
+ return;
}
- throw new Error('Expected $group _id "1" is missing');
}
-
- // One document.
- assert.writeOK(coll.insert({a: 1, b: 1}));
- assertFirstLast(1, 1);
-
- // Two documents.
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assertFirstLast(1, 2);
-
- // Three documents.
- assert.writeOK(coll.insert({a: 1, b: 3}));
- assertFirstLast(1, 3);
-
- // Another 'a' key value does not affect outcome.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 3, b: 0}));
- assert.writeOK(coll.insert({a: 1, b: 1}));
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assert.writeOK(coll.insert({a: 1, b: 3}));
- assert.writeOK(coll.insert({a: 2, b: 0}));
- assertFirstLast(1, 3);
-
- // Additional pipeline stages do not affect outcome if order is maintained.
- assertFirstLast(1, 3, [{$project: {x: '$a', y: '$b'}}, {$project: {a: '$x', b: '$y'}}]);
-
- // Additional pipeline stages affect outcome if order is modified.
- assertFirstLast(3, 1, [{$sort: {b: -1}}]);
-
- // Skip and limit affect the results seen.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: 1}));
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assert.writeOK(coll.insert({a: 1, b: 3}));
- assertFirstLast(1, 2, [{$limit: 2}]);
- assertFirstLast(2, 3, [{$skip: 1}, {$limit: 2}]);
- assertFirstLast(2, 2, [{$skip: 1}, {$limit: 1}]);
-
- // Mixed type values.
- assert.writeOK(coll.insert({a: 1, b: 'foo'}));
- assertFirstLast(1, 'foo');
-
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: 'bar'}));
- assert.writeOK(coll.insert({a: 1, b: true}));
- assertFirstLast('bar', true);
-
- // Value null.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: null}));
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assertFirstLast(null, 2);
-
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assert.writeOK(coll.insert({a: 1, b: null}));
- assertFirstLast(2, null);
-
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: null}));
- assert.writeOK(coll.insert({a: 1, b: null}));
- assertFirstLast(null, null);
-
- // Value missing.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1}));
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assertFirstLast(undefined, 2);
-
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assert.writeOK(coll.insert({a: 1}));
- assertFirstLast(2, undefined);
-
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1}));
- assert.writeOK(coll.insert({a: 1}));
- assertFirstLast(undefined, undefined);
-
- // Dotted field.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: [{c: 1}, {c: 2}]}));
- assert.writeOK(coll.insert({a: 1, b: [{c: 6}, {}]}));
- assertFirstLast([1, 2], [6], [], '$b.c');
-
- // Computed expressions.
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 1, b: 1}));
- assert.writeOK(coll.insert({a: 1, b: 2}));
- assertFirstLast(1, 0, [], {$mod: ['$b', 2]});
- assertFirstLast(0, 1, [], {$mod: [{$add: ['$b', 1]}, 2]});
+ throw new Error('Expected $group _id "1" is missing');
+}
+
+// One document.
+assert.writeOK(coll.insert({a: 1, b: 1}));
+assertFirstLast(1, 1);
+
+// Two documents.
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assertFirstLast(1, 2);
+
+// Three documents.
+assert.writeOK(coll.insert({a: 1, b: 3}));
+assertFirstLast(1, 3);
+
+// Another 'a' key value does not affect outcome.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 3, b: 0}));
+assert.writeOK(coll.insert({a: 1, b: 1}));
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assert.writeOK(coll.insert({a: 1, b: 3}));
+assert.writeOK(coll.insert({a: 2, b: 0}));
+assertFirstLast(1, 3);
+
+// Additional pipeline stages do not affect outcome if order is maintained.
+assertFirstLast(1, 3, [{$project: {x: '$a', y: '$b'}}, {$project: {a: '$x', b: '$y'}}]);
+
+// Additional pipeline stages affect outcome if order is modified.
+assertFirstLast(3, 1, [{$sort: {b: -1}}]);
+
+// Skip and limit affect the results seen.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: 1}));
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assert.writeOK(coll.insert({a: 1, b: 3}));
+assertFirstLast(1, 2, [{$limit: 2}]);
+assertFirstLast(2, 3, [{$skip: 1}, {$limit: 2}]);
+assertFirstLast(2, 2, [{$skip: 1}, {$limit: 1}]);
+
+// Mixed type values.
+assert.writeOK(coll.insert({a: 1, b: 'foo'}));
+assertFirstLast(1, 'foo');
+
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: 'bar'}));
+assert.writeOK(coll.insert({a: 1, b: true}));
+assertFirstLast('bar', true);
+
+// Value null.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: null}));
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assertFirstLast(null, 2);
+
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assert.writeOK(coll.insert({a: 1, b: null}));
+assertFirstLast(2, null);
+
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: null}));
+assert.writeOK(coll.insert({a: 1, b: null}));
+assertFirstLast(null, null);
+
+// Value missing.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1}));
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assertFirstLast(undefined, 2);
+
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assert.writeOK(coll.insert({a: 1}));
+assertFirstLast(2, undefined);
+
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1}));
+assert.writeOK(coll.insert({a: 1}));
+assertFirstLast(undefined, undefined);
+
+// Dotted field.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: [{c: 1}, {c: 2}]}));
+assert.writeOK(coll.insert({a: 1, b: [{c: 6}, {}]}));
+assertFirstLast([1, 2], [6], [], '$b.c');
+
+// Computed expressions.
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 1, b: 1}));
+assert.writeOK(coll.insert({a: 1, b: 2}));
+assertFirstLast(1, 0, [], {$mod: ['$b', 2]});
+assertFirstLast(0, 1, [], {$mod: [{$add: ['$b', 1]}, 2]});
}());
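
For reference, the accumulator contract exercised above reduces to a short sketch; the collection name firstlast_demo is illustrative:

// With an ascending sort on b, $first and $last return the boundary values
// of each group.
const flDemo = db.firstlast_demo;
flDemo.drop();
flDemo.insert({a: 1, b: 1});
flDemo.insert({a: 1, b: 3});
flDemo.aggregate([
    {$sort: {b: 1}},
    {$group: {_id: '$a', first: {$first: '$b'}, last: {$last: '$b'}}}
]).toArray();  // [{_id: 1, first: 1, last: 3}]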
diff --git a/jstests/aggregation/bugs/groupMissing.js b/jstests/aggregation/bugs/groupMissing.js
index c08e70185b1..5f734abbee5 100644
--- a/jstests/aggregation/bugs/groupMissing.js
+++ b/jstests/aggregation/bugs/groupMissing.js
@@ -8,68 +8,68 @@
load('jstests/aggregation/extras/utils.js'); // For resultsEq.
(function() {
- "use strict";
+"use strict";
- var coll = db.groupMissing;
- coll.drop();
+var coll = db.groupMissing;
+coll.drop();
- coll.insert({a: null});
- coll.insert({});
+coll.insert({a: null});
+coll.insert({});
- var res = coll.aggregate({$group: {_id: "$a"}});
- var arr = res.toArray();
- assert.eq(arr.length, 1);
- assert.eq(arr[0]._id, null);
+var res = coll.aggregate({$group: {_id: "$a"}});
+var arr = res.toArray();
+assert.eq(arr.length, 1);
+assert.eq(arr[0]._id, null);
- coll.createIndex({a: 1});
- res = coll.aggregate({$sort: {a: 1}}, {$group: {_id: "$a"}});
- arr = res.toArray();
- assert.eq(arr.length, 1);
- assert.eq(arr[0]._id, null);
+coll.createIndex({a: 1});
+res = coll.aggregate({$sort: {a: 1}}, {$group: {_id: "$a"}});
+arr = res.toArray();
+assert.eq(arr.length, 1);
+assert.eq(arr[0]._id, null);
- coll.drop();
+coll.drop();
- coll.insert({a: null});
- coll.insert({});
+coll.insert({a: null});
+coll.insert({});
- // Bug, see SERVER-21992.
+// Bug, see SERVER-21992.
+res = coll.aggregate({$group: {_id: {a: "$a"}}});
+assert(resultsEq(res.toArray(), [{_id: {a: null}}]));
+
+// Correct behavior after SERVER-21992 is fixed.
+if (0) {
res = coll.aggregate({$group: {_id: {a: "$a"}}});
- assert(resultsEq(res.toArray(), [{_id: {a: null}}]));
+ assert(resultsEq(res.toArray(), [{_id: {a: null}}, {_id: {a: {}}}]));
+}
- // Correct behavior after SERVER-21992 is fixed.
- if (0) {
- res = coll.aggregate({$group: {_id: {a: "$a"}}});
- assert(resultsEq(res.toArray(), [{_id: {a: null}}, {_id: {a: {}}}]));
- }
+// Bug, see SERVER-21992.
+coll.createIndex({a: 1});
+res = coll.aggregate({$group: {_id: {a: "$a"}}});
+assert(resultsEq(res.toArray(), [{_id: {a: null}}]));
- // Bug, see SERVER-21992.
- coll.createIndex({a: 1});
+// Correct behavior after SERVER-21992 is fixed.
+if (0) {
res = coll.aggregate({$group: {_id: {a: "$a"}}});
- assert(resultsEq(res.toArray(), [{_id: {a: null}}]));
+ assert(resultsEq(res.toArray(), [{_id: {a: null}}, {_id: {a: {}}}]));
+}
- // Correct behavior after SERVER-21992 is fixed.
- if (0) {
- res = coll.aggregate({$group: {_id: {a: "$a"}}});
- assert(resultsEq(res.toArray(), [{_id: {a: null}}, {_id: {a: {}}}]));
- }
+coll.drop();
+coll.insert({a: null, b: 1});
+coll.insert({b: 1});
+coll.insert({a: null, b: 1});
- coll.drop();
- coll.insert({a: null, b: 1});
- coll.insert({b: 1});
- coll.insert({a: null, b: 1});
+res = coll.aggregate({$group: {_id: {a: "$a", b: "$b"}}});
+assert(resultsEq(res.toArray(), [{_id: {b: 1}}, {_id: {a: null, b: 1}}]));
- res = coll.aggregate({$group: {_id: {a: "$a", b: "$b"}}});
- assert(resultsEq(res.toArray(), [{_id: {b: 1}}, {_id: {a: null, b: 1}}]));
+// Bug, see SERVER-23229.
+coll.createIndex({a: 1, b: 1});
+res = coll.aggregate({$sort: {a: 1, b: 1}}, {$group: {_id: {a: "$a", b: "$b"}}});
+assert(resultsEq(res.toArray(), [{_id: {a: null, b: 1}}]));
- // Bug, see SERVER-23229.
+// Correct behavior after SERVER-23229 is fixed.
+if (0) {
coll.createIndex({a: 1, b: 1});
res = coll.aggregate({$sort: {a: 1, b: 1}}, {$group: {_id: {a: "$a", b: "$b"}}});
- assert(resultsEq(res.toArray(), [{_id: {a: null, b: 1}}]));
-
- // Correct behavior after SERVER-23229 is fixed.
- if (0) {
- coll.createIndex({a: 1, b: 1});
- res = coll.aggregate({$sort: {a: 1, b: 1}}, {$group: {_id: {a: "$a", b: "$b"}}});
- assert(resultsEq(res.toArray(), [{_id: {b: 1}}, {_id: {a: null, b: 1}}]));
- }
+ assert(resultsEq(res.toArray(), [{_id: {b: 1}}, {_id: {a: null, b: 1}}]));
+}
}());
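
The null-versus-missing collapse this test pins down can be reproduced in isolation; a minimal sketch of the behavior the test currently asserts, with an illustrative collection name:

// A document lacking 'a' and a document with an explicit null 'a' land in
// the same group: {$group: {_id: '$a'}} does not distinguish the two.
const gmDemo = db.group_missing_demo;
gmDemo.drop();
gmDemo.insert({a: null});
gmDemo.insert({});
gmDemo.aggregate([{$group: {_id: '$a'}}]).toArray();  // [{_id: null}]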
diff --git a/jstests/aggregation/bugs/lookup_unwind_getmore.js b/jstests/aggregation/bugs/lookup_unwind_getmore.js
index 3ba7dbf4007..67b970de820 100644
--- a/jstests/aggregation/bugs/lookup_unwind_getmore.js
+++ b/jstests/aggregation/bugs/lookup_unwind_getmore.js
@@ -8,45 +8,47 @@
* ]
*/
(function() {
- 'use strict';
+'use strict';
- const options = {setParameter: 'internalDocumentSourceCursorBatchSizeBytes=1'};
- const conn = MongoRunner.runMongod(options);
- assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
+const options = {
+ setParameter: 'internalDocumentSourceCursorBatchSizeBytes=1'
+};
+const conn = MongoRunner.runMongod(options);
+assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
- const testDB = conn.getDB('test');
+const testDB = conn.getDB('test');
- /**
- * Executes an aggregate with 'options.pipeline' and confirms that 'options.numResults' were
- * returned.
- */
- function runTest(options) {
- // The batchSize must be smaller than the number of documents returned by the $lookup. This
- // ensures that the mongo shell will issue a getMore when unwinding the $lookup results for
- // the same document in the 'source' collection, under a different OperationContext.
- const batchSize = 2;
+/**
+ * Executes an aggregate with 'options.pipeline' and confirms that 'options.numResults' were
+ * returned.
+ */
+function runTest(options) {
+ // The batchSize must be smaller than the number of documents returned by the $lookup. This
+ // ensures that the mongo shell will issue a getMore when unwinding the $lookup results for
+ // the same document in the 'source' collection, under a different OperationContext.
+ const batchSize = 2;
- testDB.source.drop();
- assert.writeOK(testDB.source.insert({x: 1}));
+ testDB.source.drop();
+ assert.writeOK(testDB.source.insert({x: 1}));
- testDB.dest.drop();
- for (let i = 0; i < 5; ++i) {
- assert.writeOK(testDB.dest.insert({x: 1}));
- }
+ testDB.dest.drop();
+ for (let i = 0; i < 5; ++i) {
+ assert.writeOK(testDB.dest.insert({x: 1}));
+ }
- const res = assert.commandWorked(testDB.runCommand({
- aggregate: 'source',
- pipeline: options.pipeline,
- cursor: {
- batchSize: batchSize,
- },
- }));
+ const res = assert.commandWorked(testDB.runCommand({
+ aggregate: 'source',
+ pipeline: options.pipeline,
+ cursor: {
+ batchSize: batchSize,
+ },
+ }));
- const cursor = new DBCommandCursor(testDB, res, batchSize);
- assert.eq(options.numResults, cursor.itcount());
- }
+ const cursor = new DBCommandCursor(testDB, res, batchSize);
+ assert.eq(options.numResults, cursor.itcount());
+}
- runTest({
+runTest({
pipeline: [
{
$lookup: {
@@ -65,7 +67,7 @@
numResults: 5
});
- runTest({
+runTest({
pipeline: [
{
$lookup: {
@@ -99,5 +101,5 @@
numResults: 25
});
- MongoRunner.stopMongod(conn);
+MongoRunner.stopMongod(conn);
})();
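
The getMore mechanics the test relies on are easy to see in isolation; a minimal sketch using the same shell primitives (an empty pipeline stands in for the $lookup/$unwind pipelines above):

// A batchSize smaller than the result count forces the shell to fetch the
// remainder with getMore commands, which run on a fresh OperationContext.
const getMoreRes = assert.commandWorked(db.runCommand(
    {aggregate: 'source', pipeline: [], cursor: {batchSize: 2}}));
const getMoreCursor = new DBCommandCursor(db, getMoreRes, 2 /* batchSize */);
getMoreCursor.itcount();  // draining past the first batch issues getMores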
diff --git a/jstests/aggregation/bugs/lookup_unwind_killcursor.js b/jstests/aggregation/bugs/lookup_unwind_killcursor.js
index 45da6350c2f..eab9d05c591 100644
--- a/jstests/aggregation/bugs/lookup_unwind_killcursor.js
+++ b/jstests/aggregation/bugs/lookup_unwind_killcursor.js
@@ -8,43 +8,45 @@
* ]
*/
(function() {
- 'use strict';
+'use strict';
- const options = {setParameter: 'internalDocumentSourceCursorBatchSizeBytes=1'};
- const conn = MongoRunner.runMongod(options);
- assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
+const options = {
+ setParameter: 'internalDocumentSourceCursorBatchSizeBytes=1'
+};
+const conn = MongoRunner.runMongod(options);
+assert.neq(null, conn, 'mongod was unable to start up with options: ' + tojson(options));
- const testDB = conn.getDB('test');
+const testDB = conn.getDB('test');
- function runTest(pipeline) {
- // We use a batch size of 2 to ensure that the mongo shell does not exhaust the cursor on
- // its first batch.
- const batchSize = 2;
+function runTest(pipeline) {
+ // We use a batch size of 2 to ensure that the mongo shell does not exhaust the cursor on
+ // its first batch.
+ const batchSize = 2;
- testDB.source.drop();
- assert.writeOK(testDB.source.insert({x: 1}));
+ testDB.source.drop();
+ assert.writeOK(testDB.source.insert({x: 1}));
- testDB.dest.drop();
- for (let i = 0; i < 5; ++i) {
- assert.writeOK(testDB.dest.insert({x: 1}));
- }
+ testDB.dest.drop();
+ for (let i = 0; i < 5; ++i) {
+ assert.writeOK(testDB.dest.insert({x: 1}));
+ }
- const res = assert.commandWorked(testDB.runCommand({
- aggregate: 'source',
- pipeline: pipeline,
- cursor: {
- batchSize: batchSize,
- },
- }));
+ const res = assert.commandWorked(testDB.runCommand({
+ aggregate: 'source',
+ pipeline: pipeline,
+ cursor: {
+ batchSize: batchSize,
+ },
+ }));
- const cursor = new DBCommandCursor(testDB, res, batchSize);
- cursor.close(); // Closing the cursor will issue the "killCursors" command.
+ const cursor = new DBCommandCursor(testDB, res, batchSize);
+ cursor.close(); // Closing the cursor will issue the "killCursors" command.
- const serverStatus = assert.commandWorked(testDB.adminCommand({serverStatus: 1}));
- assert.eq(0, serverStatus.metrics.cursor.open.total, tojson(serverStatus.metrics.cursor));
- }
+ const serverStatus = assert.commandWorked(testDB.adminCommand({serverStatus: 1}));
+ assert.eq(0, serverStatus.metrics.cursor.open.total, tojson(serverStatus.metrics.cursor));
+}
- runTest([
+runTest([
{
$lookup: {
from: 'dest',
@@ -60,7 +62,7 @@
},
]);
- runTest([
+runTest([
{
$lookup: {
from: 'dest',
@@ -91,5 +93,5 @@
},
]);
- MongoRunner.stopMongod(conn);
+MongoRunner.stopMongod(conn);
})();
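
For the companion cleanup path tested above, a sketch along the same lines; pipeline contents are again illustrative:

// Closing a DBCommandCursor before exhausting it sends a killCursors
// command, after which serverStatus should report no open cursors.
const killRes = assert.commandWorked(db.runCommand(
    {aggregate: 'source', pipeline: [], cursor: {batchSize: 2}}));
new DBCommandCursor(db, killRes, 2 /* batchSize */).close();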
diff --git a/jstests/aggregation/bugs/match.js b/jstests/aggregation/bugs/match.js
index 8cb4519a861..6a545ed60c1 100644
--- a/jstests/aggregation/bugs/match.js
+++ b/jstests/aggregation/bugs/match.js
@@ -2,166 +2,169 @@
// - Filtering behavior equivalent to a mongo query.
// - $where and geo operators are not allowed
(function() {
- "use strict";
-
- load('jstests/aggregation/extras/utils.js');
-
- const coll = db.jstests_aggregation_match;
- coll.drop();
-
- const identityProjection = {_id: '$_id', a: '$a'};
-
- /** Assert that an aggregation generated the expected error. */
- function assertError(expectedCode, matchSpec) {
- const matchStage = {$match: matchSpec};
- // Check where matching is folded in to DocumentSourceCursor.
- assertErrorCode(coll, [matchStage], expectedCode);
- // Check where matching is not folded in to DocumentSourceCursor.
- assertErrorCode(coll, [{$project: identityProjection}, matchStage], expectedCode);
+"use strict";
+
+load('jstests/aggregation/extras/utils.js');
+
+const coll = db.jstests_aggregation_match;
+coll.drop();
+
+const identityProjection = {
+ _id: '$_id',
+ a: '$a'
+};
+
+/** Assert that an aggregation generated the expected error. */
+function assertError(expectedCode, matchSpec) {
+ const matchStage = {$match: matchSpec};
+ // Check where matching is folded in to DocumentSourceCursor.
+ assertErrorCode(coll, [matchStage], expectedCode);
+ // Check where matching is not folded in to DocumentSourceCursor.
+ assertErrorCode(coll, [{$project: identityProjection}, matchStage], expectedCode);
+}
+
+/** Assert that the contents of two arrays are equal, ignoring element ordering. */
+function assertEqualResultsUnordered(one, two) {
+ let oneStr = one.map(function(x) {
+ return tojson(x);
+ });
+ let twoStr = two.map(function(x) {
+ return tojson(x);
+ });
+ oneStr.sort();
+ twoStr.sort();
+ assert.eq(oneStr, twoStr);
+}
+
+/** Assert that an aggregation result is as expected. */
+function assertResults(expectedResults, matchSpec) {
+ const findResults = coll.find(matchSpec).toArray();
+ if (expectedResults) {
+ assertEqualResultsUnordered(expectedResults, findResults);
}
-
- /** Assert that the contents of two arrays are equal, ignoring element ordering. */
- function assertEqualResultsUnordered(one, two) {
- let oneStr = one.map(function(x) {
- return tojson(x);
- });
- let twoStr = two.map(function(x) {
- return tojson(x);
- });
- oneStr.sort();
- twoStr.sort();
- assert.eq(oneStr, twoStr);
- }
-
- /** Assert that an aggregation result is as expected. */
- function assertResults(expectedResults, matchSpec) {
- const findResults = coll.find(matchSpec).toArray();
- if (expectedResults) {
- assertEqualResultsUnordered(expectedResults, findResults);
- }
- const matchStage = {$match: matchSpec};
- // Check where matching is folded in to DocumentSourceCursor.
- assertEqualResultsUnordered(findResults, coll.aggregate(matchStage).toArray());
- // Check where matching is not folded in to DocumentSourceCursor.
- assertEqualResultsUnordered(
- findResults, coll.aggregate({$project: identityProjection}, matchStage).toArray());
- }
-
- // Invalid matcher syntax.
- assertError(2, {a: {$mod: [0 /* invalid */, 0]}});
-
- // $where not allowed.
- assertError(ErrorCodes.BadValue, {$where: 'true'});
-
- // Geo not allowed.
- assertError(ErrorCodes.BadValue, {$match: {a: {$near: [0, 0]}}});
-
- function checkMatchResults(indexed) {
- // No results.
- coll.remove({});
- assertResults([], {});
-
- assert.writeOK(coll.insert({_id: 0, a: 1}));
- assert.writeOK(coll.insert({_id: 1, a: 2}));
- assert.writeOK(coll.insert({_id: 2, a: 3}));
-
- // Empty query.
- assertResults([{_id: 0, a: 1}, {_id: 1, a: 2}, {_id: 2, a: 3}], {});
-
- // Simple queries.
- assertResults([{_id: 0, a: 1}], {a: 1});
- assertResults([{_id: 1, a: 2}], {a: 2});
- assertResults([{_id: 1, a: 2}, {_id: 2, a: 3}], {a: {$gt: 1}});
- assertResults([{_id: 0, a: 1}, {_id: 1, a: 2}], {a: {$lte: 2}});
- assertResults([{_id: 0, a: 1}, {_id: 2, a: 3}], {a: {$in: [1, 3]}});
-
- // Regular expression.
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: 'x'}));
- assert.writeOK(coll.insert({_id: 1, a: 'yx'}));
- assertResults([{_id: 0, a: 'x'}], {a: /^x/});
- assertResults([{_id: 0, a: 'x'}, {_id: 1, a: 'yx'}], {a: /x/});
-
- // Dotted field.
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: {b: 4}}));
- assert.writeOK(coll.insert({_id: 1, a: 2}));
- assertResults([{_id: 0, a: {b: 4}}], {'a.b': 4});
-
- // Value within an array.
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: [1, 2, 3]}));
- assert.writeOK(coll.insert({_id: 1, a: [2, 2, 3]}));
- assert.writeOK(coll.insert({_id: 2, a: [2, 2, 2]}));
- assertResults([{_id: 0, a: [1, 2, 3]}, {_id: 1, a: [2, 2, 3]}], {a: 3});
-
- // Missing, null, $exists matching.
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0}));
- assert.writeOK(coll.insert({_id: 1, a: null}));
- assert.writeOK(coll.insert({_id: 3, a: 0}));
- assertResults([{_id: 0}, {_id: 1, a: null}], {a: null});
- assertResults(null, {a: {$exists: true}});
- assertResults(null, {a: {$exists: false}});
-
- // $elemMatch
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: [1, 2]}));
- assert.writeOK(coll.insert({_id: 1, a: [1, 2, 3]}));
- assertResults([{_id: 1, a: [1, 2, 3]}], {a: {$elemMatch: {$gt: 1, $mod: [2, 1]}}});
-
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: [{b: 1}, {c: 2}]}));
- assert.writeOK(coll.insert({_id: 1, a: [{b: 1, c: 2}]}));
- assertResults([{_id: 1, a: [{b: 1, c: 2}]}], {a: {$elemMatch: {b: 1, c: 2}}});
-
- // $size
- coll.remove({});
- assert.writeOK(coll.insert({}));
- assert.writeOK(coll.insert({a: null}));
- assert.writeOK(coll.insert({a: []}));
- assert.writeOK(coll.insert({a: [1]}));
- assert.writeOK(coll.insert({a: [1, 2]}));
- assertResults(null, {a: {$size: 0}});
- assertResults(null, {a: {$size: 1}});
- assertResults(null, {a: {$size: 2}});
-
- // $type
- coll.remove({});
- assert.writeOK(coll.insert({}));
- assert.writeOK(coll.insert({a: null}));
- assert.writeOK(coll.insert({a: NumberInt(1)}));
- assert.writeOK(coll.insert({a: NumberLong(2)}));
- assert.writeOK(coll.insert({a: 66.6}));
- assert.writeOK(coll.insert({a: 'abc'}));
- assert.writeOK(coll.insert({a: /xyz/}));
- assert.writeOK(coll.insert({a: {q: 1}}));
- assert.writeOK(coll.insert({a: true}));
- assert.writeOK(coll.insert({a: new Date()}));
- assert.writeOK(coll.insert({a: new ObjectId()}));
- for (let type = 1; type <= 18; ++type) {
- assertResults(null, {a: {$type: type}});
- }
-
- coll.remove({});
- assert.writeOK(coll.insert({_id: 0, a: 1}));
- assert.writeOK(coll.insert({_id: 1, a: 2}));
- assert.writeOK(coll.insert({_id: 2, a: 3}));
-
- // $and
- assertResults([{_id: 1, a: 2}], {$and: [{a: 2}, {_id: 1}]});
- assertResults([], {$and: [{a: 1}, {_id: 1}]});
- assertResults([{_id: 1, a: 2}, {_id: 2, a: 3}],
- {$and: [{$or: [{_id: 1}, {a: 3}]}, {$or: [{_id: 2}, {a: 2}]}]});
-
- // $or
- assertResults([{_id: 0, a: 1}, {_id: 2, a: 3}], {$or: [{_id: 0}, {a: 3}]});
+ const matchStage = {$match: matchSpec};
+ // Check where matching is folded in to DocumentSourceCursor.
+ assertEqualResultsUnordered(findResults, coll.aggregate(matchStage).toArray());
+ // Check where matching is not folded in to DocumentSourceCursor.
+ assertEqualResultsUnordered(
+ findResults, coll.aggregate({$project: identityProjection}, matchStage).toArray());
+}
+
+// Invalid matcher syntax.
+assertError(2, {a: {$mod: [0 /* invalid */, 0]}});
+
+// $where not allowed.
+assertError(ErrorCodes.BadValue, {$where: 'true'});
+
+// Geo not allowed.
+assertError(ErrorCodes.BadValue, {$match: {a: {$near: [0, 0]}}});
+
+function checkMatchResults(indexed) {
+ // No results.
+ coll.remove({});
+ assertResults([], {});
+
+ assert.writeOK(coll.insert({_id: 0, a: 1}));
+ assert.writeOK(coll.insert({_id: 1, a: 2}));
+ assert.writeOK(coll.insert({_id: 2, a: 3}));
+
+ // Empty query.
+ assertResults([{_id: 0, a: 1}, {_id: 1, a: 2}, {_id: 2, a: 3}], {});
+
+ // Simple queries.
+ assertResults([{_id: 0, a: 1}], {a: 1});
+ assertResults([{_id: 1, a: 2}], {a: 2});
+ assertResults([{_id: 1, a: 2}, {_id: 2, a: 3}], {a: {$gt: 1}});
+ assertResults([{_id: 0, a: 1}, {_id: 1, a: 2}], {a: {$lte: 2}});
+ assertResults([{_id: 0, a: 1}, {_id: 2, a: 3}], {a: {$in: [1, 3]}});
+
+ // Regular expression.
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: 'x'}));
+ assert.writeOK(coll.insert({_id: 1, a: 'yx'}));
+ assertResults([{_id: 0, a: 'x'}], {a: /^x/});
+ assertResults([{_id: 0, a: 'x'}, {_id: 1, a: 'yx'}], {a: /x/});
+
+ // Dotted field.
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: {b: 4}}));
+ assert.writeOK(coll.insert({_id: 1, a: 2}));
+ assertResults([{_id: 0, a: {b: 4}}], {'a.b': 4});
+
+ // Value within an array.
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: [1, 2, 3]}));
+ assert.writeOK(coll.insert({_id: 1, a: [2, 2, 3]}));
+ assert.writeOK(coll.insert({_id: 2, a: [2, 2, 2]}));
+ assertResults([{_id: 0, a: [1, 2, 3]}, {_id: 1, a: [2, 2, 3]}], {a: 3});
+
+ // Missing, null, $exists matching.
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0}));
+ assert.writeOK(coll.insert({_id: 1, a: null}));
+ assert.writeOK(coll.insert({_id: 3, a: 0}));
+ assertResults([{_id: 0}, {_id: 1, a: null}], {a: null});
+ assertResults(null, {a: {$exists: true}});
+ assertResults(null, {a: {$exists: false}});
+
+ // $elemMatch
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: [1, 2]}));
+ assert.writeOK(coll.insert({_id: 1, a: [1, 2, 3]}));
+ assertResults([{_id: 1, a: [1, 2, 3]}], {a: {$elemMatch: {$gt: 1, $mod: [2, 1]}}});
+
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: [{b: 1}, {c: 2}]}));
+ assert.writeOK(coll.insert({_id: 1, a: [{b: 1, c: 2}]}));
+ assertResults([{_id: 1, a: [{b: 1, c: 2}]}], {a: {$elemMatch: {b: 1, c: 2}}});
+
+ // $size
+ coll.remove({});
+ assert.writeOK(coll.insert({}));
+ assert.writeOK(coll.insert({a: null}));
+ assert.writeOK(coll.insert({a: []}));
+ assert.writeOK(coll.insert({a: [1]}));
+ assert.writeOK(coll.insert({a: [1, 2]}));
+ assertResults(null, {a: {$size: 0}});
+ assertResults(null, {a: {$size: 1}});
+ assertResults(null, {a: {$size: 2}});
+
+ // $type
+ coll.remove({});
+ assert.writeOK(coll.insert({}));
+ assert.writeOK(coll.insert({a: null}));
+ assert.writeOK(coll.insert({a: NumberInt(1)}));
+ assert.writeOK(coll.insert({a: NumberLong(2)}));
+ assert.writeOK(coll.insert({a: 66.6}));
+ assert.writeOK(coll.insert({a: 'abc'}));
+ assert.writeOK(coll.insert({a: /xyz/}));
+ assert.writeOK(coll.insert({a: {q: 1}}));
+ assert.writeOK(coll.insert({a: true}));
+ assert.writeOK(coll.insert({a: new Date()}));
+ assert.writeOK(coll.insert({a: new ObjectId()}));
+ for (let type = 1; type <= 18; ++type) {
+ assertResults(null, {a: {$type: type}});
}
- checkMatchResults(false);
- coll.createIndex({a: 1});
- checkMatchResults(true);
- coll.createIndex({'a.b': 1});
- coll.createIndex({'a.c': 1});
- checkMatchResults(true);
+ coll.remove({});
+ assert.writeOK(coll.insert({_id: 0, a: 1}));
+ assert.writeOK(coll.insert({_id: 1, a: 2}));
+ assert.writeOK(coll.insert({_id: 2, a: 3}));
+
+ // $and
+ assertResults([{_id: 1, a: 2}], {$and: [{a: 2}, {_id: 1}]});
+ assertResults([], {$and: [{a: 1}, {_id: 1}]});
+ assertResults([{_id: 1, a: 2}, {_id: 2, a: 3}],
+ {$and: [{$or: [{_id: 1}, {a: 3}]}, {$or: [{_id: 2}, {a: 2}]}]});
+
+ // $or
+ assertResults([{_id: 0, a: 1}, {_id: 2, a: 3}], {$or: [{_id: 0}, {a: 3}]});
+}
+
+checkMatchResults(false);
+coll.createIndex({a: 1});
+checkMatchResults(true);
+coll.createIndex({'a.b': 1});
+coll.createIndex({'a.c': 1});
+checkMatchResults(true);
})();
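
The find/aggregate equivalence these assertions rely on can be stated in two lines; a minimal sketch with an illustrative collection name:

// A $match stage filters with exactly the semantics of find() on the same
// predicate, whether or not the match is folded into the query layer.
const matchDemo = db.match_demo;
matchDemo.drop();
matchDemo.insert({_id: 0, a: 1});
matchDemo.insert({_id: 1, a: 2});
assert.eq(matchDemo.find({a: {$gt: 1}}).toArray(),
          matchDemo.aggregate([{$match: {a: {$gt: 1}}}]).toArray());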
diff --git a/jstests/aggregation/bugs/match_swap_limit.js b/jstests/aggregation/bugs/match_swap_limit.js
index 3de26d6f4b5..7dabc7130ca 100644
--- a/jstests/aggregation/bugs/match_swap_limit.js
+++ b/jstests/aggregation/bugs/match_swap_limit.js
@@ -2,19 +2,19 @@
* Ensure that $match is always applied after $limit.
*/
(function() {
- "use strict";
+"use strict";
- let coll = db.jstests_match_swap_limit;
- coll.drop();
+let coll = db.jstests_match_swap_limit;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0, x: 1, y: 3}));
- assert.writeOK(coll.insert({_id: 1, x: 2, y: 2}));
- assert.writeOK(coll.insert({_id: 2, x: 3, y: 1}));
+assert.writeOK(coll.insert({_id: 0, x: 1, y: 3}));
+assert.writeOK(coll.insert({_id: 1, x: 2, y: 2}));
+assert.writeOK(coll.insert({_id: 2, x: 3, y: 1}));
- assert.eq([{_id: 1, x: 2, y: 2}],
- coll.aggregate([{$sort: {x: -1}}, {$limit: 2}, {$match: {y: {$gte: 2}}}]).toArray());
+assert.eq([{_id: 1, x: 2, y: 2}],
+ coll.aggregate([{$sort: {x: -1}}, {$limit: 2}, {$match: {y: {$gte: 2}}}]).toArray());
- assert.writeOK(coll.createIndex({x: 1}));
- assert.eq([{_id: 1, x: 2, y: 2}],
- coll.aggregate([{$sort: {x: -1}}, {$limit: 2}, {$match: {y: {$gte: 2}}}]).toArray());
+assert.writeOK(coll.createIndex({x: 1}));
+assert.eq([{_id: 1, x: 2, y: 2}],
+ coll.aggregate([{$sort: {x: -1}}, {$limit: 2}, {$match: {y: {$gte: 2}}}]).toArray());
}());
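
One concrete trace shows why the stage order matters here; a sketch over the test's own documents:

// Sorting by x descending and limiting to two keeps {_id: 2} and {_id: 1};
// only {_id: 1} satisfies y >= 2. If the optimizer wrongly hoisted $match
// above $limit, {_id: 0} would also survive and two documents would return.
const swapDemo = db.swap_limit_demo;
swapDemo.drop();
swapDemo.insert({_id: 0, x: 1, y: 3});
swapDemo.insert({_id: 1, x: 2, y: 2});
swapDemo.insert({_id: 2, x: 3, y: 1});
swapDemo.aggregate([{$sort: {x: -1}}, {$limit: 2}, {$match: {y: {$gte: 2}}}])
    .toArray();  // [{_id: 1, x: 2, y: 2}]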
diff --git a/jstests/aggregation/bugs/reverseArray.js b/jstests/aggregation/bugs/reverseArray.js
index 0fa4010654b..cf80c040171 100644
--- a/jstests/aggregation/bugs/reverseArray.js
+++ b/jstests/aggregation/bugs/reverseArray.js
@@ -4,29 +4,29 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
+"use strict";
- var coll = db.reverseArray;
- coll.drop();
+var coll = db.reverseArray;
+coll.drop();
- // We need a document to flow through the pipeline, even though we don't care what fields it
- // has.
- coll.insert({});
+// We need a document to flow through the pipeline, even though we don't care what fields it
+// has.
+coll.insert({});
- assertErrorCode(coll, [{$project: {reversed: {$reverseArray: 1}}}], 34435);
+assertErrorCode(coll, [{$project: {reversed: {$reverseArray: 1}}}], 34435);
- var res = coll.aggregate([{$project: {reversed: {$reverseArray: {$literal: [1, 2]}}}}]);
- var output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].reversed, [2, 1]);
+var res = coll.aggregate([{$project: {reversed: {$reverseArray: {$literal: [1, 2]}}}}]);
+var output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].reversed, [2, 1]);
- var res = coll.aggregate([{$project: {reversed: {$reverseArray: {$literal: [[1, 2]]}}}}]);
- var output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].reversed, [[1, 2]]);
+var res = coll.aggregate([{$project: {reversed: {$reverseArray: {$literal: [[1, 2]]}}}}]);
+var output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].reversed, [[1, 2]]);
- var res = coll.aggregate([{$project: {reversed: {$reverseArray: "$notAField"}}}]);
- var output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].reversed, null);
+var res = coll.aggregate([{$project: {reversed: {$reverseArray: "$notAField"}}}]);
+var output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].reversed, null);
}());
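For context, a minimal shell sketch of the $reverseArray behavior tested above (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_reverse' is a hypothetical collection name.
    var c = db.example_reverse;
    c.drop();
    assert.writeOK(c.insert({_id: 0, arr: [1, 2, 3]}));
    // $reverseArray returns a reversed copy; a missing input field yields null.
    var out =
        c.aggregate([{$project: {r: {$reverseArray: "$arr"}, m: {$reverseArray: "$missing"}}}])
            .toArray();
    assert.eq(out[0].r, [3, 2, 1]);
    assert.eq(out[0].m, null);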
diff --git a/jstests/aggregation/bugs/server10176.js b/jstests/aggregation/bugs/server10176.js
index 988beb24f13..9283c819342 100644
--- a/jstests/aggregation/bugs/server10176.js
+++ b/jstests/aggregation/bugs/server10176.js
@@ -4,61 +4,61 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- var coll = db.abs_expr;
- coll.drop();
+var coll = db.abs_expr;
+coll.drop();
- // valid types (numeric and null)
- assert.writeOK(coll.insert({_id: 0, a: 5}));
- assert.writeOK(coll.insert({_id: 1, a: -5}));
- assert.writeOK(coll.insert({_id: 2, a: 5.5}));
- assert.writeOK(coll.insert({_id: 3, a: -5.5}));
- assert.writeOK(coll.insert({_id: 4, a: NumberInt("5")}));
- assert.writeOK(coll.insert({_id: 5, a: NumberInt("-5")}));
- assert.writeOK(coll.insert({_id: 6, a: NumberLong("5")}));
- assert.writeOK(coll.insert({_id: 7, a: NumberLong("-5")}));
- assert.writeOK(coll.insert({_id: 8, a: 0.0}));
- assert.writeOK(coll.insert({_id: 9, a: -0.0}));
- assert.writeOK(coll.insert({_id: 10, a: NumberInt("0")}));
- // INT_MIN is -(2 ^ 31)
- assert.writeOK(coll.insert({_id: 11, a: NumberInt(-Math.pow(2, 31))}));
- assert.writeOK(coll.insert({_id: 12, a: -Math.pow(2, 31)}));
- // 1152921504606846977 is 2^60 + 1, an integer that can't be represented precisely as a double
- assert.writeOK(coll.insert({_id: 13, a: NumberLong("1152921504606846977")}));
- assert.writeOK(coll.insert({_id: 14, a: NumberLong("-1152921504606846977")}));
- assert.writeOK(coll.insert({_id: 15, a: null}));
- assert.writeOK(coll.insert({_id: 16, a: undefined}));
- assert.writeOK(coll.insert({_id: 17, a: NaN}));
- assert.writeOK(coll.insert({_id: 18}));
+// valid types (numeric and null)
+assert.writeOK(coll.insert({_id: 0, a: 5}));
+assert.writeOK(coll.insert({_id: 1, a: -5}));
+assert.writeOK(coll.insert({_id: 2, a: 5.5}));
+assert.writeOK(coll.insert({_id: 3, a: -5.5}));
+assert.writeOK(coll.insert({_id: 4, a: NumberInt("5")}));
+assert.writeOK(coll.insert({_id: 5, a: NumberInt("-5")}));
+assert.writeOK(coll.insert({_id: 6, a: NumberLong("5")}));
+assert.writeOK(coll.insert({_id: 7, a: NumberLong("-5")}));
+assert.writeOK(coll.insert({_id: 8, a: 0.0}));
+assert.writeOK(coll.insert({_id: 9, a: -0.0}));
+assert.writeOK(coll.insert({_id: 10, a: NumberInt("0")}));
+// INT_MIN is -(2 ^ 31)
+assert.writeOK(coll.insert({_id: 11, a: NumberInt(-Math.pow(2, 31))}));
+assert.writeOK(coll.insert({_id: 12, a: -Math.pow(2, 31)}));
+// 1152921504606846977 is 2^60 + 1, an integer that can't be represented precisely as a double
+assert.writeOK(coll.insert({_id: 13, a: NumberLong("1152921504606846977")}));
+assert.writeOK(coll.insert({_id: 14, a: NumberLong("-1152921504606846977")}));
+assert.writeOK(coll.insert({_id: 15, a: null}));
+assert.writeOK(coll.insert({_id: 16, a: undefined}));
+assert.writeOK(coll.insert({_id: 17, a: NaN}));
+assert.writeOK(coll.insert({_id: 18}));
- // valid use of $abs: numbers become positive, null/undefined/nonexistent become null
+// valid use of $abs: numbers become positive, null/undefined/nonexistent become null
- var results = coll.aggregate([{$project: {a: {$abs: "$a"}}}, {$sort: {_id: 1}}]).toArray();
- assert.eq(results, [
- {_id: 0, a: 5},
- {_id: 1, a: 5},
- {_id: 2, a: 5.5},
- {_id: 3, a: 5.5},
- {_id: 4, a: 5},
- {_id: 5, a: 5},
- {_id: 6, a: NumberLong("5")},
- {_id: 7, a: NumberLong("5")},
- {_id: 8, a: 0},
- {_id: 9, a: 0},
- {_id: 10, a: 0},
- {_id: 11, a: NumberLong(Math.pow(2, 31))},
- {_id: 12, a: Math.pow(2, 31)},
- {_id: 13, a: NumberLong("1152921504606846977")},
- {_id: 14, a: NumberLong("1152921504606846977")},
- {_id: 15, a: null},
- {_id: 16, a: null},
- {_id: 17, a: NaN},
- {_id: 18, a: null},
- ]);
- // Invalid
+var results = coll.aggregate([{$project: {a: {$abs: "$a"}}}, {$sort: {_id: 1}}]).toArray();
+assert.eq(results, [
+ {_id: 0, a: 5},
+ {_id: 1, a: 5},
+ {_id: 2, a: 5.5},
+ {_id: 3, a: 5.5},
+ {_id: 4, a: 5},
+ {_id: 5, a: 5},
+ {_id: 6, a: NumberLong("5")},
+ {_id: 7, a: NumberLong("5")},
+ {_id: 8, a: 0},
+ {_id: 9, a: 0},
+ {_id: 10, a: 0},
+ {_id: 11, a: NumberLong(Math.pow(2, 31))},
+ {_id: 12, a: Math.pow(2, 31)},
+ {_id: 13, a: NumberLong("1152921504606846977")},
+ {_id: 14, a: NumberLong("1152921504606846977")},
+ {_id: 15, a: null},
+ {_id: 16, a: null},
+ {_id: 17, a: NaN},
+ {_id: 18, a: null},
+]);
+// Invalid
- // using $abs on string
- assertErrorCode(coll, [{$project: {a: {$abs: "string"}}}], 28765);
+// using $abs on string
+assertErrorCode(coll, [{$project: {a: {$abs: "string"}}}], 28765);
- // using $abs on LLONG_MIN (-2 ^ 63)
- assertErrorCode(coll, [{$project: {a: {$abs: NumberLong("-9223372036854775808")}}}], 28680);
+// using $abs on LLONG_MIN (-2 ^ 63)
+assertErrorCode(coll, [{$project: {a: {$abs: NumberLong("-9223372036854775808")}}}], 28680);
}());
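As a quick illustration of the $abs contract tested above (sketch only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_abs' is a hypothetical collection name.
    var c = db.example_abs;
    c.drop();
    assert.writeOK(c.insert({_id: 0, a: -5.5}));
    assert.writeOK(c.insert({_id: 1, a: null}));
    // Numeric input becomes non-negative; null, undefined, and missing fields
    // all propagate as null. Strings and LLONG_MIN raise errors instead.
    var out = c.aggregate([{$project: {a: {$abs: "$a"}}}, {$sort: {_id: 1}}]).toArray();
    assert.eq(out, [{_id: 0, a: 5.5}, {_id: 1, a: null}]);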
diff --git a/jstests/aggregation/bugs/server11118.js b/jstests/aggregation/bugs/server11118.js
index 27b3fa7597e..46e79c3a7cc 100644
--- a/jstests/aggregation/bugs/server11118.js
+++ b/jstests/aggregation/bugs/server11118.js
@@ -1,154 +1,153 @@
// SERVER-11118 Tests for $dateToString
(function() {
- "use strict";
-
- load('jstests/aggregation/extras/utils.js');
-
- const coll = db.server11118;
-
- // Used to verify expected output format
- function testFormat(date, formatStr, expectedStr) {
- coll.drop();
- assert.writeOK(coll.insert({date: date}));
-
- const res =
- coll.aggregate([{
- $project:
- {_id: 0, formatted: {$dateToString: {format: formatStr, date: "$date"}}}
- }])
- .toArray();
-
- assert.eq(res[0].formatted, expectedStr);
- }
-
- // Used to verify that server recognizes bad formats
- function testFormatError(formatObj, errCode) {
- coll.drop();
- assert.writeOK(coll.insert({date: ISODate()}));
-
- assertErrorCode(coll, {$project: {_id: 0, formatted: {$dateToString: formatObj}}}, errCode);
- }
-
- // Used to verify that only date values are accepted for date parameter
- function testDateValueError(dateVal, errCode) {
- coll.drop();
- assert.writeOK(coll.insert({date: dateVal}));
-
- assertErrorCode(
- coll, {$project: {formatted: {$dateToString: {format: "%Y", date: "$date"}}}}, errCode);
- }
-
- const now = ISODate();
-
- // Use all modifiers we can test with js provided function
- testFormat(now, "%%-%Y-%m-%d-%H-%M-%S-%L", [
- "%",
- now.getUTCFullYear().zeroPad(4),
- (now.getUTCMonth() + 1).zeroPad(2),
- now.getUTCDate().zeroPad(2),
- now.getUTCHours().zeroPad(2),
- now.getUTCMinutes().zeroPad(2),
- now.getUTCSeconds().zeroPad(2),
- now.getUTCMilliseconds().zeroPad(3)
- ].join("-"));
-
- // Padding tests
- const padme = ISODate("2001-02-03T04:05:06.007Z");
-
- testFormat(padme, "%%", "%");
- testFormat(padme, "%Y", padme.getUTCFullYear().zeroPad(4));
- testFormat(padme, "%m", (padme.getUTCMonth() + 1).zeroPad(2));
- testFormat(padme, "%d", padme.getUTCDate().zeroPad(2));
- testFormat(padme, "%H", padme.getUTCHours().zeroPad(2));
- testFormat(padme, "%M", padme.getUTCMinutes().zeroPad(2));
- testFormat(padme, "%S", padme.getUTCSeconds().zeroPad(2));
- testFormat(padme, "%L", padme.getUTCMilliseconds().zeroPad(3));
-
- // no space and multiple characters between modifiers
- testFormat(now, "%d%d***%d***%d**%d*%d", [
- now.getUTCDate().zeroPad(2),
- now.getUTCDate().zeroPad(2),
- "***",
- now.getUTCDate().zeroPad(2),
- "***",
- now.getUTCDate().zeroPad(2),
- "**",
- now.getUTCDate().zeroPad(2),
- "*",
- now.getUTCDate().zeroPad(2)
- ].join(""));
-
- // JS doesn't have equivalents of these format specifiers
- testFormat(ISODate('1999-01-02 03:04:05.006Z'), "%U-%w-%j", "00-7-002");
-
- // Missing date
- testFormatError({format: "%Y"}, 18628);
-
- // Extra field
- testFormatError({format: "%Y", date: "$date", extra: "whyamIhere"}, 18534);
-
- // Not an object
- testFormatError(["%Y", "$date"], 18629);
-
- // Use invalid modifier at middle of string
- testFormatError({format: "%Y-%q", date: "$date"}, 18536);
-
- // Odd number of percent signs at end
- testFormatError({format: "%U-%w-%j-%%%", date: "$date"}, 18535);
-
- // Odd number of percent signs at middle
- // will get interpreted as an invalid modifier since it will try to use '%A'
- testFormatError({format: "AAAAA%%%AAAAAA", date: "$date"}, 18536);
-
- // Format parameter not a string
- testFormatError({format: {iamalion: "roar"}, date: "$date"}, 18533);
-
- ///
- /// Additional Tests
- ///
-
- // Test document
- const date = ISODate("1999-08-29");
-
- testFormat(date, "%%d", "%d");
-
- // A very long string of "%"s
- const longstr = Array(1000).join("%%");
- const halfstr = Array(1000).join("%");
- testFormat(date, longstr, halfstr);
-
- // Dates as null (should return a null)
- testFormat(null, "%Y", null);
-
- ///
- /// Using non-date fields as date parameter *should fail*
- ///
-
- // Array
- testDateValueError([], 16006);
- testDateValueError([1, 2, 3], 16006);
-
- // Sub-object
- testDateValueError({}, 16006);
- testDateValueError({a: 1}, 16006);
-
- // String
- testDateValueError("blahblahblah", 16006);
-
- // Integer
- testDateValueError(1234, 16006);
-
- ///
- /// Using non-string fields as format strings
- ///
-
- // Array
- testFormatError({format: [], date: "$date"}, 18533);
- testFormatError({format: [1, 2, 3], date: "$date"}, 18533);
-
- // Integer
- testFormatError({format: 1, date: "$date"}, 18533);
+"use strict";
+
+load('jstests/aggregation/extras/utils.js');
+
+const coll = db.server11118;
+
+// Used to verify expected output format
+function testFormat(date, formatStr, expectedStr) {
+ coll.drop();
+ assert.writeOK(coll.insert({date: date}));
+
+ const res =
+ coll.aggregate([
+ {$project: {_id: 0, formatted: {$dateToString: {format: formatStr, date: "$date"}}}}
+ ])
+ .toArray();
+
+ assert.eq(res[0].formatted, expectedStr);
+}
+
+// Used to verify that server recognizes bad formats
+function testFormatError(formatObj, errCode) {
+ coll.drop();
+ assert.writeOK(coll.insert({date: ISODate()}));
+
+ assertErrorCode(coll, {$project: {_id: 0, formatted: {$dateToString: formatObj}}}, errCode);
+}
+
+// Used to verify that only date values are accepted for date parameter
+function testDateValueError(dateVal, errCode) {
+ coll.drop();
+ assert.writeOK(coll.insert({date: dateVal}));
+
+ assertErrorCode(
+ coll, {$project: {formatted: {$dateToString: {format: "%Y", date: "$date"}}}}, errCode);
+}
+
+const now = ISODate();
+
+// Use all modifiers we can test with js provided function
+testFormat(now, "%%-%Y-%m-%d-%H-%M-%S-%L", [
+ "%",
+ now.getUTCFullYear().zeroPad(4),
+ (now.getUTCMonth() + 1).zeroPad(2),
+ now.getUTCDate().zeroPad(2),
+ now.getUTCHours().zeroPad(2),
+ now.getUTCMinutes().zeroPad(2),
+ now.getUTCSeconds().zeroPad(2),
+ now.getUTCMilliseconds().zeroPad(3)
+].join("-"));
+
+// Padding tests
+const padme = ISODate("2001-02-03T04:05:06.007Z");
+
+testFormat(padme, "%%", "%");
+testFormat(padme, "%Y", padme.getUTCFullYear().zeroPad(4));
+testFormat(padme, "%m", (padme.getUTCMonth() + 1).zeroPad(2));
+testFormat(padme, "%d", padme.getUTCDate().zeroPad(2));
+testFormat(padme, "%H", padme.getUTCHours().zeroPad(2));
+testFormat(padme, "%M", padme.getUTCMinutes().zeroPad(2));
+testFormat(padme, "%S", padme.getUTCSeconds().zeroPad(2));
+testFormat(padme, "%L", padme.getUTCMilliseconds().zeroPad(3));
+
+// no space and multiple characters between modifiers
+testFormat(now, "%d%d***%d***%d**%d*%d", [
+ now.getUTCDate().zeroPad(2),
+ now.getUTCDate().zeroPad(2),
+ "***",
+ now.getUTCDate().zeroPad(2),
+ "***",
+ now.getUTCDate().zeroPad(2),
+ "**",
+ now.getUTCDate().zeroPad(2),
+ "*",
+ now.getUTCDate().zeroPad(2)
+].join(""));
+
+// JS doesn't have equivalents of these format specifiers
+testFormat(ISODate('1999-01-02 03:04:05.006Z'), "%U-%w-%j", "00-7-002");
+
+// Missing date
+testFormatError({format: "%Y"}, 18628);
+
+// Extra field
+testFormatError({format: "%Y", date: "$date", extra: "whyamIhere"}, 18534);
+
+// Not an object
+testFormatError(["%Y", "$date"], 18629);
+
+// Use invalid modifier at middle of string
+testFormatError({format: "%Y-%q", date: "$date"}, 18536);
+
+// Odd number of percent signs at end
+testFormatError({format: "%U-%w-%j-%%%", date: "$date"}, 18535);
+
+// Odd number of percent signs at middle
+// will get interpreted as an invalid modifier since it will try to use '%A'
+testFormatError({format: "AAAAA%%%AAAAAA", date: "$date"}, 18536);
+
+// Format parameter not a string
+testFormatError({format: {iamalion: "roar"}, date: "$date"}, 18533);
+
+///
+/// Additional Tests
+///
+
+// Test document
+const date = ISODate("1999-08-29");
+
+testFormat(date, "%%d", "%d");
+
+// A very long string of "%"s
+const longstr = Array(1000).join("%%");
+const halfstr = Array(1000).join("%");
+testFormat(date, longstr, halfstr);
+
+// Dates as null (should return a null)
+testFormat(null, "%Y", null);
+
+///
+/// Using non-date fields as date parameter *should fail*
+///
+
+// Array
+testDateValueError([], 16006);
+testDateValueError([1, 2, 3], 16006);
+
+// Sub-object
+testDateValueError({}, 16006);
+testDateValueError({a: 1}, 16006);
+
+// String
+testDateValueError("blahblahblah", 16006);
+
+// Integer
+testDateValueError(1234, 16006);
+
+///
+/// Using non-string fields as format strings
+///
+
+// Array
+testFormatError({format: [], date: "$date"}, 18533);
+testFormatError({format: [1, 2, 3], date: "$date"}, 18533);
+
+// Integer
+testFormatError({format: 1, date: "$date"}, 18533);
- // Date
- testFormatError({format: ISODate(), date: "$date"}, 18533);
+// Date
+testFormatError({format: ISODate(), date: "$date"}, 18533);
})();
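For context, a minimal shell sketch of the $dateToString formatting rules tested above (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_dts' is a hypothetical collection name.
    var c = db.example_dts;
    c.drop();
    assert.writeOK(c.insert({when: ISODate("1999-08-29T00:00:00Z")}));
    // %Y, %m, and %d are zero-padded; %% emits a literal percent sign.
    var out =
        c.aggregate([
             {$project: {_id: 0, s: {$dateToString: {format: "%Y-%m-%d%%", date: "$when"}}}}
         ]).toArray();
    assert.eq(out[0].s, "1999-08-29%");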
diff --git a/jstests/aggregation/bugs/server11675.js b/jstests/aggregation/bugs/server11675.js
index 759b4393b30..2d02a1ff53e 100644
--- a/jstests/aggregation/bugs/server11675.js
+++ b/jstests/aggregation/bugs/server11675.js
@@ -1,227 +1,224 @@
// SERVER-11675 Text search integration with aggregation
(function() {
- load('jstests/aggregation/extras/utils.js'); // For 'assertErrorCode'.
- load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'
-
- const coll = db.server11675;
- coll.drop();
-
- assert.writeOK(coll.insert({_id: 1, text: "apple", words: 1}));
- assert.writeOK(coll.insert({_id: 2, text: "banana", words: 1}));
- assert.writeOK(coll.insert({_id: 3, text: "apple banana", words: 2}));
- assert.writeOK(coll.insert({_id: 4, text: "cantaloupe", words: 1}));
-
- assert.commandWorked(coll.createIndex({text: "text"}));
-
- // query should have subfields query, project, sort, skip and limit. All but query are optional.
- const assertSameAsFind = function(query) {
- let cursor = coll.find(query.query);
- const pipeline = [{$match: query.query}];
-
- if ('project' in query) {
- cursor = coll.find(query.query, query.project); // no way to add to constructed cursor
- pipeline.push({$project: query.project});
- }
-
- if ('sort' in query) {
- cursor = cursor.sort(query.sort);
- pipeline.push({$sort: query.sort});
- }
-
- if ('skip' in query) {
- cursor = cursor.skip(query.skip);
- pipeline.push({$skip: query.skip});
- }
-
- if ('limit' in query) {
- cursor = cursor.limit(query.limit);
- pipeline.push({$limit: query.limit});
- }
-
- const findRes = cursor.toArray();
- const aggRes = coll.aggregate(pipeline).toArray();
-
- // If the query doesn't specify its own sort, there is a possibility that find() and
- // aggregate() will return the same results in different orders. We sort by _id on the
- // client side, so that the results still count as equal.
- if (!query.hasOwnProperty("sort")) {
- findRes.sort(function(a, b) {
- return a._id - b._id;
- });
- aggRes.sort(function(a, b) {
- return a._id - b._id;
- });
- }
-
- assert.docEq(aggRes, findRes);
- };
-
- assertSameAsFind({query: {}}); // sanity check
- assertSameAsFind({query: {$text: {$search: "apple"}}});
- assertSameAsFind({query: {_id: 1, $text: {$search: "apple"}}});
- assertSameAsFind(
- {query: {$text: {$search: "apple"}}, project: {_id: 1, score: {$meta: "textScore"}}});
- assertSameAsFind({
- query: {$text: {$search: "apple banana"}},
- project: {_id: 1, score: {$meta: "textScore"}}
- });
- assertSameAsFind({
- query: {$text: {$search: "apple banana"}},
- project: {_id: 1, score: {$meta: "textScore"}},
- sort: {score: {$meta: "textScore"}}
- });
- assertSameAsFind({
- query: {$text: {$search: "apple banana"}},
- project: {_id: 1, score: {$meta: "textScore"}},
- sort: {score: {$meta: "textScore"}},
- limit: 1
- });
- assertSameAsFind({
- query: {$text: {$search: "apple banana"}},
- project: {_id: 1, score: {$meta: "textScore"}},
- sort: {score: {$meta: "textScore"}},
- skip: 1
- });
- assertSameAsFind({
- query: {$text: {$search: "apple banana"}},
- project: {_id: 1, score: {$meta: "textScore"}},
- sort: {score: {$meta: "textScore"}},
- skip: 1,
- limit: 1
- });
-
- // $meta sort specification should be rejected if it has additional keys.
- assert.throws(function() {
- coll.aggregate([
- {$match: {$text: {$search: 'apple banana'}}},
- {$sort: {textScore: {$meta: 'textScore', extra: 1}}}
- ])
- .itcount();
- });
-
- // $meta sort specification should be rejected if the type of meta sort is not known.
- assert.throws(function() {
- coll.aggregate([
- {$match: {$text: {$search: 'apple banana'}}},
- {$sort: {textScore: {$meta: 'unknown'}}}
- ])
- .itcount();
- });
-
- // Sort specification should be rejected if a $-keyword other than $meta is used.
- assert.throws(function() {
- coll.aggregate([
- {$match: {$text: {$search: 'apple banana'}}},
- {$sort: {textScore: {$notMeta: 'textScore'}}}
- ])
- .itcount();
- });
-
- // Sort specification should be rejected if it is a string, not an object with $meta.
- assert.throws(function() {
- coll.aggregate(
- [{$match: {$text: {$search: 'apple banana'}}}, {$sort: {textScore: 'textScore'}}])
- .itcount();
- });
-
- // sharded find requires projecting the score to sort, but sharded agg does not.
- var findRes = coll.find({$text: {$search: "apple banana"}}, {textScore: {$meta: 'textScore'}})
- .sort({textScore: {$meta: 'textScore'}})
- .map(function(obj) {
- delete obj.textScore; // remove it to match agg output
- return obj;
- });
- let res = coll.aggregate([
- {$match: {$text: {$search: 'apple banana'}}},
- {$sort: {textScore: {$meta: 'textScore'}}}
- ])
- .toArray();
- assert.eq(res, findRes);
-
- // Make sure {$meta: 'textScore'} can be used as a sub-expression
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple'}}},
- {
- $project: {
- words: 1,
- score: {$meta: 'textScore'},
- wordsTimesScore: {$multiply: ['$words', {$meta: 'textScore'}]}
- }
- }
- ])
- .toArray();
- assert.eq(res[0].wordsTimesScore, res[0].words * res[0].score, tojson(res));
-
- // And can be used in $group
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple banana'}}},
- {$group: {_id: {$meta: 'textScore'}, score: {$first: {$meta: 'textScore'}}}}
- ])
- .toArray();
- assert.eq(res[0]._id, res[0].score, tojson(res));
-
- // Make sure metadata crosses shard -> merger boundary
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple'}}},
- {$project: {scoreOnShard: {$meta: 'textScore'}}},
- {$limit: 1}, // force a split. later stages run on merger
- {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}}
- ])
- .toArray();
- assert.eq(res[0].scoreOnMerger, res[0].scoreOnShard);
- let score = res[0].scoreOnMerger; // save for later tests
-
- // Make sure metadata crosses shard -> merger boundary even if not used on shard
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple'}}},
- {$limit: 1}, // force a split. later stages run on merger
- {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}}
+load('jstests/aggregation/extras/utils.js'); // For 'assertErrorCode'.
+load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'
+
+const coll = db.server11675;
+coll.drop();
+
+assert.writeOK(coll.insert({_id: 1, text: "apple", words: 1}));
+assert.writeOK(coll.insert({_id: 2, text: "banana", words: 1}));
+assert.writeOK(coll.insert({_id: 3, text: "apple banana", words: 2}));
+assert.writeOK(coll.insert({_id: 4, text: "cantaloupe", words: 1}));
+
+assert.commandWorked(coll.createIndex({text: "text"}));
+
+// query should have subfields query, project, sort, skip and limit. All but query are optional.
+const assertSameAsFind = function(query) {
+ let cursor = coll.find(query.query);
+ const pipeline = [{$match: query.query}];
+
+ if ('project' in query) {
+ cursor = coll.find(query.query, query.project); // no way to add to constructed cursor
+ pipeline.push({$project: query.project});
+ }
+
+ if ('sort' in query) {
+ cursor = cursor.sort(query.sort);
+ pipeline.push({$sort: query.sort});
+ }
+
+ if ('skip' in query) {
+ cursor = cursor.skip(query.skip);
+ pipeline.push({$skip: query.skip});
+ }
+
+ if ('limit' in query) {
+ cursor = cursor.limit(query.limit);
+ pipeline.push({$limit: query.limit});
+ }
+
+ const findRes = cursor.toArray();
+ const aggRes = coll.aggregate(pipeline).toArray();
+
+ // If the query doesn't specify its own sort, there is a possibility that find() and
+ // aggregate() will return the same results in different orders. We sort by _id on the
+ // client side, so that the results still count as equal.
+ if (!query.hasOwnProperty("sort")) {
+ findRes.sort(function(a, b) {
+ return a._id - b._id;
+ });
+ aggRes.sort(function(a, b) {
+ return a._id - b._id;
+ });
+ }
+
+ assert.docEq(aggRes, findRes);
+};
+
+assertSameAsFind({query: {}}); // sanity check
+assertSameAsFind({query: {$text: {$search: "apple"}}});
+assertSameAsFind({query: {_id: 1, $text: {$search: "apple"}}});
+assertSameAsFind(
+ {query: {$text: {$search: "apple"}}, project: {_id: 1, score: {$meta: "textScore"}}});
+assertSameAsFind(
+ {query: {$text: {$search: "apple banana"}}, project: {_id: 1, score: {$meta: "textScore"}}});
+assertSameAsFind({
+ query: {$text: {$search: "apple banana"}},
+ project: {_id: 1, score: {$meta: "textScore"}},
+ sort: {score: {$meta: "textScore"}}
+});
+assertSameAsFind({
+ query: {$text: {$search: "apple banana"}},
+ project: {_id: 1, score: {$meta: "textScore"}},
+ sort: {score: {$meta: "textScore"}},
+ limit: 1
+});
+assertSameAsFind({
+ query: {$text: {$search: "apple banana"}},
+ project: {_id: 1, score: {$meta: "textScore"}},
+ sort: {score: {$meta: "textScore"}},
+ skip: 1
+});
+assertSameAsFind({
+ query: {$text: {$search: "apple banana"}},
+ project: {_id: 1, score: {$meta: "textScore"}},
+ sort: {score: {$meta: "textScore"}},
+ skip: 1,
+ limit: 1
+});
+
+// $meta sort specification should be rejected if it has additional keys.
+assert.throws(function() {
+ coll.aggregate([
+ {$match: {$text: {$search: 'apple banana'}}},
+ {$sort: {textScore: {$meta: 'textScore', extra: 1}}}
+ ])
+ .itcount();
+});
+
+// $meta sort specification should be rejected if the type of meta sort is not known.
+assert.throws(function() {
+ coll.aggregate([
+ {$match: {$text: {$search: 'apple banana'}}},
+ {$sort: {textScore: {$meta: 'unknown'}}}
+ ])
+ .itcount();
+});
+
+// Sort specification should be rejected if a $-keyword other than $meta is used.
+assert.throws(function() {
+ coll.aggregate([
+ {$match: {$text: {$search: 'apple banana'}}},
+ {$sort: {textScore: {$notMeta: 'textScore'}}}
+ ])
+ .itcount();
+});
+
+// Sort specification should be rejected if it is a string, not an object with $meta.
+assert.throws(function() {
+ coll.aggregate(
+ [{$match: {$text: {$search: 'apple banana'}}}, {$sort: {textScore: 'textScore'}}])
+ .itcount();
+});
+
+// sharded find requires projecting the score to sort, but sharded agg does not.
+var findRes = coll.find({$text: {$search: "apple banana"}}, {textScore: {$meta: 'textScore'}})
+ .sort({textScore: {$meta: 'textScore'}})
+ .map(function(obj) {
+ delete obj.textScore; // remove it to match agg output
+ return obj;
+ });
+let res = coll.aggregate([
+ {$match: {$text: {$search: 'apple banana'}}},
+ {$sort: {textScore: {$meta: 'textScore'}}}
])
.toArray();
- assert.eq(res[0].scoreOnMerger, score);
-
- // Make sure metadata works if first $project doesn't use it.
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple'}}},
- {$project: {_id: 1}},
- {$project: {_id: 1, score: {$meta: 'textScore'}}}
- ])
- .toArray();
- assert.eq(res[0].score, score);
-
- // Make sure the pipeline fails if it tries to reference the text score and it doesn't exist.
- res = coll.runCommand(
- {aggregate: coll.getName(), pipeline: [{$project: {_id: 1, score: {$meta: 'textScore'}}}]});
- assert.commandFailed(res);
-
- // Make sure the metadata is 'missing()' when it doesn't exist because the document changed
- res = coll.aggregate([
- {$match: {_id: 1, $text: {$search: 'apple banana'}}},
- {$group: {_id: 1, score: {$first: {$meta: 'textScore'}}}},
- {$project: {_id: 1, scoreAgain: {$meta: 'textScore'}}},
- ])
- .toArray();
- assert(!("scoreAgain" in res[0]));
-
- // Make sure metadata works after a $unwind
- assert.writeOK(coll.insert({_id: 5, text: 'mango', words: [1, 2, 3]}));
- res = coll.aggregate([
- {$match: {$text: {$search: 'mango'}}},
- {$project: {score: {$meta: "textScore"}, _id: 1, words: 1}},
- {$unwind: '$words'},
- {$project: {scoreAgain: {$meta: "textScore"}, score: 1}}
- ])
- .toArray();
- assert.eq(res[0].scoreAgain, res[0].score);
-
- // Error checking
- // $match, but wrong position
- assertErrorCode(
- coll, [{$sort: {text: 1}}, {$match: {$text: {$search: 'apple banana'}}}], 17313);
-
- // wrong $stage, but correct position
- assertErrorCode(coll,
- [{$project: {searchValue: {$text: {$search: 'apple banana'}}}}],
- ErrorCodes.InvalidPipelineOperator);
- assertErrorCode(coll, [{$sort: {$text: {$search: 'apple banana'}}}], 17312);
+assert.eq(res, findRes);
+
+// Make sure {$meta: 'textScore'} can be used as a sub-expression
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple'}}},
+ {
+ $project: {
+ words: 1,
+ score: {$meta: 'textScore'},
+ wordsTimesScore: {$multiply: ['$words', {$meta: 'textScore'}]}
+ }
+ }
+ ])
+ .toArray();
+assert.eq(res[0].wordsTimesScore, res[0].words * res[0].score, tojson(res));
+
+// And can be used in $group
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple banana'}}},
+ {$group: {_id: {$meta: 'textScore'}, score: {$first: {$meta: 'textScore'}}}}
+ ])
+ .toArray();
+assert.eq(res[0]._id, res[0].score, tojson(res));
+
+// Make sure metadata crosses shard -> merger boundary
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple'}}},
+ {$project: {scoreOnShard: {$meta: 'textScore'}}},
+ {$limit: 1}, // force a split. later stages run on merger
+ {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}}
+ ])
+ .toArray();
+assert.eq(res[0].scoreOnMerger, res[0].scoreOnShard);
+let score = res[0].scoreOnMerger; // save for later tests
+
+// Make sure metadata crosses shard -> merger boundary even if not used on shard
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple'}}},
+ {$limit: 1}, // force a split. later stages run on merger
+ {$project: {scoreOnShard: 1, scoreOnMerger: {$meta: 'textScore'}}}
+ ])
+ .toArray();
+assert.eq(res[0].scoreOnMerger, score);
+
+// Make sure metadata works if first $project doesn't use it.
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple'}}},
+ {$project: {_id: 1}},
+ {$project: {_id: 1, score: {$meta: 'textScore'}}}
+ ])
+ .toArray();
+assert.eq(res[0].score, score);
+
+// Make sure the pipeline fails if it tries to reference the text score and it doesn't exist.
+res = coll.runCommand(
+ {aggregate: coll.getName(), pipeline: [{$project: {_id: 1, score: {$meta: 'textScore'}}}]});
+assert.commandFailed(res);
+
+// Make sure the metadata is 'missing()' when it doesn't exist because the document changed
+res = coll.aggregate([
+ {$match: {_id: 1, $text: {$search: 'apple banana'}}},
+ {$group: {_id: 1, score: {$first: {$meta: 'textScore'}}}},
+ {$project: {_id: 1, scoreAgain: {$meta: 'textScore'}}},
+ ])
+ .toArray();
+assert(!("scoreAgain" in res[0]));
+
+// Make sure metadata works after a $unwind
+assert.writeOK(coll.insert({_id: 5, text: 'mango', words: [1, 2, 3]}));
+res = coll.aggregate([
+ {$match: {$text: {$search: 'mango'}}},
+ {$project: {score: {$meta: "textScore"}, _id: 1, words: 1}},
+ {$unwind: '$words'},
+ {$project: {scoreAgain: {$meta: "textScore"}, score: 1}}
+ ])
+ .toArray();
+assert.eq(res[0].scoreAgain, res[0].score);
+
+// Error checking
+// $match, but wrong position
+assertErrorCode(coll, [{$sort: {text: 1}}, {$match: {$text: {$search: 'apple banana'}}}], 17313);
+
+// wrong $stage, but correct position
+assertErrorCode(coll,
+ [{$project: {searchValue: {$text: {$search: 'apple banana'}}}}],
+ ErrorCodes.InvalidPipelineOperator);
+assertErrorCode(coll, [{$sort: {$text: {$search: 'apple banana'}}}], 17312);
})();
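To make the textScore metadata usage concrete, a minimal shell sketch (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_text' is a hypothetical collection name.
    var c = db.example_text;
    c.drop();
    assert.writeOK(c.insert({_id: 1, text: "apple"}));
    assert.commandWorked(c.createIndex({text: "text"}));
    // {$meta: 'textScore'} is only available downstream of a $text $match;
    // here it is projected directly and also used as a sub-expression.
    var out = c.aggregate([
                   {$match: {$text: {$search: "apple"}}},
                   {
                       $project: {
                           score: {$meta: "textScore"},
                           doubled: {$multiply: [2, {$meta: "textScore"}]}
                       }
                   }
               ]).toArray();
    assert.eq(out[0].doubled, 2 * out[0].score);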
diff --git a/jstests/aggregation/bugs/server12015.js b/jstests/aggregation/bugs/server12015.js
index 1b59a59545c..2c2b34d126c 100644
--- a/jstests/aggregation/bugs/server12015.js
+++ b/jstests/aggregation/bugs/server12015.js
@@ -9,77 +9,79 @@
load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq.
(function() {
- "use strict";
- const coll = db.server12015;
- coll.drop();
- const indexSpec = {a: 1, b: 1};
+"use strict";
+const coll = db.server12015;
+coll.drop();
+const indexSpec = {
+ a: 1,
+ b: 1
+};
- assert.writeOK(coll.insert({_id: 0, a: 0, b: 0}));
- assert.writeOK(coll.insert({_id: 1, a: 0, b: 1}));
- assert.writeOK(coll.insert({_id: 2, a: 1, b: 0}));
- assert.writeOK(coll.insert({_id: 3, a: 1, b: 1}));
+assert.writeOK(coll.insert({_id: 0, a: 0, b: 0}));
+assert.writeOK(coll.insert({_id: 1, a: 0, b: 1}));
+assert.writeOK(coll.insert({_id: 2, a: 1, b: 0}));
+assert.writeOK(coll.insert({_id: 3, a: 1, b: 1}));
- /**
- * Helper to test that for a given pipeline, the same results are returned whether or not an
- * index is present. If 'ignoreSortOrder' is present, test for result parity without assuming
- * the order of results.
- */
- function assertResultsMatch(pipeline, ignoreSortOrder) {
- // Add a match stage to ensure index scans are considered for planning (workaround for
- // SERVER-20066).
- pipeline = [{$match: {a: {$gte: 0}}}].concat(pipeline);
+/**
+ * Helper to test that for a given pipeline, the same results are returned whether or not an
+ * index is present. If 'ignoreSortOrder' is present, test for result parity without assuming
+ * the order of results.
+ */
+function assertResultsMatch(pipeline, ignoreSortOrder) {
+ // Add a match stage to ensure index scans are considered for planning (workaround for
+ // SERVER-20066).
+ pipeline = [{$match: {a: {$gte: 0}}}].concat(pipeline);
- // Once with an index.
- assert.commandWorked(coll.ensureIndex(indexSpec));
- var resultsWithIndex = coll.aggregate(pipeline).toArray();
+ // Once with an index.
+ assert.commandWorked(coll.ensureIndex(indexSpec));
+ var resultsWithIndex = coll.aggregate(pipeline).toArray();
- // Again without an index.
- assert.commandWorked(coll.dropIndex(indexSpec));
- var resultsWithoutIndex = coll.aggregate(pipeline).toArray();
+ // Again without an index.
+ assert.commandWorked(coll.dropIndex(indexSpec));
+ var resultsWithoutIndex = coll.aggregate(pipeline).toArray();
- if (ignoreSortOrder) {
- assert(arrayEq(resultsWithIndex, resultsWithoutIndex), tojson({
- resultsWithIndex: resultsWithIndex,
- resultsWithoutIndex: resultsWithoutIndex
- }));
- } else {
- assert.eq(resultsWithIndex, resultsWithoutIndex);
- }
+ if (ignoreSortOrder) {
+ assert(
+ arrayEq(resultsWithIndex, resultsWithoutIndex),
+ tojson({resultsWithIndex: resultsWithIndex, resultsWithoutIndex: resultsWithoutIndex}));
+ } else {
+ assert.eq(resultsWithIndex, resultsWithoutIndex);
}
+}
- // Uncovered $project, no $sort.
- const ignoreSortOrder = true;
- assertResultsMatch([{$project: {_id: 1, a: 1, b: 1}}], ignoreSortOrder);
+// Uncovered $project, no $sort.
+const ignoreSortOrder = true;
+assertResultsMatch([{$project: {_id: 1, a: 1, b: 1}}], ignoreSortOrder);
- // Covered $project, no $sort.
- assertResultsMatch([{$project: {_id: 0, a: 1}}], ignoreSortOrder);
- assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}], ignoreSortOrder);
- assertResultsMatch([{$project: {_id: 0, a: 1, b: 1, c: {$literal: 1}}}], ignoreSortOrder);
- assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$project: {a: 1}}], ignoreSortOrder);
- assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}],
- ignoreSortOrder);
+// Covered $project, no $sort.
+assertResultsMatch([{$project: {_id: 0, a: 1}}], ignoreSortOrder);
+assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}], ignoreSortOrder);
+assertResultsMatch([{$project: {_id: 0, a: 1, b: 1, c: {$literal: 1}}}], ignoreSortOrder);
+assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$project: {a: 1}}], ignoreSortOrder);
+assertResultsMatch([{$project: {_id: 0, a: 1, b: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}],
+ ignoreSortOrder);
- // Non-blocking $sort, uncovered $project.
- assertResultsMatch([{$sort: {a: -1, b: -1}}, {$project: {_id: 1, a: 1, b: 1}}]);
- assertResultsMatch([{$sort: {a: 1, b: 1}}, {$project: {_id: 1, a: 1, b: 1}}]);
- assertResultsMatch(
- [{$sort: {a: 1, b: 1}}, {$group: {_id: "$_id", arr: {$push: "$a"}, sum: {$sum: "$b"}}}],
- ignoreSortOrder);
+// Non-blocking $sort, uncovered $project.
+assertResultsMatch([{$sort: {a: -1, b: -1}}, {$project: {_id: 1, a: 1, b: 1}}]);
+assertResultsMatch([{$sort: {a: 1, b: 1}}, {$project: {_id: 1, a: 1, b: 1}}]);
+assertResultsMatch(
+ [{$sort: {a: 1, b: 1}}, {$group: {_id: "$_id", arr: {$push: "$a"}, sum: {$sum: "$b"}}}],
+ ignoreSortOrder);
- // Non-blocking $sort, covered $project.
- assertResultsMatch([{$sort: {a: -1, b: -1}}, {$project: {_id: 0, a: 1, b: 1}}]);
- assertResultsMatch([{$sort: {a: 1, b: 1}}, {$project: {_id: 0, a: 1, b: 1}}]);
- assertResultsMatch([{$sort: {a: 1, b: 1}}, {$group: {_id: "$b", arr: {$push: "$a"}}}],
- ignoreSortOrder);
+// Non-blocking $sort, covered $project.
+assertResultsMatch([{$sort: {a: -1, b: -1}}, {$project: {_id: 0, a: 1, b: 1}}]);
+assertResultsMatch([{$sort: {a: 1, b: 1}}, {$project: {_id: 0, a: 1, b: 1}}]);
+assertResultsMatch([{$sort: {a: 1, b: 1}}, {$group: {_id: "$b", arr: {$push: "$a"}}}],
+ ignoreSortOrder);
- // Blocking $sort, uncovered $project.
- assertResultsMatch([{$sort: {b: 1, a: -1}}, {$project: {_id: 1, a: 1, b: 1}}]);
- assertResultsMatch(
- [{$sort: {b: 1, a: -1}}, {$group: {_id: "$_id", arr: {$push: "$a"}, sum: {$sum: "$b"}}}],
- ignoreSortOrder);
+// Blocking $sort, uncovered $project.
+assertResultsMatch([{$sort: {b: 1, a: -1}}, {$project: {_id: 1, a: 1, b: 1}}]);
+assertResultsMatch(
+ [{$sort: {b: 1, a: -1}}, {$group: {_id: "$_id", arr: {$push: "$a"}, sum: {$sum: "$b"}}}],
+ ignoreSortOrder);
- // Blocking $sort, covered $project.
- assertResultsMatch([{$sort: {b: 1, a: -1}}, {$project: {_id: 0, a: 1, b: 1}}]);
- assertResultsMatch([{$sort: {b: 1, a: -1}}, {$group: {_id: "$b", arr: {$push: "$a"}}}],
- ignoreSortOrder);
+// Blocking $sort, covered $project.
+assertResultsMatch([{$sort: {b: 1, a: -1}}, {$project: {_id: 0, a: 1, b: 1}}]);
+assertResultsMatch([{$sort: {b: 1, a: -1}}, {$group: {_id: "$b", arr: {$push: "$a"}}}],
+ ignoreSortOrder);
}());
diff --git a/jstests/aggregation/bugs/server14421.js b/jstests/aggregation/bugs/server14421.js
index 3201e20a81a..b6701546e3d 100644
--- a/jstests/aggregation/bugs/server14421.js
+++ b/jstests/aggregation/bugs/server14421.js
@@ -1,40 +1,40 @@
// SERVER-14421 minDistance for $geoNear aggregation operator
(function() {
- 'use strict';
- var coll = db.mindistance;
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, loc: {type: "Point", coordinates: [0, 0]}},
- {_id: 1, loc: {type: "Point", coordinates: [0, 0.01]}}
- ]));
- var response = coll.createIndex({loc: "2dsphere"});
- assert.eq(response.ok, 1, "Could not create 2dsphere index");
- var results = coll.aggregate([{
- $geoNear: {
- minDistance: 10000,
- spherical: true,
- distanceField: "distance",
- near: {type: "Point", coordinates: [0, 0]}
- }
- }]);
- assert.eq(results.itcount(), 0);
- results = coll.aggregate([{
- $geoNear: {
- minDistance: 1,
- spherical: true,
- distanceField: "distance",
- near: {type: "Point", coordinates: [0, 0]}
- }
- }]);
- assert.eq(results.itcount(), 1);
- results = coll.aggregate([{
- $geoNear: {
- minDistance: 0,
- spherical: true,
- distanceField: "distance",
- near: {type: "Point", coordinates: [0, 0]}
- }
- }]);
- assert.eq(results.itcount(), 2);
- coll.drop();
+'use strict';
+var coll = db.mindistance;
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, loc: {type: "Point", coordinates: [0, 0]}},
+ {_id: 1, loc: {type: "Point", coordinates: [0, 0.01]}}
+]));
+var response = coll.createIndex({loc: "2dsphere"});
+assert.eq(response.ok, 1, "Could not create 2dsphere index");
+var results = coll.aggregate([{
+ $geoNear: {
+ minDistance: 10000,
+ spherical: true,
+ distanceField: "distance",
+ near: {type: "Point", coordinates: [0, 0]}
+ }
+}]);
+assert.eq(results.itcount(), 0);
+results = coll.aggregate([{
+ $geoNear: {
+ minDistance: 1,
+ spherical: true,
+ distanceField: "distance",
+ near: {type: "Point", coordinates: [0, 0]}
+ }
+}]);
+assert.eq(results.itcount(), 1);
+results = coll.aggregate([{
+ $geoNear: {
+ minDistance: 0,
+ spherical: true,
+ distanceField: "distance",
+ near: {type: "Point", coordinates: [0, 0]}
+ }
+}]);
+assert.eq(results.itcount(), 2);
+coll.drop();
}()); \ No newline at end of file
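For context, a minimal shell sketch of the minDistance bound tested above (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_geo' is a hypothetical collection name.
    var c = db.example_geo;
    c.drop();
    assert.writeOK(c.insert({_id: 0, loc: {type: "Point", coordinates: [0, 0]}}));
    assert.commandWorked(c.createIndex({loc: "2dsphere"}));
    // With spherical: true and a GeoJSON point, minDistance is in meters; a
    // point sitting exactly at the query origin is excluded by any positive bound.
    var n = c.aggregate([{
                  $geoNear: {
                      near: {type: "Point", coordinates: [0, 0]},
                      distanceField: "distance",
                      spherical: true,
                      minDistance: 1
                  }
              }]).itcount();
    assert.eq(n, 0);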
diff --git a/jstests/aggregation/bugs/server14670.js b/jstests/aggregation/bugs/server14670.js
index dc8a750e9db..adadb154da0 100644
--- a/jstests/aggregation/bugs/server14670.js
+++ b/jstests/aggregation/bugs/server14670.js
@@ -3,21 +3,19 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
+"use strict";
- var coll = db.substr;
- coll.drop();
+var coll = db.substr;
+coll.drop();
- // Need an empty document for the pipeline.
- coll.insert({});
+// Need an empty document for the pipeline.
+coll.insert({});
- assertErrorCode(coll,
- [{$project: {strLen: {$strLenBytes: 1}}}],
- 34473,
- "$strLenBytes requires a string argument.");
+assertErrorCode(coll,
+ [{$project: {strLen: {$strLenBytes: 1}}}],
+ 34473,
+ "$strLenBytes requires a string argument.");
- assertErrorCode(coll,
- [{$project: {strLen: {$strLenCP: 1}}}],
- 34471,
- "$strLenCP requires a string argument.");
+assertErrorCode(
+ coll, [{$project: {strLen: {$strLenCP: 1}}}], 34471, "$strLenCP requires a string argument.");
}());
diff --git a/jstests/aggregation/bugs/server14691.js b/jstests/aggregation/bugs/server14691.js
index 0ba010ac41a..2703f2dead9 100644
--- a/jstests/aggregation/bugs/server14691.js
+++ b/jstests/aggregation/bugs/server14691.js
@@ -1,52 +1,52 @@
// SERVER-14691: $avg aggregator should return null when it receives no input.
(function() {
- 'use strict';
+'use strict';
- var coll = db.accumulate_avg_sum_null;
+var coll = db.accumulate_avg_sum_null;
- // Test the $avg aggregator.
- coll.drop();
+// Test the $avg aggregator.
+coll.drop();
- // Null cases.
- assert.writeOK(coll.insert({a: 1, b: 2, c: 'string', d: null}));
+// Null cases.
+assert.writeOK(coll.insert({a: 1, b: 2, c: 'string', d: null}));
- // Missing field.
- var pipeline = [{$group: {_id: '$a', avg: {$avg: '$missing'}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
+// Missing field.
+var pipeline = [{$group: {_id: '$a', avg: {$avg: '$missing'}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
- // Non-numeric field.
- pipeline = [{$group: {_id: '$a', avg: {$avg: '$c'}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
+// Non-numeric field.
+pipeline = [{$group: {_id: '$a', avg: {$avg: '$c'}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
- // Field with value of null.
- pipeline = [{$group: {_id: '$a', avg: {$avg: '$d'}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
+// Field with value of null.
+pipeline = [{$group: {_id: '$a', avg: {$avg: '$d'}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
- // All three.
- coll.insert({a: 1, d: 'string'});
- coll.insert({a: 1});
- pipeline = [{$group: {_id: '$a', avg: {$avg: '$d'}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
+// All three.
+coll.insert({a: 1, d: 'string'});
+coll.insert({a: 1});
+pipeline = [{$group: {_id: '$a', avg: {$avg: '$d'}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: null}]);
- // Non-null cases.
- coll.drop();
- assert.writeOK(coll.insert({a: 1, b: 2}));
- pipeline = [{$group: {_id: '$a', avg: {$avg: '$b'}}}];
+// Non-null cases.
+coll.drop();
+assert.writeOK(coll.insert({a: 1, b: 2}));
+pipeline = [{$group: {_id: '$a', avg: {$avg: '$b'}}}];
- // One field.
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 2}]);
+// One field.
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 2}]);
- // Two fields.
- assert.writeOK(coll.insert({a: 1, b: 4}));
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 3}]);
+// Two fields.
+assert.writeOK(coll.insert({a: 1, b: 4}));
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 3}]);
- // Average of zero should still work.
- assert.writeOK(coll.insert({a: 1, b: -6}));
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 0}]);
+// Average of zero should still work.
+assert.writeOK(coll.insert({a: 1, b: -6}));
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 0}]);
- // Missing, null, or non-numeric fields should not error or affect the average.
- assert.writeOK(coll.insert({a: 1}));
- assert.writeOK(coll.insert({a: 1, b: 'string'}));
- assert.writeOK(coll.insert({a: 1, b: null}));
- assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 0}]);
+// Missing, null, or non-numeric fields should not error or affect the average.
+assert.writeOK(coll.insert({a: 1}));
+assert.writeOK(coll.insert({a: 1, b: 'string'}));
+assert.writeOK(coll.insert({a: 1, b: null}));
+assert.eq(coll.aggregate(pipeline).toArray(), [{_id: 1, avg: 0}]);
}());
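As a quick illustration of the null-average contract tested above (sketch only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_avg' is a hypothetical collection name.
    var c = db.example_avg;
    c.drop();
    assert.writeOK(c.insert({g: 1, v: "string"}));
    assert.writeOK(c.insert({g: 1, v: null}));
    assert.writeOK(c.insert({g: 1}));
    // Non-numeric, null, and missing values are all ignored; with no numeric
    // input at all, $avg yields null rather than 0 or an error.
    assert.eq(c.aggregate([{$group: {_id: "$g", avg: {$avg: "$v"}}}]).toArray(),
              [{_id: 1, avg: null}]);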
diff --git a/jstests/aggregation/bugs/server14872.js b/jstests/aggregation/bugs/server14872.js
index 3be4018ac21..4787df5259b 100644
--- a/jstests/aggregation/bugs/server14872.js
+++ b/jstests/aggregation/bugs/server14872.js
@@ -4,36 +4,36 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
+'use strict';
- var coll = db.agg_concat_arrays_expr;
- coll.drop();
+var coll = db.agg_concat_arrays_expr;
+coll.drop();
- assert.writeOK(coll.insert({a: [1, 2], b: ['three'], c: [], d: [[3], 4], e: null, str: 'x'}));
+assert.writeOK(coll.insert({a: [1, 2], b: ['three'], c: [], d: [[3], 4], e: null, str: 'x'}));
- // Basic concatenation.
- var pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$b', '$c']}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2, 'three']}]);
+// Basic concatenation.
+var pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$b', '$c']}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2, 'three']}]);
- // Concatenation with nested arrays.
- pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$d']}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2, [3], 4]}]);
+// Concatenation with nested arrays.
+pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$d']}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2, [3], 4]}]);
- // Concatenation with 1 argument.
- pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a']}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2]}]);
+// Concatenation with 1 argument.
+pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a']}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: [1, 2]}]);
- // Concatenation with no arguments.
- pipeline = [{$project: {_id: 0, all: {$concatArrays: []}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: []}]);
+// Concatenation with no arguments.
+pipeline = [{$project: {_id: 0, all: {$concatArrays: []}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: []}]);
- // Any nullish inputs will result in null.
- pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$e']}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: null}]);
- pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$f']}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{all: null}]);
+// Any nullish inputs will result in null.
+pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$e']}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: null}]);
+pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$f']}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{all: null}]);
- // Error on any non-array, non-null inputs.
- pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$str']}}}];
- assertErrorCode(coll, pipeline, 28664);
+// Error on any non-array, non-null inputs.
+pipeline = [{$project: {_id: 0, all: {$concatArrays: ['$a', '$str']}}}];
+assertErrorCode(coll, pipeline, 28664);
}());
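For context, a minimal shell sketch of the $concatArrays rules tested above (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_concat' is a hypothetical collection name.
    var c = db.example_concat;
    c.drop();
    assert.writeOK(c.insert({a: [1, 2], b: ["three"], e: null}));
    // Arrays are concatenated left to right; any nullish operand makes the
    // whole result null, and non-array operands are errors.
    var out = c.aggregate([{
                   $project:
                       {_id: 0, all: {$concatArrays: ["$a", "$b"]}, n: {$concatArrays: ["$a", "$e"]}}
               }]).toArray();
    assert.eq(out, [{all: [1, 2, "three"], n: null}]);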
diff --git a/jstests/aggregation/bugs/server17224.js b/jstests/aggregation/bugs/server17224.js
index 888c99b808c..31d0e889b8c 100644
--- a/jstests/aggregation/bugs/server17224.js
+++ b/jstests/aggregation/bugs/server17224.js
@@ -1,25 +1,25 @@
// SERVER-17224 An aggregation result with exactly the right size could crash the server rather than
// returning an error.
(function() {
- 'use strict';
+'use strict';
- var t = db.server17224;
- t.drop();
+var t = db.server17224;
+t.drop();
- // first 63MB
- for (var i = 0; i < 63; i++) {
- t.insert({a: new Array(1024 * 1024 + 1).join('a')});
- }
+// first 63MB
+for (var i = 0; i < 63; i++) {
+ t.insert({a: new Array(1024 * 1024 + 1).join('a')});
+}
- // the remaining ~1MB with room for field names and other overhead
- t.insert({a: new Array(1024 * 1024 - 1105).join('a')});
+// the remaining ~1MB with room for field names and other overhead
+t.insert({a: new Array(1024 * 1024 - 1105).join('a')});
- // do not use cursor form, since it has a different workaround for this issue.
- assert.commandFailed(db.runCommand({
- aggregate: t.getName(),
- pipeline: [{$match: {}}, {$group: {_id: null, arr: {$push: {a: '$a'}}}}]
- }));
+// do not use cursor form, since it has a different workaround for this issue.
+assert.commandFailed(db.runCommand({
+ aggregate: t.getName(),
+ pipeline: [{$match: {}}, {$group: {_id: null, arr: {$push: {a: '$a'}}}}]
+}));
- // Make sure the server is still up.
- assert.commandWorked(db.runCommand('ping'));
+// Make sure the server is still up.
+assert.commandWorked(db.runCommand('ping'));
}());
diff --git a/jstests/aggregation/bugs/server17943.js b/jstests/aggregation/bugs/server17943.js
index 075623c705d..6b510e2ddbc 100644
--- a/jstests/aggregation/bugs/server17943.js
+++ b/jstests/aggregation/bugs/server17943.js
@@ -4,80 +4,88 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
+'use strict';
- var coll = db.agg_filter_expr;
- coll.drop();
+var coll = db.agg_filter_expr;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0, a: [1, 2, 3, 4, 5]}));
- assert.writeOK(coll.insert({_id: 1, a: [2, 4]}));
- assert.writeOK(coll.insert({_id: 2, a: []}));
- assert.writeOK(coll.insert({_id: 3, a: [1]}));
- assert.writeOK(coll.insert({_id: 4, a: null}));
- assert.writeOK(coll.insert({_id: 5, a: undefined}));
- assert.writeOK(coll.insert({_id: 6}));
+assert.writeOK(coll.insert({_id: 0, a: [1, 2, 3, 4, 5]}));
+assert.writeOK(coll.insert({_id: 1, a: [2, 4]}));
+assert.writeOK(coll.insert({_id: 2, a: []}));
+assert.writeOK(coll.insert({_id: 3, a: [1]}));
+assert.writeOK(coll.insert({_id: 4, a: null}));
+assert.writeOK(coll.insert({_id: 5, a: undefined}));
+assert.writeOK(coll.insert({_id: 6}));
- // Create filter to only accept odd numbers.
- filterDoc = {input: '$a', as: 'x', cond: {$eq: [1, {$mod: ['$$x', 2]}]}};
- var expectedResults = [
- {_id: 0, b: [1, 3, 5]},
- {_id: 1, b: []},
- {_id: 2, b: []},
- {_id: 3, b: [1]},
- {_id: 4, b: null},
- {_id: 5, b: null},
- {_id: 6, b: null},
- ];
- var results =
- coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray();
- assert.eq(results, expectedResults);
+// Create filter to only accept odd numbers.
+filterDoc = {input: '$a', as: 'x', cond: {$eq: [1, {$mod: ['$$x', 2]}]}};
+var expectedResults = [
+ {_id: 0, b: [1, 3, 5]},
+ {_id: 1, b: []},
+ {_id: 2, b: []},
+ {_id: 3, b: [1]},
+ {_id: 4, b: null},
+ {_id: 5, b: null},
+ {_id: 6, b: null},
+];
+var results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray();
+assert.eq(results, expectedResults);
- // create filter that uses the default variable name in 'cond'
- filterDoc = {input: '$a', cond: {$eq: [2, '$$this']}};
- expectedResults = [
- {_id: 0, b: [2]},
- {_id: 1, b: [2]},
- {_id: 2, b: []},
- {_id: 3, b: []},
- {_id: 4, b: null},
- {_id: 5, b: null},
- {_id: 6, b: null},
- ];
- results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray();
- assert.eq(results, expectedResults);
+// create filter that uses the default variable name in 'cond'
+filterDoc = {
+ input: '$a',
+ cond: {$eq: [2, '$$this']}
+};
+expectedResults = [
+ {_id: 0, b: [2]},
+ {_id: 1, b: [2]},
+ {_id: 2, b: []},
+ {_id: 3, b: []},
+ {_id: 4, b: null},
+ {_id: 5, b: null},
+ {_id: 6, b: null},
+];
+results = coll.aggregate([{$project: {b: {$filter: filterDoc}}}, {$sort: {_id: 1}}]).toArray();
+assert.eq(results, expectedResults);
- // Invalid filter expressions.
+// Invalid filter expressions.
- // '$filter' is not a document.
- var filterDoc = 'string';
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28646);
+// '$filter' is not a document.
+var filterDoc = 'string';
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28646);
- // Extra field(s).
- filterDoc = {input: '$a', as: 'x', cond: true, extra: 1};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28647);
+// Extra field(s).
+filterDoc = {input: '$a', as: 'x', cond: true, extra: 1};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28647);
- // Missing 'input'.
- filterDoc = {as: 'x', cond: true};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28648);
+// Missing 'input'.
+filterDoc = {
+ as: 'x',
+ cond: true
+};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28648);
- // Missing 'cond'.
- filterDoc = {input: '$a', as: 'x'};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28650);
+// Missing 'cond'.
+filterDoc = {input: '$a', as: 'x'};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28650);
- // 'as' is not a valid variable name.
- filterDoc = {input: '$a', as: '$x', cond: true};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 16867);
+// 'as' is not a valid variable name.
+filterDoc = {input: '$a', as: '$x', cond: true};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 16867);
- // 'input' is not an array.
- filterDoc = {input: 'string', as: 'x', cond: true};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28651);
+// 'input' is not an array.
+filterDoc = {input: 'string', as: 'x', cond: true};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28651);
- // 'cond' uses undefined variable name.
- filterDoc = {input: '$a', cond: {$eq: [1, '$$var']}};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 17276);
+// 'cond' uses undefined variable name.
+filterDoc = {
+ input: '$a',
+ cond: {$eq: [1, '$$var']}
+};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 17276);
- assert(coll.drop());
- assert.writeOK(coll.insert({a: 'string'}));
- filterDoc = {input: '$a', as: 'x', cond: true};
- assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28651);
+assert(coll.drop());
+assert.writeOK(coll.insert({a: 'string'}));
+filterDoc = {input: '$a', as: 'x', cond: true};
+assertErrorCode(coll, [{$project: {b: {$filter: filterDoc}}}], 28651);
}());
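To make the $filter shape concrete, a minimal shell sketch (illustration only, not part of this commit; names are hypothetical):

    // Sketch only: 'example_filter' is a hypothetical collection name.
    var c = db.example_filter;
    c.drop();
    assert.writeOK(c.insert({_id: 0, a: [1, 2, 3, 4, 5]}));
    // 'as' names the per-element variable (omit it and '$$this' is available
    // by default). This cond keeps only the odd elements.
    var out = c.aggregate([{
                   $project:
                       {odd: {$filter: {input: "$a", as: "x", cond: {$eq: [1, {$mod: ["$$x", 2]}]}}}}
               }]).toArray();
    assert.eq(out[0].odd, [1, 3, 5]);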
diff --git a/jstests/aggregation/bugs/server18198.js b/jstests/aggregation/bugs/server18198.js
index a182195a864..9aa26451161 100644
--- a/jstests/aggregation/bugs/server18198.js
+++ b/jstests/aggregation/bugs/server18198.js
@@ -1,67 +1,67 @@
// SERVER-18198 check read pref is only applied when there is no $out stage
// in aggregate shell helper
(function() {
- "use strict";
- var t = db.server18198;
- t.drop();
+"use strict";
+var t = db.server18198;
+t.drop();
- var mongo = db.getMongo();
+var mongo = db.getMongo();
- try {
- var commandsRan = [];
- // hook in our patched mongo
- var mockMongo = {
- getSlaveOk: function() {
- return true;
- },
- runCommand: function(db, cmd, opts) {
- commandsRan.push({db: db, cmd: cmd, opts: opts});
- return {ok: 1.0};
- },
- getReadPref: function() {
- return {mode: "secondaryPreferred"};
- },
- getReadPrefMode: function() {
- return "secondaryPreferred";
- },
- getMinWireVersion: function() {
- return mongo.getMinWireVersion();
- },
- getMaxWireVersion: function() {
- return mongo.getMaxWireVersion();
- },
- isReplicaSetMember: function() {
- return mongo.isReplicaSetMember();
- },
- isMongos: function() {
- return mongo.isMongos();
- },
- isCausalConsistency: function() {
- return false;
- },
- getClusterTime: function() {
- return mongo.getClusterTime();
- },
- };
+try {
+ var commandsRan = [];
+ // hook in our patched mongo
+ var mockMongo = {
+ getSlaveOk: function() {
+ return true;
+ },
+ runCommand: function(db, cmd, opts) {
+ commandsRan.push({db: db, cmd: cmd, opts: opts});
+ return {ok: 1.0};
+ },
+ getReadPref: function() {
+ return {mode: "secondaryPreferred"};
+ },
+ getReadPrefMode: function() {
+ return "secondaryPreferred";
+ },
+ getMinWireVersion: function() {
+ return mongo.getMinWireVersion();
+ },
+ getMaxWireVersion: function() {
+ return mongo.getMaxWireVersion();
+ },
+ isReplicaSetMember: function() {
+ return mongo.isReplicaSetMember();
+ },
+ isMongos: function() {
+ return mongo.isMongos();
+ },
+ isCausalConsistency: function() {
+ return false;
+ },
+ getClusterTime: function() {
+ return mongo.getClusterTime();
+ },
+ };
- db._mongo = mockMongo;
- db._session = new _DummyDriverSession(mockMongo);
+ db._mongo = mockMongo;
+ db._session = new _DummyDriverSession(mockMongo);
- // this query should not get a read pref
- t.aggregate([{$sort: {"x": 1}}, {$out: "foo"}]);
- assert.eq(commandsRan.length, 1);
- // check that it doesn't have a read preference
- assert(!commandsRan[0].cmd.hasOwnProperty("$readPreference"));
+ // this query should not get a read pref
+ t.aggregate([{$sort: {"x": 1}}, {$out: "foo"}]);
+ assert.eq(commandsRan.length, 1);
+ // check that it doesn't have a read preference
+ assert(!commandsRan[0].cmd.hasOwnProperty("$readPreference"));
- commandsRan = [];
+ commandsRan = [];
- t.aggregate([{$sort: {"x": 1}}]);
- // check another command was run
- assert.eq(commandsRan.length, 1);
- // check that it has a read preference
- assert(commandsRan[0].cmd.hasOwnProperty("$readPreference"));
- } finally {
- db._mongo = mongo;
- db._session = new _DummyDriverSession(mongo);
- }
+ t.aggregate([{$sort: {"x": 1}}]);
+ // check another command was run
+ assert.eq(commandsRan.length, 1);
+ // check that it has a read preference
+ assert(commandsRan[0].cmd.hasOwnProperty("$readPreference"));
+} finally {
+ db._mongo = mongo;
+ db._session = new _DummyDriverSession(mongo);
+}
})();
diff --git a/jstests/aggregation/bugs/server18222.js b/jstests/aggregation/bugs/server18222.js
index cea52b3970d..d27188bbb2b 100644
--- a/jstests/aggregation/bugs/server18222.js
+++ b/jstests/aggregation/bugs/server18222.js
@@ -1,43 +1,43 @@
// SERVER-18222: Add $isArray aggregation expression.
(function() {
- 'use strict';
- var coll = db.is_array_expr;
- coll.drop();
+'use strict';
+var coll = db.is_array_expr;
+coll.drop();
- // Non-array types.
- assert.writeOK(coll.insert({_id: 0, x: 0}));
- assert.writeOK(coll.insert({_id: 1, x: '0'}));
- assert.writeOK(coll.insert({_id: 2, x: new ObjectId()}));
- assert.writeOK(coll.insert({_id: 3, x: new NumberLong(0)}));
- assert.writeOK(coll.insert({_id: 4, x: {y: []}}));
- assert.writeOK(coll.insert({_id: 5, x: null}));
- assert.writeOK(coll.insert({_id: 6, x: NaN}));
- assert.writeOK(coll.insert({_id: 7, x: undefined}));
+// Non-array types.
+assert.writeOK(coll.insert({_id: 0, x: 0}));
+assert.writeOK(coll.insert({_id: 1, x: '0'}));
+assert.writeOK(coll.insert({_id: 2, x: new ObjectId()}));
+assert.writeOK(coll.insert({_id: 3, x: new NumberLong(0)}));
+assert.writeOK(coll.insert({_id: 4, x: {y: []}}));
+assert.writeOK(coll.insert({_id: 5, x: null}));
+assert.writeOK(coll.insert({_id: 6, x: NaN}));
+assert.writeOK(coll.insert({_id: 7, x: undefined}));
- // Array types.
- assert.writeOK(coll.insert({_id: 8, x: []}));
- assert.writeOK(coll.insert({_id: 9, x: [0]}));
- assert.writeOK(coll.insert({_id: 10, x: ['0']}));
+// Array types.
+assert.writeOK(coll.insert({_id: 8, x: []}));
+assert.writeOK(coll.insert({_id: 9, x: [0]}));
+assert.writeOK(coll.insert({_id: 10, x: ['0']}));
- // Project field is_array to represent whether the field x was an array.
- var results = coll.aggregate([
- {$sort: {_id: 1}},
- {$project: {isArray: {$isArray: '$x'}}},
- ])
- .toArray();
- var expectedResults = [
- {_id: 0, isArray: false},
- {_id: 1, isArray: false},
- {_id: 2, isArray: false},
- {_id: 3, isArray: false},
- {_id: 4, isArray: false},
- {_id: 5, isArray: false},
- {_id: 6, isArray: false},
- {_id: 7, isArray: false},
- {_id: 8, isArray: true},
- {_id: 9, isArray: true},
- {_id: 10, isArray: true},
- ];
+// Project field isArray to represent whether the field x was an array.
+var results = coll.aggregate([
+ {$sort: {_id: 1}},
+ {$project: {isArray: {$isArray: '$x'}}},
+ ])
+ .toArray();
+var expectedResults = [
+ {_id: 0, isArray: false},
+ {_id: 1, isArray: false},
+ {_id: 2, isArray: false},
+ {_id: 3, isArray: false},
+ {_id: 4, isArray: false},
+ {_id: 5, isArray: false},
+ {_id: 6, isArray: false},
+ {_id: 7, isArray: false},
+ {_id: 8, isArray: true},
+ {_id: 9, isArray: true},
+ {_id: 10, isArray: true},
+];
- assert.eq(results, expectedResults);
+assert.eq(results, expectedResults);
}());
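
Beyond the plain projection above, $isArray is most often used as a guard before array-only operators. A small sketch against the same collection (the appended [99] is arbitrary):

    // Append to x only when it is an array; pass all other values through unchanged.
    db.is_array_expr.aggregate([{
        $project: {
            x: {$cond: {if: {$isArray: '$x'}, then: {$concatArrays: ['$x', [99]]}, else: '$x'}}
        }
    }]);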
diff --git a/jstests/aggregation/bugs/server18427.js b/jstests/aggregation/bugs/server18427.js
index f15c1f9e23e..fffbc51ef64 100644
--- a/jstests/aggregation/bugs/server18427.js
+++ b/jstests/aggregation/bugs/server18427.js
@@ -4,156 +4,151 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
- var coll = db.log_exponential_expressions;
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- var decimalE = NumberDecimal("2.718281828459045235360287471352662");
- var decimal1overE = NumberDecimal("0.3678794411714423215955237701614609");
-
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
-
- // $log, $log10, $ln.
-
- // Valid input: numeric/null/NaN, base positive and not equal to 1, arg positive.
- // - NumberDouble
- testOp({$log: [10, 10]}, 1);
- testOp({$log10: [10]}, 1);
- testOp({$ln: [Math.E]}, 1);
- // - NumberDecimal
- testOp({$log: [NumberDecimal("10"), NumberDecimal("10")]}, NumberDecimal("1"));
- testOp({$log10: [NumberDecimal("10")]}, NumberDecimal("1"));
- // The below answer is actually correct: the input is an approximation of E
- testOp({$ln: [decimalE]}, NumberDecimal("0.9999999999999999999999999999999998"));
- // All types converted to doubles.
- testOp({$log: [NumberLong("10"), NumberLong("10")]}, 1);
- testOp({$log10: [NumberLong("10")]}, 1);
- testOp({$ln: [NumberLong("1")]}, 0);
- // LLONG_MAX is converted to a double.
- testOp({$log: [NumberLong("9223372036854775807"), 10]}, 18.964889726830812);
- // Null inputs result in null.
- testOp({$log: [null, 10]}, null);
- testOp({$log: [10, null]}, null);
- testOp({$log: [null, NumberDecimal(10)]}, null);
- testOp({$log: [NumberDecimal(10), null]}, null);
- testOp({$log10: [null]}, null);
- testOp({$ln: [null]}, null);
- // NaN inputs result in NaN.
- testOp({$log: [NaN, 10]}, NaN);
- testOp({$log: [10, NaN]}, NaN);
- testOp({$log: [NaN, NumberDecimal(10)]}, NaN);
- testOp({$log: [NumberDecimal(10), NaN]}, NaN);
- testOp({$log10: [NaN]}, NaN);
- testOp({$ln: [NaN]}, NaN);
-
- // Invalid input: non-numeric/non-null, bases not positive or equal to 1, args not positive.
-
- // Args/bases must be numeric or null.
- assertErrorCode(coll, [{$project: {log: {$log: ["string", 5]}}}], 28756);
- assertErrorCode(coll, [{$project: {log: {$log: [5, "string"]}}}], 28757);
- assertErrorCode(coll, [{$project: {log10: {$log10: ["string"]}}}], 28765);
- assertErrorCode(coll, [{$project: {ln: {$ln: ["string"]}}}], 28765);
- // Args/bases cannot equal 0.
- assertErrorCode(coll, [{$project: {log: {$log: [0, 5]}}}], 28758);
- assertErrorCode(coll, [{$project: {log: {$log: [5, 0]}}}], 28759);
- assertErrorCode(coll, [{$project: {log10: {$log10: [0]}}}], 28761);
- assertErrorCode(coll, [{$project: {ln: {$ln: [0]}}}], 28766);
- assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(0), NumberDecimal(5)]}}}], 28758);
- assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(0)]}}}], 28759);
- assertErrorCode(coll, [{$project: {log10: {$log10: [NumberDecimal(0)]}}}], 28761);
- assertErrorCode(coll, [{$project: {ln: {$ln: [NumberDecimal(0)]}}}], 28766);
- // Args/bases cannot be negative.
- assertErrorCode(coll, [{$project: {log: {$log: [-1, 5]}}}], 28758);
- assertErrorCode(coll, [{$project: {log: {$log: [5, -1]}}}], 28759);
- assertErrorCode(coll, [{$project: {log10: {$log10: [-1]}}}], 28761);
- assertErrorCode(coll, [{$project: {ln: {$ln: [-1]}}}], 28766);
- assertErrorCode(
- coll, [{$project: {log: {$log: [NumberDecimal(-1), NumberDecimal(5)]}}}], 28758);
- assertErrorCode(
- coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(-1)]}}}], 28759);
- assertErrorCode(coll, [{$project: {log10: {$log10: [NumberDecimal(-1)]}}}], 28761);
- assertErrorCode(coll, [{$project: {ln: {$ln: [NumberDecimal(-1)]}}}], 28766);
- // Base can't equal 1.
- assertErrorCode(coll, [{$project: {log: {$log: [5, 1]}}}], 28759);
- assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(1)]}}}], 28759);
-
- // $pow, $exp.
-
- // Valid input - numeric/null/NaN.
-
- // $pow -- if either input is a double return a double.
- testOp({$pow: [10, 2]}, 100);
- testOp({$pow: [1 / 2, -1]}, 2);
- testOp({$pow: [-2, 2]}, 4);
- testOp({$pow: [NumberInt("2"), 2]}, 4);
- testOp({$pow: [-2, NumberInt("2")]}, 4);
- // $pow -- if either input is a NumberDecimal, return a NumberDecimal
- testOp({$pow: [NumberDecimal("10.0"), -2]},
- NumberDecimal("0.01000000000000000000000000000000000"));
- testOp({$pow: [0.5, NumberDecimal("-1")]},
- NumberDecimal("2.000000000000000000000000000000000"));
- testOp({$pow: [-2, NumberDecimal("2")]}, NumberDecimal("4.000000000000000000000000000000000"));
- testOp({$pow: [NumberInt("2"), NumberDecimal("2")]},
- NumberDecimal("4.000000000000000000000000000000000"));
- testOp({$pow: [NumberDecimal("-2.0"), NumberInt("2")]},
- NumberDecimal("4.000000000000000000000000000000000"));
- testOp({$pow: [NumberDecimal("10.0"), 2]},
- NumberDecimal("100.0000000000000000000000000000000"));
-
- // If exponent is negative and base not -1, 0, or 1, return a double.
- testOp({$pow: [NumberLong("2"), NumberLong("-1")]}, 1 / 2);
- testOp({$pow: [NumberInt("4"), NumberInt("-1")]}, 1 / 4);
- testOp({$pow: [NumberInt("4"), NumberLong("-1")]}, 1 / 4);
- testOp({$pow: [NumberInt("1"), NumberLong("-2")]}, NumberLong("1"));
- testOp({$pow: [NumberInt("-1"), NumberLong("-2")]}, NumberLong("1"));
- testOp({$pow: [NumberLong("-1"), NumberLong("-3")]}, NumberLong("-1"));
- // If result would overflow a long, return a double.
- testOp({$pow: [NumberInt("2"), NumberLong("63")]}, 9223372036854776000);
- // Exact decimal result
- testOp({$pow: [NumberInt("5"), NumberDecimal("-112")]},
- NumberDecimal("5192296858534827628530496329220096E-112"));
-
- // Result would be incorrect if double were returned.
- testOp({$pow: [NumberInt("3"), NumberInt("35")]}, NumberLong("50031545098999707"));
-
- // Else if either input is a long, return a long.
- testOp({$pow: [NumberInt("-2"), NumberLong("63")]}, NumberLong("-9223372036854775808"));
- testOp({$pow: [NumberInt("4"), NumberLong("2")]}, NumberLong("16"));
- testOp({$pow: [NumberLong("4"), NumberInt("2")]}, NumberLong("16"));
- testOp({$pow: [NumberLong("4"), NumberLong("2")]}, NumberLong("16"));
-
- // Else return an int if it fits.
- testOp({$pow: [NumberInt("4"), NumberInt("2")]}, 16);
-
- // $exp always returns doubles for non-zero non-decimal inputs, since e is a double.
- testOp({$exp: [NumberInt("-1")]}, 1 / Math.E);
- testOp({$exp: [NumberLong("1")]}, Math.E);
- // $exp returns decimal results for decimal inputs
- testOp({$exp: [NumberDecimal("-1")]}, decimal1overE);
- testOp({$exp: [NumberDecimal("1")]}, decimalE);
- // Null input results in null.
- testOp({$pow: [null, 2]}, null);
- testOp({$pow: [1 / 2, null]}, null);
- testOp({$pow: [null, NumberDecimal(2)]}, null);
- testOp({$pow: [NumberDecimal("0.5"), null]}, null);
- testOp({$exp: [null]}, null);
- // NaN input results in NaN.
- testOp({$pow: [NaN, 2]}, NaN);
- testOp({$pow: [1 / 2, NaN]}, NaN);
- testOp({$pow: [NaN, NumberDecimal(2)]}, NumberDecimal("NaN"));
- testOp({$pow: [NumberDecimal("0.5"), NaN]}, NumberDecimal("NaN"));
- testOp({$exp: [NaN]}, NaN);
-
- // Invalid inputs - non-numeric/non-null types, or 0 to a negative exponent.
- assertErrorCode(coll, [{$project: {pow: {$pow: [0, NumberLong("-1")]}}}], 28764);
- assertErrorCode(coll, [{$project: {pow: {$pow: ["string", 5]}}}], 28762);
- assertErrorCode(coll, [{$project: {pow: {$pow: [5, "string"]}}}], 28763);
- assertErrorCode(coll, [{$project: {exp: {$exp: ["string"]}}}], 28765);
- assertErrorCode(coll, [{$project: {pow: {$pow: [NumberDecimal(0), NumberLong("-1")]}}}], 28764);
- assertErrorCode(coll, [{$project: {pow: {$pow: ["string", NumberDecimal(5)]}}}], 28762);
+'use strict';
+var coll = db.log_exponential_expressions;
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+var decimalE = NumberDecimal("2.718281828459045235360287471352662");
+var decimal1overE = NumberDecimal("0.3678794411714423215955237701614609");
+
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
+
+// $log, $log10, $ln.
+
+// Valid input: numeric/null/NaN, base positive and not equal to 1, arg positive.
+// - NumberDouble
+testOp({$log: [10, 10]}, 1);
+testOp({$log10: [10]}, 1);
+testOp({$ln: [Math.E]}, 1);
+// - NumberDecimal
+testOp({$log: [NumberDecimal("10"), NumberDecimal("10")]}, NumberDecimal("1"));
+testOp({$log10: [NumberDecimal("10")]}, NumberDecimal("1"));
+// The answer below is actually correct: the input is only an approximation of e.
+testOp({$ln: [decimalE]}, NumberDecimal("0.9999999999999999999999999999999998"));
+// All types converted to doubles.
+testOp({$log: [NumberLong("10"), NumberLong("10")]}, 1);
+testOp({$log10: [NumberLong("10")]}, 1);
+testOp({$ln: [NumberLong("1")]}, 0);
+// LLONG_MAX is converted to a double.
+testOp({$log: [NumberLong("9223372036854775807"), 10]}, 18.964889726830812);
+// Null inputs result in null.
+testOp({$log: [null, 10]}, null);
+testOp({$log: [10, null]}, null);
+testOp({$log: [null, NumberDecimal(10)]}, null);
+testOp({$log: [NumberDecimal(10), null]}, null);
+testOp({$log10: [null]}, null);
+testOp({$ln: [null]}, null);
+// NaN inputs result in NaN.
+testOp({$log: [NaN, 10]}, NaN);
+testOp({$log: [10, NaN]}, NaN);
+testOp({$log: [NaN, NumberDecimal(10)]}, NaN);
+testOp({$log: [NumberDecimal(10), NaN]}, NaN);
+testOp({$log10: [NaN]}, NaN);
+testOp({$ln: [NaN]}, NaN);
+
+// Invalid input: non-numeric/non-null types, bases that are non-positive or equal to 1, and
+// non-positive args.
+
+// Args/bases must be numeric or null.
+assertErrorCode(coll, [{$project: {log: {$log: ["string", 5]}}}], 28756);
+assertErrorCode(coll, [{$project: {log: {$log: [5, "string"]}}}], 28757);
+assertErrorCode(coll, [{$project: {log10: {$log10: ["string"]}}}], 28765);
+assertErrorCode(coll, [{$project: {ln: {$ln: ["string"]}}}], 28765);
+// Args/bases cannot equal 0.
+assertErrorCode(coll, [{$project: {log: {$log: [0, 5]}}}], 28758);
+assertErrorCode(coll, [{$project: {log: {$log: [5, 0]}}}], 28759);
+assertErrorCode(coll, [{$project: {log10: {$log10: [0]}}}], 28761);
+assertErrorCode(coll, [{$project: {ln: {$ln: [0]}}}], 28766);
+assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(0), NumberDecimal(5)]}}}], 28758);
+assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(0)]}}}], 28759);
+assertErrorCode(coll, [{$project: {log10: {$log10: [NumberDecimal(0)]}}}], 28761);
+assertErrorCode(coll, [{$project: {ln: {$ln: [NumberDecimal(0)]}}}], 28766);
+// Args/bases cannot be negative.
+assertErrorCode(coll, [{$project: {log: {$log: [-1, 5]}}}], 28758);
+assertErrorCode(coll, [{$project: {log: {$log: [5, -1]}}}], 28759);
+assertErrorCode(coll, [{$project: {log10: {$log10: [-1]}}}], 28761);
+assertErrorCode(coll, [{$project: {ln: {$ln: [-1]}}}], 28766);
+assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(-1), NumberDecimal(5)]}}}], 28758);
+assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(-1)]}}}], 28759);
+assertErrorCode(coll, [{$project: {log10: {$log10: [NumberDecimal(-1)]}}}], 28761);
+assertErrorCode(coll, [{$project: {ln: {$ln: [NumberDecimal(-1)]}}}], 28766);
+// Base can't equal 1.
+assertErrorCode(coll, [{$project: {log: {$log: [5, 1]}}}], 28759);
+assertErrorCode(coll, [{$project: {log: {$log: [NumberDecimal(5), NumberDecimal(1)]}}}], 28759);
+
+// $pow, $exp.
+
+// Valid input - numeric/null/NaN.
+
+// $pow -- if either input is a double return a double.
+testOp({$pow: [10, 2]}, 100);
+testOp({$pow: [1 / 2, -1]}, 2);
+testOp({$pow: [-2, 2]}, 4);
+testOp({$pow: [NumberInt("2"), 2]}, 4);
+testOp({$pow: [-2, NumberInt("2")]}, 4);
+// $pow -- if either input is a NumberDecimal, return a NumberDecimal
+testOp({$pow: [NumberDecimal("10.0"), -2]}, NumberDecimal("0.01000000000000000000000000000000000"));
+testOp({$pow: [0.5, NumberDecimal("-1")]}, NumberDecimal("2.000000000000000000000000000000000"));
+testOp({$pow: [-2, NumberDecimal("2")]}, NumberDecimal("4.000000000000000000000000000000000"));
+testOp({$pow: [NumberInt("2"), NumberDecimal("2")]},
+ NumberDecimal("4.000000000000000000000000000000000"));
+testOp({$pow: [NumberDecimal("-2.0"), NumberInt("2")]},
+ NumberDecimal("4.000000000000000000000000000000000"));
+testOp({$pow: [NumberDecimal("10.0"), 2]}, NumberDecimal("100.0000000000000000000000000000000"));
+
+// If exponent is negative and base not -1, 0, or 1, return a double.
+testOp({$pow: [NumberLong("2"), NumberLong("-1")]}, 1 / 2);
+testOp({$pow: [NumberInt("4"), NumberInt("-1")]}, 1 / 4);
+testOp({$pow: [NumberInt("4"), NumberLong("-1")]}, 1 / 4);
+testOp({$pow: [NumberInt("1"), NumberLong("-2")]}, NumberLong("1"));
+testOp({$pow: [NumberInt("-1"), NumberLong("-2")]}, NumberLong("1"));
+testOp({$pow: [NumberLong("-1"), NumberLong("-3")]}, NumberLong("-1"));
+// If result would overflow a long, return a double.
+testOp({$pow: [NumberInt("2"), NumberLong("63")]}, 9223372036854776000);
+// Exact decimal result
+testOp({$pow: [NumberInt("5"), NumberDecimal("-112")]},
+ NumberDecimal("5192296858534827628530496329220096E-112"));
+
+// Result would be incorrect if double were returned.
+testOp({$pow: [NumberInt("3"), NumberInt("35")]}, NumberLong("50031545098999707"));
+
+// Else if either input is a long, return a long.
+testOp({$pow: [NumberInt("-2"), NumberLong("63")]}, NumberLong("-9223372036854775808"));
+testOp({$pow: [NumberInt("4"), NumberLong("2")]}, NumberLong("16"));
+testOp({$pow: [NumberLong("4"), NumberInt("2")]}, NumberLong("16"));
+testOp({$pow: [NumberLong("4"), NumberLong("2")]}, NumberLong("16"));
+
+// Else return an int if it fits.
+testOp({$pow: [NumberInt("4"), NumberInt("2")]}, 16);
+
+// $exp always returns doubles for non-zero non-decimal inputs, since e is a double.
+testOp({$exp: [NumberInt("-1")]}, 1 / Math.E);
+testOp({$exp: [NumberLong("1")]}, Math.E);
+// $exp returns decimal results for decimal inputs
+testOp({$exp: [NumberDecimal("-1")]}, decimal1overE);
+testOp({$exp: [NumberDecimal("1")]}, decimalE);
+// Null input results in null.
+testOp({$pow: [null, 2]}, null);
+testOp({$pow: [1 / 2, null]}, null);
+testOp({$pow: [null, NumberDecimal(2)]}, null);
+testOp({$pow: [NumberDecimal("0.5"), null]}, null);
+testOp({$exp: [null]}, null);
+// NaN input results in NaN.
+testOp({$pow: [NaN, 2]}, NaN);
+testOp({$pow: [1 / 2, NaN]}, NaN);
+testOp({$pow: [NaN, NumberDecimal(2)]}, NumberDecimal("NaN"));
+testOp({$pow: [NumberDecimal("0.5"), NaN]}, NumberDecimal("NaN"));
+testOp({$exp: [NaN]}, NaN);
+
+// Invalid inputs - non-numeric/non-null types, or 0 to a negative exponent.
+assertErrorCode(coll, [{$project: {pow: {$pow: [0, NumberLong("-1")]}}}], 28764);
+assertErrorCode(coll, [{$project: {pow: {$pow: ["string", 5]}}}], 28762);
+assertErrorCode(coll, [{$project: {pow: {$pow: [5, "string"]}}}], 28763);
+assertErrorCode(coll, [{$project: {exp: {$exp: ["string"]}}}], 28765);
+assertErrorCode(coll, [{$project: {pow: {$pow: [NumberDecimal(0), NumberLong("-1")]}}}], 28764);
+assertErrorCode(coll, [{$project: {pow: {$pow: ["string", NumberDecimal(5)]}}}], 28762);
}());
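
For readers skimming the promotion rules above: $log takes [number, base], so any valid base works, not just 10 and e. A quick sketch against the same collection:

    // log base 2 of 8; both operands are doubles, so the result is the double 3.
    db.log_exponential_expressions.aggregate([{$project: {_id: 0, log2: {$log: [8, 2]}}}]);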
diff --git a/jstests/aggregation/bugs/server20163.js b/jstests/aggregation/bugs/server20163.js
index e61ba606c24..a03e3c70fbe 100644
--- a/jstests/aggregation/bugs/server20163.js
+++ b/jstests/aggregation/bugs/server20163.js
@@ -3,138 +3,204 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
-
- var coll = db.zip;
- coll.drop();
-
- coll.insert({'long': [1, 2, 3], 'short': ['x', 'y']});
-
- var zipObj = 3;
- assertErrorCode(coll,
- [{$project: {zipped: {$zip: zipObj}}}],
- 34460,
- "$zip requires an object" + " as an argument.");
-
- zipObj = {inputs: []};
- assertErrorCode(coll,
- [{$project: {zipped: {$zip: zipObj}}}],
- 34465,
- "$zip requires at least" + " one input array");
-
- zipObj = {inputs: {"a": "b"}};
- assertErrorCode(coll, [{$project: {zipped: {$zip: zipObj}}}], 34461, "inputs is not an array");
-
- zipObj = {inputs: ["$a"], defaults: ["A"]};
- assertErrorCode(coll,
- [{$project: {zipped: {$zip: zipObj}}}],
- 34466,
- "cannot specify defaults" + " unless useLongestLength is true.");
-
- zipObj = {inputs: ["$a"], defaults: ["A", "B"], useLongestLength: true};
- assertErrorCode(coll,
- [{$project: {zipped: {$zip: zipObj}}}],
- 34467,
- "inputs and defaults" + " must be the same length.");
-
- zipObj = {inputs: ["$a"], defaults: {"a": "b"}};
- assertErrorCode(
- coll, [{$project: {zipped: {$zip: zipObj}}}], 34462, "defaults is not an" + " array");
-
- zipObj = {inputs: ["$a"], defaults: ["A"], useLongestLength: 1};
- assertErrorCode(
- coll, [{$project: {zipped: {$zip: zipObj}}}], 34463, "useLongestLength is not" + " a bool");
-
- zipObj = {inputs: ["$a", "$b"], defaults: ["A"], notAField: 1};
- assertErrorCode(coll, [{$project: {zipped: {$zip: zipObj}}}], 34464, "unknown argument");
-
- zipObj = {inputs: ["A", "B"]};
- assertErrorCode(coll,
- [{$project: {zipped: {$zip: zipObj}}}],
- 34468,
- "an element of inputs" + " was not an array.");
-
- zipObj = {inputs: [[1, 2, 3], ["A", "B", "C"]]};
- var res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- var output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, "C"]]);
-
- zipObj = {inputs: [[1, 2, 3], null]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, null);
-
- zipObj = {inputs: [null, [1, 2, 3]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, null);
-
- zipObj = {inputs: ["$missing", [1, 2, 3]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, null);
-
- zipObj = {inputs: [undefined, [1, 2, 3]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, null);
-
- zipObj = {inputs: [[1, 2, 3], ["A", "B"]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[1, "A"], [2, "B"]]);
-
- zipObj = {inputs: [["A", "B"], [1, 2, 3]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [["A", 1], ["B", 2]]);
-
- zipObj = {inputs: [[], []]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, []);
-
- zipObj = {inputs: [["$short"], ["$long"]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[['x', 'y'], [1, 2, 3]]]);
-
- zipObj = {inputs: ["$short", "$long"]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [['x', 1], ['y', 2]]);
-
- zipObj = {inputs: [["$long"]]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[[1, 2, 3]]]);
-
- zipObj = {inputs: [[1, 2, 3], ['a', 'b', 'c'], ['c', 'b', 'a']]};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[1, 'a', 'c'], [2, 'b', 'b'], [3, 'c', 'a']]);
-
- zipObj = {inputs: [[1, 2, 3], ["A", "B"]], defaults: ["C", "D"], useLongestLength: true};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, "D"]]);
-
- zipObj = {inputs: [[1, 2, 3], ["A", "B"]], useLongestLength: true};
- res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
- output = res.toArray();
- assert.eq(1, output.length);
- assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, null]]);
+"use strict";
+
+var coll = db.zip;
+coll.drop();
+
+coll.insert({'long': [1, 2, 3], 'short': ['x', 'y']});
+
+var zipObj = 3;
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34460,
+ "$zip requires an object" +
+ " as an argument.");
+
+zipObj = {
+ inputs: []
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34465,
+ "$zip requires at least" +
+ " one input array");
+
+zipObj = {
+ inputs: {"a": "b"}
+};
+assertErrorCode(coll, [{$project: {zipped: {$zip: zipObj}}}], 34461, "inputs is not an array");
+
+zipObj = {
+ inputs: ["$a"],
+ defaults: ["A"]
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34466,
+ "cannot specify defaults" +
+ " unless useLongestLength is true.");
+
+zipObj = {
+ inputs: ["$a"],
+ defaults: ["A", "B"],
+ useLongestLength: true
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34467,
+ "inputs and defaults" +
+ " must be the same length.");
+
+zipObj = {
+ inputs: ["$a"],
+ defaults: {"a": "b"}
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34462,
+ "defaults is not an" +
+ " array");
+
+zipObj = {
+ inputs: ["$a"],
+ defaults: ["A"],
+ useLongestLength: 1
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34463,
+ "useLongestLength is not" +
+ " a bool");
+
+zipObj = {
+ inputs: ["$a", "$b"],
+ defaults: ["A"],
+ notAField: 1
+};
+assertErrorCode(coll, [{$project: {zipped: {$zip: zipObj}}}], 34464, "unknown argument");
+
+zipObj = {
+ inputs: ["A", "B"]
+};
+assertErrorCode(coll,
+ [{$project: {zipped: {$zip: zipObj}}}],
+ 34468,
+ "an element of inputs" +
+ " was not an array.");
+
+zipObj = {
+ inputs: [[1, 2, 3], ["A", "B", "C"]]
+};
+var res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+var output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, "C"]]);
+
+zipObj = {
+ inputs: [[1, 2, 3], null]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, null);
+
+zipObj = {
+ inputs: [null, [1, 2, 3]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, null);
+
+zipObj = {
+ inputs: ["$missing", [1, 2, 3]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, null);
+
+zipObj = {
+ inputs: [undefined, [1, 2, 3]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, null);
+
+zipObj = {
+ inputs: [[1, 2, 3], ["A", "B"]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[1, "A"], [2, "B"]]);
+
+zipObj = {
+ inputs: [["A", "B"], [1, 2, 3]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [["A", 1], ["B", 2]]);
+
+zipObj = {
+ inputs: [[], []]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, []);
+
+zipObj = {
+ inputs: [["$short"], ["$long"]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[['x', 'y'], [1, 2, 3]]]);
+
+zipObj = {
+ inputs: ["$short", "$long"]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [['x', 1], ['y', 2]]);
+
+zipObj = {
+ inputs: [["$long"]]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[[1, 2, 3]]]);
+
+zipObj = {
+ inputs: [[1, 2, 3], ['a', 'b', 'c'], ['c', 'b', 'a']]
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[1, 'a', 'c'], [2, 'b', 'b'], [3, 'c', 'a']]);
+
+zipObj = {
+ inputs: [[1, 2, 3], ["A", "B"]],
+ defaults: ["C", "D"],
+ useLongestLength: true
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, "D"]]);
+
+zipObj = {
+ inputs: [[1, 2, 3], ["A", "B"]],
+ useLongestLength: true
+};
+res = coll.aggregate([{$project: {zipped: {$zip: zipObj}}}]);
+output = res.toArray();
+assert.eq(1, output.length);
+assert.eq(output[0].zipped, [[1, "A"], [2, "B"], [3, null]]);
}());
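
One idiomatic $zip use the test does not show: pairing two parallel arrays and folding the pairs into an object, assuming a server new enough to have $arrayToObject:

    // $zip produces [['a', 1], ['b', 2]]; $arrayToObject folds the pairs into {a: 1, b: 2}.
    db.zip.aggregate(
        [{$project: {_id: 0, kv: {$arrayToObject: {$zip: {inputs: [['a', 'b'], [1, 2]]}}}}}]);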
diff --git a/jstests/aggregation/bugs/server20168.js b/jstests/aggregation/bugs/server20168.js
index 2ff8c6e53cd..9a886bbc279 100644
--- a/jstests/aggregation/bugs/server20168.js
+++ b/jstests/aggregation/bugs/server20168.js
@@ -1,39 +1,38 @@
// SERVER-20168: Add option to $unwind to output a null result for empty arrays.
(function() {
- "use strict";
+"use strict";
- var coll = db.server20168;
- coll.drop();
+var coll = db.server20168;
+coll.drop();
- // Should return no results on a non-existent collection.
- var results = coll.aggregate([{$unwind: {path: "$x"}}]).toArray();
- assert.eq(0, results.length, "$unwind returned the wrong number of results");
+// Should return no results on a non-existent collection.
+var results = coll.aggregate([{$unwind: {path: "$x"}}]).toArray();
+assert.eq(0, results.length, "$unwind returned the wrong number of results");
- /**
- * Asserts that with the input 'inputDoc', an $unwind stage on 'unwindPath' should produce no
- * results if preserveNullAndEmptyArrays is not specified, and produces one result, equal to
- * 'outputDoc', if it is specified.
- */
- function testPreserveNullAndEmptyArraysParam(inputDoc, unwindPath, outputDoc) {
- coll.drop();
- assert.writeOK(coll.insert(inputDoc));
+/**
+ * Asserts that, with the input 'inputDoc', an $unwind stage on 'unwindPath' produces no
+ * results if preserveNullAndEmptyArrays is not specified, and produces one result, equal to
+ * 'outputDoc', if it is specified.
+ */
+function testPreserveNullAndEmptyArraysParam(inputDoc, unwindPath, outputDoc) {
+ coll.drop();
+ assert.writeOK(coll.insert(inputDoc));
- // If preserveNullAndEmptyArrays is passed, we should get an output document.
- var preservedResults =
- coll.aggregate([{$unwind: {path: unwindPath, preserveNullAndEmptyArrays: true}}])
- .toArray();
- assert.eq(1, preservedResults.length, "$unwind returned the wrong number of results");
- assert.eq(preservedResults[0],
- outputDoc,
- "Unexpected result for an $unwind with preserveNullAndEmptyArrays " +
- "(input was " + tojson(inputDoc) + ")");
+ // If preserveNullAndEmptyArrays is passed, we should get an output document.
+ var preservedResults =
+ coll.aggregate([{$unwind: {path: unwindPath, preserveNullAndEmptyArrays: true}}]).toArray();
+ assert.eq(1, preservedResults.length, "$unwind returned the wrong number of results");
+ assert.eq(preservedResults[0],
+ outputDoc,
+ "Unexpected result for an $unwind with preserveNullAndEmptyArrays " +
+ "(input was " + tojson(inputDoc) + ")");
- // If not, we should get no outputs.
- var defaultResults = coll.aggregate([{$unwind: {path: unwindPath}}]).toArray();
- assert.eq(0, defaultResults.length, "$unwind returned the wrong number of results");
- }
+ // If not, we should get no outputs.
+ var defaultResults = coll.aggregate([{$unwind: {path: unwindPath}}]).toArray();
+ assert.eq(0, defaultResults.length, "$unwind returned the wrong number of results");
+}
- testPreserveNullAndEmptyArraysParam({_id: 0}, "$x", {_id: 0});
- testPreserveNullAndEmptyArraysParam({_id: 0, x: null}, "$x", {_id: 0, x: null});
- testPreserveNullAndEmptyArraysParam({_id: 0, x: []}, "$x", {_id: 0});
+testPreserveNullAndEmptyArraysParam({_id: 0}, "$x", {_id: 0});
+testPreserveNullAndEmptyArraysParam({_id: 0, x: null}, "$x", {_id: 0, x: null});
+testPreserveNullAndEmptyArraysParam({_id: 0, x: []}, "$x", {_id: 0});
}());
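
The option matters most after $lookup, where preserving null and empty arrays gives left-outer-join semantics. A sketch with hypothetical orders/items collections:

    // Orders with no matching item survive the $unwind instead of being dropped.
    db.orders.aggregate([
        {$lookup: {from: 'items', localField: 'itemId', foreignField: '_id', as: 'item'}},
        {$unwind: {path: '$item', preserveNullAndEmptyArrays: true}}
    ]);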
diff --git a/jstests/aggregation/bugs/server20169.js b/jstests/aggregation/bugs/server20169.js
index 27995b8030c..2b5a969f803 100644
--- a/jstests/aggregation/bugs/server20169.js
+++ b/jstests/aggregation/bugs/server20169.js
@@ -3,59 +3,67 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
-
- var coll = db.range;
- coll.drop();
-
- // We need an input document to receive an output document.
- coll.insert({});
-
- var rangeObj = [1];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 28667,
- "range requires two" + " or three arguments");
-
- rangeObj = ["a", 1];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34443,
- "range requires a" + " numeric starting value");
-
- rangeObj = [1.1, 1];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34444,
- "range requires an" + " integral starting value");
-
- rangeObj = [1, "a"];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34445,
- "range requires a" + " numeric ending value");
-
- rangeObj = [1, 1.1];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34446,
- "range requires an" + " integral ending value");
-
- rangeObj = [1, 3, "a"];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34447,
- "range requires a" + " numeric step value");
-
- rangeObj = [1, 3, 1.1];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34448,
- "range requires an" + " integral step value");
-
- rangeObj = [1, 3, 0];
- assertErrorCode(coll,
- [{$project: {range: {$range: rangeObj}}}],
- 34449,
- "range requires a" + " non-zero step value");
+"use strict";
+
+var coll = db.range;
+coll.drop();
+
+// We need an input document to receive an output document.
+coll.insert({});
+
+var rangeObj = [1];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 28667,
+ "range requires two" +
+ " or three arguments");
+
+rangeObj = ["a", 1];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34443,
+ "range requires a" +
+ " numeric starting value");
+
+rangeObj = [1.1, 1];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34444,
+ "range requires an" +
+ " integral starting value");
+
+rangeObj = [1, "a"];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34445,
+ "range requires a" +
+ " numeric ending value");
+
+rangeObj = [1, 1.1];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34446,
+ "range requires an" +
+ " integral ending value");
+
+rangeObj = [1, 3, "a"];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34447,
+ "range requires a" +
+ " numeric step value");
+
+rangeObj = [1, 3, 1.1];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34448,
+ "range requires an" +
+ " integral step value");
+
+rangeObj = [1, 3, 0];
+assertErrorCode(coll,
+ [{$project: {range: {$range: rangeObj}}}],
+ 34449,
+ "range requires a" +
+ " non-zero step value");
}());
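
Since the test only exercises the error paths, a valid call for contrast; note that the end value is exclusive:

    // [start, end, step] with integral values and a non-zero step.
    db.range.aggregate([{$project: {_id: 0, r: {$range: [0, 10, 3]}}}]);
    // yields {r: [0, 3, 6, 9]}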
diff --git a/jstests/aggregation/bugs/server21632.js b/jstests/aggregation/bugs/server21632.js
index 944ca114ab6..c23d8836bea 100644
--- a/jstests/aggregation/bugs/server21632.js
+++ b/jstests/aggregation/bugs/server21632.js
@@ -11,77 +11,76 @@
// 2. We should not see any duplicate documents in any one $sample (this is only guaranteed if
// there are no ongoing write operations).
(function() {
- "use strict";
-
- var coll = db.server21632;
- coll.drop();
-
- // If there is no collection, or no documents in the collection, we should not get any results
- // from a sample.
- assert.eq([], coll.aggregate([{$sample: {size: 1}}]).toArray());
- assert.eq([], coll.aggregate([{$sample: {size: 10}}]).toArray());
-
- db.createCollection(coll.getName());
-
- // Test if we are running WT + LSM and if so, skip the test.
- // WiredTiger LSM random cursor implementation doesn't currently give random enough
- // distribution to pass this test case, so disable the test when checking an LSM
- // configuration for now. We will need revisit this before releasing WiredTiger LSM
- // as a supported file type. (See: WT-2403 for details on forthcoming changes)
-
- var storageEngine = jsTest.options().storageEngine || "wiredTiger";
-
- if (storageEngine == "wiredTiger" && coll.stats().wiredTiger.type == 'lsm') {
- return;
- }
-
- assert.eq([], coll.aggregate([{$sample: {size: 1}}]).toArray());
- assert.eq([], coll.aggregate([{$sample: {size: 10}}]).toArray());
-
- // If there is only one document, we should get that document.
- var paddingStr = "abcdefghijklmnopqrstuvwxyz";
- var firstDoc = {_id: 0, paddingStr: paddingStr};
- assert.writeOK(coll.insert(firstDoc));
- assert.eq([firstDoc], coll.aggregate([{$sample: {size: 1}}]).toArray());
- assert.eq([firstDoc], coll.aggregate([{$sample: {size: 10}}]).toArray());
-
- // Insert a bunch of documents.
- var bulk = coll.initializeUnorderedBulkOp();
- var nDocs = 1000;
- for (var id = 1; id < nDocs; id++) {
- bulk.insert({_id: id, paddingStr: paddingStr});
- }
- bulk.execute();
-
- // Will contain a document's _id as a key if we've ever seen that document.
- var cumulativeSeenIds = {};
- var sampleSize = 10;
-
- jsTestLog("About to do repeated samples, explain output: " +
- tojson(coll.explain().aggregate([{$sample: {size: sampleSize}}])));
-
- // Repeatedly ask for small samples of documents to get a cumulative sample of size 'nDocs'.
- for (var i = 0; i < nDocs / sampleSize; i++) {
- var results = coll.aggregate([{$sample: {size: sampleSize}}]).toArray();
-
- assert.eq(
- results.length, sampleSize, "$sample did not return the expected number of results");
-
- // Check that there are no duplicate documents in the result of any single sample.
- var idsThisSample = {};
- results.forEach(function recordId(result) {
- assert.lte(result._id, nDocs, "$sample returned an unknown document");
- assert(!idsThisSample[result._id],
- "A single $sample returned the same document twice: " + result._id);
-
- cumulativeSeenIds[result._id] = true;
- idsThisSample[result._id] = true;
- });
- }
-
- // An implementation would have to be very broken for this assertion to fail.
- assert.gte(Object.keys(cumulativeSeenIds).length, nDocs / 4);
-
- // Make sure we can return all documents in the collection.
- assert.eq(coll.aggregate([{$sample: {size: nDocs}}]).toArray().length, nDocs);
+"use strict";
+
+var coll = db.server21632;
+coll.drop();
+
+// If there is no collection, or no documents in the collection, we should not get any results
+// from a sample.
+assert.eq([], coll.aggregate([{$sample: {size: 1}}]).toArray());
+assert.eq([], coll.aggregate([{$sample: {size: 10}}]).toArray());
+
+db.createCollection(coll.getName());
+
+// Test if we are running WT + LSM and if so, skip the test.
+// The WiredTiger LSM random cursor implementation doesn't currently give a random enough
+// distribution to pass this test case, so disable the test when checking an LSM
+// configuration for now. We will need to revisit this before releasing WiredTiger LSM
+// as a supported file type. (See WT-2403 for details on the forthcoming changes.)
+
+var storageEngine = jsTest.options().storageEngine || "wiredTiger";
+
+if (storageEngine == "wiredTiger" && coll.stats().wiredTiger.type == 'lsm') {
+ return;
+}
+
+assert.eq([], coll.aggregate([{$sample: {size: 1}}]).toArray());
+assert.eq([], coll.aggregate([{$sample: {size: 10}}]).toArray());
+
+// If there is only one document, we should get that document.
+var paddingStr = "abcdefghijklmnopqrstuvwxyz";
+var firstDoc = {_id: 0, paddingStr: paddingStr};
+assert.writeOK(coll.insert(firstDoc));
+assert.eq([firstDoc], coll.aggregate([{$sample: {size: 1}}]).toArray());
+assert.eq([firstDoc], coll.aggregate([{$sample: {size: 10}}]).toArray());
+
+// Insert a bunch of documents.
+var bulk = coll.initializeUnorderedBulkOp();
+var nDocs = 1000;
+for (var id = 1; id < nDocs; id++) {
+ bulk.insert({_id: id, paddingStr: paddingStr});
+}
+bulk.execute();
+
+// Will contain a document's _id as a key if we've ever seen that document.
+var cumulativeSeenIds = {};
+var sampleSize = 10;
+
+jsTestLog("About to do repeated samples, explain output: " +
+ tojson(coll.explain().aggregate([{$sample: {size: sampleSize}}])));
+
+// Repeatedly ask for small samples of documents to get a cumulative sample of size 'nDocs'.
+for (var i = 0; i < nDocs / sampleSize; i++) {
+ var results = coll.aggregate([{$sample: {size: sampleSize}}]).toArray();
+
+ assert.eq(results.length, sampleSize, "$sample did not return the expected number of results");
+
+ // Check that there are no duplicate documents in the result of any single sample.
+ var idsThisSample = {};
+ results.forEach(function recordId(result) {
+ assert.lte(result._id, nDocs, "$sample returned an unknown document");
+ assert(!idsThisSample[result._id],
+ "A single $sample returned the same document twice: " + result._id);
+
+ cumulativeSeenIds[result._id] = true;
+ idsThisSample[result._id] = true;
+ });
+}
+
+// An implementation would have to be very broken for this assertion to fail.
+assert.gte(Object.keys(cumulativeSeenIds).length, nDocs / 4);
+
+// Make sure we can return all documents in the collection.
+assert.eq(coll.aggregate([{$sample: {size: nDocs}}]).toArray().length, nDocs);
})();
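
The final assertion relies on $sample returning every document when the requested size meets or exceeds the collection size; only the order is randomized. For instance:

    // Asking for more documents than exist still returns exactly nDocs results.
    assert.eq(db.server21632.aggregate([{$sample: {size: 2000}}]).itcount(), 1000);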
diff --git a/jstests/aggregation/bugs/server22093.js b/jstests/aggregation/bugs/server22093.js
index 61068e38493..618c65f85b7 100644
--- a/jstests/aggregation/bugs/server22093.js
+++ b/jstests/aggregation/bugs/server22093.js
@@ -11,42 +11,42 @@
load('jstests/libs/analyze_plan.js');
(function() {
- "use strict";
+"use strict";
- var coll = db.countscan;
- coll.drop();
+var coll = db.countscan;
+coll.drop();
- for (var i = 0; i < 3; i++) {
- for (var j = 0; j < 10; j += 2) {
- coll.insert({foo: i, bar: j});
- }
+for (var i = 0; i < 3; i++) {
+ for (var j = 0; j < 10; j += 2) {
+ coll.insert({foo: i, bar: j});
}
+}
- coll.ensureIndex({foo: 1});
+coll.ensureIndex({foo: 1});
- var simpleGroup = coll.aggregate([{$group: {_id: null, count: {$sum: 1}}}]).toArray();
+var simpleGroup = coll.aggregate([{$group: {_id: null, count: {$sum: 1}}}]).toArray();
- assert.eq(simpleGroup.length, 1);
- assert.eq(simpleGroup[0]["count"], 15);
+assert.eq(simpleGroup.length, 1);
+assert.eq(simpleGroup[0]["count"], 15);
- var explained = coll.explain().aggregate(
- [{$match: {foo: {$gt: 0}}}, {$group: {_id: null, count: {$sum: 1}}}]);
+var explained =
+ coll.explain().aggregate([{$match: {foo: {$gt: 0}}}, {$group: {_id: null, count: {$sum: 1}}}]);
- assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
+assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
- explained = coll.explain().aggregate([
- {$match: {foo: {$gt: 0}}},
- {$project: {_id: 0, a: {$literal: null}}},
- {$group: {_id: null, count: {$sum: 1}}}
- ]);
+explained = coll.explain().aggregate([
+ {$match: {foo: {$gt: 0}}},
+ {$project: {_id: 0, a: {$literal: null}}},
+ {$group: {_id: null, count: {$sum: 1}}}
+]);
- assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
+assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
- // Make sure a $count stage can use the COUNT_SCAN optimization.
- explained = coll.explain().aggregate([{$match: {foo: {$gt: 0}}}, {$count: "count"}]);
- assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
+// Make sure a $count stage can use the COUNT_SCAN optimization.
+explained = coll.explain().aggregate([{$match: {foo: {$gt: 0}}}, {$count: "count"}]);
+assert(planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
- // A $match that is not a single range cannot use the COUNT_SCAN optimization.
- explained = coll.explain().aggregate([{$match: {foo: {$in: [0, 1]}}}, {$count: "count"}]);
- assert(!planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
+// A $match that is not a single range cannot use the COUNT_SCAN optimization.
+explained = coll.explain().aggregate([{$match: {foo: {$in: [0, 1]}}}, {$count: "count"}]);
+assert(!planHasStage(db, explained.stages[0].$cursor.queryPlanner.winningPlan, "COUNT_SCAN"));
}());
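
For anyone reproducing this in the shell, the winning plan can be inspected the same way the test does; COUNT_SCAN should appear only for the single-range $match:

    var ex = db.countscan.explain().aggregate([{$match: {foo: {$gt: 0}}}, {$count: 'n'}]);
    // The aggregation wraps a query layer; its plan lives under the first $cursor stage.
    printjson(ex.stages[0].$cursor.queryPlanner.winningPlan);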
diff --git a/jstests/aggregation/bugs/server22580.js b/jstests/aggregation/bugs/server22580.js
index 3a448173875..3b9f81dbcfc 100644
--- a/jstests/aggregation/bugs/server22580.js
+++ b/jstests/aggregation/bugs/server22580.js
@@ -3,41 +3,46 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
-
- var coll = db.substrCP;
- coll.drop();
-
- // Need an empty document for pipeline.
- coll.insert({});
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", 0, "a"]}}}],
- 34452,
- "$substrCP" + " does not accept non-numeric types as a length.");
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", 0, NaN]}}}],
- 34453,
- "$substrCP" + " does not accept non-integers as a length.");
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", "abc", 3]}}}],
- 34450,
- "$substrCP does not accept non-numeric types as a starting index.");
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", 2.2, 3]}}}],
- 34451,
- "$substrCP" + " does not accept non-integers as a starting index.");
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", -1, 3]}}}],
- 34455,
- "$substrCP " + "does not accept negative integers as inputs.");
-
- assertErrorCode(coll,
- [{$project: {substr: {$substrCP: ["abc", 1, -3]}}}],
- 34454,
- "$substrCP " + "does not accept negative integers as inputs.");
+"use strict";
+
+var coll = db.substrCP;
+coll.drop();
+
+// Need an empty document for pipeline.
+coll.insert({});
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", 0, "a"]}}}],
+ 34452,
+ "$substrCP" +
+ " does not accept non-numeric types as a length.");
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", 0, NaN]}}}],
+ 34453,
+ "$substrCP" +
+ " does not accept non-integers as a length.");
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", "abc", 3]}}}],
+ 34450,
+ "$substrCP does not accept non-numeric types as a starting index.");
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", 2.2, 3]}}}],
+ 34451,
+ "$substrCP" +
+ " does not accept non-integers as a starting index.");
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", -1, 3]}}}],
+ 34455,
+ "$substrCP " +
+ "does not accept negative integers as inputs.");
+
+assertErrorCode(coll,
+ [{$project: {substr: {$substrCP: ["abc", 1, -3]}}}],
+ 34454,
+ "$substrCP " +
+ "does not accept negative integers as inputs.");
}());
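
The distinction these error codes enforce: $substrCP counts UTF-8 code points, unlike $substrBytes, which counts bytes. A positive sketch for contrast (the string literal is arbitrary):

    db.substrCP.aggregate([{
        $project: {
            _id: 0,
            cp: {$substrCP: ['café', 0, 4]},       // "café": 4 code points
            bytes: {$substrBytes: ['café', 0, 3]}  // "caf": 3 bytes; 'é' occupies 2 bytes
        }
    }]);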
diff --git a/jstests/aggregation/bugs/server25590.js b/jstests/aggregation/bugs/server25590.js
index 329ae808a2c..b478f806029 100644
--- a/jstests/aggregation/bugs/server25590.js
+++ b/jstests/aggregation/bugs/server25590.js
@@ -1,19 +1,19 @@
// Test that an aggregate command where the "pipeline" field has the wrong type fails with a
// TypeMismatch error.
(function() {
- "use strict";
+"use strict";
- const coll = db.server25590;
- coll.drop();
+const coll = db.server25590;
+coll.drop();
- assert.writeOK(coll.insert({}));
+assert.writeOK(coll.insert({}));
- assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: 1}),
- ErrorCodes.TypeMismatch);
- assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: {}}),
- ErrorCodes.TypeMismatch);
- assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: [1, 2]}),
- ErrorCodes.TypeMismatch);
- assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: [1, null]}),
- ErrorCodes.TypeMismatch);
+assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: 1}),
+ ErrorCodes.TypeMismatch);
+assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: {}}),
+ ErrorCodes.TypeMismatch);
+assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: [1, 2]}),
+ ErrorCodes.TypeMismatch);
+assert.commandFailedWithCode(db.runCommand({aggregate: coll.getName(), pipeline: [1, null]}),
+ ErrorCodes.TypeMismatch);
})();
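
For contrast, a well-formed command: 'pipeline' must be an array of stage documents, and (since MongoDB 3.6) the command also requires a 'cursor' argument:

    assert.commandWorked(
        db.runCommand({aggregate: 'server25590', pipeline: [{$match: {}}], cursor: {}}));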
diff --git a/jstests/aggregation/bugs/server26462.js b/jstests/aggregation/bugs/server26462.js
index b0ef33ae35b..08225e54ce3 100644
--- a/jstests/aggregation/bugs/server26462.js
+++ b/jstests/aggregation/bugs/server26462.js
@@ -1,29 +1,29 @@
// Tests that adding a field that only contains metadata does not cause a segmentation fault when
// grouping on the added field.
(function() {
- "use strict";
+"use strict";
- // Drop the old test collection, if any.
- db.server26462.drop();
+// Drop the old test collection, if any.
+db.server26462.drop();
- // Insert some test documents into the collection.
- assert.writeOK(db.server26462.insert({"_id": 1, "title": "cakes and ale"}));
- assert.writeOK(db.server26462.insert({"_id": 2, "title": "more cakes"}));
- assert.writeOK(db.server26462.insert({"_id": 3, "title": "bread"}));
- assert.writeOK(db.server26462.insert({"_id": 4, "title": "some cakes"}));
+// Insert some test documents into the collection.
+assert.writeOK(db.server26462.insert({"_id": 1, "title": "cakes and ale"}));
+assert.writeOK(db.server26462.insert({"_id": 2, "title": "more cakes"}));
+assert.writeOK(db.server26462.insert({"_id": 3, "title": "bread"}));
+assert.writeOK(db.server26462.insert({"_id": 4, "title": "some cakes"}));
- // Create a text index on the documents.
- assert.commandWorked(db.server26462.createIndex({title: "text"}));
+// Create a text index on the documents.
+assert.commandWorked(db.server26462.createIndex({title: "text"}));
- // Add a metadata only field in the aggregation pipeline and use that field in the $group _id.
- let res = db.server26462
- .aggregate([
- {$match: {$text: {$search: "cake"}}},
- {$addFields: {fooScore: {$meta: "textScore"}}},
- {$group: {_id: "$fooScore", count: {$sum: 1}}}
- ])
- .itcount();
+// Add a metadata-only field in the aggregation pipeline and use that field in the $group _id.
+let res = db.server26462
+ .aggregate([
+ {$match: {$text: {$search: "cake"}}},
+ {$addFields: {fooScore: {$meta: "textScore"}}},
+ {$group: {_id: "$fooScore", count: {$sum: 1}}}
+ ])
+ .itcount();
- // Assert that the command worked.
- assert.eq(2, res);
+// Assert that the command worked.
+assert.eq(2, res);
})();
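
The same $meta projection is more commonly used for relevance sorting; a sketch against the collection above:

    // Project the text score, then sort on it descending.
    db.server26462.aggregate([
        {$match: {$text: {$search: 'cake'}}},
        {$addFields: {score: {$meta: 'textScore'}}},
        {$sort: {score: -1}}
    ]);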
diff --git a/jstests/aggregation/bugs/server37750.js b/jstests/aggregation/bugs/server37750.js
index cdfd098d87d..902c427c292 100644
--- a/jstests/aggregation/bugs/server37750.js
+++ b/jstests/aggregation/bugs/server37750.js
@@ -6,75 +6,75 @@
* requires_sharding]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.
- // Set up a 2-shard cluster. Configure 'internalQueryExecYieldIterations' on both shards such
- // that operations will yield on each PlanExecuter iteration.
- const st = new ShardingTest({
- name: jsTestName(),
- shards: 2,
- rs: {nodes: 1, setParameter: {internalQueryExecYieldIterations: 1}}
- });
+// Set up a 2-shard cluster. Configure 'internalQueryExecYieldIterations' on both shards such
+// that operations will yield on each PlanExecutor iteration.
+const st = new ShardingTest({
+ name: jsTestName(),
+ shards: 2,
+ rs: {nodes: 1, setParameter: {internalQueryExecYieldIterations: 1}}
+});
- const mongosDB = st.s.getDB(jsTestName());
- const mongosColl = mongosDB.test;
+const mongosDB = st.s.getDB(jsTestName());
+const mongosColl = mongosDB.test;
- // Shard the test collection, split it at {_id: 0}, and move the upper chunk to shard1.
- st.shardColl(mongosColl, {_id: 1}, {_id: 0}, {_id: 0});
+// Shard the test collection, split it at {_id: 0}, and move the upper chunk to shard1.
+st.shardColl(mongosColl, {_id: 1}, {_id: 0}, {_id: 0});
- // Insert enough documents on each shard to induce the $sample random-cursor optimization.
- for (let i = (-150); i < 150; ++i) {
- assert.commandWorked(mongosColl.insert({_id: i}));
- }
+// Insert enough documents on each shard to induce the $sample random-cursor optimization.
+for (let i = (-150); i < 150; ++i) {
+ assert.commandWorked(mongosColl.insert({_id: i}));
+}
- // Run the initial aggregate for the $sample stage.
- const cmdRes = assert.commandWorked(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: [{$sample: {size: 3}}],
- comment: "$sample random",
- cursor: {batchSize: 0}
- }));
- assert.eq(cmdRes.cursor.firstBatch.length, 0);
+// Run the initial aggregate for the $sample stage.
+const cmdRes = assert.commandWorked(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: [{$sample: {size: 3}}],
+ comment: "$sample random",
+ cursor: {batchSize: 0}
+}));
+assert.eq(cmdRes.cursor.firstBatch.length, 0);
- // Force each shard to hang on yield to allow for currentOp capture.
- FixtureHelpers.runCommandOnEachPrimary({
- db: mongosDB.getSiblingDB("admin"),
- cmdObj: {
- configureFailPoint: "setYieldAllLocksHang",
- mode: "alwaysOn",
- data: {namespace: mongosColl.getFullName()}
- }
- });
+// Force each shard to hang on yield to allow for currentOp capture.
+FixtureHelpers.runCommandOnEachPrimary({
+ db: mongosDB.getSiblingDB("admin"),
+ cmdObj: {
+ configureFailPoint: "setYieldAllLocksHang",
+ mode: "alwaysOn",
+ data: {namespace: mongosColl.getFullName()}
+ }
+});
- // Run $currentOp to confirm that the $sample getMore yields on both shards.
- const awaitShell = startParallelShell(() => {
- load("jstests/libs/fixture_helpers.js");
- assert.soon(() => db.getSiblingDB("admin")
- .aggregate([
- {$currentOp: {}},
- {
- $match: {
- "cursor.originatingCommand.comment": "$sample random",
- planSummary: "QUEUED_DATA, MULTI_ITERATOR",
- numYields: {$gt: 0}
- }
+// Run $currentOp to confirm that the $sample getMore yields on both shards.
+const awaitShell = startParallelShell(() => {
+ load("jstests/libs/fixture_helpers.js");
+ assert.soon(() => db.getSiblingDB("admin")
+ .aggregate([
+ {$currentOp: {}},
+ {
+ $match: {
+ "cursor.originatingCommand.comment": "$sample random",
+ planSummary: "QUEUED_DATA, MULTI_ITERATOR",
+ numYields: {$gt: 0}
}
- ])
- .itcount() === 2);
- // Release the failpoint and allow the getMores to complete.
- FixtureHelpers.runCommandOnEachPrimary({
- db: db.getSiblingDB("admin"),
- cmdObj: {configureFailPoint: "setYieldAllLocksHang", mode: "off"}
- });
- }, mongosDB.getMongo().port);
+ }
+ ])
+ .itcount() === 2);
+ // Release the failpoint and allow the getMores to complete.
+ FixtureHelpers.runCommandOnEachPrimary({
+ db: db.getSiblingDB("admin"),
+ cmdObj: {configureFailPoint: "setYieldAllLocksHang", mode: "off"}
+ });
+}, mongosDB.getMongo().port);
- // Retrieve the results for the $sample aggregation.
- const sampleCursor = new DBCommandCursor(mongosDB, cmdRes);
- assert.eq(sampleCursor.toArray().length, 3);
+// Retrieve the results for the $sample aggregation.
+const sampleCursor = new DBCommandCursor(mongosDB, cmdRes);
+assert.eq(sampleCursor.toArray().length, 3);
- // Confirm that the parallel shell completes successfully, and tear down the cluster.
- awaitShell();
- st.stop();
+// Confirm that the parallel shell completes successfully, and tear down the cluster.
+awaitShell();
+st.stop();
})();
\ No newline at end of file
diff --git a/jstests/aggregation/bugs/server4588.js b/jstests/aggregation/bugs/server4588.js
index 000cc8f0231..be04773c0ff 100644
--- a/jstests/aggregation/bugs/server4588.js
+++ b/jstests/aggregation/bugs/server4588.js
@@ -1,60 +1,55 @@
// SERVER-4588 Add option to $unwind to emit array index.
(function() {
- "use strict";
+"use strict";
- const coll = db.server4588;
- coll.drop();
+const coll = db.server4588;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
- assert.writeOK(coll.insert({_id: 1, x: null}));
- assert.writeOK(coll.insert({_id: 2, x: []}));
- assert.writeOK(coll.insert({_id: 3, x: [1, 2, 3]}));
- assert.writeOK(coll.insert({_id: 4, x: 5}));
+assert.writeOK(coll.insert({_id: 0}));
+assert.writeOK(coll.insert({_id: 1, x: null}));
+assert.writeOK(coll.insert({_id: 2, x: []}));
+assert.writeOK(coll.insert({_id: 3, x: [1, 2, 3]}));
+assert.writeOK(coll.insert({_id: 4, x: 5}));
- // Without includeArrayIndex.
- let actualResults =
- coll.aggregate([{$unwind: {path: "$x"}}, {$sort: {_id: 1, x: 1}}]).toArray();
- let expectedResults = [
- {_id: 3, x: 1},
- {_id: 3, x: 2},
- {_id: 3, x: 3},
- {_id: 4, x: 5},
- ];
- assert.eq(expectedResults, actualResults, "Incorrect results for normal $unwind");
+// Without includeArrayIndex.
+let actualResults = coll.aggregate([{$unwind: {path: "$x"}}, {$sort: {_id: 1, x: 1}}]).toArray();
+let expectedResults = [
+ {_id: 3, x: 1},
+ {_id: 3, x: 2},
+ {_id: 3, x: 3},
+ {_id: 4, x: 5},
+];
+assert.eq(expectedResults, actualResults, "Incorrect results for normal $unwind");
- // With includeArrayIndex, index inserted into a new field.
- actualResults =
- coll.aggregate(
- [{$unwind: {path: "$x", includeArrayIndex: "index"}}, {$sort: {_id: 1, x: 1}}])
- .toArray();
- expectedResults = [
- {_id: 3, x: 1, index: NumberLong(0)},
- {_id: 3, x: 2, index: NumberLong(1)},
- {_id: 3, x: 3, index: NumberLong(2)},
- {_id: 4, x: 5, index: null},
- ];
- assert.eq(expectedResults, actualResults, "Incorrect results $unwind with includeArrayIndex");
+// With includeArrayIndex, index inserted into a new field.
+actualResults =
+ coll.aggregate([{$unwind: {path: "$x", includeArrayIndex: "index"}}, {$sort: {_id: 1, x: 1}}])
+ .toArray();
+expectedResults = [
+ {_id: 3, x: 1, index: NumberLong(0)},
+ {_id: 3, x: 2, index: NumberLong(1)},
+ {_id: 3, x: 3, index: NumberLong(2)},
+ {_id: 4, x: 5, index: null},
+];
+assert.eq(expectedResults, actualResults, "Incorrect results $unwind with includeArrayIndex");
- // With both includeArrayIndex and preserveNullAndEmptyArrays.
- actualResults =
- coll.aggregate([
- {
- $unwind:
- {path: "$x", includeArrayIndex: "index", preserveNullAndEmptyArrays: true}
- },
- {$sort: {_id: 1, x: 1}}
- ])
- .toArray();
- expectedResults = [
- {_id: 0, index: null},
- {_id: 1, x: null, index: null},
- {_id: 2, index: null},
- {_id: 3, x: 1, index: NumberLong(0)},
- {_id: 3, x: 2, index: NumberLong(1)},
- {_id: 3, x: 3, index: NumberLong(2)},
- {_id: 4, x: 5, index: null},
- ];
- assert.eq(expectedResults,
- actualResults,
- "Incorrect results $unwind with includeArrayIndex and preserveNullAndEmptyArrays");
+// With both includeArrayIndex and preserveNullAndEmptyArrays.
+actualResults =
+ coll.aggregate([
+ {$unwind: {path: "$x", includeArrayIndex: "index", preserveNullAndEmptyArrays: true}},
+ {$sort: {_id: 1, x: 1}}
+ ])
+ .toArray();
+expectedResults = [
+ {_id: 0, index: null},
+ {_id: 1, x: null, index: null},
+ {_id: 2, index: null},
+ {_id: 3, x: 1, index: NumberLong(0)},
+ {_id: 3, x: 2, index: NumberLong(1)},
+ {_id: 3, x: 3, index: NumberLong(2)},
+ {_id: 4, x: 5, index: null},
+];
+assert.eq(expectedResults,
+ actualResults,
+ "Incorrect results $unwind with includeArrayIndex and preserveNullAndEmptyArrays");
}());
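The includeArrayIndex behavior under test condenses to a minimal shell sketch (collection and field names here are illustrative, not from the test):

    db.example.drop();
    db.example.insert({_id: 3, x: [10, 20]});
    db.example.aggregate([{$unwind: {path: "$x", includeArrayIndex: "idx"}}]);
    // Emits {_id: 3, x: 10, idx: NumberLong(0)} and {_id: 3, x: 20, idx: NumberLong(1)}.
    // As the expected results above show, non-array values unwind to a single
    // document with a null index.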
diff --git a/jstests/aggregation/bugs/server4589.js b/jstests/aggregation/bugs/server4589.js
index e7f2e1b9746..efa7254e4d9 100644
--- a/jstests/aggregation/bugs/server4589.js
+++ b/jstests/aggregation/bugs/server4589.js
@@ -4,67 +4,67 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
-
- var coll = db.agg_array_elem_at_expr;
- coll.drop();
-
- assert.writeOK(coll.insert({a: [1, 2, 3, 4, 5]}));
-
- // Normal indexing.
- var pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 2]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: 3}]);
-
- // Indexing with a float.
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 1.0]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: 2}]);
-
- // Indexing with a decimal
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberDecimal('2.0')]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: 3}]);
-
- // Negative indexing.
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -1]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: 5}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -5]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: 1}]);
-
- // Out of bounds positive.
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 5]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', Math.pow(2, 31) - 1]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberLong(Math.pow(2, 31) - 1)]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
-
- // Out of bounds negative.
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -6]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -Math.pow(2, 31)]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberLong(-Math.pow(2, 31))]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
-
- // Null inputs.
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', null]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: null}]);
- pipeline = [{$project: {_id: 0, x: {$arrayElemAt: [null, 4]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{x: null}]);
-
- // Error cases.
-
- // Wrong number of arguments.
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [['one', 'arg']]}}}], 16020);
-
- // First argument is not an array.
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: ['one', 2]}}}], 28689);
-
- // Second argument is not numeric.
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], '2']}}}], 28690);
-
- // Second argument is not integral.
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], 1.5]}}}], 28691);
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], NumberDecimal('1.5')]}}}], 28691);
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], Math.pow(2, 32)]}}}], 28691);
- assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], -Math.pow(2, 31) - 1]}}}], 28691);
+'use strict';
+
+var coll = db.agg_array_elem_at_expr;
+coll.drop();
+
+assert.writeOK(coll.insert({a: [1, 2, 3, 4, 5]}));
+
+// Normal indexing.
+var pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 2]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: 3}]);
+
+// Indexing with a float.
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 1.0]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: 2}]);
+
+// Indexing with a decimal
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberDecimal('2.0')]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: 3}]);
+
+// Negative indexing.
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -1]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: 5}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -5]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: 1}]);
+
+// Out of bounds positive.
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', 5]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', Math.pow(2, 31) - 1]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberLong(Math.pow(2, 31) - 1)]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+
+// Out of bounds negative.
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -6]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', -Math.pow(2, 31)]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', NumberLong(-Math.pow(2, 31))]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{}]);
+
+// Null inputs.
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: ['$a', null]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: null}]);
+pipeline = [{$project: {_id: 0, x: {$arrayElemAt: [null, 4]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{x: null}]);
+
+// Error cases.
+
+// Wrong number of arguments.
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [['one', 'arg']]}}}], 16020);
+
+// First argument is not an array.
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: ['one', 2]}}}], 28689);
+
+// Second argument is not numeric.
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], '2']}}}], 28690);
+
+// Second argument is not integral.
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], 1.5]}}}], 28691);
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], NumberDecimal('1.5')]}}}], 28691);
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], Math.pow(2, 32)]}}}], 28691);
+assertErrorCode(coll, [{$project: {x: {$arrayElemAt: [[1, 2], -Math.pow(2, 31) - 1]}}}], 28691);
}());
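A minimal sketch of the $arrayElemAt semantics exercised above (collection name illustrative):

    db.example.drop();
    db.example.insert({a: [1, 2, 3, 4, 5]});
    db.example.aggregate(
        [{$project: {_id: 0, first: {$arrayElemAt: ['$a', 0]}, last: {$arrayElemAt: ['$a', -1]}}}]);
    // Yields [{first: 1, last: 5}]. An out-of-bounds index leaves the field
    // missing rather than erroring, which is why the tests above expect [{}].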
diff --git a/jstests/aggregation/bugs/server4638.js b/jstests/aggregation/bugs/server4638.js
index 4934da94a34..ee6f7cfd6df 100644
--- a/jstests/aggregation/bugs/server4638.js
+++ b/jstests/aggregation/bugs/server4638.js
@@ -13,4 +13,4 @@ assert.eq(res[0].x, 0);
// Make sure having an undefined doesn't break pipelines that do use the field
res = t.aggregate({$project: {undef: 1}}).toArray();
assert.eq(res[0].undef, undefined);
-assert.eq(typeof(res[0].undef), "undefined");
+assert.eq(typeof (res[0].undef), "undefined");
diff --git a/jstests/aggregation/bugs/server5012.js b/jstests/aggregation/bugs/server5012.js
index a9955349490..14dfe914b52 100644
--- a/jstests/aggregation/bugs/server5012.js
+++ b/jstests/aggregation/bugs/server5012.js
@@ -1,11 +1,11 @@
(function() {
- "use strict";
- load('jstests/aggregation/data/articles.js');
+"use strict";
+load('jstests/aggregation/data/articles.js');
- const article = db.getSiblingDB("aggdb").getCollection("article");
- const cursor = article.aggregate(
- [{$sort: {_id: 1}}, {$project: {author: 1, _id: 0}}, {$project: {Writer: "$author"}}]);
- const expected = [{Writer: "bob"}, {Writer: "dave"}, {Writer: "jane"}];
+const article = db.getSiblingDB("aggdb").getCollection("article");
+const cursor = article.aggregate(
+ [{$sort: {_id: 1}}, {$project: {author: 1, _id: 0}}, {$project: {Writer: "$author"}}]);
+const expected = [{Writer: "bob"}, {Writer: "dave"}, {Writer: "jane"}];
- assert.eq(cursor.toArray(), expected);
+assert.eq(cursor.toArray(), expected);
}());
diff --git a/jstests/aggregation/bugs/server533.js b/jstests/aggregation/bugs/server533.js
index b64ddc9669f..d66c5d27ad8 100644
--- a/jstests/aggregation/bugs/server533.js
+++ b/jstests/aggregation/bugs/server533.js
@@ -4,32 +4,32 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
-
- var coll = db.agg_sample;
- coll.drop();
-
- // Should return no results on a collection that doesn't exist. Should not crash.
- assert.eq(coll.aggregate([{$sample: {size: 10}}]).toArray(), []);
-
- var nItems = 3;
- for (var i = 0; i < nItems; i++) {
- assert.writeOK(coll.insert({_id: i}));
- }
-
- [0, 1, nItems, nItems + 1].forEach(function(size) {
- var results = coll.aggregate([{$sample: {size: size}}]).toArray();
- assert.eq(results.length, Math.min(size, nItems));
- });
-
- // Multiple $sample stages are allowed.
- var results = coll.aggregate([{$sample: {size: nItems}}, {$sample: {size: 1}}]).toArray();
- assert.eq(results.length, 1);
-
- // Invalid options.
- assertErrorCode(coll, [{$sample: 'string'}], 28745);
- assertErrorCode(coll, [{$sample: {size: 'string'}}], 28746);
- assertErrorCode(coll, [{$sample: {size: -1}}], 28747);
- assertErrorCode(coll, [{$sample: {unknownOpt: true}}], 28748);
- assertErrorCode(coll, [{$sample: {/* no size */}}], 28749);
+'use strict';
+
+var coll = db.agg_sample;
+coll.drop();
+
+// Should return no results on a collection that doesn't exist. Should not crash.
+assert.eq(coll.aggregate([{$sample: {size: 10}}]).toArray(), []);
+
+var nItems = 3;
+for (var i = 0; i < nItems; i++) {
+ assert.writeOK(coll.insert({_id: i}));
+}
+
+[0, 1, nItems, nItems + 1].forEach(function(size) {
+ var results = coll.aggregate([{$sample: {size: size}}]).toArray();
+ assert.eq(results.length, Math.min(size, nItems));
+});
+
+// Multiple $sample stages are allowed.
+var results = coll.aggregate([{$sample: {size: nItems}}, {$sample: {size: 1}}]).toArray();
+assert.eq(results.length, 1);
+
+// Invalid options.
+assertErrorCode(coll, [{$sample: 'string'}], 28745);
+assertErrorCode(coll, [{$sample: {size: 'string'}}], 28746);
+assertErrorCode(coll, [{$sample: {size: -1}}], 28747);
+assertErrorCode(coll, [{$sample: {unknownOpt: true}}], 28748);
+assertErrorCode(coll, [{$sample: {/* no size */}}], 28749);
}());
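The $sample contract asserted above, as a one-line sketch (collection name illustrative):

    db.example.aggregate([{$sample: {size: 3}}]);
    // Returns min(size, number of documents) random documents. size may exceed
    // the collection size, but it must be a non-negative number, and no other
    // options are recognized.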
diff --git a/jstests/aggregation/bugs/server6074.js b/jstests/aggregation/bugs/server6074.js
index 8adf6b7eca8..8e53459ba9e 100644
--- a/jstests/aggregation/bugs/server6074.js
+++ b/jstests/aggregation/bugs/server6074.js
@@ -4,78 +4,78 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
-
- var coll = db.agg_slice_expr;
- coll.drop();
-
- // Need to have at least one document to ensure the pipeline executes.
- assert.writeOK(coll.insert({}));
-
- function testSlice(sliceArgs, expArray) {
- var pipeline = [{$project: {_id: 0, slice: {$slice: sliceArgs}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{slice: expArray}]);
- }
-
- // Two argument form.
-
- testSlice([[0, 1, 2, 3, 4], 2], [0, 1]);
- testSlice([[0, 1, 2, 3, 4], 2.0], [0, 1]);
- // Negative count
- testSlice([[0, 1, 2, 3, 4], -2], [3, 4]);
- testSlice([[0, 1, 2, 3, 4], -2.0], [3, 4]);
- // Zero count.
- testSlice([[0, 1, 2, 3, 4], 0], []);
- // Out of bounds positive.
- testSlice([[0, 1, 2, 3, 4], 10], [0, 1, 2, 3, 4]);
- // Out of bounds negative.
- testSlice([[0, 1, 2, 3, 4], -10], [0, 1, 2, 3, 4]);
- // Null arguments
- testSlice([null, -10], null);
- testSlice([[0, 1, 2, 3, 4], null], null);
-
- // Three argument form.
-
- testSlice([[0, 1, 2, 3, 4], 1, 2], [1, 2]);
- testSlice([[0, 1, 2, 3, 4], 1.0, 2.0], [1, 2]);
- // Negative start index.
- testSlice([[0, 1, 2, 3, 4], -3, 2], [2, 3]);
- testSlice([[0, 1, 2, 3, 4], -5, 2], [0, 1]);
- // Slice starts out of bounds.
- testSlice([[0, 1, 2, 3, 4], -10, 2], [0, 1]);
- testSlice([[0, 1, 2, 3, 4], 10, 2], []);
- // Slice ends out of bounds.
- testSlice([[0, 1, 2, 3, 4], 4, 3], [4]);
- testSlice([[0, 1, 2, 3, 4], -1, 3], [4]);
- // Null arguments
- testSlice([[0, 1, 2, 3, 4], -1, null], null);
-
- // Error cases.
-
- // Wrong number of arguments.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2, 3]]}}}], 28667);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2, 3], 4, 5, 6]}}}], 28667);
-
- // First argument is not an array.
- assertErrorCode(coll, [{$project: {x: {$slice: ['one', 2]}}}], 28724);
-
- // Second argument is not numeric.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], '2']}}}], 28725);
-
- // Second argument is not integral.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 1.5]}}}], 28726);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], Math.pow(2, 32)]}}}], 28726);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], -Math.pow(2, 31) - 1]}}}], 28726);
-
- // Third argument is not numeric.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, '2']}}}], 28727);
-
- // Third argument is not integral.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, 1.5]}}}], 28728);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, Math.pow(2, 32)]}}}], 28728);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, -Math.pow(2, 31) - 1]}}}], 28728);
-
- // Third argument is not positive.
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, 0]}}}], 28729);
- assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, -1]}}}], 28729);
+'use strict';
+
+var coll = db.agg_slice_expr;
+coll.drop();
+
+// Need to have at least one document to ensure the pipeline executes.
+assert.writeOK(coll.insert({}));
+
+function testSlice(sliceArgs, expArray) {
+ var pipeline = [{$project: {_id: 0, slice: {$slice: sliceArgs}}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{slice: expArray}]);
+}
+
+// Two argument form.
+
+testSlice([[0, 1, 2, 3, 4], 2], [0, 1]);
+testSlice([[0, 1, 2, 3, 4], 2.0], [0, 1]);
+// Negative count
+testSlice([[0, 1, 2, 3, 4], -2], [3, 4]);
+testSlice([[0, 1, 2, 3, 4], -2.0], [3, 4]);
+// Zero count.
+testSlice([[0, 1, 2, 3, 4], 0], []);
+// Out of bounds positive.
+testSlice([[0, 1, 2, 3, 4], 10], [0, 1, 2, 3, 4]);
+// Out of bounds negative.
+testSlice([[0, 1, 2, 3, 4], -10], [0, 1, 2, 3, 4]);
+// Null arguments
+testSlice([null, -10], null);
+testSlice([[0, 1, 2, 3, 4], null], null);
+
+// Three argument form.
+
+testSlice([[0, 1, 2, 3, 4], 1, 2], [1, 2]);
+testSlice([[0, 1, 2, 3, 4], 1.0, 2.0], [1, 2]);
+// Negative start index.
+testSlice([[0, 1, 2, 3, 4], -3, 2], [2, 3]);
+testSlice([[0, 1, 2, 3, 4], -5, 2], [0, 1]);
+// Slice starts out of bounds.
+testSlice([[0, 1, 2, 3, 4], -10, 2], [0, 1]);
+testSlice([[0, 1, 2, 3, 4], 10, 2], []);
+// Slice ends out of bounds.
+testSlice([[0, 1, 2, 3, 4], 4, 3], [4]);
+testSlice([[0, 1, 2, 3, 4], -1, 3], [4]);
+// Null arguments
+testSlice([[0, 1, 2, 3, 4], -1, null], null);
+
+// Error cases.
+
+// Wrong number of arguments.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2, 3]]}}}], 28667);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2, 3], 4, 5, 6]}}}], 28667);
+
+// First argument is not an array.
+assertErrorCode(coll, [{$project: {x: {$slice: ['one', 2]}}}], 28724);
+
+// Second argument is not numeric.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], '2']}}}], 28725);
+
+// Second argument is not integral.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 1.5]}}}], 28726);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], Math.pow(2, 32)]}}}], 28726);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], -Math.pow(2, 31) - 1]}}}], 28726);
+
+// Third argument is not numeric.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, '2']}}}], 28727);
+
+// Third argument is not integral.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, 1.5]}}}], 28728);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, Math.pow(2, 32)]}}}], 28728);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, -Math.pow(2, 31) - 1]}}}], 28728);
+
+// Third argument is not positive.
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, 0]}}}], 28729);
+assertErrorCode(coll, [{$project: {x: {$slice: [[1, 2], 0, -1]}}}], 28729);
}());
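Both $slice forms tested above, condensed into one projection; note the collection must hold at least one document for $project to emit output (names illustrative):

    db.example.aggregate([{
        $project: {
            _id: 0,
            head: {$slice: [[0, 1, 2, 3, 4], 2]},   // [0, 1]
            tail: {$slice: [[0, 1, 2, 3, 4], -2]},  // [3, 4]
            mid: {$slice: [[0, 1, 2, 3, 4], 1, 2]}  // [1, 2] (three-arg form: array, start, count)
        }
    }]);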
diff --git a/jstests/aggregation/bugs/server6125.js b/jstests/aggregation/bugs/server6125.js
index 592a560312a..bd4ab4ce220 100644
--- a/jstests/aggregation/bugs/server6125.js
+++ b/jstests/aggregation/bugs/server6125.js
@@ -65,9 +65,9 @@ function setupArray() {
{_id: 13, a: new Timestamp(1 / 1000, 1), ty: "Timestamp"},
{_id: 14, a: /regex/, ty: "RegExp"},
{
- _id: 15,
- a: new DBPointer("test.s6125", new ObjectId("0102030405060708090A0B0C")),
- ty: "DBPointer"
+ _id: 15,
+ a: new DBPointer("test.s6125", new ObjectId("0102030405060708090A0B0C")),
+ ty: "DBPointer"
},
{_id: 16, a: function() {}, ty: "Code"},
// Code with Scope not implemented in JS
diff --git a/jstests/aggregation/bugs/server6127.js b/jstests/aggregation/bugs/server6127.js
index 26585c87d21..1f11d858c83 100644
--- a/jstests/aggregation/bugs/server6127.js
+++ b/jstests/aggregation/bugs/server6127.js
@@ -7,21 +7,21 @@
 * is no path). Previously it would uassert, causing the aggregation to end.
*/
(function() {
- "use strict";
- db.s6127.drop();
+"use strict";
+db.s6127.drop();
- assert.writeOK(db.s6127.insert({_id: 0, a: 1}));
- assert.writeOK(db.s6127.insert({_id: 1, foo: 2}));
- assert.writeOK(db.s6127.insert({_id: 2, foo: {bar: 3}}));
+assert.writeOK(db.s6127.insert({_id: 0, a: 1}));
+assert.writeOK(db.s6127.insert({_id: 1, foo: 2}));
+assert.writeOK(db.s6127.insert({_id: 2, foo: {bar: 3}}));
- // Aggregate checking the field foo and the path foo.bar.
- const cursor = db.s6127.aggregate(
- [{$sort: {_id: 1}}, {$project: {_id: 0, "foo.bar": 1, field: "$foo", path: "$foo.bar"}}]);
+// Aggregate checking the field foo and the path foo.bar.
+const cursor = db.s6127.aggregate(
+ [{$sort: {_id: 1}}, {$project: {_id: 0, "foo.bar": 1, field: "$foo", path: "$foo.bar"}}]);
- // The first document should contain nothing as neither field exists, the second document should
- // contain only field as it has a value in foo, but foo does not have a field bar so it cannot
- // walk that path, the third document should have both the field and path as foo is an object
- // which has a field bar.
- const expected = [{}, {field: 2}, {foo: {bar: 3}, field: {bar: 3}, path: 3}];
- assert.eq(cursor.toArray(), expected);
+// The first document should contain nothing, as neither field exists. The second document should
+// contain only 'field', since foo has a value but no field bar, so the path cannot be walked. The
+// third document should have both 'field' and 'path', as foo is an object which has a field bar.
+const expected = [{}, {field: 2}, {foo: {bar: 3}, field: {bar: 3}, path: 3}];
+assert.eq(cursor.toArray(), expected);
}());
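The field-versus-path distinction described in the comments above, as a sketch:

    db.s6127.aggregate([{$project: {_id: 0, field: '$foo', path: '$foo.bar'}}]);
    // '$foo' copies whatever foo holds, while '$foo.bar' resolves only when foo
    // is an object with a bar field; otherwise the output field is simply absent.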
diff --git a/jstests/aggregation/bugs/server6147.js b/jstests/aggregation/bugs/server6147.js
index 0969b366636..c74e1848512 100644
--- a/jstests/aggregation/bugs/server6147.js
+++ b/jstests/aggregation/bugs/server6147.js
@@ -6,44 +6,44 @@
* constant and a field regardless of whether they were equal or not.
*/
(function() {
- "use strict";
- db.s6147.drop();
+"use strict";
+db.s6147.drop();
- assert.writeOK(db.s6147.insert({a: 1}));
- assert.writeOK(db.s6147.insert({a: 2}));
+assert.writeOK(db.s6147.insert({a: 1}));
+assert.writeOK(db.s6147.insert({a: 2}));
- // Aggregate checking various combinations of the constant and the field.
- const cursor = db.s6147.aggregate([
- {$sort: {a: 1}},
- {
- $project: {
- _id: 0,
- constantAndField: {$ne: [1, "$a"]},
- fieldAndConstant: {$ne: ["$a", 1]},
- constantAndConstant: {$ne: [1, 1]},
- fieldAndField: {$ne: ["$a", "$a"]}
- }
+// Aggregate checking various combinations of the constant and the field.
+const cursor = db.s6147.aggregate([
+ {$sort: {a: 1}},
+ {
+ $project: {
+ _id: 0,
+ constantAndField: {$ne: [1, "$a"]},
+ fieldAndConstant: {$ne: ["$a", 1]},
+ constantAndConstant: {$ne: [1, 1]},
+ fieldAndField: {$ne: ["$a", "$a"]}
}
- ]);
+ }
+]);
- // In both documents, the constantAndConstant and fieldAndField should be false since they
- // compare something with itself. However, the constantAndField and fieldAndConstant should be
- // different as document one contains 1 which should return false and document 2 contains
- // something different so should return true.
- const expected = [
- {
- constantAndField: false,
- fieldAndConstant: false,
- constantAndConstant: false,
- fieldAndField: false
- },
- {
- constantAndField: true,
- fieldAndConstant: true,
- constantAndConstant: false,
- fieldAndField: false
- }
- ];
+// In both documents, constantAndConstant and fieldAndField should be false, since they compare
+// something with itself. However, constantAndField and fieldAndConstant should differ between
+// documents: document one contains 1, so $ne returns false, while document two contains
+// something different, so it returns true.
+const expected = [
+ {
+ constantAndField: false,
+ fieldAndConstant: false,
+ constantAndConstant: false,
+ fieldAndField: false
+ },
+ {
+ constantAndField: true,
+ fieldAndConstant: true,
+ constantAndConstant: false,
+ fieldAndField: false
+ }
+];
- assert.eq(cursor.toArray(), expected);
+assert.eq(cursor.toArray(), expected);
}());
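A condensed sketch of the $ne cases above:

    db.s6147.aggregate([{$project: {_id: 0, isNotOne: {$ne: ['$a', 1]}}}]);
    // Evaluates per document: false for {a: 1}, true for {a: 2}. Argument order
    // is irrelevant, and comparing an expression with itself is always false.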
diff --git a/jstests/aggregation/bugs/server6179.js b/jstests/aggregation/bugs/server6179.js
index a5e934a9e89..065f5b261ee 100644
--- a/jstests/aggregation/bugs/server6179.js
+++ b/jstests/aggregation/bugs/server6179.js
@@ -4,53 +4,53 @@
// requires_spawning_own_processes,
// ]
(function() {
- 'use strict';
-
- var s = new ShardingTest({shards: 2});
-
- assert.commandWorked(s.s0.adminCommand({enablesharding: "test"}));
- s.ensurePrimaryShard('test', s.shard1.shardName);
- assert.commandWorked(s.s0.adminCommand({shardcollection: "test.data", key: {_id: 1}}));
-
- var d = s.getDB("test");
-
- // Insert _id values 0 - 99
- var N = 100;
-
- var bulkOp = d.data.initializeOrderedBulkOp();
- for (var i = 0; i < N; ++i) {
- bulkOp.insert({_id: i, i: i % 10});
- }
- bulkOp.execute();
-
- // Split the data into 3 chunks
- assert.commandWorked(s.s0.adminCommand({split: "test.data", middle: {_id: 33}}));
- assert.commandWorked(s.s0.adminCommand({split: "test.data", middle: {_id: 66}}));
-
- // Migrate the middle chunk to another shard
- assert.commandWorked(s.s0.adminCommand(
- {movechunk: "test.data", find: {_id: 50}, to: s.getOther(s.getPrimaryShard("test")).name}));
-
- // Check that we get results rather than an error
- var result = d.data
- .aggregate({$group: {_id: '$_id', i: {$first: '$i'}}},
- {$group: {_id: '$i', avg_id: {$avg: '$_id'}}},
- {$sort: {_id: 1}})
- .toArray();
- var expected = [
- {"_id": 0, "avg_id": 45},
- {"_id": 1, "avg_id": 46},
- {"_id": 2, "avg_id": 47},
- {"_id": 3, "avg_id": 48},
- {"_id": 4, "avg_id": 49},
- {"_id": 5, "avg_id": 50},
- {"_id": 6, "avg_id": 51},
- {"_id": 7, "avg_id": 52},
- {"_id": 8, "avg_id": 53},
- {"_id": 9, "avg_id": 54}
- ];
-
- assert.eq(result, expected);
-
- s.stop();
+'use strict';
+
+var s = new ShardingTest({shards: 2});
+
+assert.commandWorked(s.s0.adminCommand({enablesharding: "test"}));
+s.ensurePrimaryShard('test', s.shard1.shardName);
+assert.commandWorked(s.s0.adminCommand({shardcollection: "test.data", key: {_id: 1}}));
+
+var d = s.getDB("test");
+
+// Insert _id values 0 - 99
+var N = 100;
+
+var bulkOp = d.data.initializeOrderedBulkOp();
+for (var i = 0; i < N; ++i) {
+ bulkOp.insert({_id: i, i: i % 10});
+}
+bulkOp.execute();
+
+// Split the data into 3 chunks
+assert.commandWorked(s.s0.adminCommand({split: "test.data", middle: {_id: 33}}));
+assert.commandWorked(s.s0.adminCommand({split: "test.data", middle: {_id: 66}}));
+
+// Migrate the middle chunk to another shard
+assert.commandWorked(s.s0.adminCommand(
+ {movechunk: "test.data", find: {_id: 50}, to: s.getOther(s.getPrimaryShard("test")).name}));
+
+// Check that we get results rather than an error
+var result = d.data
+ .aggregate({$group: {_id: '$_id', i: {$first: '$i'}}},
+ {$group: {_id: '$i', avg_id: {$avg: '$_id'}}},
+ {$sort: {_id: 1}})
+ .toArray();
+var expected = [
+ {"_id": 0, "avg_id": 45},
+ {"_id": 1, "avg_id": 46},
+ {"_id": 2, "avg_id": 47},
+ {"_id": 3, "avg_id": 48},
+ {"_id": 4, "avg_id": 49},
+ {"_id": 5, "avg_id": 50},
+ {"_id": 6, "avg_id": 51},
+ {"_id": 7, "avg_id": 52},
+ {"_id": 8, "avg_id": 53},
+ {"_id": 9, "avg_id": 54}
+];
+
+assert.eq(result, expected);
+
+s.stop();
})();
diff --git a/jstests/aggregation/bugs/server6185.js b/jstests/aggregation/bugs/server6185.js
index cf084d4b371..06eacdf791d 100644
--- a/jstests/aggregation/bugs/server6185.js
+++ b/jstests/aggregation/bugs/server6185.js
@@ -2,16 +2,16 @@
* Tests that projecting a non-existent subfield behaves identically in both query and aggregation.
*/
(function() {
- "use strict";
- const coll = db.c;
- coll.drop();
+"use strict";
+const coll = db.c;
+coll.drop();
- assert.writeOK(coll.insert({a: [1]}));
- assert.writeOK(coll.insert({a: {c: 1}}));
- assert.writeOK(coll.insert({a: [{c: 1}, {b: 1, c: 1}, {c: 1}]}));
- assert.writeOK(coll.insert({a: 1}));
- assert.writeOK(coll.insert({b: 1}));
+assert.writeOK(coll.insert({a: [1]}));
+assert.writeOK(coll.insert({a: {c: 1}}));
+assert.writeOK(coll.insert({a: [{c: 1}, {b: 1, c: 1}, {c: 1}]}));
+assert.writeOK(coll.insert({a: 1}));
+assert.writeOK(coll.insert({b: 1}));
- assert.eq(coll.aggregate([{$project: {'a.b': 1}}, {$sort: {_id: 1}}]).toArray(),
- coll.find({}, {'a.b': 1}).sort({_id: 1}).toArray());
+assert.eq(coll.aggregate([{$project: {'a.b': 1}}, {$sort: {_id: 1}}]).toArray(),
+ coll.find({}, {'a.b': 1}).sort({_id: 1}).toArray());
}());
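The find/aggregate parity asserted above, as two equivalent shell commands:

    db.c.find({}, {'a.b': 1}).sort({_id: 1});
    db.c.aggregate([{$project: {'a.b': 1}}, {$sort: {_id: 1}}]);
    // Projecting the non-existent subfield must yield identical documents
    // through either path.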
diff --git a/jstests/aggregation/bugs/server6530.js b/jstests/aggregation/bugs/server6530.js
index 36a5d3deb3f..77dfcd703fb 100644
--- a/jstests/aggregation/bugs/server6530.js
+++ b/jstests/aggregation/bugs/server6530.js
@@ -2,31 +2,37 @@
* Test that $near queries are disallowed in $match stages.
*/
(function() {
- "use strict";
- load("jstests/aggregation/extras/utils.js");
+"use strict";
+load("jstests/aggregation/extras/utils.js");
- const coll = db.getCollection("no_near_in_match");
- coll.drop();
+const coll = db.getCollection("no_near_in_match");
+coll.drop();
- // Create indexes that could satisfy various $near queries.
- assert.commandWorked(coll.createIndex({point2d: "2d"}));
- assert.commandWorked(coll.createIndex({point2dsphere: "2dsphere"}));
+// Create indexes that could satisfy various $near queries.
+assert.commandWorked(coll.createIndex({point2d: "2d"}));
+assert.commandWorked(coll.createIndex({point2dsphere: "2dsphere"}));
- // Populate the collection so that successful queries can return at least one result.
- assert.writeOK(coll.insert({point2d: [0.25, 0.35]}));
- assert.writeOK(coll.insert({point2dsphere: [0.25, 0.35]}));
+// Populate the collection so that successful queries can return at least one result.
+assert.writeOK(coll.insert({point2d: [0.25, 0.35]}));
+assert.writeOK(coll.insert({point2dsphere: [0.25, 0.35]}));
- const nearQuery = {point2d: {$near: [0, 0]}};
- const nearSphereQuery = {point2dsphere: {$nearSphere: [0, 0]}};
- const geoNearQuery = {point2d: {$geoNear: [0, 0]}};
+const nearQuery = {
+ point2d: {$near: [0, 0]}
+};
+const nearSphereQuery = {
+ point2dsphere: {$nearSphere: [0, 0]}
+};
+const geoNearQuery = {
+ point2d: {$geoNear: [0, 0]}
+};
- // Test that normal finds return a result.
- assert.eq(1, coll.find(nearQuery).count());
- assert.eq(1, coll.find(nearSphereQuery).count());
- assert.eq(1, coll.find(geoNearQuery).count());
+// Test that normal finds return a result.
+assert.eq(1, coll.find(nearQuery).count());
+assert.eq(1, coll.find(nearSphereQuery).count());
+assert.eq(1, coll.find(geoNearQuery).count());
- // Test that we refuse to run $match with a near query.
- assertErrorCode(coll, {$match: nearQuery}, ErrorCodes.BadValue);
- assertErrorCode(coll, {$match: nearSphereQuery}, ErrorCodes.BadValue);
- assertErrorCode(coll, {$match: geoNearQuery}, ErrorCodes.BadValue);
+// Test that we refuse to run $match with a near query.
+assertErrorCode(coll, {$match: nearQuery}, ErrorCodes.BadValue);
+assertErrorCode(coll, {$match: nearSphereQuery}, ErrorCodes.BadValue);
+assertErrorCode(coll, {$match: geoNearQuery}, ErrorCodes.BadValue);
}());
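The restriction under test, reduced to a sketch over the collection created above:

    db.no_near_in_match.find({point2d: {$near: [0, 0]}});                   // Legal.
    db.no_near_in_match.aggregate([{$match: {point2d: {$near: [0, 0]}}}]);  // BadValue.
    // $near, $nearSphere, and $geoNear queries are only valid in find(); the
    // aggregation equivalent is a leading $geoNear stage.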
diff --git a/jstests/aggregation/bugs/server6779.js b/jstests/aggregation/bugs/server6779.js
index 44f641ea15d..d9d48898068 100644
--- a/jstests/aggregation/bugs/server6779.js
+++ b/jstests/aggregation/bugs/server6779.js
@@ -1,20 +1,20 @@
// server 6779: serializing ExpressionCoerceToBool
// With the bug, this test only fails in debug mode, since that is where round-tripping is tested
(function() {
- "use strict";
+"use strict";
- function test(op, val) {
- const coll = db.server6779;
- coll.drop();
- assert.writeOK(coll.insert({a: true}));
- assert.writeOK(coll.insert({a: false}));
+function test(op, val) {
+ const coll = db.server6779;
+ coll.drop();
+ assert.writeOK(coll.insert({a: true}));
+ assert.writeOK(coll.insert({a: false}));
- const obj = {};
- obj[op] = ['$a', val];
- const result = coll.aggregate([{$project: {_id: 0, bool: obj}}, {$sort: {bool: -1}}]);
+ const obj = {};
+ obj[op] = ['$a', val];
+ const result = coll.aggregate([{$project: {_id: 0, bool: obj}}, {$sort: {bool: -1}}]);
- assert.eq(result.toArray(), [{bool: true}, {bool: false}]);
- }
- test('$and', true);
- test('$or', false);
+ assert.eq(result.toArray(), [{bool: true}, {bool: false}]);
+}
+test('$and', true);
+test('$or', false);
}());
diff --git a/jstests/aggregation/bugs/server7695_isodates.js b/jstests/aggregation/bugs/server7695_isodates.js
index 4d969bf80f8..ca90c47f0fe 100644
--- a/jstests/aggregation/bugs/server7695_isodates.js
+++ b/jstests/aggregation/bugs/server7695_isodates.js
@@ -1,254 +1,251 @@
// SERVER-7695: Add $isoWeek, $isoWeekYear, and $isoDayOfWeek aggregation expressions.
(function() {
- "use strict";
- const coll = db.server7695;
- let testOpCount = 0;
-
- load('jstests/libs/dateutil.js');
-
- coll.drop();
-
- // Seed collection so that the pipeline will execute.
- assert.writeOK(coll.insert({}));
-
- /**
- * Helper for testing that 'op' returns 'expResult'.
- */
- function testOp(op, value, expResult) {
- testOpCount++;
- let pipeline = [{$project: {_id: 0, result: {}}}];
- pipeline[0].$project.result[op] = value;
- let msg = "Exptected {" + op + ": " + value + "} to equal: " + expResult;
- let res = coll.runCommand('aggregate', {pipeline: pipeline, cursor: {}});
-
- // in the case of $dateToString the date is on property date
- let date = value.date || value;
- if (date.valueOf() < 0 && _isWindows() && res.code === 16422) {
- // some versions of windows (but not all) fail with dates before 1970
- print("skipping test of " + date.tojson() +
- " because system doesn't support old dates");
- return;
- }
-
- if (date.valueOf() / 1000 < -2 * 1024 * 1024 * 1024 && res.code == 16421) {
- // we correctly detected that we are outside of the range of a 32-bit time_t
- print("skipping test of " + date.tojson() + " because it is outside of time_t range");
- return;
- }
+"use strict";
+const coll = db.server7695;
+let testOpCount = 0;
+
+load('jstests/libs/dateutil.js');
+
+coll.drop();
+
+// Seed collection so that the pipeline will execute.
+assert.writeOK(coll.insert({}));
+
+/**
+ * Helper for testing that 'op' returns 'expResult'.
+ */
+function testOp(op, value, expResult) {
+ testOpCount++;
+ let pipeline = [{$project: {_id: 0, result: {}}}];
+ pipeline[0].$project.result[op] = value;
+ let msg = "Exptected {" + op + ": " + value + "} to equal: " + expResult;
+ let res = coll.runCommand('aggregate', {pipeline: pipeline, cursor: {}});
+
+    // In the case of $dateToString, the date is on the 'date' property.
+ let date = value.date || value;
+ if (date.valueOf() < 0 && _isWindows() && res.code === 16422) {
+        // Some versions of Windows (but not all) fail with dates before 1970.
+ print("skipping test of " + date.tojson() + " because system doesn't support old dates");
+ return;
+ }
- assert.eq(res.cursor.firstBatch[0].result, expResult, tojson(pipeline));
+ if (date.valueOf() / 1000 < -2 * 1024 * 1024 * 1024 && res.code == 16421) {
+ // we correctly detected that we are outside of the range of a 32-bit time_t
+ print("skipping test of " + date.tojson() + " because it is outside of time_t range");
+ return;
}
- // While development, there was a bug which caused an error with $dateToString if the order of
- // %V and %G changed, so I added this test to prevent regression.
- testOp('$dateToString', {date: new Date("1900-12-31T23:59:59Z"), format: "%V-%G"}, "01-1901");
- // This was failing, but it shouldn't as it is the same as above, only rotated.
- testOp('$dateToString', {date: new Date("1900-12-31T23:59:59Z"), format: "%G-%V"}, "1901-01");
-
- // 1900 is special because it's devisible by 4 and by 100 but not 400 so it's not a leap year.
- // 2000 is special, because it's devisible by 4, 100, 400 and so it is a leap year.
- const years = {
- common: [
- 1900, // Starting and ending on Monday (special).
- 2002, // Starting and ending on Tuesday.
- 2014, // Starting and ending on Wednesday.
- 2015, // Starting and ending on Thursday.
- 2010, // Starting and ending on Friday.
- 2011, // Starting and ending on Saturday.
- 2006, // Starting and ending on Sunday.
- ],
- leap: [
- 1996, // Starting on Monday, ending on Tuesday.
- 2008, // Starting on Tuesday, ending on Wednesday.
- 1992, // Starting on Wednesday, ending on Thursday.
- 2004, // Starting on Thursday, ending on Friday.
- 2016, // Starting on Friday, ending on Saturday.
- 2000, // Starting on Saturday, ending on Sunday (special).
- 2012, // Starting on Sunday, ending on Monday.
- ],
- commonAfterLeap: [
- 2001, // Starting and ending on Monday.
- 2013, // Starting and ending on Tuesday.
- 1997, // Starting and ending on Wednesday.
- 2009, // Starting and ending on Thursday.
- 1993, // Starting and ending on Friday.
- 2005, // Starting and ending on Saturday.
- 2017, // Starting and ending on Sunday.
- ],
- };
-
- const MONDAY = 1;
- const TUESDAY = 2;
- const WEDNESDAY = 3;
- const THURSDAY = 4;
- const FRIDAY = 5;
- const SATURDAY = 6;
- const SUNDAY = 7;
-
- ['common', 'leap', 'commonAfterLeap'].forEach(function(type) {
- years[type].forEach(function(year, day) {
- // forEach starts indexing at zero but weekdays start with Monday on 1 so we add +1.
- day = day + 1;
- let newYear = DateUtil.getNewYear(year);
- let endOfFirstWeekInYear = DateUtil.getEndOfFirstWeekInYear(year, day);
- let startOfSecondWeekInYear = DateUtil.getStartOfSecondWeekInYear(year, day);
- let birthday = DateUtil.getBirthday(year);
- let endOfSecondToLastWeekInYear =
- DateUtil.getEndOfSecondToLastWeekInYear(year, day, type);
- let startOfLastWeekInYear = DateUtil.getStartOfLastWeekInYear(year, day, type);
- let newYearsEve = DateUtil.getNewYearsEve(year);
-
- testOp('$isoDayOfWeek', newYear, day);
- testOp('$isoDayOfWeek', endOfFirstWeekInYear, SUNDAY);
- testOp('$isoDayOfWeek', startOfSecondWeekInYear, MONDAY);
- testOp('$isoDayOfWeek', endOfSecondToLastWeekInYear, SUNDAY);
- testOp('$isoDayOfWeek', startOfLastWeekInYear, MONDAY);
- if (type === 'leap') {
- testOp('$isoDayOfWeek', newYearsEve, DateUtil.shiftWeekday(day, 1));
- } else {
- testOp('$isoDayOfWeek', newYearsEve, day);
- }
+ assert.eq(res.cursor.firstBatch[0].result, expResult, tojson(pipeline));
+}
+
+// During development, there was a bug which caused an error with $dateToString if the order of
+// %V and %G changed, so this test was added to prevent a regression.
+testOp('$dateToString', {date: new Date("1900-12-31T23:59:59Z"), format: "%V-%G"}, "01-1901");
+// This was failing, but it shouldn't have, as it is the same as above, only rotated.
+testOp('$dateToString', {date: new Date("1900-12-31T23:59:59Z"), format: "%G-%V"}, "1901-01");
+
+// 1900 is special because it's divisible by 4 and by 100 but not by 400, so it's not a leap year.
+// 2000 is special because it's divisible by 4, 100, and 400, so it is a leap year.
+const years = {
+ common: [
+ 1900, // Starting and ending on Monday (special).
+ 2002, // Starting and ending on Tuesday.
+ 2014, // Starting and ending on Wednesday.
+ 2015, // Starting and ending on Thursday.
+ 2010, // Starting and ending on Friday.
+ 2011, // Starting and ending on Saturday.
+ 2006, // Starting and ending on Sunday.
+ ],
+ leap: [
+ 1996, // Starting on Monday, ending on Tuesday.
+ 2008, // Starting on Tuesday, ending on Wednesday.
+ 1992, // Starting on Wednesday, ending on Thursday.
+ 2004, // Starting on Thursday, ending on Friday.
+ 2016, // Starting on Friday, ending on Saturday.
+ 2000, // Starting on Saturday, ending on Sunday (special).
+ 2012, // Starting on Sunday, ending on Monday.
+ ],
+ commonAfterLeap: [
+ 2001, // Starting and ending on Monday.
+ 2013, // Starting and ending on Tuesday.
+ 1997, // Starting and ending on Wednesday.
+ 2009, // Starting and ending on Thursday.
+ 1993, // Starting and ending on Friday.
+ 2005, // Starting and ending on Saturday.
+ 2017, // Starting and ending on Sunday.
+ ],
+};
+
+const MONDAY = 1;
+const TUESDAY = 2;
+const WEDNESDAY = 3;
+const THURSDAY = 4;
+const FRIDAY = 5;
+const SATURDAY = 6;
+const SUNDAY = 7;
+
+['common', 'leap', 'commonAfterLeap'].forEach(function(type) {
+ years[type].forEach(function(year, day) {
+    // forEach starts indexing at zero, but ISO weekdays number Monday as 1, so we add 1.
+ day = day + 1;
+ let newYear = DateUtil.getNewYear(year);
+ let endOfFirstWeekInYear = DateUtil.getEndOfFirstWeekInYear(year, day);
+ let startOfSecondWeekInYear = DateUtil.getStartOfSecondWeekInYear(year, day);
+ let birthday = DateUtil.getBirthday(year);
+ let endOfSecondToLastWeekInYear = DateUtil.getEndOfSecondToLastWeekInYear(year, day, type);
+ let startOfLastWeekInYear = DateUtil.getStartOfLastWeekInYear(year, day, type);
+ let newYearsEve = DateUtil.getNewYearsEve(year);
+
+ testOp('$isoDayOfWeek', newYear, day);
+ testOp('$isoDayOfWeek', endOfFirstWeekInYear, SUNDAY);
+ testOp('$isoDayOfWeek', startOfSecondWeekInYear, MONDAY);
+ testOp('$isoDayOfWeek', endOfSecondToLastWeekInYear, SUNDAY);
+ testOp('$isoDayOfWeek', startOfLastWeekInYear, MONDAY);
+ if (type === 'leap') {
+ testOp('$isoDayOfWeek', newYearsEve, DateUtil.shiftWeekday(day, 1));
+ } else {
+ testOp('$isoDayOfWeek', newYearsEve, day);
+ }
- if (type === 'leap') {
- testOp('$isoDayOfWeek', birthday, DateUtil.shiftWeekday(day, 4));
- } else {
- testOp('$isoDayOfWeek', birthday, DateUtil.shiftWeekday(day, 3));
- }
+ if (type === 'leap') {
+ testOp('$isoDayOfWeek', birthday, DateUtil.shiftWeekday(day, 4));
+ } else {
+ testOp('$isoDayOfWeek', birthday, DateUtil.shiftWeekday(day, 3));
+ }
- testOp('$isoWeekYear', birthday, year);
- // In leap years staring on Thursday, the birthday is in week 28, every year else it is
- // in week 27.
- if (type === 'leap' && day === THURSDAY) {
- testOp('$isoWeek', birthday, 28);
- } else {
- testOp('$isoWeek', birthday, 27);
- }
+ testOp('$isoWeekYear', birthday, year);
+    // In leap years starting on Thursday, the birthday is in week 28; in every other year it
+    // is in week 27.
+ if (type === 'leap' && day === THURSDAY) {
+ testOp('$isoWeek', birthday, 28);
+ } else {
+ testOp('$isoWeek', birthday, 27);
+ }
- if (day <= THURSDAY) {
- // A year starting between Monday and Thursday will always start in week 1.
- testOp('$isoWeek', newYear, 1);
- testOp('$isoWeekYear', newYear, year);
- testOp('$isoWeek', endOfFirstWeekInYear, 1);
- testOp('$isoWeekYear', endOfFirstWeekInYear, year);
- testOp('$isoWeek', startOfSecondWeekInYear, 2);
- testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+ if (day <= THURSDAY) {
+ // A year starting between Monday and Thursday will always start in week 1.
+ testOp('$isoWeek', newYear, 1);
+ testOp('$isoWeekYear', newYear, year);
+ testOp('$isoWeek', endOfFirstWeekInYear, 1);
+ testOp('$isoWeekYear', endOfFirstWeekInYear, year);
+ testOp('$isoWeek', startOfSecondWeekInYear, 2);
+ testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+ testOp(
+ '$dateToString', {format: '%G-W%V-%u', date: newYear}, "" + year + "-W01-" + day);
+ } else if (day == FRIDAY || (day == SATURDAY && type === 'commonAfterLeap')) {
+ // A year starting on Friday will always start with week 53 of the previous year.
+ // A common year starting on a Saturday and after a leap year will also start with
+ // week 53 of the previous year.
+ testOp('$isoWeek', newYear, 53);
+ testOp('$isoWeekYear', newYear, year - 1);
+ testOp('$isoWeek', endOfFirstWeekInYear, 53);
+ testOp('$isoWeekYear', endOfFirstWeekInYear, year - 1);
+ testOp('$isoWeek', startOfSecondWeekInYear, 1);
+ testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYear},
+ "" + (year - 1) + "-W53-" + day);
+ } else {
+ // A year starting on Saturday (except after a leap year) or Sunday will always
+ // start with week 52 of the previous year.
+ testOp('$isoWeek', newYear, 52);
+ testOp('$isoWeekYear', newYear, year - 1);
+ testOp('$isoWeek', endOfFirstWeekInYear, 52);
+ testOp('$isoWeekYear', endOfFirstWeekInYear, year - 1);
+ testOp('$isoWeek', startOfSecondWeekInYear, 1);
+ testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYear},
+ "" + (year - 1) + "-W52-" + day);
+ }
+
+ if (type === 'leap') {
+ if (day <= TUESDAY) {
+ // A leap year starting between Monday and Tuesday will always end in week 1 of
+ // the next year.
+ testOp('$isoWeek', newYearsEve, 1);
+ testOp('$isoWeekYear', newYearsEve, year + 1);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 1);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year + 1);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year + 1) + "-W01-" + DateUtil.shiftWeekday(day, 1));
+ } else if (day <= THURSDAY) {
+ // A leap year starting on Wednesday or Thursday will always end with week 53.
+ testOp('$isoWeek', newYearsEve, 53);
+ testOp('$isoWeekYear', newYearsEve, year);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 53);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year);
testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYear},
- "" + year + "-W01-" + day);
- } else if (day == FRIDAY || (day == SATURDAY && type === 'commonAfterLeap')) {
- // A year starting on Friday will always start with week 53 of the previous year.
- // A common year starting on a Saturday and after a leap year will also start with
- // week 53 of the previous year.
- testOp('$isoWeek', newYear, 53);
- testOp('$isoWeekYear', newYear, year - 1);
- testOp('$isoWeek', endOfFirstWeekInYear, 53);
- testOp('$isoWeekYear', endOfFirstWeekInYear, year - 1);
- testOp('$isoWeek', startOfSecondWeekInYear, 1);
- testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year) + "-W53-" + DateUtil.shiftWeekday(day, 1));
+ } else if (day <= SATURDAY) {
+                // A leap year starting on Friday or Saturday will always end with week 52.
+ testOp('$isoWeek', newYearsEve, 52);
+ testOp('$isoWeekYear', newYearsEve, year);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 52);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year);
testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYear},
- "" + (year - 1) + "-W53-" + day);
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year) + "-W52-" + DateUtil.shiftWeekday(day, 1));
} else {
- // A year starting on Saturday (except after a leap year) or Sunday will always
- // start with week 52 of the previous year.
- testOp('$isoWeek', newYear, 52);
- testOp('$isoWeekYear', newYear, year - 1);
- testOp('$isoWeek', endOfFirstWeekInYear, 52);
- testOp('$isoWeekYear', endOfFirstWeekInYear, year - 1);
- testOp('$isoWeek', startOfSecondWeekInYear, 1);
- testOp('$isoWeekYear', startOfSecondWeekInYear, year);
+                // A leap year starting on Sunday will always end with week 1.
+ testOp('$isoWeek', newYearsEve, 1);
+ testOp('$isoWeekYear', newYearsEve, year + 1);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 52);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year);
testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYear},
- "" + (year - 1) + "-W52-" + day);
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year + 1) + "-W01-" + DateUtil.shiftWeekday(day, 1));
}
-
- if (type === 'leap') {
- if (day <= TUESDAY) {
- // A leap year starting between Monday and Tuesday will always end in week 1 of
- // the next year.
- testOp('$isoWeek', newYearsEve, 1);
- testOp('$isoWeekYear', newYearsEve, year + 1);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 1);
- testOp('$isoWeekYear', startOfLastWeekInYear, year + 1);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year + 1) + "-W01-" + DateUtil.shiftWeekday(day, 1));
- } else if (day <= THURSDAY) {
- // A leap year starting on Wednesday or Thursday will always end with week 53.
- testOp('$isoWeek', newYearsEve, 53);
- testOp('$isoWeekYear', newYearsEve, year);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 53);
- testOp('$isoWeekYear', startOfLastWeekInYear, year);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year) + "-W53-" + DateUtil.shiftWeekday(day, 1));
- } else if (day <= SATURDAY) {
- // A leap year starting on Friday or Sarturday will always and with week 52
- testOp('$isoWeek', newYearsEve, 52);
- testOp('$isoWeekYear', newYearsEve, year);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 52);
- testOp('$isoWeekYear', startOfLastWeekInYear, year);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year) + "-W52-" + DateUtil.shiftWeekday(day, 1));
- } else {
- // A leap year starting on Sunday will always end with week 1
- testOp('$isoWeek', newYearsEve, 1);
- testOp('$isoWeekYear', newYearsEve, year + 1);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 52);
- testOp('$isoWeekYear', startOfLastWeekInYear, year);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year + 1) + "-W01-" + DateUtil.shiftWeekday(day, 1));
- }
+ } else {
+ if (day <= WEDNESDAY) {
+ // A common year starting between Monday and Wednesday will always end in week 1
+ // of the next year.
+ testOp('$isoWeek', newYearsEve, 1);
+ testOp('$isoWeekYear', newYearsEve, year + 1);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 1);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year + 1);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year + 1) + "-W01-" + day);
+ } else if (day === THURSDAY) {
+ // A common year starting on Thursday will always end with week 53.
+ testOp('$isoWeek', newYearsEve, 53);
+ testOp('$isoWeekYear', newYearsEve, year);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 53);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year) + "-W53-" + day);
} else {
- if (day <= WEDNESDAY) {
- // A common year starting between Monday and Wednesday will always end in week 1
- // of the next year.
- testOp('$isoWeek', newYearsEve, 1);
- testOp('$isoWeekYear', newYearsEve, year + 1);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 1);
- testOp('$isoWeekYear', startOfLastWeekInYear, year + 1);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year + 1) + "-W01-" + day);
- } else if (day === THURSDAY) {
- // A common year starting on Thursday will always end with week 53.
- testOp('$isoWeek', newYearsEve, 53);
- testOp('$isoWeekYear', newYearsEve, year);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 52);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 53);
- testOp('$isoWeekYear', startOfLastWeekInYear, year);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year) + "-W53-" + day);
- } else {
- // A common year starting on between Friday and Sunday will always end with week
- // 52.
- testOp('$isoWeek', newYearsEve, 52);
- testOp('$isoWeekYear', newYearsEve, year);
- testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
- testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
- testOp('$isoWeek', startOfLastWeekInYear, 52);
- testOp('$isoWeekYear', startOfLastWeekInYear, year);
- testOp('$dateToString',
- {format: '%G-W%V-%u', date: newYearsEve},
- "" + (year) + "-W52-" + day);
- }
+            // A common year starting between Friday and Sunday will always end with week
+            // 52.
+ testOp('$isoWeek', newYearsEve, 52);
+ testOp('$isoWeekYear', newYearsEve, year);
+ testOp('$isoWeek', endOfSecondToLastWeekInYear, 51);
+ testOp('$isoWeekYear', endOfSecondToLastWeekInYear, year);
+ testOp('$isoWeek', startOfLastWeekInYear, 52);
+ testOp('$isoWeekYear', startOfLastWeekInYear, year);
+ testOp('$dateToString',
+ {format: '%G-W%V-%u', date: newYearsEve},
+ "" + (year) + "-W52-" + day);
}
- });
+ }
});
- assert.eq(testOpCount, 485, 'Expected 485 tests to run');
+});
+assert.eq(testOpCount, 485, 'Expected 485 tests to run');
})();
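The three operators and the %G/%V/%u format specifiers, condensed into one projection over the boundary date used in the regression test above:

    db.server7695.aggregate([{
        $project: {
            _id: 0,
            day: {$isoDayOfWeek: new Date('1900-12-31T23:59:59Z')},  // 1 (Monday)
            week: {$isoWeek: new Date('1900-12-31T23:59:59Z')},      // 1
            year: {$isoWeekYear: new Date('1900-12-31T23:59:59Z')},  // 1901
            str: {
                $dateToString:
                    {date: new Date('1900-12-31T23:59:59Z'), format: '%G-W%V-%u'}
            }  // '1901-W01-1'
        }
    }]);
    // The last day of 1900 belongs to ISO week 1 of 1901, which is exactly the
    // %V/%G ordering case that originally regressed.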
diff --git a/jstests/aggregation/bugs/server7781.js b/jstests/aggregation/bugs/server7781.js
index f755a6af0ad..19700cc2202 100644
--- a/jstests/aggregation/bugs/server7781.js
+++ b/jstests/aggregation/bugs/server7781.js
@@ -4,128 +4,128 @@
// requires_spawning_own_processes,
// ]
(function() {
- 'use strict';
-
- load('jstests/libs/geo_near_random.js');
- load('jstests/aggregation/extras/utils.js');
-
- var coll = 'server7781';
-
+'use strict';
+
+load('jstests/libs/geo_near_random.js');
+load('jstests/aggregation/extras/utils.js');
+
+var coll = 'server7781';
+
+db[coll].drop();
+db[coll].insert({loc: [0, 0]});
+
+// $geoNear is only allowed as the first stage in a pipeline, nowhere else.
+assert.throws(
+ () => db[coll].aggregate(
+ [{$match: {x: 1}}, {$geoNear: {near: [1, 1], spherical: true, distanceField: 'dis'}}]));
+
+const kDistanceField = "dis";
+const kIncludeLocsField = "loc";
+
+/**
+ * Tests the output of the $geoNear command. This function expects a document with the following
+ * fields:
+ * - 'geoNearSpec' is the specification for a $geoNear aggregation stage.
+ * - 'limit' is an integer limiting the number of pipeline results.
+ * - 'batchSize', if specified, is the batchSize to use for the aggregation.
+ */
+function testGeoNearStageOutput({geoNearSpec, limit, batchSize}) {
+ const aggOptions = batchSize ? {batchSize: batchSize} : {};
+ const result =
+ db[coll].aggregate([{$geoNear: geoNearSpec}, {$limit: limit}], aggOptions).toArray();
+ const errmsg = () => tojson(result);
+
+ // Verify that we got the expected number of results.
+ assert.eq(result.length, limit, errmsg);
+
+    // Run through the array, checking for proper sort order and sane computed distances.
+ result.reduce((lastDist, curDoc) => {
+ const curDist = curDoc[kDistanceField];
+
+ // Verify that distances are in increasing order.
+ assert.lte(lastDist, curDist, errmsg);
+
+ // Verify that the computed distance is correct.
+ const computed = Geo.sphereDistance(geoNearSpec["near"], curDoc[kIncludeLocsField]);
+ assert.close(computed, curDist, errmsg);
+ return curDist;
+ }, 0);
+}
+
+// We use this to generate points. Using a single global to avoid resetting the RNG in each pass.
+var pointMaker = new GeoNearRandomTest(coll);
+
+function test(db, sharded, indexType) {
db[coll].drop();
- db[coll].insert({loc: [0, 0]});
-
- // $geoNear is only allowed as the first stage in a pipeline, nowhere else.
- assert.throws(
- () => db[coll].aggregate(
- [{$match: {x: 1}}, {$geoNear: {near: [1, 1], spherical: true, distanceField: 'dis'}}]));
-
- const kDistanceField = "dis";
- const kIncludeLocsField = "loc";
-
- /**
- * Tests the output of the $geoNear command. This function expects a document with the following
- * fields:
- * - 'geoNearSpec' is the specification for a $geoNear aggregation stage.
- * - 'limit' is an integer limiting the number of pipeline results.
- * - 'batchSize', if specified, is the batchSize to use for the aggregation.
- */
- function testGeoNearStageOutput({geoNearSpec, limit, batchSize}) {
- const aggOptions = batchSize ? {batchSize: batchSize} : {};
- const result =
- db[coll].aggregate([{$geoNear: geoNearSpec}, {$limit: limit}], aggOptions).toArray();
- const errmsg = () => tojson(result);
-
- // Verify that we got the expected number of results.
- assert.eq(result.length, limit, errmsg);
-
- // Run though the array, checking for proper sort order and sane computed distances.
- result.reduce((lastDist, curDoc) => {
- const curDist = curDoc[kDistanceField];
-
- // Verify that distances are in increasing order.
- assert.lte(lastDist, curDist, errmsg);
-
- // Verify that the computed distance is correct.
- const computed = Geo.sphereDistance(geoNearSpec["near"], curDoc[kIncludeLocsField]);
- assert.close(computed, curDist, errmsg);
- return curDist;
- }, 0);
- }
-
- // We use this to generate points. Using a single global to avoid reseting RNG in each pass.
- var pointMaker = new GeoNearRandomTest(coll);
- function test(db, sharded, indexType) {
- db[coll].drop();
-
- if (sharded) { // sharded setup
- var shards = [];
- var config = db.getSiblingDB("config");
- config.shards.find().forEach(function(shard) {
- shards.push(shard._id);
- });
+ if (sharded) { // sharded setup
+ var shards = [];
+ var config = db.getSiblingDB("config");
+ config.shards.find().forEach(function(shard) {
+ shards.push(shard._id);
+ });
+ assert.commandWorked(
+ db.adminCommand({shardCollection: db[coll].getFullName(), key: {rand: 1}}));
+ for (var i = 1; i < 10; i++) {
+ // split at 0.1, 0.2, ... 0.9
assert.commandWorked(
- db.adminCommand({shardCollection: db[coll].getFullName(), key: {rand: 1}}));
- for (var i = 1; i < 10; i++) {
- // split at 0.1, 0.2, ... 0.9
- assert.commandWorked(
- db.adminCommand({split: db[coll].getFullName(), middle: {rand: i / 10}}));
- db.adminCommand({
- moveChunk: db[coll].getFullName(),
- find: {rand: i / 10},
- to: shards[i % shards.length]
- });
- }
-
- assert.eq(config.chunks.count({'ns': db[coll].getFullName()}), 10);
- }
-
- // insert points
- var numPts = 10 * 1000;
- var bulk = db[coll].initializeUnorderedBulkOp();
- for (var i = 0; i < numPts; i++) {
- bulk.insert({rand: Math.random(), loc: pointMaker.mkPt()});
+ db.adminCommand({split: db[coll].getFullName(), middle: {rand: i / 10}}));
+ db.adminCommand({
+ moveChunk: db[coll].getFullName(),
+ find: {rand: i / 10},
+ to: shards[i % shards.length]
+ });
}
- assert.writeOK(bulk.execute());
-
- assert.eq(db[coll].count(), numPts);
-
- db[coll].ensureIndex({loc: indexType});
-
- // Test $geoNear with spherical coordinates.
- testGeoNearStageOutput({
- geoNearSpec: {
- near: pointMaker.mkPt(0.25),
- distanceField: kDistanceField,
- includeLocs: kIncludeLocsField,
- spherical: true,
- },
- limit: 100
- });
- // Test $geoNear with an initial batchSize of 1.
- testGeoNearStageOutput({
- geoNearSpec: {
- near: pointMaker.mkPt(0.25),
- distanceField: kDistanceField,
- includeLocs: kIncludeLocsField,
- spherical: true,
- },
- limit: 70,
- batchSize: 1
- });
+ assert.eq(config.chunks.count({'ns': db[coll].getFullName()}), 10);
}
- test(db, false, '2d');
- test(db, false, '2dsphere');
-
- var sharded = new ShardingTest({shards: 3, mongos: 1});
- assert.commandWorked(sharded.s0.adminCommand({enablesharding: "test"}));
- sharded.ensurePrimaryShard('test', sharded.shard1.shardName);
-
- test(sharded.getDB('test'), true, '2d');
- test(sharded.getDB('test'), true, '2dsphere');
-
- sharded.stop();
+ // insert points
+ var numPts = 10 * 1000;
+ var bulk = db[coll].initializeUnorderedBulkOp();
+ for (var i = 0; i < numPts; i++) {
+ bulk.insert({rand: Math.random(), loc: pointMaker.mkPt()});
+ }
+ assert.writeOK(bulk.execute());
+
+ assert.eq(db[coll].count(), numPts);
+
+ db[coll].ensureIndex({loc: indexType});
+
+ // Test $geoNear with spherical coordinates.
+ testGeoNearStageOutput({
+ geoNearSpec: {
+ near: pointMaker.mkPt(0.25),
+ distanceField: kDistanceField,
+ includeLocs: kIncludeLocsField,
+ spherical: true,
+ },
+ limit: 100
+ });
+
+ // Test $geoNear with an initial batchSize of 1.
+ testGeoNearStageOutput({
+ geoNearSpec: {
+ near: pointMaker.mkPt(0.25),
+ distanceField: kDistanceField,
+ includeLocs: kIncludeLocsField,
+ spherical: true,
+ },
+ limit: 70,
+ batchSize: 1
+ });
+}
+
+test(db, false, '2d');
+test(db, false, '2dsphere');
+
+var sharded = new ShardingTest({shards: 3, mongos: 1});
+assert.commandWorked(sharded.s0.adminCommand({enablesharding: "test"}));
+sharded.ensurePrimaryShard('test', sharded.shard1.shardName);
+
+test(sharded.getDB('test'), true, '2d');
+test(sharded.getDB('test'), true, '2dsphere');
+
+sharded.stop();
})();
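
For reference, a minimal standalone sketch of the property the test above asserts: $geoNear emits documents in ascending distance order and can report the matched location via includeLocs. Collection and field names here (places, dist, locUsed) are illustrative, not taken from the test.

db.places.drop();
assert.writeOK(db.places.insert([{loc: [0, 0]}, {loc: [1, 1]}, {loc: [2, 2]}]));
db.places.ensureIndex({loc: "2dsphere"});
var docs = db.places
               .aggregate([{
                   $geoNear: {
                       near: [0, 0],
                       distanceField: "dist",
                       includeLocs: "locUsed",
                       spherical: true
                   }
               }])
               .toArray();
// Distances must come back in non-decreasing order.
docs.reduce(function(last, doc) {
    assert.lte(last, doc.dist);
    return doc.dist;
}, 0);
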
diff --git a/jstests/aggregation/bugs/server8141.js b/jstests/aggregation/bugs/server8141.js
index 9777737517b..908fd952059 100644
--- a/jstests/aggregation/bugs/server8141.js
+++ b/jstests/aggregation/bugs/server8141.js
@@ -1,52 +1,50 @@
// SERVER-8141 Avoid treating arrays as literals in aggregation pipeline.
(function() {
- 'use strict';
- var coll = db.exprs_in_arrays;
- coll.drop();
+'use strict';
+var coll = db.exprs_in_arrays;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0, a: ['foo', 'bar', 'baz'], b: 'bar', c: 'Baz'}));
+assert.writeOK(coll.insert({_id: 0, a: ['foo', 'bar', 'baz'], b: 'bar', c: 'Baz'}));
- // An array of constants should still evaluate to an array of constants.
- var pipeline = [{$project: {_id: 0, d: ['constant', 1]}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['constant', 1]}]);
+// An array of constants should still evaluate to an array of constants.
+var pipeline = [{$project: {_id: 0, d: ['constant', 1]}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['constant', 1]}]);
- // A field name inside an array should take on the value of that field.
- pipeline = [{$project: {_id: 0, d: ['$b']}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar']}]);
+// A field name inside an array should take on the value of that field.
+pipeline = [{$project: {_id: 0, d: ['$b']}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar']}]);
- // An expression inside an array should be evaluated.
- pipeline = [{$project: {_id: 0, d: [{$toLower: 'FoO'}]}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['foo']}]);
+// An expression inside an array should be evaluated.
+pipeline = [{$project: {_id: 0, d: [{$toLower: 'FoO'}]}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['foo']}]);
- // Both an expression and a field name inside an array should be evaluated.
- pipeline = [{$project: {_id: 0, d: ['$b', {$toLower: 'FoO'}]}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar', 'foo']}]);
+// Both an expression and a field name inside an array should be evaluated.
+pipeline = [{$project: {_id: 0, d: ['$b', {$toLower: 'FoO'}]}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar', 'foo']}]);
- // A nested array should still be evaluated.
- pipeline = [{$project: {_id: 0, d: ['$b', 'constant', [1, {$toLower: 'FoO'}]]}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar', 'constant', [1, 'foo']]}]);
+// A nested array should still be evaluated.
+pipeline = [{$project: {_id: 0, d: ['$b', 'constant', [1, {$toLower: 'FoO'}]]}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar', 'constant', [1, 'foo']]}]);
- // Should still evaluate array elements inside arguments to an expression.
- pipeline = [{$project: {_id: 0, d: {$setIntersection: ['$a', ['$b']]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar']}]);
+// Should still evaluate array elements inside arguments to an expression.
+pipeline = [{$project: {_id: 0, d: {$setIntersection: ['$a', ['$b']]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['bar']}]);
- pipeline = [{$project: {_id: 0, d: {$setIntersection: ['$a', [{$toLower: 'FoO'}]]}}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['foo']}]);
+pipeline = [{$project: {_id: 0, d: {$setIntersection: ['$a', [{$toLower: 'FoO'}]]}}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: ['foo']}]);
- // Nested arrays.
- pipeline = [{
- $project: {
- _id: 0,
- d: {$setIntersection: [[[1, 'foo', 'bar']], [[1, {$toLower: 'FoO'}, '$b']]]}
- }
- }];
- assert.eq(coll.aggregate(pipeline).toArray(), [{d: [[1, 'foo', 'bar']]}]);
+// Nested arrays.
+pipeline = [{
+ $project:
+ {_id: 0, d: {$setIntersection: [[[1, 'foo', 'bar']], [[1, {$toLower: 'FoO'}, '$b']]]}}
+}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{d: [[1, 'foo', 'bar']]}]);
- coll.drop();
+coll.drop();
- // Should replace missing values with NULL to preserve indices.
- assert.writeOK(coll.insert({_id: 1, x: 1, z: 2}));
+// Should replace missing values with NULL to preserve indices.
+assert.writeOK(coll.insert({_id: 1, x: 1, z: 2}));
- pipeline = [{$project: {_id: 0, coordinate: ['$x', '$y', '$z']}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{coordinate: [1, null, 2]}]);
+pipeline = [{$project: {_id: 0, coordinate: ['$x', '$y', '$z']}}];
+assert.eq(coll.aggregate(pipeline).toArray(), [{coordinate: [1, null, 2]}]);
}());
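
A compact recap of the rule the assertions above exercise: inside an array literal in $project, plain values stay literal while field paths and operator expressions are evaluated element by element. The collection name below is illustrative.

db.exprs_demo.drop();
assert.writeOK(db.exprs_demo.insert({_id: 0, b: 'bar'}));
assert.eq(db.exprs_demo.aggregate([{$project: {_id: 0, d: ['lit', '$b', {$toUpper: '$b'}]}}])
              .toArray(),
          [{d: ['lit', 'bar', 'BAR']}]);
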
diff --git a/jstests/aggregation/bugs/server8164.js b/jstests/aggregation/bugs/server8164.js
index 89cb360d91f..5b137b18d87 100644
--- a/jstests/aggregation/bugs/server8164.js
+++ b/jstests/aggregation/bugs/server8164.js
@@ -1,144 +1,144 @@
// SERVER-8164: ISODate doesn't handle years less than 100 properly.
(function() {
- assert.eq(tojson(ISODate("0000-01-01")), 'ISODate("0000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00")), 'ISODate("0000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.123")), 'ISODate("0000-01-01T00:00:00.123Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.123Z")), 'ISODate("0000-01-01T00:00:00.123Z")');
-
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.1Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.10Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.100Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.1000Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
-
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.1234Z")), 'ISODate("0000-01-01T00:00:00.123Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:00.1235Z")), 'ISODate("0000-01-01T00:00:00.124Z")');
-
- /* Testing different years */
- assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00Z")), 'ISODate("0001-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0069-01-01T00:00:00Z")), 'ISODate("0069-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0070-01-01T00:00:00Z")), 'ISODate("0070-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0099-01-01T00:00:00Z")), 'ISODate("0099-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0100-01-01T00:00:00Z")), 'ISODate("0100-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1800-01-01T00:00:00Z")), 'ISODate("1800-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1801-01-01T00:00:00Z")), 'ISODate("1801-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1869-01-01T00:00:00Z")), 'ISODate("1869-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1870-01-01T00:00:00Z")), 'ISODate("1870-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1899-01-01T00:00:00Z")), 'ISODate("1899-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1900-01-01T00:00:00Z")), 'ISODate("1900-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1901-01-01T00:00:00Z")), 'ISODate("1901-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1969-01-01T00:00:00Z")), 'ISODate("1969-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1970-01-01T00:00:00Z")), 'ISODate("1970-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1999-01-01T00:00:00Z")), 'ISODate("1999-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2000-01-01T00:00:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2001-01-01T00:00:00Z")), 'ISODate("2001-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2069-01-01T00:00:00Z")), 'ISODate("2069-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2070-01-01T00:00:00Z")), 'ISODate("2070-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2099-01-01T00:00:00Z")), 'ISODate("2099-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("9999-01-01T00:00:00Z")), 'ISODate("9999-01-01T00:00:00Z")');
-
- /* Testing without - in date and : in time */
- assert.eq(tojson(ISODate("19980101T00:00:00Z")), 'ISODate("1998-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1999-0101T00:00:00Z")), 'ISODate("1999-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("200001-01T00:00:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1998-01-01T000000Z")), 'ISODate("1998-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("1999-01-01T00:0000Z")), 'ISODate("1999-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("2000-01-01T0000:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
-
- /* Testing field overflows */
- assert.eq(tojson(ISODate("0000-01-01T00:00:60Z")), 'ISODate("0000-01-01T00:01:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:00:99Z")), 'ISODate("0000-01-01T00:01:39Z")');
-
- assert.eq(tojson(ISODate("0000-01-01T00:60:00Z")), 'ISODate("0000-01-01T01:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T00:99:00Z")), 'ISODate("0000-01-01T01:39:00Z")');
-
- assert.eq(tojson(ISODate("0000-01-01T24:00:00Z")), 'ISODate("0000-01-02T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-01T99:00:00Z")), 'ISODate("0000-01-05T03:00:00Z")');
-
- assert.eq(tojson(ISODate("0000-01-32T00:00:00Z")), 'ISODate("0000-02-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-01-99T00:00:00Z")), 'ISODate("0000-04-08T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-02-29T00:00:00Z")), 'ISODate("0000-02-29T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-02-30T00:00:00Z")), 'ISODate("0000-03-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-02-31T00:00:00Z")), 'ISODate("0000-03-02T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-02-99T00:00:00Z")), 'ISODate("0000-05-09T00:00:00Z")');
-
- assert.eq(tojson(ISODate("0001-02-29T00:00:00Z")), 'ISODate("0001-03-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-02-30T00:00:00Z")), 'ISODate("0001-03-02T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-02-31T00:00:00Z")), 'ISODate("0001-03-03T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-02-99T00:00:00Z")), 'ISODate("0001-05-10T00:00:00Z")');
-
- assert.eq(tojson(ISODate("0000-13-01T00:00:00Z")), 'ISODate("0001-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("0000-99-01T00:00:00Z")), 'ISODate("0008-03-01T00:00:00Z")');
-
- /* Testing GMT offset instead of Z */
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+01")), 'ISODate("0000-12-31T23:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+99")), 'ISODate("0000-12-27T21:00:00Z")');
-
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-01")), 'ISODate("0001-01-01T01:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-99")), 'ISODate("0001-01-05T03:00:00Z")');
-
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+0100")), 'ISODate("0000-12-31T23:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+0160")), 'ISODate("0000-12-31T22:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+0199")), 'ISODate("0000-12-31T21:21:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+9999")), 'ISODate("0000-12-27T19:21:00Z")');
-
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-0100")), 'ISODate("0001-01-01T01:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-0160")), 'ISODate("0001-01-01T02:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-0199")), 'ISODate("0001-01-01T02:39:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-9999")), 'ISODate("0001-01-05T04:39:00Z")');
-
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:00")), 'ISODate("0000-12-31T23:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:60")), 'ISODate("0000-12-31T22:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:99")), 'ISODate("0000-12-31T21:21:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00+99:99")), 'ISODate("0000-12-27T19:21:00Z")');
-
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:00")), 'ISODate("0001-01-01T01:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:60")), 'ISODate("0001-01-01T02:00:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:99")), 'ISODate("0001-01-01T02:39:00Z")');
- assert.eq(tojson(ISODate("0001-01-01T00:00:00-99:99")), 'ISODate("0001-01-05T04:39:00Z")');
-
- /* Testing field underflows */
- assert.eq(tojson(ISODate("0001-01-00T00:00:00Z")), 'ISODate("0000-12-31T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-00-00T00:00:00Z")), 'ISODate("0000-11-30T00:00:00Z")');
- assert.eq(tojson(ISODate("0001-00-01T00:00:00Z")), 'ISODate("0000-12-01T00:00:00Z")');
-
- /* Testing lowest and highest */
- assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
- assert.eq(tojson(ISODate("9999-12-31T23:59:59.999Z")), 'ISODate("9999-12-31T23:59:59.999Z")');
-
- /* Testing out of range */
+assert.eq(tojson(ISODate("0000-01-01")), 'ISODate("0000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00")), 'ISODate("0000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.123")), 'ISODate("0000-01-01T00:00:00.123Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.123Z")), 'ISODate("0000-01-01T00:00:00.123Z")');
+
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.1Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.10Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.100Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.1000Z")), 'ISODate("0000-01-01T00:00:00.100Z")');
+
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.1234Z")), 'ISODate("0000-01-01T00:00:00.123Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:00.1235Z")), 'ISODate("0000-01-01T00:00:00.124Z")');
+
+/* Testing different years */
+assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00Z")), 'ISODate("0001-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0069-01-01T00:00:00Z")), 'ISODate("0069-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0070-01-01T00:00:00Z")), 'ISODate("0070-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0099-01-01T00:00:00Z")), 'ISODate("0099-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0100-01-01T00:00:00Z")), 'ISODate("0100-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1800-01-01T00:00:00Z")), 'ISODate("1800-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1801-01-01T00:00:00Z")), 'ISODate("1801-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1869-01-01T00:00:00Z")), 'ISODate("1869-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1870-01-01T00:00:00Z")), 'ISODate("1870-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1899-01-01T00:00:00Z")), 'ISODate("1899-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1900-01-01T00:00:00Z")), 'ISODate("1900-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1901-01-01T00:00:00Z")), 'ISODate("1901-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1969-01-01T00:00:00Z")), 'ISODate("1969-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1970-01-01T00:00:00Z")), 'ISODate("1970-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1999-01-01T00:00:00Z")), 'ISODate("1999-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2000-01-01T00:00:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2001-01-01T00:00:00Z")), 'ISODate("2001-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2069-01-01T00:00:00Z")), 'ISODate("2069-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2070-01-01T00:00:00Z")), 'ISODate("2070-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2099-01-01T00:00:00Z")), 'ISODate("2099-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("9999-01-01T00:00:00Z")), 'ISODate("9999-01-01T00:00:00Z")');
+
+/* Testing without - in date and : in time */
+assert.eq(tojson(ISODate("19980101T00:00:00Z")), 'ISODate("1998-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1999-0101T00:00:00Z")), 'ISODate("1999-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("200001-01T00:00:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1998-01-01T000000Z")), 'ISODate("1998-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("1999-01-01T00:0000Z")), 'ISODate("1999-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("2000-01-01T0000:00Z")), 'ISODate("2000-01-01T00:00:00Z")');
+
+/* Testing field overflows */
+assert.eq(tojson(ISODate("0000-01-01T00:00:60Z")), 'ISODate("0000-01-01T00:01:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:00:99Z")), 'ISODate("0000-01-01T00:01:39Z")');
+
+assert.eq(tojson(ISODate("0000-01-01T00:60:00Z")), 'ISODate("0000-01-01T01:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T00:99:00Z")), 'ISODate("0000-01-01T01:39:00Z")');
+
+assert.eq(tojson(ISODate("0000-01-01T24:00:00Z")), 'ISODate("0000-01-02T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-01T99:00:00Z")), 'ISODate("0000-01-05T03:00:00Z")');
+
+assert.eq(tojson(ISODate("0000-01-32T00:00:00Z")), 'ISODate("0000-02-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-01-99T00:00:00Z")), 'ISODate("0000-04-08T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-02-29T00:00:00Z")), 'ISODate("0000-02-29T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-02-30T00:00:00Z")), 'ISODate("0000-03-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-02-31T00:00:00Z")), 'ISODate("0000-03-02T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-02-99T00:00:00Z")), 'ISODate("0000-05-09T00:00:00Z")');
+
+assert.eq(tojson(ISODate("0001-02-29T00:00:00Z")), 'ISODate("0001-03-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-02-30T00:00:00Z")), 'ISODate("0001-03-02T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-02-31T00:00:00Z")), 'ISODate("0001-03-03T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-02-99T00:00:00Z")), 'ISODate("0001-05-10T00:00:00Z")');
+
+assert.eq(tojson(ISODate("0000-13-01T00:00:00Z")), 'ISODate("0001-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("0000-99-01T00:00:00Z")), 'ISODate("0008-03-01T00:00:00Z")');
+
+/* Testing GMT offset instead of Z */
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+01")), 'ISODate("0000-12-31T23:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+99")), 'ISODate("0000-12-27T21:00:00Z")');
+
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-01")), 'ISODate("0001-01-01T01:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-99")), 'ISODate("0001-01-05T03:00:00Z")');
+
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+0100")), 'ISODate("0000-12-31T23:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+0160")), 'ISODate("0000-12-31T22:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+0199")), 'ISODate("0000-12-31T21:21:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+9999")), 'ISODate("0000-12-27T19:21:00Z")');
+
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-0100")), 'ISODate("0001-01-01T01:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-0160")), 'ISODate("0001-01-01T02:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-0199")), 'ISODate("0001-01-01T02:39:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-9999")), 'ISODate("0001-01-05T04:39:00Z")');
+
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:00")), 'ISODate("0000-12-31T23:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:60")), 'ISODate("0000-12-31T22:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+01:99")), 'ISODate("0000-12-31T21:21:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00+99:99")), 'ISODate("0000-12-27T19:21:00Z")');
+
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:00")), 'ISODate("0001-01-01T01:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:60")), 'ISODate("0001-01-01T02:00:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-01:99")), 'ISODate("0001-01-01T02:39:00Z")');
+assert.eq(tojson(ISODate("0001-01-01T00:00:00-99:99")), 'ISODate("0001-01-05T04:39:00Z")');
+
+/* Testing field underflows */
+assert.eq(tojson(ISODate("0001-01-00T00:00:00Z")), 'ISODate("0000-12-31T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-00-00T00:00:00Z")), 'ISODate("0000-11-30T00:00:00Z")');
+assert.eq(tojson(ISODate("0001-00-01T00:00:00Z")), 'ISODate("0000-12-01T00:00:00Z")');
+
+/* Testing lowest and highest */
+assert.eq(tojson(ISODate("0000-01-01T00:00:00Z")), 'ISODate("0000-01-01T00:00:00Z")');
+assert.eq(tojson(ISODate("9999-12-31T23:59:59.999Z")), 'ISODate("9999-12-31T23:59:59.999Z")');
+
+/* Testing out of range */
+assert.throws(function() {
+ tojson(ISODate("0000-01-00T23:59:59.999Z"));
+});
+assert.throws(function() {
+ tojson(ISODate("9999-12-31T23:59:60Z"));
+});
+
+/* Testing broken format */
+var brokenFormatTests = [
+ "2017",
+ "2017-09",
+ "2017-09-16T18:37 25Z",
+ "2017-09-16T18 37:25Z",
+ "2017-09-16X18:37:25Z",
+ "2017-09 16T18:37:25Z",
+ "2017 09-16T18:37:25Z",
+ "2017-09-16T18:37:25 123Z",
+ "2017-09-16T18:37:25 0600",
+];
+
+brokenFormatTests.forEach(function(test) {
assert.throws(function() {
- tojson(ISODate("0000-01-00T23:59:59.999Z"));
- });
- assert.throws(function() {
- tojson(ISODate("9999-12-31T23:59:60Z"));
- });
-
- /* Testing broken format */
- var brokenFormatTests = [
- "2017",
- "2017-09",
- "2017-09-16T18:37 25Z",
- "2017-09-16T18 37:25Z",
- "2017-09-16X18:37:25Z",
- "2017-09 16T18:37:25Z",
- "2017 09-16T18:37:25Z",
- "2017-09-16T18:37:25 123Z",
- "2017-09-16T18:37:25 0600",
- ];
-
- brokenFormatTests.forEach(function(test) {
- assert.throws(function() {
- print(tojson(ISODate(test)));
- }, [tojson(test)]);
- });
-
- /* Testing conversion to milliseconds */
- assert.eq(ISODate("1969-12-31T23:59:59.999Z"), new Date(-1));
- assert.eq(ISODate("1969-12-31T23:59:59.000Z"), new Date(-1000));
- assert.eq(ISODate("1900-01-01T00:00:00.000Z"), new Date(-2208988800000));
- assert.eq(ISODate("1899-12-31T23:59:59.999Z"), new Date(-2208988800001));
- assert.eq(ISODate("0000-01-01T00:00:00.000Z"), new Date(-62167219200000));
- assert.eq(ISODate("9999-12-31T23:59:59.999Z"), new Date(253402300799999));
+ print(tojson(ISODate(test)));
+ }, [tojson(test)]);
+});
+
+/* Testing conversion to milliseconds */
+assert.eq(ISODate("1969-12-31T23:59:59.999Z"), new Date(-1));
+assert.eq(ISODate("1969-12-31T23:59:59.000Z"), new Date(-1000));
+assert.eq(ISODate("1900-01-01T00:00:00.000Z"), new Date(-2208988800000));
+assert.eq(ISODate("1899-12-31T23:59:59.999Z"), new Date(-2208988800001));
+assert.eq(ISODate("0000-01-01T00:00:00.000Z"), new Date(-62167219200000));
+assert.eq(ISODate("9999-12-31T23:59:59.999Z"), new Date(253402300799999));
}());
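
One of the overflow assertions above, unpacked: 99 seconds rolls over into 1 minute 39 seconds, which is the same as adding 99,000 milliseconds to the base timestamp. A small cross-check along those lines (not part of the original test):

var base = ISODate("0000-01-01T00:00:00Z");
assert.eq(new Date(base.getTime() + 99 * 1000), ISODate("0000-01-01T00:01:39Z"));
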
diff --git a/jstests/aggregation/bugs/server8568.js b/jstests/aggregation/bugs/server8568.js
index ae9a9ad8202..71793f5696b 100644
--- a/jstests/aggregation/bugs/server8568.js
+++ b/jstests/aggregation/bugs/server8568.js
@@ -4,40 +4,40 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
- var coll = db.sqrt;
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
-
- // Helper for testing that op results in error with code errorCode.
- function testError(op, errorCode) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assertErrorCode(coll, pipeline, errorCode);
- }
-
- // Valid input: Numeric arg >= 0, null, or NaN.
-
- testOp({$sqrt: [100]}, 10);
- testOp({$sqrt: [0]}, 0);
- // All types converted to doubles.
- testOp({$sqrt: [NumberLong("100")]}, 10);
- // LLONG_MAX is converted to a double.
- testOp({$sqrt: [NumberLong("9223372036854775807")]}, 3037000499.97605);
- // Null inputs result in null.
- testOp({$sqrt: [null]}, null);
- // NaN inputs result in NaN.
- testOp({$sqrt: [NaN]}, NaN);
-
- // Invalid input: non-numeric/non-null, arg is negative.
-
- // Arg must be numeric or null.
- testError({$sqrt: ["string"]}, 28765);
- // Args cannot be negative.
- testError({$sqrt: [-1]}, 28714);
+'use strict';
+var coll = db.sqrt;
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
+
+// Helper for testing that op results in error with code errorCode.
+function testError(op, errorCode) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assertErrorCode(coll, pipeline, errorCode);
+}
+
+// Valid input: Numeric arg >= 0, null, or NaN.
+
+testOp({$sqrt: [100]}, 10);
+testOp({$sqrt: [0]}, 0);
+// All types converted to doubles.
+testOp({$sqrt: [NumberLong("100")]}, 10);
+// LLONG_MAX is converted to a double.
+testOp({$sqrt: [NumberLong("9223372036854775807")]}, 3037000499.97605);
+// Null inputs result in null.
+testOp({$sqrt: [null]}, null);
+// NaN inputs result in NaN.
+testOp({$sqrt: [NaN]}, NaN);
+
+// Invalid input: non-numeric/non-null, arg is negative.
+
+// Arg must be numeric or null.
+testError({$sqrt: ["string"]}, 28765);
+// Args cannot be negative.
+testError({$sqrt: [-1]}, 28714);
}());
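
The happy path of the helper pattern above, as one self-contained snippet; the collection name and the non-constant input are illustrative.

var demo = db.sqrt_demo;
demo.drop();
assert.writeOK(demo.insert({_id: 0, x: 16}));
assert.eq(demo.aggregate([{$project: {_id: 0, r: {$sqrt: ['$x']}}}]).toArray(), [{r: 4}]);
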
diff --git a/jstests/aggregation/bugs/server8581.js b/jstests/aggregation/bugs/server8581.js
index 54b97be3d08..fa81578ccbd 100644
--- a/jstests/aggregation/bugs/server8581.js
+++ b/jstests/aggregation/bugs/server8581.js
@@ -79,47 +79,47 @@ a3result = [{
a4result = [
{
- _id: 1,
- level: 1,
- b: {
- level: 3,
- c: 5,
- d: [{level: 1, e: 4}, {f: 6}, "NOT AN OBJECT!!11!", [2, 3, 4, {level: 1, r: 11}]]
- },
- h: {level: 2, i: {level: 4, j: {level: 1, k: 8}}},
- l: {m: {level: 3, n: 12}},
- o: [],
- q: 14
+ _id: 1,
+ level: 1,
+ b: {
+ level: 3,
+ c: 5,
+ d: [{level: 1, e: 4}, {f: 6}, "NOT AN OBJECT!!11!", [2, 3, 4, {level: 1, r: 11}]]
+ },
+ h: {level: 2, i: {level: 4, j: {level: 1, k: 8}}},
+ l: {m: {level: 3, n: 12}},
+ o: [],
+ q: 14
},
{
- _id: 2,
- level: 4,
+ _id: 2,
+ level: 4,
}
];
a5result = [
{
- _id: 1,
- level: 1,
- b: {
- level: 3,
- c: 5,
- d: [
- {level: 1, e: 4},
- {f: 6},
- {level: 5, g: 9},
- "NOT AN OBJECT!!11!",
- [2, 3, 4, {level: 1, r: 11}, {level: 5, s: 99}]
- ]
- },
- h: {level: 2, i: {level: 4, j: {level: 1, k: 8}}},
- l: {m: {level: 3, n: 12}},
- o: [{level: 5, p: 19}],
- q: 14
+ _id: 1,
+ level: 1,
+ b: {
+ level: 3,
+ c: 5,
+ d: [
+ {level: 1, e: 4},
+ {f: 6},
+ {level: 5, g: 9},
+ "NOT AN OBJECT!!11!",
+ [2, 3, 4, {level: 1, r: 11}, {level: 5, s: 99}]
+ ]
+ },
+ h: {level: 2, i: {level: 4, j: {level: 1, k: 8}}},
+ l: {m: {level: 3, n: 12}},
+ o: [{level: 5, p: 19}],
+ q: 14
},
{
- _id: 2,
- level: 4,
+ _id: 2,
+ level: 4,
}
];
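
The start of this file falls outside the hunks shown, but fixtures shaped like a4result and a5result are typical expected outputs of a $redact pipeline keyed on the "level" fields. A hedged sketch of that pattern, with an illustrative collection name and threshold:

db.redact_demo.drop();
assert.writeOK(db.redact_demo.insert({_id: 1, level: 1, b: {level: 5, secret: 1}, q: 14}));
// DESCEND into subdocuments whose level passes the threshold; PRUNE the rest.
assert.eq(db.redact_demo
              .aggregate([{$redact: {$cond: [{$lte: ['$level', 4]}, '$$DESCEND', '$$PRUNE']}}])
              .toArray(),
          [{_id: 1, level: 1, q: 14}]);
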
diff --git a/jstests/aggregation/bugs/server9444.js b/jstests/aggregation/bugs/server9444.js
index f3c6a449fad..6bb554c8e62 100644
--- a/jstests/aggregation/bugs/server9444.js
+++ b/jstests/aggregation/bugs/server9444.js
@@ -1,64 +1,64 @@
// server-9444 support disk storage of intermediate results in aggregation
(function() {
- 'use strict';
+'use strict';
- load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'
+load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'
- const t = db.server9444;
- t.drop();
+const t = db.server9444;
+t.drop();
- const sharded = FixtureHelpers.isSharded(t);
+const sharded = FixtureHelpers.isSharded(t);
- var memoryLimitMB = sharded ? 200 : 100;
+var memoryLimitMB = sharded ? 200 : 100;
- function loadData() {
- var bigStr = Array(1024 * 1024 + 1).toString(); // 1MB of ','
- for (var i = 0; i < memoryLimitMB + 1; i++)
- t.insert({_id: i, bigStr: i + bigStr, random: Math.random()});
+function loadData() {
+ var bigStr = Array(1024 * 1024 + 1).toString(); // 1MB of ','
+ for (var i = 0; i < memoryLimitMB + 1; i++)
+ t.insert({_id: i, bigStr: i + bigStr, random: Math.random()});
- assert.gt(t.stats().size, memoryLimitMB * 1024 * 1024);
- }
- loadData();
+ assert.gt(t.stats().size, memoryLimitMB * 1024 * 1024);
+}
+loadData();
- function test(pipeline, outOfMemoryCode) {
- // ensure by default we error out if exceeding memory limit
- var res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}});
- assert.commandFailed(res);
- assert.eq(res.code, outOfMemoryCode);
+function test(pipeline, outOfMemoryCode) {
+ // ensure by default we error out if exceeding memory limit
+ var res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}});
+ assert.commandFailed(res);
+ assert.eq(res.code, outOfMemoryCode);
- // ensure allowDiskUse: false does what it says
- res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}, allowDiskUse: false});
- assert.commandFailed(res);
- assert.eq(res.code, outOfMemoryCode);
+ // ensure allowDiskUse: false does what it says
+ res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}, allowDiskUse: false});
+ assert.commandFailed(res);
+ assert.eq(res.code, outOfMemoryCode);
- // allowDiskUse only supports bool. In particular, numbers aren't allowed.
- res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}, allowDiskUse: 1});
- assert.commandFailed(res);
+ // allowDiskUse only supports bool. In particular, numbers aren't allowed.
+ res = t.runCommand('aggregate', {pipeline: pipeline, cursor: {}, allowDiskUse: 1});
+ assert.commandFailed(res);
- // ensure we work when allowDiskUse === true
- res = t.aggregate(pipeline, {allowDiskUse: true});
- assert.eq(res.itcount(), t.count()); // all tests output one doc per input doc
- }
+ // ensure we work when allowDiskUse === true
+ res = t.aggregate(pipeline, {allowDiskUse: true});
+ assert.eq(res.itcount(), t.count()); // all tests output one doc per input doc
+}
- var groupCode = 16945;
- var sortCode = 16819;
- var sortLimitCode = 16820;
+var groupCode = 16945;
+var sortCode = 16819;
+var sortLimitCode = 16820;
- test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}], groupCode);
+test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}], groupCode);
-    // sorting with _id would use an index, which doesn't require extsort
- test([{$sort: {random: 1}}], sortCode);
- test([{$sort: {bigStr: 1}}], sortCode); // big key and value
+// sorting with _id would use an index, which doesn't require extsort
+test([{$sort: {random: 1}}], sortCode);
+test([{$sort: {bigStr: 1}}], sortCode); // big key and value
- // make sure sort + large limit won't crash the server (SERVER-10136)
- test([{$sort: {bigStr: 1}}, {$limit: 1000 * 1000 * 1000}], sortLimitCode);
+// make sure sort + large limit won't crash the server (SERVER-10136)
+test([{$sort: {bigStr: 1}}, {$limit: 1000 * 1000 * 1000}], sortLimitCode);
- // test combining two extSorts in both same and different orders
- test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {_id: 1}}], groupCode);
- test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {_id: -1}}], groupCode);
- test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {random: 1}}], groupCode);
- test([{$sort: {random: 1}}, {$group: {_id: '$_id', bigStr: {$first: '$bigStr'}}}], sortCode);
+// test combining two extSorts in both same and different orders
+test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {_id: 1}}], groupCode);
+test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {_id: -1}}], groupCode);
+test([{$group: {_id: '$_id', bigStr: {$min: '$bigStr'}}}, {$sort: {random: 1}}], groupCode);
+test([{$sort: {random: 1}}, {$group: {_id: '$_id', bigStr: {$first: '$bigStr'}}}], sortCode);
-    // don't leave a large collection lying around
- t.drop();
+// don't leave a large collection lying around
+t.drop();
})();
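
Usage sketch of the option under test: the same command shape as in test(), but opting into disk use, which is what lets memory-heavy sorts and groups succeed once the in-memory limits above are exceeded.

var res = db.server9444.runCommand(
    'aggregate', {pipeline: [{$sort: {bigStr: 1}}], cursor: {}, allowDiskUse: true});
assert.commandWorked(res);
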
diff --git a/jstests/aggregation/bugs/server9625.js b/jstests/aggregation/bugs/server9625.js
index 4a525aba518..4cbf487b5e0 100644
--- a/jstests/aggregation/bugs/server9625.js
+++ b/jstests/aggregation/bugs/server9625.js
@@ -5,70 +5,70 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- 'use strict';
- var coll = db.server9625;
- coll.drop();
- assert.writeOK(coll.insert({}));
+'use strict';
+var coll = db.server9625;
+coll.drop();
+assert.writeOK(coll.insert({}));
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
- // ExpressionFromAccumulators take either a list of arguments or a single array argument.
- testOp({$avg: [1, 2, 3, 4, 5]}, 3);
- testOp({$avg: [[1, 2, 3, 4, 5]]}, 3);
- testOp({$min: [1, 2, 3, 4, 5]}, 1);
- testOp({$min: [[1, 2, 3, 4, 5]]}, 1);
- testOp({$max: [1, 2, 3, 4, 5]}, 5);
- testOp({$max: [[1, 2, 3, 4, 5]]}, 5);
- testOp({$sum: [1, 2, 3, 4, 5]}, 15);
- testOp({$sum: [[1, 2, 3, 4, 5]]}, 15);
- testOp({$stdDevPop: [1, 3]}, 1);
- testOp({$stdDevPop: [[1, 3]]}, 1);
- testOp({$stdDevSamp: [1, 2, 3]}, 1);
- testOp({$stdDevSamp: [[1, 2, 3]]}, 1);
+// ExpressionFromAccumulators take either a list of arguments or a single array argument.
+testOp({$avg: [1, 2, 3, 4, 5]}, 3);
+testOp({$avg: [[1, 2, 3, 4, 5]]}, 3);
+testOp({$min: [1, 2, 3, 4, 5]}, 1);
+testOp({$min: [[1, 2, 3, 4, 5]]}, 1);
+testOp({$max: [1, 2, 3, 4, 5]}, 5);
+testOp({$max: [[1, 2, 3, 4, 5]]}, 5);
+testOp({$sum: [1, 2, 3, 4, 5]}, 15);
+testOp({$sum: [[1, 2, 3, 4, 5]]}, 15);
+testOp({$stdDevPop: [1, 3]}, 1);
+testOp({$stdDevPop: [[1, 3]]}, 1);
+testOp({$stdDevSamp: [1, 2, 3]}, 1);
+testOp({$stdDevSamp: [[1, 2, 3]]}, 1);
- // Null arguments are ignored.
- testOp({$avg: [1, 2, 3, 4, 5, null]}, 3);
- testOp({$min: [1, 2, 3, 4, 5, null]}, 1);
- testOp({$max: [1, 2, 3, 4, 5, null]}, 5);
- testOp({$sum: [1, 2, 3, 4, 5, null]}, 15);
- testOp({$stdDevPop: [1, 3, null]}, 1);
- testOp({$stdDevSamp: [1, 2, 3, null]}, 1);
+// Null arguments are ignored.
+testOp({$avg: [1, 2, 3, 4, 5, null]}, 3);
+testOp({$min: [1, 2, 3, 4, 5, null]}, 1);
+testOp({$max: [1, 2, 3, 4, 5, null]}, 5);
+testOp({$sum: [1, 2, 3, 4, 5, null]}, 15);
+testOp({$stdDevPop: [1, 3, null]}, 1);
+testOp({$stdDevSamp: [1, 2, 3, null]}, 1);
- // NaN arguments are processed by all expressions.
- testOp({$avg: [1, 2, 3, 4, 5, NaN]}, NaN);
- testOp({$min: [1, 2, 3, 4, 5, NaN]}, NaN);
- testOp({$max: [1, 2, 3, 4, 5, NaN]}, 5);
- testOp({$sum: [1, 2, 3, 4, 5, NaN]}, NaN);
- testOp({$stdDevPop: [1, 3, NaN]}, NaN);
- testOp({$stdDevSamp: [1, 2, 3, NaN]}, NaN);
+// NaN arguments are processed by all expressions.
+testOp({$avg: [1, 2, 3, 4, 5, NaN]}, NaN);
+testOp({$min: [1, 2, 3, 4, 5, NaN]}, NaN);
+testOp({$max: [1, 2, 3, 4, 5, NaN]}, 5);
+testOp({$sum: [1, 2, 3, 4, 5, NaN]}, NaN);
+testOp({$stdDevPop: [1, 3, NaN]}, NaN);
+testOp({$stdDevSamp: [1, 2, 3, NaN]}, NaN);
- // Use at least one non-constant value in the following tests, to ensure
- // isAssociative() and isCommutative() are called. If all arguments are constant, the
- // optimization will evaluate them all into one, without calling isAssociative() nor
-    // optimization will evaluate them all into one, without calling isAssociative() or
- coll.drop();
- assert.writeOK(coll.insert({"a": 1, "b": 6}));
+// Use at least one non-constant value in the following tests, to ensure
+// isAssociative() and isCommutative() are called. If all arguments are constant, the
+// optimization will evaluate them all into one, without calling isAssociative() or
+// isCommutative().
+coll.drop();
+assert.writeOK(coll.insert({"a": 1, "b": 6}));
-    // These expressions are associative and commutative, so the inner expression can be combined
-    // with the outer one.
- testOp({$sum: ["$a", 2, 3, {$sum: [4, 5]}]}, 15);
- testOp({$min: ["$a", 2, 3, {$min: [4, 5]}]}, 1);
- testOp({$max: ["$a", 2, 3, {$max: [4, 5]}]}, 5);
+// These expressions are associative and commutative, so the inner expression can be combined
+// with the outer one.
+testOp({$sum: ["$a", 2, 3, {$sum: [4, 5]}]}, 15);
+testOp({$min: ["$a", 2, 3, {$min: [4, 5]}]}, 1);
+testOp({$max: ["$a", 2, 3, {$max: [4, 5]}]}, 5);
-    // These expressions are not associative and commutative, so the inner expression cannot be
-    // combined with the outer one.
- testOp({$avg: ["$a", 3, {$avg: [4, 6]}]}, 3);
- testOp({$stdDevPop: ["$a", {$stdDevPop: [1, 3]}]}, 0);
- testOp({$stdDevSamp: ["$a", {$stdDevSamp: [1, 2, 3]}]}, 0);
+// These expressions are not associative and commutative, so the inner expression cannot be
+// combined with the outer one.
+testOp({$avg: ["$a", 3, {$avg: [4, 6]}]}, 3);
+testOp({$stdDevPop: ["$a", {$stdDevPop: [1, 3]}]}, 0);
+testOp({$stdDevSamp: ["$a", {$stdDevSamp: [1, 2, 3]}]}, 0);
- // If isAssociative() and isCommutative() did not return false when provided a single argument,
- // the single array argument provided to the inner expression would be ignored instead of
- // treated as a list of arguments, and these tests would fail.
- testOp({$sum: ["$a", 2, 3, {$sum: [["$a", 4, 5]]}]}, 16);
- testOp({$min: ["$b", 2, 3, {$min: [["$a", 4, 5]]}]}, 1);
- testOp({$max: ["$a", 2, 3, {$max: [["$b", 4, 5]]}]}, 6);
+// If isAssociative() and isCommutative() did not return false when provided a single argument,
+// the single array argument provided to the inner expression would be ignored instead of
+// treated as a list of arguments, and these tests would fail.
+testOp({$sum: ["$a", 2, 3, {$sum: [["$a", 4, 5]]}]}, 16);
+testOp({$min: ["$b", 2, 3, {$min: [["$a", 4, 5]]}]}, 1);
+testOp({$max: ["$a", 2, 3, {$max: [["$b", 4, 5]]}]}, 6);
}());
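
The two calling conventions tested above, side by side in one self-contained snippet (illustrative collection name): a variadic argument list and a single array argument produce the same result.

db.acc_demo.drop();
assert.writeOK(db.acc_demo.insert({}));
assert.eq(db.acc_demo.aggregate([{$project: {_id: 0, r: {$sum: [1, 2, 3]}}}]).toArray(),
          [{r: 6}]);
assert.eq(db.acc_demo.aggregate([{$project: {_id: 0, r: {$sum: [[1, 2, 3]]}}}]).toArray(),
          [{r: 6}]);
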
diff --git a/jstests/aggregation/bugs/skip_limit_overflow.js b/jstests/aggregation/bugs/skip_limit_overflow.js
index f0d7e0b27c7..50e665b178f 100644
--- a/jstests/aggregation/bugs/skip_limit_overflow.js
+++ b/jstests/aggregation/bugs/skip_limit_overflow.js
@@ -8,116 +8,115 @@
* @tags: [do_not_wrap_aggregations_in_facets, assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStages' and other explain helpers.
+load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStages' and other explain helpers.
- const coll = db.server39788;
- coll.drop();
+const coll = db.server39788;
+coll.drop();
- function testPipeline(pipeline, expectedResult, optimizedAwayStages) {
- const explainOutput = coll.explain().aggregate(pipeline);
+function testPipeline(pipeline, expectedResult, optimizedAwayStages) {
+ const explainOutput = coll.explain().aggregate(pipeline);
- assert(explainOutput.hasOwnProperty("stages"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to use an aggregation framework in the explain output: " +
- tojson(explainOutput));
+ assert(explainOutput.hasOwnProperty("stages"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to use an aggregation framework in the explain output: " + tojson(explainOutput));
- if (optimizedAwayStages) {
- optimizedAwayStages.forEach(
- (stage) =>
- assert(!aggPlanHasStage(explainOutput, stage),
- "Expected pipeline " + tojsononeline(pipeline) + " to *not* include a " +
- stage + " stage in the explain output: " + tojson(explainOutput)));
- }
-
- for (let path in expectedResult) {
- const subPaths = path.split(".");
- const stageName = subPaths[0];
- const stages = getAggPlanStages(explainOutput, stageName);
- assert(stages !== null,
- "Expected pipeline " + tojsononeline(pipeline) + " to include a " + stageName +
- " stage in the explain output: " + tojson(explainOutput));
- assert(stages.length == expectedResult[path].length,
- "Expected pipeline " + tojsononeline(pipeline) + " to include " +
- expectedResult[path].length + stageName + " stages in the explain output: " +
- tojson(explainOutput));
- assert.eq(
- stages.reduce(
- (res, stage) => {
- res.push(subPaths.reduce((res, cur) => res[cur], stage));
- return res;
- },
- []),
- expectedResult[path],
- "Stage: " + stageName + ", path: " + path + ", explain: " + tojson(explainOutput));
- }
+ if (optimizedAwayStages) {
+ optimizedAwayStages.forEach(
+ (stage) =>
+ assert(!aggPlanHasStage(explainOutput, stage),
+ "Expected pipeline " + tojsononeline(pipeline) + " to *not* include a " +
+ stage + " stage in the explain output: " + tojson(explainOutput)));
+ }
- // Ensure the aggregate command doesn't fail.
- assert.eq(coll.aggregate(pipeline).toArray(), []);
+ for (let path in expectedResult) {
+ const subPaths = path.split(".");
+ const stageName = subPaths[0];
+ const stages = getAggPlanStages(explainOutput, stageName);
+ assert(stages !== null,
+ "Expected pipeline " + tojsononeline(pipeline) + " to include a " + stageName +
+ " stage in the explain output: " + tojson(explainOutput));
+ assert(stages.length == expectedResult[path].length,
+ "Expected pipeline " + tojsononeline(pipeline) + " to include " +
+ expectedResult[path].length + stageName +
+ " stages in the explain output: " + tojson(explainOutput));
+ assert.eq(
+ stages.reduce(
+ (res, stage) => {
+ res.push(subPaths.reduce((res, cur) => res[cur], stage));
+ return res;
+ },
+ []),
+ expectedResult[path],
+ "Stage: " + stageName + ", path: " + path + ", explain: " + tojson(explainOutput));
}
- // Case where overflow of limit + skip prevents limit stage from being absorbed. Values
-    // are specified as integer literals > MAX_LONG. Note that we cannot specify this huge value as
- // a NumberLong, as we get a number conversion error (even if it's passed as a string).
- testPipeline([{$sort: {x: -1}}, {$skip: 18446744073709552000}, {$limit: 6}],
- {"$limit": [NumberLong(6)], "$skip": [NumberLong("9223372036854775807")]});
- testPipeline([{$sort: {x: -1}}, {$skip: 6}, {$limit: 18446744073709552000}],
- {"$limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(6)]});
+ // Ensure the aggregate command doesn't fail.
+ assert.eq(coll.aggregate(pipeline).toArray(), []);
+}
+
+// Case where overflow of limit + skip prevents limit stage from being absorbed. Values
+// are specified as integer literals > MAX_LONG. Note that we cannot specify this huge value as
+// a NumberLong, as we get a number conversion error (even if it's passed as a string).
+testPipeline([{$sort: {x: -1}}, {$skip: 18446744073709552000}, {$limit: 6}],
+ {"$limit": [NumberLong(6)], "$skip": [NumberLong("9223372036854775807")]});
+testPipeline([{$sort: {x: -1}}, {$skip: 6}, {$limit: 18446744073709552000}],
+ {"$limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(6)]});
- // Case where overflow of limit + skip prevents limit stage from being absorbed. One of the
- // values == MAX_LONG, another one is 1.
- testPipeline([{$sort: {x: -1}}, {$skip: NumberLong("9223372036854775807")}, {$limit: 1}],
- {"$limit": [NumberLong(1)], "$skip": [NumberLong("9223372036854775807")]});
- testPipeline([{$sort: {x: -1}}, {$skip: 1}, {$limit: NumberLong("9223372036854775807")}],
- {"$limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(1)]});
+// Case where overflow of limit + skip prevents limit stage from being absorbed. One of the
+// values == MAX_LONG, another one is 1.
+testPipeline([{$sort: {x: -1}}, {$skip: NumberLong("9223372036854775807")}, {$limit: 1}],
+ {"$limit": [NumberLong(1)], "$skip": [NumberLong("9223372036854775807")]});
+testPipeline([{$sort: {x: -1}}, {$skip: 1}, {$limit: NumberLong("9223372036854775807")}],
+ {"$limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(1)]});
- // Case where limit + skip do not overflow. Limit == MAX_LONG and skip is 0. Should be able to
- // absorb the limit and skip stages.
- // Note that we cannot specify limit == 0, so we expect an error in this case.
- testPipeline([{$sort: {x: -1}}, {$skip: 0}, {$limit: NumberLong("9223372036854775807")}],
- {"$cursor.limit": [NumberLong("9223372036854775807")]},
- ["$skip", "$limit"]);
+// Case where limit + skip do not overflow. Limit == MAX_LONG and skip is 0. Should be able to
+// absorb the limit and skip stages.
+// Note that we cannot specify limit == 0, so we expect an error in this case.
+testPipeline([{$sort: {x: -1}}, {$skip: 0}, {$limit: NumberLong("9223372036854775807")}],
+ {"$cursor.limit": [NumberLong("9223372036854775807")]},
+ ["$skip", "$limit"]);
- // Case where limit + skip do not overflow. One value is MAX_LONG - 1 and another one is 1.
- // Should be able to absorb the limit stage.
- testPipeline([{$sort: {x: -1}}, {$skip: NumberLong("9223372036854775806")}, {$limit: 1}],
- {
- "$cursor.limit": [NumberLong("9223372036854775807")],
- "$skip": [NumberLong("9223372036854775806")]
- },
- ["$limit"]);
- testPipeline([{$sort: {x: -1}}, {$skip: 1}, {$limit: NumberLong("9223372036854775806")}],
- {"$cursor.limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(1)]},
- ["$limit"]);
+// Case where limit + skip do not overflow. One value is MAX_LONG - 1 and another one is 1.
+// Should be able to absorb the limit stage.
+testPipeline([{$sort: {x: -1}}, {$skip: NumberLong("9223372036854775806")}, {$limit: 1}],
+ {
+ "$cursor.limit": [NumberLong("9223372036854775807")],
+ "$skip": [NumberLong("9223372036854775806")]
+ },
+ ["$limit"]);
+testPipeline([{$sort: {x: -1}}, {$skip: 1}, {$limit: NumberLong("9223372036854775806")}],
+ {"$cursor.limit": [NumberLong("9223372036854775807")], "$skip": [NumberLong(1)]},
+ ["$limit"]);
- // Case where limit + skip do not overflow. Both values are < MAX_LONG.
- testPipeline([{$sort: {x: -1}}, {$skip: 674761616283}, {$limit: 35361718}],
- {"$cursor.limit": [NumberLong(674796978001)], "$skip": [NumberLong(674761616283)]},
- ["$limit"]);
- testPipeline([{$sort: {x: -1}}, {$skip: 35361718}, {$limit: 674761616283}],
- {"$cursor.limit": [NumberLong(674796978001)], "$skip": [NumberLong(35361718)]},
- ["$limit"]);
+// Case where limit + skip do not overflow. Both values are < MAX_LONG.
+testPipeline([{$sort: {x: -1}}, {$skip: 674761616283}, {$limit: 35361718}],
+ {"$cursor.limit": [NumberLong(674796978001)], "$skip": [NumberLong(674761616283)]},
+ ["$limit"]);
+testPipeline([{$sort: {x: -1}}, {$skip: 35361718}, {$limit: 674761616283}],
+ {"$cursor.limit": [NumberLong(674796978001)], "$skip": [NumberLong(35361718)]},
+ ["$limit"]);
-    // Case where overflow of limit + skip + skip prevents limit stage from being absorbed.
- // One skip == MAX_LONG - 1, another one is 1. Should merge two skip stages into one.
- testPipeline(
- [{$sort: {x: -1}}, {$skip: 1}, {$skip: NumberLong("9223372036854775806")}, {$limit: 1}],
- {"$limit": [NumberLong(1)], "$skip": [NumberLong("9223372036854775807")]});
+// Case where overflow of limit + skip + skip prevents limit stage from being absorbed.
+// One skip == MAX_LONG - 1, another one is 1. Should merge two skip stages into one.
+testPipeline(
+ [{$sort: {x: -1}}, {$skip: 1}, {$skip: NumberLong("9223372036854775806")}, {$limit: 1}],
+ {"$limit": [NumberLong(1)], "$skip": [NumberLong("9223372036854775807")]});
-    // Case where overflow of limit + skip + skip prevents limit stage from being absorbed.
- // One skip == MAX_LONG, another one is 1. Should not absorb or merge any stages.
- testPipeline(
- [{$sort: {x: -1}}, {$skip: 1}, {$skip: NumberLong("9223372036854775807")}, {$limit: 1}],
- {"$limit": [NumberLong(1)], "$skip": [NumberLong(1), NumberLong("9223372036854775807")]});
+// Case where overflow of limit + skip + skip prevents limit stage from being absorbed.
+// One skip == MAX_LONG, another one is 1. Should not absorb or merge any stages.
+testPipeline(
+ [{$sort: {x: -1}}, {$skip: 1}, {$skip: NumberLong("9223372036854775807")}, {$limit: 1}],
+ {"$limit": [NumberLong(1)], "$skip": [NumberLong(1), NumberLong("9223372036854775807")]});
- // Case where sample size is > MAX_LONG.
- testPipeline([{$sample: {size: 18446744073709552000}}],
- {"$sample.size": [NumberLong("9223372036854775807")]});
- // Case where sample size is == MAX_LONG.
- testPipeline([{$sample: {size: NumberLong("9223372036854775807")}}],
- {"$sample.size": [NumberLong("9223372036854775807")]});
- // Case where sample size is == MAX_LONG - 1.
- testPipeline([{$sample: {size: NumberLong("9223372036854775806")}}],
- {"$sample.size": [NumberLong("9223372036854775806")]});
+// Case where sample size is > MAX_LONG.
+testPipeline([{$sample: {size: 18446744073709552000}}],
+ {"$sample.size": [NumberLong("9223372036854775807")]});
+// Case where sample size is == MAX_LONG.
+testPipeline([{$sample: {size: NumberLong("9223372036854775807")}}],
+ {"$sample.size": [NumberLong("9223372036854775807")]});
+// Case where sample size is == MAX_LONG - 1.
+testPipeline([{$sample: {size: NumberLong("9223372036854775806")}}],
+ {"$sample.size": [NumberLong("9223372036854775806")]});
})();
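
The arithmetic behind the non-overflowing case above: the cursor limit absorbed from a trailing $limit is skip + limit, and absorption is only legal while that sum fits in a signed 64-bit long.

var skip = 674761616283;
var limit = 35361718;
assert.eq(skip + limit, 674796978001);  // matches the absorbed $cursor.limit checked above
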
diff --git a/jstests/aggregation/bugs/sort_arrays.js b/jstests/aggregation/bugs/sort_arrays.js
index 9fbb707decb..e83b4466cc6 100644
--- a/jstests/aggregation/bugs/sort_arrays.js
+++ b/jstests/aggregation/bugs/sort_arrays.js
@@ -1,17 +1,17 @@
// Tests that sorting by a field that contains an array will sort by the minimum element in that
// array.
(function() {
- "use strict";
+"use strict";
- const coll = db.foo;
- coll.drop();
- assert.writeOK(coll.insert([{_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}, {_id: 4, a: [2, 1]}]));
- const expectedOrder = [{_id: 4, a: [2, 1]}, {_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}];
+const coll = db.foo;
+coll.drop();
+assert.writeOK(coll.insert([{_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}, {_id: 4, a: [2, 1]}]));
+const expectedOrder = [{_id: 4, a: [2, 1]}, {_id: 2, a: [2, 3]}, {_id: 3, a: [2, 4]}];
- assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder);
- assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder);
+assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder);
+assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder);
- assert.commandWorked(coll.ensureIndex({a: 1}));
- assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder);
- assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder);
+assert.commandWorked(coll.ensureIndex({a: 1}));
+assert.eq(coll.aggregate([{$sort: {a: 1, _id: 1}}]).toArray(), expectedOrder);
+assert.eq(coll.find().sort({a: 1, _id: 1}).toArray(), expectedOrder);
}());
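
The minimum-element rule in isolation (illustrative collection name): [2, 1] sorts before [2, 3] on an ascending sort because min([2, 1]) = 1.

db.sort_demo.drop();
assert.writeOK(db.sort_demo.insert([{_id: 1, a: [2, 3]}, {_id: 2, a: [2, 1]}]));
assert.eq(db.sort_demo.find().sort({a: 1}).toArray()[0]._id, 2);
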
diff --git a/jstests/aggregation/bugs/substr.js b/jstests/aggregation/bugs/substr.js
index 1090b09dffb..c4eaff7e137 100644
--- a/jstests/aggregation/bugs/substr.js
+++ b/jstests/aggregation/bugs/substr.js
@@ -122,8 +122,8 @@ assert.eq(
a: {
$substrBytes: [
{
- $substrBytes:
- [{$substrBytes: [{$substrBytes: ['abcdefghij', 1, 6]}, 2, 5]}, 0, 3]
+ $substrBytes:
+ [{$substrBytes: [{$substrBytes: ['abcdefghij', 1, 6]}, 2, 5]}, 0, 3]
},
1,
1
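
Evaluating the reformatted expression above inside out, under standard $substrBytes semantics (a length that runs past the end of the string clamps to the end): ['abcdefghij', 1, 6] -> 'bcdefg'; ['bcdefg', 2, 5] -> 'defg'; ['defg', 0, 3] -> 'def'; and the outer ['def', 1, 1] -> 'e'. A self-contained check of that chain, with an illustrative collection name:

db.substr_demo.drop();
assert.writeOK(db.substr_demo.insert({}));
var nested = {
    $substrBytes:
        [{$substrBytes: [{$substrBytes: [{$substrBytes: ['abcdefghij', 1, 6]}, 2, 5]}, 0, 3]}, 1, 1]
};
assert.eq(db.substr_demo.aggregate([{$project: {_id: 0, a: nested}}]).toArray(), [{a: 'e'}]);
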
diff --git a/jstests/aggregation/explain.js b/jstests/aggregation/explain.js
index 3e446afe43a..9203ce83d46 100644
--- a/jstests/aggregation/explain.js
+++ b/jstests/aggregation/explain.js
@@ -1,28 +1,27 @@
// Tests the behavior of explain() when used with the aggregation
// pipeline. Explain() should not read or modify the plan cache.
(function() {
- "use strict";
+"use strict";
- load('jstests/libs/analyze_plan.js'); // For getAggPlanStage().
+load('jstests/libs/analyze_plan.js'); // For getAggPlanStage().
- let coll = db.explain;
- coll.drop();
+let coll = db.explain;
+coll.drop();
- assert.commandWorked(coll.createIndex({x: 1}));
- assert.commandWorked(coll.createIndex({y: 1}));
+assert.commandWorked(coll.createIndex({x: 1}));
+assert.commandWorked(coll.createIndex({y: 1}));
- let result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
- assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
+let result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
+assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
- // At this point, there should be no entries in the plan cache.
- result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
- assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
+// At this point, there should be no entries in the plan cache.
+result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
+assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
-    // Now add an entry to the cache without explain().
- result = coll.aggregate([{$match: {x: 1, y: 1}}]);
-
- // Now there's an entry in the cache, make sure explain() doesn't use it.
- result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
- assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
+// Now add an entry to the cache without explain().
+result = coll.aggregate([{$match: {x: 1, y: 1}}]);
+// Now there's an entry in the cache, make sure explain() doesn't use it.
+result = coll.explain().aggregate([{$match: {x: 1, y: 1}}]);
+assert.eq(null, getAggPlanStage(result, "CACHED_PLAN"));
})();
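
A complementary check, assuming the plan-cache shell helper of this server era (listQueryShapes() was removed in later releases): the cache populated by the plain aggregate() run can also be inspected directly.

var shapes = db.explain.getPlanCache().listQueryShapes();
print('cached query shapes: ' + shapes.length);  // expected non-empty after the plain run
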
diff --git a/jstests/aggregation/explain_limit.js b/jstests/aggregation/explain_limit.js
index e3451dc0c7c..a0dabdc1b02 100644
--- a/jstests/aggregation/explain_limit.js
+++ b/jstests/aggregation/explain_limit.js
@@ -1,80 +1,79 @@
// Tests the behavior of explain() when used with the aggregation pipeline and limits.
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
-
- load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
-
- let coll = db.explain_limit;
-
- const kMultipleSolutionLimit = 101;
- const kCollSize = kMultipleSolutionLimit + 5;
- const kLimit = 10;
-
-    // Return whether or not explain() was successful and contained the appropriate fields given the
- // requested verbosity. Checks that the number of documents examined is correct based on
- // 'multipleSolutions', which indicates there was more than one plan available.
- function checkResults({results, verbosity, multipleSolutions}) {
- let cursorSubdocs = getAggPlanStages(results, "$cursor");
- assert.gt(cursorSubdocs.length, 0);
- for (let stageResult of cursorSubdocs) {
- assert(stageResult.hasOwnProperty("$cursor"));
- let result = stageResult.$cursor;
-
- assert.eq(result.limit, NumberLong(kLimit), tojson(results));
-
- if (verbosity === "queryPlanner") {
- assert(!result.hasOwnProperty("executionStats"), tojson(results));
+"use strict";
+
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
+
+let coll = db.explain_limit;
+
+const kMultipleSolutionLimit = 101;
+const kCollSize = kMultipleSolutionLimit + 5;
+const kLimit = 10;
+
+// Return whether or not explain() was successful and contained the appropriate fields given the
+// requested verbosity. Checks that the number of documents examined is correct based on
+// 'multipleSolutions', which indicates there was more than one plan available.
+function checkResults({results, verbosity, multipleSolutions}) {
+ let cursorSubdocs = getAggPlanStages(results, "$cursor");
+ assert.gt(cursorSubdocs.length, 0);
+ for (let stageResult of cursorSubdocs) {
+ assert(stageResult.hasOwnProperty("$cursor"));
+ let result = stageResult.$cursor;
+
+ assert.eq(result.limit, NumberLong(kLimit), tojson(results));
+
+ if (verbosity === "queryPlanner") {
+ assert(!result.hasOwnProperty("executionStats"), tojson(results));
+ } else {
+ // If it's "executionStats" or "allPlansExecution".
+ if (multipleSolutions) {
+ // If there's more than one plan available, we may run several of them against
+ // each other to see which is fastest. During this, our limit may be ignored
+ // and so explain may return that it examined more documents than we asked it
+ // to.
+ assert.lte(
+ result.executionStats.nReturned, kMultipleSolutionLimit, tojson(results));
+ assert.lte(result.executionStats.totalDocsExamined,
+ kMultipleSolutionLimit,
+ tojson(results));
} else {
- // If it's "executionStats" or "allPlansExecution".
- if (multipleSolutions) {
- // If there's more than one plan available, we may run several of them against
- // each other to see which is fastest. During this, our limit may be ignored
- // and so explain may return that it examined more documents than we asked it
- // to.
- assert.lte(
- result.executionStats.nReturned, kMultipleSolutionLimit, tojson(results));
- assert.lte(result.executionStats.totalDocsExamined,
- kMultipleSolutionLimit,
- tojson(results));
- } else {
- assert.eq(result.executionStats.nReturned, kLimit, tojson(results));
- assert.eq(result.executionStats.totalDocsExamined, kLimit, tojson(results));
- }
+ assert.eq(result.executionStats.nReturned, kLimit, tojson(results));
+ assert.eq(result.executionStats.totalDocsExamined, kLimit, tojson(results));
}
}
}
+}
- // explain() should respect limit.
- coll.drop();
- assert.commandWorked(coll.createIndex({a: 1}));
+// explain() should respect limit.
+coll.drop();
+assert.commandWorked(coll.createIndex({a: 1}));
- for (let i = 0; i < kCollSize; i++) {
- assert.writeOK(coll.insert({a: 1}));
- }
+for (let i = 0; i < kCollSize; i++) {
+ assert.writeOK(coll.insert({a: 1}));
+}
- const pipeline = [{$match: {a: 1}}, {$limit: kLimit}];
+const pipeline = [{$match: {a: 1}}, {$limit: kLimit}];
- let plannerLevel = coll.explain("queryPlanner").aggregate(pipeline);
- checkResults({results: plannerLevel, verbosity: "queryPlanner"});
+let plannerLevel = coll.explain("queryPlanner").aggregate(pipeline);
+checkResults({results: plannerLevel, verbosity: "queryPlanner"});
- let execLevel = coll.explain("executionStats").aggregate(pipeline);
- checkResults({results: execLevel, verbosity: "executionStats", multipleSolutions: false});
+let execLevel = coll.explain("executionStats").aggregate(pipeline);
+checkResults({results: execLevel, verbosity: "executionStats", multipleSolutions: false});
- let allPlansExecLevel = coll.explain("allPlansExecution").aggregate(pipeline);
- checkResults(
- {results: allPlansExecLevel, verbosity: "allPlansExecution", multipleSolutions: false});
+let allPlansExecLevel = coll.explain("allPlansExecution").aggregate(pipeline);
+checkResults(
+ {results: allPlansExecLevel, verbosity: "allPlansExecution", multipleSolutions: false});
- // Create a second index so that more than one plan is available.
- assert.commandWorked(coll.createIndex({a: 1, b: 1}));
+// Create a second index so that more than one plan is available.
+assert.commandWorked(coll.createIndex({a: 1, b: 1}));
- plannerLevel = coll.explain("queryPlanner").aggregate(pipeline);
- checkResults({results: plannerLevel, verbosity: "queryPlanner"});
+plannerLevel = coll.explain("queryPlanner").aggregate(pipeline);
+checkResults({results: plannerLevel, verbosity: "queryPlanner"});
- execLevel = coll.explain("executionStats").aggregate(pipeline);
- checkResults({results: execLevel, verbosity: "executionStats", multipleSolutions: true});
+execLevel = coll.explain("executionStats").aggregate(pipeline);
+checkResults({results: execLevel, verbosity: "executionStats", multipleSolutions: true});
- allPlansExecLevel = coll.explain("allPlansExecution").aggregate(pipeline);
- checkResults(
- {results: allPlansExecLevel, verbosity: "allPlansExecution", multipleSolutions: true});
+allPlansExecLevel = coll.explain("allPlansExecution").aggregate(pipeline);
+checkResults({results: allPlansExecLevel, verbosity: "allPlansExecution", multipleSolutions: true});
})();
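
For reference, a standalone sketch of reading the counters asserted above, runnable in the mongo shell. The collection name is illustrative, and it assumes a single candidate plan (a plain collection scan), so the pushed-down limit bounds both counters.

load("jstests/libs/analyze_plan.js");  // For getAggPlanStages().
const demoColl = db.explain_limit_demo;
demoColl.drop();
for (let i = 0; i < 20; i++) {
    assert.writeOK(demoColl.insert({a: 1}));
}
const demoExplain = demoColl.explain("executionStats").aggregate([{$match: {a: 1}}, {$limit: 5}]);
getAggPlanStages(demoExplain, "$cursor").forEach(function(stage) {
    const stats = stage.$cursor.executionStats;
    assert.lte(stats.nReturned, 5);          // At most the requested limit.
    assert.lte(stats.totalDocsExamined, 5);  // The scan stops once the limit is reached.
});
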
diff --git a/jstests/aggregation/explain_writing_aggs.js b/jstests/aggregation/explain_writing_aggs.js
index 8cf58ba0040..412060bcfa6 100644
--- a/jstests/aggregation/explain_writing_aggs.js
+++ b/jstests/aggregation/explain_writing_aggs.js
@@ -6,90 +6,90 @@
* @tags: [assumes_unsharded_collection, assumes_write_concern_unchanged]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos().
- load("jstests/libs/analyze_plan.js"); // For getAggPlanStage().
- load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode().
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos().
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStage().
+load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode().
- let sourceColl = db.explain_writing_aggs_source;
- let targetColl = db.explain_writing_aggs_target;
- sourceColl.drop();
- targetColl.drop();
+let sourceColl = db.explain_writing_aggs_source;
+let targetColl = db.explain_writing_aggs_target;
+sourceColl.drop();
+targetColl.drop();
- assert.writeOK(sourceColl.insert({_id: 1}));
+assert.writeOK(sourceColl.insert({_id: 1}));
- // Test that $out can be explained with 'queryPlanner' explain verbosity and does not perform
- // any writes.
- let explain = sourceColl.explain("queryPlanner").aggregate([{$out: targetColl.getName()}]);
- let outExplain = getAggPlanStage(explain, "$out");
- assert.neq(outExplain, null, explain);
- assert.eq(outExplain.$out, targetColl.getName(), explain);
- assert.eq(targetColl.find().itcount(), 0, explain);
+// Test that $out can be explained with 'queryPlanner' explain verbosity and does not perform
+// any writes.
+let explain = sourceColl.explain("queryPlanner").aggregate([{$out: targetColl.getName()}]);
+let outExplain = getAggPlanStage(explain, "$out");
+assert.neq(outExplain, null, explain);
+assert.eq(outExplain.$out, targetColl.getName(), explain);
+assert.eq(targetColl.find().itcount(), 0, explain);
- // Test each $merge mode with 'queryPlanner' explain verbosity.
- withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
- const mergeStage = {
- $merge: {
- into: targetColl.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- };
- const explain = sourceColl.explain("queryPlanner").aggregate([mergeStage]);
- const mergeExplain = getAggPlanStage(explain, "$merge");
- assert.neq(mergeExplain, null, explain);
- assert(mergeExplain.hasOwnProperty("$merge"), explain);
- assert.eq(mergeExplain.$merge.whenMatched, whenMatchedMode, mergeExplain);
- assert.eq(mergeExplain.$merge.whenNotMatched, whenNotMatchedMode, mergeExplain);
- assert.eq(mergeExplain.$merge.on, "_id", mergeExplain);
- assert.eq(targetColl.find().itcount(), 0, explain);
- });
+// Test each $merge mode with 'queryPlanner' explain verbosity.
+withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
+ const mergeStage = {
+ $merge: {
+ into: targetColl.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ };
+ const explain = sourceColl.explain("queryPlanner").aggregate([mergeStage]);
+ const mergeExplain = getAggPlanStage(explain, "$merge");
+ assert.neq(mergeExplain, null, explain);
+ assert(mergeExplain.hasOwnProperty("$merge"), explain);
+ assert.eq(mergeExplain.$merge.whenMatched, whenMatchedMode, mergeExplain);
+ assert.eq(mergeExplain.$merge.whenNotMatched, whenNotMatchedMode, mergeExplain);
+ assert.eq(mergeExplain.$merge.on, "_id", mergeExplain);
+ assert.eq(targetColl.find().itcount(), 0, explain);
+});
- function assertExecutionExplainFails(writingStage, verbosity) {
- assert.commandFailedWithCode(db.runCommand({
- explain: {aggregate: sourceColl.getName(), pipeline: [writingStage], cursor: {}},
- verbosity: verbosity
- }),
- [51029, 51184]);
- assert.eq(targetColl.find().itcount(), 0);
- }
+function assertExecutionExplainFails(writingStage, verbosity) {
+ assert.commandFailedWithCode(db.runCommand({
+ explain: {aggregate: sourceColl.getName(), pipeline: [writingStage], cursor: {}},
+ verbosity: verbosity
+ }),
+ [51029, 51184]);
+ assert.eq(targetColl.find().itcount(), 0);
+}
- // Test that 'executionStats' and 'allPlansExecution' level explain fail with each $merge mode.
- // These explain modes must fail, since they would attempt to do writes. Explain must always be
- // read-only (including explain of update and delete, which describe what writes they _would_ do
- // if executed for real).
- withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
- const mergeStage = {
- $merge: {
- into: targetColl.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- };
- assertExecutionExplainFails(mergeStage, "executionStats");
- assertExecutionExplainFails(mergeStage, "allPlansExecution");
- });
+// Test that 'executionStats' and 'allPlansExecution' level explain fail with each $merge mode.
+// These explain modes must fail, since they would attempt to do writes. Explain must always be
+// read-only (including explain of update and delete, which describe what writes they _would_ do
+// if executed for real).
+withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
+ const mergeStage = {
+ $merge: {
+ into: targetColl.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ };
+ assertExecutionExplainFails(mergeStage, "executionStats");
+ assertExecutionExplainFails(mergeStage, "allPlansExecution");
+});
- // Also test the $out stage since it also performs writes.
- assertExecutionExplainFails({$out: targetColl.getName()}, "executionStats");
- assertExecutionExplainFails({$out: targetColl.getName()}, "allPlansExecution");
+// Also test the $out stage since it also performs writes.
+assertExecutionExplainFails({$out: targetColl.getName()}, "executionStats");
+assertExecutionExplainFails({$out: targetColl.getName()}, "allPlansExecution");
- // Execution explain should fail even if the source collection does not exist.
- sourceColl.drop();
- withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
- const mergeStage = {
- $merge: {
- into: targetColl.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- };
- assertExecutionExplainFails(mergeStage, "executionStats");
- assertExecutionExplainFails(mergeStage, "allPlansExecution");
- });
+// Execution explain should fail even if the source collection does not exist.
+sourceColl.drop();
+withEachMergeMode(function({whenMatchedMode, whenNotMatchedMode}) {
+ const mergeStage = {
+ $merge: {
+ into: targetColl.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ };
+ assertExecutionExplainFails(mergeStage, "executionStats");
+ assertExecutionExplainFails(mergeStage, "allPlansExecution");
+});
- // Also test the $out stage since it also performs writes.
- assertExecutionExplainFails({$out: targetColl.getName()}, "executionStats");
- assertExecutionExplainFails({$out: targetColl.getName()}, "allPlansExecution");
+// Also test the $out stage since it also performs writes.
+assertExecutionExplainFails({$out: targetColl.getName()}, "executionStats");
+assertExecutionExplainFails({$out: targetColl.getName()}, "allPlansExecution");
}());
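
A minimal shell sketch of the read-only guarantee the assertions above rely on: an execution-level explain of a writing stage fails without touching the target, while actually running the pipeline does write. Collection names here are illustrative.

const src = db.explain_writing_demo_source;
const tgt = db.explain_writing_demo_target;
src.drop();
tgt.drop();
assert.writeOK(src.insert({_id: 1}));
assert.commandFailed(db.runCommand({
    explain: {aggregate: src.getName(), pipeline: [{$out: tgt.getName()}], cursor: {}},
    verbosity: "executionStats"
}));
assert.eq(tgt.find().itcount(), 0);  // The failed explain wrote nothing.
src.aggregate([{$out: tgt.getName()}]);
assert.eq(tgt.find().itcount(), 1);  // The real pipeline does write.
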
diff --git a/jstests/aggregation/expressions/arrayToObject.js b/jstests/aggregation/expressions/arrayToObject.js
index 114d69b7aaa..df78b9f1aaf 100644
--- a/jstests/aggregation/expressions/arrayToObject.js
+++ b/jstests/aggregation/expressions/arrayToObject.js
@@ -1,77 +1,75 @@
// Tests for $arrayToObject aggregation expression.
(function() {
- "use strict";
+"use strict";
- // For assertErrorCode().
- load("jstests/aggregation/extras/utils.js");
+// For assertErrorCode().
+load("jstests/aggregation/extras/utils.js");
- let coll = db.array_to_object_expr;
- coll.drop();
+let coll = db.array_to_object_expr;
+coll.drop();
- // Write one document so that the aggregations which use $const produce a result.
- assert.writeOK(coll.insert({_id: "sentinel", a: 1}));
+// Write one document so that the aggregations which use $const produce a result.
+assert.writeOK(coll.insert({_id: "sentinel", a: 1}));
- /*
- * Check that the collapsed, object form of 'expanded' (which is computed using $arrayToObject)
- * matches our expectation.
- */
- function assertCollapsed(expanded, expectedCollapsed) {
- const result =
- coll.aggregate(
- [{$project: {collapsed: {$arrayToObject: {$const: expanded}}}}, {$limit: 1}])
- .toArray();
- assert.eq(result, [{_id: "sentinel", collapsed: expectedCollapsed}]);
- }
+/*
+ * Check that the collapsed, object form of 'expanded' (which is computed using $arrayToObject)
+ * matches our expectation.
+ */
+function assertCollapsed(expanded, expectedCollapsed) {
+ const result =
+ coll.aggregate([{$project: {collapsed: {$arrayToObject: {$const: expanded}}}}, {$limit: 1}])
+ .toArray();
+ assert.eq(result, [{_id: "sentinel", collapsed: expectedCollapsed}]);
+}
- /*
- * Check that $arrayToObject on the given value produces the expected error.
- */
- function assertPipelineErrors(expanded, errorCode) {
- assertErrorCode(
- coll,
- [{$project: {collapsed: {$arrayToObject: {$const: expanded}}}}, {$limit: 1}],
- errorCode);
- }
+/*
+ * Check that $arrayToObject on the given value produces the expected error.
+ */
+function assertPipelineErrors(expanded, errorCode) {
+ assertErrorCode(coll,
+ [{$project: {collapsed: {$arrayToObject: {$const: expanded}}}}, {$limit: 1}],
+ errorCode);
+}
- // $arrayToObject correctly converts an array of key-value pairs to an object.
- assertCollapsed([["price", 24], ["item", "apple"]], {"price": 24, "item": "apple"});
- assertCollapsed([{"k": "price", "v": 24}, {"k": "item", "v": "apple"}],
- {"price": 24, "item": "apple"});
- // If duplicate field names are in the array, $arrayToObject should use value from the last one.
- assertCollapsed([{"k": "price", "v": 24}, {"k": "price", "v": 100}], {"price": 100});
- assertCollapsed([["price", 24], ["price", 100]], {"price": 100});
+// $arrayToObject correctly converts an array of key-value pairs to an object.
+assertCollapsed([["price", 24], ["item", "apple"]], {"price": 24, "item": "apple"});
+assertCollapsed([{"k": "price", "v": 24}, {"k": "item", "v": "apple"}],
+ {"price": 24, "item": "apple"});
+// If duplicate field names are in the array, $arrayToObject should use value from the last one.
+assertCollapsed([{"k": "price", "v": 24}, {"k": "price", "v": 100}], {"price": 100});
+assertCollapsed([["price", 24], ["price", 100]], {"price": 100});
- assertCollapsed([["price", 24], ["item", "apple"]], {"price": 24, "item": "apple"});
- assertCollapsed([], {});
+assertCollapsed([["price", 24], ["item", "apple"]], {"price": 24, "item": "apple"});
+assertCollapsed([], {});
- assertCollapsed(null, null);
- assertCollapsed(undefined, null);
- assertCollapsed([{"k": "price", "v": null}], {"price": null});
- assertCollapsed([{"k": "price", "v": undefined}], {"price": undefined});
- // Need to manually check the case where 'expanded' is not in the document.
- assert.commandWorked(coll.insert({_id: "missing-expanded-field"}));
- const result = coll.aggregate([
- {$match: {_id: "missing-expanded-field"}},
- {$project: {collapsed: {$arrayToObject: "$expanded"}}}
- ])
- .toArray();
- assert.eq(result, [{_id: "missing-expanded-field", collapsed: null}]);
+assertCollapsed(null, null);
+assertCollapsed(undefined, null);
+assertCollapsed([{"k": "price", "v": null}], {"price": null});
+assertCollapsed([{"k": "price", "v": undefined}], {"price": undefined});
+// Need to manually check the case where 'expanded' is not in the document.
+assert.commandWorked(coll.insert({_id: "missing-expanded-field"}));
+const result = coll.aggregate([
+ {$match: {_id: "missing-expanded-field"}},
+ {$project: {collapsed: {$arrayToObject: "$expanded"}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: "missing-expanded-field", collapsed: null}]);
- assertPipelineErrors([{"k": "price", "v": 24}, ["item", "apple"]], 40391);
- assertPipelineErrors([["item", "apple"], {"k": "price", "v": 24}], 40396);
- assertPipelineErrors("string", 40386);
- assertPipelineErrors(ObjectId(), 40386);
- assertPipelineErrors(NumberLong(0), 40386);
- assertPipelineErrors([0], 40398);
- assertPipelineErrors([["missing_value"]], 40397);
- assertPipelineErrors([[321, 12]], 40395);
- assertPipelineErrors([["key", "value", "offset"]], 40397);
- assertPipelineErrors({y: []}, 40386);
- assertPipelineErrors([{y: "x", x: "y"}], 40393);
- assertPipelineErrors([{k: "missing"}], 40392);
- assertPipelineErrors([{k: 24, v: "string"}], 40394);
- assertPipelineErrors([{k: null, v: "nullKey"}], 40394);
- assertPipelineErrors([{k: undefined, v: "undefinedKey"}], 40394);
- assertPipelineErrors([{y: "ignored", k: "item", v: "pear"}], 40392);
- assertPipelineErrors(NaN, 40386);
+assertPipelineErrors([{"k": "price", "v": 24}, ["item", "apple"]], 40391);
+assertPipelineErrors([["item", "apple"], {"k": "price", "v": 24}], 40396);
+assertPipelineErrors("string", 40386);
+assertPipelineErrors(ObjectId(), 40386);
+assertPipelineErrors(NumberLong(0), 40386);
+assertPipelineErrors([0], 40398);
+assertPipelineErrors([["missing_value"]], 40397);
+assertPipelineErrors([[321, 12]], 40395);
+assertPipelineErrors([["key", "value", "offset"]], 40397);
+assertPipelineErrors({y: []}, 40386);
+assertPipelineErrors([{y: "x", x: "y"}], 40393);
+assertPipelineErrors([{k: "missing"}], 40392);
+assertPipelineErrors([{k: 24, v: "string"}], 40394);
+assertPipelineErrors([{k: null, v: "nullKey"}], 40394);
+assertPipelineErrors([{k: undefined, v: "undefinedKey"}], 40394);
+assertPipelineErrors([{y: "ignored", k: "item", v: "pear"}], 40392);
+assertPipelineErrors(NaN, 40386);
}());
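
A short interactive sketch of the two accepted input shapes and the last-duplicate-wins rule exercised above; 'array_to_object_demo' is an illustrative collection name.

const demoColl = db.array_to_object_demo;
demoColl.drop();
assert.writeOK(demoColl.insert({_id: 0}));
const demoOut = demoColl.aggregate([{
    $project: {
        fromPairs: {$arrayToObject: {$const: [["item", "apple"], ["qty", 5]]}},
        fromKeyValue: {$arrayToObject: {$const: [{k: "item", v: "apple"}, {k: "item", v: "pear"}]}}
    }
}]).toArray()[0];
assert.eq(demoOut.fromPairs, {item: "apple", qty: 5});
assert.eq(demoOut.fromKeyValue, {item: "pear"});  // The last value for a duplicate key wins.
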
diff --git a/jstests/aggregation/expressions/collation_expressions.js b/jstests/aggregation/expressions/collation_expressions.js
index 93f2ada0197..dc959791f2f 100644
--- a/jstests/aggregation/expressions/collation_expressions.js
+++ b/jstests/aggregation/expressions/collation_expressions.js
@@ -3,133 +3,135 @@
// Test that expressions which can make string comparisons respect the collation.
(function() {
- "use strict";
+"use strict";
- // For testExpression() and testExpressionWithCollation().
- load("jstests/aggregation/extras/utils.js");
-
- var coll = db.collation_expressions;
- coll.drop();
-
- var results;
- const caseInsensitive = {locale: "en_US", strength: 2};
- const numericOrdering = {locale: "en_US", numericOrdering: true};
-
- // Test that $cmp respects the collection-default collation.
- assert.commandWorked(db.createCollection(coll.getName(), {collation: caseInsensitive}));
- testExpression(coll, {$cmp: ["a", "A"]}, 0);
-
- coll.drop();
-
- // Test that $cmp respects the collation.
- testExpressionWithCollation(coll, {$cmp: ["a", "A"]}, 0, caseInsensitive);
-
- // Test that $eq respects the collation.
- testExpressionWithCollation(coll, {$eq: ["a", "A"]}, true, caseInsensitive);
-
- // Test that $ne respects the collation.
- testExpressionWithCollation(coll, {$ne: ["a", "A"]}, false, caseInsensitive);
-
- // Test that $lt respects the collation.
- testExpressionWithCollation(coll, {$lt: ["2", "10"]}, true, numericOrdering);
-
- // Test that $lte respects the collation.
- testExpressionWithCollation(coll, {$lte: ["2", "10"]}, true, numericOrdering);
- testExpressionWithCollation(coll, {$lte: ["b", "B"]}, true, caseInsensitive);
-
- // Test that $gt respects the collation.
- testExpressionWithCollation(coll, {$gt: ["2", "10"]}, false, numericOrdering);
-
- // Test that $gte respects the collation.
- testExpressionWithCollation(coll, {$gte: ["2", "10"]}, false, numericOrdering);
- testExpressionWithCollation(coll, {$gte: ["b", "B"]}, true, caseInsensitive);
-
- // Test that $in respects the collation.
- testExpressionWithCollation(coll, {$in: ["A", [1, 2, "a", 3, 4]]}, true, caseInsensitive);
-
- // Test that $indexOfArray respects the collation.
- testExpressionWithCollation(
- coll, {$indexOfArray: [[1, 2, "a", "b", "c", "B"], "B"]}, 3, caseInsensitive);
-
- // Test that $indexOfBytes doesn't respect the collation.
- testExpressionWithCollation(coll, {$indexOfBytes: ["12abcB", "B"]}, 5, caseInsensitive);
-
- // Test that $indexOfCP doesn't respect the collation.
- testExpressionWithCollation(coll, {$indexOfCP: ["12abcB", "B"]}, 5, caseInsensitive);
-
- // Test that $strcasecmp doesn't respect the collation.
- testExpressionWithCollation(coll, {$strcasecmp: ["100", "2"]}, -1, numericOrdering);
-
- // Test that $setEquals respects the collation.
- testExpressionWithCollation(
- coll, {$setEquals: [["a", "B"], ["b", "A"]]}, true, caseInsensitive);
-
- // Test that $setIntersection respects the collation.
- results =
- coll.aggregate([{$project: {out: {$setIntersection: [["a", "B", "c"], ["d", "b", "A"]]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(2, results[0].out.length);
-
- // Test that $setUnion respects the collation.
- results = coll.aggregate([{$project: {out: {$setUnion: [["a", "B", "c"], ["d", "b", "A"]]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(4, results[0].out.length);
-
- // Test that $setDifference respects the collation.
- testExpressionWithCollation(
- coll, {$setDifference: [["a", "B"], ["b", "A"]]}, [], caseInsensitive);
-
- // Test that $setIsSubset respects the collation.
- testExpressionWithCollation(
- coll, {$setIsSubset: [["a", "B"], ["b", "A", "c"]]}, true, caseInsensitive);
-
- // Test that $split doesn't respect the collation.
- testExpressionWithCollation(coll, {$split: ["abc", "B"]}, ["abc"], caseInsensitive);
-
- // Test that an $and which can be optimized out respects the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, str: "A"}));
- results = coll.aggregate([{$project: {out: {$and: [{$eq: ["$str", "a"]}, {$eq: ["b", "B"]}]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(true, results[0].out);
-
- // Test that an $and which cannot be optimized out respects the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, str: "A", str2: "B"}));
- results =
- coll.aggregate([{$project: {out: {$and: [{$eq: ["$str", "a"]}, {$eq: ["$str2", "b"]}]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(true, results[0].out);
-
- // Test that an $or which can be optimized out respects the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, str: "A"}));
- results = coll.aggregate([{$project: {out: {$or: [{$eq: ["$str", "a"]}, {$eq: ["b", "c"]}]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(true, results[0].out);
-
- // Test that an $or which cannot be optimized out respects the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, str: "A", str2: "B"}));
- results =
- coll.aggregate([{$project: {out: {$or: [{$eq: ["$str", "c"]}, {$eq: ["$str2", "b"]}]}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(true, results[0].out);
-
- // Test that $filter's subexpressions respect the collation.
- testExpressionWithCollation(coll,
+// For testExpression() and testExpressionWithCollation().
+load("jstests/aggregation/extras/utils.js");
+
+var coll = db.collation_expressions;
+coll.drop();
+
+var results;
+const caseInsensitive = {
+ locale: "en_US",
+ strength: 2
+};
+const numericOrdering = {
+ locale: "en_US",
+ numericOrdering: true
+};
+
+// Test that $cmp respects the collection-default collation.
+assert.commandWorked(db.createCollection(coll.getName(), {collation: caseInsensitive}));
+testExpression(coll, {$cmp: ["a", "A"]}, 0);
+
+coll.drop();
+
+// Test that $cmp respects the collation.
+testExpressionWithCollation(coll, {$cmp: ["a", "A"]}, 0, caseInsensitive);
+
+// Test that $eq respects the collation.
+testExpressionWithCollation(coll, {$eq: ["a", "A"]}, true, caseInsensitive);
+
+// Test that $ne respects the collation.
+testExpressionWithCollation(coll, {$ne: ["a", "A"]}, false, caseInsensitive);
+
+// Test that $lt respects the collation.
+testExpressionWithCollation(coll, {$lt: ["2", "10"]}, true, numericOrdering);
+
+// Test that $lte respects the collation.
+testExpressionWithCollation(coll, {$lte: ["2", "10"]}, true, numericOrdering);
+testExpressionWithCollation(coll, {$lte: ["b", "B"]}, true, caseInsensitive);
+
+// Test that $gt respects the collation.
+testExpressionWithCollation(coll, {$gt: ["2", "10"]}, false, numericOrdering);
+
+// Test that $gte respects the collation.
+testExpressionWithCollation(coll, {$gte: ["2", "10"]}, false, numericOrdering);
+testExpressionWithCollation(coll, {$gte: ["b", "B"]}, true, caseInsensitive);
+
+// Test that $in respects the collation.
+testExpressionWithCollation(coll, {$in: ["A", [1, 2, "a", 3, 4]]}, true, caseInsensitive);
+
+// Test that $indexOfArray respects the collation.
+testExpressionWithCollation(
+ coll, {$indexOfArray: [[1, 2, "a", "b", "c", "B"], "B"]}, 3, caseInsensitive);
+
+// Test that $indexOfBytes doesn't respect the collation.
+testExpressionWithCollation(coll, {$indexOfBytes: ["12abcB", "B"]}, 5, caseInsensitive);
+
+// Test that $indexOfCP doesn't respect the collation.
+testExpressionWithCollation(coll, {$indexOfCP: ["12abcB", "B"]}, 5, caseInsensitive);
+
+// Test that $strcasecmp doesn't respect the collation.
+testExpressionWithCollation(coll, {$strcasecmp: ["100", "2"]}, -1, numericOrdering);
+
+// Test that $setEquals respects the collation.
+testExpressionWithCollation(coll, {$setEquals: [["a", "B"], ["b", "A"]]}, true, caseInsensitive);
+
+// Test that $setIntersection respects the collation.
+results =
+ coll.aggregate([{$project: {out: {$setIntersection: [["a", "B", "c"], ["d", "b", "A"]]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(2, results[0].out.length);
+
+// Test that $setUnion respects the collation.
+results = coll.aggregate([{$project: {out: {$setUnion: [["a", "B", "c"], ["d", "b", "A"]]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(4, results[0].out.length);
+
+// Test that $setDifference respects the collation.
+testExpressionWithCollation(coll, {$setDifference: [["a", "B"], ["b", "A"]]}, [], caseInsensitive);
+
+// Test that $setIsSubset respects the collation.
+testExpressionWithCollation(
+ coll, {$setIsSubset: [["a", "B"], ["b", "A", "c"]]}, true, caseInsensitive);
+
+// Test that $split doesn't respect the collation.
+testExpressionWithCollation(coll, {$split: ["abc", "B"]}, ["abc"], caseInsensitive);
+
+// Test that an $and which can be optimized out respects the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, str: "A"}));
+results = coll.aggregate([{$project: {out: {$and: [{$eq: ["$str", "a"]}, {$eq: ["b", "B"]}]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(true, results[0].out);
+
+// Test that an $and which cannot be optimized out respects the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, str: "A", str2: "B"}));
+results = coll.aggregate([{$project: {out: {$and: [{$eq: ["$str", "a"]}, {$eq: ["$str2", "b"]}]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(true, results[0].out);
+
+// Test that an $or which can be optimized out respects the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, str: "A"}));
+results = coll.aggregate([{$project: {out: {$or: [{$eq: ["$str", "a"]}, {$eq: ["b", "c"]}]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(true, results[0].out);
+
+// Test that an $or which cannot be optimized out respects the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, str: "A", str2: "B"}));
+results = coll.aggregate([{$project: {out: {$or: [{$eq: ["$str", "c"]}, {$eq: ["$str2", "b"]}]}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(true, results[0].out);
+
+// Test that $filter's subexpressions respect the collation.
+testExpressionWithCollation(coll,
{
$filter: {
input: {
@@ -146,8 +148,8 @@
["a", "A", "c", "C"],
caseInsensitive);
- // Test that $let's subexpressions respect the collation.
- testExpressionWithCollation(coll,
+// Test that $let's subexpressions respect the collation.
+testExpressionWithCollation(coll,
{
$let: {
vars: {str: {$cond: [{$eq: ["A", "a"]}, "b", "c"]}},
@@ -157,8 +159,8 @@
"d",
caseInsensitive);
- // Test that $map's subexpressions respect the collation.
- testExpressionWithCollation(
+// Test that $map's subexpressions respect the collation.
+testExpressionWithCollation(
coll,
{
$map: {
@@ -170,18 +172,18 @@
[true, false, true, false],
caseInsensitive);
- // Test that $group stage's _id expressions respect the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1}));
- results = coll.aggregate([{$group: {_id: {a: {$eq: ["a", "A"]}, b: {$eq: ["b", "B"]}}}}],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq(true, results[0]._id.a);
- assert.eq(true, results[0]._id.b);
-
- // Test that $reduce's subexpressions respect the collation.
- testExpressionWithCollation(
+// Test that $group stage's _id expressions respect the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1}));
+results = coll.aggregate([{$group: {_id: {a: {$eq: ["a", "A"]}, b: {$eq: ["b", "B"]}}}}],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(true, results[0]._id.a);
+assert.eq(true, results[0]._id.b);
+
+// Test that $reduce's subexpressions respect the collation.
+testExpressionWithCollation(
coll,
{
$reduce: {
@@ -195,50 +197,50 @@
{sum: 7},
caseInsensitive);
- // Test that $switch's subexpressions respect the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, a: "A"}));
- assert.writeOK(coll.insert({_id: 2, b: "B"}));
- assert.writeOK(coll.insert({_id: 3, c: "C"}));
- results = coll.aggregate([{
- $project: {
- out: {
- $switch: {
- branches: [
- {case: {$eq: ["$a", "a"]}, then: "foo"},
- {case: {$eq: ["$b", "b"]}, then: "bar"}
- ],
- default: "baz"
- }
- }
- }
- }],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(3, results.length);
- assert.eq("foo", results[0].out);
- assert.eq("bar", results[1].out);
- assert.eq("baz", results[2].out);
-
- // Test that a $zip's subexpressions respect the collation.
- coll.drop();
- assert.writeOK(coll.insert({_id: 0, evens: [0, 2, 4], odds: [1, 3]}));
- results = coll.aggregate([{
- $project: {
- out: {
- $zip: {
- inputs: [
- {$cond: [{$eq: ["A", "a"]}, "$evens", "$odds"]},
- {$cond: [{$eq: ["B", "b"]}, "$odds", "$evens"]}
- ],
- defaults: [0, {$cond: [{$eq: ["C", "c"]}, 5, 7]}],
- useLongestLength: true
- }
- }
- }
- }],
- {collation: caseInsensitive})
- .toArray();
- assert.eq(1, results.length);
- assert.eq([[0, 1], [2, 3], [4, 5]], results[0].out);
+// Test that $switch's subexpressions respect the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, a: "A"}));
+assert.writeOK(coll.insert({_id: 2, b: "B"}));
+assert.writeOK(coll.insert({_id: 3, c: "C"}));
+results = coll.aggregate([{
+ $project: {
+ out: {
+ $switch: {
+ branches: [
+ {case: {$eq: ["$a", "a"]}, then: "foo"},
+ {case: {$eq: ["$b", "b"]}, then: "bar"}
+ ],
+ default: "baz"
+ }
+ }
+ }
+ }],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(3, results.length);
+assert.eq("foo", results[0].out);
+assert.eq("bar", results[1].out);
+assert.eq("baz", results[2].out);
+
+// Test that a $zip's subexpressions respect the collation.
+coll.drop();
+assert.writeOK(coll.insert({_id: 0, evens: [0, 2, 4], odds: [1, 3]}));
+results = coll.aggregate([{
+ $project: {
+ out: {
+ $zip: {
+ inputs: [
+ {$cond: [{$eq: ["A", "a"]}, "$evens", "$odds"]},
+ {$cond: [{$eq: ["B", "b"]}, "$odds", "$evens"]}
+ ],
+ defaults: [0, {$cond: [{$eq: ["C", "c"]}, 5, 7]}],
+ useLongestLength: true
+ }
+ }
+ }
+ }],
+ {collation: caseInsensitive})
+ .toArray();
+assert.eq(1, results.length);
+assert.eq([[0, 1], [2, 3], [4, 5]], results[0].out);
})();
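
A minimal sketch of the pattern this file exercises: the same $eq comparison flips from false to true once a case-insensitive collation is supplied ('collation_demo' is an illustrative collection name).

const demoColl = db.collation_demo;
demoColl.drop();
assert.writeOK(demoColl.insert({_id: 1}));
// With the default (simple) collation, "a" and "A" compare unequal.
assert.eq(false, demoColl.aggregate([{$project: {out: {$eq: ["a", "A"]}}}]).toArray()[0].out);
// With a strength-2 (case-insensitive) collation, they compare equal.
assert.eq(true,
          demoColl
              .aggregate([{$project: {out: {$eq: ["a", "A"]}}}],
                         {collation: {locale: "en_US", strength: 2}})
              .toArray()[0]
              .out);
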
diff --git a/jstests/aggregation/expressions/convert.js b/jstests/aggregation/expressions/convert.js
index b32c6639751..4e56bf16265 100644
--- a/jstests/aggregation/expressions/convert.js
+++ b/jstests/aggregation/expressions/convert.js
@@ -2,325 +2,312 @@
* Tests behavior of $convert aggregation operator.
*/
(function() {
- "use strict";
-
- const coll = db.expression_convert;
- function populateCollection(documentList) {
- coll.drop();
- var bulk = coll.initializeOrderedBulkOp();
- documentList.forEach(doc => bulk.insert(doc));
- assert.writeOK(bulk.execute());
- }
-
- //
- // One test document for each possible conversion. Edge cases for these conversions are tested
- // in expression_convert_test.cpp.
- //
- var conversionTestDocs = [
- {_id: 0, input: 1.9, target: "double", expected: 1.9},
- {_id: 1, input: 1.9, target: "string", expected: "1.9"},
- {_id: 2, input: 1.9, target: "bool", expected: true},
- {_id: 3, input: 1.9, target: "date", expected: ISODate("1970-01-01T00:00:00.001Z")},
- {_id: 4, input: 1.9, target: "int", expected: NumberInt(1)},
- {_id: 5, input: 1.9, target: "long", expected: NumberLong(1)},
- {_id: 6, input: 1.9, target: "decimal", expected: NumberDecimal(1.9)},
-
- {_id: 7, input: "1.9", target: "double", expected: 1.9},
- {_id: 8, input: "str", target: "string", expected: "str"},
- {
- _id: 9,
- input: "0123456789abcdef01234567",
- target: "objectId",
- expected: ObjectId("0123456789abcdef01234567")
- },
- {_id: 10, input: "", target: "bool", expected: true},
- {
- _id: 11,
- input: "1970-01-01T00:00:00.001Z",
- target: "date",
- expected: ISODate("1970-01-01T00:00:00.001Z")
- },
- {_id: 12, input: "1", target: "int", expected: NumberInt(1)},
- {_id: 13, input: "1", target: "long", expected: NumberLong(1)},
- {_id: 14, input: "1.9", target: "decimal", expected: NumberDecimal("1.9")},
-
- {
- _id: 15,
- input: ObjectId("0123456789abcdef01234567"),
- target: "string",
- expected: "0123456789abcdef01234567"
- },
- {_id: 16, input: ObjectId("0123456789abcdef01234567"), target: "bool", expected: true},
- {
- _id: 17,
- input: ObjectId("0123456789abcdef01234567"),
- target: "objectId",
- expected: ObjectId("0123456789abcdef01234567")
- },
- {
- _id: 18,
- input: ObjectId("0123456789abcdef01234567"),
- target: "date",
- expected: ISODate("1970-08-09T22:25:43Z")
- },
-
- {_id: 19, input: false, target: "double", expected: 0.0},
- {_id: 20, input: false, target: "string", expected: "false"},
- {_id: 21, input: false, target: "bool", expected: false},
- {_id: 22, input: false, target: "int", expected: NumberInt(0)},
- {_id: 23, input: false, target: "long", expected: NumberLong(0)},
- {_id: 24, input: false, target: "decimal", expected: NumberDecimal(0)},
-
- {_id: 25, input: ISODate("1970-01-01T00:00:00.123Z"), target: "double", expected: 123.0},
- {
- _id: 26,
- input: ISODate("1970-01-01T00:00:00.123Z"),
- target: "string",
- expected: "1970-01-01T00:00:00.123Z"
- },
- {_id: 27, input: ISODate("1970-01-01T00:00:00.123Z"), target: "bool", expected: true},
- {
- _id: 28,
- input: ISODate("1970-01-01T00:00:00.123Z"),
- target: "date",
- expected: ISODate("1970-01-01T00:00:00.123Z")
- },
- {
- _id: 29,
- input: ISODate("1970-01-01T00:00:00.123Z"),
- target: "long",
- expected: NumberLong(123)
- },
- {
- _id: 30,
- input: ISODate("1970-01-01T00:00:00.123Z"),
- target: "decimal",
- expected: NumberDecimal("123")
- },
-
- {_id: 31, input: NumberInt(1), target: "double", expected: 1.0},
- {_id: 32, input: NumberInt(1), target: "string", expected: "1"},
- {_id: 33, input: NumberInt(1), target: "bool", expected: true},
- {_id: 34, input: NumberInt(1), target: "int", expected: NumberInt(1)},
- {_id: 35, input: NumberInt(1), target: "long", expected: NumberLong(1)},
- {_id: 36, input: NumberInt(1), target: "decimal", expected: NumberDecimal("1")},
-
- {_id: 37, input: NumberLong(1), target: "double", expected: 1.0},
- {_id: 38, input: NumberLong(1), target: "string", expected: "1"},
- {_id: 39, input: NumberLong(1), target: "bool", expected: true},
- {
- _id: 40,
- input: NumberLong(1),
- target: "date",
- expected: ISODate("1970-01-01T00:00:00.001Z")
- },
- {_id: 41, input: NumberLong(1), target: "int", expected: NumberInt(1)},
- {_id: 42, input: NumberLong(1), target: "long", expected: NumberLong(1)},
- {_id: 43, input: NumberLong(1), target: "decimal", expected: NumberDecimal("1")},
-
- {_id: 44, input: NumberDecimal("1.9"), target: "double", expected: 1.9},
- {_id: 45, input: NumberDecimal("1.9"), target: "string", expected: "1.9"},
- {_id: 46, input: NumberDecimal("1.9"), target: "bool", expected: true},
- {
- _id: 47,
- input: NumberDecimal("1.9"),
- target: "date",
- expected: ISODate("1970-01-01T00:00:00.001Z")
- },
- {_id: 48, input: NumberDecimal("1.9"), target: "int", expected: NumberInt(1)},
- {_id: 49, input: NumberDecimal("1.9"), target: "long", expected: NumberLong(1)},
- {_id: 50, input: NumberDecimal("1.9"), target: "decimal", expected: NumberDecimal("1.9")},
-
- {_id: 51, input: MinKey, target: "bool", expected: true},
- {_id: 52, input: {foo: 1, bar: 2}, target: "bool", expected: true},
- {_id: 53, input: [1, 2], target: "bool", expected: true},
- {
- _id: 54,
- input: BinData(0, "BBBBBBBBBBBBBBBBBBBBBBBBBBBB"),
- target: "bool",
- expected: true
- },
- {_id: 55, input: /B*/, target: "bool", expected: true},
- {_id: 56, input: new DBRef("db.test", "oid"), target: "bool", expected: true},
- {_id: 57, input: function() {}, target: "bool", expected: true},
- // Symbol and CodeWScope are not supported from JavaScript, so we can't test them here.
- {_id: 58, input: new Timestamp(1 / 1000, 1), target: "bool", expected: true},
- {_id: 59, input: MinKey, target: "bool", expected: true}
- ];
- populateCollection(conversionTestDocs);
-
- // Test $convert on each document.
- var pipeline = [
- {
- $project: {
- output: {$convert: {to: "$target", input: "$input"}},
- target: "$target",
- expected: "$expected"
- }
- },
- {$addFields: {outputType: {$type: "$output"}}},
- {$sort: {_id: 1}}
- ];
- var aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, conversionTestDocs.length);
-
- aggResult.forEach(doc => {
- assert.eq(doc.output, doc.expected, "Unexpected conversion: _id = " + doc._id);
- assert.eq(doc.outputType, doc.target, "Conversion to incorrect type: _id = " + doc._id);
- });
-
- // Test each conversion using the shorthand $toBool, $toString, etc. syntax.
+"use strict";
+
+const coll = db.expression_convert;
+function populateCollection(documentList) {
+ coll.drop();
+ var bulk = coll.initializeOrderedBulkOp();
+ documentList.forEach(doc => bulk.insert(doc));
+ assert.writeOK(bulk.execute());
+}
+
+//
+// One test document for each possible conversion. Edge cases for these conversions are tested
+// in expression_convert_test.cpp.
+//
+var conversionTestDocs = [
+ {_id: 0, input: 1.9, target: "double", expected: 1.9},
+ {_id: 1, input: 1.9, target: "string", expected: "1.9"},
+ {_id: 2, input: 1.9, target: "bool", expected: true},
+ {_id: 3, input: 1.9, target: "date", expected: ISODate("1970-01-01T00:00:00.001Z")},
+ {_id: 4, input: 1.9, target: "int", expected: NumberInt(1)},
+ {_id: 5, input: 1.9, target: "long", expected: NumberLong(1)},
+ {_id: 6, input: 1.9, target: "decimal", expected: NumberDecimal(1.9)},
+
+ {_id: 7, input: "1.9", target: "double", expected: 1.9},
+ {_id: 8, input: "str", target: "string", expected: "str"},
+ {
+ _id: 9,
+ input: "0123456789abcdef01234567",
+ target: "objectId",
+ expected: ObjectId("0123456789abcdef01234567")
+ },
+ {_id: 10, input: "", target: "bool", expected: true},
+ {
+ _id: 11,
+ input: "1970-01-01T00:00:00.001Z",
+ target: "date",
+ expected: ISODate("1970-01-01T00:00:00.001Z")
+ },
+ {_id: 12, input: "1", target: "int", expected: NumberInt(1)},
+ {_id: 13, input: "1", target: "long", expected: NumberLong(1)},
+ {_id: 14, input: "1.9", target: "decimal", expected: NumberDecimal("1.9")},
+
+ {
+ _id: 15,
+ input: ObjectId("0123456789abcdef01234567"),
+ target: "string",
+ expected: "0123456789abcdef01234567"
+ },
+ {_id: 16, input: ObjectId("0123456789abcdef01234567"), target: "bool", expected: true},
+ {
+ _id: 17,
+ input: ObjectId("0123456789abcdef01234567"),
+ target: "objectId",
+ expected: ObjectId("0123456789abcdef01234567")
+ },
+ {
+ _id: 18,
+ input: ObjectId("0123456789abcdef01234567"),
+ target: "date",
+ expected: ISODate("1970-08-09T22:25:43Z")
+ },
+
+ {_id: 19, input: false, target: "double", expected: 0.0},
+ {_id: 20, input: false, target: "string", expected: "false"},
+ {_id: 21, input: false, target: "bool", expected: false},
+ {_id: 22, input: false, target: "int", expected: NumberInt(0)},
+ {_id: 23, input: false, target: "long", expected: NumberLong(0)},
+ {_id: 24, input: false, target: "decimal", expected: NumberDecimal(0)},
+
+ {_id: 25, input: ISODate("1970-01-01T00:00:00.123Z"), target: "double", expected: 123.0},
+ {
+ _id: 26,
+ input: ISODate("1970-01-01T00:00:00.123Z"),
+ target: "string",
+ expected: "1970-01-01T00:00:00.123Z"
+ },
+ {_id: 27, input: ISODate("1970-01-01T00:00:00.123Z"), target: "bool", expected: true},
+ {
+ _id: 28,
+ input: ISODate("1970-01-01T00:00:00.123Z"),
+ target: "date",
+ expected: ISODate("1970-01-01T00:00:00.123Z")
+ },
+ {
+ _id: 29,
+ input: ISODate("1970-01-01T00:00:00.123Z"),
+ target: "long",
+ expected: NumberLong(123)
+ },
+ {
+ _id: 30,
+ input: ISODate("1970-01-01T00:00:00.123Z"),
+ target: "decimal",
+ expected: NumberDecimal("123")
+ },
+
+ {_id: 31, input: NumberInt(1), target: "double", expected: 1.0},
+ {_id: 32, input: NumberInt(1), target: "string", expected: "1"},
+ {_id: 33, input: NumberInt(1), target: "bool", expected: true},
+ {_id: 34, input: NumberInt(1), target: "int", expected: NumberInt(1)},
+ {_id: 35, input: NumberInt(1), target: "long", expected: NumberLong(1)},
+ {_id: 36, input: NumberInt(1), target: "decimal", expected: NumberDecimal("1")},
+
+ {_id: 37, input: NumberLong(1), target: "double", expected: 1.0},
+ {_id: 38, input: NumberLong(1), target: "string", expected: "1"},
+ {_id: 39, input: NumberLong(1), target: "bool", expected: true},
+ {_id: 40, input: NumberLong(1), target: "date", expected: ISODate("1970-01-01T00:00:00.001Z")},
+ {_id: 41, input: NumberLong(1), target: "int", expected: NumberInt(1)},
+ {_id: 42, input: NumberLong(1), target: "long", expected: NumberLong(1)},
+ {_id: 43, input: NumberLong(1), target: "decimal", expected: NumberDecimal("1")},
+
+ {_id: 44, input: NumberDecimal("1.9"), target: "double", expected: 1.9},
+ {_id: 45, input: NumberDecimal("1.9"), target: "string", expected: "1.9"},
+ {_id: 46, input: NumberDecimal("1.9"), target: "bool", expected: true},
+ {
+ _id: 47,
+ input: NumberDecimal("1.9"),
+ target: "date",
+ expected: ISODate("1970-01-01T00:00:00.001Z")
+ },
+ {_id: 48, input: NumberDecimal("1.9"), target: "int", expected: NumberInt(1)},
+ {_id: 49, input: NumberDecimal("1.9"), target: "long", expected: NumberLong(1)},
+ {_id: 50, input: NumberDecimal("1.9"), target: "decimal", expected: NumberDecimal("1.9")},
+
+ {_id: 51, input: MinKey, target: "bool", expected: true},
+ {_id: 52, input: {foo: 1, bar: 2}, target: "bool", expected: true},
+ {_id: 53, input: [1, 2], target: "bool", expected: true},
+ {_id: 54, input: BinData(0, "BBBBBBBBBBBBBBBBBBBBBBBBBBBB"), target: "bool", expected: true},
+ {_id: 55, input: /B*/, target: "bool", expected: true},
+ {_id: 56, input: new DBRef("db.test", "oid"), target: "bool", expected: true},
+ {_id: 57, input: function() {}, target: "bool", expected: true},
+ // Symbol and CodeWScope are not supported from JavaScript, so we can't test them here.
+ {_id: 58, input: new Timestamp(1 / 1000, 1), target: "bool", expected: true},
+ {_id: 59, input: MinKey, target: "bool", expected: true}
+];
+populateCollection(conversionTestDocs);
+
+// Test $convert on each document.
+var pipeline = [
+ {
+ $project: {
+ output: {$convert: {to: "$target", input: "$input"}},
+ target: "$target",
+ expected: "$expected"
+ }
+ },
+ {$addFields: {outputType: {$type: "$output"}}},
+ {$sort: {_id: 1}}
+];
+var aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, conversionTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc.output, doc.expected, "Unexpected conversion: _id = " + doc._id);
+ assert.eq(doc.outputType, doc.target, "Conversion to incorrect type: _id = " + doc._id);
+});
+
+// Test each conversion using the shorthand $toBool, $toString, etc. syntax.
+pipeline = [
+ {
+ $project: {
+ output: {
+ $switch: {
+ branches: [
+ {case: {$eq: ["$target", "double"]}, then: {$toDouble: "$input"}},
+ {case: {$eq: ["$target", "string"]}, then: {$toString: "$input"}},
+ {case: {$eq: ["$target", "objectId"]}, then: {$toObjectId: "$input"}},
+ {case: {$eq: ["$target", "bool"]}, then: {$toBool: "$input"}},
+ {case: {$eq: ["$target", "date"]}, then: {$toDate: "$input"}},
+ {case: {$eq: ["$target", "int"]}, then: {$toInt: "$input"}},
+ {case: {$eq: ["$target", "long"]}, then: {$toLong: "$input"}},
+ {case: {$eq: ["$target", "decimal"]}, then: {$toDecimal: "$input"}}
+ ]
+ }
+ },
+ target: "$target",
+ expected: "$expected"
+ }
+ },
+ {$addFields: {outputType: {$type: "$output"}}},
+ {$sort: {_id: 1}}
+];
+aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, conversionTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc.output, doc.expected, "Unexpected conversion: _id = " + doc._id);
+ assert.eq(doc.outputType, doc.target, "Conversion to incorrect type: _id = " + doc._id);
+});
+
+// Test a $convert expression with "onError" to make sure that error handling still allows an
+// error in the "input" expression to propagate.
+assert.throws(function() {
+ coll.aggregate([
+ {$project: {output: {$convert: {to: "string", input: {$divide: [1, 0]}, onError: "ERROR"}}}}
+ ]);
+}, [], "Pipeline should have failed");
+
+//
+// Unsupported conversions.
+//
+var illegalConversionTestDocs = [
+ {_id: 0, input: 1.9, target: "objectId"},
+
+ {_id: 1, input: ObjectId("0123456789abcdef01234567"), target: "double"},
+ {_id: 2, input: ObjectId("0123456789abcdef01234567"), target: "int"},
+ {_id: 3, input: ObjectId("0123456789abcdef01234567"), target: "long"},
+ {_id: 4, input: ObjectId("0123456789abcdef01234567"), target: "decimal"},
+
+ {_id: 5, input: false, target: "objectId"},
+ {_id: 6, input: false, target: "date"},
+
+ {_id: 7, input: ISODate("1970-01-01T00:00:00.123Z"), target: "objectId"},
+ {_id: 8, input: ISODate("1970-01-01T00:00:00.123Z"), target: "int"},
+
+ {_id: 9, input: NumberInt(1), target: "objectId"},
+ {_id: 10, input: NumberInt(1), target: "date"},
+
+ {_id: 11, input: NumberLong(1), target: "objectId"},
+
+ {_id: 12, input: NumberDecimal("1.9"), target: "objectId"},
+
+ {_id: 13, input: 1.9, target: "minKey"},
+ {_id: 14, input: 1.9, target: "missing"},
+ {_id: 15, input: 1.9, target: "object"},
+ {_id: 16, input: 1.9, target: "array"},
+ {_id: 17, input: 1.9, target: "binData"},
+ {_id: 18, input: 1.9, target: "undefined"},
+ {_id: 19, input: 1.9, target: "null"},
+ {_id: 20, input: 1.9, target: "regex"},
+ {_id: 21, input: 1.9, target: "dbPointer"},
+ {_id: 22, input: 1.9, target: "javascript"},
+ {_id: 23, input: 1.9, target: "symbol"},
+ {_id: 24, input: 1.9, target: "javascriptWithScope"},
+ {_id: 25, input: 1.9, target: "timestamp"},
+ {_id: 26, input: 1.9, target: "maxKey"},
+];
+populateCollection(illegalConversionTestDocs);
+
+// Test each document to ensure that the conversion throws an error.
+illegalConversionTestDocs.forEach(doc => {
pipeline = [
- {
- $project: {
- output: {
- $switch: {
- branches: [
- {case: {$eq: ["$target", "double"]}, then: {$toDouble: "$input"}},
- {case: {$eq: ["$target", "string"]}, then: {$toString: "$input"}},
- {case: {$eq: ["$target", "objectId"]}, then: {$toObjectId: "$input"}},
- {case: {$eq: ["$target", "bool"]}, then: {$toBool: "$input"}},
- {case: {$eq: ["$target", "date"]}, then: {$toDate: "$input"}},
- {case: {$eq: ["$target", "int"]}, then: {$toInt: "$input"}},
- {case: {$eq: ["$target", "long"]}, then: {$toLong: "$input"}},
- {case: {$eq: ["$target", "decimal"]}, then: {$toDecimal: "$input"}}
- ]
- }
- },
- target: "$target",
- expected: "$expected"
- }
- },
- {$addFields: {outputType: {$type: "$output"}}},
- {$sort: {_id: 1}}
+ {$match: {_id: doc._id}},
+ {$project: {output: {$convert: {to: "$target", input: "$input"}}}}
];
- aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, conversionTestDocs.length);
- aggResult.forEach(doc => {
- assert.eq(doc.output, doc.expected, "Unexpected conversion: _id = " + doc._id);
- assert.eq(doc.outputType, doc.target, "Conversion to incorrect type: _id = " + doc._id);
- });
-
- // Test a $convert expression with "onError" to make sure that error handling still allows an
- // error in the "input" expression to propagate.
assert.throws(function() {
- coll.aggregate([{
- $project:
- {output: {$convert: {to: "string", input: {$divide: [1, 0]}, onError: "ERROR"}}}
- }]);
- }, [], "Pipeline should have failed");
-
- //
- // Unsupported conversions.
- //
- var illegalConversionTestDocs = [
- {_id: 0, input: 1.9, target: "objectId"},
-
- {_id: 1, input: ObjectId("0123456789abcdef01234567"), target: "double"},
- {_id: 2, input: ObjectId("0123456789abcdef01234567"), target: "int"},
- {_id: 3, input: ObjectId("0123456789abcdef01234567"), target: "long"},
- {_id: 4, input: ObjectId("0123456789abcdef01234567"), target: "decimal"},
-
- {_id: 5, input: false, target: "objectId"},
- {_id: 6, input: false, target: "date"},
-
- {_id: 7, input: ISODate("1970-01-01T00:00:00.123Z"), target: "objectId"},
- {_id: 8, input: ISODate("1970-01-01T00:00:00.123Z"), target: "int"},
-
- {_id: 9, input: NumberInt(1), target: "objectId"},
- {_id: 10, input: NumberInt(1), target: "date"},
-
- {_id: 11, input: NumberLong(1), target: "objectId"},
-
- {_id: 12, input: NumberDecimal("1.9"), target: "objectId"},
-
- {_id: 13, input: 1.9, target: "minKey"},
- {_id: 14, input: 1.9, target: "missing"},
- {_id: 15, input: 1.9, target: "object"},
- {_id: 16, input: 1.9, target: "array"},
- {_id: 17, input: 1.9, target: "binData"},
- {_id: 18, input: 1.9, target: "undefined"},
- {_id: 19, input: 1.9, target: "null"},
- {_id: 20, input: 1.9, target: "regex"},
- {_id: 21, input: 1.9, target: "dbPointer"},
- {_id: 22, input: 1.9, target: "javascript"},
- {_id: 23, input: 1.9, target: "symbol"},
- {_id: 24, input: 1.9, target: "javascriptWithScope"},
- {_id: 25, input: 1.9, target: "timestamp"},
- {_id: 26, input: 1.9, target: "maxKey"},
- ];
- populateCollection(illegalConversionTestDocs);
-
- // Test each document to ensure that the conversion throws an error.
- illegalConversionTestDocs.forEach(doc => {
- pipeline = [
- {$match: {_id: doc._id}},
- {$project: {output: {$convert: {to: "$target", input: "$input"}}}}
- ];
-
- assert.throws(function() {
- coll.aggregate(pipeline);
- }, [], "Conversion should have failed: _id = " + doc._id);
- });
-
- // Test that each illegal conversion uses the 'onError' value.
- pipeline = [
- {$project: {output: {$convert: {to: "$target", input: "$input", onError: "ERROR"}}}},
- {$sort: {_id: 1}}
- ];
- var aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, illegalConversionTestDocs.length);
-
- aggResult.forEach(doc => {
- assert.eq(doc.output, "ERROR", "Unexpected result: _id = " + doc._id);
- });
-
- // Test that, when 'onError' evaluates to a missing value ($$REMOVE), it propagates to the result.
- pipeline = [
- {
- $project: {
- _id: false,
- output: {$convert: {to: "$target", input: "$input", onError: "$$REMOVE"}}
- }
- },
- {$sort: {_id: 1}}
- ];
- var aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, illegalConversionTestDocs.length);
-
- aggResult.forEach(doc => {
- assert.eq(doc, {});
- });
-
- //
- // One test document for each "nullish" value.
- //
- var nullTestDocs =
- [{_id: 0, input: null}, {_id: 1, input: undefined}, {_id: 2, /* input is missing */}];
- populateCollection(nullTestDocs);
-
- // Test that all nullish inputs result in the 'onNull' output.
- pipeline = [
- {$project: {output: {$convert: {to: "int", input: "$input", onNull: "NULL"}}}},
- {$sort: {_id: 1}}
- ];
- var aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, nullTestDocs.length);
-
- aggResult.forEach(doc => {
- assert.eq(doc.output, "NULL", "Unexpected result: _id = " + doc._id);
- });
-
- // Test that all nullish inputs result in the 'onNull' output _even_ if 'to' is nullish.
- pipeline = [
- {$project: {output: {$convert: {to: null, input: "$input", onNull: "NULL"}}}},
- {$sort: {_id: 1}}
- ];
- var aggResult = coll.aggregate(pipeline).toArray();
- assert.eq(aggResult.length, nullTestDocs.length);
-
- aggResult.forEach(doc => {
- assert.eq(doc.output, "NULL", "Unexpected result: _id = " + doc._id);
- });
+ coll.aggregate(pipeline);
+ }, [], "Conversion should have failed: _id = " + doc._id);
+});
+
+// Test that each illegal conversion uses the 'onError' value.
+pipeline = [
+ {$project: {output: {$convert: {to: "$target", input: "$input", onError: "ERROR"}}}},
+ {$sort: {_id: 1}}
+];
+var aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, illegalConversionTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc.output, "ERROR", "Unexpected result: _id = " + doc._id);
+});
+
+// Test that, when 'onError' evaluates to a missing value ($$REMOVE), it propagates to the result.
+pipeline = [
+ {
+ $project:
+ {_id: false, output: {$convert: {to: "$target", input: "$input", onError: "$$REMOVE"}}}
+ },
+ {$sort: {_id: 1}}
+];
+var aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, illegalConversionTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc, {});
+});
+
+//
+// One test document for each "nullish" value.
+//
+var nullTestDocs =
+ [{_id: 0, input: null}, {_id: 1, input: undefined}, {_id: 2, /* input is missing */}];
+populateCollection(nullTestDocs);
+
+// Test that all nullish inputs result in the 'onNull' output.
+pipeline = [
+ {$project: {output: {$convert: {to: "int", input: "$input", onNull: "NULL"}}}},
+ {$sort: {_id: 1}}
+];
+var aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, nullTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc.output, "NULL", "Unexpected result: _id = " + doc._id);
+});
+
+// Test that all nullish inputs result in the 'onNull' output _even_ if 'to' is nullish.
+pipeline = [
+ {$project: {output: {$convert: {to: null, input: "$input", onNull: "NULL"}}}},
+ {$sort: {_id: 1}}
+];
+var aggResult = coll.aggregate(pipeline).toArray();
+assert.eq(aggResult.length, nullTestDocs.length);
+
+aggResult.forEach(doc => {
+ assert.eq(doc.output, "NULL", "Unexpected result: _id = " + doc._id);
+});
}());
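
A compact sketch of the three $convert outcomes covered above (a successful conversion, 'onError', and 'onNull'); 'convert_demo' is an illustrative collection name.

const demoColl = db.convert_demo;
demoColl.drop();
assert.writeOK(
    demoColl.insert([{_id: 0, v: "1.5"}, {_id: 1, v: "not a number"}, {_id: 2, v: null}]));
const demoResult =
    demoColl
        .aggregate([
            {$project: {n: {$convert: {input: "$v", to: "double", onError: "ERR", onNull: "NULL"}}}},
            {$sort: {_id: 1}}
        ])
        .toArray();
assert.eq(demoResult, [{_id: 0, n: 1.5}, {_id: 1, n: "ERR"}, {_id: 2, n: "NULL"}]);
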
diff --git a/jstests/aggregation/expressions/date_expressions_with_timezones.js b/jstests/aggregation/expressions/date_expressions_with_timezones.js
index 83f7488f26c..076800a5384 100644
--- a/jstests/aggregation/expressions/date_expressions_with_timezones.js
+++ b/jstests/aggregation/expressions/date_expressions_with_timezones.js
@@ -1,83 +1,83 @@
// Basic tests for using date expressions with time zone arguments.
(function() {
- "use strict";
+"use strict";
- const coll = db.date_expressions_with_time_zones;
- coll.drop();
+const coll = db.date_expressions_with_time_zones;
+coll.drop();
- assert.writeOK(coll.insert([
- // Three sales on 2017-06-16 in UTC.
- {_id: 0, date: new ISODate("2017-06-16T00:00:00.000Z"), sales: 1},
- {_id: 1, date: new ISODate("2017-06-16T12:02:21.013Z"), sales: 2},
- // Six sales on 2017-06-17 in UTC.
- {_id: 2, date: new ISODate("2017-06-17T00:00:00.000Z"), sales: 2},
- {_id: 3, date: new ISODate("2017-06-17T12:02:21.013Z"), sales: 2},
- {_id: 4, date: new ISODate("2017-06-17T15:00:33.101Z"), sales: 2},
- ]));
+assert.writeOK(coll.insert([
+ // Three sales on 2017-06-16 in UTC.
+ {_id: 0, date: new ISODate("2017-06-16T00:00:00.000Z"), sales: 1},
+ {_id: 1, date: new ISODate("2017-06-16T12:02:21.013Z"), sales: 2},
+ // Six sales on 2017-06-17 in UTC.
+ {_id: 2, date: new ISODate("2017-06-17T00:00:00.000Z"), sales: 2},
+ {_id: 3, date: new ISODate("2017-06-17T12:02:21.013Z"), sales: 2},
+ {_id: 4, date: new ISODate("2017-06-17T15:00:33.101Z"), sales: 2},
+]));
- // Compute how many sales happened on each day, in UTC.
- assert.eq(
- [
- {_id: {year: 2017, month: 6, day: 16}, totalSales: 3},
- {_id: {year: 2017, month: 6, day: 17}, totalSales: 6}
- ],
- coll.aggregate([
- {
- $group: {
- _id: {
- year: {$year: "$date"},
- month: {$month: "$date"},
- day: {$dayOfMonth: "$date"}
- },
- totalSales: {$sum: "$sales"}
- }
- },
- {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
- ])
- .toArray());
+// Compute how many sales happened on each day, in UTC.
+assert.eq(
+ [
+ {_id: {year: 2017, month: 6, day: 16}, totalSales: 3},
+ {_id: {year: 2017, month: 6, day: 17}, totalSales: 6}
+ ],
+ coll.aggregate([
+ {
+ $group: {
+ _id: {
+ year: {$year: "$date"},
+ month: {$month: "$date"},
+ day: {$dayOfMonth: "$date"}
+ },
+ totalSales: {$sum: "$sales"}
+ }
+ },
+ {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
+ ])
+ .toArray());
- // Compute how many sales happened on each day, in New York. The sales made at midnight should
- // move to the previous days.
- assert.eq(
- [
- {_id: {year: 2017, month: 6, day: 15}, totalSales: 1},
- {_id: {year: 2017, month: 6, day: 16}, totalSales: 4},
- {_id: {year: 2017, month: 6, day: 17}, totalSales: 4}
- ],
- coll.aggregate([
- {
- $group: {
- _id: {
- year: {$year: {date: "$date", timezone: "America/New_York"}},
- month: {$month: {date: "$date", timezone: "America/New_York"}},
- day: {$dayOfMonth: {date: "$date", timezone: "America/New_York"}}
- },
- totalSales: {$sum: "$sales"}
- }
- },
- {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
- ])
- .toArray());
+// Compute how many sales happened on each day, in New York. The sales made at midnight should
+// move to the previous days.
+assert.eq(
+ [
+ {_id: {year: 2017, month: 6, day: 15}, totalSales: 1},
+ {_id: {year: 2017, month: 6, day: 16}, totalSales: 4},
+ {_id: {year: 2017, month: 6, day: 17}, totalSales: 4}
+ ],
+ coll.aggregate([
+ {
+ $group: {
+ _id: {
+ year: {$year: {date: "$date", timezone: "America/New_York"}},
+ month: {$month: {date: "$date", timezone: "America/New_York"}},
+ day: {$dayOfMonth: {date: "$date", timezone: "America/New_York"}}
+ },
+ totalSales: {$sum: "$sales"}
+ }
+ },
+ {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
+ ])
+ .toArray());
- // Compute how many sales happened on each day, in Sydney (+10 hours).
- assert.eq(
- [
- {_id: {year: 2017, month: 6, day: 16}, totalSales: 3},
- {_id: {year: 2017, month: 6, day: 17}, totalSales: 4},
- {_id: {year: 2017, month: 6, day: 18}, totalSales: 2}
- ],
- coll.aggregate([
- {
- $group: {
- _id: {
- year: {$year: {date: "$date", timezone: "Australia/Sydney"}},
- month: {$month: {date: "$date", timezone: "Australia/Sydney"}},
- day: {$dayOfMonth: {date: "$date", timezone: "Australia/Sydney"}}
- },
- totalSales: {$sum: "$sales"}
- }
- },
- {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
- ])
- .toArray());
+// Compute how many sales happened on each day, in Sydney (+10 hours).
+assert.eq(
+ [
+ {_id: {year: 2017, month: 6, day: 16}, totalSales: 3},
+ {_id: {year: 2017, month: 6, day: 17}, totalSales: 4},
+ {_id: {year: 2017, month: 6, day: 18}, totalSales: 2}
+ ],
+ coll.aggregate([
+ {
+ $group: {
+ _id: {
+ year: {$year: {date: "$date", timezone: "Australia/Sydney"}},
+ month: {$month: {date: "$date", timezone: "Australia/Sydney"}},
+ day: {$dayOfMonth: {date: "$date", timezone: "Australia/Sydney"}}
+ },
+ totalSales: {$sum: "$sales"}
+ }
+ },
+ {$sort: {"_id.year": 1, "_id.month": 1, "_id.day": 1}}
+ ])
+ .toArray());
})();
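
The hunk above only re-indents; the behavior it pins down is that $year, $month, and
$dayOfMonth accept an extended {date, timezone} argument, so the same instant can land on
different calendar days. A minimal standalone sketch of that pattern (the collection name
and the printjson call are illustrative, assuming a mongo shell on a scratch database):

    const c = db.tz_sketch;
    c.drop();
    assert.commandWorked(c.insert({_id: 0, when: ISODate("2017-06-17T00:00:00Z")}));
    // Midnight UTC on June 17 is still the evening of June 16 in New York (UTC-4 in June).
    printjson(c.aggregate([{
        $project: {
            utcDay: {$dayOfMonth: "$when"},
            nycDay: {$dayOfMonth: {date: "$when", timezone: "America/New_York"}}
        }
    }]).toArray());
    // Expected: [{_id: 0, utcDay: 17, nycDay: 16}]
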
diff --git a/jstests/aggregation/expressions/date_from_parts.js b/jstests/aggregation/expressions/date_from_parts.js
index 1f53aebc193..c58f6c77f7b 100644
--- a/jstests/aggregation/expressions/date_from_parts.js
+++ b/jstests/aggregation/expressions/date_from_parts.js
@@ -1,939 +1,903 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertErrMsgContains.
(function() {
- "use strict";
-
- const coll = db.dateFromParts;
-
- /* --------------------------------------------------------------------------------------- */
- /* Basic Sanity Checks */
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0, year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713},
- {
- _id: 1,
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713,
- timezone: "Europe/Amsterdam"
- },
- {
- _id: 2,
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713,
- timezone: "Asia/Tokyo"
- },
- {
- _id: 3,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713,
- timezone: "America/Chicago"
- }
- },
- ]));
-
- assert.eq(
- [
- {_id: 1, date: ISODate("2016-12-31T23:00:00Z")},
- {_id: 2, date: ISODate("2016-12-31T15:00:00Z")},
- ],
- coll.aggregate([
- {
- $match: {'year': {$exists: true}, 'timezone': {$exists: true}},
- },
- {$project: {date: {'$dateFromParts': {year: "$year", "timezone": "$timezone"}}}}
- ])
- .toArray());
-
- assert.eq(
- [
- {_id: 3, date: ISODate("2017-06-19T05:00:00Z")},
- ],
- coll.aggregate([
- {
- $match: {
- 'date.year': {$exists: true},
- },
+"use strict";
+
+const coll = db.dateFromParts;
+
+/* --------------------------------------------------------------------------------------- */
+/* Basic Sanity Checks */
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713},
+ {
+ _id: 1,
+ year: 2017,
+ month: 6,
+ day: 19,
+ hour: 15,
+ minute: 13,
+ second: 25,
+ millisecond: 713,
+ timezone: "Europe/Amsterdam"
+ },
+ {
+ _id: 2,
+ year: 2017,
+ month: 6,
+ day: 19,
+ hour: 15,
+ minute: 13,
+ second: 25,
+ millisecond: 713,
+ timezone: "Asia/Tokyo"
+ },
+ {
+ _id: 3,
+ date: {
+ year: 2017,
+ month: 6,
+ day: 19,
+ hour: 15,
+ minute: 13,
+ second: 25,
+ millisecond: 713,
+ timezone: "America/Chicago"
+ }
+ },
+]));
+
+assert.eq(
+ [
+ {_id: 1, date: ISODate("2016-12-31T23:00:00Z")},
+ {_id: 2, date: ISODate("2016-12-31T15:00:00Z")},
+ ],
+ coll.aggregate([
+ {
+ $match: {'year': {$exists: true}, 'timezone': {$exists: true}},
+ },
+ {$project: {date: {'$dateFromParts': {year: "$year", "timezone": "$timezone"}}}}
+ ])
+ .toArray());
+
+assert.eq(
+ [
+ {_id: 3, date: ISODate("2017-06-19T05:00:00Z")},
+ ],
+ coll.aggregate([
+ {
+ $match: {
+ 'date.year': {$exists: true},
},
- {
- $project: {
- date: {
- '$dateFromParts': {
- year: "$date.year",
- month: '$date.month',
- day: '$date.day',
- timezone: '$date.timezone'
- }
- }
- }
+ },
+ {
+ $project: {
+ date: {
+ '$dateFromParts': {
+ year: "$date.year",
+ month: '$date.month',
+ day: '$date.day',
+ timezone: '$date.timezone'
+ }
+ }
}
- ])
- .toArray());
-
- let pipeline = {$project: {date: {'$dateFromParts': "$date"}}};
- assertErrorCode(coll, pipeline, 40519);
-
- pipeline = {$project: {date: {'$dateFromParts': {"timezone": "$timezone"}}}};
- assertErrorCode(coll, pipeline, 40516);
-
- pipeline = {$project: {date: {'$dateFromParts': {year: false}}}};
- assertErrorCode(coll, pipeline, 40515);
-
- pipeline = {$project: {date: {'$dateFromParts': {year: 2012, "timezone": "DoesNot/Exist"}}}};
- assertErrorCode(coll, pipeline, 40485);
-
- pipeline = {$project: {date: {'$dateFromParts': {year: 2012, "timezone": 5}}}};
- assertErrorCode(coll, pipeline, 40517);
-
- /* --------------------------------------------------------------------------------------- */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {
- _id: 0,
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742,
- timezone: "Europe/Berlin"
- },
- ]));
-
- let pipelines = [
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: NumberInt("2017"),
- month: NumberInt("6"),
- day: NumberInt("23"),
- hour: NumberInt("14"),
- minute: NumberInt("27"),
- second: NumberInt("37"),
- millisecond: NumberInt("742")
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: NumberLong("2017"),
- month: NumberLong("6"),
- day: NumberLong("23"),
- hour: NumberLong("14"),
- minute: NumberLong("27"),
- second: NumberLong("37"),
- millisecond: NumberLong("742")
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: NumberDecimal("2017"),
- month: NumberDecimal("6"),
- day: NumberDecimal("23"),
- hour: NumberDecimal("14"),
- minute: NumberDecimal("27"),
- second: NumberDecimal("37"),
- millisecond: NumberDecimal("742")
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "+02:00",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "-02",
- year: 2017,
- month: 6,
- day: 23,
- hour: 10,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "+02:00",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "+04:15",
- year: 2017,
- month: 6,
- day: 23,
- hour: 16,
- minute: 42,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "$timezone",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: "$year",
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: "$month",
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: "$day",
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: 23,
- hour: "$hour",
- minute: 27,
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: "$minute",
- second: 37,
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: "$second",
- millisecond: 742
- }
- }
- }
- }],
- [{
- '$project': {
- date: {
- '$dateFromParts': {
- timezone: "Europe/Berlin",
- year: 2017,
- month: 6,
- day: 23,
- hour: 14,
- minute: 27,
- second: 37,
- millisecond: "$millisecond"
- }
- }
- }
- }],
- ];
-
- pipelines.forEach(function(pipeline) {
- assert.eq([{_id: 0, date: ISODate("2017-06-23T12:27:37.742Z")}],
- coll.aggregate(pipeline).toArray(),
- tojson(pipeline));
- });
-
- /* --------------------------------------------------------------------------------------- */
-    /* Testing that missing date parts or a missing timezone produce a null date */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0},
- ]));
-
- pipelines = [
- [{'$project': {date: {'$dateFromParts': {year: "$year"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, month: "$month"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, day: "$day"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, hour: "$hour"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, minute: "$minute"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, second: "$second"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, millisecond: "$millisecond"}}}}],
- [{'$project': {date: {'$dateFromParts': {isoWeekYear: "$isoWeekYear"}}}}],
- [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoWeek: "$isoWeek"}}}}],
- [{
- '$project':
- {date: {'$dateFromParts': {isoWeekYear: 2017, isoDayOfWeek: "$isoDayOfWeek"}}}
- }],
- ];
-
- pipelines.forEach(function(pipeline) {
- assert.eq([{_id: 0, date: null}], coll.aggregate(pipeline).toArray(), tojson(pipeline));
- });
-
- pipeline = [{'$project': {date: {'$dateFromParts': {year: 2017, timezone: "$timezone"}}}}];
- assert.eq([{_id: 0, date: null}], coll.aggregate(pipeline).toArray());
-
- /* --------------------------------------------------------------------------------------- */
-    /* Testing that uncoercible values raise the expected error codes */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0, falseValue: false},
- ]));
-
- pipelines = [
- [{'$project': {date: {'$dateFromParts': {year: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, month: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, day: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, hour: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, minute: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, second: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 2017, millisecond: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {isoWeekYear: "$falseValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoWeek: "$falseValue"}}}}],
- [{
- '$project':
- {date: {'$dateFromParts': {isoWeekYear: 2017, isoDayOfWeek: "$falseValue"}}}
- }],
- ];
-
- pipelines.forEach(function(pipeline) {
- assertErrorCode(coll, pipeline, 40515, tojson(pipeline));
- });
-
- pipeline = [{'$project': {date: {'$dateFromParts': {year: 2017, timezone: "$falseValue"}}}}];
- assertErrorCode(coll, pipeline, 40517);
-
- /* --------------------------------------------------------------------------------------- */
-    /* Testing that out-of-range year values raise the expected error code */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0, outOfRangeValue: 10002},
- ]));
-
- pipelines = [
- [{'$project': {date: {'$dateFromParts': {year: "$outOfRangeValue"}}}}],
- [{'$project': {date: {'$dateFromParts': {year: -1}}}}],
- [{'$project': {date: {'$dateFromParts': {year: 10000}}}}],
- ];
-
- pipelines.forEach(function(pipeline) {
- assertErrorCode(coll, pipeline, 40523, tojson(pipeline));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Testing "out of range" under and overflows */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([{
- _id: 0,
- minusOne: -1,
- zero: 0,
- thirteen: 13,
- twentyFive: 25,
- sixtyOne: 61,
- thousandAndOne: 1001,
- tenThousandMinusOne: 9999,
- tenThousandAndOne: 10001,
- seventyMillionAndSomething: 71841012,
- secondsSinceEpoch: 1502095918,
- millisSinceEpoch: NumberLong("1502095918551"),
- }]));
-
- tests = [
- {expected: "0000-01-01T00:00:00.000Z", parts: {year: "$zero"}},
- {expected: "9999-01-01T00:00:00.000Z", parts: {year: "$tenThousandMinusOne"}},
- {expected: "2016-11-01T00:00:00.000Z", parts: {year: 2017, month: "$minusOne"}},
- {expected: "2016-12-01T00:00:00.000Z", parts: {year: 2017, month: "$zero"}},
- {expected: "2018-01-01T00:00:00.000Z", parts: {year: 2017, month: "$thirteen"}},
- {expected: "2016-12-30T00:00:00.000Z", parts: {year: 2017, day: "$minusOne"}},
- {expected: "2016-12-31T00:00:00.000Z", parts: {year: 2017, day: "$zero"}},
- {expected: "2017-03-02T00:00:00.000Z", parts: {year: 2017, day: "$sixtyOne"}},
- {expected: "2016-12-31T23:00:00.000Z", parts: {year: 2017, hour: "$minusOne"}},
- {expected: "2017-01-02T01:00:00.000Z", parts: {year: 2017, hour: "$twentyFive"}},
- {expected: "2016-12-31T23:59:00.000Z", parts: {year: 2017, minute: "$minusOne"}},
- {expected: "2017-01-01T00:00:00.000Z", parts: {year: 2017, minute: "$zero"}},
- {expected: "2017-01-01T01:01:00.000Z", parts: {year: 2017, minute: "$sixtyOne"}},
- {expected: "2016-12-31T23:59:59.000Z", parts: {year: 2017, second: "$minusOne"}},
- {expected: "2017-01-01T00:01:01.000Z", parts: {year: 2017, second: "$sixtyOne"}},
- {
- expected: "2019-04-12T11:50:12.000Z",
- parts: {year: 2017, second: "$seventyMillionAndSomething"}
- },
- {
- expected: "1972-04-11T11:50:12.000Z",
- parts: {year: 1970, second: "$seventyMillionAndSomething"}
- },
- {expected: "2017-08-07T08:51:58.000Z", parts: {year: 1970, second: "$secondsSinceEpoch"}},
- {expected: "2016-12-31T23:59:59.999Z", parts: {year: 2017, millisecond: "$minusOne"}},
- {expected: "2017-01-01T00:00:01.001Z", parts: {year: 2017, millisecond: "$thousandAndOne"}},
- {
- expected: "2017-01-01T19:57:21.012Z",
- parts: {year: 2017, millisecond: "$seventyMillionAndSomething"}
- },
- {
- expected: "2017-01-18T09:14:55.918Z",
- parts: {year: 2017, millisecond: "$secondsSinceEpoch"}
- },
- {
- expected: "1970-01-01T19:57:21.012Z",
- parts: {year: 1970, millisecond: "$seventyMillionAndSomething"}
- },
- {
- expected: "2017-08-07T08:51:58.551Z",
- parts: {year: 1970, millisecond: "$millisSinceEpoch"}
- },
- ];
-
- tests.forEach(function(test) {
- assert.eq(
- [
- {_id: 0, date: ISODate(test.expected)},
- ],
- coll.aggregate([{$project: {date: {"$dateFromParts": test.parts}}}]).toArray(),
- tojson(test));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /*
-     * Testing double and Decimal128 millisecond values that either aren't representable as a
-     * 64-bit integer or that overflow when converted to a 64-bit microsecond value.
- */
- coll.drop();
-
- assert.commandWorked(coll.insert([{
- _id: 0,
- veryBigDoubleA: 18014398509481984.0,
- veryBigDecimal128A: NumberDecimal("9223372036854775807"), // 2^63-1
- veryBigDoubleB: 18014398509481984000.0,
- veryBigDecimal128B: NumberDecimal("9223372036854775807000"), // (2^63-1) * 1000
- }]));
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleA"}}}}];
- assertErrCodeAndErrMsgContains(
- coll,
- pipeline,
- ErrorCodes.DurationOverflow,
- "Overflow casting from a lower-precision duration to a higher-precision duration");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128A"}}}}];
- assertErrCodeAndErrMsgContains(
- coll,
- pipeline,
- ErrorCodes.DurationOverflow,
- "Overflow casting from a lower-precision duration to a higher-precision duration");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleB"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40515, "'millisecond' must evaluate to an integer");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128B"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40515, "'millisecond' must evaluate to an integer");
-
- /* --------------------------------------------------------------------------------------- */
- /* Testing that year values are only allowed in the range [0, 9999] and that month, day, hour,
- * and minute values are only allowed in the range [-32,768, 32,767]. */
- coll.drop();
-
- assert.commandWorked(coll.insert([{
+ }
+ ])
+ .toArray());
+
+let pipeline = {$project: {date: {'$dateFromParts': "$date"}}};
+assertErrorCode(coll, pipeline, 40519);
+
+pipeline = {
+ $project: {date: {'$dateFromParts': {"timezone": "$timezone"}}}
+};
+assertErrorCode(coll, pipeline, 40516);
+
+pipeline = {
+ $project: {date: {'$dateFromParts': {year: false}}}
+};
+assertErrorCode(coll, pipeline, 40515);
+
+pipeline = {
+ $project: {date: {'$dateFromParts': {year: 2012, "timezone": "DoesNot/Exist"}}}
+};
+assertErrorCode(coll, pipeline, 40485);
+
+pipeline = {
+ $project: {date: {'$dateFromParts': {year: 2012, "timezone": 5}}}
+};
+assertErrorCode(coll, pipeline, 40517);
+
+/* --------------------------------------------------------------------------------------- */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {
_id: 0,
- bigYear: 10000,
- smallYear: -1,
- prettyBigInt: 32768,
- prettyBigNegativeInt: -32769
- }]));
-
- pipeline = [{$project: {date: {"$dateFromParts": {year: "$bigYear"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999");
-
- pipeline = [{$project: {date: {"$dateFromParts": {year: "$smallYear"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999");
-
- pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigNegativeInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{
- $project:
- {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigNegativeInt"}}}
- }];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigNegativeInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'minute' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{
- $project:
- {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigNegativeInt"}}}
- }];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'minute' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$bigYear"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999");
-
- pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$smallYear"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999");
-
- pipeline =
- [{$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigInt"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{
- $project:
- {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigNegativeInt"}}}
- }];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [
- {$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigInt"}}}}
- ];
- assertErrCodeAndErrMsgContains(
- coll,
- pipeline,
- 31034,
- "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]");
-
- pipeline = [{
- $project: {
- date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigNegativeInt"}}
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742,
+ timezone: "Europe/Berlin"
+ },
+]));
+
+let pipelines = [
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
}
- }];
- assertErrCodeAndErrMsgContains(
- coll,
- pipeline,
- 31034,
- "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]");
-
- /* --------------------------------------------------------------------------------------- */
- /* Testing wrong arguments */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0},
- ]));
-
- pipelines = [
- {code: 40519, pipeline: {'$project': {date: {'$dateFromParts': true}}}},
- {code: 40519, pipeline: {'$project': {date: {'$dateFromParts': []}}}},
-
- {code: 40518, pipeline: {'$project': {date: {'$dateFromParts': {unknown: true}}}}},
-
- {code: 40516, pipeline: {'$project': {date: {'$dateFromParts': {}}}}},
-
- {
- code: 40489,
- pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoWeekYear: 2017}}}}
- },
- {code: 40489, pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoWeek: 3}}}}},
- {
- code: 40489,
- pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoDayOfWeek: 5}}}}
- },
- {
- code: 40489,
- pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, year: 2017}}}}
- },
-
- {
- code: 40525,
- pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, month: 12}}}}
- },
- {
- code: 40525,
- pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, day: 17}}}}
- },
- ];
-
- pipelines.forEach(function(item) {
-        assertErrorCode(coll, item.pipeline, item.code, tojson(item.pipeline));
- });
-
- /* --------------------------------------------------------------------------------------- */
-    /* Testing values of the wrong type */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0, floatField: 2017.5, decimalField: NumberDecimal("2017.5")},
- ]));
-
- pipelines = [
- {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "2017"}}}}},
- {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: 2017.3}}}}},
- {
- code: 40515,
- pipeline: {'$project': {date: {'$dateFromParts': {year: NumberDecimal("2017.3")}}}}
- },
- {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "$floatField"}}}}},
- {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "$decimalField"}}}}},
- ];
-
- pipelines.forEach(function(item) {
-        assertErrorCode(coll, item.pipeline, item.code, tojson(item.pipeline));
- });
-
- /* --------------------------------------------------------------------------------------- */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {_id: 0, year: NumberDecimal("2017"), month: 6.0, day: NumberInt(19), hour: NumberLong(15)},
- {
- _id: 1,
- year: NumberDecimal("2017"),
- minute: 6.0,
- second: NumberInt(19),
- millisecond: NumberLong(15)
- },
- {_id: 2, isoWeekYear: NumberDecimal("2017"), isoWeek: 6.0, isoDayOfWeek: NumberInt(4)},
- ]));
-
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: NumberInt("2017"),
+ month: NumberInt("6"),
+ day: NumberInt("23"),
+ hour: NumberInt("14"),
+ minute: NumberInt("27"),
+ second: NumberInt("37"),
+ millisecond: NumberInt("742")
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: NumberLong("2017"),
+ month: NumberLong("6"),
+ day: NumberLong("23"),
+ hour: NumberLong("14"),
+ minute: NumberLong("27"),
+ second: NumberLong("37"),
+ millisecond: NumberLong("742")
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: NumberDecimal("2017"),
+ month: NumberDecimal("6"),
+ day: NumberDecimal("23"),
+ hour: NumberDecimal("14"),
+ minute: NumberDecimal("27"),
+ second: NumberDecimal("37"),
+ millisecond: NumberDecimal("742")
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "+02:00",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "-02",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 10,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "+02:00",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "+04:15",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 16,
+ minute: 42,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "$timezone",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: "$year",
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: "$month",
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: "$day",
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: "$hour",
+ minute: 27,
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: "$minute",
+ second: 37,
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: "$second",
+ millisecond: 742
+ }
+ }
+ }
+ }],
+ [{
+ '$project': {
+ date: {
+ '$dateFromParts': {
+ timezone: "Europe/Berlin",
+ year: 2017,
+ month: 6,
+ day: 23,
+ hour: 14,
+ minute: 27,
+ second: 37,
+ millisecond: "$millisecond"
+ }
+ }
+ }
+ }],
+];
+
+pipelines.forEach(function(pipeline) {
+ assert.eq([{_id: 0, date: ISODate("2017-06-23T12:27:37.742Z")}],
+ coll.aggregate(pipeline).toArray(),
+ tojson(pipeline));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing that missing date parts or a missing timezone produce a null date */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0},
+]));
+
+pipelines = [
+ [{'$project': {date: {'$dateFromParts': {year: "$year"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, month: "$month"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, day: "$day"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, hour: "$hour"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, minute: "$minute"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, second: "$second"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, millisecond: "$millisecond"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: "$isoWeekYear"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoWeek: "$isoWeek"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoDayOfWeek: "$isoDayOfWeek"}}}}],
+];
+
+pipelines.forEach(function(pipeline) {
+ assert.eq([{_id: 0, date: null}], coll.aggregate(pipeline).toArray(), tojson(pipeline));
+});
+
+pipeline = [{'$project': {date: {'$dateFromParts': {year: 2017, timezone: "$timezone"}}}}];
+assert.eq([{_id: 0, date: null}], coll.aggregate(pipeline).toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing that uncoercible values raise the expected error codes */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, falseValue: false},
+]));
+
+pipelines = [
+ [{'$project': {date: {'$dateFromParts': {year: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, month: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, day: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, hour: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, minute: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, second: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 2017, millisecond: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoWeek: "$falseValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, isoDayOfWeek: "$falseValue"}}}}],
+];
+
+pipelines.forEach(function(pipeline) {
+ assertErrorCode(coll, pipeline, 40515, tojson(pipeline));
+});
+
+pipeline = [{'$project': {date: {'$dateFromParts': {year: 2017, timezone: "$falseValue"}}}}];
+assertErrorCode(coll, pipeline, 40517);
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing that out-of-range year values raise the expected error code */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, outOfRangeValue: 10002},
+]));
+
+pipelines = [
+ [{'$project': {date: {'$dateFromParts': {year: "$outOfRangeValue"}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: -1}}}}],
+ [{'$project': {date: {'$dateFromParts': {year: 10000}}}}],
+];
+
+pipelines.forEach(function(pipeline) {
+ assertErrorCode(coll, pipeline, 40523, tojson(pipeline));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing "out of range" under and overflows */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([{
+ _id: 0,
+ minusOne: -1,
+ zero: 0,
+ thirteen: 13,
+ twentyFive: 25,
+ sixtyOne: 61,
+ thousandAndOne: 1001,
+ tenThousandMinusOne: 9999,
+ tenThousandAndOne: 10001,
+ seventyMillionAndSomething: 71841012,
+ secondsSinceEpoch: 1502095918,
+ millisSinceEpoch: NumberLong("1502095918551"),
+}]));
+
+tests = [
+ {expected: "0000-01-01T00:00:00.000Z", parts: {year: "$zero"}},
+ {expected: "9999-01-01T00:00:00.000Z", parts: {year: "$tenThousandMinusOne"}},
+ {expected: "2016-11-01T00:00:00.000Z", parts: {year: 2017, month: "$minusOne"}},
+ {expected: "2016-12-01T00:00:00.000Z", parts: {year: 2017, month: "$zero"}},
+ {expected: "2018-01-01T00:00:00.000Z", parts: {year: 2017, month: "$thirteen"}},
+ {expected: "2016-12-30T00:00:00.000Z", parts: {year: 2017, day: "$minusOne"}},
+ {expected: "2016-12-31T00:00:00.000Z", parts: {year: 2017, day: "$zero"}},
+ {expected: "2017-03-02T00:00:00.000Z", parts: {year: 2017, day: "$sixtyOne"}},
+ {expected: "2016-12-31T23:00:00.000Z", parts: {year: 2017, hour: "$minusOne"}},
+ {expected: "2017-01-02T01:00:00.000Z", parts: {year: 2017, hour: "$twentyFive"}},
+ {expected: "2016-12-31T23:59:00.000Z", parts: {year: 2017, minute: "$minusOne"}},
+ {expected: "2017-01-01T00:00:00.000Z", parts: {year: 2017, minute: "$zero"}},
+ {expected: "2017-01-01T01:01:00.000Z", parts: {year: 2017, minute: "$sixtyOne"}},
+ {expected: "2016-12-31T23:59:59.000Z", parts: {year: 2017, second: "$minusOne"}},
+ {expected: "2017-01-01T00:01:01.000Z", parts: {year: 2017, second: "$sixtyOne"}},
+ {
+ expected: "2019-04-12T11:50:12.000Z",
+ parts: {year: 2017, second: "$seventyMillionAndSomething"}
+ },
+ {
+ expected: "1972-04-11T11:50:12.000Z",
+ parts: {year: 1970, second: "$seventyMillionAndSomething"}
+ },
+ {expected: "2017-08-07T08:51:58.000Z", parts: {year: 1970, second: "$secondsSinceEpoch"}},
+ {expected: "2016-12-31T23:59:59.999Z", parts: {year: 2017, millisecond: "$minusOne"}},
+ {expected: "2017-01-01T00:00:01.001Z", parts: {year: 2017, millisecond: "$thousandAndOne"}},
+ {
+ expected: "2017-01-01T19:57:21.012Z",
+ parts: {year: 2017, millisecond: "$seventyMillionAndSomething"}
+ },
+ {expected: "2017-01-18T09:14:55.918Z", parts: {year: 2017, millisecond: "$secondsSinceEpoch"}},
+ {
+ expected: "1970-01-01T19:57:21.012Z",
+ parts: {year: 1970, millisecond: "$seventyMillionAndSomething"}
+ },
+ {expected: "2017-08-07T08:51:58.551Z", parts: {year: 1970, millisecond: "$millisSinceEpoch"}},
+];
+
+tests.forEach(function(test) {
assert.eq(
[
- {_id: 0, date: ISODate("2017-06-19T15:00:00Z")},
+ {_id: 0, date: ISODate(test.expected)},
],
- coll.aggregate([
- {
- $match: {_id: 0},
- },
- {
- $project: {
- date: {
- '$dateFromParts':
- {year: "$year", month: "$month", day: "$day", hour: "$hour"}
- }
- }
+ coll.aggregate([{$project: {date: {"$dateFromParts": test.parts}}}]).toArray(),
+ tojson(test));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/*
+ * Testing double and Decimal128 millisecond values that either aren't representable as a
+ * 64-bit integer or that overflow when converted to a 64-bit microsecond value.
+ */
+coll.drop();
+
+assert.commandWorked(coll.insert([{
+ _id: 0,
+ veryBigDoubleA: 18014398509481984.0,
+ veryBigDecimal128A: NumberDecimal("9223372036854775807"), // 2^63-1
+ veryBigDoubleB: 18014398509481984000.0,
+ veryBigDecimal128B: NumberDecimal("9223372036854775807000"), // (2^63-1) * 1000
+}]));
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleA"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll,
+ pipeline,
+ ErrorCodes.DurationOverflow,
+ "Overflow casting from a lower-precision duration to a higher-precision duration");
+
+pipeline =
+ [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128A"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll,
+ pipeline,
+ ErrorCodes.DurationOverflow,
+ "Overflow casting from a lower-precision duration to a higher-precision duration");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleB"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 40515, "'millisecond' must evaluate to an integer");
+
+pipeline =
+ [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128B"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 40515, "'millisecond' must evaluate to an integer");
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing that year values are only allowed in the range [0, 9999] and that month, day, hour,
+ * and minute values are only allowed in the range [-32,768, 32,767]. */
+coll.drop();
+
+assert.commandWorked(coll.insert(
+ [{_id: 0, bigYear: 10000, smallYear: -1, prettyBigInt: 32768, prettyBigNegativeInt: -32769}]));
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: "$bigYear"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: "$smallYear"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigNegativeInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline =
+ [{$project: {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigNegativeInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigNegativeInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'minute' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [
+ {$project: {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigNegativeInt"}}}}
+];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'minute' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$bigYear"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999");
+
+pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$smallYear"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999");
+
+pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline =
+ [{$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigNegativeInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline =
+ [{$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigInt"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]");
+
+pipeline = [{
+ $project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigNegativeInt"}}}
+}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 31034, "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]");
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing wrong arguments */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0},
+]));
+
+pipelines = [
+ {code: 40519, pipeline: {'$project': {date: {'$dateFromParts': true}}}},
+ {code: 40519, pipeline: {'$project': {date: {'$dateFromParts': []}}}},
+
+ {code: 40518, pipeline: {'$project': {date: {'$dateFromParts': {unknown: true}}}}},
+
+ {code: 40516, pipeline: {'$project': {date: {'$dateFromParts': {}}}}},
+
+ {
+ code: 40489,
+ pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoWeekYear: 2017}}}}
+ },
+ {code: 40489, pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoWeek: 3}}}}},
+ {
+ code: 40489,
+ pipeline: {'$project': {date: {'$dateFromParts': {year: 2017, isoDayOfWeek: 5}}}}
+ },
+ {
+ code: 40489,
+ pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, year: 2017}}}}
+ },
+
+ {
+ code: 40525,
+ pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, month: 12}}}}
+ },
+ {code: 40525, pipeline: {'$project': {date: {'$dateFromParts': {isoWeekYear: 2017, day: 17}}}}},
+];
+
+pipelines.forEach(function(item) {
+    assertErrorCode(coll, item.pipeline, item.code, tojson(item.pipeline));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Testing values of the wrong type */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, floatField: 2017.5, decimalField: NumberDecimal("2017.5")},
+]));
+
+pipelines = [
+ {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "2017"}}}}},
+ {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: 2017.3}}}}},
+ {
+ code: 40515,
+ pipeline: {'$project': {date: {'$dateFromParts': {year: NumberDecimal("2017.3")}}}}
+ },
+ {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "$floatField"}}}}},
+ {code: 40515, pipeline: {'$project': {date: {'$dateFromParts': {year: "$decimalField"}}}}},
+];
+
+pipelines.forEach(function(item) {
+    assertErrorCode(coll, item.pipeline, item.code, tojson(item.pipeline));
+});
+
+/* --------------------------------------------------------------------------------------- */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, year: NumberDecimal("2017"), month: 6.0, day: NumberInt(19), hour: NumberLong(15)},
+ {
+ _id: 1,
+ year: NumberDecimal("2017"),
+ minute: 6.0,
+ second: NumberInt(19),
+ millisecond: NumberLong(15)
+ },
+ {_id: 2, isoWeekYear: NumberDecimal("2017"), isoWeek: 6.0, isoDayOfWeek: NumberInt(4)},
+]));
+
+assert.eq(
+ [
+ {_id: 0, date: ISODate("2017-06-19T15:00:00Z")},
+ ],
+ coll.aggregate([
+ {
+ $match: {_id: 0},
+ },
+ {
+ $project: {
+ date: {
+ '$dateFromParts':
+ {year: "$year", month: "$month", day: "$day", hour: "$hour"}
+ }
+ }
+ }
+ ])
+ .toArray());
+
+assert.eq(
+ [
+ {_id: 1, date: ISODate("2017-01-01T00:06:19.015Z")},
+ ],
+ coll.aggregate([
+ {
+ $match: {_id: 1},
+ },
+ {
+ $project: {
+ date: {
+ '$dateFromParts': {
+ year: "$year",
+ minute: "$minute",
+ second: "$second",
+ millisecond: "$millisecond"
+ }
+ }
}
- ])
- .toArray());
+ }
+ ])
+ .toArray());
+
+assert.eq(
+ [
+ {_id: 2, date: ISODate("2017-02-09T00:00:00Z")},
+ ],
+ coll.aggregate([
+ {
+ $match: {_id: 2},
+ },
+ {
+ $project: {
+ date: {
+ '$dateFromParts': {
+ isoWeekYear: "$isoWeekYear",
+ isoWeek: "$isoWeek",
+ isoDayOfWeek: "$isoDayOfWeek"
+ }
+ }
+ }
+ }
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {
+ _id: 0,
+ year: NumberDecimal("2017"),
+ month: 6.0,
+ day: NumberInt(19),
+ hour: NumberLong(15),
+ minute: NumberDecimal(1),
+ second: 51,
+ millisecond: 551
+ },
+]));
+
+var tests = [
+ {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "-04:00"},
+ {expected: ISODate("2017-06-19T12:01:51.551Z"), tz: "+03"},
+ {expected: ISODate("2017-06-19T18:21:51.551Z"), tz: "-0320"},
+ {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "America/New_York"},
+ {expected: ISODate("2017-06-19T13:01:51.551Z"), tz: "Europe/Amsterdam"},
+];
+
+tests.forEach(function(test) {
assert.eq(
[
- {_id: 1, date: ISODate("2017-01-01T00:06:19.015Z")},
+ {_id: 0, date: test.expected},
],
- coll.aggregate([
- {
- $match: {_id: 1},
- },
- {
- $project: {
- date: {
- '$dateFromParts': {
- year: "$year",
- minute: "$minute",
- second: "$second",
- millisecond: "$millisecond"
- }
- }
- }
+ coll.aggregate([{
+ $project: {
+ date: {
+ "$dateFromParts": {
+ year: "$year",
+ month: "$month",
+ day: "$day",
+ hour: "$hour",
+ minute: "$minute",
+ second: "$second",
+ millisecond: "$millisecond",
+ timezone: test.tz
+ }
+ }
}
- ])
- .toArray());
+ }])
+ .toArray(),
+ tojson(test));
+});
+/* --------------------------------------------------------------------------------------- */
+
+coll.drop();
+
+assert.commandWorked(coll.insert([
+ {
+ _id: 0,
+ isoWeekYear: NumberDecimal("2017"),
+ isoWeek: 25.0,
+ isoDayOfWeek: NumberInt(1),
+ hour: NumberLong(15),
+ minute: NumberDecimal(1),
+ second: 51,
+ millisecond: 551
+ },
+]));
+
+var tests = [
+ {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "-04:00"},
+ {expected: ISODate("2017-06-19T12:01:51.551Z"), tz: "+03"},
+ {expected: ISODate("2017-06-19T18:21:51.551Z"), tz: "-0320"},
+ {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "America/New_York"},
+ {expected: ISODate("2017-06-19T13:01:51.551Z"), tz: "Europe/Amsterdam"},
+];
+
+tests.forEach(function(test) {
assert.eq(
[
- {_id: 2, date: ISODate("2017-02-09T00:00:00Z")},
+ {_id: 0, date: test.expected},
],
- coll.aggregate([
- {
- $match: {_id: 2},
- },
- {
- $project: {
- date: {
- '$dateFromParts': {
- isoWeekYear: "$isoWeekYear",
- isoWeek: "$isoWeek",
- isoDayOfWeek: "$isoDayOfWeek"
- }
- }
- }
- }
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {
- _id: 0,
- year: NumberDecimal("2017"),
- month: 6.0,
- day: NumberInt(19),
- hour: NumberLong(15),
- minute: NumberDecimal(1),
- second: 51,
- millisecond: 551
- },
- ]));
-
- var tests = [
- {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "-04:00"},
- {expected: ISODate("2017-06-19T12:01:51.551Z"), tz: "+03"},
- {expected: ISODate("2017-06-19T18:21:51.551Z"), tz: "-0320"},
- {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "America/New_York"},
- {expected: ISODate("2017-06-19T13:01:51.551Z"), tz: "Europe/Amsterdam"},
- ];
-
- tests.forEach(function(test) {
- assert.eq(
- [
- {_id: 0, date: test.expected},
- ],
- coll.aggregate([{
- $project: {
- date: {
- "$dateFromParts": {
- year: "$year",
- month: "$month",
- day: "$day",
- hour: "$hour",
- minute: "$minute",
- second: "$second",
- millisecond: "$millisecond",
- timezone: test.tz
- }
+ coll.aggregate([{
+ $project: {
+ date: {
+ "$dateFromParts": {
+ isoWeekYear: "$isoWeekYear",
+ isoWeek: "$isoWeek",
+ isoDayOfWeek: "$isoDayOfWeek",
+ hour: "$hour",
+ minute: "$minute",
+ second: "$second",
+ millisecond: "$millisecond",
+ timezone: test.tz
}
}
- }])
- .toArray(),
- tojson(test));
- });
-
- /* --------------------------------------------------------------------------------------- */
-
- coll.drop();
-
- assert.commandWorked(coll.insert([
- {
- _id: 0,
- isoWeekYear: NumberDecimal("2017"),
- isoWeek: 25.0,
- isoDayOfWeek: NumberInt(1),
- hour: NumberLong(15),
- minute: NumberDecimal(1),
- second: 51,
- millisecond: 551
- },
- ]));
-
- var tests = [
- {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "-04:00"},
- {expected: ISODate("2017-06-19T12:01:51.551Z"), tz: "+03"},
- {expected: ISODate("2017-06-19T18:21:51.551Z"), tz: "-0320"},
- {expected: ISODate("2017-06-19T19:01:51.551Z"), tz: "America/New_York"},
- {expected: ISODate("2017-06-19T13:01:51.551Z"), tz: "Europe/Amsterdam"},
- ];
-
- tests.forEach(function(test) {
- assert.eq(
- [
- {_id: 0, date: test.expected},
- ],
- coll.aggregate([{
- $project: {
- date: {
- "$dateFromParts": {
- isoWeekYear: "$isoWeekYear",
- isoWeek: "$isoWeek",
- isoDayOfWeek: "$isoDayOfWeek",
- hour: "$hour",
- minute: "$minute",
- second: "$second",
- millisecond: "$millisecond",
- timezone: test.tz
- }
- }
- }
- }])
- .toArray(),
- tojson(test));
- });
-
- /* --------------------------------------------------------------------------------------- */
+ }
+ }])
+ .toArray(),
+ tojson(test));
+});
+/* --------------------------------------------------------------------------------------- */
})();
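
Before the next file, a note on what date_from_parts.js verifies: $dateFromParts is
lenient about out-of-range component values (year and isoWeekYear must stay in [0, 9999],
the other parts within [-32768, 32767]); within those bounds, an out-of-range part is
carried into the neighboring unit instead of raising an error. A minimal sketch of that
carrying behavior, assuming a mongo shell on a scratch database (the collection name is
illustrative):

    const c = db.from_parts_sketch;
    c.drop();
    assert.commandWorked(c.insert({_id: 0}));
    printjson(c.aggregate([{
        $project: {
            // Month 0 rolls back to December of the previous year.
            rolledBack: {$dateFromParts: {year: 2017, month: 0}},
            // Hour 25 carries one hour into the next day.
            rolledForward: {$dateFromParts: {year: 2017, hour: 25}}
        }
    }]).toArray());
    // Expected: [{_id: 0, rolledBack: ISODate("2016-12-01T00:00:00Z"),
    //             rolledForward: ISODate("2017-01-02T01:00:00Z")}]
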
diff --git a/jstests/aggregation/expressions/date_from_string.js b/jstests/aggregation/expressions/date_from_string.js
index 3e905d9fbfe..56c52de0f7e 100644
--- a/jstests/aggregation/expressions/date_from_string.js
+++ b/jstests/aggregation/expressions/date_from_string.js
@@ -1,796 +1,774 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertErrMsgContains.
(function() {
- "use strict";
-
- const coll = db.date_from_string;
-
- /* --------------------------------------------------------------------------------------- */
- /* Normal format tests. */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- let testCases = [
- {
- expect: "2017-07-04T11:56:02Z",
- inputString: "2017-07-04T11:56:02Z",
- format: "%Y-%m-%dT%H:%M:%SZ"
- },
- {
- expect: "2017-07-04T11:56:02.813Z",
- inputString: "2017-07-04T11:56:02.813Z",
- format: "%Y-%m-%dT%H:%M:%S.%LZ"
- },
- {
- expect: "2017-07-04T11:56:02.810Z",
- inputString: "2017-07-04T11:56:02.81Z",
- format: "%Y-%m-%dT%H:%M:%S.%LZ"
- },
- {
- expect: "2017-07-04T11:56:02.800Z",
- inputString: "2017-07-04T11:56:02.8Z",
- format: "%Y-%m-%dT%H:%M:%S.%LZ"
- },
- {
- expect: "2017-07-04T11:56:02Z",
- inputString: "2017-07-04T11:56.02",
- format: "%Y-%m-%dT%H:%M.%S"
- },
- {
- expect: "2017-07-04T11:56:02.813Z",
- inputString: "2017-07-04T11:56.02.813",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T11:56:02.810Z",
- inputString: "2017-07-04T11:56.02.81",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T11:56:02.800Z",
- inputString: "2017-07-04T11:56.02.8",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- ];
- testCases.forEach(function(testCase) {
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate(
- {$project: {date: {$dateFromString: {dateString: testCase.inputString}}}})
- .toArray(),
- tojson(testCase));
- assert.eq(
- [{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: testCase.inputString, format: testCase.format}
- }
- }
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
-    /* Normal format tests with time zone. */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- testCases = [
- {
- expect: "2017-07-04T10:56:02Z",
- inputString: "2017-07-04T11:56.02",
- format: "%Y-%m-%dT%H:%M.%S"
- },
- {
- expect: "2017-07-04T10:56:02.813Z",
- inputString: "2017-07-04T11:56.02.813",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T10:56:02.810Z",
- inputString: "2017-07-04T11:56.02.81",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T10:56:02.800Z",
- inputString: "2017-07-04T11:56.02.8",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- ];
- testCases.forEach(function(testCase) {
- assert.eq(
- [{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: testCase.inputString, timezone: "Europe/London"}
- }
+"use strict";
+
+const coll = db.date_from_string;
+
+/* --------------------------------------------------------------------------------------- */
+/* Normal format tests. */
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+let testCases = [
+ {
+ expect: "2017-07-04T11:56:02Z",
+ inputString: "2017-07-04T11:56:02Z",
+ format: "%Y-%m-%dT%H:%M:%SZ"
+ },
+ {
+ expect: "2017-07-04T11:56:02.813Z",
+ inputString: "2017-07-04T11:56:02.813Z",
+ format: "%Y-%m-%dT%H:%M:%S.%LZ"
+ },
+ {
+ expect: "2017-07-04T11:56:02.810Z",
+ inputString: "2017-07-04T11:56:02.81Z",
+ format: "%Y-%m-%dT%H:%M:%S.%LZ"
+ },
+ {
+ expect: "2017-07-04T11:56:02.800Z",
+ inputString: "2017-07-04T11:56:02.8Z",
+ format: "%Y-%m-%dT%H:%M:%S.%LZ"
+ },
+ {
+ expect: "2017-07-04T11:56:02Z",
+ inputString: "2017-07-04T11:56.02",
+ format: "%Y-%m-%dT%H:%M.%S"
+ },
+ {
+ expect: "2017-07-04T11:56:02.813Z",
+ inputString: "2017-07-04T11:56.02.813",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T11:56:02.810Z",
+ inputString: "2017-07-04T11:56.02.81",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T11:56:02.800Z",
+ inputString: "2017-07-04T11:56.02.8",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+];
+testCases.forEach(function(testCase) {
+ assert.eq(
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({$project: {date: {$dateFromString: {dateString: testCase.inputString}}}})
+ .toArray(),
+ tojson(testCase));
+ assert.eq(
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {dateString: testCase.inputString, format: testCase.format}
}
- })
- .toArray(),
- tojson(testCase));
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: testCase.inputString,
- timezone: "Europe/London",
- format: testCase.format
- }
- }
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Normal format tests with time zone. */
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+testCases = [
+ {
+ expect: "2017-07-04T10:56:02Z",
+ inputString: "2017-07-04T11:56.02",
+ format: "%Y-%m-%dT%H:%M.%S"
+ },
+ {
+ expect: "2017-07-04T10:56:02.813Z",
+ inputString: "2017-07-04T11:56.02.813",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T10:56:02.810Z",
+ inputString: "2017-07-04T11:56.02.81",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T10:56:02.800Z",
+ inputString: "2017-07-04T11:56.02.8",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+];
+testCases.forEach(function(testCase) {
+ assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: testCase.inputString, timezone: "Europe/London"}
}
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Normal format tests with UTC offset. */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- testCases = [
- {
- expect: "2017-07-04T10:56:02Z",
- inputString: "2017-07-04T11:56.02",
- format: "%Y-%m-%dT%H:%M.%S"
- },
- {
- expect: "2017-07-04T10:56:02.813Z",
- inputString: "2017-07-04T11:56.02.813",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T10:56:02.810Z",
- inputString: "2017-07-04T11:56.02.81",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- {
- expect: "2017-07-04T10:56:02.800Z",
- inputString: "2017-07-04T11:56.02.8",
- format: "%Y-%m-%dT%H:%M.%S.%L"
- },
- ];
- testCases.forEach(function(testCase) {
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: testCase.inputString, timezone: "+01:00"}
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+ assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: testCase.inputString,
+ timezone: "Europe/London",
+ format: testCase.format
}
}
- })
- .toArray(),
- tojson(testCase));
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: testCase.inputString,
- timezone: "+01:00",
- format: testCase.format
- }
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Normal format tests with UTC offset. */
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+testCases = [
+ {
+ expect: "2017-07-04T10:56:02Z",
+ inputString: "2017-07-04T11:56.02",
+ format: "%Y-%m-%dT%H:%M.%S"
+ },
+ {
+ expect: "2017-07-04T10:56:02.813Z",
+ inputString: "2017-07-04T11:56.02.813",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T10:56:02.810Z",
+ inputString: "2017-07-04T11:56.02.81",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+ {
+ expect: "2017-07-04T10:56:02.800Z",
+ inputString: "2017-07-04T11:56.02.8",
+ format: "%Y-%m-%dT%H:%M.%S.%L"
+ },
+];
+testCases.forEach(function(testCase) {
+ assert.eq(
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {$dateFromString: {dateString: testCase.inputString, timezone: "+01:00"}}
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+ assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: testCase.inputString,
+ timezone: "+01:00",
+ format: testCase.format
}
}
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Normal format tests from data. */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, dateString: "2017-07-06T12:35:37Z", format: "%Y-%m-%dT%H:%M:%SZ"},
- {_id: 1, dateString: "2017-07-06T12:35:37.513Z", format: "%Y-%m-%dT%H:%M:%S.%LZ"},
- {_id: 2, dateString: "2017-07-06T12:35:37", format: "%Y-%m-%dT%H:%M:%S"},
- {_id: 3, dateString: "2017-07-06T12:35:37.513", format: "%Y-%m-%dT%H:%M:%S.%L"},
- {_id: 4, dateString: "1960-07-10T12:10:37.448", format: "%Y-%m-%dT%H:%M:%S.%L"},
- ]));
-
- let expectedResults = [
- {"_id": 0, "date": ISODate("2017-07-06T12:35:37Z")},
- {"_id": 1, "date": ISODate("2017-07-06T12:35:37.513Z")},
- {"_id": 2, "date": ISODate("2017-07-06T12:35:37Z")},
- {"_id": 3, "date": ISODate("2017-07-06T12:35:37.513Z")},
- {"_id": 4, "date": ISODate("1960-07-10T12:10:37.448Z")},
- ];
- assert.eq(expectedResults,
- coll.aggregate([
- {
- $project: {date: {$dateFromString: {dateString: "$dateString"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- // Repeat the test with an explicit format specifier string.
- assert.eq(
- expectedResults,
- coll.aggregate([
- {
- $project:
- {date: {$dateFromString: {dateString: "$dateString", format: "$format"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- expectedResults = [
- {"_id": 0, "date": new Date(1499344537000)},
- {"_id": 1, "date": new Date(1499344537513)},
- {"_id": 2, "date": new Date(1499344537000)},
- {"_id": 3, "date": new Date(1499344537513)},
- {"_id": 4, "date": new Date(-299072962552)},
- ];
- assert.eq(expectedResults,
- coll.aggregate([
- {
- $project: {date: {$dateFromString: {dateString: "$dateString"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- // Repeat the test with an explicit format specifier string.
- assert.eq(
- expectedResults,
- coll.aggregate([
- {
- $project:
- {date: {$dateFromString: {dateString: "$dateString", format: "$format"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* Normal format tests from data, with time zone. */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, dateString: "2017-07-06T12:35:37.513", timezone: "GMT"},
- {_id: 1, dateString: "2017-07-06T12:35:37.513", timezone: "UTC"},
- {_id: 2, dateString: "1960-07-10T12:35:37.513", timezone: "America/New_York"},
- {_id: 3, dateString: "1960-07-10T12:35:37.513", timezone: "Europe/London"},
- {_id: 4, dateString: "2017-07-06T12:35:37.513", timezone: "America/Los_Angeles"},
- {_id: 5, dateString: "2017-07-06T12:35:37.513", timezone: "Europe/Paris"},
- {_id: 6, dateString: "2017-07-06T12:35:37.513", timezone: "+04:00"},
- ]));
-
- expectedResults = [
- {"_id": 0, "date": ISODate("2017-07-06T12:35:37.513Z")},
- {"_id": 1, "date": ISODate("2017-07-06T12:35:37.513Z")},
- {"_id": 2, "date": ISODate("1960-07-10T16:35:37.513Z")},
- {"_id": 3, "date": ISODate("1960-07-10T11:35:37.513Z")},
- {"_id": 4, "date": ISODate("2017-07-06T19:35:37.513Z")},
- {"_id": 5, "date": ISODate("2017-07-06T10:35:37.513Z")},
- {"_id": 6, "date": ISODate("2017-07-06T08:35:37.513Z")},
- ];
-
- assert.eq(
- expectedResults,
- coll.aggregate([
- {
- $project:
- {date: {$dateFromString: {dateString: "$dateString", timezone: "$timezone"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- // Repeat the test with an explicit format specifier string.
- assert.eq(expectedResults,
- coll.aggregate([
- {
- $project: {
- date: {
- $dateFromString: {
- dateString: "$dateString",
- timezone: "$timezone",
- format: "%Y-%m-%dT%H:%M:%S.%L"
- }
- }
- },
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* dateString from data with timezone as constant */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, dateString: "2017-07-06T12:35:37"},
- ]));
-
- assert.eq(
- [
- {"_id": 0, "date": ISODate("2017-07-06T03:35:37Z")},
- ],
- coll.aggregate([
- {
- $project: {
- date: {$dateFromString: {dateString: "$dateString", timezone: "Asia/Tokyo"}}
- },
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* dateString from constant with timezone from data */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, timezone: "Europe/London"},
- {_id: 1, timezone: "America/New_York"},
- {_id: 2, timezone: "-05:00"},
- ]));
-
- assert.eq(
- [
- {"_id": 0, "date": ISODate("2017-07-19T17:52:35.199Z")},
- {"_id": 1, "date": ISODate("2017-07-19T22:52:35.199Z")},
- {"_id": 2, "date": ISODate("2017-07-19T23:52:35.199Z")},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateFromString:
- {dateString: "2017-07-19T18:52:35.199", timezone: "$timezone"}
}
+ })
+ .toArray(),
+ tojson(testCase));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Normal format tests from data. */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, dateString: "2017-07-06T12:35:37Z", format: "%Y-%m-%dT%H:%M:%SZ"},
+ {_id: 1, dateString: "2017-07-06T12:35:37.513Z", format: "%Y-%m-%dT%H:%M:%S.%LZ"},
+ {_id: 2, dateString: "2017-07-06T12:35:37", format: "%Y-%m-%dT%H:%M:%S"},
+ {_id: 3, dateString: "2017-07-06T12:35:37.513", format: "%Y-%m-%dT%H:%M:%S.%L"},
+ {_id: 4, dateString: "1960-07-10T12:10:37.448", format: "%Y-%m-%dT%H:%M:%S.%L"},
+]));
+
+let expectedResults = [
+ {"_id": 0, "date": ISODate("2017-07-06T12:35:37Z")},
+ {"_id": 1, "date": ISODate("2017-07-06T12:35:37.513Z")},
+ {"_id": 2, "date": ISODate("2017-07-06T12:35:37Z")},
+ {"_id": 3, "date": ISODate("2017-07-06T12:35:37.513Z")},
+ {"_id": 4, "date": ISODate("1960-07-10T12:10:37.448Z")},
+];
+assert.eq(expectedResults,
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString"}}},
},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* BI format tests. */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- let pipelines = [
- {
- expect: "2017-01-01T00:00:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-01-01 00:00:00"}}}}
- },
- {
- expect: "2017-07-01T00:00:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-01 00:00:00"}}}}
- },
- {
- expect: "2017-07-06T00:00:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06"}}}}
- },
- {
- expect: "2017-07-06T00:00:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 00:00:00"}}}}
- },
- {
- expect: "2017-07-06T11:00:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:00:00"}}}}
- },
- {
- expect: "2017-07-06T11:36:00Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:36:00"}}}}
- },
- {
- expect: "2017-07-06T11:36:54Z",
- pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:36:54"}}}}
- },
- ];
- pipelines.forEach(function(pipeline) {
- assert.eq([{_id: 0, date: ISODate(pipeline.expect)}],
- coll.aggregate(pipeline.pipeline).toArray(),
- tojson(pipeline));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Tests with additional timezone information . */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- testCases = [
- // GMT based variants
- {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT"},
- {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+00"},
- {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+00:00"},
- {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+02"},
- {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+02:00"},
- {expect: "2017-07-14T09:02:44.771Z", inputString: "2017-07-14T12:02:44.771+03"},
- {expect: "2017-07-14T08:32:44.771Z", inputString: "2017-07-14T12:02:44.771+0330"},
- {expect: "2017-07-14T08:32:44.771Z", inputString: "2017-07-14T12:02:44.771+03:30"},
- // With timezone abbreviations
- {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 UTC"},
- {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 CEST"},
- {expect: "2017-07-14T17:02:44.771Z", inputString: "2017-07-14T12:02:44.771 EST"},
- {expect: "2017-07-14T19:02:44.771Z", inputString: "2017-07-14T12:02:44.771 PDT"},
- // A-I,K-Z are military time zones:
- // https://en.wikipedia.org/wiki/List_of_military_time_zones
- {expect: "2017-07-14T11:02:44.771Z", inputString: "2017-07-14T12:02:44.771 A"},
- {expect: "2017-07-14T01:02:44.771Z", inputString: "2017-07-14T12:02:44.771 L"},
- {expect: "2017-07-14T15:02:44.771Z", inputString: "2017-07-14T12:02:44.771 P"},
- {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 Z"},
- ];
- testCases.forEach(function(testCase) {
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate(
- {$project: {date: {$dateFromString: {dateString: testCase.inputString}}}})
- .toArray(),
- tojson(testCase));
- assert.eq([{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: testCase.inputString,
- format: "%Y-%m-%dT%H:%M:%S.%L%z"
- }
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+// Repeat the test with an explicit format specifier string.
+assert.eq(
+ expectedResults,
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString", format: "$format"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+expectedResults = [
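+    // The same instants as above, expressed as epoch milliseconds (negative for the 1960 date).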
+ {"_id": 0, "date": new Date(1499344537000)},
+ {"_id": 1, "date": new Date(1499344537513)},
+ {"_id": 2, "date": new Date(1499344537000)},
+ {"_id": 3, "date": new Date(1499344537513)},
+ {"_id": 4, "date": new Date(-299072962552)},
+];
+assert.eq(expectedResults,
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+// Repeat the test with an explicit format specifier string.
+assert.eq(
+ expectedResults,
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString", format: "$format"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* Normal format tests from data, with time zone. */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, dateString: "2017-07-06T12:35:37.513", timezone: "GMT"},
+ {_id: 1, dateString: "2017-07-06T12:35:37.513", timezone: "UTC"},
+ {_id: 2, dateString: "1960-07-10T12:35:37.513", timezone: "America/New_York"},
+ {_id: 3, dateString: "1960-07-10T12:35:37.513", timezone: "Europe/London"},
+ {_id: 4, dateString: "2017-07-06T12:35:37.513", timezone: "America/Los_Angeles"},
+ {_id: 5, dateString: "2017-07-06T12:35:37.513", timezone: "Europe/Paris"},
+ {_id: 6, dateString: "2017-07-06T12:35:37.513", timezone: "+04:00"},
+]));
+
+expectedResults = [
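+    // Offsets reflect the zone rules in effect on each date; e.g. America/New_York was on
+    // EDT (UTC-4) in July 1960, so 12:35 local becomes 16:35Z for _id 2.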
+ {"_id": 0, "date": ISODate("2017-07-06T12:35:37.513Z")},
+ {"_id": 1, "date": ISODate("2017-07-06T12:35:37.513Z")},
+ {"_id": 2, "date": ISODate("1960-07-10T16:35:37.513Z")},
+ {"_id": 3, "date": ISODate("1960-07-10T11:35:37.513Z")},
+ {"_id": 4, "date": ISODate("2017-07-06T19:35:37.513Z")},
+ {"_id": 5, "date": ISODate("2017-07-06T10:35:37.513Z")},
+ {"_id": 6, "date": ISODate("2017-07-06T08:35:37.513Z")},
+];
+
+assert.eq(
+ expectedResults,
+ coll.aggregate([
+ {
+ $project:
+ {date: {$dateFromString: {dateString: "$dateString", timezone: "$timezone"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+// Repeat the test with an explicit format specifier string.
+assert.eq(expectedResults,
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: "$dateString",
+ timezone: "$timezone",
+ format: "%Y-%m-%dT%H:%M:%S.%L"
}
}
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* BI format tests from data. */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, dateString: "2017-01-01 00:00:00"},
- {_id: 1, dateString: "2017-07-01 00:00:00"},
- {_id: 2, dateString: "2017-07-06"},
- {_id: 3, dateString: "2017-07-06 00:00:00"},
- {_id: 4, dateString: "2017-07-06 11:00:00"},
- {_id: 5, dateString: "2017-07-06 11:36:00"},
- {_id: 6, dateString: "2017-07-06 11:36:54"},
- ]));
-
- assert.eq(
- [
- {"_id": 0, "date": ISODate("2017-01-01T00:00:00Z")},
- {"_id": 1, "date": ISODate("2017-07-01T00:00:00Z")},
- {"_id": 2, "date": ISODate("2017-07-06T00:00:00Z")},
- {"_id": 3, "date": ISODate("2017-07-06T00:00:00Z")},
- {"_id": 4, "date": ISODate("2017-07-06T11:00:00Z")},
- {"_id": 5, "date": ISODate("2017-07-06T11:36:00Z")},
- {"_id": 6, "date": ISODate("2017-07-06T11:36:54Z")}
- ],
- coll.aggregate([
- {
- $project: {date: {$dateFromString: {dateString: "$dateString"}}},
+ },
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* dateString from data with timezone as constant */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, dateString: "2017-07-06T12:35:37"},
+]));
+
+assert.eq(
+ [
+ {"_id": 0, "date": ISODate("2017-07-06T03:35:37Z")},
+ ],
+ coll.aggregate([
+ {
+ $project:
+ {date: {$dateFromString: {dateString: "$dateString", timezone: "Asia/Tokyo"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* dateString from constant with timezone from data */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, timezone: "Europe/London"},
+ {_id: 1, timezone: "America/New_York"},
+ {_id: 2, timezone: "-05:00"},
+]));
+
+assert.eq(
+ [
+ {"_id": 0, "date": ISODate("2017-07-19T17:52:35.199Z")},
+ {"_id": 1, "date": ISODate("2017-07-19T22:52:35.199Z")},
+ {"_id": 2, "date": ISODate("2017-07-19T23:52:35.199Z")},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: "2017-07-19T18:52:35.199", timezone: "$timezone"}
+ }
},
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* Wacky format tests from data. */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, dateString: "July 4th, 2017"},
- {_id: 1, dateString: "July 4th, 2017 12:39:30 BST"},
- {_id: 2, dateString: "July 4th, 2017 11am"},
- {_id: 3, dateString: "July 4th, 2017 12pm"},
- {_id: 4, dateString: "7/4/17"},
- {_id: 5, dateString: "04-07-2017"},
- {_id: 6, dateString: "2017-Jul-04 noon"},
- {_id: 7, dateString: "2017-07-04 12:48:07 GMT+0545"},
- {_id: 8, dateString: "2017-07-04 12:48:07 GMT-0200"},
- ]));
-
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* BI format tests. */
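+/* These space-separated date/time strings carry no zone designator and parse as UTC. */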
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+let pipelines = [
+ {
+ expect: "2017-01-01T00:00:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-01-01 00:00:00"}}}}
+ },
+ {
+ expect: "2017-07-01T00:00:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-01 00:00:00"}}}}
+ },
+ {
+ expect: "2017-07-06T00:00:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06"}}}}
+ },
+ {
+ expect: "2017-07-06T00:00:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 00:00:00"}}}}
+ },
+ {
+ expect: "2017-07-06T11:00:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:00:00"}}}}
+ },
+ {
+ expect: "2017-07-06T11:36:00Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:36:00"}}}}
+ },
+ {
+ expect: "2017-07-06T11:36:54Z",
+ pipeline: {$project: {date: {$dateFromString: {dateString: "2017-07-06 11:36:54"}}}}
+ },
+];
+pipelines.forEach(function(pipeline) {
+ assert.eq([{_id: 0, date: ISODate(pipeline.expect)}],
+ coll.aggregate(pipeline.pipeline).toArray(),
+ tojson(pipeline));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Tests with additional timezone information. */
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+testCases = [
+ // GMT based variants
+ {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT"},
+ {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+00"},
+ {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+00:00"},
+ {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+02"},
+ {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 GMT+02:00"},
+ {expect: "2017-07-14T09:02:44.771Z", inputString: "2017-07-14T12:02:44.771+03"},
+ {expect: "2017-07-14T08:32:44.771Z", inputString: "2017-07-14T12:02:44.771+0330"},
+ {expect: "2017-07-14T08:32:44.771Z", inputString: "2017-07-14T12:02:44.771+03:30"},
+ // With timezone abbreviations
+ {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 UTC"},
+ {expect: "2017-07-14T10:02:44.771Z", inputString: "2017-07-14T12:02:44.771 CEST"},
+ {expect: "2017-07-14T17:02:44.771Z", inputString: "2017-07-14T12:02:44.771 EST"},
+ {expect: "2017-07-14T19:02:44.771Z", inputString: "2017-07-14T12:02:44.771 PDT"},
+    // A-I, K-Z are military time zones:
+ // https://en.wikipedia.org/wiki/List_of_military_time_zones
+ {expect: "2017-07-14T11:02:44.771Z", inputString: "2017-07-14T12:02:44.771 A"},
+ {expect: "2017-07-14T01:02:44.771Z", inputString: "2017-07-14T12:02:44.771 L"},
+ {expect: "2017-07-14T15:02:44.771Z", inputString: "2017-07-14T12:02:44.771 P"},
+ {expect: "2017-07-14T12:02:44.771Z", inputString: "2017-07-14T12:02:44.771 Z"},
+];
+testCases.forEach(function(testCase) {
assert.eq(
- [
- {"_id": 0, "date": ISODate("2017-07-04T00:00:00Z")},
- {"_id": 1, "date": ISODate("2017-07-04T11:39:30Z")},
- {"_id": 2, "date": ISODate("2017-07-04T11:00:00Z")},
- {"_id": 3, "date": ISODate("2017-07-04T12:00:00Z")},
- {"_id": 4, "date": ISODate("2017-07-04T00:00:00Z")},
- {"_id": 5, "date": ISODate("2017-07-04T00:00:00Z")},
- {"_id": 6, "date": ISODate("2017-07-04T12:00:00Z")},
- {"_id": 7, "date": ISODate("2017-07-04T07:03:07Z")},
- {"_id": 8, "date": ISODate("2017-07-04T14:48:07Z")},
- ],
- coll.aggregate([
- {
- $project: {date: {$dateFromString: {dateString: "$dateString"}}},
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- /* Tests formats that aren't supported with the normal $dateFromString parser. */
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
-
- testCases = [
- {inputString: "05 12 1988", format: "%d %m %Y", expect: "1988-12-05T00:00:00Z"},
- {inputString: "1992 04 26", format: "%Y %m %d", expect: "1992-04-26T00:00:00Z"},
- {inputString: "05*12*1988", format: "%d*%m*%Y", expect: "1988-12-05T00:00:00Z"},
- {inputString: "1992/04/26", format: "%Y/%m/%d", expect: "1992-04-26T00:00:00Z"},
- {inputString: "1992 % 04 % 26", format: "%Y %% %m %% %d", expect: "1992-04-26T00:00:00Z"},
- {
- inputString: "Day: 05 Month: 12 Year: 1988",
- format: "Day: %d Month: %m Year: %Y",
- expect: "1988-12-05T00:00:00Z"
- },
- {inputString: "Date: 1992/04/26", format: "Date: %Y/%m/%d", expect: "1992-04-26T00:00:00Z"},
- {inputString: "4/26/1992:+0445", format: "%m/%d/%Y:%z", expect: "1992-04-25T19:15:00Z"},
- {inputString: "4/26/1992:+285", format: "%m/%d/%Y:%Z", expect: "1992-04-25T19:15:00Z"},
- ];
- testCases.forEach(function(testCase) {
- assert.eq(
- [{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: testCase.inputString, format: testCase.format}
- }
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({$project: {date: {$dateFromString: {dateString: testCase.inputString}}}})
+ .toArray(),
+ tojson(testCase));
+ assert.eq(
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: testCase.inputString, format: "%Y-%m-%dT%H:%M:%S.%L%z"}
}
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Tests for ISO year, week of year, and day of the week. */
-
- testCases = [
- {inputString: "2017", format: "%G", expect: "2017-01-02T00:00:00Z"},
- {inputString: "2017, Week 53", format: "%G, Week %V", expect: "2018-01-01T00:00:00Z"},
- {inputString: "2017, Day 5", format: "%G, Day %u", expect: "2017-01-06T00:00:00Z"},
- {inputString: "53.7.2017", format: "%V.%u.%G", expect: "2018-01-07T00:00:00Z"},
- {inputString: "1.1.1", format: "%V.%u.%G", expect: "0001-01-01T00:00:00Z"},
- ];
- testCases.forEach(function(testCase) {
- assert.eq(
- [{_id: 0, date: ISODate(testCase.expect)}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: testCase.inputString, format: testCase.format}
- }
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* BI format tests from data. */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, dateString: "2017-01-01 00:00:00"},
+ {_id: 1, dateString: "2017-07-01 00:00:00"},
+ {_id: 2, dateString: "2017-07-06"},
+ {_id: 3, dateString: "2017-07-06 00:00:00"},
+ {_id: 4, dateString: "2017-07-06 11:00:00"},
+ {_id: 5, dateString: "2017-07-06 11:36:00"},
+ {_id: 6, dateString: "2017-07-06 11:36:54"},
+]));
+
+assert.eq(
+ [
+ {"_id": 0, "date": ISODate("2017-01-01T00:00:00Z")},
+ {"_id": 1, "date": ISODate("2017-07-01T00:00:00Z")},
+ {"_id": 2, "date": ISODate("2017-07-06T00:00:00Z")},
+ {"_id": 3, "date": ISODate("2017-07-06T00:00:00Z")},
+ {"_id": 4, "date": ISODate("2017-07-06T11:00:00Z")},
+ {"_id": 5, "date": ISODate("2017-07-06T11:36:00Z")},
+ {"_id": 6, "date": ISODate("2017-07-06T11:36:54Z")}
+ ],
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* Wacky format tests from data. */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, dateString: "July 4th, 2017"},
+ {_id: 1, dateString: "July 4th, 2017 12:39:30 BST"},
+ {_id: 2, dateString: "July 4th, 2017 11am"},
+ {_id: 3, dateString: "July 4th, 2017 12pm"},
+ {_id: 4, dateString: "7/4/17"},
+ {_id: 5, dateString: "04-07-2017"},
+ {_id: 6, dateString: "2017-Jul-04 noon"},
+ {_id: 7, dateString: "2017-07-04 12:48:07 GMT+0545"},
+ {_id: 8, dateString: "2017-07-04 12:48:07 GMT-0200"},
+]));
+
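+// Note that the lenient parser reads slash dates month-first ("7/4/17" is July 4th) but
+// dash dates day-first ("04-07-2017" is also July 4th).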
+assert.eq(
+ [
+ {"_id": 0, "date": ISODate("2017-07-04T00:00:00Z")},
+ {"_id": 1, "date": ISODate("2017-07-04T11:39:30Z")},
+ {"_id": 2, "date": ISODate("2017-07-04T11:00:00Z")},
+ {"_id": 3, "date": ISODate("2017-07-04T12:00:00Z")},
+ {"_id": 4, "date": ISODate("2017-07-04T00:00:00Z")},
+ {"_id": 5, "date": ISODate("2017-07-04T00:00:00Z")},
+ {"_id": 6, "date": ISODate("2017-07-04T12:00:00Z")},
+ {"_id": 7, "date": ISODate("2017-07-04T07:03:07Z")},
+ {"_id": 8, "date": ISODate("2017-07-04T14:48:07Z")},
+ ],
+ coll.aggregate([
+ {
+ $project: {date: {$dateFromString: {dateString: "$dateString"}}},
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* Tests formats that aren't supported with the normal $dateFromString parser. */
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
+
+testCases = [
+ {inputString: "05 12 1988", format: "%d %m %Y", expect: "1988-12-05T00:00:00Z"},
+ {inputString: "1992 04 26", format: "%Y %m %d", expect: "1992-04-26T00:00:00Z"},
+ {inputString: "05*12*1988", format: "%d*%m*%Y", expect: "1988-12-05T00:00:00Z"},
+ {inputString: "1992/04/26", format: "%Y/%m/%d", expect: "1992-04-26T00:00:00Z"},
+ {inputString: "1992 % 04 % 26", format: "%Y %% %m %% %d", expect: "1992-04-26T00:00:00Z"},
+ {
+ inputString: "Day: 05 Month: 12 Year: 1988",
+ format: "Day: %d Month: %m Year: %Y",
+ expect: "1988-12-05T00:00:00Z"
+ },
+ {inputString: "Date: 1992/04/26", format: "Date: %Y/%m/%d", expect: "1992-04-26T00:00:00Z"},
+ {inputString: "4/26/1992:+0445", format: "%m/%d/%Y:%z", expect: "1992-04-25T19:15:00Z"},
+ {inputString: "4/26/1992:+285", format: "%m/%d/%Y:%Z", expect: "1992-04-25T19:15:00Z"},
+];
+testCases.forEach(function(testCase) {
+ assert.eq(
+ [{_id: 0, date: ISODate(testCase.expect)}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {dateString: testCase.inputString, format: testCase.format}
}
- })
- .toArray(),
- tojson(testCase));
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Testing whether it throws the right assert for missing elements of a date/time string. */
-
- coll.drop();
-
- assert.writeOK(coll.insert([
- {_id: 0},
- ]));
-
- pipelines = [
- [{'$project': {date: {$dateFromString: {dateString: "July 4th"}}}}],
- [{'$project': {date: {$dateFromString: {dateString: "12:50:53"}}}}],
- ];
-
- pipelines.forEach(function(pipeline) {
- assertErrCodeAndErrMsgContains(coll,
- pipeline,
- ErrorCodes.ConversionFailure,
- "an incomplete date/time string has been found");
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* Testing whether it throws the right assert for broken date/time strings. */
-
- coll.drop();
-
- assert.writeOK(coll.insert([
- {_id: 0},
- ]));
-
- pipelines = [
- [{'$project': {date: {$dateFromString: {dateString: "2017, 12:50:53"}}}}],
- [{'$project': {date: {$dateFromString: {dateString: "60.Monday1770/06:59"}}}}],
- ];
-
- pipelines.forEach(function(pipeline) {
- assertErrCodeAndErrMsgContains(
- coll, pipeline, ErrorCodes.ConversionFailure, "Error parsing date string");
- });
-
- /* --------------------------------------------------------------------------------------- */
- /* NULL returns. */
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-06-19T15:13:25.713Z")},
- {_id: 1, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: null},
- {_id: 2, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: undefined},
- ]));
-
- pipelines = [
- [{$project: {date: {$dateFromString: {dateString: "$tz"}}}}, {$sort: {_id: 1}}],
- [
- {
- $project:
- {date: {$dateFromString: {dateString: "2017-07-11T17:05:19Z", timezone: "$tz"}}}
- },
- {$sort: {_id: 1}}
- ],
- ];
- pipelines.forEach(function(pipeline) {
- assert.eq([{_id: 0, date: null}, {_id: 1, date: null}, {_id: 2, date: null}],
- coll.aggregate(pipeline).toArray(),
- tojson(pipeline));
- });
-
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0},
- {_id: 1, format: null},
- {_id: 2, format: undefined},
- ]));
-
+ }
+ })
+ .toArray(),
+ tojson(testCase));
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* Tests for ISO year, week of year, and day of the week. */
+
+testCases = [
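+    // %G, %V and %u are the ISO week-date fields. Unspecified fields default to 1, so "%G"
+    // alone yields Monday of ISO week 1 (2017-01-02), and week 53 of 2017 rolls into 2018.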
+ {inputString: "2017", format: "%G", expect: "2017-01-02T00:00:00Z"},
+ {inputString: "2017, Week 53", format: "%G, Week %V", expect: "2018-01-01T00:00:00Z"},
+ {inputString: "2017, Day 5", format: "%G, Day %u", expect: "2017-01-06T00:00:00Z"},
+ {inputString: "53.7.2017", format: "%V.%u.%G", expect: "2018-01-07T00:00:00Z"},
+ {inputString: "1.1.1", format: "%V.%u.%G", expect: "0001-01-01T00:00:00Z"},
+];
+testCases.forEach(function(testCase) {
assert.eq(
- [{_id: 0, date: null}, {_id: 1, date: null}, {_id: 2, date: null}],
+ [{_id: 0, date: ISODate(testCase.expect)}],
coll.aggregate({
$project: {
date: {
- $dateFromString: {dateString: "2017-07-11T17:05:19Z", format: "$format"}
+ $dateFromString: {dateString: testCase.inputString, format: testCase.format}
}
}
})
- .toArray());
+ .toArray(),
+ tojson(testCase));
+});
- /* --------------------------------------------------------------------------------------- */
- /* Parse errors. */
+/* --------------------------------------------------------------------------------------- */
+/* Testing that the right error is raised for missing elements of a date/time string. */
- let pipeline = [{$project: {date: {$dateFromString: "no-object"}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40540, "$dateFromString only supports an object as an argument");
+coll.drop();
+
+assert.writeOK(coll.insert([
+ {_id: 0},
+]));
- pipeline = [{$project: {date: {$dateFromString: {"unknown": "$tz"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, 40541, "Unrecognized argument");
+pipelines = [
+ [{'$project': {date: {$dateFromString: {dateString: "July 4th"}}}}],
+ [{'$project': {date: {$dateFromString: {dateString: "12:50:53"}}}}],
+];
- pipeline = [{$project: {date: {$dateFromString: {dateString: 5}}}}];
+pipelines.forEach(function(pipeline) {
assertErrCodeAndErrMsgContains(coll,
pipeline,
ErrorCodes.ConversionFailure,
- "$dateFromString requires that 'dateString' be a string");
-
- /* --------------------------------------------------------------------------------------- */
- /* Passing in time zone with date/time string. */
-
- pipeline = {
- $project: {
- date: {
- $dateFromString:
- {dateString: "2017-07-12T22:23:55 GMT+02:00", timezone: "Europe/Amsterdam"}
- }
- }
- };
- assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
-
- pipeline = {
- $project: {
- date: {
- $dateFromString:
- {dateString: "2017-07-12T22:23:55Z", timezone: "Europe/Amsterdam"}
- }
- }
- };
- assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
-
- pipeline = {
- $project: {
- date: {
- $dateFromString: {
- dateString: "2017-07-12T22:23:55 America/New_York",
- timezone: "Europe/Amsterdam"
- }
- }
- }
- };
- assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+ "an incomplete date/time string has been found");
+});
- pipeline = {
- $project: {date: {$dateFromString: {dateString: "2017-07-12T22:23:55 Europe/Amsterdam"}}}
- };
- assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+/* --------------------------------------------------------------------------------------- */
+/* Testing that the right error is raised for broken date/time strings. */
- /* --------------------------------------------------------------------------------------- */
- /* Error cases for $dateFromString with format specifier string. */
+coll.drop();
- // Test umatched format specifier string.
- pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y-%m-%d"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing");
+assert.writeOK(coll.insert([
+ {_id: 0},
+]));
- pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data");
+pipelines = [
+ [{'$project': {date: {$dateFromString: {dateString: "2017, 12:50:53"}}}}],
+ [{'$project': {date: {$dateFromString: {dateString: "60.Monday1770/06:59"}}}}],
+];
- // Test missing specifier prefix '%'.
- pipeline = [{$project: {date: {$dateFromString: {dateString: "1992-26-04", format: "Y-d-m"}}}}];
+pipelines.forEach(function(pipeline) {
assertErrCodeAndErrMsgContains(
- coll, pipeline, ErrorCodes.ConversionFailure, "Format literal not found");
-
- pipeline = [{$project: {date: {$dateFromString: {dateString: "1992", format: "%n"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, 18536, "Invalid format character");
-
- pipeline = [{
- $project: {
- date: {
- $dateFromString:
- {dateString: "4/26/1992:+0445", format: "%m/%d/%Y:%z", timezone: "+0500"}
- }
+ coll, pipeline, ErrorCodes.ConversionFailure, "Error parsing date string");
+});
+
+/* --------------------------------------------------------------------------------------- */
+/* NULL returns. */
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-06-19T15:13:25.713Z")},
+ {_id: 1, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: null},
+ {_id: 2, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: undefined},
+]));
+
+pipelines = [
+ [{$project: {date: {$dateFromString: {dateString: "$tz"}}}}, {$sort: {_id: 1}}],
+ [
+ {
+ $project:
+ {date: {$dateFromString: {dateString: "2017-07-11T17:05:19Z", timezone: "$tz"}}}
+ },
+ {$sort: {_id: 1}}
+ ],
+];
+pipelines.forEach(function(pipeline) {
+ assert.eq([{_id: 0, date: null}, {_id: 1, date: null}, {_id: 2, date: null}],
+ coll.aggregate(pipeline).toArray(),
+ tojson(pipeline));
+});
+
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0},
+ {_id: 1, format: null},
+ {_id: 2, format: undefined},
+]));
+
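+// A nullish 'format' likewise yields null, even though 'dateString' is a valid constant.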
+assert.eq(
+ [{_id: 0, date: null}, {_id: 1, date: null}, {_id: 2, date: null}],
+ coll.aggregate({
+ $project:
+ {date: {$dateFromString: {dateString: "2017-07-11T17:05:19Z", format: "$format"}}}
+ })
+ .toArray());
+
+/* --------------------------------------------------------------------------------------- */
+/* Parse errors. */
+
+let pipeline = [{$project: {date: {$dateFromString: "no-object"}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 40540, "$dateFromString only supports an object as an argument");
+
+pipeline = [{$project: {date: {$dateFromString: {"unknown": "$tz"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 40541, "Unrecognized argument");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: 5}}}}];
+assertErrCodeAndErrMsgContains(coll,
+ pipeline,
+ ErrorCodes.ConversionFailure,
+ "$dateFromString requires that 'dateString' be a string");
+
+/* --------------------------------------------------------------------------------------- */
+/* Passing in time zone with date/time string. */
+
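+// Zone information embedded in the string conflicts with an explicit 'timezone' argument,
+// and a tz database name inside the string is itself rejected, so all of these fail.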
+pipeline = {
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: "2017-07-12T22:23:55 GMT+02:00", timezone: "Europe/Amsterdam"}
}
- }];
- assertErrCodeAndErrMsgContains(
- coll,
- pipeline,
- ErrorCodes.ConversionFailure,
- "you cannot pass in a date/time string with GMT offset together with a timezone argument");
-
- pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: 5}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40684, "$dateFromString requires that 'format' be a string");
-
- pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: {}}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 40684, "$dateFromString requires that 'format' be a string");
-
- pipeline =
- [{$project: {date: {$dateFromString: {dateString: "ISO Day 6", format: "ISO Day %u"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid");
-
- pipeline =
- [{$project: {date: {$dateFromString: {dateString: "ISO Week 52", format: "ISO Week %V"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid");
-
- pipeline = [{
- $project: {
- date: {$dateFromString: {dateString: "ISO Week 1, 2018", format: "ISO Week %V, %Y"}}
+ }
+};
+assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+
+pipeline = {
+ $project: {
+ date: {$dateFromString: {dateString: "2017-07-12T22:23:55Z", timezone: "Europe/Amsterdam"}}
+ }
+};
+assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+
+pipeline = {
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: "2017-07-12T22:23:55 America/New_York", timezone: "Europe/Amsterdam"}
}
- }];
- assertErrCodeAndErrMsgContains(coll,
- pipeline,
- ErrorCodes.ConversionFailure,
- "Mixing of ISO dates with natural dates is not allowed");
-
- pipeline =
- [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d/%G"}}}}];
- assertErrCodeAndErrMsgContains(coll,
- pipeline,
- ErrorCodes.ConversionFailure,
- "Mixing of ISO dates with natural dates is not allowed");
-
- // Test embedded null bytes in the 'dateString' and 'format' fields.
- pipeline =
- [{$project: {date: {$dateFromString: {dateString: "12/31\0/2018", format: "%m/%d/%Y"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing");
-
- pipeline =
- [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d\0/%Y"}}}}];
- assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data");
+ }
+};
+assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+
+pipeline = {
+ $project: {date: {$dateFromString: {dateString: "2017-07-12T22:23:55 Europe/Amsterdam"}}}
+};
+assertErrorCode(coll, pipeline, ErrorCodes.ConversionFailure);
+
+/* --------------------------------------------------------------------------------------- */
+/* Error cases for $dateFromString with format specifier string. */
+
+// Test unmatched format specifier string.
+pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y-%m-%d"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data");
+
+// Test missing specifier prefix '%'.
+pipeline = [{$project: {date: {$dateFromString: {dateString: "1992-26-04", format: "Y-d-m"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, ErrorCodes.ConversionFailure, "Format literal not found");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "1992", format: "%n"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 18536, "Invalid format character");
+
+pipeline = [{
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: "4/26/1992:+0445", format: "%m/%d/%Y:%z", timezone: "+0500"}
+ }
+ }
+}];
+assertErrCodeAndErrMsgContains(
+ coll,
+ pipeline,
+ ErrorCodes.ConversionFailure,
+ "you cannot pass in a date/time string with GMT offset together with a timezone argument");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: 5}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 40684, "$dateFromString requires that 'format' be a string");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: {}}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 40684, "$dateFromString requires that 'format' be a string");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "ISO Day 6", format: "ISO Day %u"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid");
+
+pipeline =
+ [{$project: {date: {$dateFromString: {dateString: "ISO Week 52", format: "ISO Week %V"}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid");
+
+pipeline = [{
+ $project: {date: {$dateFromString: {dateString: "ISO Week 1, 2018", format: "ISO Week %V, %Y"}}}
+}];
+assertErrCodeAndErrMsgContains(coll,
+ pipeline,
+ ErrorCodes.ConversionFailure,
+ "Mixing of ISO dates with natural dates is not allowed");
+
+pipeline = [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d/%G"}}}}];
+assertErrCodeAndErrMsgContains(coll,
+ pipeline,
+ ErrorCodes.ConversionFailure,
+ "Mixing of ISO dates with natural dates is not allowed");
+
+// Test embedded null bytes in the 'dateString' and 'format' fields.
+pipeline =
+ [{$project: {date: {$dateFromString: {dateString: "12/31\0/2018", format: "%m/%d/%Y"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing");
+
+pipeline =
+ [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d\0/%Y"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data");
})();
diff --git a/jstests/aggregation/expressions/date_from_string_on_error.js b/jstests/aggregation/expressions/date_from_string_on_error.js
index 2947c8ed35f..c944074657e 100644
--- a/jstests/aggregation/expressions/date_from_string_on_error.js
+++ b/jstests/aggregation/expressions/date_from_string_on_error.js
@@ -2,132 +2,76 @@
* Tests for the $dateFromString expression with the optional 'onError' parameter.
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrMsgContains.
+load("jstests/aggregation/extras/utils.js"); // For assertErrMsgContains.
- const onErrorValue = ISODate("2017-07-04T11:56:02Z");
- const coll = db.date_from_string_on_error;
- coll.drop();
+const onErrorValue = ISODate("2017-07-04T11:56:02Z");
+const coll = db.date_from_string_on_error;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
+assert.writeOK(coll.insert({_id: 0}));
- // Test that the 'onError' value is returned when 'dateString' is not a valid date/time.
- for (let inputDate of["July 4th",
- "12:50:53",
- "2017",
- "60.Monday1770/06:59",
- "Not even close",
- "July 4th, 10000"]) {
- assert.eq(
- [{_id: 0, date: onErrorValue}],
- coll.aggregate({
- $project:
- {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
- })
- .toArray());
- }
-
- // Test that the 'onError' value is returned when 'dateString' is not a string.
- for (let inputDate of[5, {year: 2018, month: 2, day: 5}, ["2018-02-05"]]) {
- assert.eq(
- [{_id: 0, date: onErrorValue}],
- coll.aggregate({
- $project:
- {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
- })
- .toArray());
- }
-
- // Test that the 'onError' value is ignored when 'dateString' is nullish.
- for (let inputDate of[null, undefined, "$missing"]) {
- assert.eq(
- [{_id: 0, date: null}],
- coll.aggregate({
- $project:
- {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
- })
- .toArray());
- }
+// Test that the 'onError' value is returned when 'dateString' is not a valid date/time.
+for (let inputDate of ["July 4th",
+ "12:50:53",
+ "2017",
+ "60.Monday1770/06:59",
+ "Not even close",
+ "July 4th, 10000"]) {
+ assert.eq(
+ [{_id: 0, date: onErrorValue}],
+ coll.aggregate({
+ $project: {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
+ })
+ .toArray());
+}
- // Test that the 'onError' value is returned for unmatched format strings.
- for (let inputFormat of["%Y", "%Y-%m-%dT%H", "Y-m-d"]) {
- assert.eq([{_id: 0, date: onErrorValue}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: "2018-02-06",
- format: inputFormat,
- onError: onErrorValue
- }
- }
- }
- })
- .toArray());
- }
+// Test that the 'onError' value is returned when 'dateString' is not a string.
+for (let inputDate of [5, {year: 2018, month: 2, day: 5}, ["2018-02-05"]]) {
+ assert.eq(
+ [{_id: 0, date: onErrorValue}],
+ coll.aggregate({
+ $project: {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
+ })
+ .toArray());
+}
- // Test that null is returned when the 'timezone' or 'format' is nullish, regardless of the
- // 'onError' value.
- for (let nullishValue of[null, undefined, "$missing"]) {
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: "2018-02-06T11:56:02Z",
- format: nullishValue,
- onError: onErrorValue
- }
- }
- }
- })
- .toArray());
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: "2018-02-06T11:56:02Z",
- timezone: nullishValue,
- onError: onErrorValue
- }
- }
- }
- })
- .toArray());
- }
+// Test that the 'onError' value is ignored when 'dateString' is nullish.
+for (let inputDate of [null, undefined, "$missing"]) {
+ assert.eq(
+ [{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {date: {$dateFromString: {dateString: inputDate, onError: onErrorValue}}}
+ })
+ .toArray());
+}
- // Test that onError is returned when the input is not a string and other parameters are
- // nullish.
+// Test that the 'onError' value is returned for unmatched format strings.
+for (let inputFormat of ["%Y", "%Y-%m-%dT%H", "Y-m-d"]) {
assert.eq(
[{_id: 0, date: onErrorValue}],
coll.aggregate({
$project: {
- date: {$dateFromString: {dateString: 5, format: null, onError: onErrorValue}}
+ date: {
+ $dateFromString:
+ {dateString: "2018-02-06", format: inputFormat, onError: onErrorValue}
+ }
}
})
.toArray());
- assert.eq([{_id: 0, date: onErrorValue}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString:
- {dateString: 5, timezone: "$missing", onError: onErrorValue}
- }
- }
- })
- .toArray());
+}
- // Test that onError is ignored when the input is an invalid string and other parameters are
- // nullish.
+// Test that null is returned when the 'timezone' or 'format' is nullish, regardless of the
+// 'onError' value.
+for (let nullishValue of [null, undefined, "$missing"]) {
assert.eq([{_id: 0, date: null}],
coll.aggregate({
$project: {
date: {
$dateFromString: {
- dateString: "Invalid date string",
- format: null,
+ dateString: "2018-02-06T11:56:02Z",
+ format: nullishValue,
onError: onErrorValue
}
}
@@ -139,55 +83,95 @@
$project: {
date: {
$dateFromString: {
- dateString: "Invalid date string",
- timezone: "$missing",
+ dateString: "2018-02-06T11:56:02Z",
+ timezone: nullishValue,
onError: onErrorValue
}
}
}
})
.toArray());
+}
+
+// Test that onError is returned when the input is not a string and other parameters are
+// nullish.
+assert.eq(
+ [{_id: 0, date: onErrorValue}],
+ coll.aggregate({
+ $project:
+ {date: {$dateFromString: {dateString: 5, format: null, onError: onErrorValue}}}
+ })
+ .toArray());
+assert.eq(
+ [{_id: 0, date: onErrorValue}],
+ coll.aggregate({
+ $project: {
+ date:
+ {$dateFromString: {dateString: 5, timezone: "$missing", onError: onErrorValue}}
+ }
+ })
+ .toArray());
+
+// Test that onError is ignored when the input is an invalid string and other parameters are
+// nullish.
+assert.eq(
+ [{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString:
+ {dateString: "Invalid date string", format: null, onError: onErrorValue}
+ }
+ }
+ })
+ .toArray());
+assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: "Invalid date string",
+ timezone: "$missing",
+ onError: onErrorValue
+ }
+ }
+ }
+ })
+ .toArray());
- // Test that 'onError' can be any type, not just an ISODate.
- for (let onError of[{}, 5, "Not a date", null, undefined]) {
- assert.eq(
- [{_id: 0, date: onError}],
- coll.aggregate({
- $project: {date: {$dateFromString: {dateString: "invalid", onError: onError}}}
- })
- .toArray());
- }
- // Test that a missing 'onError' value results in no output field when used within a $project
- // stage.
+// Test that 'onError' can be any type, not just an ISODate.
+for (let onError of [{}, 5, "Not a date", null, undefined]) {
assert.eq(
- [{_id: 0}],
+ [{_id: 0, date: onError}],
coll.aggregate(
- {$project: {date: {$dateFromString: {dateString: "invalid", onError: "$missing"}}}})
+ {$project: {date: {$dateFromString: {dateString: "invalid", onError: onError}}}})
.toArray());
+}
+// Test that a missing 'onError' value results in no output field when used within a $project
+// stage.
+assert.eq(
+ [{_id: 0}],
+ coll.aggregate(
+ {$project: {date: {$dateFromString: {dateString: "invalid", onError: "$missing"}}}})
+ .toArray());
- // Test that 'onError' is ignored when the 'format' is invalid.
- assertErrCodeAndErrMsgContains(
- coll,
- [{
- $project: {
- date: {
- $dateFromString: {dateString: "4/26/1992", format: 5, onError: onErrorValue}
- }
- }
- }],
- 40684,
- "$dateFromString requires that 'format' be a string");
+// Test that 'onError' is ignored when the 'format' is invalid.
+assertErrCodeAndErrMsgContains(
+ coll,
+ [{
+ $project:
+ {date: {$dateFromString: {dateString: "4/26/1992", format: 5, onError: onErrorValue}}}
+ }],
+ 40684,
+ "$dateFromString requires that 'format' be a string");
- assertErrCodeAndErrMsgContains(
- coll,
- [{
- $project: {
- date: {
- $dateFromString:
- {dateString: "4/26/1992", format: "%n", onError: onErrorValue}
- }
- }
- }],
- 18536,
- "Invalid format character '%n' in format string");
+assertErrCodeAndErrMsgContains(
+ coll,
+ [{
+ $project: {
+ date: {$dateFromString: {dateString: "4/26/1992", format: "%n", onError: onErrorValue}}
+ }
+ }],
+ 18536,
+ "Invalid format character '%n' in format string");
})();
diff --git a/jstests/aggregation/expressions/date_from_string_on_null.js b/jstests/aggregation/expressions/date_from_string_on_null.js
index 12b7d673984..caf7cf1216d 100644
--- a/jstests/aggregation/expressions/date_from_string_on_null.js
+++ b/jstests/aggregation/expressions/date_from_string_on_null.js
@@ -2,67 +2,65 @@
* Tests for the $dateFromString expression with the optional 'onNull' parameter.
*/
(function() {
- "use strict";
+"use strict";
- const onNullValue = ISODate("2017-07-04T11:56:02Z");
- const coll = db.date_from_string_on_null;
- coll.drop();
+const onNullValue = ISODate("2017-07-04T11:56:02Z");
+const coll = db.date_from_string_on_null;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
+assert.writeOK(coll.insert({_id: 0}));
- // Test that the 'onNull' value is returned when the 'dateString' is nullish.
- for (let inputDate of[null, undefined, "$missing"]) {
- assert.eq(
- [{_id: 0, date: onNullValue}],
- coll.aggregate({
- $project:
- {date: {$dateFromString: {dateString: inputDate, onNull: onNullValue}}}
- })
- .toArray());
- }
+// Test that the 'onNull' value is returned when the 'dateString' is nullish.
+for (let inputDate of [null, undefined, "$missing"]) {
+ assert.eq(
+ [{_id: 0, date: onNullValue}],
+ coll.aggregate(
+ {$project: {date: {$dateFromString: {dateString: inputDate, onNull: onNullValue}}}})
+ .toArray());
+}
- // Test that null is returned when the 'timezone' or 'format' is nullish, regardless of the
- // 'onNull' value.
- for (let nullishValue of[null, undefined, "$missing"]) {
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: "2018-02-06T11:56:02Z",
- format: nullishValue,
- onNull: onNullValue
- }
+// Test that null is returned when the 'timezone' or 'format' is nullish, regardless of the
+// 'onNull' value.
+for (let nullishValue of [null, undefined, "$missing"]) {
+ assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: "2018-02-06T11:56:02Z",
+ format: nullishValue,
+ onNull: onNullValue
}
}
- })
- .toArray());
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateFromString: {
- dateString: "2018-02-06T11:56:02Z",
- timezone: nullishValue,
- onNull: onNullValue
- }
+ }
+ })
+ .toArray());
+ assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateFromString: {
+ dateString: "2018-02-06T11:56:02Z",
+ timezone: nullishValue,
+ onNull: onNullValue
}
}
- })
- .toArray());
- }
+ }
+ })
+ .toArray());
+}
- // Test that 'onNull' can be any type, not just an ISODate.
- for (let onNull of[{}, 5, "Not a date", null, undefined]) {
- assert.eq(
- [{_id: 0, date: onNull}],
- coll.aggregate(
- {$project: {date: {$dateFromString: {dateString: "$missing", onNull: onNull}}}})
- .toArray());
- }
+// Test that 'onNull' can be any type, not just an ISODate.
+for (let onNull of [{}, 5, "Not a date", null, undefined]) {
assert.eq(
- [{_id: 0}],
+ [{_id: 0, date: onNull}],
coll.aggregate(
- {$project: {date: {$dateFromString: {dateString: "$missing", onNull: "$missing"}}}})
+ {$project: {date: {$dateFromString: {dateString: "$missing", onNull: onNull}}}})
.toArray());
+}
+assert.eq(
+ [{_id: 0}],
+ coll.aggregate(
+ {$project: {date: {$dateFromString: {dateString: "$missing", onNull: "$missing"}}}})
+ .toArray());
})();
diff --git a/jstests/aggregation/expressions/date_to_parts.js b/jstests/aggregation/expressions/date_to_parts.js
index c1a41abf0b1..47344f5deda 100644
--- a/jstests/aggregation/expressions/date_to_parts.js
+++ b/jstests/aggregation/expressions/date_to_parts.js
@@ -1,195 +1,109 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
(function() {
- "use strict";
+"use strict";
- const coll = db.dateToParts;
- coll.drop();
+const coll = db.dateToParts;
+coll.drop();
- /* --------------------------------------------------------------------------------------- */
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "UTC"},
- {_id: 1, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "Europe/London"},
- {_id: 2, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "America/New_York", iso: true},
- {_id: 3, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "America/New_York", iso: false},
- ]));
+/* --------------------------------------------------------------------------------------- */
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "UTC"},
+ {_id: 1, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "Europe/London"},
+ {_id: 2, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "America/New_York", iso: true},
+ {_id: 3, date: new ISODate("2017-06-19T15:13:25.713Z"), tz: "America/New_York", iso: false},
+]));
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: 0,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 1,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 2,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 3,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}, {$sort: {_id: 1}}])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}, {$sort: {_id: 1}}])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: 0,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 1,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 16,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 16, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 2,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 11,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 11, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 3,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 11,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([
- {$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}},
- {$sort: {_id: 1}}
- ])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 11, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([
+ {$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}},
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: 0,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 1,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 16,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 16, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 2,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 11,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- {
+ date:
+ {year: 2017, month: 6, day: 19, hour: 11, minute: 13, second: 25, millisecond: 713}
+ },
+ {
_id: 3,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 11,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([
- {
- $project: {
- date:
- {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}}
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 11, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([
+ {
+ $project:
+ {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}}}
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: 0,
date: {
isoWeekYear: 2017,
@@ -200,8 +114,8 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- {
+ },
+ {
_id: 1,
date: {
isoWeekYear: 2017,
@@ -212,8 +126,8 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- {
+ },
+ {
_id: 2,
date: {
isoWeekYear: 2017,
@@ -224,8 +138,8 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- {
+ },
+ {
_id: 3,
date: {
isoWeekYear: 2017,
@@ -236,21 +150,20 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- ],
- coll.aggregate([
- {
- $project: {
- date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}}
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+ },
+ ],
+ coll.aggregate([
+ {
+ $project:
+ {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}}}
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: 2,
date: {
isoWeekYear: 2017,
@@ -261,104 +174,72 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- {
+ },
+ {
_id: 3,
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 11,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([
- {$match: {iso: {$exists: true}}},
- {
- $project: {
- date:
- {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": "$iso"}}
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 11, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([
+ {$match: {iso: {$exists: true}}},
+ {
+ $project:
+ {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": "$iso"}}}
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- /* Tests with timestamp */
- assert(coll.drop());
+/* --------------------------------------------------------------------------------------- */
+/* Tests with timestamp */
+assert(coll.drop());
- assert.writeOK(coll.insert([
- {
- _id: ObjectId("58c7cba47bbadf523cf2c313"),
- date: new ISODate("2017-06-19T15:13:25.713Z"),
- tz: "Europe/London"
- },
- ]));
+assert.writeOK(coll.insert([
+ {
+ _id: ObjectId("58c7cba47bbadf523cf2c313"),
+ date: new ISODate("2017-06-19T15:13:25.713Z"),
+ tz: "Europe/London"
+ },
+]));
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: ObjectId("58c7cba47bbadf523cf2c313"),
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 15,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}]).toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 15, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}]).toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: ObjectId("58c7cba47bbadf523cf2c313"),
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 16,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}}])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 16, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz"}}}}])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: ObjectId("58c7cba47bbadf523cf2c313"),
- date: {
- year: 2017,
- month: 6,
- day: 19,
- hour: 16,
- minute: 13,
- second: 25,
- millisecond: 713
- }
- },
- ],
- coll.aggregate([{
- $project: {
- date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}}
- }
- }])
- .toArray());
+ date:
+ {year: 2017, month: 6, day: 19, hour: 16, minute: 13, second: 25, millisecond: 713}
+ },
+ ],
+ coll.aggregate([{
+ $project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": false}}}
+ }])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: ObjectId("58c7cba47bbadf523cf2c313"),
date: {
isoWeekYear: 2017,
@@ -369,86 +250,88 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
second: 25,
millisecond: 713
}
- },
- ],
- coll.aggregate([{
- $project:
- {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}}}
- }])
- .toArray());
+ },
+ ],
+ coll.aggregate([{
+ $project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": true}}}
+ }])
+ .toArray());
- assert.eq(
- [
- {
+assert.eq(
+ [
+ {
_id: ObjectId("58c7cba47bbadf523cf2c313"),
- date:
- {year: 2017, month: 3, day: 14, hour: 10, minute: 53, second: 24, millisecond: 0}
- },
- ],
- coll.aggregate([{
- $project:
- {date: {'$dateToParts': {date: "$_id", "timezone": "$tz", "iso8601": false}}}
- }])
- .toArray());
-
- /* --------------------------------------------------------------------------------------- */
- assert(coll.drop());
+ date: {year: 2017, month: 3, day: 14, hour: 10, minute: 53, second: 24, millisecond: 0}
+ },
+ ],
+ coll.aggregate([{
+ $project: {date: {'$dateToParts': {date: "$_id", "timezone": "$tz", "iso8601": false}}}
+ }])
+ .toArray());
- assert.writeOK(coll.insert([
- {_id: 0, date: ISODate("2017-06-27T12:00:20Z")},
- ]));
+/* --------------------------------------------------------------------------------------- */
+assert(coll.drop());
- assert.eq(
- [
- {_id: 0, date: null},
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", timezone: "$tz"}}}}])
- .toArray());
+assert.writeOK(coll.insert([
+ {_id: 0, date: ISODate("2017-06-27T12:00:20Z")},
+]));
- /* --------------------------------------------------------------------------------------- */
- assert(coll.drop());
+assert.eq(
+ [
+ {_id: 0, date: null},
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", timezone: "$tz"}}}}])
+ .toArray());
- assert.writeOK(coll.insert([
- {_id: 0, date: ISODate("2017-06-27T12:00:20Z")},
- ]));
+/* --------------------------------------------------------------------------------------- */
+assert(coll.drop());
- assert.eq(
- [
- {_id: 0, date: null},
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", iso8601: "$iso8601"}}}}])
- .toArray());
+assert.writeOK(coll.insert([
+ {_id: 0, date: ISODate("2017-06-27T12:00:20Z")},
+]));
- /* --------------------------------------------------------------------------------------- */
- assert(coll.drop());
+assert.eq(
+ [
+ {_id: 0, date: null},
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date", iso8601: "$iso8601"}}}}])
+ .toArray());
- assert.writeOK(coll.insert([
- {_id: 0, tz: "Europe/London"},
- ]));
+/* --------------------------------------------------------------------------------------- */
+assert(coll.drop());
- assert.eq(
- [
- {_id: 0, date: null},
- ],
- coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}]).toArray());
+assert.writeOK(coll.insert([
+ {_id: 0, tz: "Europe/London"},
+]));
- /* --------------------------------------------------------------------------------------- */
+assert.eq(
+ [
+ {_id: 0, date: null},
+ ],
+ coll.aggregate([{$project: {date: {'$dateToParts': {date: "$date"}}}}]).toArray());
- let pipeline = {$project: {date: {'$dateToParts': {"timezone": "$tz"}}}};
- assertErrorCode(coll, pipeline, 40522);
+/* --------------------------------------------------------------------------------------- */
- pipeline = {
- $project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": 5}}}
- };
- assertErrorCode(coll, pipeline, 40521);
+let pipeline = {$project: {date: {'$dateToParts': {"timezone": "$tz"}}}};
+assertErrorCode(coll, pipeline, 40522);
- pipeline = {$project: {date: {'$dateToParts': {date: 42}}}};
- assertErrorCode(coll, pipeline, 16006);
+pipeline = {
+ $project: {date: {'$dateToParts': {date: "$date", "timezone": "$tz", "iso8601": 5}}}
+};
+assertErrorCode(coll, pipeline, 40521);
- pipeline = {$project: {date: {'$dateToParts': {date: "$date", "timezone": 5}}}};
- assertErrorCode(coll, pipeline, 40517);
+pipeline = {
+ $project: {date: {'$dateToParts': {date: 42}}}
+};
+assertErrorCode(coll, pipeline, 16006);
- pipeline = {$project: {date: {'$dateToParts': {date: "$date", "timezone": "DoesNot/Exist"}}}};
- assertErrorCode(coll, pipeline, 40485);
+pipeline = {
+ $project: {date: {'$dateToParts': {date: "$date", "timezone": 5}}}
+};
+assertErrorCode(coll, pipeline, 40517);
+pipeline = {
+ $project: {date: {'$dateToParts': {date: "$date", "timezone": "DoesNot/Exist"}}}
+};
+assertErrorCode(coll, pipeline, 40485);
})();
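
The hunk above only reflows the $dateToParts tests; the expression itself decomposes a BSON date into calendar parts, optionally in a named timezone and optionally on the ISO-8601 week-date calendar. As a minimal standalone shell sketch (the collection name 'dateparts_demo' is hypothetical, not part of the test suite):

    // Decompose one date in a named timezone.
    const demo = db.dateparts_demo;
    demo.drop();
    assert.writeOK(demo.insert({_id: 0, when: ISODate("2017-06-19T15:13:25.713Z")}));
    // America/New_York is UTC-4 in June, so the hour shifts from 15 to 11.
    printjson(demo.aggregate([{
        $project: {parts: {$dateToParts: {date: "$when", timezone: "America/New_York"}}}
    }]).toArray());
    // Expected shape: [{_id: 0, parts: {year: 2017, month: 6, day: 19, hour: 11, ...}}]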
diff --git a/jstests/aggregation/expressions/date_to_string.js b/jstests/aggregation/expressions/date_to_string.js
index a1cbaa83fd9..b1cc145b627 100644
--- a/jstests/aggregation/expressions/date_to_string.js
+++ b/jstests/aggregation/expressions/date_to_string.js
@@ -1,294 +1,286 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode
(function() {
- "use strict";
+"use strict";
- const coll = db.date_to_string;
- coll.drop();
+const coll = db.date_to_string;
+coll.drop();
- /* --------------------------------------------------------------------------------------- */
+/* --------------------------------------------------------------------------------------- */
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "UTC"},
- {_id: 1, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Europe/London"},
- {_id: 2, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "America/New_York"},
- {_id: 3, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Australia/Eucla"},
- {_id: 4, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Asia/Kathmandu"},
- {_id: 5, date: new ISODate("1935-07-10T11:36:37.133Z"), tz: "Europe/Amsterdam"},
- {_id: 6, date: new ISODate("1900-07-10T11:41:22.418Z"), tz: "America/Caracas"},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "UTC"},
+ {_id: 1, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Europe/London"},
+ {_id: 2, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "America/New_York"},
+ {_id: 3, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Australia/Eucla"},
+ {_id: 4, date: new ISODate("2017-07-04T14:56:42.911Z"), tz: "Asia/Kathmandu"},
+ {_id: 5, date: new ISODate("1935-07-10T11:36:37.133Z"), tz: "Europe/Amsterdam"},
+ {_id: 6, date: new ISODate("1900-07-10T11:41:22.418Z"), tz: "America/Caracas"},
+]));
- assert.eq(
- [
- {_id: 0, date: "2017-07-04 14:56:42 +0000 (0 minutes)"},
- {_id: 1, date: "2017-07-04 15:56:42 +0100 (60 minutes)"},
- {_id: 2, date: "2017-07-04 10:56:42 -0400 (-240 minutes)"},
- {_id: 3, date: "2017-07-04 23:41:42 +0845 (525 minutes)"},
- {_id: 4, date: "2017-07-04 20:41:42 +0545 (345 minutes)"},
- {_id: 5, date: "1935-07-10 12:56:09 +0119 (79 minutes)"},
- {_id: 6, date: "1900-07-10 07:13:42 -0427 (-267 minutes)"},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateToString: {
- format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
- date: "$date",
- timezone: "$tz"
- }
- }
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: "2017-07-04 14:56:42 +0000 (0 minutes)"},
+ {_id: 1, date: "2017-07-04 15:56:42 +0100 (60 minutes)"},
+ {_id: 2, date: "2017-07-04 10:56:42 -0400 (-240 minutes)"},
+ {_id: 3, date: "2017-07-04 23:41:42 +0845 (525 minutes)"},
+ {_id: 4, date: "2017-07-04 20:41:42 +0545 (345 minutes)"},
+ {_id: 5, date: "1935-07-10 12:56:09 +0119 (79 minutes)"},
+ {_id: 6, date: "1900-07-10 07:13:42 -0427 (-267 minutes)"},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateToString: {
+ format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
+ date: "$date",
+ timezone: "$tz"
+ }
+ }
+ }
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- coll.drop();
+/* --------------------------------------------------------------------------------------- */
+coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
- {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
- {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
+ {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
+ {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
+]));
- assert.eq(
- [
- {_id: 0, date: "2017-01-04 10:08:51 -0500 (-300 minutes)"},
- {_id: 1, date: "2017-07-04 11:09:12 -0400 (-240 minutes)"},
- {_id: 2, date: "2017-12-04 10:09:14 -0500 (-300 minutes)"},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateToString: {
- format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
- date: "$date",
- timezone: "America/New_York"
- }
- }
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: "2017-01-04 10:08:51 -0500 (-300 minutes)"},
+ {_id: 1, date: "2017-07-04 11:09:12 -0400 (-240 minutes)"},
+ {_id: 2, date: "2017-12-04 10:09:14 -0500 (-300 minutes)"},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateToString: {
+ format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
+ date: "$date",
+ timezone: "America/New_York"
+ }
+ }
+ }
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- coll.drop();
+/* --------------------------------------------------------------------------------------- */
+coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
- {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
- {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
+ {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
+ {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
+]));
- assert.eq(
- [
- {_id: 0, date: "2017-01-04 15:08:51 +0000 (0 minutes)"},
- {_id: 1, date: "2017-07-04 15:09:12 +0000 (0 minutes)"},
- {_id: 2, date: "2017-12-04 15:09:14 +0000 (0 minutes)"},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateToString:
- {format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)", date: "$date"}
- }
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: "2017-01-04 15:08:51 +0000 (0 minutes)"},
+ {_id: 1, date: "2017-07-04 15:09:12 +0000 (0 minutes)"},
+ {_id: 2, date: "2017-12-04 15:09:14 +0000 (0 minutes)"},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateToString: {format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)", date: "$date"}
+ }
+ }
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- coll.drop();
+/* --------------------------------------------------------------------------------------- */
+coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-01-01T15:08:51.911Z")},
- {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
- {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-01-01T15:08:51.911Z")},
+ {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
+ {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
+]));
- assert.eq(
- [
- {_id: 0, date: "Natural: 2017-W1-01, ISO: 2016-W7-52"},
- {_id: 1, date: "Natural: 2017-W3-27, ISO: 2017-W2-27"},
- {_id: 2, date: "Natural: 2017-W2-49, ISO: 2017-W1-49"},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateToString:
- {format: "Natural: %Y-W%w-%U, ISO: %G-W%u-%V", date: "$date"}
- }
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: "Natural: 2017-W1-01, ISO: 2016-W7-52"},
+ {_id: 1, date: "Natural: 2017-W3-27, ISO: 2017-W2-27"},
+ {_id: 2, date: "Natural: 2017-W2-49, ISO: 2017-W1-49"},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateToString: {format: "Natural: %Y-W%w-%U, ISO: %G-W%u-%V", date: "$date"}
+ }
+ }
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- /* Test that missing expressions, turn into BSON null values */
- coll.drop();
+/* --------------------------------------------------------------------------------------- */
+/* Test that missing expressions turn into BSON null values */
+coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
- {_id: 1, date: new ISODate("2017-01-04T15:08:51.911Z"), timezone: null},
- {_id: 2, date: new ISODate("2017-01-04T15:08:51.911Z"), timezone: undefined},
- {_id: 3, timezone: "Europe/Oslo"},
- {_id: 4, date: null, timezone: "Europe/Oslo"},
- {_id: 5, date: undefined, timezone: "Europe/Oslo"},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
+ {_id: 1, date: new ISODate("2017-01-04T15:08:51.911Z"), timezone: null},
+ {_id: 2, date: new ISODate("2017-01-04T15:08:51.911Z"), timezone: undefined},
+ {_id: 3, timezone: "Europe/Oslo"},
+ {_id: 4, date: null, timezone: "Europe/Oslo"},
+ {_id: 5, date: undefined, timezone: "Europe/Oslo"},
+]));
- assert.eq(
- [
- {_id: 0, date: null},
- {_id: 1, date: null},
- {_id: 2, date: null},
- {_id: 3, date: null},
- {_id: 4, date: null},
- {_id: 5, date: null},
- ],
- coll.aggregate([
- {
- $project: {
- date: {
- $dateToString: {
- format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
- date: "$date",
- timezone: "$timezone"
- }
- }
- }
- },
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: null},
+ {_id: 1, date: null},
+ {_id: 2, date: null},
+ {_id: 3, date: null},
+ {_id: 4, date: null},
+ {_id: 5, date: null},
+ ],
+ coll.aggregate([
+ {
+ $project: {
+ date: {
+ $dateToString: {
+ format: "%Y-%m-%d %H:%M:%S %z (%Z minutes)",
+ date: "$date",
+ timezone: "$timezone"
+ }
+ }
+ }
+ },
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- /* Test that the default format is "%Y-%m-%dT%H:%M:%S.%LZ" if none specified. */
- coll.drop();
+/* --------------------------------------------------------------------------------------- */
+/* Test that the default format is "%Y-%m-%dT%H:%M:%S.%LZ" if none is specified. */
+coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
- {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
- {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
- ]));
+assert.writeOK(coll.insert([
+ {_id: 0, date: new ISODate("2017-01-04T15:08:51.911Z")},
+ {_id: 1, date: new ISODate("2017-07-04T15:09:12.911Z")},
+ {_id: 2, date: new ISODate("2017-12-04T15:09:14.911Z")},
+]));
- assert.eq(
- [
- {_id: 0, date: "2017-01-04T10:08:51.911Z"},
- {_id: 1, date: "2017-07-04T11:09:12.911Z"},
- {_id: 2, date: "2017-12-04T10:09:14.911Z"},
- ],
- coll.aggregate([
- {$project: {date: {$dateToString: {date: "$date", timezone: "America/New_York"}}}},
- {$sort: {_id: 1}}
- ])
- .toArray());
+assert.eq(
+ [
+ {_id: 0, date: "2017-01-04T10:08:51.911Z"},
+ {_id: 1, date: "2017-07-04T11:09:12.911Z"},
+ {_id: 2, date: "2017-12-04T10:09:14.911Z"},
+ ],
+ coll.aggregate([
+ {$project: {date: {$dateToString: {date: "$date", timezone: "America/New_York"}}}},
+ {$sort: {_id: 1}}
+ ])
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
- /* Test that null is returned when 'format' evaluates to nullish. */
- coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
+/* --------------------------------------------------------------------------------------- */
+/* Test that null is returned when 'format' evaluates to nullish. */
+coll.drop();
+assert.writeOK(coll.insert({_id: 0}));
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: new ISODate("2017-01-04T15:08:51.911Z"),
- format: null,
- }
+assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString: {
+ date: new ISODate("2017-01-04T15:08:51.911Z"),
+ format: null,
}
}
- })
- .toArray());
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: new ISODate("2017-01-04T15:08:51.911Z"),
- format: undefined,
- }
+ }
+ })
+ .toArray());
+assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString: {
+ date: new ISODate("2017-01-04T15:08:51.911Z"),
+ format: undefined,
}
}
- })
- .toArray());
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: new ISODate("2017-01-04T15:08:51.911Z"),
- format: "$missing",
- }
+ }
+ })
+ .toArray());
+assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString: {
+ date: new ISODate("2017-01-04T15:08:51.911Z"),
+ format: "$missing",
}
}
- })
- .toArray());
+ }
+ })
+ .toArray());
- /* --------------------------------------------------------------------------------------- */
+/* --------------------------------------------------------------------------------------- */
- let pipeline = [{
- $project:
- {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: 5}}}
- }];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 18533, "$dateToString requires that 'format' be a string");
+let pipeline = [
+ {$project: {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: 5}}}}
+];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 18533, "$dateToString requires that 'format' be a string");
- pipeline =
- [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", timezone: "$tz"}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 18628, "Missing 'date' parameter to $dateToString");
+pipeline = [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", timezone: "$tz"}}}}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 18628, "Missing 'date' parameter to $dateToString");
- pipeline = [{
- $project: {
- date: {
- $dateToString: {
- date: new ISODate("2017-01-04T15:08:51.911Z"),
- format: "%Y-%m-%d %H:%M:%S",
- timezone: 5
- }
+pipeline = [{
+ $project: {
+ date: {
+ $dateToString: {
+ date: new ISODate("2017-01-04T15:08:51.911Z"),
+ format: "%Y-%m-%d %H:%M:%S",
+ timezone: 5
}
}
- }];
- assertErrCodeAndErrMsgContains(coll, pipeline, 40517, "timezone must evaluate to a string");
+ }
+}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 40517, "timezone must evaluate to a string");
- pipeline = [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", date: 42}}}}];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 16006, "can't convert from BSON type double to Date");
+pipeline = [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", date: 42}}}}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 16006, "can't convert from BSON type double to Date");
- pipeline = [{
- $project: {
- date: {
- $dateToString: {
- date: new ISODate("2017-01-04T15:08:51.911Z"),
- format: "%Y-%m-%d %H:%M:%S",
- timezone: "DoesNotExist"
- }
+pipeline = [{
+ $project: {
+ date: {
+ $dateToString: {
+ date: new ISODate("2017-01-04T15:08:51.911Z"),
+ format: "%Y-%m-%d %H:%M:%S",
+ timezone: "DoesNotExist"
}
}
- }];
- assertErrCodeAndErrMsgContains(coll, pipeline, 40485, "unrecognized time zone identifier");
+ }
+}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 40485, "unrecognized time zone identifier");
- pipeline = [{
- $project:
- {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%"}}}
- }];
- assertErrCodeAndErrMsgContains(coll, pipeline, 18535, "Unmatched '%' at end of format string");
+pipeline = [{
+ $project: {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%"}}}
+}];
+assertErrCodeAndErrMsgContains(coll, pipeline, 18535, "Unmatched '%' at end of format string");
- // Fails for unknown format specifier.
- pipeline = [{
- $project: {
- date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%n"}}
- }
- }];
- assertErrCodeAndErrMsgContains(
- coll, pipeline, 18536, "Invalid format character '%n' in format string");
+// Fails for unknown format specifier.
+pipeline = [{
+ $project: {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%n"}}}
+}];
+assertErrCodeAndErrMsgContains(
+ coll, pipeline, 18536, "Invalid format character '%n' in format string");
})();
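
For readers of the reflowed test above: $dateToString renders a date through a strftime-like format string, where %z is the numeric UTC offset and %Z is the offset in minutes. A minimal sketch, assuming a connected mongo shell and a hypothetical collection name:

    const demo = db.datetostring_demo;
    demo.drop();
    assert.writeOK(demo.insert({_id: 0, when: ISODate("2017-07-04T14:56:42.911Z")}));
    // Europe/London is UTC+1 (BST) in July.
    printjson(demo.aggregate([{
        $project: {
            s: {$dateToString: {format: "%Y-%m-%d %H:%M:%S %z", date: "$when", timezone: "Europe/London"}}
        }
    }]).toArray());
    // Expected: [{_id: 0, s: "2017-07-04 15:56:42 +0100"}]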
diff --git a/jstests/aggregation/expressions/date_to_string_on_null.js b/jstests/aggregation/expressions/date_to_string_on_null.js
index e5b3ec50f1b..7b3bdc07538 100644
--- a/jstests/aggregation/expressions/date_to_string_on_null.js
+++ b/jstests/aggregation/expressions/date_to_string_on_null.js
@@ -2,76 +2,71 @@
* Tests for the $dateToString expression with the optional 'onNull' parameter.
*/
(function() {
- "use strict";
+"use strict";
- const onNullValue = ISODate("2017-07-04T11:56:02Z");
- const coll = db.date_to_string_on_null;
- coll.drop();
+const onNullValue = ISODate("2017-07-04T11:56:02Z");
+const coll = db.date_to_string_on_null;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0}));
+assert.writeOK(coll.insert({_id: 0}));
- for (let nullishValue of[null, undefined, "$missing"]) {
- // Test that the 'onNull' value is returned when the 'date' is nullish.
- assert.eq([{_id: 0, date: onNullValue}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: nullishValue,
- format: "%Y-%m-%d %H:%M:%S",
- onNull: onNullValue
- }
- }
- }
- })
- .toArray());
-
- // Test that null is returned when the 'timezone' is nullish, regardless of the 'onNull'
- // value.
- assert.eq([{_id: 0, date: null}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: "2018-02-06T11:56:02Z",
- format: "%Y-%m-%d %H:%M:%S",
- timezone: nullishValue,
- onNull: onNullValue
- }
- }
- }
- })
- .toArray());
- }
+for (let nullishValue of [null, undefined, "$missing"]) {
+ // Test that the 'onNull' value is returned when the 'date' is nullish.
+ assert.eq(
+ [{_id: 0, date: onNullValue}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString:
+ {date: nullishValue, format: "%Y-%m-%d %H:%M:%S", onNull: onNullValue}
+ }
+ }
+ })
+ .toArray());
- // Test that 'onNull' can be any type, not just an ISODate.
- for (let onNullValue of[{}, 5, "Not a date", null, undefined]) {
- assert.eq([{_id: 0, date: onNullValue}],
- coll.aggregate({
- $project: {
- date: {
- $dateToString: {
- date: "$missing",
- format: "%Y-%m-%d %H:%M:%S",
- onNull: onNullValue
- }
+ // Test that null is returned when the 'timezone' is nullish, regardless of the 'onNull'
+ // value.
+ assert.eq([{_id: 0, date: null}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString: {
+ date: "2018-02-06T11:56:02Z",
+ format: "%Y-%m-%d %H:%M:%S",
+ timezone: nullishValue,
+ onNull: onNullValue
}
}
- })
- .toArray());
- }
+ }
+ })
+ .toArray());
+}
- // Test that 'onNull' can be missing, resulting in no output field when used within a $project
- // stage.
+// Test that 'onNull' can be any type, not just an ISODate.
+for (let onNullValue of [{}, 5, "Not a date", null, undefined]) {
assert.eq(
- [{_id: 0}],
+ [{_id: 0, date: onNullValue}],
coll.aggregate({
$project: {
date: {
$dateToString:
- {date: "$missing", format: "%Y-%m-%d %H:%M:%S", onNull: "$missing"}
+ {date: "$missing", format: "%Y-%m-%d %H:%M:%S", onNull: onNullValue}
}
}
})
.toArray());
+}
+
+// Test that 'onNull' can be missing, resulting in no output field when used within a $project
+// stage.
+assert.eq([{_id: 0}],
+ coll.aggregate({
+ $project: {
+ date: {
+ $dateToString:
+ {date: "$missing", format: "%Y-%m-%d %H:%M:%S", onNull: "$missing"}
+ }
+ }
+ })
+ .toArray());
})();
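
The behavior this test pins down is that 'onNull' substitutes for the result only when 'date' is nullish; a nullish 'timezone' still yields null regardless of 'onNull'. A minimal sketch with a hypothetical collection name:

    const demo = db.onnull_demo;
    demo.drop();
    assert.writeOK(demo.insert({_id: 0}));
    // "$missing" resolves to a missing field, so the 'onNull' value is returned.
    printjson(demo.aggregate([{
        $project: {s: {$dateToString: {date: "$missing", format: "%Y-%m-%d", onNull: "no date"}}}
    }]).toArray());
    // Expected: [{_id: 0, s: "no date"}]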
diff --git a/jstests/aggregation/expressions/expression_mod.js b/jstests/aggregation/expressions/expression_mod.js
index 63469ca8177..677f634bc7e 100644
--- a/jstests/aggregation/expressions/expression_mod.js
+++ b/jstests/aggregation/expressions/expression_mod.js
@@ -3,92 +3,92 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
+"use strict";
- var testDB = db.getSiblingDB("expression_mod");
- assert.commandWorked(testDB.dropDatabase());
- var coll = testDB.getCollection("test");
+var testDB = db.getSiblingDB("expression_mod");
+assert.commandWorked(testDB.dropDatabase());
+var coll = testDB.getCollection("test");
- //
- // Confirm different input numeric types are evaluated correctly.
- //
+//
+// Confirm different input numeric types are evaluated correctly.
+//
- // Aggregate checking various combinations of number types.
- // The $match portion ensures they are of the correct type as the shell turns the ints back to
- // doubles at the end so we can not check types with assert.
- coll.save({});
- var result = coll.aggregate({
- $project: {
- _id: 0,
- dub_dub: {$mod: [138.5, 3.0]},
- dub_int: {$mod: [138.5, NumberLong(3)]},
- dub_long: {$mod: [138.5, NumberInt(3)]},
- int_dub: {$mod: [NumberInt(8), 3.25]},
- int_dubint: {$mod: [NumberInt(8), 3.0]},
- int_int: {$mod: [NumberInt(8), NumberInt(3)]},
- int_long: {$mod: [NumberInt(8), NumberLong(3)]},
- long_dub: {$mod: [NumberLong(8), 3.25]},
- long_dubint: {$mod: [NumberLong(8), 3.0]},
- long_dublong: {$mod: [NumberLong(500000000000), 450000000000.0]},
- long_int: {$mod: [NumberLong(8), NumberInt(3)]},
- long_long: {$mod: [NumberLong(8), NumberLong(3)]},
- verylong_verylong: {$mod: [NumberLong(800000000000), NumberLong(300000000000)]}
- }
- },
- {
- $match: {
- // 1 is NumberDouble
- dub_dub: {$type: 1},
- dub_int: {$type: 1},
- dub_long: {$type: 1},
- int_dub: {$type: 1},
- // 16 is NumberInt
- int_dubint: {$type: 16},
- int_int: {$type: 16},
- // 18 is NumberLong
- int_long: {$type: 18},
- long_dub: {$type: 1},
- long_dubint: {$type: 18},
- long_dublong: {$type: 1},
- long_int: {$type: 18},
- long_long: {$type: 18},
- verylong_verylong: {$type: 18}
- }
- });
+// Aggregate checking various combinations of number types.
+// The $match portion ensures they are of the correct type, as the shell turns the ints back
+// into doubles at the end, so we cannot check types with assert.
+coll.save({});
+var result = coll.aggregate({
+ $project: {
+ _id: 0,
+ dub_dub: {$mod: [138.5, 3.0]},
+ dub_int: {$mod: [138.5, NumberLong(3)]},
+ dub_long: {$mod: [138.5, NumberInt(3)]},
+ int_dub: {$mod: [NumberInt(8), 3.25]},
+ int_dubint: {$mod: [NumberInt(8), 3.0]},
+ int_int: {$mod: [NumberInt(8), NumberInt(3)]},
+ int_long: {$mod: [NumberInt(8), NumberLong(3)]},
+ long_dub: {$mod: [NumberLong(8), 3.25]},
+ long_dubint: {$mod: [NumberLong(8), 3.0]},
+ long_dublong: {$mod: [NumberLong(500000000000), 450000000000.0]},
+ long_int: {$mod: [NumberLong(8), NumberInt(3)]},
+ long_long: {$mod: [NumberLong(8), NumberLong(3)]},
+ verylong_verylong: {$mod: [NumberLong(800000000000), NumberLong(300000000000)]}
+ }
+},
+ {
+ $match: {
+ // 1 is NumberDouble
+ dub_dub: {$type: 1},
+ dub_int: {$type: 1},
+ dub_long: {$type: 1},
+ int_dub: {$type: 1},
+ // 16 is NumberInt
+ int_dubint: {$type: 16},
+ int_int: {$type: 16},
+ // 18 is NumberLong
+ int_long: {$type: 18},
+ long_dub: {$type: 1},
+ long_dubint: {$type: 18},
+ long_dublong: {$type: 1},
+ long_int: {$type: 18},
+ long_long: {$type: 18},
+ verylong_verylong: {$type: 18}
+ }
+ });
- // Correct answers (it is mainly the types that are important here).
- var expectedResult = [{
- dub_dub: 0.5,
- dub_int: 0.5,
- dub_long: 0.5,
- int_dub: 1.5,
- int_dubint: 2,
- int_int: 2,
- int_long: NumberLong(2),
- long_dub: 1.5,
- long_dubint: NumberLong(2),
- long_dublong: 50000000000,
- long_int: NumberLong(2),
- long_long: NumberLong(2),
- verylong_verylong: NumberLong(200000000000)
- }];
+// Correct answers (it is mainly the types that are important here).
+var expectedResult = [{
+ dub_dub: 0.5,
+ dub_int: 0.5,
+ dub_long: 0.5,
+ int_dub: 1.5,
+ int_dubint: 2,
+ int_int: 2,
+ int_long: NumberLong(2),
+ long_dub: 1.5,
+ long_dubint: NumberLong(2),
+ long_dublong: 50000000000,
+ long_int: NumberLong(2),
+ long_long: NumberLong(2),
+ verylong_verylong: NumberLong(200000000000)
+}];
- assert.eq(result.toArray(), expectedResult, tojson(result));
+assert.eq(result.toArray(), expectedResult, tojson(result));
- //
- // Confirm error cases.
- //
+//
+// Confirm error cases.
+//
- // Confirm mod by 0 fails in an expected manner.
- assertErrorCode(coll, {$project: {a: {$mod: [10, 0 /*double*/]}}}, 16610);
- assertErrorCode(coll, {$project: {a: {$mod: [NumberInt(10), NumberInt(0)]}}}, 16610);
- assertErrorCode(coll, {$project: {a: {$mod: [NumberLong(10), NumberLong(0)]}}}, 16610);
+// Confirm mod by 0 fails in an expected manner.
+assertErrorCode(coll, {$project: {a: {$mod: [10, 0 /*double*/]}}}, 16610);
+assertErrorCode(coll, {$project: {a: {$mod: [NumberInt(10), NumberInt(0)]}}}, 16610);
+assertErrorCode(coll, {$project: {a: {$mod: [NumberLong(10), NumberLong(0)]}}}, 16610);
- // Confirm expected behavior for NaN and Infinity values.
- testExpression(coll, {$mod: [10, NaN]}, NaN);
- testExpression(coll, {$mod: [10, Infinity]}, 10);
- testExpression(coll, {$mod: [10, -Infinity]}, 10);
- testExpression(coll, {$mod: [Infinity, 10]}, NaN);
- testExpression(coll, {$mod: [-Infinity, 10]}, NaN);
- testExpression(coll, {$mod: [NaN, 10]}, NaN);
+// Confirm expected behavior for NaN and Infinity values.
+testExpression(coll, {$mod: [10, NaN]}, NaN);
+testExpression(coll, {$mod: [10, Infinity]}, 10);
+testExpression(coll, {$mod: [10, -Infinity]}, 10);
+testExpression(coll, {$mod: [Infinity, 10]}, NaN);
+testExpression(coll, {$mod: [-Infinity, 10]}, NaN);
+testExpression(coll, {$mod: [NaN, 10]}, NaN);
})();
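
The type matrix in the test above encodes $mod's promotion rule: the result takes the widest operand type, with double widest, then long, then int. A minimal sketch (hypothetical collection name):

    const demo = db.mod_demo;
    demo.drop();
    assert.writeOK(demo.insert({_id: 0}));
    printjson(demo.aggregate([{
        $project: {
            _id: 0,
            int_int: {$mod: [NumberInt(8), NumberInt(3)]},    // int 2
            long_int: {$mod: [NumberLong(8), NumberInt(3)]},  // NumberLong(2)
            dub_int: {$mod: [8.5, NumberInt(3)]}              // double 2.5
        }
    }]).toArray());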
diff --git a/jstests/aggregation/expressions/expression_trigonometric.js b/jstests/aggregation/expressions/expression_trigonometric.js
index 192e9743b62..468c6bccef3 100644
--- a/jstests/aggregation/expressions/expression_trigonometric.js
+++ b/jstests/aggregation/expressions/expression_trigonometric.js
@@ -1,254 +1,253 @@
// SERVER-32930: Basic integration tests for trigonometric aggregation expressions.
(function() {
- "use strict";
- // For assertErrorCode.
- load("jstests/aggregation/extras/utils.js");
-
- const coll = db.expression_trigonometric;
- coll.drop();
- // We need at least one document in the collection in order to test expressions, add it here.
- assert.commandWorked(coll.insert({}));
-
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- const pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
-
- // Helper for testing that the aggregation expression 'op' returns expResult, approximately,
- // since NumberDecimal has so many representations for a given number (0 versus 0e-40 for
- // instance).
- function testOpApprox(op, expResult) {
- const pipeline = [{$project: {_id: 0, result: {$abs: {$subtract: [op, expResult]}}}}];
- assert.lt(coll.aggregate(pipeline).toArray(), [{result: NumberDecimal("0.00000005")}]);
- }
-
- // Simple successful int input.
- testOp({$acos: NumberInt(1)}, 0);
- testOp({$acosh: NumberInt(1)}, 0);
- testOp({$asin: NumberInt(0)}, 0);
- testOp({$asinh: NumberInt(0)}, 0);
- testOp({$atan: NumberInt(0)}, 0);
- testOp({$atan2: [NumberInt(0), NumberInt(1)]}, 0);
- testOp({$atan2: [NumberInt(0), NumberInt(0)]}, 0);
- testOp({$atanh: NumberInt(0)}, 0);
- testOp({$cos: NumberInt(0)}, 1);
- testOp({$cosh: NumberInt(0)}, 1);
- testOp({$sin: NumberInt(0)}, 0);
- testOp({$sinh: NumberInt(0)}, 0);
- testOp({$tan: NumberInt(0)}, 0);
- testOp({$tanh: NumberInt(0)}, 0);
- testOp({$degreesToRadians: NumberInt(0)}, 0);
- testOp({$radiansToDegrees: NumberInt(0)}, 0);
-
- // Simple successful long input.
- testOp({$acos: NumberLong(1)}, 0);
- testOp({$acosh: NumberLong(1)}, 0);
- testOp({$asin: NumberLong(0)}, 0);
- testOp({$asinh: NumberLong(0)}, 0);
- testOp({$atan: NumberLong(0)}, 0);
- testOp({$atan2: [NumberLong(0), NumberLong(1)]}, 0);
- testOp({$atan2: [NumberLong(0), NumberLong(0)]}, 0);
- testOp({$atanh: NumberLong(0)}, 0);
- testOp({$cos: NumberLong(0)}, 1);
- testOp({$cosh: NumberLong(0)}, 1);
- testOp({$sin: NumberLong(0)}, 0);
- testOp({$sinh: NumberLong(0)}, 0);
- testOp({$tan: NumberLong(0)}, 0);
- testOp({$tanh: NumberLong(0)}, 0);
- testOp({$degreesToRadians: NumberLong(0)}, 0);
- testOp({$radiansToDegrees: NumberLong(0)}, 0);
-
- // Simple successful double input.
- testOp({$acos: 1}, 0);
- testOp({$acosh: 1}, 0);
- testOp({$asin: 0}, 0);
- testOp({$asinh: 0}, 0);
- testOp({$atan: 0}, 0);
- testOp({$atan2: [0, 1]}, 0);
- testOp({$atan2: [0, 0]}, 0);
- testOp({$atanh: 0}, 0);
- testOp({$cos: 0}, 1);
- testOp({$cosh: 0}, 1);
- testOp({$sin: 0}, 0);
- testOp({$sinh: 0}, 0);
- testOp({$tan: 0}, 0);
- testOp({$tanh: 0}, 0);
- testOp({$degreesToRadians: 0}, 0);
- testOp({$radiansToDegrees: 0}, 0);
-
- // Simple successful decimal input.
- testOpApprox({$acos: NumberDecimal(1)}, NumberDecimal(0));
- testOpApprox({$acosh: NumberDecimal(1)}, NumberDecimal(0));
- testOpApprox({$asin: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$asinh: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$atan: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$atan2: [NumberDecimal(0), 1]}, NumberDecimal(0));
- testOpApprox({$atan2: [NumberDecimal(0), 0]}, NumberDecimal(0));
- testOpApprox({$atanh: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$cos: NumberDecimal(0)}, NumberDecimal(1));
- testOpApprox({$cosh: NumberDecimal(0)}, NumberDecimal(1));
- testOpApprox({$sin: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$sinh: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$tan: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$tanh: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$degreesToRadians: NumberDecimal(0)}, NumberDecimal(0));
- testOpApprox({$radiansToDegrees: NumberDecimal(0)}, NumberDecimal(0));
-
- // Infinity input produces out of bounds error.
- assertErrorCode(coll, [{$project: {a: {$acos: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal('Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$acosh: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acosh: NumberDecimal('-Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$asin: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal('Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$atanh: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal('Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$cos: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$cos: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$cos: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$cos: NumberDecimal('Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$sin: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$sin: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$sin: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$sin: NumberDecimal('Infinity')}}}], 50989);
-
- assertErrorCode(coll, [{$project: {a: {$tan: -Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$tan: NumberDecimal('-Infinity')}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$tan: Infinity}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$tan: NumberDecimal('Infinity')}}}], 50989);
-
- // Infinity input produces Infinity as output.
- testOp({$acosh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
- testOp({$acosh: Infinity}, Infinity);
-
- testOp({$asinh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
- testOp({$asinh: NumberDecimal('-Infinity')}, NumberDecimal('-Infinity'));
- testOp({$asinh: Infinity}, Infinity);
- testOp({$asinh: -Infinity}, -Infinity);
- testOp({$cosh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
- testOp({$cosh: NumberDecimal('-Infinity')}, NumberDecimal('Infinity'));
- testOp({$cosh: Infinity}, Infinity);
- testOp({$cosh: -Infinity}, Infinity);
- testOp({$sinh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
- testOp({$sinh: NumberDecimal('-Infinity')}, NumberDecimal('-Infinity'));
- testOp({$sinh: Infinity}, Infinity);
- testOp({$sinh: -Infinity}, -Infinity);
-
- // Infinity produces finite output (due to asymptotic bounds).
- testOpApprox({$atan: NumberDecimal('Infinity')}, NumberDecimal(Math.PI / 2));
- testOpApprox({$atan: NumberDecimal('-Infinity')}, NumberDecimal(Math.Pi / 2));
- testOpApprox({$atan: Infinity}, Math.PI / 2);
- testOpApprox({$atan: -Infinity}, -Math.PI / 2);
-
- testOpApprox({$atan2: [NumberDecimal('Infinity'), 0]}, NumberDecimal(Math.PI / 2));
- testOpApprox({$atan2: [NumberDecimal('-Infinity'), 0]}, NumberDecimal(-Math.PI / 2));
- testOpApprox({$atan2: [NumberDecimal('-Infinity'), NumberDecimal("Infinity")]},
- NumberDecimal(-Math.PI / 4));
- testOpApprox({$atan2: [NumberDecimal('-Infinity'), NumberDecimal("-Infinity")]},
- NumberDecimal(-3 * Math.PI / 4));
- testOpApprox({$atan2: [NumberDecimal('0'), NumberDecimal("-Infinity")]},
- NumberDecimal(Math.PI));
- testOpApprox({$atan2: [NumberDecimal('0'), NumberDecimal("Infinity")]}, NumberDecimal(0));
-
- testOp({$tanh: NumberDecimal('Infinity')}, NumberDecimal('1'));
- testOp({$tanh: NumberDecimal('-Infinity')}, NumberDecimal('-1'));
-
- // Finite input produces infinite outputs.
- testOp({$atanh: NumberDecimal(1)}, NumberDecimal('Infinity'));
- testOp({$atanh: NumberDecimal(-1)}, NumberDecimal('-Infinity'));
- testOp({$atanh: 1}, Infinity);
- testOp({$atanh: -1}, -Infinity);
-
- testOp({$tanh: Infinity}, 1);
- testOp({$tanh: -Infinity}, -1);
-
- // Int argument out of bounds.
- assertErrorCode(coll, [{$project: {a: {$acos: NumberInt(-2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: NumberInt(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberInt(-2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberInt(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acosh: NumberInt(0)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberInt(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberInt(-2)}}}], 50989);
-
- // Long argument out of bounds.
- assertErrorCode(coll, [{$project: {a: {$acos: NumberLong(-2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: NumberLong(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberLong(-2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberLong(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acosh: NumberLong(0)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberLong(2)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberLong(-2)}}}], 50989);
-
- // Double argument out of bounds.
- assertErrorCode(coll, [{$project: {a: {$acos: -1.1}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: 1.1}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: -1.1}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: 1.1}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acosh: 0.9}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: -1.00001}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: 1.00001}}}], 50989);
-
- // Decimal argument out of bounds.
- assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal(-1.1)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal(1.1)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal(-1.1)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal(1.1)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$acosh: NumberDecimal(0.9)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal(-1.00001)}}}], 50989);
- assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal(1.000001)}}}], 50989);
-
- // Check NaN is preserved.
- ["$acos", "$asin", "$atan", "$cos", "$sin", "$tan"].forEach(op => {
- testOp({[op]: NaN}, NaN);
- testOp({[op]: NumberDecimal(NaN)}, NumberDecimal(NaN));
- // Check the hyperbolic version of each function.
- testOp({[op + 'h']: NaN}, NaN);
- testOp({[op + 'h']: NumberDecimal(NaN)}, NumberDecimal(NaN));
- });
-
- ["$radiansToDegrees", "$degreesToRadians"].forEach(op => {
- testOp({[op]: NaN}, NaN);
- testOp({[op]: NumberDecimal(NaN)}, NumberDecimal(NaN));
- testOp({[op]: -Infinity}, -Infinity);
- testOp({[op]: NumberDecimal(-Infinity)}, NumberDecimal(-Infinity));
- testOp({[op]: Infinity}, Infinity);
- testOp({[op]: NumberDecimal(Infinity)}, NumberDecimal(Infinity));
- });
-
- testOp({$atan2: [NumberDecimal('NaN'), NumberDecimal('NaN')]}, NumberDecimal('NaN'));
- testOp({$atan2: [NumberDecimal('NaN'), NumberDecimal('0')]}, NumberDecimal('NaN'));
- testOp({$atan2: [NumberDecimal('0'), NumberDecimal('NaN')]}, NumberDecimal('NaN'));
-
- // Non-numeric input.
- assertErrorCode(coll, [{$project: {a: {$acos: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$acosh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$asin: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$asinh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$atan: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$atan2: ["string", "string"]}}}], 51044);
- assertErrorCode(coll, [{$project: {a: {$atan2: ["string", 0.0]}}}], 51044);
- assertErrorCode(coll, [{$project: {a: {$atan2: [0.0, "string"]}}}], 51045);
- assertErrorCode(coll, [{$project: {a: {$atanh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$cos: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$cosh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$sin: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$sinh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$tan: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$tanh: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$degreesToRadians: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$radiansToDegrees: "string"}}}], 28765);
+"use strict";
+// For assertErrorCode.
+load("jstests/aggregation/extras/utils.js");
+
+const coll = db.expression_trigonometric;
+coll.drop();
+// We need at least one document in the collection in order to test expressions; add it here.
+assert.commandWorked(coll.insert({}));
+
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ const pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
+
+// Helper for testing that the aggregation expression 'op' returns expResult, approximately,
+// since NumberDecimal has so many representations for a given number (0 versus 0e-40 for
+// instance).
+function testOpApprox(op, expResult) {
+ const pipeline = [{$project: {_id: 0, result: {$abs: {$subtract: [op, expResult]}}}}];
+ assert.lt(coll.aggregate(pipeline).toArray(), [{result: NumberDecimal("0.00000005")}]);
+}
+
+// Simple successful int input.
+testOp({$acos: NumberInt(1)}, 0);
+testOp({$acosh: NumberInt(1)}, 0);
+testOp({$asin: NumberInt(0)}, 0);
+testOp({$asinh: NumberInt(0)}, 0);
+testOp({$atan: NumberInt(0)}, 0);
+testOp({$atan2: [NumberInt(0), NumberInt(1)]}, 0);
+testOp({$atan2: [NumberInt(0), NumberInt(0)]}, 0);
+testOp({$atanh: NumberInt(0)}, 0);
+testOp({$cos: NumberInt(0)}, 1);
+testOp({$cosh: NumberInt(0)}, 1);
+testOp({$sin: NumberInt(0)}, 0);
+testOp({$sinh: NumberInt(0)}, 0);
+testOp({$tan: NumberInt(0)}, 0);
+testOp({$tanh: NumberInt(0)}, 0);
+testOp({$degreesToRadians: NumberInt(0)}, 0);
+testOp({$radiansToDegrees: NumberInt(0)}, 0);
+
+// Simple successful long input.
+testOp({$acos: NumberLong(1)}, 0);
+testOp({$acosh: NumberLong(1)}, 0);
+testOp({$asin: NumberLong(0)}, 0);
+testOp({$asinh: NumberLong(0)}, 0);
+testOp({$atan: NumberLong(0)}, 0);
+testOp({$atan2: [NumberLong(0), NumberLong(1)]}, 0);
+testOp({$atan2: [NumberLong(0), NumberLong(0)]}, 0);
+testOp({$atanh: NumberLong(0)}, 0);
+testOp({$cos: NumberLong(0)}, 1);
+testOp({$cosh: NumberLong(0)}, 1);
+testOp({$sin: NumberLong(0)}, 0);
+testOp({$sinh: NumberLong(0)}, 0);
+testOp({$tan: NumberLong(0)}, 0);
+testOp({$tanh: NumberLong(0)}, 0);
+testOp({$degreesToRadians: NumberLong(0)}, 0);
+testOp({$radiansToDegrees: NumberLong(0)}, 0);
+
+// Simple successful double input.
+testOp({$acos: 1}, 0);
+testOp({$acosh: 1}, 0);
+testOp({$asin: 0}, 0);
+testOp({$asinh: 0}, 0);
+testOp({$atan: 0}, 0);
+testOp({$atan2: [0, 1]}, 0);
+testOp({$atan2: [0, 0]}, 0);
+testOp({$atanh: 0}, 0);
+testOp({$cos: 0}, 1);
+testOp({$cosh: 0}, 1);
+testOp({$sin: 0}, 0);
+testOp({$sinh: 0}, 0);
+testOp({$tan: 0}, 0);
+testOp({$tanh: 0}, 0);
+testOp({$degreesToRadians: 0}, 0);
+testOp({$radiansToDegrees: 0}, 0);
+
+// Simple successful decimal input.
+testOpApprox({$acos: NumberDecimal(1)}, NumberDecimal(0));
+testOpApprox({$acosh: NumberDecimal(1)}, NumberDecimal(0));
+testOpApprox({$asin: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$asinh: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$atan: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$atan2: [NumberDecimal(0), 1]}, NumberDecimal(0));
+testOpApprox({$atan2: [NumberDecimal(0), 0]}, NumberDecimal(0));
+testOpApprox({$atanh: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$cos: NumberDecimal(0)}, NumberDecimal(1));
+testOpApprox({$cosh: NumberDecimal(0)}, NumberDecimal(1));
+testOpApprox({$sin: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$sinh: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$tan: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$tanh: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$degreesToRadians: NumberDecimal(0)}, NumberDecimal(0));
+testOpApprox({$radiansToDegrees: NumberDecimal(0)}, NumberDecimal(0));
+
+// Infinity input produces out of bounds error.
+assertErrorCode(coll, [{$project: {a: {$acos: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal('Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$acosh: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acosh: NumberDecimal('-Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$asin: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal('Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$atanh: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal('Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$cos: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$cos: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$cos: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$cos: NumberDecimal('Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$sin: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$sin: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$sin: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$sin: NumberDecimal('Infinity')}}}], 50989);
+
+assertErrorCode(coll, [{$project: {a: {$tan: -Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$tan: NumberDecimal('-Infinity')}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$tan: Infinity}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$tan: NumberDecimal('Infinity')}}}], 50989);
+
+// Infinity input produces Infinity as output.
+testOp({$acosh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
+testOp({$acosh: Infinity}, Infinity);
+
+testOp({$asinh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
+testOp({$asinh: NumberDecimal('-Infinity')}, NumberDecimal('-Infinity'));
+testOp({$asinh: Infinity}, Infinity);
+testOp({$asinh: -Infinity}, -Infinity);
+testOp({$cosh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
+testOp({$cosh: NumberDecimal('-Infinity')}, NumberDecimal('Infinity'));
+testOp({$cosh: Infinity}, Infinity);
+testOp({$cosh: -Infinity}, Infinity);
+testOp({$sinh: NumberDecimal('Infinity')}, NumberDecimal('Infinity'));
+testOp({$sinh: NumberDecimal('-Infinity')}, NumberDecimal('-Infinity'));
+testOp({$sinh: Infinity}, Infinity);
+testOp({$sinh: -Infinity}, -Infinity);
+
+// Infinity produces finite output (due to asymptotic bounds).
+testOpApprox({$atan: NumberDecimal('Infinity')}, NumberDecimal(Math.PI / 2));
+testOpApprox({$atan: NumberDecimal('-Infinity')}, NumberDecimal(-Math.PI / 2));
+testOpApprox({$atan: Infinity}, Math.PI / 2);
+testOpApprox({$atan: -Infinity}, -Math.PI / 2);
+
+testOpApprox({$atan2: [NumberDecimal('Infinity'), 0]}, NumberDecimal(Math.PI / 2));
+testOpApprox({$atan2: [NumberDecimal('-Infinity'), 0]}, NumberDecimal(-Math.PI / 2));
+testOpApprox({$atan2: [NumberDecimal('-Infinity'), NumberDecimal("Infinity")]},
+ NumberDecimal(-Math.PI / 4));
+testOpApprox({$atan2: [NumberDecimal('-Infinity'), NumberDecimal("-Infinity")]},
+ NumberDecimal(-3 * Math.PI / 4));
+testOpApprox({$atan2: [NumberDecimal('0'), NumberDecimal("-Infinity")]}, NumberDecimal(Math.PI));
+testOpApprox({$atan2: [NumberDecimal('0'), NumberDecimal("Infinity")]}, NumberDecimal(0));
+
+testOp({$tanh: NumberDecimal('Infinity')}, NumberDecimal('1'));
+testOp({$tanh: NumberDecimal('-Infinity')}, NumberDecimal('-1'));
+
+// Finite input produces infinite outputs.
+testOp({$atanh: NumberDecimal(1)}, NumberDecimal('Infinity'));
+testOp({$atanh: NumberDecimal(-1)}, NumberDecimal('-Infinity'));
+testOp({$atanh: 1}, Infinity);
+testOp({$atanh: -1}, -Infinity);
+
+testOp({$tanh: Infinity}, 1);
+testOp({$tanh: -Infinity}, -1);
+
+// Int argument out of bounds.
+assertErrorCode(coll, [{$project: {a: {$acos: NumberInt(-2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: NumberInt(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberInt(-2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberInt(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acosh: NumberInt(0)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberInt(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberInt(-2)}}}], 50989);
+
+// Long argument out of bounds.
+assertErrorCode(coll, [{$project: {a: {$acos: NumberLong(-2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: NumberLong(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberLong(-2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberLong(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acosh: NumberLong(0)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberLong(2)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberLong(-2)}}}], 50989);
+
+// Double argument out of bounds.
+assertErrorCode(coll, [{$project: {a: {$acos: -1.1}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: 1.1}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: -1.1}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: 1.1}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acosh: 0.9}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: -1.00001}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: 1.00001}}}], 50989);
+
+// Decimal argument out of bounds.
+assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal(-1.1)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acos: NumberDecimal(1.1)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal(-1.1)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$asin: NumberDecimal(1.1)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$acosh: NumberDecimal(0.9)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal(-1.00001)}}}], 50989);
+assertErrorCode(coll, [{$project: {a: {$atanh: NumberDecimal(1.00001)}}}], 50989);
+
+// Check NaN is preserved.
+["$acos", "$asin", "$atan", "$cos", "$sin", "$tan"].forEach(op => {
+ testOp({[op]: NaN}, NaN);
+ testOp({[op]: NumberDecimal(NaN)}, NumberDecimal(NaN));
+ // Check the hyperbolic version of each function.
+ testOp({[op + 'h']: NaN}, NaN);
+ testOp({[op + 'h']: NumberDecimal(NaN)}, NumberDecimal(NaN));
+});
+
+["$radiansToDegrees", "$degreesToRadians"].forEach(op => {
+ testOp({[op]: NaN}, NaN);
+ testOp({[op]: NumberDecimal(NaN)}, NumberDecimal(NaN));
+ testOp({[op]: -Infinity}, -Infinity);
+ testOp({[op]: NumberDecimal(-Infinity)}, NumberDecimal(-Infinity));
+ testOp({[op]: Infinity}, Infinity);
+ testOp({[op]: NumberDecimal(Infinity)}, NumberDecimal(Infinity));
+});
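+// Both conversions are linear scalings (by π/180 or 180/π), so NaN and
+// signed infinities pass through unchanged.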
+
+testOp({$atan2: [NumberDecimal('NaN'), NumberDecimal('NaN')]}, NumberDecimal('NaN'));
+testOp({$atan2: [NumberDecimal('NaN'), NumberDecimal('0')]}, NumberDecimal('NaN'));
+testOp({$atan2: [NumberDecimal('0'), NumberDecimal('NaN')]}, NumberDecimal('NaN'));
+
+// Non-numeric input.
+assertErrorCode(coll, [{$project: {a: {$acos: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$acosh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$asin: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$asinh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$atan: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$atan2: ["string", "string"]}}}], 51044);
+assertErrorCode(coll, [{$project: {a: {$atan2: ["string", 0.0]}}}], 51044);
+assertErrorCode(coll, [{$project: {a: {$atan2: [0.0, "string"]}}}], 51045);
+assertErrorCode(coll, [{$project: {a: {$atanh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$cos: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$cosh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$sin: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$sinh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$tan: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$tanh: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$degreesToRadians: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$radiansToDegrees: "string"}}}], 28765);
}());
diff --git a/jstests/aggregation/expressions/floor_ceil.js b/jstests/aggregation/expressions/floor_ceil.js
index def7c4de59e..1b4830d0d0d 100644
--- a/jstests/aggregation/expressions/floor_ceil.js
+++ b/jstests/aggregation/expressions/floor_ceil.js
@@ -1,41 +1,41 @@
// The following are integration tests for $floor and $ceil.
(function() {
- "use strict";
+"use strict";
- // For assertErrorCode.
- load("jstests/aggregation/extras/utils.js");
+// For assertErrorCode.
+load("jstests/aggregation/extras/utils.js");
- var coll = db.server19548;
- coll.drop();
- // We need at least one document in the collection in order to test expressions, add it here.
- assert.commandWorked(coll.insert({}));
+var coll = db.server19548;
+coll.drop();
+// We need at least one document in the collection in order to test expressions; add one here.
+assert.commandWorked(coll.insert({}));
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
- testOp({$ceil: NumberLong(4)}, NumberLong(4));
- testOp({$ceil: NaN}, NaN);
- testOp({$ceil: Infinity}, Infinity);
- testOp({$ceil: -Infinity}, -Infinity);
- testOp({$ceil: null}, null);
- testOp({$ceil: -2.0}, -2.0);
- testOp({$ceil: 0.9}, 1.0);
- testOp({$ceil: -1.2}, -1.0);
+testOp({$ceil: NumberLong(4)}, NumberLong(4));
+testOp({$ceil: NaN}, NaN);
+testOp({$ceil: Infinity}, Infinity);
+testOp({$ceil: -Infinity}, -Infinity);
+testOp({$ceil: null}, null);
+testOp({$ceil: -2.0}, -2.0);
+testOp({$ceil: 0.9}, 1.0);
+testOp({$ceil: -1.2}, -1.0);
- testOp({$floor: NumberLong(4)}, NumberLong(4));
- testOp({$floor: NaN}, NaN);
- testOp({$floor: Infinity}, Infinity);
- testOp({$floor: -Infinity}, -Infinity);
- testOp({$floor: null}, null);
- testOp({$floor: -2.0}, -2.0);
- testOp({$floor: 0.9}, 0.0);
- testOp({$floor: -1.2}, -2.0);
+testOp({$floor: NumberLong(4)}, NumberLong(4));
+testOp({$floor: NaN}, NaN);
+testOp({$floor: Infinity}, Infinity);
+testOp({$floor: -Infinity}, -Infinity);
+testOp({$floor: null}, null);
+testOp({$floor: -2.0}, -2.0);
+testOp({$floor: 0.9}, 0.0);
+testOp({$floor: -1.2}, -2.0);
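+// $ceil rounds toward +Infinity and $floor toward -Infinity, which is why
+// -1.2 ceils to -1.0 above but floors to -2.0.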
- // Non-numeric input.
- assertErrorCode(coll, [{$project: {a: {$ceil: "string"}}}], 28765);
- assertErrorCode(coll, [{$project: {a: {$floor: "string"}}}], 28765);
+// Non-numeric input.
+assertErrorCode(coll, [{$project: {a: {$ceil: "string"}}}], 28765);
+assertErrorCode(coll, [{$project: {a: {$floor: "string"}}}], 28765);
}());
diff --git a/jstests/aggregation/expressions/in.js b/jstests/aggregation/expressions/in.js
index 7c91313e081..63ba02f1b4e 100644
--- a/jstests/aggregation/expressions/in.js
+++ b/jstests/aggregation/expressions/in.js
@@ -4,205 +4,204 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
+"use strict";
- const caseInsensitive = {locale: "en_US", strength: 2};
- var coll = db.in ;
- coll.drop();
-
- function testExpression(options) {
- coll.drop();
- testExpressionInternal(options);
- }
+const caseInsensitive = {
+ locale: "en_US",
+ strength: 2
+};
+var coll = db.in;
+coll.drop();
- function testExpressionHashIndex(options) {
- coll.drop();
- assert.commandWorked(coll.createIndex({elementField: "hashed"}));
- testExpressionInternal(options);
- }
+function testExpression(options) {
+ coll.drop();
+ testExpressionInternal(options);
+}
- function testExpressionCollectionCollation(options, collationSpec) {
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), {collation: collationSpec}));
- testExpressionInternal(options);
- }
+function testExpressionHashIndex(options) {
+ coll.drop();
+ assert.commandWorked(coll.createIndex({elementField: "hashed"}));
+ testExpressionInternal(options);
+}
- function testExpressionInternal(options) {
- var pipeline = {$project: {included: {$in: ["$elementField", {$literal: options.array}]}}};
- assert.writeOK(coll.insert({elementField: options.element}));
- var res = coll.aggregate(pipeline).toArray();
- assert.eq(res.length, 1);
- assert.eq(res[0].included, options.elementIsIncluded);
-
- if (options.queryFormShouldBeEquivalent) {
- var query = {elementField: {$in: options.array}};
- res = coll.find(query).toArray();
-
- if (options.elementIsIncluded) {
- assert.eq(res.length, 1);
- } else {
- assert.eq(res.length, 0);
- }
+function testExpressionCollectionCollation(options, collationSpec) {
+ coll.drop();
+ assert.commandWorked(db.createCollection(coll.getName(), {collation: collationSpec}));
+ testExpressionInternal(options);
+}
+
+function testExpressionInternal(options) {
+ var pipeline = {$project: {included: {$in: ["$elementField", {$literal: options.array}]}}};
+ assert.writeOK(coll.insert({elementField: options.element}));
+ var res = coll.aggregate(pipeline).toArray();
+ assert.eq(res.length, 1);
+ assert.eq(res[0].included, options.elementIsIncluded);
+
+ if (options.queryFormShouldBeEquivalent) {
+ var query = {elementField: {$in: options.array}};
+ res = coll.find(query).toArray();
+
+ if (options.elementIsIncluded) {
+ assert.eq(res.length, 1);
+ } else {
+ assert.eq(res.length, 0);
}
}
-
- testExpression(
- {element: 1, array: [1, 2, 3], elementIsIncluded: true, queryFormShouldBeEquivalent: true});
-
- testExpression({
- element: "A",
- array: ["a", "A", "a"],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- testExpression({
- element: {a: 1},
- array: [{b: 1}, 2],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: true
- });
-
- testExpression({
- element: {a: 1},
- array: [{a: 1}],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- testExpression({
- element: [1, 2],
- array: [[2, 1]],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: true
- });
-
- testExpression({
- element: [1, 2],
- array: [[1, 2]],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- // Test $in with duplicated target element.
- testExpression({
- element: 7,
- array: [3, 5, 7, 7, 9],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- // Test $in with other element within array duplicated.
- testExpression({
- element: 7,
- array: [3, 5, 7, 9, 9],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- // Test $in on unsorted array.
- testExpression({
- element: 7,
- array: [3, 10, 5, 7, 8, 9],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- // Test matching $in on unsorted array with duplicates.
- testExpression({
- element: 7,
- array: [7, 10, 7, 10, 2, 5, 3, 7],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- // Test non-matching $in on unsorted array with duplicates.
- testExpression({
- element: 8,
- array: [10, 7, 2, 5, 3],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: true
- });
-
- // Test $in with success due to collation on source collection.
- testExpressionCollectionCollation({
- element: "abcd",
- array: ["aBcD", "ABCD"],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- },
- caseInsensitive);
-
- // Test $in with a source collection that has a hash index on the relevant field.
- testExpressionHashIndex({
- element: 5,
- array: [10, 7, 2, 5, 3],
- elementIsIncluded: true,
- queryFormShouldBeEquivalent: true
- });
-
- testExpression(
- {element: 1, array: [], elementIsIncluded: false, queryFormShouldBeEquivalent: true});
-
- // Aggregation's $in has parity with query's $in except with regexes matching string values and
- // equality semantics with array values.
-
- testExpression({
- element: "abc",
- array: [/a/, /b/, /c/],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: false
- });
-
- testExpression({
- element: /a/,
- array: ["a", "b", "c"],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: false
- });
-
- testExpression({
- element: [],
- array: [1, 2, 3],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: false
- });
-
- testExpression({
- element: [1],
- array: [1, 2, 3],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: false
- });
-
- testExpression({
- element: [1, 2],
- array: [1, 2, 3],
- elementIsIncluded: false,
- queryFormShouldBeEquivalent: false
- });
-
- coll.drop();
- coll.insert({});
-
- var pipeline = {$project: {included: {$in: [[1, 2], 1]}}};
- assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
-
- pipeline = {$project: {included: {$in: [1, null]}}};
- assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
-
- pipeline = {$project: {included: {$in: [1, "$notAField"]}}};
- assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
-
- pipeline = {$project: {included: {$in: null}}};
- assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
-
- pipeline = {$project: {included: {$in: [1]}}};
- assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
-
- pipeline = {$project: {included: {$in: []}}};
- assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
-
- pipeline = {$project: {included: {$in: [1, 2, 3]}}};
- assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
+}
+
+testExpression(
+ {element: 1, array: [1, 2, 3], elementIsIncluded: true, queryFormShouldBeEquivalent: true});
+
+testExpression({
+ element: "A",
+ array: ["a", "A", "a"],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+testExpression({
+ element: {a: 1},
+ array: [{b: 1}, 2],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: true
+});
+
+testExpression(
+ {element: {a: 1}, array: [{a: 1}], elementIsIncluded: true, queryFormShouldBeEquivalent: true});
+
+testExpression({
+ element: [1, 2],
+ array: [[2, 1]],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: true
+});
+
+testExpression(
+ {element: [1, 2], array: [[1, 2]], elementIsIncluded: true, queryFormShouldBeEquivalent: true});
+
+// Test $in with duplicated target element.
+testExpression({
+ element: 7,
+ array: [3, 5, 7, 7, 9],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+// Test $in with other element within array duplicated.
+testExpression({
+ element: 7,
+ array: [3, 5, 7, 9, 9],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+// Test $in on unsorted array.
+testExpression({
+ element: 7,
+ array: [3, 10, 5, 7, 8, 9],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+// Test matching $in on unsorted array with duplicates.
+testExpression({
+ element: 7,
+ array: [7, 10, 7, 10, 2, 5, 3, 7],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+// Test non-matching $in on unsorted array with duplicates.
+testExpression({
+ element: 8,
+ array: [10, 7, 2, 5, 3],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: true
+});
+
+// Test $in with success due to collation on source collection.
+testExpressionCollectionCollation({
+ element: "abcd",
+ array: ["aBcD", "ABCD"],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+},
+ caseInsensitive);
+
+// Test $in with a source collection that has a hash index on the relevant field.
+testExpressionHashIndex({
+ element: 5,
+ array: [10, 7, 2, 5, 3],
+ elementIsIncluded: true,
+ queryFormShouldBeEquivalent: true
+});
+
+testExpression(
+ {element: 1, array: [], elementIsIncluded: false, queryFormShouldBeEquivalent: true});
+
+// Aggregation's $in has parity with the query language's $in, except for regexes matching
+// string values and for the equality semantics of array values.
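+// (A query like {elementField: {$in: [/a/]}} matches strings containing "a",
+// whereas the aggregation expression compares a regex and a string only by
+// equality, so neither direction matches below.)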
+
+testExpression({
+ element: "abc",
+ array: [/a/, /b/, /c/],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: false
+});
+
+testExpression({
+ element: /a/,
+ array: ["a", "b", "c"],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: false
+});
+
+testExpression(
+ {element: [], array: [1, 2, 3], elementIsIncluded: false, queryFormShouldBeEquivalent: false});
+
+testExpression(
+ {element: [1], array: [1, 2, 3], elementIsIncluded: false, queryFormShouldBeEquivalent: false});
+
+testExpression({
+ element: [1, 2],
+ array: [1, 2, 3],
+ elementIsIncluded: false,
+ queryFormShouldBeEquivalent: false
+});
+
+coll.drop();
+coll.insert({});
+
+var pipeline = {$project: {included: {$in: [[1, 2], 1]}}};
+assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
+
+pipeline = {
+ $project: {included: {$in: [1, null]}}
+};
+assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
+
+pipeline = {
+ $project: {included: {$in: [1, "$notAField"]}}
+};
+assertErrorCode(coll, pipeline, 40081, "$in requires an array as a second argument");
+
+pipeline = {
+ $project: {included: {$in: null}}
+};
+assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
+
+pipeline = {
+ $project: {included: {$in: [1]}}
+};
+assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
+
+pipeline = {
+ $project: {included: {$in: []}}
+};
+assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
+
+pipeline = {
+ $project: {included: {$in: [1, 2, 3]}}
+};
+assertErrorCode(coll, pipeline, 16020, "$in requires two arguments");
}());
diff --git a/jstests/aggregation/expressions/indexof_array.js b/jstests/aggregation/expressions/indexof_array.js
index 3fb445e5066..a32376b1f9d 100644
--- a/jstests/aggregation/expressions/indexof_array.js
+++ b/jstests/aggregation/expressions/indexof_array.js
@@ -3,58 +3,66 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
+"use strict";
- var coll = db.indexofarray;
- coll.drop();
+var coll = db.indexofarray;
+coll.drop();
- // Insert a dummy document to ensure something flows through the pipeline.
- assert.writeOK(coll.insert({}));
+// Insert a dummy document to ensure something flows through the pipeline.
+assert.writeOK(coll.insert({}));
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2]}, 1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2]}, 1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 4]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 4]}, -1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3, 2, 1], 2, 2]}, 3);
+testExpression(coll, {$indexOfArray: [[1, 2, 3, 2, 1], 2, 2]}, 3);
- testExpression(coll, {$indexOfArray: [[1, 2, 3, 4, 5], 4, 0, 3]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3, 4, 5], 4, 0, 3]}, -1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 1]}, 1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 1]}, 1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 0, 10]}, 1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 0, 10]}, 1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3, 2, 1, 2, 3], 2, 2, 4]}, 3);
+testExpression(coll, {$indexOfArray: [[1, 2, 3, 2, 1, 2, 3], 2, 2, 4]}, 3);
- testExpression(coll, {$indexOfArray: [null, 2]}, null);
+testExpression(coll, {$indexOfArray: [null, 2]}, null);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3]}, -1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 1]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 1]}, -1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 3]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 3]}, -1);
- testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 5]}, -1);
+testExpression(coll, {$indexOfArray: [[1, 2, 3], 2, 3, 5]}, -1);
- testExpression(coll, {$indexOfArray: [[], 1]}, -1);
+testExpression(coll, {$indexOfArray: [[], 1]}, -1);
- var pipeline = {
- $project: {
- output: {
- $indexOfArray: ["string", "s"],
- }
+var pipeline = {
+ $project: {
+ output: {
+ $indexOfArray: ["string", "s"],
}
- };
- assertErrorCode(coll, pipeline, 40090);
-
- pipeline = {$project: {output: {$indexOfArray: [[1, 2, 3], 2, "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfArray: [[1, 2, 3], 2, 0, "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfArray: [[1, 2, 3], 2, -1]}}};
- assertErrorCode(coll, pipeline, 40097);
-
- pipeline = {$project: {output: {$indexOfArray: [[1, 2, 3], 2, 1, -1]}}};
- assertErrorCode(coll, pipeline, 40097);
+ }
+};
+assertErrorCode(coll, pipeline, 40090);
+
+pipeline = {
+ $project: {output: {$indexOfArray: [[1, 2, 3], 2, "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfArray: [[1, 2, 3], 2, 0, "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfArray: [[1, 2, 3], 2, -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
+
+pipeline = {
+ $project: {output: {$indexOfArray: [[1, 2, 3], 2, 1, -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
}());
diff --git a/jstests/aggregation/expressions/indexof_bytes.js b/jstests/aggregation/expressions/indexof_bytes.js
index d484ad50948..14bcead5293 100644
--- a/jstests/aggregation/expressions/indexof_bytes.js
+++ b/jstests/aggregation/expressions/indexof_bytes.js
@@ -3,137 +3,145 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
-
- function testExpressionBytes(coll, expression, result, shouldTestEquivalence = true) {
- testExpression(coll, expression, result);
-
- if (shouldTestEquivalence) {
- // If we are specifying a starting or ending index for the search, we should be able to
- // achieve equivalent behavior using $substrBytes.
- var indexOfSpec = expression["$indexOfBytes"];
- var input = indexOfSpec[0];
- var token = indexOfSpec[1];
- var start = indexOfSpec.length > 2 ? indexOfSpec[2] : 0;
- // Use $strLenBytes because JavaScript's length property is based off of UTF-16, not the
- // actual number of bytes.
- var end = indexOfSpec.length > 3 ? indexOfSpec[3] : {$strLenBytes: input};
-
- var substrExpr = {
- $indexOfBytes: [{$substrBytes: [input, start, {$subtract: [end, start]}]}, token]
- };
-
- // Since the new expression takes the index with respect to a shortened string, the
- // output index will differ from the index with respect to the full length string,
- // unless the output is -1.
- var substrResult = (result === -1) ? -1 : result - start;
-
- testExpression(coll, substrExpr, substrResult);
- }
+"use strict";
+
+function testExpressionBytes(coll, expression, result, shouldTestEquivalence = true) {
+ testExpression(coll, expression, result);
+
+ if (shouldTestEquivalence) {
+ // If we are specifying a starting or ending index for the search, we should be able to
+ // achieve equivalent behavior using $substrBytes.
+ var indexOfSpec = expression["$indexOfBytes"];
+ var input = indexOfSpec[0];
+ var token = indexOfSpec[1];
+ var start = indexOfSpec.length > 2 ? indexOfSpec[2] : 0;
+        // Use $strLenBytes because JavaScript's length property counts UTF-16
+        // code units, not the actual number of bytes.
+ var end = indexOfSpec.length > 3 ? indexOfSpec[3] : {$strLenBytes: input};
+
+ var substrExpr = {
+ $indexOfBytes: [{$substrBytes: [input, start, {$subtract: [end, start]}]}, token]
+ };
+
+ // Since the new expression takes the index with respect to a shortened string, the
+ // output index will differ from the index with respect to the full length string,
+ // unless the output is -1.
+ var substrResult = (result === -1) ? -1 : result - start;
+
+ testExpression(coll, substrExpr, substrResult);
}
+}
- var coll = db.indexofbytes;
- coll.drop();
+var coll = db.indexofbytes;
+coll.drop();
- // Insert a dummy document so something flows through the pipeline.
- assert.writeOK(coll.insert({}));
+// Insert a dummy document so something flows through the pipeline.
+assert.writeOK(coll.insert({}));
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b"]}, 1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b"]}, 1);
- testExpressionBytes(coll, {$indexOfBytes: ["abcba", "b"]}, 1);
+testExpressionBytes(coll, {$indexOfBytes: ["abcba", "b"]}, 1);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "d"]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "d"]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: ["abcba", "b", 2]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abcba", "b", 2]}, 3);
- testExpressionBytes(coll, {$indexOfBytes: ["abcde", "d", 0, 2]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abcde", "d", 0, 2]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 1]}, 1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 1]}, 1);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 0, 10]}, 1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 0, 10]}, 1);
- testExpressionBytes(coll, {$indexOfBytes: ["abcbabc", "b", 2, 4]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abcbabc", "b", 2, 4]}, 3);
- // $strLenBytes does not accept null as an input.
- testExpressionBytes(coll, {$indexOfBytes: [null, "b"]}, null, false);
+// $strLenBytes does not accept null as an input.
+testExpressionBytes(coll, {$indexOfBytes: [null, "b"]}, null, false);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3, 1]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3, 1]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3, 5]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "b", 3, 5]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: ["", " "]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["", " "]}, -1);
- testExpressionBytes(coll, {$indexOfBytes: [" ", ""]}, 0);
+testExpressionBytes(coll, {$indexOfBytes: [" ", ""]}, 0);
- testExpressionBytes(coll, {$indexOfBytes: ["", ""]}, 0);
+testExpressionBytes(coll, {$indexOfBytes: ["", ""]}, 0);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "", 3]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "", 3]}, 3);
- testExpressionBytes(coll, {$indexOfBytes: ["abc", "", 1]}, 1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc", "", 1]}, 1);
- // Test with multi-byte tokens.
+// Test with multi-byte tokens.
- testExpressionBytes(coll, {$indexOfBytes: ["abcde", "de"]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abcde", "de"]}, 3);
- testExpressionBytes(coll, {$indexOfBytes: ["abcde", "def"]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abcde", "def"]}, -1);
- // Test with non-ASCII characters. Some tests do not test equivalence using $substrBytes because
- // $substrBytes disallows taking a substring that begins or ends in the middle of a UTF-8
- // encoding of a character.
- testExpressionBytes(coll, {$indexOfBytes: ["a∫∫b", "b"]}, 7);
+// Test with non-ASCII characters. Some tests do not test equivalence using $substrBytes because
+// $substrBytes disallows taking a substring that begins or ends in the middle of a UTF-8
+// encoding of a character.
+testExpressionBytes(coll, {$indexOfBytes: ["a∫∫b", "b"]}, 7);
- // $substrBytes would attempt to take the substring from the middle of a UTF-8
- // encoding of a character.
- testExpressionBytes(coll, {$indexOfBytes: ["a∫∫b", "b", 6]}, 7, false);
+// $substrBytes would attempt to take the substring from the middle of a UTF-8
+// encoding of a character.
+testExpressionBytes(coll, {$indexOfBytes: ["a∫∫b", "b", 6]}, 7, false);
- testExpressionBytes(coll, {$indexOfBytes: ["abc∫ba", "∫"]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abc∫ba", "∫"]}, 3);
- testExpressionBytes(coll, {$indexOfBytes: ["∫∫∫", "a"]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["∫∫∫", "a"]}, -1);
- // $substrBytes would attempt to take the substring from the middle of a UTF-8
- // encoding of a character.
- testExpressionBytes(coll, {$indexOfBytes: ["ab∫c", "c", 0, 3]}, -1, false);
+// $substrBytes would attempt to take the substring from the middle of a UTF-8
+// encoding of a character.
+testExpressionBytes(coll, {$indexOfBytes: ["ab∫c", "c", 0, 3]}, -1, false);
- testExpressionBytes(coll, {$indexOfBytes: ["abc∫b∫", "b∫"]}, 6);
+testExpressionBytes(coll, {$indexOfBytes: ["abc∫b∫", "b∫"]}, 6);
- // Test with embedded null bytes.
- testExpressionBytes(coll, {$indexOfBytes: ["abc\0d", "d"]}, 4);
+// Test with embedded null bytes.
+testExpressionBytes(coll, {$indexOfBytes: ["abc\0d", "d"]}, 4);
- testExpressionBytes(coll, {$indexOfBytes: ["abc\0", "\0"]}, 3);
+testExpressionBytes(coll, {$indexOfBytes: ["abc\0", "\0"]}, 3);
- testExpressionBytes(coll, {$indexOfBytes: ["abc\0d\0", "d", 5, 6]}, -1);
+testExpressionBytes(coll, {$indexOfBytes: ["abc\0d\0", "d", 5, 6]}, -1);
- // Error cases.
+// Error cases.
- var pipeline = {
- $project: {
- output: {
- $indexOfBytes: [3, "s"],
- }
- }
- };
- assertErrorCode(coll, pipeline, 40091);
-
- pipeline = {
- $project: {
- output: {
- $indexOfBytes: ["s", 3],
- }
+var pipeline = {
+ $project: {
+ output: {
+ $indexOfBytes: [3, "s"],
}
- };
- assertErrorCode(coll, pipeline, 40092);
-
- pipeline = {$project: {output: {$indexOfBytes: ["abc", "b", "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfBytes: ["abc", "b", 0, "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfBytes: ["abc", "b", -1]}}};
- assertErrorCode(coll, pipeline, 40097);
+ }
+};
+assertErrorCode(coll, pipeline, 40091);
- pipeline = {$project: {output: {$indexOfBytes: ["abc", "b", 1, -1]}}};
- assertErrorCode(coll, pipeline, 40097);
+pipeline = {
+ $project: {
+ output: {
+ $indexOfBytes: ["s", 3],
+ }
+ }
+};
+assertErrorCode(coll, pipeline, 40092);
+
+pipeline = {
+ $project: {output: {$indexOfBytes: ["abc", "b", "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfBytes: ["abc", "b", 0, "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfBytes: ["abc", "b", -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
+
+pipeline = {
+ $project: {output: {$indexOfBytes: ["abc", "b", 1, -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
}());
diff --git a/jstests/aggregation/expressions/indexof_codepoints.js b/jstests/aggregation/expressions/indexof_codepoints.js
index 506b1a13cfa..acc4a3b072d 100644
--- a/jstests/aggregation/expressions/indexof_codepoints.js
+++ b/jstests/aggregation/expressions/indexof_codepoints.js
@@ -3,117 +3,125 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
-
- function testExpressionCodePoints(coll, expression, result, shouldTestEquivalence = true) {
- testExpression(coll, expression, result);
-
- var indexOfSpec = expression["$indexOfCP"];
- if (shouldTestEquivalence) {
- // If we are specifying a starting or ending index for the search, we should be able to
- // achieve equivalent behavior using $substrCP.
- var input = indexOfSpec[0];
- var token = indexOfSpec[1];
- var start = indexOfSpec.length > 2 ? indexOfSpec[2] : 0;
- var end = indexOfSpec.length > 3 ? indexOfSpec[3] : {$strLenCP: input};
-
- var substrExpr = {
- $indexOfCP: [{$substrCP: [input, start, {$subtract: [end, start]}]}, token]
- };
-
- // Since the new expression takes the index with respect to a shortened string, the
- // output index will differ from the index with respect to the full length string,
- // unless the output is -1.
- var substrResult = (result === -1) ? -1 : result - start;
-
- testExpression(coll, substrExpr, substrResult);
- }
+"use strict";
+
+function testExpressionCodePoints(coll, expression, result, shouldTestEquivalence = true) {
+ testExpression(coll, expression, result);
+
+ var indexOfSpec = expression["$indexOfCP"];
+ if (shouldTestEquivalence) {
+ // If we are specifying a starting or ending index for the search, we should be able to
+ // achieve equivalent behavior using $substrCP.
+ var input = indexOfSpec[0];
+ var token = indexOfSpec[1];
+ var start = indexOfSpec.length > 2 ? indexOfSpec[2] : 0;
+ var end = indexOfSpec.length > 3 ? indexOfSpec[3] : {$strLenCP: input};
+
+ var substrExpr = {
+ $indexOfCP: [{$substrCP: [input, start, {$subtract: [end, start]}]}, token]
+ };
+
+ // Since the new expression takes the index with respect to a shortened string, the
+ // output index will differ from the index with respect to the full length string,
+ // unless the output is -1.
+ var substrResult = (result === -1) ? -1 : result - start;
+
+ testExpression(coll, substrExpr, substrResult);
}
+}
- var coll = db.indexofcp;
- coll.drop();
+var coll = db.indexofcp;
+coll.drop();
- // Insert a dummy document so something flows through the pipeline.
- assert.writeOK(coll.insert({}));
+// Insert a dummy document so something flows through the pipeline.
+assert.writeOK(coll.insert({}));
- testExpressionCodePoints(coll, {$indexOfCP: ["∫aƒ", "ƒ"]}, 2);
+testExpressionCodePoints(coll, {$indexOfCP: ["∫aƒ", "ƒ"]}, 2);
- testExpressionCodePoints(coll, {$indexOfCP: ["a∫c", "d"]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["a∫c", "d"]}, -1);
- testExpressionCodePoints(coll, {$indexOfCP: ["∫b∫ba", "b", 2]}, 3);
+testExpressionCodePoints(coll, {$indexOfCP: ["∫b∫ba", "b", 2]}, 3);
- testExpressionCodePoints(coll, {$indexOfCP: ["ab∫de", "d", 0, 3]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["ab∫de", "d", 0, 3]}, -1);
- testExpressionCodePoints(coll, {$indexOfCP: ["ab∫de", "d", 0, 4]}, 3);
+testExpressionCodePoints(coll, {$indexOfCP: ["ab∫de", "d", 0, 4]}, 3);
- testExpressionCodePoints(coll, {$indexOfCP: ["øøc", "ø", 1]}, 1);
+testExpressionCodePoints(coll, {$indexOfCP: ["øøc", "ø", 1]}, 1);
- testExpressionCodePoints(coll, {$indexOfCP: ["øƒc", "ƒ", 0, 10]}, 1);
+testExpressionCodePoints(coll, {$indexOfCP: ["øƒc", "ƒ", 0, 10]}, 1);
- testExpressionCodePoints(coll, {$indexOfCP: ["abcbabc", "b", 2, 4]}, 3);
+testExpressionCodePoints(coll, {$indexOfCP: ["abcbabc", "b", 2, 4]}, 3);
- // $strLenCP does not accept null as an input.
- testExpressionCodePoints(coll, {$indexOfCP: [null, "√"]}, null, false);
+// $strLenCP does not accept null as an input.
+testExpressionCodePoints(coll, {$indexOfCP: [null, "√"]}, null, false);
- testExpressionCodePoints(coll, {$indexOfCP: ["abc", "b", 3]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["abc", "b", 3]}, -1);
- // We are intentionally testing specifying an end index before the start index, which is why we
- // cannot use $substrCP in checking for equivalence.
- testExpressionCodePoints(coll, {$indexOfCP: ["a√cb", "b", 3, 1]}, -1, false);
+// We intentionally specify an end index before the start index, which is why
+// we cannot use $substrCP to check for equivalence.
+testExpressionCodePoints(coll, {$indexOfCP: ["a√cb", "b", 3, 1]}, -1, false);
- testExpressionCodePoints(coll, {$indexOfCP: ["a∫b", "b", 3, 5]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["a∫b", "b", 3, 5]}, -1);
- testExpressionCodePoints(coll, {$indexOfCP: ["", "∫"]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["", "∫"]}, -1);
- testExpressionCodePoints(coll, {$indexOfCP: [" ", ""]}, 0);
+testExpressionCodePoints(coll, {$indexOfCP: [" ", ""]}, 0);
- testExpressionCodePoints(coll, {$indexOfCP: ["", ""]}, 0);
+testExpressionCodePoints(coll, {$indexOfCP: ["", ""]}, 0);
- testExpressionCodePoints(coll, {$indexOfCP: ["abc", "", 1]}, 1);
+testExpressionCodePoints(coll, {$indexOfCP: ["abc", "", 1]}, 1);
- // Test with multi-byte tokens.
+// Test with multi-byte tokens.
- testExpressionCodePoints(coll, {$indexOfCP: ["abcƒe", "ƒe"]}, 3);
+testExpressionCodePoints(coll, {$indexOfCP: ["abcƒe", "ƒe"]}, 3);
- testExpressionCodePoints(coll, {$indexOfCP: ["∫aeøø", "øøø"]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["∫aeøø", "øøø"]}, -1);
- // Test with embedded null bytes.
+// Test with embedded null bytes.
- testExpressionCodePoints(coll, {$indexOfCP: ["ab∫\0d", "d"]}, 4);
+testExpressionCodePoints(coll, {$indexOfCP: ["ab∫\0d", "d"]}, 4);
- testExpressionCodePoints(coll, {$indexOfCP: ["øbc\0", "\0"]}, 3);
+testExpressionCodePoints(coll, {$indexOfCP: ["øbc\0", "\0"]}, 3);
- testExpressionCodePoints(coll, {$indexOfCP: ["πbƒ\0d\0", "d", 5, 6]}, -1);
+testExpressionCodePoints(coll, {$indexOfCP: ["πbƒ\0d\0", "d", 5, 6]}, -1);
- // Error cases.
+// Error cases.
- var pipeline = {
- $project: {
- output: {
- $indexOfCP: [3, "s"],
- }
- }
- };
- assertErrorCode(coll, pipeline, 40093);
-
- pipeline = {
- $project: {
- output: {
- $indexOfCP: ["s", 3],
- }
+var pipeline = {
+ $project: {
+ output: {
+ $indexOfCP: [3, "s"],
}
- };
- assertErrorCode(coll, pipeline, 40094);
-
- pipeline = {$project: {output: {$indexOfCP: ["abc", "b", "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfCP: ["abc", "b", 0, "bad"]}}};
- assertErrorCode(coll, pipeline, 40096);
-
- pipeline = {$project: {output: {$indexOfCP: ["abc", "b", -1]}}};
- assertErrorCode(coll, pipeline, 40097);
+ }
+};
+assertErrorCode(coll, pipeline, 40093);
- pipeline = {$project: {output: {$indexOfCP: ["abc", "b", 1, -1]}}};
- assertErrorCode(coll, pipeline, 40097);
+pipeline = {
+ $project: {
+ output: {
+ $indexOfCP: ["s", 3],
+ }
+ }
+};
+assertErrorCode(coll, pipeline, 40094);
+
+pipeline = {
+ $project: {output: {$indexOfCP: ["abc", "b", "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfCP: ["abc", "b", 0, "bad"]}}
+};
+assertErrorCode(coll, pipeline, 40096);
+
+pipeline = {
+ $project: {output: {$indexOfCP: ["abc", "b", -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
+
+pipeline = {
+ $project: {output: {$indexOfCP: ["abc", "b", 1, -1]}}
+};
+assertErrorCode(coll, pipeline, 40097);
}());
diff --git a/jstests/aggregation/expressions/let.js b/jstests/aggregation/expressions/let.js
index 5de6db8eebf..0a9959e0d7e 100644
--- a/jstests/aggregation/expressions/let.js
+++ b/jstests/aggregation/expressions/let.js
@@ -2,50 +2,48 @@
* Basic integration tests for the $let expression.
*/
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
-
- let coll = db.agg_expr_let;
- coll.drop();
- assert.commandWorked(coll.insert({zero: 0, one: 1, two: 2, three: 3, nested: {four: 4}}));
-
- function testExpr(expression, output) {
- const res = coll.aggregate([{$project: {output: expression}}]).toArray();
- assert.eq(res.length, 1, tojson(res));
- assert.eq(res[0].output, output, tojson(res));
-
- // Test in group:
- const result = coll.aggregate({$group: {_id: 0, res: {$sum: expression}}}).toArray();
- assert.eq(result, [{_id: 0, res: output}]);
- }
-
- // Basic tests.
- testExpr('$two', 2);
- testExpr('$$CURRENT.two', 2);
- testExpr('$$ROOT.two', 2);
-
- // Using sub expressions.
- testExpr({$add: ['$two', '$$CURRENT.three']}, 5);
- testExpr({$add: ['$$CURRENT.two', '$$ROOT.nested.four']}, 6);
-
- // Verify that the variables defined in $let work.
- testExpr({$let: {vars: {a: 10}, in : '$$a'}}, 10);
- testExpr({$let: {vars: {a: '$zero'}, in : '$$a'}}, 0);
- testExpr({$let: {vars: {a: {$add: ['$one', '$two']}, b: 10}, in : {$multiply: ['$$a', '$$b']}}},
- 30);
-
- // Verify that the outer level variable works in inner level $let.
- testExpr({
- $let: {
- vars: {var1: 1},
- in : {$let: {vars: {var2: "$$var1"}, in : {$sum: ["$$var1", "$$var2"]}}}
- }
- },
- 2);
-
- // Verify that the outer level variables get overwritten by inner level variables.
- testExpr({
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+
+let coll = db.agg_expr_let;
+coll.drop();
+assert.commandWorked(coll.insert({zero: 0, one: 1, two: 2, three: 3, nested: {four: 4}}));
+
+function testExpr(expression, output) {
+ const res = coll.aggregate([{$project: {output: expression}}]).toArray();
+ assert.eq(res.length, 1, tojson(res));
+ assert.eq(res[0].output, output, tojson(res));
+
+ // Test in group:
+ const result = coll.aggregate({$group: {_id: 0, res: {$sum: expression}}}).toArray();
+ assert.eq(result, [{_id: 0, res: output}]);
+}
+
+// Basic tests.
+testExpr('$two', 2);
+testExpr('$$CURRENT.two', 2);
+testExpr('$$ROOT.two', 2);
+
+// Using sub expressions.
+testExpr({$add: ['$two', '$$CURRENT.three']}, 5);
+testExpr({$add: ['$$CURRENT.two', '$$ROOT.nested.four']}, 6);
+
+// Verify that the variables defined in $let work.
+testExpr({$let: {vars: {a: 10}, in : '$$a'}}, 10);
+testExpr({$let: {vars: {a: '$zero'}, in : '$$a'}}, 0);
+testExpr({$let: {vars: {a: {$add: ['$one', '$two']}, b: 10}, in : {$multiply: ['$$a', '$$b']}}},
+ 30);
+
+// Verify that the outer level variable works in inner level $let.
+testExpr({
+ $let:
+ {vars: {var1: 1}, in : {$let: {vars: {var2: "$$var1"}, in : {$sum: ["$$var1", "$$var2"]}}}}
+},
+ 2);
+
+// Verify that the outer level variables get overwritten by inner level variables.
+testExpr({
$let: {
vars: {var1: "$one"},
in : {$let: {vars: {var2: "$$var1", var1: 3}, in : {$sum: ["$$var2", "$$var1"]}}}
@@ -53,40 +51,39 @@
},
4);
- // $let changing CURRENT
- testExpr({$let: {vars: {CURRENT: '$$ROOT.nested'}, in : {$multiply: ['$four', '$$ROOT.two']}}},
- 8);
- testExpr({
- $let: {
- vars: {CURRENT: '$$CURRENT.nested'}, // using original value of CURRENT
- in : {$multiply: ['$four', '$$ROOT.two']}
- }
- },
- 8);
- testExpr({
- $let: {
- vars: {CURRENT: '$nested'}, // same as last
- in : {$multiply: ['$four', '$$ROOT.two']}
- }
- },
- 8);
- testExpr({
- $let: {
- vars: {CURRENT: {$const: {ten: 10}}}, // "artificial" object
- in : {$multiply: ['$ten', '$$ROOT.two']}
- }
- },
- 20);
- testExpr({
- $let: {
- vars: {CURRENT: '$three'}, // sets current to the number 3 (not an object)
- in : {$multiply: ['$$CURRENT', '$$ROOT.two']}
- }
- },
- 6);
+// $let changing CURRENT
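+// Rebinding CURRENT changes what bare field paths such as '$four' resolve
+// to, while '$$ROOT' still refers to the original document.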
+testExpr({$let: {vars: {CURRENT: '$$ROOT.nested'}, in : {$multiply: ['$four', '$$ROOT.two']}}}, 8);
+testExpr({
+ $let: {
+ vars: {CURRENT: '$$CURRENT.nested'}, // using original value of CURRENT
+ in : {$multiply: ['$four', '$$ROOT.two']}
+ }
+},
+ 8);
+testExpr({
+ $let: {
+ vars: {CURRENT: '$nested'}, // same as last
+ in : {$multiply: ['$four', '$$ROOT.two']}
+ }
+},
+ 8);
+testExpr({
+ $let: {
+ vars: {CURRENT: {$const: {ten: 10}}}, // "artificial" object
+ in : {$multiply: ['$ten', '$$ROOT.two']}
+ }
+},
+ 20);
+testExpr({
+ $let: {
+ vars: {CURRENT: '$three'}, // sets current to the number 3 (not an object)
+ in : {$multiply: ['$$CURRENT', '$$ROOT.two']}
+ }
+},
+ 6);
- // Swapping with $let (ensures there is no ordering dependency in vars).
- testExpr({
+// Swapping with $let (ensures there is no ordering dependency in vars).
+testExpr({
$let: {
vars: {x: 6, y: 10},
in : {
@@ -99,34 +96,32 @@
}, // Not commutative!
4); // 10-6 not 6-10 or 6-6
- // Unicode is allowed.
- testExpr({$let: {vars: {'日本語': 10}, in : '$$日本語'}},
- 10); // Japanese for "Japanese language".
-
- // Can use ROOT and CURRENT directly with no subfield (SERVER-5916).
- coll.drop();
- coll.insert({_id: 'obj'});
- assert.eq(coll.aggregate({$project: {_id: 0, obj: '$$ROOT'}}).toArray(), [{obj: {_id: 'obj'}}]);
- assert.eq(coll.aggregate({$project: {_id: 0, obj: '$$CURRENT'}}).toArray(),
- [{obj: {_id: 'obj'}}]);
- assert.eq(coll.aggregate({$group: {_id: 0, objs: {$push: '$$ROOT'}}}).toArray(),
- [{_id: 0, objs: [{_id: 'obj'}]}]);
- assert.eq(coll.aggregate({$group: {_id: 0, objs: {$push: '$$CURRENT'}}}).toArray(),
- [{_id: 0, objs: [{_id: 'obj'}]}]);
-
- // Check name validity checks.
- assertErrorCode(coll, {$project: {a: {$let: {vars: {ROOT: 1}, in : '$$ROOT'}}}}, 16867);
- assertErrorCode(coll, {$project: {a: {$let: {vars: {FOO: 1}, in : '$$FOO'}}}}, 16867);
- assertErrorCode(coll, {$project: {a: {$let: {vars: {_underbar: 1}, in : '$$FOO'}}}}, 16867);
- assertErrorCode(coll, {$project: {a: {$let: {vars: {'a.b': 1}, in : '$$FOO'}}}}, 16868);
- assertErrorCode(coll, {$project: {a: {$let: {vars: {'a b': 1}, in : '$$FOO'}}}}, 16868);
- assertErrorCode(coll, {$project: {a: '$$_underbar'}}, 16870);
- assertErrorCode(coll, {$project: {a: '$$with spaces'}}, 16871);
-
- // Verify that variables defined in '$let' cannot be used to initialize other variables.
- assertErrorCode(
- coll,
- [{$project: {output: {$let: {vars: {var1: "$one", var2: "$$var1"}, in : "$$var1"}}}}],
- 17276);
-
+// Unicode is allowed.
+testExpr({$let: {vars: {'日本語': 10}, in : '$$日本語'}},
+ 10); // Japanese for "Japanese language".
+
+// Can use ROOT and CURRENT directly with no subfield (SERVER-5916).
+coll.drop();
+coll.insert({_id: 'obj'});
+assert.eq(coll.aggregate({$project: {_id: 0, obj: '$$ROOT'}}).toArray(), [{obj: {_id: 'obj'}}]);
+assert.eq(coll.aggregate({$project: {_id: 0, obj: '$$CURRENT'}}).toArray(), [{obj: {_id: 'obj'}}]);
+assert.eq(coll.aggregate({$group: {_id: 0, objs: {$push: '$$ROOT'}}}).toArray(),
+ [{_id: 0, objs: [{_id: 'obj'}]}]);
+assert.eq(coll.aggregate({$group: {_id: 0, objs: {$push: '$$CURRENT'}}}).toArray(),
+ [{_id: 0, objs: [{_id: 'obj'}]}]);
+
+// Check name validity checks.
+assertErrorCode(coll, {$project: {a: {$let: {vars: {ROOT: 1}, in : '$$ROOT'}}}}, 16867);
+assertErrorCode(coll, {$project: {a: {$let: {vars: {FOO: 1}, in : '$$FOO'}}}}, 16867);
+assertErrorCode(coll, {$project: {a: {$let: {vars: {_underbar: 1}, in : '$$FOO'}}}}, 16867);
+assertErrorCode(coll, {$project: {a: {$let: {vars: {'a.b': 1}, in : '$$FOO'}}}}, 16868);
+assertErrorCode(coll, {$project: {a: {$let: {vars: {'a b': 1}, in : '$$FOO'}}}}, 16868);
+assertErrorCode(coll, {$project: {a: '$$_underbar'}}, 16870);
+assertErrorCode(coll, {$project: {a: '$$with spaces'}}, 16871);
+
+// Verify that variables defined in '$let' cannot be used to initialize other variables.
+assertErrorCode(
+ coll,
+ [{$project: {output: {$let: {vars: {var1: "$one", var2: "$$var1"}, in : "$$var1"}}}}],
+ 17276);
}());
diff --git a/jstests/aggregation/expressions/merge_objects.js b/jstests/aggregation/expressions/merge_objects.js
index 599e182ff5a..e6d38ccc6a4 100644
--- a/jstests/aggregation/expressions/merge_objects.js
+++ b/jstests/aggregation/expressions/merge_objects.js
@@ -1,160 +1,147 @@
// Tests for the $mergeObjects aggregation expression.
(function() {
- "use strict";
-
- // For assertErrorCode().
- load("jstests/aggregation/extras/utils.js");
-
- let coll = db.merge_object_expr;
- coll.drop();
-
- // Test merging two objects together.
- assert.writeOK(coll.insert({_id: 0, subObject: {b: 1, c: 1}}));
- let result = coll.aggregate([
- {$match: {_id: 0}},
- {$project: {mergedDocument: {$mergeObjects: ["$subObject", {d: 1}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 0, mergedDocument: {b: 1, c: 1, d: 1}}]);
-
- // Test merging the root document with a new field.
- assert.writeOK(coll.insert({_id: 1, a: 0, b: 1}));
- result =
- coll.aggregate([
- {$match: {_id: 1}},
- {$project: {mergedDocument: {$mergeObjects: ["$$ROOT", {newField: "newValue"}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 1, mergedDocument: {_id: 1, a: 0, b: 1, newField: "newValue"}}]);
-
- // Test replacing a field in the root.
- assert.writeOK(coll.insert({_id: 2, a: 0, b: 1}));
- result = coll.aggregate([
- {$match: {_id: 2}},
- {$project: {mergedDocument: {$mergeObjects: ["$$ROOT", {a: "newValue"}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 2, mergedDocument: {_id: 2, a: "newValue", b: 1}}]);
-
- // Test overriding a document with root.
- assert.writeOK(coll.insert({_id: 3, a: 0, b: 1}));
- result =
- coll.aggregate([
- {$match: {_id: 3}},
- {$project: {mergedDocument: {$mergeObjects: [{a: "defaultValue"}, "$$ROOT"]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 3, mergedDocument: {a: 0, _id: 3, b: 1}}]);
-
- // Test replacing root with merged document.
- assert.writeOK(coll.insert({_id: 4, a: 0, subObject: {b: 1, c: 2}}));
- result = coll.aggregate([
- {$match: {_id: 4}},
- {$replaceRoot: {newRoot: {$mergeObjects: ["$$ROOT", "$subObject"]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 4, a: 0, subObject: {b: 1, c: 2}, b: 1, c: 2}]);
-
- // Test merging with an embedded object.
- assert.writeOK(coll.insert({_id: 5, subObject: {b: 1, c: 1}}));
- result = coll.aggregate([
- {$match: {_id: 5}},
- {
- $project: {
- mergedDocument:
- {$mergeObjects: ["$subObject", {subObject1: {d: 1}}, {e: 1}]}
- }
- }
- ])
- .toArray();
- assert.eq(result, [{_id: 5, mergedDocument: {b: 1, c: 1, subObject1: {d: 1}, e: 1}}]);
-
- // Test for errors on non-document types.
- assert.writeOK(coll.insert({_id: 6, a: "string"}));
- assertErrorCode(coll,
- [
- {$match: {_id: 6}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {a: "newString"}]}}}
- ],
- 40400);
-
- assert.writeOK(coll.insert({_id: 7, a: {b: 1}, c: 1}));
- assertErrorCode(
- coll,
- [{$match: {_id: 7}}, {$project: {mergedDocument: {$mergeObjects: ["$a", "$c"]}}}],
- 40400);
-
- // Test outputs with null values.
- assert.writeOK(coll.insert({_id: 8, a: {b: 1}}));
- result = coll.aggregate([
- {$match: {_id: 8}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {b: null}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 8, mergedDocument: {b: null}}]);
+"use strict";
- // Test output with undefined values.
- assert.writeOK(coll.insert({_id: 9, a: {b: 1}}));
- result = coll.aggregate([
- {$match: {_id: 9}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {b: undefined}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 9, mergedDocument: {b: undefined}}]);
-
- // Test output with missing values.
- assert.writeOK(coll.insert({_id: 10, a: {b: 1}}));
- result =
- coll.aggregate([
- {$match: {_id: 10}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {b: "$nonExistentField"}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 10, mergedDocument: {b: 1}}]);
-
- assert.writeOK(coll.insert({_id: 11, a: {b: 1}}));
- result = coll.aggregate([
- {$match: {_id: 11}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {b: ""}]}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 11, mergedDocument: {b: ""}}]);
+// For assertErrorCode().
+load("jstests/aggregation/extras/utils.js");
- // Test outputs with empty values.
- assert.writeOK(coll.insert({_id: 12, b: 1, c: 1}));
- result =
- coll.aggregate([{$match: {_id: 12}}, {$project: {mergedDocument: {$mergeObjects: [{}]}}}])
- .toArray();
- assert.eq(result, [{_id: 12, mergedDocument: {}}]);
+let coll = db.merge_object_expr;
+coll.drop();
- result = coll.aggregate(
- [{$match: {_id: 12}}, {$project: {mergedDocument: {$mergeObjects: [{}, {}]}}}])
- .toArray();
- assert.eq(result, [{_id: 12, mergedDocument: {}}]);
-
- // Test merge within a $group stage.
- assert.writeOK(coll.insert({_id: 13, group: 1, obj: {}}));
- assert.writeOK(coll.insert({_id: 14, group: 1, obj: {a: 2, b: 2}}));
- assert.writeOK(coll.insert({_id: 15, group: 1, obj: {a: 1, c: 3}}));
- assert.writeOK(coll.insert({_id: 16, group: 2, obj: {a: 1, b: 1}}));
- result = coll.aggregate([
- {$match: {_id: {$in: [13, 14, 15, 16]}}},
- {$sort: {_id: 1}},
- {$group: {_id: "$group", mergedDocument: {$mergeObjects: "$obj"}}},
- {$sort: {_id: 1}},
- ])
- .toArray();
- assert.eq(
- result,
- [{_id: 1, mergedDocument: {a: 1, b: 2, c: 3}}, {_id: 2, mergedDocument: {a: 1, b: 1}}]);
-
- // Test merge with $$REMOVE operator.
- assert.writeOK(coll.insert({_id: 17, a: {b: 2}}));
- result = coll.aggregate([
- {$match: {_id: 17}},
- {$project: {mergedDocument: {$mergeObjects: ["$a", {b: "$$REMOVE"}]}}}
+// Test merging two objects together.
+assert.writeOK(coll.insert({_id: 0, subObject: {b: 1, c: 1}}));
+let result = coll.aggregate([
+ {$match: {_id: 0}},
+ {$project: {mergedDocument: {$mergeObjects: ["$subObject", {d: 1}]}}}
])
.toArray();
- assert.eq(result, [{_id: 17, mergedDocument: {b: 2}}]);
-
+assert.eq(result, [{_id: 0, mergedDocument: {b: 1, c: 1, d: 1}}]);
+
+// Test merging the root document with a new field.
+assert.writeOK(coll.insert({_id: 1, a: 0, b: 1}));
+result = coll.aggregate([
+ {$match: {_id: 1}},
+ {$project: {mergedDocument: {$mergeObjects: ["$$ROOT", {newField: "newValue"}]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 1, mergedDocument: {_id: 1, a: 0, b: 1, newField: "newValue"}}]);
+
+// Test replacing a field in the root.
+assert.writeOK(coll.insert({_id: 2, a: 0, b: 1}));
+result = coll.aggregate([
+ {$match: {_id: 2}},
+ {$project: {mergedDocument: {$mergeObjects: ["$$ROOT", {a: "newValue"}]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 2, mergedDocument: {_id: 2, a: "newValue", b: 1}}]);
+
+// Test overriding a document with root.
+assert.writeOK(coll.insert({_id: 3, a: 0, b: 1}));
+result = coll.aggregate([
+ {$match: {_id: 3}},
+ {$project: {mergedDocument: {$mergeObjects: [{a: "defaultValue"}, "$$ROOT"]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 3, mergedDocument: {a: 0, _id: 3, b: 1}}]);
+
+// Test replacing root with merged document.
+assert.writeOK(coll.insert({_id: 4, a: 0, subObject: {b: 1, c: 2}}));
+result = coll.aggregate([
+ {$match: {_id: 4}},
+ {$replaceRoot: {newRoot: {$mergeObjects: ["$$ROOT", "$subObject"]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 4, a: 0, subObject: {b: 1, c: 2}, b: 1, c: 2}]);
+
+// Test merging with an embedded object.
+assert.writeOK(coll.insert({_id: 5, subObject: {b: 1, c: 1}}));
+result =
+ coll.aggregate([
+ {$match: {_id: 5}},
+ {
+ $project:
+ {mergedDocument: {$mergeObjects: ["$subObject", {subObject1: {d: 1}}, {e: 1}]}}
+ }
+ ])
+ .toArray();
+assert.eq(result, [{_id: 5, mergedDocument: {b: 1, c: 1, subObject1: {d: 1}, e: 1}}]);
+
+// Test for errors on non-document types.
+assert.writeOK(coll.insert({_id: 6, a: "string"}));
+assertErrorCode(
+ coll,
+ [{$match: {_id: 6}}, {$project: {mergedDocument: {$mergeObjects: ["$a", {a: "newString"}]}}}],
+ 40400);
+
+assert.writeOK(coll.insert({_id: 7, a: {b: 1}, c: 1}));
+assertErrorCode(
+ coll, [{$match: {_id: 7}}, {$project: {mergedDocument: {$mergeObjects: ["$a", "$c"]}}}], 40400);
+
+// Test outputs with null values.
+assert.writeOK(coll.insert({_id: 8, a: {b: 1}}));
+result =
+ coll.aggregate(
+ [{$match: {_id: 8}}, {$project: {mergedDocument: {$mergeObjects: ["$a", {b: null}]}}}])
+ .toArray();
+assert.eq(result, [{_id: 8, mergedDocument: {b: null}}]);
+
+// Test output with undefined values.
+assert.writeOK(coll.insert({_id: 9, a: {b: 1}}));
+result = coll.aggregate([
+ {$match: {_id: 9}},
+ {$project: {mergedDocument: {$mergeObjects: ["$a", {b: undefined}]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 9, mergedDocument: {b: undefined}}]);
+
+// Test output with missing values.
+assert.writeOK(coll.insert({_id: 10, a: {b: 1}}));
+result = coll.aggregate([
+ {$match: {_id: 10}},
+ {$project: {mergedDocument: {$mergeObjects: ["$a", {b: "$nonExistentField"}]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 10, mergedDocument: {b: 1}}]);
+
+assert.writeOK(coll.insert({_id: 11, a: {b: 1}}));
+result =
+ coll.aggregate(
+ [{$match: {_id: 11}}, {$project: {mergedDocument: {$mergeObjects: ["$a", {b: ""}]}}}])
+ .toArray();
+assert.eq(result, [{_id: 11, mergedDocument: {b: ""}}]);
+
+// Test outputs with empty values.
+assert.writeOK(coll.insert({_id: 12, b: 1, c: 1}));
+result = coll.aggregate([{$match: {_id: 12}}, {$project: {mergedDocument: {$mergeObjects: [{}]}}}])
+ .toArray();
+assert.eq(result, [{_id: 12, mergedDocument: {}}]);
+
+result =
+ coll.aggregate([{$match: {_id: 12}}, {$project: {mergedDocument: {$mergeObjects: [{}, {}]}}}])
+ .toArray();
+assert.eq(result, [{_id: 12, mergedDocument: {}}]);
+
+// Test merge within a $group stage.
+assert.writeOK(coll.insert({_id: 13, group: 1, obj: {}}));
+assert.writeOK(coll.insert({_id: 14, group: 1, obj: {a: 2, b: 2}}));
+assert.writeOK(coll.insert({_id: 15, group: 1, obj: {a: 1, c: 3}}));
+assert.writeOK(coll.insert({_id: 16, group: 2, obj: {a: 1, b: 1}}));
+result = coll.aggregate([
+ {$match: {_id: {$in: [13, 14, 15, 16]}}},
+ {$sort: {_id: 1}},
+ {$group: {_id: "$group", mergedDocument: {$mergeObjects: "$obj"}}},
+ {$sort: {_id: 1}},
+ ])
+ .toArray();
+assert.eq(result,
+ [{_id: 1, mergedDocument: {a: 1, b: 2, c: 3}}, {_id: 2, mergedDocument: {a: 1, b: 1}}]);
+
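+// $mergeObjects keeps the last value seen for each key, so with documents
+// applied in _id order the group above resolves to {a: 1, b: 2, c: 3}.
+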
+// Test merge with $$REMOVE operator.
+assert.writeOK(coll.insert({_id: 17, a: {b: 2}}));
+result = coll.aggregate([
+ {$match: {_id: 17}},
+ {$project: {mergedDocument: {$mergeObjects: ["$a", {b: "$$REMOVE"}]}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 17, mergedDocument: {b: 2}}]);
}());
diff --git a/jstests/aggregation/expressions/objectToArray.js b/jstests/aggregation/expressions/objectToArray.js
index 5f92b0ae1a9..0ec4a40c2c0 100644
--- a/jstests/aggregation/expressions/objectToArray.js
+++ b/jstests/aggregation/expressions/objectToArray.js
@@ -1,95 +1,90 @@
// Tests for the $objectToArray aggregation expression.
(function() {
- "use strict";
-
- // For assertErrorCode().
- load("jstests/aggregation/extras/utils.js");
-
- let coll = db.object_to_array_expr;
- coll.drop();
-
- let object_to_array_expr = {$project: {expanded: {$objectToArray: "$subDoc"}}};
-
- // $objectToArray correctly converts a document to an array of key-value pairs.
- assert.writeOK(coll.insert({_id: 0, subDoc: {"a": 1, "b": 2, "c": "foo"}}));
- let result = coll.aggregate([{$match: {_id: 0}}, object_to_array_expr]).toArray();
- assert.eq(
- result,
- [{_id: 0, expanded: [{"k": "a", "v": 1}, {"k": "b", "v": 2}, {"k": "c", "v": "foo"}]}]);
-
- assert.writeOK(coll.insert({_id: 1, subDoc: {"y": []}}));
- result = coll.aggregate([{$match: {_id: 1}}, object_to_array_expr]).toArray();
- assert.eq(result, [{_id: 1, expanded: [{"k": "y", "v": []}]}]);
-
- assert.writeOK(coll.insert({_id: 2, subDoc: {"a": 1, "b": {"d": "string"}, "c": [1, 2]}}));
- result = coll.aggregate([{$match: {_id: 2}}, object_to_array_expr]).toArray();
- assert.eq(
- result, [{
- _id: 2,
- expanded:
- [{"k": "a", "v": 1}, {"k": "b", "v": {"d": "string"}}, {"k": "c", "v": [1, 2]}]
- }]);
-
- assert.writeOK(coll.insert({_id: 3, subDoc: {}}));
- result = coll.aggregate([{$match: {_id: 3}}, object_to_array_expr]).toArray();
- assert.eq(result, [{_id: 3, expanded: []}]);
-
- // Turns to array from the root of the document.
- assert.writeOK(coll.insert({_id: 4, "a": 1, "b": 2, "c": 3}));
- result =
- coll.aggregate([{$match: {_id: 4}}, {$project: {document: {$objectToArray: "$$ROOT"}}}])
- .toArray();
- assert.eq(result, [
- {
- _id: 4,
- document:
- [{"k": "_id", "v": 4}, {"k": "a", "v": 1}, {"k": "b", "v": 2}, {"k": "c", "v": 3}]
- }
- ]);
-
- assert.writeOK(coll.insert({_id: 5, "date": ISODate("2017-01-24T00:00:00")}));
- result = coll.aggregate([
- {$match: {_id: 5}},
- {$project: {document: {$objectToArray: {dayOfWeek: {$dayOfWeek: "$date"}}}}}
- ])
- .toArray();
- assert.eq(result, [{_id: 5, document: [{"k": "dayOfWeek", "v": 3}]}]);
-
- // $objectToArray errors on non-document types.
- assert.writeOK(coll.insert({_id: 6, subDoc: "string"}));
- assertErrorCode(coll, [{$match: {_id: 6}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 7, subDoc: ObjectId()}));
- assertErrorCode(coll, [{$match: {_id: 7}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 8, subDoc: NumberLong(0)}));
- assertErrorCode(coll, [{$match: {_id: 8}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 9, subDoc: []}));
- assertErrorCode(coll, [{$match: {_id: 9}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 10, subDoc: [0]}));
- assertErrorCode(coll, [{$match: {_id: 10}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 11, subDoc: ["string"]}));
- assertErrorCode(coll, [{$match: {_id: 11}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 12, subDoc: [{"a": "b"}]}));
- assertErrorCode(coll, [{$match: {_id: 12}}, object_to_array_expr], 40390);
-
- assert.writeOK(coll.insert({_id: 13, subDoc: NaN}));
- assertErrorCode(coll, [{$match: {_id: 13}}, object_to_array_expr], 40390);
-
- // $objectToArray outputs null on null-ish types.
- assert.writeOK(coll.insert({_id: 14, subDoc: null}));
- result = coll.aggregate([{$match: {_id: 14}}, object_to_array_expr]).toArray();
- assert.eq(result, [{_id: 14, expanded: null}]);
-
- assert.writeOK(coll.insert({_id: 15, subDoc: undefined}));
- result = coll.aggregate([{$match: {_id: 15}}, object_to_array_expr]).toArray();
- assert.eq(result, [{_id: 15, expanded: null}]);
-
- assert.writeOK(coll.insert({_id: 16}));
- result = coll.aggregate([{$match: {_id: 16}}, object_to_array_expr]).toArray();
- assert.eq(result, [{_id: 16, expanded: null}]);
+"use strict";
+
+// For assertErrorCode().
+load("jstests/aggregation/extras/utils.js");
+
+let coll = db.object_to_array_expr;
+coll.drop();
+
+let object_to_array_expr = {$project: {expanded: {$objectToArray: "$subDoc"}}};
+
+// $objectToArray correctly converts a document to an array of key-value pairs.
+assert.writeOK(coll.insert({_id: 0, subDoc: {"a": 1, "b": 2, "c": "foo"}}));
+let result = coll.aggregate([{$match: {_id: 0}}, object_to_array_expr]).toArray();
+assert.eq(result,
+ [{_id: 0, expanded: [{"k": "a", "v": 1}, {"k": "b", "v": 2}, {"k": "c", "v": "foo"}]}]);
+
+assert.writeOK(coll.insert({_id: 1, subDoc: {"y": []}}));
+result = coll.aggregate([{$match: {_id: 1}}, object_to_array_expr]).toArray();
+assert.eq(result, [{_id: 1, expanded: [{"k": "y", "v": []}]}]);
+
+assert.writeOK(coll.insert({_id: 2, subDoc: {"a": 1, "b": {"d": "string"}, "c": [1, 2]}}));
+result = coll.aggregate([{$match: {_id: 2}}, object_to_array_expr]).toArray();
+assert.eq(
+ result, [{
+ _id: 2,
+ expanded: [{"k": "a", "v": 1}, {"k": "b", "v": {"d": "string"}}, {"k": "c", "v": [1, 2]}]
+ }]);
+
+assert.writeOK(coll.insert({_id: 3, subDoc: {}}));
+result = coll.aggregate([{$match: {_id: 3}}, object_to_array_expr]).toArray();
+assert.eq(result, [{_id: 3, expanded: []}]);
+
+// Converts the root document itself to an array.
+assert.writeOK(coll.insert({_id: 4, "a": 1, "b": 2, "c": 3}));
+result = coll.aggregate([{$match: {_id: 4}}, {$project: {document: {$objectToArray: "$$ROOT"}}}])
+ .toArray();
+assert.eq(
+ result, [{
+ _id: 4,
+ document: [{"k": "_id", "v": 4}, {"k": "a", "v": 1}, {"k": "b", "v": 2}, {"k": "c", "v": 3}]
+ }]);
+
+assert.writeOK(coll.insert({_id: 5, "date": ISODate("2017-01-24T00:00:00")}));
+result = coll.aggregate([
+ {$match: {_id: 5}},
+ {$project: {document: {$objectToArray: {dayOfWeek: {$dayOfWeek: "$date"}}}}}
+ ])
+ .toArray();
+assert.eq(result, [{_id: 5, document: [{"k": "dayOfWeek", "v": 3}]}]);
+
+// $objectToArray errors on non-document types.
+assert.writeOK(coll.insert({_id: 6, subDoc: "string"}));
+assertErrorCode(coll, [{$match: {_id: 6}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 7, subDoc: ObjectId()}));
+assertErrorCode(coll, [{$match: {_id: 7}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 8, subDoc: NumberLong(0)}));
+assertErrorCode(coll, [{$match: {_id: 8}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 9, subDoc: []}));
+assertErrorCode(coll, [{$match: {_id: 9}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 10, subDoc: [0]}));
+assertErrorCode(coll, [{$match: {_id: 10}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 11, subDoc: ["string"]}));
+assertErrorCode(coll, [{$match: {_id: 11}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 12, subDoc: [{"a": "b"}]}));
+assertErrorCode(coll, [{$match: {_id: 12}}, object_to_array_expr], 40390);
+
+assert.writeOK(coll.insert({_id: 13, subDoc: NaN}));
+assertErrorCode(coll, [{$match: {_id: 13}}, object_to_array_expr], 40390);
+
+// $objectToArray outputs null on null-ish types.
+assert.writeOK(coll.insert({_id: 14, subDoc: null}));
+result = coll.aggregate([{$match: {_id: 14}}, object_to_array_expr]).toArray();
+assert.eq(result, [{_id: 14, expanded: null}]);
+
+assert.writeOK(coll.insert({_id: 15, subDoc: undefined}));
+result = coll.aggregate([{$match: {_id: 15}}, object_to_array_expr]).toArray();
+assert.eq(result, [{_id: 15, expanded: null}]);
+
+assert.writeOK(coll.insert({_id: 16}));
+result = coll.aggregate([{$match: {_id: 16}}, object_to_array_expr]).toArray();
+assert.eq(result, [{_id: 16, expanded: null}]);
}());
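The {k: ..., v: ...} pair shape asserted throughout this file is exactly what $arrayToObject consumes, so the two expressions round-trip a document unchanged. A quick sketch under that assumption (the collection name otoa_demo is hypothetical):

const otoaDemo = db.otoa_demo;
otoaDemo.drop();
assert.writeOK(otoaDemo.insert({_id: 0, subDoc: {a: 1, b: "two"}}));
const roundTrip =
    otoaDemo.aggregate([{$project: {back: {$arrayToObject: {$objectToArray: "$subDoc"}}}}])
        .toArray();
// Converting to k/v pairs and back reproduces the original embedded document.
assert.eq(roundTrip, [{_id: 0, back: {a: 1, b: "two"}}]);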
diff --git a/jstests/aggregation/expressions/object_ids_for_date_expressions.js b/jstests/aggregation/expressions/object_ids_for_date_expressions.js
index dae3ce0d280..bff8ab587b3 100644
--- a/jstests/aggregation/expressions/object_ids_for_date_expressions.js
+++ b/jstests/aggregation/expressions/object_ids_for_date_expressions.js
@@ -1,98 +1,97 @@
// SERVER-9406: Allow ObjectId type to be treated as a date in date related expressions
(function() {
- "use strict";
+"use strict";
- load('jstests/libs/dateutil.js');
+load('jstests/libs/dateutil.js');
- const coll = db.server9406;
- let testOpCount = 0;
+const coll = db.server9406;
+let testOpCount = 0;
- coll.drop();
+coll.drop();
- // Seed collection so that the pipeline will execute.
- assert.writeOK(coll.insert({}));
+// Seed collection so that the pipeline will execute.
+assert.writeOK(coll.insert({}));
- function makeObjectIdFromDate(dt) {
- try {
- return new ObjectId((dt.getTime() / 1000).toString(16) + "f000000000000000");
- } catch (e) {
- assert("Invalid date for conversion to Object Id: " + dt);
- }
+function makeObjectIdFromDate(dt) {
+ try {
+ return new ObjectId((dt.getTime() / 1000).toString(16) + "f000000000000000");
+ } catch (e) {
+ assert("Invalid date for conversion to Object Id: " + dt);
}
+}
- /**
- * Helper for testing that 'op' on 'value' is the same for dates as equivalent ObjectIds
- * 'value' is either a date value, or an object containing field 'date'.
- */
- function testOp(op, value) {
- testOpCount++;
+/**
+ * Helper for testing that 'op' on 'value' produces the same result for dates as for
+ * equivalent ObjectIds. 'value' is either a date value or an object containing the field 'date'.
+ */
+function testOp(op, value) {
+ testOpCount++;
- let pipeline = [{$project: {_id: 0, result: {}}}];
- pipeline[0].$project.result[op] = value;
- let res1 = coll.aggregate(pipeline).toArray()[0];
- if (value.date) {
- value.date = makeObjectIdFromDate(value.date);
- } else {
- value = makeObjectIdFromDate(value);
- }
- pipeline[0].$project.result[op] = value;
- let res2 = coll.aggregate(pipeline).toArray()[0];
-
- assert.eq(res2.result, res1.result, tojson(pipeline));
+ let pipeline = [{$project: {_id: 0, result: {}}}];
+ pipeline[0].$project.result[op] = value;
+ let res1 = coll.aggregate(pipeline).toArray()[0];
+ if (value.date) {
+ value.date = makeObjectIdFromDate(value.date);
+ } else {
+ value = makeObjectIdFromDate(value);
}
+ pipeline[0].$project.result[op] = value;
+ let res2 = coll.aggregate(pipeline).toArray()[0];
- testOp('$dateToString', {date: new Date("1980-12-31T23:59:59Z"), format: "%V-%G"});
- testOp('$dateToString', {date: new Date("1980-12-31T23:59:59Z"), format: "%G-%V"});
+ assert.eq(res2.result, res1.result, tojson(pipeline));
+}
- const years = [
- 2002, // Starting and ending on Tuesday.
- 2014, // Starting and ending on Wednesday.
- 2015, // Starting and ending on Thursday.
- 2010, // Starting and ending on Friday.
- 2011, // Starting and ending on Saturday.
- 2006, // Starting and ending on Sunday.
- 1996, // Starting on Monday, ending on Tuesday.
- 2008, // Starting on Tuesday, ending on Wednesday.
- 1992, // Starting on Wednesday, ending on Thursday.
- 2004, // Starting on Thursday, ending on Friday.
- 2016, // Starting on Friday, ending on Saturday.
- 2000, // Starting on Saturday, ending on Sunday (special).
- 2012 // Starting on Sunday, ending on Monday.
- ];
+testOp('$dateToString', {date: new Date("1980-12-31T23:59:59Z"), format: "%V-%G"});
+testOp('$dateToString', {date: new Date("1980-12-31T23:59:59Z"), format: "%G-%V"});
- const day = 1;
- years.forEach(function(year) {
- // forEach starts indexing at zero but weekdays start with Monday on 1 so we add +1.
- let newYear = DateUtil.getNewYear(year);
- let endOfFirstWeekInYear = DateUtil.getEndOfFirstWeekInYear(year, day);
- let startOfSecondWeekInYear = DateUtil.getStartOfSecondWeekInYear(year, day);
- let birthday = DateUtil.getBirthday(year);
- let newYearsEve = DateUtil.getNewYearsEve(year);
- let now = new Date();
- now.setYear(year);
- now.setMilliseconds(0);
+const years = [
+ 2002, // Starting and ending on Tuesday.
+ 2014, // Starting and ending on Wednesday.
+ 2015, // Starting and ending on Thursday.
+ 2010, // Starting and ending on Friday.
+ 2011, // Starting and ending on Saturday.
+ 2006, // Starting and ending on Sunday.
+ 1996, // Starting on Monday, ending on Tuesday.
+ 2008, // Starting on Tuesday, ending on Wednesday.
+ 1992, // Starting on Wednesday, ending on Thursday.
+ 2004, // Starting on Thursday, ending on Friday.
+ 2016, // Starting on Friday, ending on Saturday.
+ 2000, // Starting on Saturday, ending on Sunday (special).
+ 2012 // Starting on Sunday, ending on Monday.
+];
- testOp('$isoDayOfWeek', newYear);
- testOp('$isoDayOfWeek', endOfFirstWeekInYear);
- testOp('$isoDayOfWeek', startOfSecondWeekInYear);
- testOp('$isoWeekYear', birthday);
+const day = 1;
+years.forEach(function(year) {
+    // forEach indexes from zero, but ISO weekdays number Monday as 1, so we add 1.
+ let newYear = DateUtil.getNewYear(year);
+ let endOfFirstWeekInYear = DateUtil.getEndOfFirstWeekInYear(year, day);
+ let startOfSecondWeekInYear = DateUtil.getStartOfSecondWeekInYear(year, day);
+ let birthday = DateUtil.getBirthday(year);
+ let newYearsEve = DateUtil.getNewYearsEve(year);
+ let now = new Date();
+ now.setYear(year);
+ now.setMilliseconds(0);
- testOp('$isoWeek', newYear);
- testOp('$isoWeek', now);
- testOp('$isoWeekYear', newYear);
- testOp('$isoWeek', endOfFirstWeekInYear);
- testOp('$dateToString', {format: '%G-W%V-%u', date: newYear});
- testOp('$isoWeek', endOfFirstWeekInYear);
- testOp('$year', endOfFirstWeekInYear);
- testOp('$month', endOfFirstWeekInYear);
- testOp('$dayOfMonth', endOfFirstWeekInYear);
- testOp('$dayOfWeek', birthday);
- testOp('$dayOfWeek', newYearsEve);
- testOp('$minute', newYearsEve);
- testOp('$second', now);
- testOp('$millisecond', newYear);
+ testOp('$isoDayOfWeek', newYear);
+ testOp('$isoDayOfWeek', endOfFirstWeekInYear);
+ testOp('$isoDayOfWeek', startOfSecondWeekInYear);
+ testOp('$isoWeekYear', birthday);
- });
- assert.eq(testOpCount, 236, 'Expected 236 tests to run');
+ testOp('$isoWeek', newYear);
+ testOp('$isoWeek', now);
+ testOp('$isoWeekYear', newYear);
+ testOp('$isoWeek', endOfFirstWeekInYear);
+ testOp('$dateToString', {format: '%G-W%V-%u', date: newYear});
+ testOp('$isoWeek', endOfFirstWeekInYear);
+ testOp('$year', endOfFirstWeekInYear);
+ testOp('$month', endOfFirstWeekInYear);
+ testOp('$dayOfMonth', endOfFirstWeekInYear);
+ testOp('$dayOfWeek', birthday);
+ testOp('$dayOfWeek', newYearsEve);
+ testOp('$minute', newYearsEve);
+ testOp('$second', now);
+ testOp('$millisecond', newYear);
+});
+assert.eq(testOpCount, 236, 'Expected 236 tests to run');
})();
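makeObjectIdFromDate() above relies on the ObjectId layout: the first four bytes (eight hex characters) are a big-endian count of whole seconds since the Unix epoch, so appending a fixed sixteen-character suffix yields a valid 24-character id carrying the same timestamp. A sketch of that invariant, checkable with the shell's ObjectId.getTimestamp():

const dt = new Date("2015-06-19T10:00:00Z");  // Must fall on a whole second.
const oid = new ObjectId((dt.getTime() / 1000).toString(16) + "f000000000000000");
// getTimestamp() decodes the leading four bytes back into a Date.
assert.eq(oid.getTimestamp().getTime(), dt.getTime());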
diff --git a/jstests/aggregation/expressions/reduce.js b/jstests/aggregation/expressions/reduce.js
index 54a66fc8b56..2565b88ed60 100644
--- a/jstests/aggregation/expressions/reduce.js
+++ b/jstests/aggregation/expressions/reduce.js
@@ -3,26 +3,20 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
+"use strict";
- var coll = db.reduce;
+var coll = db.reduce;
- testExpression(
- coll,
- {
- $reduce:
- {input: [1, 2, 3], initialValue: {$literal: 0}, in : {$sum: ["$$this", "$$value"]}}
- },
- 6);
- testExpression(coll, {$reduce: {input: [], initialValue: {$literal: 0}, in : 10}}, 0);
- testExpression(
- coll,
- {
- $reduce:
- {input: [1, 2, 3], initialValue: [], in : {$concatArrays: ["$$value", ["$$this"]]}}
- },
- [1, 2, 3]);
- testExpression(coll,
+testExpression(
+ coll,
+ {$reduce: {input: [1, 2, 3], initialValue: {$literal: 0}, in : {$sum: ["$$this", "$$value"]}}},
+ 6);
+testExpression(coll, {$reduce: {input: [], initialValue: {$literal: 0}, in : 10}}, 0);
+testExpression(
+ coll,
+ {$reduce: {input: [1, 2, 3], initialValue: [], in : {$concatArrays: ["$$value", ["$$this"]]}}},
+ [1, 2, 3]);
+testExpression(coll,
{
$reduce: {
input: [1, 2],
@@ -32,8 +26,8 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExp
},
[[], []]);
- // A nested $reduce which sums each subarray, then multiplies the results.
- testExpression(coll,
+// A nested $reduce which sums each subarray, then multiplies the results.
+testExpression(coll,
{
$reduce: {
input: [[1, 2, 3], [4, 5]],
@@ -54,9 +48,9 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExp
},
54);
- // A nested $reduce using a $let to allow the inner $reduce to access the variables of the
- // outer.
- testExpression(coll,
+// A nested $reduce using a $let to allow the inner $reduce to access the variables of the
+// outer.
+testExpression(coll,
{
$reduce: {
input: [[0, 1], [2, 3]],
@@ -84,19 +78,18 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExp
},
{allElements: [0, 1, 2, 3], sumOfInner: 6});
- // Nullish input produces null as an output.
- testExpression(coll, {$reduce: {input: null, initialValue: {$literal: 0}, in : 5}}, null);
- testExpression(
- coll, {$reduce: {input: "$nonexistent", initialValue: {$literal: 0}, in : 5}}, null);
+// Nullish input produces null as an output.
+testExpression(coll, {$reduce: {input: null, initialValue: {$literal: 0}, in : 5}}, null);
+testExpression(coll, {$reduce: {input: "$nonexistent", initialValue: {$literal: 0}, in : 5}}, null);
- // Error cases for $reduce.
+// Error cases for $reduce.
- // $reduce requires an object.
- var pipeline = {$project: {reduced: {$reduce: 0}}};
- assertErrorCode(coll, pipeline, 40075);
+// $reduce requires an object.
+var pipeline = {$project: {reduced: {$reduce: 0}}};
+assertErrorCode(coll, pipeline, 40075);
- // Unknown field specified.
- pipeline = {
+// Unknown field specified.
+pipeline = {
$project: {
reduced: {
$reduce: {
@@ -108,27 +101,35 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExp
}
}
};
- assertErrorCode(coll, pipeline, 40076);
+assertErrorCode(coll, pipeline, 40076);
- // $reduce requires input to be specified.
- pipeline = {$project: {reduced: {$reduce: {initialValue: {$literal: 0}, in : {$literal: 0}}}}};
- assertErrorCode(coll, pipeline, 40077);
+// $reduce requires input to be specified.
+pipeline = {
+ $project: {reduced: {$reduce: {initialValue: {$literal: 0}, in : {$literal: 0}}}}
+};
+assertErrorCode(coll, pipeline, 40077);
- // $reduce requires initialValue to be specified.
- pipeline = {$project: {reduced: {$reduce: {input: {$literal: 0}, in : {$literal: 0}}}}};
- assertErrorCode(coll, pipeline, 40078);
+// $reduce requires initialValue to be specified.
+pipeline = {
+ $project: {reduced: {$reduce: {input: {$literal: 0}, in : {$literal: 0}}}}
+};
+assertErrorCode(coll, pipeline, 40078);
- // $reduce requires in to be specified.
- pipeline = {
- $project: {reduced: {$reduce: {input: {$literal: 0}, initialValue: {$literal: 0}}}}
- };
- assertErrorCode(coll, pipeline, 40079);
+// $reduce requires in to be specified.
+pipeline = {
+ $project: {reduced: {$reduce: {input: {$literal: 0}, initialValue: {$literal: 0}}}}
+};
+assertErrorCode(coll, pipeline, 40079);
- // $$value is undefined in the non-'in' arguments of $reduce.
- pipeline = {$project: {reduced: {$reduce: {input: "$$value", initialValue: [], in : []}}}};
- assertErrorCode(coll, pipeline, 17276);
+// $$value is undefined in the non-'in' arguments of $reduce.
+pipeline = {
+ $project: {reduced: {$reduce: {input: "$$value", initialValue: [], in : []}}}
+};
+assertErrorCode(coll, pipeline, 17276);
- // $$this is undefined in the non-'in' arguments of $reduce.
- pipeline = {$project: {reduced: {$reduce: {input: "$$this", initialValue: [], in : []}}}};
- assertErrorCode(coll, pipeline, 17276);
+// $$this is undefined in the non-'in' arguments of $reduce.
+pipeline = {
+ $project: {reduced: {$reduce: {input: "$$this", initialValue: [], in : []}}}
+};
+assertErrorCode(coll, pipeline, 17276);
}());
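For orientation, $reduce is a left fold: $$value starts as initialValue and 'in' is evaluated once per element of input, left to right, with $$this bound to the current element. The summation case above therefore mirrors the shell's own Array.prototype.reduce:

// {$reduce: {input: [1, 2, 3], initialValue: 0, in: {$sum: ["$$value", "$$this"]}}}
// computes the same left fold as:
const folded = [1, 2, 3].reduce((value, elem) => value + elem, 0);
assert.eq(folded, 6);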
diff --git a/jstests/aggregation/expressions/regex.js b/jstests/aggregation/expressions/regex.js
index 2713828e5c6..bf128ec04cc 100644
--- a/jstests/aggregation/expressions/regex.js
+++ b/jstests/aggregation/expressions/regex.js
@@ -2,509 +2,490 @@
* Tests for $regexFind, $regexFindAll and $regexMatch aggregation expressions.
*/
(function() {
- 'use strict';
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode().
- const coll = db.regex_find_expr;
- coll.drop();
+'use strict';
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode().
+const coll = db.regex_find_expr;
+coll.drop();
- function testRegex(expression, inputObj, expectedOutput) {
- const result =
- coll.aggregate([
- {"$project": {_id: 0, "matches": {[expression]: inputObj}}},
- {"$sort": {"matches": 1}} // Sort to ensure the documents are returned in a
- // deterministic order for sharded clusters.
- ])
- .toArray();
- assert.eq(result, expectedOutput);
- }
- function testRegexForKey(expression, key, inputObj, expectedMatchObj) {
- const result =
- coll.aggregate(
- [{"$match": {"_id": key}}, {"$project": {"matches": {[expression]: inputObj}}}])
- .toArray();
- const expectedOutput = [{"_id": key, "matches": expectedMatchObj}];
- assert.eq(result, expectedOutput);
- }
+function testRegex(expression, inputObj, expectedOutput) {
+ const result =
+ coll.aggregate([
+ {"$project": {_id: 0, "matches": {[expression]: inputObj}}},
+ {"$sort": {"matches": 1}} // Sort to ensure the documents are returned in a
+ // deterministic order for sharded clusters.
+ ])
+ .toArray();
+ assert.eq(result, expectedOutput);
+}
+function testRegexForKey(expression, key, inputObj, expectedMatchObj) {
+ const result =
+ coll.aggregate(
+ [{"$match": {"_id": key}}, {"$project": {"matches": {[expression]: inputObj}}}])
+ .toArray();
+ const expectedOutput = [{"_id": key, "matches": expectedMatchObj}];
+ assert.eq(result, expectedOutput);
+}
- /**
- * This function validates the output against $regexFind, $regexFindAll and $regexMatch
- * expressions.
- */
- function testRegexFindAgg(inputObj, expectedOutputForFindAll) {
- testRegex("$regexFindAll", inputObj, expectedOutputForFindAll);
- // For each of the output document, get first element from "matches" array. This will
- // convert 'regexFindAll' output to 'regexFind' output.
- const expectedOutputForFind = expectedOutputForFindAll.map(
- (element) => ({matches: element.matches.length == 0 ? null : element.matches[0]}));
- testRegex("$regexFind", inputObj, expectedOutputForFind);
+/**
+ * This function validates the output of the $regexFind, $regexFindAll and $regexMatch
+ * expressions across all documents.
+ */
+function testRegexFindAgg(inputObj, expectedOutputForFindAll) {
+ testRegex("$regexFindAll", inputObj, expectedOutputForFindAll);
+    // For each output document, take the first element from the "matches" array. This
+    // converts 'regexFindAll' output to 'regexFind' output.
+ const expectedOutputForFind = expectedOutputForFindAll.map(
+ (element) => ({matches: element.matches.length == 0 ? null : element.matches[0]}));
+ testRegex("$regexFind", inputObj, expectedOutputForFind);
- // For each of the output document, if there is at least one element in the array, then
- // there is a match.
- const expectedOutputForMatch =
- expectedOutputForFindAll.map((element) => ({matches: element.matches.length != 0}));
- testRegex("$regexMatch", inputObj, expectedOutputForMatch);
- }
+    // For each output document, there is a match if the "matches" array contains at
+    // least one element.
+ const expectedOutputForMatch =
+ expectedOutputForFindAll.map((element) => ({matches: element.matches.length != 0}));
+ testRegex("$regexMatch", inputObj, expectedOutputForMatch);
+}
- /**
- * This function validates the output against $regexFind, $regexFindAll and $regexMatch
- * expressions.
- */
- function testRegexFindAggForKey(key, inputObj, expectedOutputForFindAll) {
- testRegexForKey("$regexFindAll", key, inputObj, expectedOutputForFindAll);
+/**
+ * This function validates the output of $regexFind, $regexFindAll and $regexMatch for the
+ * document with the given key.
+ */
+function testRegexFindAggForKey(key, inputObj, expectedOutputForFindAll) {
+ testRegexForKey("$regexFindAll", key, inputObj, expectedOutputForFindAll);
- const expectedOutputForFind =
- expectedOutputForFindAll.length == 0 ? null : expectedOutputForFindAll[0];
- testRegexForKey("$regexFind", key, inputObj, expectedOutputForFind);
+ const expectedOutputForFind =
+ expectedOutputForFindAll.length == 0 ? null : expectedOutputForFindAll[0];
+ testRegexForKey("$regexFind", key, inputObj, expectedOutputForFind);
- const expectedOutputForMatch = expectedOutputForFindAll.length != 0;
- testRegexForKey("$regexMatch", key, inputObj, expectedOutputForMatch);
- }
+ const expectedOutputForMatch = expectedOutputForFindAll.length != 0;
+ testRegexForKey("$regexMatch", key, inputObj, expectedOutputForMatch);
+}
- /**
- * This function validates the output against $regexFind, $regexFindAll and $regexMatch
- * expressions.
- */
- function testRegexAggException(inputObj, exceptionCode) {
- assertErrorCode(
- coll, [{"$project": {"matches": {"$regexFindAll": inputObj}}}], exceptionCode);
- assertErrorCode(coll, [{"$project": {"matches": {"$regexFind": inputObj}}}], exceptionCode);
- assertErrorCode(
- coll, [{"$project": {"matches": {"$regexMatch": inputObj}}}], exceptionCode);
- }
+/**
+ * This function validates that $regexFind, $regexFindAll and $regexMatch all fail with the
+ * expected error code for the given input.
+ */
+function testRegexAggException(inputObj, exceptionCode) {
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexFindAll": inputObj}}}], exceptionCode);
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexFind": inputObj}}}], exceptionCode);
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexMatch": inputObj}}}], exceptionCode);
+}
- (function testWithSingleMatch() {
- // Regex in string notation, find with multiple captures and matches.
- assert.commandWorked(coll.insert({_id: 0, text: "Simple Example "}));
- testRegexFindAggForKey(0, {input: "$text", regex: "(m(p))"}, [
- {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
- {"match": "mp", "idx": 10, "captures": ["mp", "p"]}
- ]);
- // Regex in json syntax, with multiple captures and matches.
- testRegexFindAggForKey(0, {input: "$text", regex: /(m(p))/}, [
- {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
- {"match": "mp", "idx": 10, "captures": ["mp", "p"]}
- ]);
- // Verify no overlapping match sub-strings.
- assert.commandWorked(coll.insert({_id: 112, text: "aaaaa aaaa"}));
- testRegexFindAggForKey(112, {input: "$text", regex: /(aa)/}, [
- {"match": "aa", "idx": 0, "captures": ["aa"]},
- {"match": "aa", "idx": 2, "captures": ["aa"]},
- {"match": "aa", "idx": 6, "captures": ["aa"]},
- {"match": "aa", "idx": 8, "captures": ["aa"]}
- ]);
- testRegexFindAggForKey(112, {input: "$text", regex: /(aa)+/}, [
- {"match": "aaaa", "idx": 0, "captures": ["aa"]},
- {"match": "aaaa", "idx": 6, "captures": ["aa"]}
- ]);
- // Verify greedy match.
- testRegexFindAggForKey(112, {input: "$text", regex: /(a+)/}, [
- {"match": "aaaaa", "idx": 0, "captures": ["aaaaa"]},
- {"match": "aaaa", "idx": 6, "captures": ["aaaa"]},
- ]);
- testRegexFindAggForKey(112, {input: "$text", regex: /(a)+/}, [
- {"match": "aaaaa", "idx": 0, "captures": ["a"]},
- {"match": "aaaa", "idx": 6, "captures": ["a"]},
- ]);
- // Verify lazy match.
- assert.commandWorked(coll.insert({_id: 113, text: "aaa aa"}));
- testRegexFindAggForKey(113, {input: "$text", regex: /(a+?)/}, [
- {"match": "a", "idx": 0, "captures": ["a"]},
- {"match": "a", "idx": 1, "captures": ["a"]},
- {"match": "a", "idx": 2, "captures": ["a"]},
- {"match": "a", "idx": 4, "captures": ["a"]},
- {"match": "a", "idx": 5, "captures": ["a"]}
- ]);
- testRegexFindAggForKey(113, {input: "$text", regex: /(a*?)/}, [
- {"match": "", "idx": 0, "captures": [""]},
- {"match": "", "idx": 1, "captures": [""]},
- {"match": "", "idx": 2, "captures": [""]},
- {"match": "", "idx": 3, "captures": [""]},
- {"match": "", "idx": 4, "captures": [""]},
- {"match": "", "idx": 5, "captures": [""]}
- ]);
+(function testWithSingleMatch() {
+ // Regex in string notation, find with multiple captures and matches.
+ assert.commandWorked(coll.insert({_id: 0, text: "Simple Example "}));
+ testRegexFindAggForKey(0, {input: "$text", regex: "(m(p))"}, [
+ {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
+ {"match": "mp", "idx": 10, "captures": ["mp", "p"]}
+ ]);
+    // Regex as a regex literal, with multiple captures and matches.
+ testRegexFindAggForKey(0, {input: "$text", regex: /(m(p))/}, [
+ {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
+ {"match": "mp", "idx": 10, "captures": ["mp", "p"]}
+ ]);
+ // Verify no overlapping match sub-strings.
+ assert.commandWorked(coll.insert({_id: 112, text: "aaaaa aaaa"}));
+ testRegexFindAggForKey(112, {input: "$text", regex: /(aa)/}, [
+ {"match": "aa", "idx": 0, "captures": ["aa"]},
+ {"match": "aa", "idx": 2, "captures": ["aa"]},
+ {"match": "aa", "idx": 6, "captures": ["aa"]},
+ {"match": "aa", "idx": 8, "captures": ["aa"]}
+ ]);
+ testRegexFindAggForKey(112, {input: "$text", regex: /(aa)+/}, [
+ {"match": "aaaa", "idx": 0, "captures": ["aa"]},
+ {"match": "aaaa", "idx": 6, "captures": ["aa"]}
+ ]);
+ // Verify greedy match.
+ testRegexFindAggForKey(112, {input: "$text", regex: /(a+)/}, [
+ {"match": "aaaaa", "idx": 0, "captures": ["aaaaa"]},
+ {"match": "aaaa", "idx": 6, "captures": ["aaaa"]},
+ ]);
+ testRegexFindAggForKey(112, {input: "$text", regex: /(a)+/}, [
+ {"match": "aaaaa", "idx": 0, "captures": ["a"]},
+ {"match": "aaaa", "idx": 6, "captures": ["a"]},
+ ]);
+ // Verify lazy match.
+ assert.commandWorked(coll.insert({_id: 113, text: "aaa aa"}));
+ testRegexFindAggForKey(113, {input: "$text", regex: /(a+?)/}, [
+ {"match": "a", "idx": 0, "captures": ["a"]},
+ {"match": "a", "idx": 1, "captures": ["a"]},
+ {"match": "a", "idx": 2, "captures": ["a"]},
+ {"match": "a", "idx": 4, "captures": ["a"]},
+ {"match": "a", "idx": 5, "captures": ["a"]}
+ ]);
+ testRegexFindAggForKey(113, {input: "$text", regex: /(a*?)/}, [
+ {"match": "", "idx": 0, "captures": [""]},
+ {"match": "", "idx": 1, "captures": [""]},
+ {"match": "", "idx": 2, "captures": [""]},
+ {"match": "", "idx": 3, "captures": [""]},
+ {"match": "", "idx": 4, "captures": [""]},
+ {"match": "", "idx": 5, "captures": [""]}
+ ]);
- // Regex string groups within group.
- testRegexFindAggForKey(
- 0,
- {input: "$text", regex: "((S)(i)(m)(p)(l)(e))"},
- [{"match": "Simple", "idx": 0, "captures": ["Simple", "S", "i", "m", "p", "l", "e"]}]);
- testRegexFindAggForKey(
- 0,
- {input: "$text", regex: "(S)(i)(m)((p)(l)(e))"},
- [{"match": "Simple", "idx": 0, "captures": ["S", "i", "m", "ple", "p", "l", "e"]}]);
+    // Regex string with capture groups nested within a group.
+ testRegexFindAggForKey(
+ 0,
+ {input: "$text", regex: "((S)(i)(m)(p)(l)(e))"},
+ [{"match": "Simple", "idx": 0, "captures": ["Simple", "S", "i", "m", "p", "l", "e"]}]);
+ testRegexFindAggForKey(
+ 0,
+ {input: "$text", regex: "(S)(i)(m)((p)(l)(e))"},
+ [{"match": "Simple", "idx": 0, "captures": ["S", "i", "m", "ple", "p", "l", "e"]}]);
- // Regex email pattern.
- assert.commandWorked(
- coll.insert({_id: 1, text: "Some field text with email mongo@mongodb.com"}));
- testRegexFindAggForKey(
- 1,
- {input: "$text", regex: "([a-zA-Z0-9._-]+)@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+"},
- [{"match": "mongo@mongodb.com", "idx": 27, "captures": ["mongo"]}]);
+ // Regex email pattern.
+ assert.commandWorked(
+ coll.insert({_id: 1, text: "Some field text with email mongo@mongodb.com"}));
+ testRegexFindAggForKey(
+ 1,
+ {input: "$text", regex: "([a-zA-Z0-9._-]+)@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+"},
+ [{"match": "mongo@mongodb.com", "idx": 27, "captures": ["mongo"]}]);
- // Regex digits.
- assert.commandWorked(coll.insert({_id: 5, text: "Text with 02 digits"}));
- testRegexFindAggForKey(
- 5, {input: "$text", regex: /[0-9]+/}, [{"match": "02", "idx": 10, "captures": []}]);
- testRegexFindAggForKey(
- 5, {input: "$text", regex: /(\d+)/}, [{"match": "02", "idx": 10, "captures": ["02"]}]);
+ // Regex digits.
+ assert.commandWorked(coll.insert({_id: 5, text: "Text with 02 digits"}));
+ testRegexFindAggForKey(
+ 5, {input: "$text", regex: /[0-9]+/}, [{"match": "02", "idx": 10, "captures": []}]);
+ testRegexFindAggForKey(
+ 5, {input: "$text", regex: /(\d+)/}, [{"match": "02", "idx": 10, "captures": ["02"]}]);
- // Regex a non-capture group.
- assert.commandWorked(coll.insert({_id: 6, text: "1,2,3,4,5,6,7,8,9,10"}));
- testRegexFindAggForKey(6,
- {input: "$text", regex: /^(?:1|a)\,([0-9]+)/},
- [{"match": "1,2", "idx": 0, "captures": ["2"]}]);
+    // Regex with a non-capture group.
+ assert.commandWorked(coll.insert({_id: 6, text: "1,2,3,4,5,6,7,8,9,10"}));
+ testRegexFindAggForKey(6,
+ {input: "$text", regex: /^(?:1|a)\,([0-9]+)/},
+ [{"match": "1,2", "idx": 0, "captures": ["2"]}]);
- // Regex quantifier.
- assert.commandWorked(coll.insert({_id: 7, text: "abc12defgh345jklm"}));
- testRegexFindAggForKey(
- 7, {input: "$text", regex: /[0-9]{3}/}, [{"match": "345", "idx": 10, "captures": []}]);
+ // Regex quantifier.
+ assert.commandWorked(coll.insert({_id: 7, text: "abc12defgh345jklm"}));
+ testRegexFindAggForKey(
+ 7, {input: "$text", regex: /[0-9]{3}/}, [{"match": "345", "idx": 10, "captures": []}]);
- // Regex case insensitive option.
- assert.commandWorked(coll.insert({_id: 8, text: "This Is Camel Case"}));
- testRegexFindAggForKey(8, {input: "$text", regex: /camel/}, []);
- testRegexFindAggForKey(
- 8, {input: "$text", regex: /camel/i}, [{"match": "Camel", "idx": 8, "captures": []}]);
- testRegexFindAggForKey(8,
- {input: "$text", regex: /camel/, options: "i"},
- [{"match": "Camel", "idx": 8, "captures": []}]);
- testRegexFindAggForKey(8,
- {input: "$text", regex: "camel", options: "i"},
- [{"match": "Camel", "idx": 8, "captures": []}]);
+    // Regex case-insensitive option.
+ assert.commandWorked(coll.insert({_id: 8, text: "This Is Camel Case"}));
+ testRegexFindAggForKey(8, {input: "$text", regex: /camel/}, []);
+ testRegexFindAggForKey(
+ 8, {input: "$text", regex: /camel/i}, [{"match": "Camel", "idx": 8, "captures": []}]);
+ testRegexFindAggForKey(8,
+ {input: "$text", regex: /camel/, options: "i"},
+ [{"match": "Camel", "idx": 8, "captures": []}]);
+ testRegexFindAggForKey(8,
+ {input: "$text", regex: "camel", options: "i"},
+ [{"match": "Camel", "idx": 8, "captures": []}]);
- // Regex multi line option.
- assert.commandWorked(coll.insert({_id: 9, text: "Foo line1\nFoo line2\nFoo line3"}));
- // Verify no match with options flag off.
- testRegexFindAggForKey(9, {input: "$text", regex: /^Foo line\d$/}, []);
- // Verify match when flag is on.
- testRegexFindAggForKey(9, {input: "$text", regex: /(^Foo line\d$)/m}, [
- {"match": "Foo line1", "idx": 0, "captures": ["Foo line1"]},
- {"match": "Foo line2", "idx": 10, "captures": ["Foo line2"]},
- {"match": "Foo line3", "idx": 20, "captures": ["Foo line3"]}
- ]);
+    // Regex multiline option.
+ assert.commandWorked(coll.insert({_id: 9, text: "Foo line1\nFoo line2\nFoo line3"}));
+ // Verify no match with options flag off.
+ testRegexFindAggForKey(9, {input: "$text", regex: /^Foo line\d$/}, []);
+ // Verify match when flag is on.
+ testRegexFindAggForKey(9, {input: "$text", regex: /(^Foo line\d$)/m}, [
+ {"match": "Foo line1", "idx": 0, "captures": ["Foo line1"]},
+ {"match": "Foo line2", "idx": 10, "captures": ["Foo line2"]},
+ {"match": "Foo line3", "idx": 20, "captures": ["Foo line3"]}
+ ]);
- // Regex single line option.
- testRegexFindAggForKey(9, {input: "$text", regex: "Foo.*line"}, [
- {"match": "Foo line", "idx": 0, "captures": []},
- {"match": "Foo line", "idx": 10, "captures": []},
- {"match": "Foo line", "idx": 20, "captures": []}
- ]);
- testRegexFindAggForKey(
- 9,
- {input: "$text", regex: "Foo.*line", options: "s"},
- [{"match": "Foo line1\nFoo line2\nFoo line", "idx": 0, "captures": []}]);
+    // Regex single-line option.
+ testRegexFindAggForKey(9, {input: "$text", regex: "Foo.*line"}, [
+ {"match": "Foo line", "idx": 0, "captures": []},
+ {"match": "Foo line", "idx": 10, "captures": []},
+ {"match": "Foo line", "idx": 20, "captures": []}
+ ]);
+ testRegexFindAggForKey(9,
+ {input: "$text", regex: "Foo.*line", options: "s"},
+ [{"match": "Foo line1\nFoo line2\nFoo line", "idx": 0, "captures": []}]);
- // Regex extended option.
- testRegexFindAggForKey(9, {input: "$text", regex: "F o o # a comment"}, []);
- testRegexFindAggForKey(9, {input: "$text", regex: "F o o # a comment", options: "x"}, [
+ // Regex extended option.
+ testRegexFindAggForKey(9, {input: "$text", regex: "F o o # a comment"}, []);
+ testRegexFindAggForKey(9, {input: "$text", regex: "F o o # a comment", options: "x"}, [
+ {"match": "Foo", "idx": 0, "captures": []},
+ {"match": "Foo", "idx": 10, "captures": []},
+ {"match": "Foo", "idx": 20, "captures": []}
+ ]);
+ testRegexFindAggForKey(
+ 9, {input: "$text", regex: "F o o # a comment \n\n# ignored", options: "x"}, [
{"match": "Foo", "idx": 0, "captures": []},
{"match": "Foo", "idx": 10, "captures": []},
{"match": "Foo", "idx": 20, "captures": []}
]);
- testRegexFindAggForKey(
- 9, {input: "$text", regex: "F o o # a comment \n\n# ignored", options: "x"}, [
- {"match": "Foo", "idx": 0, "captures": []},
- {"match": "Foo", "idx": 10, "captures": []},
- {"match": "Foo", "idx": 20, "captures": []}
- ]);
- testRegexFindAggForKey(9, {input: "$text", regex: "(F o o) # a comment", options: "x"}, [
- {"match": "Foo", "idx": 0, "captures": ["Foo"]},
- {"match": "Foo", "idx": 10, "captures": ["Foo"]},
- {"match": "Foo", "idx": 20, "captures": ["Foo"]}
- ]);
+ testRegexFindAggForKey(9, {input: "$text", regex: "(F o o) # a comment", options: "x"}, [
+ {"match": "Foo", "idx": 0, "captures": ["Foo"]},
+ {"match": "Foo", "idx": 10, "captures": ["Foo"]},
+ {"match": "Foo", "idx": 20, "captures": ["Foo"]}
+ ]);
- // Regex pattern from a document field value.
- assert.commandWorked(
- coll.insert({_id: 10, text: "Simple Value Example", pattern: "(m(p))"}));
- testRegexFindAggForKey(10, {input: "$text", regex: "$pattern"}, [
- {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
- {"match": "mp", "idx": 16, "captures": ["mp", "p"]}
- ]);
- assert.commandWorked(coll.insert({_id: 11, text: "OtherText", pattern: /(T(e))xt$/}));
- testRegexFindAggForKey(11,
- {input: "$text", regex: "$pattern"},
- [{"match": "Text", "idx": 5, "captures": ["Te", "e"]}]);
+ // Regex pattern from a document field value.
+ assert.commandWorked(coll.insert({_id: 10, text: "Simple Value Example", pattern: "(m(p))"}));
+ testRegexFindAggForKey(10, {input: "$text", regex: "$pattern"}, [
+ {"match": "mp", "idx": 2, "captures": ["mp", "p"]},
+ {"match": "mp", "idx": 16, "captures": ["mp", "p"]}
+ ]);
+ assert.commandWorked(coll.insert({_id: 11, text: "OtherText", pattern: /(T(e))xt$/}));
+ testRegexFindAggForKey(11,
+ {input: "$text", regex: "$pattern"},
+ [{"match": "Text", "idx": 5, "captures": ["Te", "e"]}]);
- // Empty input matches empty regex.
- testRegexFindAggForKey(
- 0, {input: "", regex: ""}, [{"match": "", "idx": 0, "captures": []}]);
- // Empty captures groups.
- testRegexFindAggForKey(0, {input: "bbbb", regex: "()"}, [
- {"match": "", "idx": 0, "captures": [""]},
- {"match": "", "idx": 1, "captures": [""]},
- {"match": "", "idx": 2, "captures": [""]},
- {"match": "", "idx": 3, "captures": [""]}
- ]);
- // No matches.
- testRegexFindAggForKey(0, {input: "$text", regex: /foo/}, []);
- // Regex null.
- testRegexFindAggForKey(0, {input: "$text", regex: null}, []);
- // Input null.
- testRegexFindAggForKey(0, {input: null, regex: /valid/}, []);
- // Both null.
- testRegexFindAggForKey(0, {input: null, regex: null}, []);
- testRegexFindAggForKey(
- 0, {input: "$missingField", regex: "$missingField", options: "i"}, []);
- testRegexFindAggForKey(0, {input: "$missingField", regex: "$$REMOVE", options: "i"}, []);
- })();
+ // Empty input matches empty regex.
+ testRegexFindAggForKey(0, {input: "", regex: ""}, [{"match": "", "idx": 0, "captures": []}]);
+    // Empty capture groups.
+ testRegexFindAggForKey(0, {input: "bbbb", regex: "()"}, [
+ {"match": "", "idx": 0, "captures": [""]},
+ {"match": "", "idx": 1, "captures": [""]},
+ {"match": "", "idx": 2, "captures": [""]},
+ {"match": "", "idx": 3, "captures": [""]}
+ ]);
+ // No matches.
+ testRegexFindAggForKey(0, {input: "$text", regex: /foo/}, []);
+ // Regex null.
+ testRegexFindAggForKey(0, {input: "$text", regex: null}, []);
+ // Input null.
+ testRegexFindAggForKey(0, {input: null, regex: /valid/}, []);
+ // Both null.
+ testRegexFindAggForKey(0, {input: null, regex: null}, []);
+ testRegexFindAggForKey(0, {input: "$missingField", regex: "$missingField", options: "i"}, []);
+ testRegexFindAggForKey(0, {input: "$missingField", regex: "$$REMOVE", options: "i"}, []);
+})();
- (function testWithStartOptions() {
- coll.drop();
- assert.commandWorked(coll.insert({_id: 2, text: "cafétéria"}));
- assert.commandWorked(coll.insert({_id: 3, text: "ab\ncd"}));
+(function testWithStartOptions() {
+ coll.drop();
+ assert.commandWorked(coll.insert({_id: 2, text: "cafétéria"}));
+ assert.commandWorked(coll.insert({_id: 3, text: "ab\ncd"}));
- // LIMIT_MATCH option to limit the number of comparisons PCRE does internally.
- testRegexAggException({input: "$text", regex: "(*LIMIT_MATCH=1)fé"}, 51156);
- testRegexFindAggForKey(2,
- {input: "$text", regex: "(*LIMIT_MATCH=3)(fé)"},
- [{"match": "fé", "idx": 2, "captures": ["fé"]}]);
+ // LIMIT_MATCH option to limit the number of comparisons PCRE does internally.
+ testRegexAggException({input: "$text", regex: "(*LIMIT_MATCH=1)fé"}, 51156);
+ testRegexFindAggForKey(2,
+ {input: "$text", regex: "(*LIMIT_MATCH=3)(fé)"},
+ [{"match": "fé", "idx": 2, "captures": ["fé"]}]);
- // (*LF) would change the feed system to UNIX like and (*CR) to windows like. So '\n' would
- // match '.' with CR but not LF.
- testRegexFindAggForKey(3, {input: "$text", regex: "(*LF)ab.cd"}, []);
- testRegexFindAggForKey(3,
- {input: "$text", regex: "(*CR)ab.cd"},
- [{"match": "ab\ncd", "idx": 0, "captures": []}]);
+    // (*LF) switches the newline convention to Unix-style and (*CR) to Windows-style, so '\n'
+    // matches '.' under CR but not under LF.
+ testRegexFindAggForKey(3, {input: "$text", regex: "(*LF)ab.cd"}, []);
+ testRegexFindAggForKey(
+ 3, {input: "$text", regex: "(*CR)ab.cd"}, [{"match": "ab\ncd", "idx": 0, "captures": []}]);
- // Multiple start options.
- testRegexFindAggForKey(2,
- {input: "$text", regex: String.raw `(*LIMIT_MATCH=5)(*UCP)^(\w+)`},
- [{"match": "cafétéria", "idx": 0, "captures": ["cafétéria"]}]);
- testRegexAggException({input: "$text", regex: String.raw `(*LIMIT_MATCH=1)(*UCP)^(\w+)`},
- 51156);
- })();
+ // Multiple start options.
+ testRegexFindAggForKey(2,
+ {input: "$text", regex: String.raw`(*LIMIT_MATCH=5)(*UCP)^(\w+)`},
+ [{"match": "cafétéria", "idx": 0, "captures": ["cafétéria"]}]);
+ testRegexAggException({input: "$text", regex: String.raw`(*LIMIT_MATCH=1)(*UCP)^(\w+)`}, 51156);
+})();
- (function testWithUnicodeData() {
- coll.drop();
- // Unicode index counting.
- assert.commandWorked(coll.insert({_id: 2, text: "cafétéria"}));
- assert.commandWorked(coll.insert({_id: 3, text: "मा०गो डीबि"}));
- testRegexFindAggForKey(
- 2, {input: "$text", regex: "té"}, [{"match": "té", "idx": 4, "captures": []}]);
- testRegexFindAggForKey(
- 3, {input: "$text", regex: /म/}, [{"match": "म", "idx": 0, "captures": []}]);
- // Unicode with capture group.
- testRegexFindAggForKey(3,
- {input: "$text", regex: /(गो )/},
- [{"match": "गो ", "idx": 3, "captures": ["गो "]}]);
- // Test that regexes support Unicode character properties.
- testRegexFindAggForKey(2, {input: "$text", regex: String.raw `\p{Hangul}`}, []);
- testRegexFindAggForKey(2,
- {input: "$text", regex: String.raw `\p{Latin}+$`},
- [{"match": "cafétéria", "idx": 0, "captures": []}]);
- // Test that the (*UTF) and (*UTF8) options are accepted for unicode characters.
- assert.commandWorked(coll.insert({_id: 12, text: "༢༣༤༤༤༥12༥A"}));
- testRegexFindAggForKey(12, {input: "$text", regex: "(*UTF8)༤"}, [
- {"match": "༤", "idx": 2, "captures": []},
- {"match": "༤", "idx": 3, "captures": []},
- {"match": "༤", "idx": 4, "captures": []}
- ]);
- testRegexFindAggForKey(12, {input: "$text", regex: "(*UTF)༤"}, [
- {"match": "༤", "idx": 2, "captures": []},
- {"match": "༤", "idx": 3, "captures": []},
- {"match": "༤", "idx": 4, "captures": []}
- ]);
- // For ASCII characters.
- assert.commandWorked(coll.insert({_id: 4, text: "123444"}));
- testRegexFindAggForKey(4,
- {input: "$text", regex: "(*UTF8)(44)"},
- [{"match": "44", "idx": 3, "captures": ["44"]}]);
- testRegexFindAggForKey(4,
- {input: "$text", regex: "(*UTF)(44)"},
- [{"match": "44", "idx": 3, "captures": ["44"]}]);
+(function testWithUnicodeData() {
+ coll.drop();
+ // Unicode index counting.
+ assert.commandWorked(coll.insert({_id: 2, text: "cafétéria"}));
+ assert.commandWorked(coll.insert({_id: 3, text: "मा०गो डीबि"}));
+ testRegexFindAggForKey(
+ 2, {input: "$text", regex: "té"}, [{"match": "té", "idx": 4, "captures": []}]);
+ testRegexFindAggForKey(
+ 3, {input: "$text", regex: /म/}, [{"match": "म", "idx": 0, "captures": []}]);
+ // Unicode with capture group.
+ testRegexFindAggForKey(
+ 3, {input: "$text", regex: /(गो )/}, [{"match": "गो ", "idx": 3, "captures": ["गो "]}]);
+ // Test that regexes support Unicode character properties.
+ testRegexFindAggForKey(2, {input: "$text", regex: String.raw`\p{Hangul}`}, []);
+ testRegexFindAggForKey(2,
+ {input: "$text", regex: String.raw`\p{Latin}+$`},
+ [{"match": "cafétéria", "idx": 0, "captures": []}]);
+    // Test that the (*UTF) and (*UTF8) options are accepted for Unicode characters.
+ assert.commandWorked(coll.insert({_id: 12, text: "༢༣༤༤༤༥12༥A"}));
+ testRegexFindAggForKey(12, {input: "$text", regex: "(*UTF8)༤"}, [
+ {"match": "༤", "idx": 2, "captures": []},
+ {"match": "༤", "idx": 3, "captures": []},
+ {"match": "༤", "idx": 4, "captures": []}
+ ]);
+ testRegexFindAggForKey(12, {input: "$text", regex: "(*UTF)༤"}, [
+ {"match": "༤", "idx": 2, "captures": []},
+ {"match": "༤", "idx": 3, "captures": []},
+ {"match": "༤", "idx": 4, "captures": []}
+ ]);
+ // For ASCII characters.
+ assert.commandWorked(coll.insert({_id: 4, text: "123444"}));
+ testRegexFindAggForKey(
+ 4, {input: "$text", regex: "(*UTF8)(44)"}, [{"match": "44", "idx": 3, "captures": ["44"]}]);
+ testRegexFindAggForKey(
+ 4, {input: "$text", regex: "(*UTF)(44)"}, [{"match": "44", "idx": 3, "captures": ["44"]}]);
- // When the (*UCP) option is specified, Unicode "word" characters are included in the '\w'
- // character type.
- testRegexFindAggForKey(12,
- {input: "$text", regex: String.raw `(*UCP)^(\w+)`},
- [{"match": "༢༣༤༤༤༥12༥A", "idx": 0, "captures": ["༢༣༤༤༤༥12༥A"]}]);
- // When the (*UCP) option is specified, [:digit:] becomes \p{N} and matches all Unicode
- // decimal digit characters.
- testRegexFindAggForKey(12,
- {input: "$text", regex: "(*UCP)^[[:digit:]]+"},
- [{"match": "༢༣༤༤༤༥12༥", "idx": 0, "captures": []}]);
- testRegexFindAggForKey(12, {input: "$text", regex: "(*UCP)[[:digit:]]+$"}, []);
- // When the (*UCP) option is specified, [:alpha:] becomes \p{L} and matches all Unicode
- // alphabetic characters.
- assert.commandWorked(coll.insert({_id: 13, text: "박정수AB"}));
- testRegexFindAggForKey(13,
- {input: "$text", regex: String.raw `(*UCP)^[[:alpha:]]+`},
- [{"match": "박정수AB", "idx": 0, "captures": []}]);
+ // When the (*UCP) option is specified, Unicode "word" characters are included in the '\w'
+ // character type.
+ testRegexFindAggForKey(12,
+ {input: "$text", regex: String.raw`(*UCP)^(\w+)`},
+ [{"match": "༢༣༤༤༤༥12༥A", "idx": 0, "captures": ["༢༣༤༤༤༥12༥A"]}]);
+ // When the (*UCP) option is specified, [:digit:] becomes \p{N} and matches all Unicode
+ // decimal digit characters.
+ testRegexFindAggForKey(12,
+ {input: "$text", regex: "(*UCP)^[[:digit:]]+"},
+ [{"match": "༢༣༤༤༤༥12༥", "idx": 0, "captures": []}]);
+ testRegexFindAggForKey(12, {input: "$text", regex: "(*UCP)[[:digit:]]+$"}, []);
+ // When the (*UCP) option is specified, [:alpha:] becomes \p{L} and matches all Unicode
+ // alphabetic characters.
+ assert.commandWorked(coll.insert({_id: 13, text: "박정수AB"}));
+ testRegexFindAggForKey(13,
+ {input: "$text", regex: String.raw`(*UCP)^[[:alpha:]]+`},
+ [{"match": "박정수AB", "idx": 0, "captures": []}]);
- // No match when options are not set.
- testRegexFindAggForKey(12, {input: "$text", regex: String.raw `^(\w+)`}, []);
- testRegexFindAggForKey(12, {input: "$text", regex: "^[[:digit:]]"}, []);
- testRegexFindAggForKey(2, {input: "$text", regex: "^[[:alpha:]]+$"}, []);
- })();
+ // No match when options are not set.
+ testRegexFindAggForKey(12, {input: "$text", regex: String.raw`^(\w+)`}, []);
+ testRegexFindAggForKey(12, {input: "$text", regex: "^[[:digit:]]"}, []);
+ testRegexFindAggForKey(2, {input: "$text", regex: "^[[:alpha:]]+$"}, []);
+})();
- (function testErrors() {
- coll.drop();
- assert.commandWorked(coll.insert({text: "string"}));
- // Null object.
- testRegexAggException(null, 51103);
- // Incorrect object parameter.
- testRegexAggException("incorrect type", 51103);
- // Test malformed regex.
- testRegexAggException({input: "$text", regex: "[0-9"}, 51111);
- testRegexAggException({regex: "[a-c", input: null}, 51111);
- // Malformed regex because start options not at the beginning.
- testRegexAggException({input: "$text", regex: "^(*UCP)[[:alpha:]]+$"}, 51111);
- testRegexAggException({input: "$text", regex: "((*UCP)[[:alpha:]]+$)"}, 51111);
- // At least one of the 'input' field is not string.
- assert.commandWorked(coll.insert({a: "string"}));
- assert.commandWorked(coll.insert({a: {b: "object"}}));
- testRegexAggException({input: "$a", regex: "valid"}, 51104);
- testRegexAggException({input: "$a", regex: null}, 51104);
- // 'regex' field is not string or regex.
- testRegexAggException({input: "$text", regex: ["incorrect"]}, 51105);
- // 'options' field is not string.
- testRegexAggException({input: "$text", regex: "valid", options: 123}, 51106);
- // Incorrect 'options' flag.
- testRegexAggException({input: "$text", regex: "valid", options: 'a'}, 51108);
- // 'options' are case-sensitive.
- testRegexAggException({input: "$text", regex: "valid", options: "I"}, 51108);
- testRegexAggException({options: "I", regex: null, input: null}, 51108);
- // Options specified in both 'regex' and 'options'.
- testRegexAggException({input: "$text", regex: /(m(p))/i, options: "i"}, 51107);
- testRegexAggException({input: "$text", regex: /(m(p))/i, options: "x"}, 51107);
- testRegexAggException({input: "$text", regex: /(m(p))/m, options: ""}, 51107);
- // 'regex' as string with null characters.
- testRegexAggException({input: "$text", regex: "sasd\0", options: "i"}, 51109);
- testRegexAggException({regex: "sa\x00sd", options: "i", input: null}, 51109);
- // 'options' as string with null characters.
- testRegexAggException({input: "$text", regex: /(m(p))/, options: "i\0"}, 51110);
- testRegexAggException({input: "$text", options: "i\x00", regex: null}, 51110);
- // Invalid parameter.
- testRegexAggException({input: "$text", invalid: "i"}, 31024);
- testRegexAggException({input: "$text", regex: "sa", invalid: "$missingField"}, 31024);
- testRegexAggException({input: "$text", regex: "sa", invalid: null}, 31024);
- testRegexAggException({input: "$text", regex: "sa", invalid: []}, 31024);
- // Regex not present.
- testRegexAggException({input: "$text"}, 31023);
- testRegexAggException({input: "$missingField"}, 31023);
- testRegexAggException({input: "$text", options: "invalid"}, 31023);
- // Input not present.
- testRegexAggException({regex: /valid/}, 31022);
- testRegexAggException({regex: "$missingField"}, 31022);
- testRegexAggException({regex: "[0-9"}, 31022);
- // Empty object.
- testRegexAggException({}, 31022);
- })();
+(function testErrors() {
+ coll.drop();
+ assert.commandWorked(coll.insert({text: "string"}));
+ // Null object.
+ testRegexAggException(null, 51103);
+ // Incorrect object parameter.
+ testRegexAggException("incorrect type", 51103);
+ // Test malformed regex.
+ testRegexAggException({input: "$text", regex: "[0-9"}, 51111);
+ testRegexAggException({regex: "[a-c", input: null}, 51111);
+    // Malformed regex because the start options are not at the beginning.
+ testRegexAggException({input: "$text", regex: "^(*UCP)[[:alpha:]]+$"}, 51111);
+ testRegexAggException({input: "$text", regex: "((*UCP)[[:alpha:]]+$)"}, 51111);
+    // At least one 'input' value is not a string.
+ assert.commandWorked(coll.insert({a: "string"}));
+ assert.commandWorked(coll.insert({a: {b: "object"}}));
+ testRegexAggException({input: "$a", regex: "valid"}, 51104);
+ testRegexAggException({input: "$a", regex: null}, 51104);
+ // 'regex' field is not string or regex.
+ testRegexAggException({input: "$text", regex: ["incorrect"]}, 51105);
+ // 'options' field is not string.
+ testRegexAggException({input: "$text", regex: "valid", options: 123}, 51106);
+ // Incorrect 'options' flag.
+ testRegexAggException({input: "$text", regex: "valid", options: 'a'}, 51108);
+ // 'options' are case-sensitive.
+ testRegexAggException({input: "$text", regex: "valid", options: "I"}, 51108);
+ testRegexAggException({options: "I", regex: null, input: null}, 51108);
+ // Options specified in both 'regex' and 'options'.
+ testRegexAggException({input: "$text", regex: /(m(p))/i, options: "i"}, 51107);
+ testRegexAggException({input: "$text", regex: /(m(p))/i, options: "x"}, 51107);
+ testRegexAggException({input: "$text", regex: /(m(p))/m, options: ""}, 51107);
+ // 'regex' as string with null characters.
+ testRegexAggException({input: "$text", regex: "sasd\0", options: "i"}, 51109);
+ testRegexAggException({regex: "sa\x00sd", options: "i", input: null}, 51109);
+ // 'options' as string with null characters.
+ testRegexAggException({input: "$text", regex: /(m(p))/, options: "i\0"}, 51110);
+ testRegexAggException({input: "$text", options: "i\x00", regex: null}, 51110);
+ // Invalid parameter.
+ testRegexAggException({input: "$text", invalid: "i"}, 31024);
+ testRegexAggException({input: "$text", regex: "sa", invalid: "$missingField"}, 31024);
+ testRegexAggException({input: "$text", regex: "sa", invalid: null}, 31024);
+ testRegexAggException({input: "$text", regex: "sa", invalid: []}, 31024);
+ // Regex not present.
+ testRegexAggException({input: "$text"}, 31023);
+ testRegexAggException({input: "$missingField"}, 31023);
+ testRegexAggException({input: "$text", options: "invalid"}, 31023);
+ // Input not present.
+ testRegexAggException({regex: /valid/}, 31022);
+ testRegexAggException({regex: "$missingField"}, 31022);
+ testRegexAggException({regex: "[0-9"}, 31022);
+ // Empty object.
+ testRegexAggException({}, 31022);
+})();
- (function testMultipleMatches() {
- coll.drop();
- assert.commandWorked(coll.insert({a: "string1string2", regex: "(string[1-2])"}));
- assert.commandWorked(coll.insert({a: "string3 string4", regex: "(string[3-4])"}));
- assert.commandWorked(coll.insert({a: "string5 string6", regex: "(string[3-4])"}));
- // All documents match.
- testRegexFindAgg({input: "$a", regex: "(str.*?[0-9])"}, [
- {
- "matches": [
- {"match": "string1", "idx": 0, "captures": ["string1"]},
- {"match": "string2", "idx": 7, "captures": ["string2"]}
- ]
- },
- {
- "matches": [
- {"match": "string3", "idx": 0, "captures": ["string3"]},
- {"match": "string4", "idx": 8, "captures": ["string4"]}
- ]
- },
- {
- "matches": [
- {"match": "string5", "idx": 0, "captures": ["string5"]},
- {"match": "string6", "idx": 8, "captures": ["string6"]}
- ]
- }
- ]);
- // Only one match.
- testRegexFindAgg({input: "$a", regex: "(^.*[0-2]$)"}, [
- {"matches": []},
- {"matches": []},
- {"matches": [{"match": "string1string2", "idx": 0, "captures": ["string1string2"]}]}
+(function testMultipleMatches() {
+ coll.drop();
+ assert.commandWorked(coll.insert({a: "string1string2", regex: "(string[1-2])"}));
+ assert.commandWorked(coll.insert({a: "string3 string4", regex: "(string[3-4])"}));
+ assert.commandWorked(coll.insert({a: "string5 string6", regex: "(string[3-4])"}));
+ // All documents match.
+ testRegexFindAgg({input: "$a", regex: "(str.*?[0-9])"}, [
+ {
+ "matches": [
+ {"match": "string1", "idx": 0, "captures": ["string1"]},
+ {"match": "string2", "idx": 7, "captures": ["string2"]}
+ ]
+ },
+ {
+ "matches": [
+ {"match": "string3", "idx": 0, "captures": ["string3"]},
+ {"match": "string4", "idx": 8, "captures": ["string4"]}
+ ]
+ },
+ {
+ "matches": [
+ {"match": "string5", "idx": 0, "captures": ["string5"]},
+ {"match": "string6", "idx": 8, "captures": ["string6"]}
+ ]
+ }
+ ]);
+ // Only one match.
+ testRegexFindAgg({input: "$a", regex: "(^.*[0-2]$)"}, [
+ {"matches": []},
+ {"matches": []},
+ {"matches": [{"match": "string1string2", "idx": 0, "captures": ["string1string2"]}]}
- ]);
- // None match.
- testRegexFindAgg({input: "$a", regex: "(^.*[7-9]$)"},
- [{"matches": []}, {"matches": []}, {"matches": []}]);
+ ]);
+ // None match.
+ testRegexFindAgg({input: "$a", regex: "(^.*[7-9]$)"},
+ [{"matches": []}, {"matches": []}, {"matches": []}]);
- // All documents match when using variable regex.
- testRegexFindAgg({input: "$a", regex: "$regex"}, [
- {"matches": []},
- {
- "matches": [
- {"match": "string1", "idx": 0, "captures": ["string1"]},
- {"match": "string2", "idx": 7, "captures": ["string2"]}
- ]
- },
- {
- "matches": [
- {"match": "string3", "idx": 0, "captures": ["string3"]},
- {"match": "string4", "idx": 8, "captures": ["string4"]}
- ]
- }
- ]);
- })();
+ // All documents match when using variable regex.
+ testRegexFindAgg({input: "$a", regex: "$regex"}, [
+ {"matches": []},
+ {
+ "matches": [
+ {"match": "string1", "idx": 0, "captures": ["string1"]},
+ {"match": "string2", "idx": 7, "captures": ["string2"]}
+ ]
+ },
+ {
+ "matches": [
+ {"match": "string3", "idx": 0, "captures": ["string3"]},
+ {"match": "string4", "idx": 8, "captures": ["string4"]}
+ ]
+ }
+ ]);
+})();
- (function testInsideCondOperator() {
- coll.drop();
- assert.commandWorked(
- coll.insert({_id: 0, level: "Public Knowledge", info: "Company Name"}));
- assert.commandWorked(
- coll.insert({_id: 1, level: "Private Information", info: "Company Secret"}));
- const expectedResults =
- [{"_id": 0, "information": "Company Name"}, {"_id": 1, "information": "REDACTED"}];
- // For $regexFindAll.
- const resultFindAll =
- coll.aggregate([{
- "$project": {
- "information": {
- "$cond": [
- {
- "$eq":
- [{"$regexFindAll": {input: "$level", regex: /public/i}}, []]
- },
- "REDACTED",
- "$info"
- ]
- }
- }
- }])
- .toArray();
- assert.eq(resultFindAll, expectedResults);
- // For $regexMatch.
- const resultMatch =
- coll.aggregate([{
- "$project": {
- "information": {
- "$cond": [
- {"$regexMatch": {input: "$level", regex: /public/i}},
- "$info",
- "REDACTED"
- ]
- }
+(function testInsideCondOperator() {
+ coll.drop();
+ assert.commandWorked(coll.insert({_id: 0, level: "Public Knowledge", info: "Company Name"}));
+ assert.commandWorked(
+ coll.insert({_id: 1, level: "Private Information", info: "Company Secret"}));
+ const expectedResults =
+ [{"_id": 0, "information": "Company Name"}, {"_id": 1, "information": "REDACTED"}];
+ // For $regexFindAll.
+ const resultFindAll =
+ coll.aggregate([{
+ "$project": {
+ "information": {
+ "$cond": [
+ {"$eq": [{"$regexFindAll": {input: "$level", regex: /public/i}}, []]},
+ "REDACTED",
+ "$info"
+ ]
}
- }])
- .toArray();
- // For $regexFind.
- const resultFind =
- coll.aggregate([{
- "$project": {
- "information": {
- "$cond": [
- {
- "$ne":
- [{"$regexFind": {input: "$level", regex: /public/i}}, null]
- },
- "$info",
- "REDACTED"
- ]
- }
+ }
+ }])
+ .toArray();
+ assert.eq(resultFindAll, expectedResults);
+ // For $regexMatch.
+ const resultMatch = coll.aggregate([{
+ "$project": {
+ "information": {
+ "$cond": [
+ {"$regexMatch": {input: "$level", regex: /public/i}},
+ "$info",
+ "REDACTED"
+ ]
+ }
+ }
+ }])
+ .toArray();
+ // For $regexFind.
+ const resultFind =
+ coll.aggregate([{
+ "$project": {
+ "information": {
+ "$cond": [
+ {"$ne": [{"$regexFind": {input: "$level", regex: /public/i}}, null]},
+ "$info",
+ "REDACTED"
+ ]
}
- }])
- .toArray();
- // Validate that {$ne: [{$regexFind: ...}, null]} produces the same result as
- // {$regexMatch: ...}.
- assert.eq(resultFind, resultMatch);
- assert.eq(resultFind, expectedResults);
- })();
+ }
+ }])
+ .toArray();
+ // Validate that {$ne: [{$regexFind: ...}, null]} produces the same result as
+ // {$regexMatch: ...}.
+ assert.eq(resultFind, resultMatch);
+ assert.eq(resultFind, expectedResults);
+})();
}());
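
For reference, the three expressions return differently shaped results for the same input, which is why the tests above compare $regexFind against null, $regexFindAll against the empty array, and use $regexMatch directly as a predicate. A minimal shell sketch (the collection name is illustrative):

    const demo = db.regex_shapes;  // illustrative collection name
    demo.drop();
    assert.commandWorked(demo.insert({s: "string1string2"}));
    demo.aggregate([{
        $project: {
            _id: 0,
            // {match: "string1", idx: 0, captures: ["string1"]}, or null if nothing matches.
            find: {$regexFind: {input: "$s", regex: "(string[0-9])"}},
            // An array of such documents; [] if nothing matches.
            findAll: {$regexFindAll: {input: "$s", regex: "(string[0-9])"}},
            // A plain boolean.
            match: {$regexMatch: {input: "$s", regex: "(string[0-9])"}}
        }
    }]);
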
diff --git a/jstests/aggregation/expressions/regex_limits.js b/jstests/aggregation/expressions/regex_limits.js
index 8ae924f65eb..eceaede1b8b 100644
--- a/jstests/aggregation/expressions/regex_limits.js
+++ b/jstests/aggregation/expressions/regex_limits.js
@@ -2,119 +2,112 @@
* Tests to validate limits for $regexFind, $regexFindAll and $regexMatch aggregation expressions.
*/
(function() {
- 'use strict';
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode().
-
- const coll = db.regex_expr_limit;
- coll.drop();
- assert.commandWorked(coll.insert({z: "c".repeat(50000) + "d".repeat(50000) + "e"}));
-
- function testRegexAgg(inputObj, expectedOutputForFindAll) {
- const resultFindAll =
- coll.aggregate([{"$project": {_id: 0, "matches": {"$regexFindAll": inputObj}}}])
- .toArray();
- assert.eq(resultFindAll, [{"matches": expectedOutputForFindAll}]);
-
- const resultFind =
- coll.aggregate([{"$project": {_id: 0, "matches": {"$regexFind": inputObj}}}]).toArray();
- assert.eq(
- resultFind, [{
- "matches": expectedOutputForFindAll.length == 0 ? null : expectedOutputForFindAll[0]
- }]);
-
- const resultMatch =
- coll.aggregate([{"$project": {_id: 0, "matches": {"$regexMatch": inputObj}}}])
- .toArray();
- assert.eq(resultMatch, [{"matches": expectedOutputForFindAll.length != 0}]);
+'use strict';
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode().
+
+const coll = db.regex_expr_limit;
+coll.drop();
+assert.commandWorked(coll.insert({z: "c".repeat(50000) + "d".repeat(50000) + "e"}));
+
+function testRegexAgg(inputObj, expectedOutputForFindAll) {
+ const resultFindAll =
+ coll.aggregate([{"$project": {_id: 0, "matches": {"$regexFindAll": inputObj}}}]).toArray();
+ assert.eq(resultFindAll, [{"matches": expectedOutputForFindAll}]);
+
+ const resultFind =
+ coll.aggregate([{"$project": {_id: 0, "matches": {"$regexFind": inputObj}}}]).toArray();
+ assert.eq(
+ resultFind,
+ [{"matches": expectedOutputForFindAll.length == 0 ? null : expectedOutputForFindAll[0]}]);
+
+ const resultMatch =
+ coll.aggregate([{"$project": {_id: 0, "matches": {"$regexMatch": inputObj}}}]).toArray();
+ assert.eq(resultMatch, [{"matches": expectedOutputForFindAll.length != 0}]);
+}
+
+function testRegexAggException(inputObj, exceptionCode, expression) {
+ // If expression is defined, run tests only against that expression.
+ if (expression != undefined) {
+ assertErrorCode(coll, [{"$project": {"matches": {[expression]: inputObj}}}], exceptionCode);
+ return;
}
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexFindAll": inputObj}}}], exceptionCode);
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexFind": inputObj}}}], exceptionCode);
+ assertErrorCode(coll, [{"$project": {"matches": {"$regexMatch": inputObj}}}], exceptionCode);
+}
+
+(function testLongRegex() {
+ // PCRE doesn't have a direct limit on the regex string length. It will instead error when
+ // the internal memory used while compiling reaches 64KB. When there are no capture groups,
+ // this corresponds to a maximum pattern length of 32764 characters.
+ // Reference: https://www.pcre.org/original/doc/html/pcrelimits.html
+ const kMaxRegexPatternLen = 32764;
+ const patternMaxLen = "c".repeat(kMaxRegexPatternLen);
+
+ // Test that a regex with maximum allowable pattern length can find a document.
+ testRegexAgg({input: "$z", regex: patternMaxLen},
+ [{match: patternMaxLen, "idx": 0, "captures": []}]);
+
+ // Test that a regex pattern exceeding the limit fails.
+ const patternTooLong = patternMaxLen + "c";
+ testRegexAggException({input: "$z", regex: patternTooLong}, 51111);
+})();
- function testRegexAggException(inputObj, exceptionCode, expression) {
- // If expression is defined, run tests only against that expression.
- if (expression != undefined) {
- assertErrorCode(
- coll, [{"$project": {"matches": {[expression]: inputObj}}}], exceptionCode);
- return;
- }
- assertErrorCode(
- coll, [{"$project": {"matches": {"$regexFindAll": inputObj}}}], exceptionCode);
- assertErrorCode(coll, [{"$project": {"matches": {"$regexFind": inputObj}}}], exceptionCode);
- assertErrorCode(
- coll, [{"$project": {"matches": {"$regexMatch": inputObj}}}], exceptionCode);
- }
+(function testBufferOverflow() {
+ // When the pattern is empty, $regexFindAll will match each character individually. If there
+ // are 'n' characters in the input, this results in 'n' individual matches. If the
+ // pattern additionally has 'k' capture groups, the output document will have 'n * k'
+ // sub-strings representing the captures.
+ const pattern = "(".repeat(100) + ")".repeat(100);
+ // If the intermediate document size exceeds 64MB at any point, we will stop further
+ // evaluation and throw an error.
+ testRegexAggException({input: "$z", regex: pattern}, 51151, "$regexFindAll");
+
+ const pattern2 = "()".repeat(100);
+ testRegexAggException({input: "$z", regex: pattern2}, 51151, "$regexFindAll");
+})();
- (function testLongRegex() {
- // PCRE doesn't have a direct limit on the regex string length. It will instead error when
- // the internal memory used while compiling reaches 64KB. When there are no capture groups,
- // this corresponds to a maximum pattern length of 32764 characters.
- // Reference: https://www.pcre.org/original/doc/html/pcrelimits.html
- const kMaxRegexPatternLen = 32764;
- const patternMaxLen = "c".repeat(kMaxRegexPatternLen);
-
- // Test that a regex with maximum allowable pattern length can find a document.
- testRegexAgg({input: "$z", regex: patternMaxLen},
- [{match: patternMaxLen, "idx": 0, "captures": []}]);
-
- // Test that a regex pattern exceeding the limit fails.
- const patternTooLong = patternMaxLen + "c";
- testRegexAggException({input: "$z", regex: patternTooLong}, 51111);
- })();
-
- (function testBufferOverflow() {
- // When the pattern is empty, $regexFindAll will match each character individually. If there
- // are 'n' characters in the input, this results in 'n' individual matches. If the
- // pattern additionally has 'k' capture groups, the output document will have 'n * k'
- // sub-strings representing the captures.
- const pattern = "(".repeat(100) + ")".repeat(100);
- // If the intermediate document size exceeds 64MB at any point, we will stop further
- // evaluation and throw an error.
- testRegexAggException({input: "$z", regex: pattern}, 51151, "$regexFindAll");
-
- const pattern2 = "()".repeat(100);
- testRegexAggException({input: "$z", regex: pattern2}, 51151, "$regexFindAll");
- })();
-
- (function testNumberOfCaptureGroupLimit() {
- // Even though PCRE has a much higher limit on captures (65535), we are constrained by the
- // other limit: the maximum internal memory PCRE uses while compiling, which is 64KB. PCRE
- // uses far more memory when there are capture groups, so as the number of capture groups
- // increases, the maximum regex length shrinks by a factor of around 4.
- const approxAllowedCaptureGroups = 3999;
- let pattern = "(d)".repeat(approxAllowedCaptureGroups) + "e";
- const expectedOutputCaptures = new Array(approxAllowedCaptureGroups).fill('d');
-
- testRegexAgg({input: "$z", regex: pattern}, [{
- match: "d".repeat(approxAllowedCaptureGroups) + "e",
- "idx": 96001,
- "captures": expectedOutputCaptures
- }]);
-
- // In this case, during execution, PCRE will hit PCRE_ERROR_RECURSIONLIMIT because of the
- // high number of captures and return an error.
- const bufferExecutionFailure = 2553;
- pattern = "(d)".repeat(bufferExecutionFailure) + pattern;
- testRegexAggException({input: "$z", regex: pattern}, 51156);
-
- // Add one more capture group to the pattern so that it tips over the maximum regex length
- // limit, and verify that PCRE throws an error while attempting to compile.
- pattern = "(d)" + pattern;
- testRegexAggException({input: "$z", regex: pattern}, 51111);
- })();
-
- (function testMaxCaptureDepth() {
- const kMaxCaptureDepthLen = 250;
- // Create a pattern with capture groups nested 250 deep, of the form '((((...e...))))'.
- const patternMaxDepth =
- "(".repeat(kMaxCaptureDepthLen) + "e" + ")".repeat(kMaxCaptureDepthLen);
- const expectedOutputCaptures = new Array(kMaxCaptureDepthLen).fill('e');
-
- // Test that there is a match.
- testRegexAgg({input: "$z", regex: patternMaxDepth},
- [{match: "e", "idx": 100000, "captures": expectedOutputCaptures}]);
-
- // Add one more and verify that regex expression throws an error.
- const patternTooLong = '(' + patternMaxDepth + ')';
- testRegexAggException({input: "$z", regex: patternTooLong}, 51111);
- })();
+(function testNumberOfCaptureGroupLimit() {
+ // Even though PCRE has a much higher limit on captures (65535), we are constrained by the
+ // other limit: the maximum internal memory PCRE uses while compiling, which is 64KB. PCRE
+ // uses far more memory when there are capture groups, so as the number of capture groups
+ // increases, the maximum regex length shrinks by a factor of around 4.
+ const approxAllowedCaptureGroups = 3999;
+ let pattern = "(d)".repeat(approxAllowedCaptureGroups) + "e";
+ const expectedOutputCaptures = new Array(approxAllowedCaptureGroups).fill('d');
+
+ testRegexAgg({input: "$z", regex: pattern}, [{
+ match: "d".repeat(approxAllowedCaptureGroups) + "e",
+ "idx": 96001,
+ "captures": expectedOutputCaptures
+ }]);
+
+ // In this case, during execution, PCRE will hit PCRE_ERROR_RECURSIONLIMIT because of the
+ // high number of captures and return an error.
+ const bufferExecutionFailure = 2553;
+ pattern = "(d)".repeat(bufferExecutionFailure) + pattern;
+ testRegexAggException({input: "$z", regex: pattern}, 51156);
+
+ // Add one more capture group to the pattern so that it tips over the maximum regex length
+ // limit, and verify that PCRE throws an error while attempting to compile.
+ pattern = "(d)" + pattern;
+ testRegexAggException({input: "$z", regex: pattern}, 51111);
+})();
+(function testMaxCaptureDepth() {
+ const kMaxCaptureDepthLen = 250;
+ // Create a pattern with capture groups nested 250 deep, of the form '((((...e...))))'.
+ const patternMaxDepth = "(".repeat(kMaxCaptureDepthLen) + "e" +
+ ")".repeat(kMaxCaptureDepthLen);
+ const expectedOutputCaptures = new Array(kMaxCaptureDepthLen).fill('e');
+
+ // Test that there is a match.
+ testRegexAgg({input: "$z", regex: patternMaxDepth},
+ [{match: "e", "idx": 100000, "captures": expectedOutputCaptures}]);
+
+ // Add one more and verify that regex expression throws an error.
+ const patternTooLong = '(' + patternMaxDepth + ')';
+ testRegexAggException({input: "$z", regex: patternTooLong}, 51111);
+})();
})();
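
Assuming the same error code observed above (51111 for a pattern that fails to compile), the compile-memory boundary can be probed directly from the shell; a minimal sketch reusing this test's collection:

    const probe = db.regex_expr_limit;
    const kMaxLen = 32764;  // no capture groups; see the pcrelimits reference above
    const tooLong = "c".repeat(kMaxLen + 1);
    const err = assert.throws(
        () => probe.aggregate([{$project: {m: {$regexMatch: {input: "$z", regex: tooLong}}}}])
                   .toArray());
    assert.eq(51111, err.code);
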
diff --git a/jstests/aggregation/expressions/round_trunc.js b/jstests/aggregation/expressions/round_trunc.js
index 0941d1e4275..735c2b54477 100644
--- a/jstests/aggregation/expressions/round_trunc.js
+++ b/jstests/aggregation/expressions/round_trunc.js
@@ -1,115 +1,114 @@
// Basic integration tests for the $round and $trunc aggregation expressions.
(function() {
- "use strict";
+"use strict";
- // For assertErrorCode.
- load("jstests/aggregation/extras/utils.js");
+// For assertErrorCode.
+load("jstests/aggregation/extras/utils.js");
- var coll = db.server19548;
- coll.drop();
- // Seed collection so that the pipeline will execute.
- assert.writeOK(coll.insert({}));
+var coll = db.server19548;
+coll.drop();
+// Seed collection so that the pipeline will execute.
+assert.writeOK(coll.insert({}));
- // Helper for testing that op returns expResult.
- function testOp(op, expResult) {
- var pipeline = [{$project: {_id: 0, result: op}}];
- assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
- }
+// Helper for testing that op returns expResult.
+function testOp(op, expResult) {
+ var pipeline = [{$project: {_id: 0, result: op}}];
+ assert.eq(coll.aggregate(pipeline).toArray(), [{result: expResult}]);
+}
- // Test $trunc and $round with one argument.
- testOp({$trunc: NumberLong(4)}, NumberLong(4));
- testOp({$trunc: NaN}, NaN);
- testOp({$trunc: Infinity}, Infinity);
- testOp({$trunc: -Infinity}, -Infinity);
- testOp({$trunc: null}, null);
- testOp({$trunc: -2.0}, -2.0);
- testOp({$trunc: 0.9}, 0.0);
- testOp({$trunc: -1.2}, -1.0);
- testOp({$trunc: NumberDecimal("-1.6")}, NumberDecimal("-1"));
+// Test $trunc and $round with one argument.
+testOp({$trunc: NumberLong(4)}, NumberLong(4));
+testOp({$trunc: NaN}, NaN);
+testOp({$trunc: Infinity}, Infinity);
+testOp({$trunc: -Infinity}, -Infinity);
+testOp({$trunc: null}, null);
+testOp({$trunc: -2.0}, -2.0);
+testOp({$trunc: 0.9}, 0.0);
+testOp({$trunc: -1.2}, -1.0);
+testOp({$trunc: NumberDecimal("-1.6")}, NumberDecimal("-1"));
- testOp({$round: NumberLong(4)}, NumberLong(4));
- testOp({$round: NaN}, NaN);
- testOp({$round: Infinity}, Infinity);
- testOp({$round: -Infinity}, -Infinity);
- testOp({$round: null}, null);
- testOp({$round: -2.0}, -2.0);
- testOp({$round: 0.9}, 1.0);
- testOp({$round: -1.2}, -1.0);
- testOp({$round: NumberDecimal("-1.6")}, NumberDecimal("-2"));
+testOp({$round: NumberLong(4)}, NumberLong(4));
+testOp({$round: NaN}, NaN);
+testOp({$round: Infinity}, Infinity);
+testOp({$round: -Infinity}, -Infinity);
+testOp({$round: null}, null);
+testOp({$round: -2.0}, -2.0);
+testOp({$round: 0.9}, 1.0);
+testOp({$round: -1.2}, -1.0);
+testOp({$round: NumberDecimal("-1.6")}, NumberDecimal("-2"));
- // Test $trunc and $round with two arguments.
- testOp({$trunc: [1.298, 0]}, 1);
- testOp({$trunc: [1.298, 1]}, 1.2);
- testOp({$trunc: [23.298, -1]}, 20);
- testOp({$trunc: [NumberDecimal("1.298"), 0]}, NumberDecimal("1"));
- testOp({$trunc: [NumberDecimal("1.298"), 1]}, NumberDecimal("1.2"));
- testOp({$trunc: [NumberDecimal("23.298"), -1]}, NumberDecimal("2E+1"));
- testOp({$trunc: [1.298, 100]}, 1.298);
- testOp({$trunc: [NumberDecimal("1.298912343250054252245154325"), NumberLong("20")]},
- NumberDecimal("1.29891234325005425224"));
- testOp({$trunc: [NumberDecimal("1.298"), NumberDecimal("100")]},
- NumberDecimal("1.298000000000000000000000000000000"));
+// Test $trunc and $round with two arguments.
+testOp({$trunc: [1.298, 0]}, 1);
+testOp({$trunc: [1.298, 1]}, 1.2);
+testOp({$trunc: [23.298, -1]}, 20);
+testOp({$trunc: [NumberDecimal("1.298"), 0]}, NumberDecimal("1"));
+testOp({$trunc: [NumberDecimal("1.298"), 1]}, NumberDecimal("1.2"));
+testOp({$trunc: [NumberDecimal("23.298"), -1]}, NumberDecimal("2E+1"));
+testOp({$trunc: [1.298, 100]}, 1.298);
+testOp({$trunc: [NumberDecimal("1.298912343250054252245154325"), NumberLong("20")]},
+ NumberDecimal("1.29891234325005425224"));
+testOp({$trunc: [NumberDecimal("1.298"), NumberDecimal("100")]},
+ NumberDecimal("1.298000000000000000000000000000000"));
- testOp({$round: [1.298, 0]}, 1);
- testOp({$round: [1.298, 1]}, 1.3);
- testOp({$round: [23.298, -1]}, 20);
- testOp({$round: [NumberDecimal("1.298"), 0]}, NumberDecimal("1"));
- testOp({$round: [NumberDecimal("1.298"), 1]}, NumberDecimal("1.3"));
- testOp({$round: [NumberDecimal("23.298"), -1]}, NumberDecimal("2E+1"));
- testOp({$round: [1.298, 100]}, 1.298);
- testOp({$round: [NumberDecimal("1.298912343250054252245154325"), NumberLong("20")]},
- NumberDecimal("1.29891234325005425225"));
- testOp({$round: [NumberDecimal("1.298"), NumberDecimal("100")]},
- NumberDecimal("1.298000000000000000000000000000000"));
+testOp({$round: [1.298, 0]}, 1);
+testOp({$round: [1.298, 1]}, 1.3);
+testOp({$round: [23.298, -1]}, 20);
+testOp({$round: [NumberDecimal("1.298"), 0]}, NumberDecimal("1"));
+testOp({$round: [NumberDecimal("1.298"), 1]}, NumberDecimal("1.3"));
+testOp({$round: [NumberDecimal("23.298"), -1]}, NumberDecimal("2E+1"));
+testOp({$round: [1.298, 100]}, 1.298);
+testOp({$round: [NumberDecimal("1.298912343250054252245154325"), NumberLong("20")]},
+ NumberDecimal("1.29891234325005425225"));
+testOp({$round: [NumberDecimal("1.298"), NumberDecimal("100")]},
+ NumberDecimal("1.298000000000000000000000000000000"));
- // Test $round overflow.
- testOp({$round: [NumberInt("2147483647"), -1]}, NumberLong("2147483650"));
- assertErrorCode(
- coll, [{$project: {a: {$round: [NumberLong("9223372036854775806"), -1]}}}], 51080);
+// Test $round overflow.
+testOp({$round: [NumberInt("2147483647"), -1]}, NumberLong("2147483650"));
+assertErrorCode(coll, [{$project: {a: {$round: [NumberLong("9223372036854775806"), -1]}}}], 51080);
- // Test $trunc and $round with more than 2 arguments.
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, 2, 3]}}}], 28667);
- assertErrorCode(coll, [{$project: {a: {$round: [1, 2, 3]}}}], 28667);
+// Test $trunc and $round with more than 2 arguments.
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, 2, 3]}}}], 28667);
+assertErrorCode(coll, [{$project: {a: {$round: [1, 2, 3]}}}], 28667);
- // Test non-numeric input to $trunc and $round.
- assertErrorCode(coll, [{$project: {a: {$round: "string"}}}], 51081);
- assertErrorCode(coll, [{$project: {a: {$trunc: "string"}}}], 51081);
+// Test non-numeric input to $trunc and $round.
+assertErrorCode(coll, [{$project: {a: {$round: "string"}}}], 51081);
+assertErrorCode(coll, [{$project: {a: {$trunc: "string"}}}], 51081);
- // Test NaN and Infinity numeric args.
- testOp({$round: [Infinity, 0]}, Infinity);
- testOp({$round: [-Infinity, 0]}, -Infinity);
- testOp({$round: [NaN, 0]}, NaN);
- testOp({$round: [NumberDecimal("Infinity"), 0]}, NumberDecimal("Infinity"));
- testOp({$round: [NumberDecimal("-Infinity"), 0]}, NumberDecimal("-Infinity"));
- testOp({$round: [NumberDecimal("NaN"), 0]}, NumberDecimal("NaN"));
+// Test NaN and Infinity numeric args.
+testOp({$round: [Infinity, 0]}, Infinity);
+testOp({$round: [-Infinity, 0]}, -Infinity);
+testOp({$round: [NaN, 0]}, NaN);
+testOp({$round: [NumberDecimal("Infinity"), 0]}, NumberDecimal("Infinity"));
+testOp({$round: [NumberDecimal("-Infinity"), 0]}, NumberDecimal("-Infinity"));
+testOp({$round: [NumberDecimal("NaN"), 0]}, NumberDecimal("NaN"));
- testOp({$trunc: [Infinity, 0]}, Infinity);
- testOp({$trunc: [-Infinity, 0]}, -Infinity);
- testOp({$trunc: [NaN, 0]}, NaN);
- testOp({$trunc: [NumberDecimal("Infinity"), 0]}, NumberDecimal("Infinity"));
- testOp({$trunc: [NumberDecimal("-Infinity"), 0]}, NumberDecimal("-Infinity"));
- testOp({$trunc: [NumberDecimal("NaN"), 0]}, NumberDecimal("NaN"));
+testOp({$trunc: [Infinity, 0]}, Infinity);
+testOp({$trunc: [-Infinity, 0]}, -Infinity);
+testOp({$trunc: [NaN, 0]}, NaN);
+testOp({$trunc: [NumberDecimal("Infinity"), 0]}, NumberDecimal("Infinity"));
+testOp({$trunc: [NumberDecimal("-Infinity"), 0]}, NumberDecimal("-Infinity"));
+testOp({$trunc: [NumberDecimal("NaN"), 0]}, NumberDecimal("NaN"));
- // Test precision arguments that are out of bounds.
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberLong("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberLong("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberInt("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberInt("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, 101]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$round: [1, -21]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberLong("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberLong("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberDecimal("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberDecimal("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberInt("101")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberInt("-21")]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, 101]}}}], 51083);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, -21]}}}], 51083);
+// Test precision arguments that are out of bounds.
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberLong("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberLong("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberInt("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberInt("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, 101]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$round: [1, -21]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberLong("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberLong("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberDecimal("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberDecimal("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberInt("101")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, NumberInt("-21")]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, 101]}}}], 51083);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, -21]}}}], 51083);
- // Test non-integral precision arguments.
- assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("1.4")]}}}], 51082);
- assertErrorCode(coll, [{$project: {a: {$trunc: [1, 10.5]}}}], 51082);
+// Test non-integral precision arguments.
+assertErrorCode(coll, [{$project: {a: {$round: [1, NumberDecimal("1.4")]}}}], 51082);
+assertErrorCode(coll, [{$project: {a: {$trunc: [1, 10.5]}}}], 51082);
}());
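
The assertions above pin down the two-argument form: the second argument is the decimal place to keep, negative values operate to the left of the decimal point, and it must be an integer in [-20, 100] (codes 51082 and 51083 otherwise). A compact sketch drawn from the cases above:

    db.server19548.aggregate([{
        $project: {
            _id: 0,
            oneDecimal: {$round: [1.298, 1]},  // 1.3
            tens: {$trunc: [23.298, -1]},      // 20; truncates left of the decimal point
            asIs: {$round: [1.298, 100]}       // 1.298; precision beyond the value is a no-op
        }
    }]);
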
diff --git a/jstests/aggregation/expressions/size.js b/jstests/aggregation/expressions/size.js
index c3ccec34fb3..4e21c71bf4e 100644
--- a/jstests/aggregation/expressions/size.js
+++ b/jstests/aggregation/expressions/size.js
@@ -2,22 +2,21 @@
* Test the $size expression.
*/
(function() {
- "use strict";
- load("jstests/aggregation/extras/utils.js");
+"use strict";
+load("jstests/aggregation/extras/utils.js");
- const coll = db.expression_size;
- coll.drop();
+const coll = db.expression_size;
+coll.drop();
- assert.writeOK(coll.insert({_id: 0, arr: []}));
- assert.writeOK(coll.insert({_id: 1, arr: [1]}));
- assert.writeOK(coll.insert({_id: 2, arr: ["asdf", "asdfasdf"]}));
- assert.writeOK(coll.insert({_id: 3, arr: [1, "asdf", 1234, 4.3, {key: 23}]}));
- assert.writeOK(coll.insert({_id: 4, arr: [3, [31, 31, 13, 13]]}));
+assert.writeOK(coll.insert({_id: 0, arr: []}));
+assert.writeOK(coll.insert({_id: 1, arr: [1]}));
+assert.writeOK(coll.insert({_id: 2, arr: ["asdf", "asdfasdf"]}));
+assert.writeOK(coll.insert({_id: 3, arr: [1, "asdf", 1234, 4.3, {key: 23}]}));
+assert.writeOK(coll.insert({_id: 4, arr: [3, [31, 31, 13, 13]]}));
- const result =
- coll.aggregate([{$sort: {_id: 1}}, {$project: {_id: 0, length: {$size: "$arr"}}}]);
- assert.eq(result.toArray(), [{length: 0}, {length: 1}, {length: 2}, {length: 5}, {length: 2}]);
+const result = coll.aggregate([{$sort: {_id: 1}}, {$project: {_id: 0, length: {$size: "$arr"}}}]);
+assert.eq(result.toArray(), [{length: 0}, {length: 1}, {length: 2}, {length: 5}, {length: 2}]);
- assert.writeOK(coll.insert({arr: 231}));
- assertErrorCode(coll, {$project: {_id: 0, length: {$size: "$arr"}}}, 17124);
+assert.writeOK(coll.insert({arr: 231}));
+assertErrorCode(coll, {$project: {_id: 0, length: {$size: "$arr"}}}, 17124);
}());
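
Since $size raises error 17124 on non-array input, as the final assertion shows, a field of uncertain type can be guarded with $isArray; a sketch under that assumption:

    db.expression_size.aggregate([{
        $project: {
            _id: 0,
            // Produces null instead of error 17124 when 'arr' is missing or not an array.
            length: {$cond: [{$isArray: "$arr"}, {$size: "$arr"}, null]}
        }
    }]);
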
diff --git a/jstests/aggregation/expressions/split.js b/jstests/aggregation/expressions/split.js
index 7d3402bde4e..86200334395 100644
--- a/jstests/aggregation/expressions/split.js
+++ b/jstests/aggregation/expressions/split.js
@@ -3,58 +3,68 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and testExpression.
(function() {
- "use strict";
-
- var coll = db.split;
- coll.drop();
- assert.writeOK(coll.insert({}));
-
- testExpression(coll, {$split: ["abc", "b"]}, ["a", "c"]);
- testExpression(coll, {$split: ["aaa", "b"]}, ["aaa"]);
- testExpression(coll, {$split: ["a b a", "b"]}, ["a ", " a"]);
- testExpression(coll, {$split: ["a", "a"]}, ["", ""]);
- testExpression(coll, {$split: ["aa", "a"]}, ["", "", ""]);
- testExpression(coll, {$split: ["aaa", "a"]}, ["", "", "", ""]);
- testExpression(coll, {$split: ["", "a"]}, [""]);
- testExpression(coll, {$split: ["abc abc cba abc", "abc"]}, ["", " ", " cba ", ""]);
-
- // Ensure that $split operates correctly when the string has embedded null bytes.
- testExpression(coll, {$split: ["a\0b\0c", "\0"]}, ["a", "b", "c"]);
- testExpression(coll, {$split: ["\0a\0", "a"]}, ["\0", "\0"]);
- testExpression(coll, {$split: ["\0\0\0", "a"]}, ["\0\0\0"]);
-
- // Ensure that $split operates correctly when the token or input string contains multi-byte
- // characters. Note that this expression is not unicode-aware; splitting is based wholly on
- // the byte sequences of the input and token.
- testExpression(coll, {$split: ["∫a∫", "a"]}, ["∫", "∫"]);
- testExpression(coll, {$split: ["a∫∫a", "∫"]}, ["a", "", "a"]);
-
- // Ensure that $split produces null when given null as input.
- testExpression(coll, {$split: ["abc", null]}, null);
- testExpression(coll, {$split: [null, "abc"]}, null);
-
- // Ensure that $split produces null when given missing fields as input.
- testExpression(coll, {$split: ["$a", "a"]}, null);
- testExpression(coll, {$split: ["a", "$a"]}, null);
-
- // Ensure that $split errors when given more or fewer than two arguments.
- var pipeline = {$project: {split: {$split: []}}};
- assertErrorCode(coll, pipeline, 16020);
-
- pipeline = {$project: {split: {$split: ["a"]}}};
- assertErrorCode(coll, pipeline, 16020);
-
- pipeline = {$project: {split: {$split: ["a", "b", "c"]}}};
- assertErrorCode(coll, pipeline, 16020);
-
- // Ensure that $split errors when given non-string input.
- pipeline = {$project: {split: {$split: [1, "abc"]}}};
- assertErrorCode(coll, pipeline, 40085);
-
- pipeline = {$project: {split: {$split: ["abc", 1]}}};
- assertErrorCode(coll, pipeline, 40086);
-
- // Ensure that $split errors when given an empty separator.
- pipeline = {$project: {split: {$split: ["abc", ""]}}};
- assertErrorCode(coll, pipeline, 40087);
+"use strict";
+
+var coll = db.split;
+coll.drop();
+assert.writeOK(coll.insert({}));
+
+testExpression(coll, {$split: ["abc", "b"]}, ["a", "c"]);
+testExpression(coll, {$split: ["aaa", "b"]}, ["aaa"]);
+testExpression(coll, {$split: ["a b a", "b"]}, ["a ", " a"]);
+testExpression(coll, {$split: ["a", "a"]}, ["", ""]);
+testExpression(coll, {$split: ["aa", "a"]}, ["", "", ""]);
+testExpression(coll, {$split: ["aaa", "a"]}, ["", "", "", ""]);
+testExpression(coll, {$split: ["", "a"]}, [""]);
+testExpression(coll, {$split: ["abc abc cba abc", "abc"]}, ["", " ", " cba ", ""]);
+
+// Ensure that $split operates correctly when the string has embedded null bytes.
+testExpression(coll, {$split: ["a\0b\0c", "\0"]}, ["a", "b", "c"]);
+testExpression(coll, {$split: ["\0a\0", "a"]}, ["\0", "\0"]);
+testExpression(coll, {$split: ["\0\0\0", "a"]}, ["\0\0\0"]);
+
+// Ensure that $split operates correctly when the token or input string contains multi-byte
+// characters. Note that this expression is not unicode-aware; splitting is based wholly on
+// the byte sequences of the input and token.
+testExpression(coll, {$split: ["∫a∫", "a"]}, ["∫", "∫"]);
+testExpression(coll, {$split: ["a∫∫a", "∫"]}, ["a", "", "a"]);
+
+// Ensure that $split produces null when given null as input.
+testExpression(coll, {$split: ["abc", null]}, null);
+testExpression(coll, {$split: [null, "abc"]}, null);
+
+// Ensure that $split produces null when given missing fields as input.
+testExpression(coll, {$split: ["$a", "a"]}, null);
+testExpression(coll, {$split: ["a", "$a"]}, null);
+
+// Ensure that $split errors when given more or fewer than two arguments.
+var pipeline = {$project: {split: {$split: []}}};
+assertErrorCode(coll, pipeline, 16020);
+
+pipeline = {
+ $project: {split: {$split: ["a"]}}
+};
+assertErrorCode(coll, pipeline, 16020);
+
+pipeline = {
+ $project: {split: {$split: ["a", "b", "c"]}}
+};
+assertErrorCode(coll, pipeline, 16020);
+
+// Ensure that $split errors when given non-string input.
+pipeline = {
+ $project: {split: {$split: [1, "abc"]}}
+};
+assertErrorCode(coll, pipeline, 40085);
+
+pipeline = {
+ $project: {split: {$split: ["abc", 1]}}
+};
+assertErrorCode(coll, pipeline, 40086);
+
+// Ensure that $split errors when given an empty separator.
+pipeline = {
+ $project: {split: {$split: ["abc", ""]}}
+};
+assertErrorCode(coll, pipeline, 40087);
}());
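
The expression composes naturally with array operators; for instance, pairing $split with $arrayElemAt (the same combination the trim tests later in this patch use) extracts a single token. A sketch with an illustrative collection and field name:

    const mail = db.split_demo;  // illustrative collection name
    mail.drop();
    assert.commandWorked(mail.insert({email: "user@example.com"}));  // 'email' is illustrative
    mail.aggregate([{
        $project: {_id: 0, domain: {$arrayElemAt: [{$split: ["$email", "@"]}, 1]}}
    }]);  // yields {domain: "example.com"}
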
diff --git a/jstests/aggregation/expressions/switch.js b/jstests/aggregation/expressions/switch.js
index 64cd9e1db2f..4521d629905 100644
--- a/jstests/aggregation/expressions/switch.js
+++ b/jstests/aggregation/expressions/switch.js
@@ -2,147 +2,143 @@
// of the expression.
(function() {
- "use strict";
-
- var coll = db.switch;
- coll.drop();
-
- // Insert an empty document so that something can flow through the pipeline.
- coll.insert({});
-
- // Ensure that a branch is correctly evaluated.
- var pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch": {
- "branches": [{"case": {"$eq": [1, 1]}, "then": "one is equal to one!"}],
- }
- }
- }
- };
- var res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": "one is equal to one!"});
-
- // Ensure that the first branch which matches is chosen.
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch": {
- "branches": [
- {"case": {"$eq": [1, 1]}, "then": "one is equal to one!"},
- {"case": {"$eq": [2, 2]}, "then": "two is equal to two!"}
- ],
- }
- }
- }
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": "one is equal to one!"});
-
- // Ensure that the default is chosen if no case matches.
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch": {
- "branches": [{"case": {"$eq": [1, 2]}, "then": "one is equal to two!"}],
- "default": "no case matched."
- }
+"use strict";
+
+var coll = db.switch;
+coll.drop();
+
+// Insert an empty document so that something can flow through the pipeline.
+coll.insert({});
+
+// Ensure that a branch is correctly evaluated.
+var pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {
+ "$switch": {
+ "branches": [{"case": {"$eq": [1, 1]}, "then": "one is equal to one!"}],
}
}
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": "no case matched."});
-
- // Ensure that nullish values are treated as false when they appear as a "case", and
- // propagate as null otherwise.
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch": {
- "branches": [{"case": null, "then": "Null was true!"}],
- "default": "No case matched."
- }
+ }
+};
+var res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": "one is equal to one!"});
+
+// Ensure that the first branch which matches is chosen.
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {
+ "$switch": {
+ "branches": [
+ {"case": {"$eq": [1, 1]}, "then": "one is equal to one!"},
+ {"case": {"$eq": [2, 2]}, "then": "two is equal to two!"}
+ ],
}
}
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": "No case matched."});
-
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch": {
- "branches": [{"case": "$missingField", "then": "Null was true!"}],
- "default": "No case matched."
- }
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": "one is equal to one!"});
+
+// Ensure that the default is chosen if no case matches.
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {
+ "$switch": {
+ "branches": [{"case": {"$eq": [1, 2]}, "then": "one is equal to two!"}],
+ "default": "no case matched."
}
}
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": "No case matched."});
-
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {"$switch": {"branches": [{"case": true, "then": null}], "default": false}}
- }
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": null});
-
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch":
- {"branches": [{"case": true, "then": "$missingField"}], "default": false}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": "no case matched."});
+
+// Ensure that nullish values are treated as false when they appear as a "case", and
+// propagate as null otherwise.
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {
+ "$switch": {
+ "branches": [{"case": null, "then": "Null was true!"}],
+ "default": "No case matched."
}
}
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {});
-
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {"$switch": {"branches": [{"case": null, "then": false}], "default": null}}
- }
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {"output": null});
-
- pipeline = {
- "$project": {
- "_id": 0,
- "output": {
- "$switch":
- {"branches": [{"case": null, "then": false}], "default": "$missingField"}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": "No case matched."});
+
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {
+ "$switch": {
+ "branches": [{"case": "$missingField", "then": "Null was true!"}],
+ "default": "No case matched."
}
}
- };
- res = coll.aggregate(pipeline).toArray();
-
- assert.eq(res.length, 1);
- assert.eq(res[0], {});
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": "No case matched."});
+
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {"$switch": {"branches": [{"case": true, "then": null}], "default": false}}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": null});
+
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output":
+ {"$switch": {"branches": [{"case": true, "then": "$missingField"}], "default": false}}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {});
+
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output": {"$switch": {"branches": [{"case": null, "then": false}], "default": null}}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {"output": null});
+
+pipeline = {
+ "$project": {
+ "_id": 0,
+ "output":
+ {"$switch": {"branches": [{"case": null, "then": false}], "default": "$missingField"}}
+ }
+};
+res = coll.aggregate(pipeline).toArray();
+
+assert.eq(res.length, 1);
+assert.eq(res[0], {});
}());
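
The nullish behavior exercised above can be stated compactly: a nullish "case" is treated as false, while a nullish "then" or "default" simply propagates (an explicit null stays null, and a missing field yields no output field). A sketch:

    db.switch.aggregate([{
        $project: {
            _id: 0,
            output: {
                $switch: {
                    branches: [{case: "$missingField", then: "never chosen"}],  // nullish case is false
                    default: "fell through"
                }
            }
        }
    }]);  // yields {output: "fell through"} for every input document
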
diff --git a/jstests/aggregation/expressions/switch_errors.js b/jstests/aggregation/expressions/switch_errors.js
index 0d9023fb250..1cead260526 100644
--- a/jstests/aggregation/expressions/switch_errors.js
+++ b/jstests/aggregation/expressions/switch_errors.js
@@ -3,55 +3,65 @@
load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
(function() {
- "use strict";
+"use strict";
- var coll = db.switch;
- coll.drop();
+var coll = db.switch;
+coll.drop();
- var pipeline = {"$project": {"output": {"$switch": "not an object"}}};
- assertErrorCode(coll, pipeline, 40060, "$switch requires an object as an argument.");
+var pipeline = {"$project": {"output": {"$switch": "not an object"}}};
+assertErrorCode(coll, pipeline, 40060, "$switch requires an object as an argument.");
- pipeline = {"$project": {"output": {"$switch": {"branches": "not an array"}}}};
- assertErrorCode(coll, pipeline, 40061, "$switch requires 'branches' to be an array.");
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": "not an array"}}}
+};
+assertErrorCode(coll, pipeline, 40061, "$switch requires 'branches' to be an array.");
- pipeline = {"$project": {"output": {"$switch": {"branches": ["not an object"]}}}};
- assertErrorCode(coll, pipeline, 40062, "$switch requires each branch to be an object.");
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": ["not an object"]}}}
+};
+assertErrorCode(coll, pipeline, 40062, "$switch requires each branch to be an object.");
- pipeline = {"$project": {"output": {"$switch": {"branches": [{}]}}}};
- assertErrorCode(coll, pipeline, 40064, "$switch requires each branch to have a 'case'.");
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": [{}]}}}
+};
+assertErrorCode(coll, pipeline, 40064, "$switch requires each branch to have a 'case'.");
- pipeline = {
- "$project": {
- "output": {
- "$switch": {
- "branches": [{
- "case": 1,
- }]
- }
+pipeline = {
+ "$project": {
+ "output": {
+ "$switch": {
+ "branches": [{
+ "case": 1,
+ }]
}
}
- };
- assertErrorCode(coll, pipeline, 40065, "$switch requires each branch to have a 'then'.");
-
- pipeline = {
- "$project":
- {"output": {"$switch": {"branches": [{"case": true, "then": false, "badKey": 1}]}}}
- };
- assertErrorCode(coll, pipeline, 40063, "$switch found a branch with an unknown argument");
-
- pipeline = {"$project": {"output": {"$switch": {"notAnArgument": 1}}}};
- assertErrorCode(coll, pipeline, 40067, "$switch found an unknown argument");
-
- pipeline = {"$project": {"output": {"$switch": {"branches": []}}}};
- assertErrorCode(coll, pipeline, 40068, "$switch requires at least one branch");
-
- pipeline = {"$project": {"output": {"$switch": {}}}};
- assertErrorCode(coll, pipeline, 40068, "$switch requires at least one branch");
-
- coll.insert({x: 1});
- pipeline = {
- "$project":
- {"output": {"$switch": {"branches": [{"case": {"$eq": ["$x", 0]}, "then": 1}]}}}
- };
- assertErrorCode(coll, pipeline, 40066, "$switch has no default and an input matched no case");
+ }
+};
+assertErrorCode(coll, pipeline, 40065, "$switch requires each branch to have a 'then'.");
+
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": [{"case": true, "then": false, "badKey": 1}]}}}
+};
+assertErrorCode(coll, pipeline, 40063, "$switch found a branch with an unknown argument");
+
+pipeline = {
+ "$project": {"output": {"$switch": {"notAnArgument": 1}}}
+};
+assertErrorCode(coll, pipeline, 40067, "$switch found an unknown argument");
+
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": []}}}
+};
+assertErrorCode(coll, pipeline, 40068, "$switch requires at least one branch");
+
+pipeline = {
+ "$project": {"output": {"$switch": {}}}
+};
+assertErrorCode(coll, pipeline, 40068, "$switch requires at least one branch");
+
+coll.insert({x: 1});
+pipeline = {
+ "$project": {"output": {"$switch": {"branches": [{"case": {"$eq": ["$x", 0]}, "then": 1}]}}}
+};
+assertErrorCode(coll, pipeline, 40066, "$switch has no default and an input matched no case");
}());
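
Note that 40066 differs from the other codes above: the parse-time errors fire even against an empty collection, whereas 40066 is raised per document at evaluation time, so it is avoided simply by supplying a "default". A sketch based on the final case:

    db.switch.aggregate([{
        $project: {
            output: {
                $switch: {
                    branches: [{case: {$eq: ["$x", 0]}, then: 1}],
                    default: -1  // without this, a document matching no case raises 40066
                }
            }
        }
    }]);
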
diff --git a/jstests/aggregation/expressions/trim.js b/jstests/aggregation/expressions/trim.js
index 34d8573f259..af197adca5a 100644
--- a/jstests/aggregation/expressions/trim.js
+++ b/jstests/aggregation/expressions/trim.js
@@ -2,98 +2,89 @@
* Basic tests for the $trim, $ltrim, and $rtrim expressions.
*/
(function() {
- "use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode, testExpression and
- // testExpressionWithCollation.
+"use strict";
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode, testExpression and
+ // testExpressionWithCollation.
- const coll = db.trim_expressions;
+const coll = db.trim_expressions;
- testExpression(coll, {$trim: {input: " abc "}}, "abc");
- testExpression(coll, {$trim: {input: " a b\nc "}}, "a b\nc");
- testExpression(coll, {$ltrim: {input: "\t abc "}}, "abc ");
- testExpression(coll, {$rtrim: {input: "\t abc "}}, "\t abc");
- testExpression(
- coll,
- {$map: {input: {$split: ["4, 5, 6, 7,8,9, 10", ","]}, in : {$trim: {input: "$$this"}}}},
- ["4", "5", "6", "7", "8", "9", "10"]);
+testExpression(coll, {$trim: {input: " abc "}}, "abc");
+testExpression(coll, {$trim: {input: " a b\nc "}}, "a b\nc");
+testExpression(coll, {$ltrim: {input: "\t abc "}}, "abc ");
+testExpression(coll, {$rtrim: {input: "\t abc "}}, "\t abc");
+testExpression(
+ coll,
+ {$map: {input: {$split: ["4, 5, 6, 7,8,9, 10", ","]}, in : {$trim: {input: "$$this"}}}},
+ ["4", "5", "6", "7", "8", "9", "10"]);
- // Test that the trim expressions do not respect the collation.
- const caseInsensitive = {locale: "en_US", strength: 2};
- testExpressionWithCollation(coll, {$trim: {input: "xXx", chars: "x"}}, "X", caseInsensitive);
- testExpressionWithCollation(coll, {$rtrim: {input: "xXx", chars: "x"}}, "xX", caseInsensitive);
- testExpressionWithCollation(coll, {$ltrim: {input: "xXx", chars: "x"}}, "Xx", caseInsensitive);
+// Test that the trim expressions do not respect the collation.
+const caseInsensitive = {
+ locale: "en_US",
+ strength: 2
+};
+testExpressionWithCollation(coll, {$trim: {input: "xXx", chars: "x"}}, "X", caseInsensitive);
+testExpressionWithCollation(coll, {$rtrim: {input: "xXx", chars: "x"}}, "xX", caseInsensitive);
+testExpressionWithCollation(coll, {$ltrim: {input: "xXx", chars: "x"}}, "Xx", caseInsensitive);
- // Test using inputs from documents.
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, name: ", Charlie"},
- {_id: 1, name: "Obama\t, Barack"},
- {_id: 2, name: " Ride, Sally "}
- ]));
+// Test using inputs from documents.
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, name: ", Charlie"},
+ {_id: 1, name: "Obama\t, Barack"},
+ {_id: 2, name: " Ride, Sally "}
+]));
- assert.eq(
- coll.aggregate([
- {$sort: {_id: 1}},
- {
- $project: {
- firstName: {$trim: {input: {$arrayElemAt: [{$split: ["$name", ","]}, 1]}}}
- }
- }
- ])
- .toArray(),
- [
- {_id: 0, firstName: "Charlie"},
- {_id: 1, firstName: "Barack"},
- {_id: 2, firstName: "Sally"}
- ]);
+assert.eq(
+ coll.aggregate([
+ {$sort: {_id: 1}},
+ {$project: {firstName: {$trim: {input: {$arrayElemAt: [{$split: ["$name", ","]}, 1]}}}}}
+ ])
+ .toArray(),
+ [{_id: 0, firstName: "Charlie"}, {_id: 1, firstName: "Barack"}, {_id: 2, firstName: "Sally"}]);
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, poorlyParsedWebTitle: "The title of my document"},
- {_id: 1, poorlyParsedWebTitle: "\u2001\u2002 Odd unicode indentation"},
- {_id: 2, poorlyParsedWebTitle: "\u2001\u2002 Odd unicode indentation\u200A"},
- ]));
- assert.eq(coll.aggregate([
- {$sort: {_id: 1}},
- {$project: {title: {$ltrim: {input: "$poorlyParsedWebTitle"}}}}
- ])
- .toArray(),
- [
- {_id: 0, title: "The title of my document"},
- {_id: 1, title: "Odd unicode indentation"},
- {_id: 2, title: "Odd unicode indentation\u200A"}
- ]);
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, poorlyParsedWebTitle: "The title of my document"},
+ {_id: 1, poorlyParsedWebTitle: "\u2001\u2002 Odd unicode indentation"},
+ {_id: 2, poorlyParsedWebTitle: "\u2001\u2002 Odd unicode indentation\u200A"},
+]));
+assert.eq(
+ coll.aggregate(
+ [{$sort: {_id: 1}}, {$project: {title: {$ltrim: {input: "$poorlyParsedWebTitle"}}}}])
+ .toArray(),
+ [
+ {_id: 0, title: "The title of my document"},
+ {_id: 1, title: "Odd unicode indentation"},
+ {_id: 2, title: "Odd unicode indentation\u200A"}
+ ]);
- coll.drop();
- assert.writeOK(coll.insert([
- {_id: 0, proof: "Left as an exercise for the reader∎"},
- {_id: 1, proof: "∎∃ proof∎"},
+coll.drop();
+assert.writeOK(coll.insert([
+ {_id: 0, proof: "Left as an exercise for the reader∎"},
+ {_id: 1, proof: "∎∃ proof∎"},
+ {_id: 2, proof: "Just view the problem as a continuous DAG whose elements are taylor series∎"},
+ {_id: 3, proof: null},
+ {_id: 4},
+]));
+assert.eq(
+ coll.aggregate(
+ [{$sort: {_id: 1}}, {$project: {proof: {$rtrim: {input: "$proof", chars: "∎"}}}}])
+ .toArray(),
+ [
+ {_id: 0, proof: "Left as an exercise for the reader"},
+ {_id: 1, proof: "∎∃ proof"},
{
- _id: 2,
- proof: "Just view the problem as a continuous DAG whose elements are taylor series∎"
- },
- {_id: 3, proof: null},
- {_id: 4},
- ]));
- assert.eq(
- coll.aggregate(
- [{$sort: {_id: 1}}, {$project: {proof: {$rtrim: {input: "$proof", chars: "∎"}}}}])
- .toArray(),
- [
- {_id: 0, proof: "Left as an exercise for the reader"},
- {_id: 1, proof: "∎∃ proof"},
- {
_id: 2,
proof: "Just view the problem as a continuous DAG whose elements are taylor series"
- },
- {_id: 3, proof: null},
- {_id: 4, proof: null},
- ]);
+ },
+ {_id: 3, proof: null},
+ {_id: 4, proof: null},
+ ]);
- // Test that errors are reported correctly.
- assertErrorCode(coll, [{$project: {x: {$trim: " x "}}}], 50696);
- assertErrorCode(coll, [{$project: {x: {$trim: {input: 4}}}}], 50699);
- assertErrorCode(coll, [{$project: {x: {$trim: {input: {$add: [4, 2]}}}}}], 50699);
- assertErrorCode(coll, [{$project: {x: {$trim: {input: "$_id"}}}}], 50699);
- assertErrorCode(coll, [{$project: {x: {$trim: {input: " x ", chars: "$_id"}}}}], 50700);
+// Test that errors are reported correctly.
+assertErrorCode(coll, [{$project: {x: {$trim: " x "}}}], 50696);
+assertErrorCode(coll, [{$project: {x: {$trim: {input: 4}}}}], 50699);
+assertErrorCode(coll, [{$project: {x: {$trim: {input: {$add: [4, 2]}}}}}], 50699);
+assertErrorCode(coll, [{$project: {x: {$trim: {input: "$_id"}}}}], 50699);
+assertErrorCode(coll, [{$project: {x: {$trim: {input: " x ", chars: "$_id"}}}}], 50700);
}());
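
As the collation tests show, "chars" is matched exactly, with no collation applied; listing several characters strips any of them from either end while interior occurrences are left alone. A sketch:

    db.trim_expressions.aggregate([{
        $project: {
            _id: 0,
            // Strips any of space, tab, and comma from both ends; yields "x,y".
            cleaned: {$trim: {input: "\t x,y,\t ", chars: " \t,"}}
        }
    }]);
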
diff --git a/jstests/aggregation/extras/utils.js b/jstests/aggregation/extras/utils.js
index 57f61956792..7364ecde54d 100644
--- a/jstests/aggregation/extras/utils.js
+++ b/jstests/aggregation/extras/utils.js
@@ -179,7 +179,7 @@ function arrayShallowCopy(a) {
* the same documents, although the order need not match and the _id values need not match.
*
* Are non-scalar values references?
-*/
+ */
function resultsEq(rl, rr, verbose = false) {
const debug = msg => verbose ? print(msg) : null; // Helper to log 'msg' iff 'verbose' is true.
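
Per the doc comment above, resultsEq compares two result sets while ignoring both document order and _id values; a minimal usage sketch:

    assert(resultsEq([{a: 1}, {a: 2}], [{a: 2}, {a: 1}]));  // order need not match
    assert(resultsEq([{_id: 1, a: 1}], [{_id: 2, a: 1}]));  // _id values need not match
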
diff --git a/jstests/aggregation/group_conversion_to_distinct_scan.js b/jstests/aggregation/group_conversion_to_distinct_scan.js
index 1982ac581ad..90b25268a7c 100644
--- a/jstests/aggregation/group_conversion_to_distinct_scan.js
+++ b/jstests/aggregation/group_conversion_to_distinct_scan.js
@@ -12,650 +12,650 @@
*/
(function() {
- "use strict";
-
- load("jstests/libs/analyze_plan.js");
-
- let coll = db.group_conversion_to_distinct_scan;
- coll.drop();
-
- // Add test data and indexes. Fields prefixed with "mk" are multikey.
- let indexList = [
- {pattern: {a: 1, b: 1, c: 1}, option: {}},
- {pattern: {mkA: 1, b: 1, c: 1}, option: {}},
- {pattern: {aa: 1, mkB: 1, c: 1}, option: {}},
- {pattern: {aa: 1, bb: 1, c: 1}, option: {}},
- {pattern: {"foo.a": 1, "foo.b": 1}, option: {}},
- {pattern: {"mkFoo.a": 1, "mkFoo.b": 1}, option: {}},
- {pattern: {"foo.a": 1, "mkFoo.b": 1}, option: {}}
- ];
-
- function createIndexes() {
- for (let indexSpec of indexList) {
- assert.commandWorked(coll.createIndex(indexSpec.pattern, indexSpec.option));
- }
+"use strict";
+
+load("jstests/libs/analyze_plan.js");
+
+let coll = db.group_conversion_to_distinct_scan;
+coll.drop();
+
+// Add test data and indexes. Fields prefixed with "mk" are multikey.
+let indexList = [
+ {pattern: {a: 1, b: 1, c: 1}, option: {}},
+ {pattern: {mkA: 1, b: 1, c: 1}, option: {}},
+ {pattern: {aa: 1, mkB: 1, c: 1}, option: {}},
+ {pattern: {aa: 1, bb: 1, c: 1}, option: {}},
+ {pattern: {"foo.a": 1, "foo.b": 1}, option: {}},
+ {pattern: {"mkFoo.a": 1, "mkFoo.b": 1}, option: {}},
+ {pattern: {"foo.a": 1, "mkFoo.b": 1}, option: {}}
+];
+
+function createIndexes() {
+ for (let indexSpec of indexList) {
+ assert.commandWorked(coll.createIndex(indexSpec.pattern, indexSpec.option));
}
- createIndexes();
-
- assert.commandWorked(coll.insert([
- {_id: 0, a: 1, b: 1, c: 1},
- {_id: 1, a: 1, b: 2, c: 2},
- {_id: 2, a: 1, b: 2, c: 3},
- {_id: 3, a: 1, b: 3, c: 2},
- {_id: 4, a: 2, b: 2, c: 2},
- {_id: 5, b: 1, c: 1},
- {_id: 6, a: null, b: 1, c: 1},
-
- {_id: 7, aa: 1, mkB: 2, bb: 2},
- {_id: 8, aa: 1, mkB: [1, 3], bb: 1},
- {_id: 9, aa: 2, mkB: [], bb: 3},
-
- {_id: 10, mkA: 1, c: 3},
- {_id: 11, mkA: [2, 3, 4], c: 3},
- {_id: 12, mkA: 2, c: 2},
- {_id: 13, mkA: 3, c: 4},
-
- {_id: 14, foo: {a: 1, b: 1}, mkFoo: {a: 1, b: 1}},
- {_id: 15, foo: {a: 1, b: 2}, mkFoo: {a: 1, b: 2}},
- {_id: 16, foo: {a: 2, b: 2}, mkFoo: {a: 2, b: 2}},
- {_id: 17, foo: {b: 1}, mkFoo: {b: 1}},
- {_id: 18, foo: {a: null, b: 1}, mkFoo: {a: null, b: 1}},
- {_id: 19, foo: {a: 3}, mkFoo: [{a: 3, b: 4}, {a: 4, b: 3}]},
-
- {_id: 20, str: "foo", d: 1},
- {_id: 21, str: "FoO", d: 2},
- {_id: 22, str: "bar", d: 4},
- {_id: 23, str: "bAr", d: 3}
- ]));
-
- // Helper for dropping an index and removing it from the list of indexes.
- function removeIndex(pattern) {
- assert.commandWorked(coll.dropIndex(pattern));
- indexList = indexList.filter((ix) => bsonWoCompare(ix.pattern, pattern) != 0);
- }
-
- function addIndex(pattern, option) {
- indexList.push({pattern: pattern, option: option});
- assert.commandWorked(coll.createIndex(pattern, option));
- }
-
- // Check that 'pipeline' returns the correct results with and without a hint added to the query.
- // We also test with and without indexes to cover all the possibilities. 'options' holds the
- // options to pass to aggregate() and may be omitted. Similarly, the hint object can be omitted
- // and defaults to a $natural hint.
- function assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, expectedResults, hintObj = {$natural: 1}, options = {}) {
- assert.commandWorked(coll.dropIndexes());
- const resultsNoIndex = coll.aggregate(pipeline, options).toArray();
+}
+createIndexes();
+
+assert.commandWorked(coll.insert([
+ {_id: 0, a: 1, b: 1, c: 1},
+ {_id: 1, a: 1, b: 2, c: 2},
+ {_id: 2, a: 1, b: 2, c: 3},
+ {_id: 3, a: 1, b: 3, c: 2},
+ {_id: 4, a: 2, b: 2, c: 2},
+ {_id: 5, b: 1, c: 1},
+ {_id: 6, a: null, b: 1, c: 1},
+
+ {_id: 7, aa: 1, mkB: 2, bb: 2},
+ {_id: 8, aa: 1, mkB: [1, 3], bb: 1},
+ {_id: 9, aa: 2, mkB: [], bb: 3},
+
+ {_id: 10, mkA: 1, c: 3},
+ {_id: 11, mkA: [2, 3, 4], c: 3},
+ {_id: 12, mkA: 2, c: 2},
+ {_id: 13, mkA: 3, c: 4},
+
+ {_id: 14, foo: {a: 1, b: 1}, mkFoo: {a: 1, b: 1}},
+ {_id: 15, foo: {a: 1, b: 2}, mkFoo: {a: 1, b: 2}},
+ {_id: 16, foo: {a: 2, b: 2}, mkFoo: {a: 2, b: 2}},
+ {_id: 17, foo: {b: 1}, mkFoo: {b: 1}},
+ {_id: 18, foo: {a: null, b: 1}, mkFoo: {a: null, b: 1}},
+ {_id: 19, foo: {a: 3}, mkFoo: [{a: 3, b: 4}, {a: 4, b: 3}]},
+
+ {_id: 20, str: "foo", d: 1},
+ {_id: 21, str: "FoO", d: 2},
+ {_id: 22, str: "bar", d: 4},
+ {_id: 23, str: "bAr", d: 3}
+]));
+
+// Helper for dropping an index and removing it from the list of indexes.
+function removeIndex(pattern) {
+ assert.commandWorked(coll.dropIndex(pattern));
+ indexList = indexList.filter((ix) => bsonWoCompare(ix.pattern, pattern) != 0);
+}
+
+function addIndex(pattern, option) {
+ indexList.push({pattern: pattern, option: option});
+ assert.commandWorked(coll.createIndex(pattern, option));
+}
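
For context, the DISTINCT_SCAN verification pattern applied after each pipeline in the rest of this file (via getAggPlanStage from analyze_plan.js, loaded above) looks like this in minimal form:

    const plan = coll.explain().aggregate([{$group: {_id: "$a"}}]);
    // Expect a DISTINCT_SCAN over the {a: 1, b: 1, c: 1} index and no blocking SORT stage.
    assert.neq(null, getAggPlanStage(plan, "DISTINCT_SCAN"), plan);
    assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(plan, "DISTINCT_SCAN").keyPattern);
    assert.eq(null, getAggPlanStage(plan, "SORT"), plan);
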
+
+// Check that 'pipeline' returns the correct results with and without a hint added to the query.
+// We also test with and without indexes to cover all the possibilities. 'options' holds the
+// options to pass to aggregate() and may be omitted. Similarly, the hint object can be omitted
+// and defaults to a $natural hint.
+function assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
+ expectedResults,
+ hintObj = {
+ $natural: 1
+ },
+ options = {}) {
+ assert.commandWorked(coll.dropIndexes());
+ const resultsNoIndex = coll.aggregate(pipeline, options).toArray();
- createIndexes();
- const resultsWithIndex = coll.aggregate(pipeline, options).toArray();
-
- const passedOptions = Object.assign({}, {hint: hintObj}, options);
- const resultsWithHint = coll.aggregate(pipeline, passedOptions).toArray();
-
- assert.sameMembers(resultsNoIndex, resultsWithIndex);
- assert.sameMembers(resultsWithIndex, resultsWithHint);
- assert.sameMembers(resultsWithHint, expectedResults);
- }
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN when the sort is available from an
- // index.
- //
- let pipeline = [{$sort: {a: 1}}, {$group: {_id: "$a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
- let explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- // Pipelines that use the DISTINCT_SCAN optimization should not also have a blocking sort.
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $group pipeline can use DISTINCT_SCAN even when the user does not specify a
- // sort.
- //
- pipeline = [{$group: {_id: "$a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $group pipeline with a $natural hint does not use DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- explain = coll.explain().aggregate(pipeline, {hint: {$natural: 1}});
- assert.neq(null, getAggPlanStage(explain, "COLLSCAN"), explain);
-
- //
- // Verify that a $group pipeline with a pertinent hint as string does use DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- explain = coll.explain().aggregate(pipeline, {hint: "a_1_b_1_c_1"});
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- //
- // Verify that a $group pipeline with a pertinent hint as an object does use DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- explain = coll.explain().aggregate(pipeline, {hint: {a: 1, b: 1, c: 1}});
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- //
- // Verify that a $group pipeline with a non-pertinent hint does not use DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: 1}, {_id: 2}], {_id: 1});
- explain = coll.explain().aggregate(pipeline, {hint: {_id: 1}});
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), explain);
- assert.eq({_id: 1}, getAggPlanStage(explain, "IXSCAN").keyPattern);
-
- //
- // Verify that a $group pipeline with an index filter still uses DISTINCT_SCAN.
- //
- assert.commandWorked(db.runCommand({
- planCacheSetFilter: coll.getName(),
- query: {},
- projection: {a: 1, _id: 0},
- indexes: ["a_1_b_1_c_1"]
- }));
-
- pipeline = [{$group: {_id: "$a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
-
- //
- // Verify that a $group pipeline with an index filter and $natural hint uses DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- explain = coll.explain().aggregate(pipeline, {hint: {$natural: 1}});
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
-
- //
- // Verify that a $group pipeline with an index filter and non-pertinent hint uses DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: 1}, {_id: 2}], {_id: 1});
- explain = coll.explain().aggregate(pipeline, {hint: {_id: 1}});
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
-
- assert.commandWorked(db.runCommand({planCacheClearFilters: coll.getName()}));
-
- //
- // Verify that a $sort-$group pipeline _does not_ use a DISTINCT_SCAN on a multikey field.
- //
- pipeline = [{$sort: {mkA: 1}}, {$group: {_id: "$mkA"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: [2, 3, 4]}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN when the sort is available from an
- // index and there are $first accumulators.
- //
- pipeline = [{$sort: {a: 1, b: 1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN when a $first accumulator needs the
- // entire document.
- //
- pipeline = [{$sort: {a: -1, b: -1}}, {$group: {_id: "$a", accum: {$first: "$$ROOT"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: {_id: 6, a: null, b: 1, c: 1}},
- {_id: 1, accum: {_id: 3, a: 1, b: 3, c: 2}},
- {_id: 2, accum: {_id: 4, a: 2, b: 2, c: 2}}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern, explain);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN when sorting and grouping by fields
- // with dotted paths.
- //
- pipeline =
- [{$sort: {"foo.a": 1, "foo.b": 1}}, {$group: {_id: "$foo.a", accum: {$first: "$foo.b"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline,
- [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}, {_id: 3, accum: null}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({"foo.a": 1, "foo.b": 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $group pipeline can use DISTINCT_SCAN to group on a dotted path field, even
- // when the user does not specify a sort.
- //
- pipeline = [{$group: {_id: "$foo.a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
- [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that we _do not_ attempt to use a DISTINCT_SCAN on a multikey field.
- //
- pipeline = [{$group: {_id: "$mkA"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: [2, 3, 4]}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that we _do not_ use a DISTINCT_SCAN on a dotted field when the last component
- // is not multikey, but an intermediate component is.
- //
- pipeline = [{$group: {_id: "$mkFoo.a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null},
- {_id: 1},
- {_id: 2},
- {_id: [3, 4]},
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that we _do not_ attempt to use a DISTINCT_SCAN on a multikey dotted-path field when
- // a sort is present.
- //
- pipeline = [
- {$sort: {"mkFoo.a": 1, "mkFoo.b": 1}},
- {$group: {_id: "$mkFoo.a", accum: {$first: "$mkFoo.b"}}}
- ];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: null},
- {_id: 1, accum: 1},
- {_id: 2, accum: 2},
- {_id: [3, 4], accum: [4, 3]}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that we _do not_ attempt a DISTINCT_SCAN to satisfy a sort on a multikey field, even
- // when the field we are grouping by is not multikey.
- //
- pipeline = [{$sort: {aa: 1, mkB: 1}}, {$group: {_id: "$aa", accum: {$first: "$mkB"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: null}, {_id: 1, accum: [1, 3]}, {_id: 2, accum: []}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), tojson(explain));
-
- //
- // Verify that with dotted paths we _do not_ attempt a DISTINCT_SCAN to satisfy a sort on a
- // multikey field, even when the field we are grouping by is not multikey.
- //
- pipeline = [
- {$sort: {"foo.a": 1, "mkFoo.b": 1}},
- {$group: {_id: "$foo.a", accum: {$first: "$mkFoo.b"}}}
- ];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: null},
- {_id: 1, accum: 1},
- {_id: 2, accum: 2},
- {_id: 3, accum: [4, 3]}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that we can use a DISTINCT_SCAN on a multikey index to sort and group on a dotted-path
- // field, so long as the field we are sorting over is not multikey and comes before any multikey
- // fields in the index key pattern.
- //
- // We drop the {"foo.a": 1, "foo.b": 1} to force this test to use the multikey
- // {"foo.a": 1, "mkFoo.b"} index. The rest of the test doesn't use either of those indexes.
- //
- removeIndex({"foo.a": 1, "foo.b": 1});
- pipeline = [{$sort: {"foo.a": 1}}, {$group: {_id: "$foo.a"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
- [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({"foo.a": 1, "mkFoo.b": 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN even when there is a $first
- // accumulator that accesses a multikey field.
- //
- pipeline = [{$sort: {aa: 1, bb: 1}}, {$group: {_id: "$aa", accum: {$first: "$mkB"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: null}, {_id: 1, accum: [1, 3]}, {_id: 2, accum: []}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({aa: 1, bb: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline can use DISTINCT_SCAN even when there is a $first
- // accumulator that includes an expression.
- //
- pipeline =
- [{$sort: {a: 1, b: 1}}, {$group: {_id: "$a", accum: {$first: {$add: ["$b", "$c"]}}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: null}, {_id: 1, accum: 2}, {_id: 2, accum: 4}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $match-$sort-$group pipeline can use a DISTINCT_SCAN to sort and group by a
- // field that is not the first field in a compound index, so long as the previous fields are
- // scanned with equality bounds (i.e., are point queries).
- //
- pipeline = [{$match: {a: 1}}, {$sort: {b: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Same as the previous case but with the sort order matching the index key pattern, so the
- // query planner does not need to infer the availability of a sort on {b: 1} based on the
- // equality bounds for the 'a' field.
- //
- pipeline = [{$match: {a: 1}}, {$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Same as the previous case but with no user-specified sort.
- //
- pipeline = [{$match: {a: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $match-$sort-$group pipeline _does not_ use a DISTINCT_SCAN to sort and group
- // on the second field of an index when there is no equality match on the first field.
- //
- pipeline = [{$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
- [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that a $match-$sort-$limit-$group pipeline _does not_ coalesce the $sort-$limit and
- // then consider the result eligible for the DISTINCT_SCAN optimization.
- //
- // In this example, the {$limit: 3} filters out the document {a: 1, b: 3, c: 2}, which means we
- // don't see a {_id: 3} group. If we instead applied the {$limit: 3} after the $group stage, we
- // would incorrectly list three groups. DISTINCT_SCAN won't work here, because we have to
- // examine each document in order to determine which groups get filtered out by the $limit.
- //
- pipeline = [{$match: {a: 1}}, {$sort: {a: 1, b: 1}}, {$limit: 3}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that an additional $project stage does not lead to incorrect results (although it will
- // preclude the use of the DISTINCT_SCAN optimization).
- //
- pipeline =
- [{$match: {a: 1}}, {$project: {a: 1, b: 1}}, {$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
-
- //
- // Verify that a $sort-$group can use a DISTINCT_SCAN even when the requested sort is the
- // reverse of the index's sort.
- //
- pipeline = [{$sort: {a: -1, b: -1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: 1}, {_id: 1, accum: 3}, {_id: 2, accum: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline _does not_ use DISTINCT_SCAN when there are non-$first
- // accumulators.
- //
- pipeline = [{$sort: {a: 1}}, {$group: {_id: "$a", accum: {$sum: "$b"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: 2}, {_id: 1, accum: 8}, {_id: 2, accum: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- // An index scan is still possible, though.
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), explain);
- assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "IXSCAN").keyPattern);
- assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
-
- //
- // Verify that a $sort-$group pipeline _does not_ use DISTINCT_SCAN when documents are not
- // sorted by the field used for grouping.
- //
- pipeline = [{$sort: {b: 1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- //
- // Verify that a $match-$sort-$group pipeline _does not_ use a DISTINCT_SCAN when the match does
- // not provide equality (point query) bounds for each field before the grouped-by field in the
- // index.
- //
- pipeline = [{$match: {a: {$gt: 0}}}, {$sort: {b: 1}}, {$group: {_id: "$b"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
-
- ////////////////////////////////////////////////////////////////////////////////////////////////
- // We execute all the collation-related tests three times with three different configurations
- // (no index, index without collation, index with collation).
- //
- // Collation tests 1: no index on string field.
- ////////////////////////////////////////////////////////////////////////////////////////////////
-
- const collationOption = {collation: {locale: "en_US", strength: 2}};
-
- //
- // Verify that a $group on an unindexed field uses a collection scan.
- //
- pipeline = [{$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a collated $sort-$group on an unindexed field uses a collection scan.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a $sort-$group pipeline uses a collection scan.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: null},
- {_id: "FoO", accum: 2},
- {_id: "bAr", accum: 3},
- {_id: "bar", accum: 4},
- {_id: "foo", accum: 1}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a collated $sort-$group pipeline with a $first accumulator uses a collection
- // scan.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline,
- [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
- {$natural: 1},
- collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- ////////////////////////////////////////////////////////////////////////////////////////////////
- // Collation tests 2: index on string field with no collation.
- ////////////////////////////////////////////////////////////////////////////////////////////////
-
- addIndex({str: 1, d: 1});
-
- //
- // Verify that a $group uses a DISTINCT_SCAN.
- //
- pipeline = [{$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- //
- // Verify that a $sort-$group pipeline with a collation _does not_ scan the index, which is not
- // aware of the collation.
- //
- // Note that, when using a case-insensitive collation, "bAr" and "bar" will get grouped
- // together, and the decision as to which one will represent the group is arbitrary. The
- // tie-breaking {d: 1} component of the sort forces a specific decision for this aggregation,
- // making this test more reliable.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a $sort-$group pipeline uses a DISTINCT_SCAN.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: null},
- {_id: "FoO", accum: 2},
- {_id: "bAr", accum: 3},
- {_id: "bar", accum: 4},
- {_id: "foo", accum: 1}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- //
- // Verify that a $sort-$group that uses a collation and includes a $first accumulator _does
- // not_ scan the index, which is not aware of the collation.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline,
- [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
- {$natural: 1},
- collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- ////////////////////////////////////////////////////////////////////////////////////////////////
- // Collation tests 3: index on string field with case-insensitive collation.
- ////////////////////////////////////////////////////////////////////////////////////////////////
-
- removeIndex({str: 1, d: 1});
- addIndex({str: 1, d: 1}, collationOption);
-
- //
- // Verify that a $group with no collation _does not_ scan the index, which does have a
- // collation.
- //
- pipeline = [{$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a $sort-$group with a collation uses a DISTINCT_SCAN on the index, which uses a
- // matching collation.
- //
- // Note that, when using a case-insensitive collation, "bAr" and "bar" will get grouped
- // together, and the decision as to which one will represent the group is arbitrary. The
- // tie-breaking {d: 1} component of the sort forces a specific decision for this aggregation,
- // making this test more reliable.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
-
- //
- // Verify that a $sort-$group pipeline with no collation _does not_ scan the index, which does
- // have a collation.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
- {_id: null, accum: null},
- {_id: "FoO", accum: 2},
- {_id: "bAr", accum: 3},
- {_id: "bar", accum: 4},
- {_id: "foo", accum: 1}
- ]);
- explain = coll.explain().aggregate(pipeline);
- assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
-
- //
- // Verify that a $sort-$group pipeline that uses a collation and includes a $first accumulator
- // uses a DISTINCT_SCAN, which uses a matching collation.
- //
- pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
- assertResultsMatchWithAndWithoutHintandIndexes(
- pipeline,
- [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
- {$natural: 1},
- collationOption);
- explain = coll.explain().aggregate(pipeline, collationOption);
- assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
- assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+ createIndexes();
+ const resultsWithIndex = coll.aggregate(pipeline, options).toArray();
+
+ const passedOptions = Object.assign({}, {hint: hintObj}, options);
+ const resultsWithHint = coll.aggregate(pipeline, passedOptions).toArray();
+
+ assert.sameMembers(resultsNoIndex, resultsWithIndex);
+ assert.sameMembers(resultsWithIndex, resultsWithHint);
+ assert.sameMembers(resultsWithHint, expectedResults);
+}
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN when the sort is available from an
+// index.
+//
+let pipeline = [{$sort: {a: 1}}, {$group: {_id: "$a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
+let explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+
+// Pipelines that use the DISTINCT_SCAN optimization should not also have a blocking sort.
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $group pipeline can use DISTINCT_SCAN even when the user does not specify a
+// sort.
+//
+pipeline = [{$group: {_id: "$a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $group pipeline with a $natural hint does not use DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+explain = coll.explain().aggregate(pipeline, {hint: {$natural: 1}});
+assert.neq(null, getAggPlanStage(explain, "COLLSCAN"), explain);
+
+//
+// Verify that a $group pipeline with a pertinent hint given as a string does use DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+explain = coll.explain().aggregate(pipeline, {hint: "a_1_b_1_c_1"});
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+
+//
+// Verify that a $group pipeline with a pertinent hint as an object does use DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+explain = coll.explain().aggregate(pipeline, {hint: {a: 1, b: 1, c: 1}});
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
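+
+// Aside: the string form above addresses the index by its default name, while this object form
+// addresses it by key pattern; both resolve to the same {a: 1, b: 1, c: 1} index.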
+
+//
+// Verify that a $group pipeline with a non-pertinent hint does not use DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: 1}, {_id: 2}], {_id: 1});
+explain = coll.explain().aggregate(pipeline, {hint: {_id: 1}});
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+assert.eq({_id: 1}, getAggPlanStage(explain, "IXSCAN").keyPattern);
+
+//
+// Verify that a $group pipeline with an index filter still uses DISTINCT_SCAN.
+//
+assert.commandWorked(db.runCommand({
+ planCacheSetFilter: coll.getName(),
+ query: {},
+ projection: {a: 1, _id: 0},
+ indexes: ["a_1_b_1_c_1"]
+}));
+
+pipeline = [{$group: {_id: "$a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: null}, {_id: 1}, {_id: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
+
+//
+// Verify that a $group pipeline with an index filter and $natural hint uses DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+explain = coll.explain().aggregate(pipeline, {hint: {$natural: 1}});
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
+
+//
+// Verify that a $group pipeline with an index filter and non-pertinent hint uses DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: 1}, {_id: 2}], {_id: 1});
+explain = coll.explain().aggregate(pipeline, {hint: {_id: 1}});
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(true, explain.stages[0].$cursor.queryPlanner.indexFilterSet);
+
+assert.commandWorked(db.runCommand({planCacheClearFilters: coll.getName()}));
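+
+// Aside (illustrative, not part of the original test): while index filters are active, they
+// can be inspected with the companion planCacheListFilters command, e.g.:
+//   db.runCommand({planCacheListFilters: coll.getName()});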
+
+//
+// Verify that a $sort-$group pipeline _does not_ use a DISTINCT_SCAN on a multikey field.
+//
+pipeline = [{$sort: {mkA: 1}}, {$group: {_id: "$mkA"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: [2, 3, 4]}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
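+
+// Note: 'mkA' is multikey because at least one document stores an array there (the expected
+// results above include the {_id: [2, 3, 4]} group), which marks any index covering 'mkA' as
+// multikey and disqualifies it from DISTINCT_SCAN.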
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN when the sort is available from an
+// index and there are $first accumulators.
+//
+pipeline = [{$sort: {a: 1, b: 1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN when a $first accumulator needs the
+// entire document.
+//
+pipeline = [{$sort: {a: -1, b: -1}}, {$group: {_id: "$a", accum: {$first: "$$ROOT"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null, accum: {_id: 6, a: null, b: 1, c: 1}},
+ {_id: 1, accum: {_id: 3, a: 1, b: 3, c: 2}},
+ {_id: 2, accum: {_id: 4, a: 2, b: 2, c: 2}}
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern, explain);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN when sorting and grouping by fields
+// with dotted paths.
+//
+pipeline =
+ [{$sort: {"foo.a": 1, "foo.b": 1}}, {$group: {_id: "$foo.a", accum: {$first: "$foo.b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline,
+ [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}, {_id: 3, accum: null}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({"foo.a": 1, "foo.b": 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $group pipeline can use DISTINCT_SCAN to group on a dotted path field, even
+// when the user does not specify a sort.
+//
+pipeline = [{$group: {_id: "$foo.a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
+ [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that we _do not_ attempt to use a DISTINCT_SCAN on a multikey field.
+//
+pipeline = [{$group: {_id: "$mkA"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: [2, 3, 4]}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that we _do not_ use a DISTINCT_SCAN on a dotted field when the last component
+// is not multikey, but an intermediate component is.
+//
+pipeline = [{$group: {_id: "$mkFoo.a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null},
+ {_id: 1},
+ {_id: 2},
+ {_id: [3, 4]},
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that we _do not_ attempt to use a DISTINCT_SCAN on a multikey dotted-path field when
+// a sort is present.
+//
+pipeline = [
+ {$sort: {"mkFoo.a": 1, "mkFoo.b": 1}},
+ {$group: {_id: "$mkFoo.a", accum: {$first: "$mkFoo.b"}}}
+];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null, accum: null},
+ {_id: 1, accum: 1},
+ {_id: 2, accum: 2},
+ {_id: [3, 4], accum: [4, 3]}
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that we _do not_ attempt a DISTINCT_SCAN to satisfy a sort on a multikey field, even
+// when the field we are grouping by is not multikey.
+//
+pipeline = [{$sort: {aa: 1, mkB: 1}}, {$group: {_id: "$aa", accum: {$first: "$mkB"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: null}, {_id: 1, accum: [1, 3]}, {_id: 2, accum: []}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), tojson(explain));
+
+//
+// Verify that with dotted paths we _do not_ attempt a DISTINCT_SCAN to satisfy a sort on a
+// multikey field, even when the field we are grouping by is not multikey.
+//
+pipeline =
+ [{$sort: {"foo.a": 1, "mkFoo.b": 1}}, {$group: {_id: "$foo.a", accum: {$first: "$mkFoo.b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline,
+ [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}, {_id: 3, accum: [4, 3]}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that we can use a DISTINCT_SCAN on a multikey index to sort and group on a dotted-path
+// field, so long as the field we are sorting over is not multikey and comes before any multikey
+// fields in the index key pattern.
+//
+// We drop the {"foo.a": 1, "foo.b": 1} to force this test to use the multikey
+// {"foo.a": 1, "mkFoo.b"} index. The rest of the test doesn't use either of those indexes.
+//
+removeIndex({"foo.a": 1, "foo.b": 1});
+pipeline = [{$sort: {"foo.a": 1}}, {$group: {_id: "$foo.a"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
+ [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({"foo.a": 1, "mkFoo.b": 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN even when there is a $first
+// accumulator that accesses a multikey field.
+//
+pipeline = [{$sort: {aa: 1, bb: 1}}, {$group: {_id: "$aa", accum: {$first: "$mkB"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: null}, {_id: 1, accum: [1, 3]}, {_id: 2, accum: []}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({aa: 1, bb: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline can use DISTINCT_SCAN even when there is a $first
+// accumulator that includes an expression.
+//
+pipeline = [{$sort: {a: 1, b: 1}}, {$group: {_id: "$a", accum: {$first: {$add: ["$b", "$c"]}}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: null}, {_id: 1, accum: 2}, {_id: 2, accum: 4}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $match-$sort-$group pipeline can use a DISTINCT_SCAN to sort and group by a
+// field that is not the first field in a compound index, so long as the previous fields are
+// scanned with equality bounds (i.e., are point queries).
+//
+pipeline = [{$match: {a: 1}}, {$sort: {b: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Same as the previous case but with the sort order matching the index key pattern, so the
+// query planner does not need to infer the availability of a sort on {b: 1} based on the
+// equality bounds for the 'a' field.
+//
+pipeline = [{$match: {a: 1}}, {$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Same as the previous case but with no user-specified sort.
+//
+pipeline = [{$match: {a: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $match-$sort-$group pipeline _does not_ use a DISTINCT_SCAN to sort and group
+// on the second field of an index when there is no equality match on the first field.
+//
+pipeline = [{$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline,
+ [{_id: null}, {_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that a $match-$sort-$limit-$group pipeline _does not_ coalesce the $sort-$limit and
+// then consider the result eligible for the DISTINCT_SCAN optimization.
+//
+// In this example, the {$limit: 3} filters out the document {a: 1, b: 3, c: 2}, which means we
+// don't see a {_id: 3} group. If we instead applied the {$limit: 3} after the $group stage, we
+// would incorrectly list three groups. DISTINCT_SCAN won't work here, because we have to
+// examine each document in order to determine which groups get filtered out by the $limit.
+//
+pipeline = [{$match: {a: 1}}, {$sort: {a: 1, b: 1}}, {$limit: 3}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that an additional $project stage does not lead to incorrect results (although it will
+// preclude the use of the DISTINCT_SCAN optimization).
+//
+pipeline =
+ [{$match: {a: 1}}, {$project: {a: 1, b: 1}}, {$sort: {a: 1, b: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
+
+//
+// Verify that a $sort-$group can use a DISTINCT_SCAN even when the requested sort is the
+// reverse of the index's sort.
+//
+pipeline = [{$sort: {a: -1, b: -1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: 1}, {_id: 1, accum: 3}, {_id: 2, accum: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline _does not_ use DISTINCT_SCAN when there are non-$first
+// accumulators.
+//
+pipeline = [{$sort: {a: 1}}, {$group: {_id: "$a", accum: {$sum: "$b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: 2}, {_id: 1, accum: 8}, {_id: 2, accum: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+// An index scan is still possible, though.
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+assert.eq({a: 1, b: 1, c: 1}, getAggPlanStage(explain, "IXSCAN").keyPattern);
+assert.eq(null, getAggPlanStage(explain, "SORT"), explain);
+
+//
+// Verify that a $sort-$group pipeline _does not_ use DISTINCT_SCAN when documents are not
+// sorted by the field used for grouping.
+//
+pipeline = [{$sort: {b: 1}}, {$group: {_id: "$a", accum: {$first: "$b"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null, accum: null}, {_id: 1, accum: 1}, {_id: 2, accum: 2}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+//
+// Verify that a $match-$sort-$group pipeline _does not_ use a DISTINCT_SCAN when the match does
+// not provide equality (point query) bounds for each field before the grouped-by field in the
+// index.
+//
+pipeline = [{$match: {a: {$gt: 0}}}, {$sort: {b: 1}}, {$group: {_id: "$b"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [{_id: 1}, {_id: 2}, {_id: 3}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+
+////////////////////////////////////////////////////////////////////////////////////////////////
+// We execute all the collation-related tests three times with three different configurations
+// (no index, index without collation, index with collation).
+//
+// Collation tests 1: no index on string field.
+////////////////////////////////////////////////////////////////////////////////////////////////
+
+const collationOption = {
+ collation: {locale: "en_US", strength: 2}
+};
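+
+// Note: strength 2 requests a secondary-strength (case-insensitive but diacritic-sensitive)
+// comparison, so "foo" and "FoO" (and likewise "bar" and "bAr") compare equal under this
+// collation.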
+
+//
+// Verify that a $group on an unindexed field uses a collection scan.
+//
+pipeline = [{$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a collated $sort-$group on an unindexed field uses a collection scan.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a $sort-$group pipeline uses a collection scan.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null, accum: null},
+ {_id: "FoO", accum: 2},
+ {_id: "bAr", accum: 3},
+ {_id: "bar", accum: 4},
+ {_id: "foo", accum: 1}
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a collated $sort-$group pipeline with a $first accumulator uses a collection
+// scan.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline,
+ [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
+ {$natural: 1},
+ collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+////////////////////////////////////////////////////////////////////////////////////////////////
+// Collation tests 2: index on string field with no collation.
+////////////////////////////////////////////////////////////////////////////////////////////////
+
+addIndex({str: 1, d: 1});
+
+//
+// Verify that a $group uses a DISTINCT_SCAN.
+//
+pipeline = [{$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+
+//
+// Verify that a $sort-$group pipeline with a collation _does not_ scan the index, which is not
+// aware of the collation.
+//
+// Note that, when using a case-insensitive collation, "bAr" and "bar" will get grouped
+// together, and the decision as to which one will represent the group is arbitrary. The
+// tie-breaking {d: 1} component of the sort forces a specific decision for this aggregation,
+// making this test more reliable.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a $sort-$group pipeline uses a DISTINCT_SCAN.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null, accum: null},
+ {_id: "FoO", accum: 2},
+ {_id: "bAr", accum: 3},
+ {_id: "bar", accum: 4},
+ {_id: "foo", accum: 1}
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+
+//
+// Verify that a $sort-$group that uses a collation and includes a $first accumulator _does
+// not_ scan the index, which is not aware of the collation.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline,
+ [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
+ {$natural: 1},
+ collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+////////////////////////////////////////////////////////////////////////////////////////////////
+// Collation tests 3: index on string field with case-insensitive collation.
+////////////////////////////////////////////////////////////////////////////////////////////////
+
+removeIndex({str: 1, d: 1});
+addIndex({str: 1, d: 1}, collationOption);
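+
+// Aside (a sketch, assuming the standard createIndex options): outside the addIndex helper,
+// an equivalent index could be created directly with
+//   coll.createIndex({str: 1, d: 1}, {collation: {locale: "en_US", strength: 2}});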
+
+//
+// Verify that a $group with no collation _does not_ scan the index, which does have a
+// collation.
+//
+pipeline = [{$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "FoO"}, {_id: "bAr"}, {_id: "bar"}, {_id: "foo"}]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a $sort-$group with a collation uses a DISTINCT_SCAN on the index, which uses a
+// matching collation.
+//
+// Note that, when using a case-insensitive collation, "bAr" and "bar" will get grouped
+// together, and the decision as to which one will represent the group is arbitrary. The
+// tie-breaking {d: 1} component of the sort forces a specific decision for this aggregation,
+// making this test more reliable.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str"}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline, [{_id: null}, {_id: "bAr"}, {_id: "foo"}], {$natural: 1}, collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
+
+//
+// Verify that a $sort-$group pipeline with no collation _does not_ scan the index, which does
+// have a collation.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(pipeline, [
+ {_id: null, accum: null},
+ {_id: "FoO", accum: 2},
+ {_id: "bAr", accum: 3},
+ {_id: "bar", accum: 4},
+ {_id: "foo", accum: 1}
+]);
+explain = coll.explain().aggregate(pipeline);
+assert.eq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq(null, getAggPlanStage(explain, "IXSCAN"), explain);
+
+//
+// Verify that a $sort-$group pipeline that uses a collation and includes a $first accumulator
+// uses a DISTINCT_SCAN, which uses a matching collation.
+//
+pipeline = [{$sort: {str: 1, d: 1}}, {$group: {_id: "$str", accum: {$first: "$d"}}}];
+assertResultsMatchWithAndWithoutHintandIndexes(
+ pipeline,
+ [{_id: null, accum: null}, {_id: "bAr", accum: 3}, {_id: "foo", accum: 1}],
+ {$natural: 1},
+ collationOption);
+explain = coll.explain().aggregate(pipeline, collationOption);
+assert.neq(null, getAggPlanStage(explain, "DISTINCT_SCAN"), explain);
+assert.eq({str: 1, d: 1}, getAggPlanStage(explain, "DISTINCT_SCAN").keyPattern);
}());
diff --git a/jstests/aggregation/illegal_reference_in_match.js b/jstests/aggregation/illegal_reference_in_match.js
index 7ef6c904406..eca2df0009c 100644
--- a/jstests/aggregation/illegal_reference_in_match.js
+++ b/jstests/aggregation/illegal_reference_in_match.js
@@ -2,35 +2,35 @@
// illegal inside the aggregation system is used in a $match that is not pushed down to the query
// system, the correct error is raised.
(function() {
- "use strict";
+"use strict";
- const coll = db.illegal_reference_in_match;
- assert.commandWorked(coll.insert({a: 1}));
+const coll = db.illegal_reference_in_match;
+assert.commandWorked(coll.insert({a: 1}));
- const pipeline = [
- // The limit stage prevents the planner from pushing the match into the query layer.
- {$limit: 10},
+const pipeline = [
+ // The limit stage prevents the planner from pushing the match into the query layer.
+ {$limit: 10},
- // 'a.$c' is an illegal path in the aggregation system (though it is legal in the query
- // system). The $limit above forces this $match to run as an aggregation stage, so the path
- // will be interpreted as illegal.
- {$match: {"a.$c": 4}},
+ // 'a.$c' is an illegal path in the aggregation system (though it is legal in the query
+ // system). The $limit above forces this $match to run as an aggregation stage, so the path
+ // will be interpreted as illegal.
+ {$match: {"a.$c": 4}},
- // This inclusion-projection allows the planner to determine that the only necessary fields
- // we need to fetch from the document are "_id" (by default), "a.$c" (since we do a match
- // on it) and "dummy" since we include/rename it as part of this $project.
+ // This inclusion-projection allows the planner to determine that the only necessary fields
+ // we need to fetch from the document are "_id" (by default), "a.$c" (since we do a match
+ // on it) and "dummy" since we include/rename it as part of this $project.
- // The reason we need to explicitly include a "dummy" field, rather than just including
- // "a.$c" is that, as mentioned before, a.$c is an illegal path in the aggregation system,
- // so if we use it as part of the project, the $project will fail to parse (and the
- // relevant code will not be exercised).
- {
- $project: {
- "newAndUnrelatedField": "$dummy",
- }
+ // The reason we need to explicitly include a "dummy" field, rather than just including
+ // "a.$c" is that, as mentioned before, a.$c is an illegal path in the aggregation system,
+ // so if we use it as part of the project, the $project will fail to parse (and the
+ // relevant code will not be exercised).
+ {
+ $project: {
+ "newAndUnrelatedField": "$dummy",
}
- ];
+ }
+];
- const err = assert.throws(() => coll.aggregate(pipeline));
- assert.eq(err.code, 16410);
+const err = assert.throws(() => coll.aggregate(pipeline));
+assert.eq(err.code, 16410);
})();
diff --git a/jstests/aggregation/match_swapping_renamed_fields.js b/jstests/aggregation/match_swapping_renamed_fields.js
index 92340a868cb..e537f249454 100644
--- a/jstests/aggregation/match_swapping_renamed_fields.js
+++ b/jstests/aggregation/match_swapping_renamed_fields.js
@@ -4,112 +4,105 @@
* @tags: [do_not_wrap_aggregations_in_facets]
*/
(function() {
- "use strict";
-
- load("jstests/libs/analyze_plan.js");
-
- let coll = db.match_swapping_renamed_fields;
- coll.drop();
-
- assert.writeOK(coll.insert([{a: 1, b: 1, c: 1}, {a: 2, b: 2, c: 2}, {a: 3, b: 3, c: 3}]));
- assert.commandWorked(coll.createIndex({a: 1}));
-
- // Test that a $match can result in index usage after moving past a field renamed by $project.
- let pipeline = [{$project: {_id: 0, z: "$a", c: 1}}, {$match: {z: {$gt: 1}}}];
- assert.eq(2, coll.aggregate(pipeline).itcount());
- let explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
-
- // Test that a $match can result in index usage after moving past a field renamed by $addFields.
- pipeline = [{$addFields: {z: "$a"}}, {$match: {z: {$gt: 1}}}];
- assert.eq(2, coll.aggregate(pipeline).itcount());
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
-
- // Test that a $match with $type can result in index usage after moving past a field renamed by
- // $project.
- pipeline = [{$project: {_id: 0, z: "$a", c: 1}}, {$match: {z: {$type: "number"}}}];
- assert.eq(3, coll.aggregate(pipeline).itcount());
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
-
- // Test that a partially dependent match can split, with a rename applied, resulting in index
- // usage.
- pipeline =
- [{$project: {z: "$a", zz: {$sum: ["$a", "$b"]}}}, {$match: {z: {$gt: 1}, zz: {$lt: 5}}}];
- assert.eq(1, coll.aggregate(pipeline).itcount());
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
-
- // Test that a match can swap past several renames, resulting in index usage.
- pipeline = [
- {$project: {d: "$a"}},
- {$addFields: {e: "$$CURRENT.d"}},
- {$project: {f: "$$ROOT.e"}},
- {$match: {f: {$gt: 1}}}
- ];
- assert.eq(2, coll.aggregate(pipeline).itcount());
- explain = coll.explain().aggregate(pipeline);
- assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}]}));
- assert.writeOK(coll.insert({_id: 1, a: [{b: 3, c: 3}, {b: 4, c: 4}]}));
- assert.commandWorked(coll.createIndex({"a.b": 1, "a.c": 1}));
-
- // Test that a $match can result in index usage after moving past a dotted array path renamed by
- // a $map inside a $project.
- pipeline = [
- {$project: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: "$$iter.c"}}}}},
- {$match: {"d.e": 1, "d.f": 2}}
- ];
- assert.eq([{_id: 0, d: [{e: 1, f: 1}, {e: 2, f: 2}]}], coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- let ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- // Test that a $match can result in index usage after moving past a dotted array path renamed by
- // a $map inside an $addFields. This time the match expression is partially dependent and should
- // get split.
- pipeline = [
- {
- $addFields:
- {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: "$$iter.c"}}}, g: 2}
- },
- {$match: {"d.e": 1, g: 2}}
- ];
- assert.eq([{_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}], d: [{e: 1, f: 1}, {e: 2, f: 2}], g: 2}],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- // Test that match swapping behaves correctly when a $map contains a rename but also computes a
- // new field.
- pipeline = [
- {
- $addFields:
- {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: {$literal: 99}}}}}
- },
- {$match: {"d.e": 1, "d.f": 99}}
- ];
- assert.eq([{_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}], d: [{e: 1, f: 99}, {e: 2, f: 99}]}],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- coll.drop();
- assert.writeOK(coll.insert({_id: 0, a: [{b: [{c: 1}, {c: 2}]}, {b: [{c: 3}, {c: 4}]}]}));
- assert.writeOK(coll.insert({_id: 1, a: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}]}));
- assert.commandWorked(coll.createIndex({"a.b.c": 1}));
-
- // Test that a $match can result in index usage by moving past a rename of a field inside
- // two-levels of arrays. The rename is expressed using nested $map inside a $project.
- pipeline = [
+"use strict";
+
+load("jstests/libs/analyze_plan.js");
+
+let coll = db.match_swapping_renamed_fields;
+coll.drop();
+
+assert.writeOK(coll.insert([{a: 1, b: 1, c: 1}, {a: 2, b: 2, c: 2}, {a: 3, b: 3, c: 3}]));
+assert.commandWorked(coll.createIndex({a: 1}));
+
+// Test that a $match can result in index usage after moving past a field renamed by $project.
+let pipeline = [{$project: {_id: 0, z: "$a", c: 1}}, {$match: {z: {$gt: 1}}}];
+assert.eq(2, coll.aggregate(pipeline).itcount());
+let explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
+
+// Test that a $match can result in index usage after moving past a field renamed by $addFields.
+pipeline = [{$addFields: {z: "$a"}}, {$match: {z: {$gt: 1}}}];
+assert.eq(2, coll.aggregate(pipeline).itcount());
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
+
+// Test that a $match with $type can result in index usage after moving past a field renamed by
+// $project.
+pipeline = [{$project: {_id: 0, z: "$a", c: 1}}, {$match: {z: {$type: "number"}}}];
+assert.eq(3, coll.aggregate(pipeline).itcount());
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
+
+// Test that a partially dependent match can split, with a rename applied, resulting in index
+// usage.
+pipeline = [{$project: {z: "$a", zz: {$sum: ["$a", "$b"]}}}, {$match: {z: {$gt: 1}, zz: {$lt: 5}}}];
+assert.eq(1, coll.aggregate(pipeline).itcount());
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
+
+// Test that a match can swap past several renames, resulting in index usage.
+pipeline = [
+ {$project: {d: "$a"}},
+ {$addFields: {e: "$$CURRENT.d"}},
+ {$project: {f: "$$ROOT.e"}},
+ {$match: {f: {$gt: 1}}}
+];
+assert.eq(2, coll.aggregate(pipeline).itcount());
+explain = coll.explain().aggregate(pipeline);
+assert.neq(null, getAggPlanStage(explain, "IXSCAN"), tojson(explain));
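+// Note on the variable syntax above: in a top-level aggregation expression, "$$CURRENT.d" and
+// "$$ROOT.e" are interchangeable with the plain field paths "$d" and "$e", so the three renaming
+// stages are equivalent to this simpler chain (an illustrative sketch, not part of the test):
+//
+//   [{$project: {d: "$a"}}, {$addFields: {e: "$d"}}, {$project: {f: "$e"}},
+//    {$match: {f: {$gt: 1}}}]
+//
+// Either form lets the optimizer rewrite the $match to a predicate on "a" that uses the index.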
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}]}));
+assert.writeOK(coll.insert({_id: 1, a: [{b: 3, c: 3}, {b: 4, c: 4}]}));
+assert.commandWorked(coll.createIndex({"a.b": 1, "a.c": 1}));
+
+// Test that a $match can result in index usage after moving past a dotted array path renamed by
+// a $map inside a $project.
+pipeline = [
+ {$project: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: "$$iter.c"}}}}},
+ {$match: {"d.e": 1, "d.f": 2}}
+];
+assert.eq([{_id: 0, d: [{e: 1, f: 1}, {e: 2, f: 2}]}], coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+let ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test that a $match can result in index usage after moving past a dotted array path renamed by
+// a $map inside an $addFields. This time the match expression is partially dependent and should
+// get split.
+pipeline = [
+ {$addFields: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: "$$iter.c"}}}, g: 2}},
+ {$match: {"d.e": 1, g: 2}}
+];
+assert.eq([{_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}], d: [{e: 1, f: 1}, {e: 2, f: 2}], g: 2}],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test that match swapping behaves correctly when a $map contains a rename but also computes a
+// new field.
+pipeline = [
+ {$addFields: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b", f: {$literal: 99}}}}}},
+ {$match: {"d.e": 1, "d.f": 99}}
+];
+assert.eq([{_id: 0, a: [{b: 1, c: 1}, {b: 2, c: 2}], d: [{e: 1, f: 99}, {e: 2, f: 99}]}],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b": 1, "a.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+coll.drop();
+assert.writeOK(coll.insert({_id: 0, a: [{b: [{c: 1}, {c: 2}]}, {b: [{c: 3}, {c: 4}]}]}));
+assert.writeOK(coll.insert({_id: 1, a: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}]}));
+assert.commandWorked(coll.createIndex({"a.b.c": 1}));
+
+// Test that a $match can result in index usage by moving past a rename of a field inside
+// two levels of arrays. The rename is expressed using a nested $map inside a $project.
+pipeline = [
{
$project: {
d: {
@@ -131,16 +124,16 @@
},
{$match: {"d.e.f": 7}}
];
- assert.eq([{_id: 1, d: [{e: [{f: 5}, {f: 6}]}, {e: [{f: 7}, {f: 8}]}]}],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
-    // Test that a $match can result in index usage by moving past a rename of a field inside
-    // two levels of arrays. The rename is expressed using a nested $map inside an $addFields.
- pipeline = [
+assert.eq([{_id: 1, d: [{e: [{f: 5}, {f: 6}]}, {e: [{f: 7}, {f: 8}]}]}],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test that a $match can result in index usage by moving past a rename of a field inside
+// two levels of arrays. The rename is expressed using a nested $map inside an $addFields.
+pipeline = [
{
$addFields: {
d: {
@@ -162,53 +155,50 @@
},
{$match: {"d.b.c": 7}}
];
- assert.eq([{
- _id: 1,
- a: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}],
- d: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}]
- }],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- // Test that we correctly match on the subfield of a renamed field. Here, a match on "x.b.c"
- // follows an "a" to "x" rename. When we move the match stage in front of the rename, the match
- // should also get rewritten to use "a.b.c" as its filter.
- pipeline = [{$project: {x: "$a"}}, {$match: {"x.b.c": 1}}];
- assert.eq([{_id: 0, x: [{b: [{c: 1}, {c: 2}]}, {b: [{c: 3}, {c: 4}]}]}],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- // Test that we correctly match on the subfield of a renamed field when the rename results from
- // a $map operation. Here, a match on "d.e.c" follows an "a.b" to "d.e" rename. When we move the
- // match stage in front of the renaming $map operation, the match should also get rewritten to
- // use "a.b.c" as its filter.
- pipeline = [
- {$project: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b"}}}}},
- {$match: {"d.e.c": 7}}
- ];
- assert.eq([{_id: 1, d: [{e: [{c: 5}, {c: 6}]}, {e: [{c: 7}, {c: 8}]}]}],
- coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
-
- // Test multiple renames. Designed to reproduce SERVER-32690.
- pipeline = [
- {$_internalInhibitOptimization: {}},
- {$project: {x: "$x", y: "$x"}},
- {$match: {y: 1, w: 1}}
- ];
- assert.eq([], coll.aggregate(pipeline).toArray());
- explain = coll.explain().aggregate(pipeline);
- // We expect that the $match stage has been split into two, since one predicate has an
- // applicable rename that allows swapping, while the other does not.
- let matchStages = getAggPlanStages(explain, "$match");
- assert.eq(2, matchStages.length);
+assert.eq([{
+ _id: 1,
+ a: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}],
+ d: [{b: [{c: 5}, {c: 6}]}, {b: [{c: 7}, {c: 8}]}]
+ }],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test that we correctly match on the subfield of a renamed field. Here, a match on "x.b.c"
+// follows an "a" to "x" rename. When we move the match stage in front of the rename, the match
+// should also get rewritten to use "a.b.c" as its filter.
+pipeline = [{$project: {x: "$a"}}, {$match: {"x.b.c": 1}}];
+assert.eq([{_id: 0, x: [{b: [{c: 1}, {c: 2}]}, {b: [{c: 3}, {c: 4}]}]}],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test that we correctly match on the subfield of a renamed field when the rename results from
+// a $map operation. Here, a match on "d.e.c" follows an "a.b" to "d.e" rename. When we move the
+// match stage in front of the renaming $map operation, the match should also get rewritten to
+// use "a.b.c" as its filter.
+pipeline = [
+ {$project: {d: {$map: {input: "$a", as: "iter", in : {e: "$$iter.b"}}}}},
+ {$match: {"d.e.c": 7}}
+];
+assert.eq([{_id: 1, d: [{e: [{c: 5}, {c: 6}]}, {e: [{c: 7}, {c: 8}]}]}],
+ coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+ixscan = getAggPlanStage(explain, "IXSCAN");
+assert.neq(null, ixscan, tojson(explain));
+assert.eq({"a.b.c": 1}, ixscan.keyPattern, tojson(ixscan));
+
+// Test multiple renames. Designed to reproduce SERVER-32690.
+pipeline =
+ [{$_internalInhibitOptimization: {}}, {$project: {x: "$x", y: "$x"}}, {$match: {y: 1, w: 1}}];
+assert.eq([], coll.aggregate(pipeline).toArray());
+explain = coll.explain().aggregate(pipeline);
+// We expect that the $match stage has been split into two, since one predicate has an
+// applicable rename that allows swapping, while the other does not.
+let matchStages = getAggPlanStages(explain, "$match");
+assert.eq(2, matchStages.length);
}());
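The pattern exercised throughout this file (rename a field, match on the new name, then confirm
via explain that the predicate reached an index scan) can be reproduced by hand in the shell. A
minimal sketch, assuming a hypothetical scratch collection and the analyze_plan.js helpers loaded
as in the test above:

    load("jstests/libs/analyze_plan.js");
    const demo = db.rename_pushdown_demo;  // hypothetical scratch collection
    demo.drop();
    assert.writeOK(demo.insert({a: 5}));
    assert.commandWorked(demo.createIndex({a: 1}));
    // The $match on the renamed field "z" should be rewritten to a $match on "a",
    // so the plan should contain an IXSCAN rather than a bare COLLSCAN.
    const ex = demo.explain().aggregate([{$project: {z: "$a"}}, {$match: {z: 5}}]);
    assert.neq(null, getAggPlanStage(ex, "IXSCAN"), tojson(ex));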
diff --git a/jstests/aggregation/mongos_merge.js b/jstests/aggregation/mongos_merge.js
index 67a6d433312..f6bbeea0122 100644
--- a/jstests/aggregation/mongos_merge.js
+++ b/jstests/aggregation/mongos_merge.js
@@ -18,226 +18,221 @@
*/
(function() {
- load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
- load('jstests/libs/geo_near_random.js'); // For GeoNearRandomTest.
- load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
- load("jstests/libs/discover_topology.js"); // For findDataBearingNodes.
-
- const st = new ShardingTest({shards: 2, mongos: 1, config: 1});
-
- const mongosDB = st.s0.getDB(jsTestName());
- const mongosColl = mongosDB[jsTestName()];
- const unshardedColl = mongosDB[jsTestName() + "_unsharded"];
-
- const shard0DB = primaryShardDB = st.shard0.getDB(jsTestName());
- const shard1DB = st.shard1.getDB(jsTestName());
-
- assert.commandWorked(mongosDB.dropDatabase());
-
-    // Pipelines which cannot merge on mongoS are always merged on the primary shard instead, so
-    // we know where to check for $mergeCursors.
- assert.commandWorked(
- mongosDB.adminCommand({setParameter: 1, internalQueryAlwaysMergeOnPrimaryShard: true}));
-
- // Enable sharding on the test DB and ensure its primary is shard0.
- assert.commandWorked(mongosDB.adminCommand({enableSharding: mongosDB.getName()}));
- st.ensurePrimaryShard(mongosDB.getName(), st.shard0.shardName);
-
- // Shard the test collection on _id.
- assert.commandWorked(
- mongosDB.adminCommand({shardCollection: mongosColl.getFullName(), key: {_id: 1}}));
-
- // We will need to test $geoNear on this collection, so create a 2dsphere index.
- assert.commandWorked(mongosColl.createIndex({geo: "2dsphere"}));
-
- // We will test that $textScore metadata is not propagated to the user, so create a text index.
- assert.commandWorked(mongosColl.createIndex({text: "text"}));
-
- // Split the collection into 4 chunks: [MinKey, -100), [-100, 0), [0, 100), [100, MaxKey).
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: -100}}));
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 0}}));
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 100}}));
-
- // Move the [0, 100) and [100, MaxKey) chunks to shard1.
- assert.commandWorked(mongosDB.adminCommand(
- {moveChunk: mongosColl.getFullName(), find: {_id: 50}, to: st.shard1.shardName}));
- assert.commandWorked(mongosDB.adminCommand(
- {moveChunk: mongosColl.getFullName(), find: {_id: 150}, to: st.shard1.shardName}));
-
-    // Create a random geo coordinate generator for testing.
- var georng = new GeoNearRandomTest(mongosColl);
-
- // Write 400 documents across the 4 chunks.
- for (let i = -200; i < 200; i++) {
- assert.writeOK(mongosColl.insert(
- {_id: i, a: [i], b: {redactThisDoc: true}, c: true, geo: georng.mkPt(), text: "txt"}));
- assert.writeOK(unshardedColl.insert({_id: i, x: i}));
- }
+load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
+load('jstests/libs/geo_near_random.js'); // For GeoNearRandomTest.
+load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
+load("jstests/libs/discover_topology.js"); // For findDataBearingNodes.
- let testNameHistory = new Set();
+const st = new ShardingTest({shards: 2, mongos: 1, config: 1});
- // Clears system.profile and restarts the profiler on the primary shard. We enable profiling to
- // verify that no $mergeCursors occur during tests where we expect the merge to run on mongoS.
- function startProfiling() {
- assert.commandWorked(primaryShardDB.setProfilingLevel(0));
- primaryShardDB.system.profile.drop();
- assert.commandWorked(primaryShardDB.setProfilingLevel(2));
- }
+const mongosDB = st.s0.getDB(jsTestName());
+const mongosColl = mongosDB[jsTestName()];
+const unshardedColl = mongosDB[jsTestName() + "_unsharded"];
- /**
- * Runs the aggregation specified by 'pipeline', verifying that:
- * - The number of documents returned by the aggregation matches 'expectedCount'.
- * - The merge was performed on a mongoS if 'mergeType' is 'mongos', and on a shard otherwise.
- */
- function assertMergeBehaviour(
- {testName, pipeline, mergeType, batchSize, allowDiskUse, expectedCount}) {
- // Ensure that this test has a unique name.
- assert(!testNameHistory.has(testName));
- testNameHistory.add(testName);
-
- // Create the aggregation options from the given arguments.
- const opts = {
- comment: testName,
- cursor: (batchSize ? {batchSize: batchSize} : {}),
- };
-
- if (allowDiskUse !== undefined) {
- opts.allowDiskUse = allowDiskUse;
- }
+const shard0DB = primaryShardDB = st.shard0.getDB(jsTestName());
+const shard1DB = st.shard1.getDB(jsTestName());
- // Verify that the explain() output's 'mergeType' field matches our expectation.
- assert.eq(
- assert.commandWorked(mongosColl.explain().aggregate(pipeline, Object.extend({}, opts)))
- .mergeType,
- mergeType);
-
- // Verify that the aggregation returns the expected number of results.
- assert.eq(mongosColl.aggregate(pipeline, opts).itcount(), expectedCount);
-
- // Verify that a $mergeCursors aggregation ran on the primary shard if 'mergeType' is not
- // 'mongos', and that no such aggregation ran otherwise.
- profilerHasNumMatchingEntriesOrThrow({
- profileDB: primaryShardDB,
- numExpectedMatches: (mergeType === "mongos" ? 0 : 1),
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: 1}
- }
- });
- }
+assert.commandWorked(mongosDB.dropDatabase());
- /**
- * Throws an assertion if the aggregation specified by 'pipeline' does not produce
- * 'expectedCount' results, or if the merge phase is not performed on the mongoS.
- */
- function assertMergeOnMongoS({testName, pipeline, batchSize, allowDiskUse, expectedCount}) {
- assertMergeBehaviour({
- testName: testName,
- pipeline: pipeline,
- mergeType: "mongos",
- batchSize: (batchSize || 10),
- allowDiskUse: allowDiskUse,
- expectedCount: expectedCount
- });
- }
+// Pipelines which cannot merge on mongoS are always merged on the primary shard instead, so we
+// know where to check for $mergeCursors.
+assert.commandWorked(
+ mongosDB.adminCommand({setParameter: 1, internalQueryAlwaysMergeOnPrimaryShard: true}));
- /**
- * Throws an assertion if the aggregation specified by 'pipeline' does not produce
- * 'expectedCount' results, or if the merge phase was not performed on a shard.
- */
- function assertMergeOnMongoD(
- {testName, pipeline, mergeType, batchSize, allowDiskUse, expectedCount}) {
- assertMergeBehaviour({
- testName: testName,
- pipeline: pipeline,
- mergeType: (mergeType || "anyShard"),
- batchSize: (batchSize || 10),
- allowDiskUse: allowDiskUse,
- expectedCount: expectedCount
- });
+// Enable sharding on the test DB and ensure its primary is shard0.
+assert.commandWorked(mongosDB.adminCommand({enableSharding: mongosDB.getName()}));
+st.ensurePrimaryShard(mongosDB.getName(), st.shard0.shardName);
+
+// Shard the test collection on _id.
+assert.commandWorked(
+ mongosDB.adminCommand({shardCollection: mongosColl.getFullName(), key: {_id: 1}}));
+
+// We will need to test $geoNear on this collection, so create a 2dsphere index.
+assert.commandWorked(mongosColl.createIndex({geo: "2dsphere"}));
+
+// We will test that $textScore metadata is not propagated to the user, so create a text index.
+assert.commandWorked(mongosColl.createIndex({text: "text"}));
+
+// Split the collection into 4 chunks: [MinKey, -100), [-100, 0), [0, 100), [100, MaxKey).
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: -100}}));
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 0}}));
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 100}}));
+
+// Move the [0, 100) and [100, MaxKey) chunks to shard1.
+assert.commandWorked(mongosDB.adminCommand(
+ {moveChunk: mongosColl.getFullName(), find: {_id: 50}, to: st.shard1.shardName}));
+assert.commandWorked(mongosDB.adminCommand(
+ {moveChunk: mongosColl.getFullName(), find: {_id: 150}, to: st.shard1.shardName}));
+
+// Create a random geo coordinate generator for testing.
+var georng = new GeoNearRandomTest(mongosColl);
+
+// Write 400 documents across the 4 chunks.
+for (let i = -200; i < 200; i++) {
+ assert.writeOK(mongosColl.insert(
+ {_id: i, a: [i], b: {redactThisDoc: true}, c: true, geo: georng.mkPt(), text: "txt"}));
+ assert.writeOK(unshardedColl.insert({_id: i, x: i}));
+}
+
+let testNameHistory = new Set();
+
+// Clears system.profile and restarts the profiler on the primary shard. We enable profiling to
+// verify that no $mergeCursors occur during tests where we expect the merge to run on mongoS.
+function startProfiling() {
+ assert.commandWorked(primaryShardDB.setProfilingLevel(0));
+ primaryShardDB.system.profile.drop();
+ assert.commandWorked(primaryShardDB.setProfilingLevel(2));
+}
+
+/**
+ * Runs the aggregation specified by 'pipeline', verifying that:
+ * - The number of documents returned by the aggregation matches 'expectedCount'.
+ * - The merge was performed on a mongoS if 'mergeType' is 'mongos', and on a shard otherwise.
+ */
+function assertMergeBehaviour(
+ {testName, pipeline, mergeType, batchSize, allowDiskUse, expectedCount}) {
+ // Ensure that this test has a unique name.
+ assert(!testNameHistory.has(testName));
+ testNameHistory.add(testName);
+
+ // Create the aggregation options from the given arguments.
+ const opts = {
+ comment: testName,
+ cursor: (batchSize ? {batchSize: batchSize} : {}),
+ };
+
+ if (allowDiskUse !== undefined) {
+ opts.allowDiskUse = allowDiskUse;
}
- /**
- * Runs a series of test cases which will consistently merge on mongoS or mongoD regardless of
- * whether 'allowDiskUse' is true, false or omitted.
- */
- function runTestCasesWhoseMergeLocationIsConsistentRegardlessOfAllowDiskUse(allowDiskUse) {
- // Test that a $match pipeline with an empty merge stage is merged on mongoS.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_match_only",
- pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 400
- });
+ // Verify that the explain() output's 'mergeType' field matches our expectation.
+ assert.eq(
+ assert.commandWorked(mongosColl.explain().aggregate(pipeline, Object.extend({}, opts)))
+ .mergeType,
+ mergeType);
+
+ // Verify that the aggregation returns the expected number of results.
+ assert.eq(mongosColl.aggregate(pipeline, opts).itcount(), expectedCount);
+
+ // Verify that a $mergeCursors aggregation ran on the primary shard if 'mergeType' is not
+ // 'mongos', and that no such aggregation ran otherwise.
+ profilerHasNumMatchingEntriesOrThrow({
+ profileDB: primaryShardDB,
+ numExpectedMatches: (mergeType === "mongos" ? 0 : 1),
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: 1}
+ }
+ });
+}
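+// A representative direct invocation, for illustration only (the wrappers defined below normally
+// supply 'mergeType'; the test name here is hypothetical and must be unique within a run):
+//
+//   assertMergeBehaviour({
+//       testName: "example_merge_check",
+//       pipeline: [{$match: {_id: {$gte: 0}}}],
+//       mergeType: "mongos",
+//       batchSize: 10,
+//       expectedCount: 200
+//   });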
- // Test that a $sort stage which merges pre-sorted streams is run on mongoS.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_sort_presorted",
- pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 400
- });
+/**
+ * Throws an assertion if the aggregation specified by 'pipeline' does not produce
+ * 'expectedCount' results, or if the merge phase is not performed on the mongoS.
+ */
+function assertMergeOnMongoS({testName, pipeline, batchSize, allowDiskUse, expectedCount}) {
+ assertMergeBehaviour({
+ testName: testName,
+ pipeline: pipeline,
+ mergeType: "mongos",
+ batchSize: (batchSize || 10),
+ allowDiskUse: allowDiskUse,
+ expectedCount: expectedCount
+ });
+}
- // Test that $skip is merged on mongoS.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_skip",
- pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}, {$skip: 300}],
- allowDiskUse: allowDiskUse,
- expectedCount: 100
- });
+/**
+ * Throws an assertion if the aggregation specified by 'pipeline' does not produce
+ * 'expectedCount' results, or if the merge phase was not performed on a shard.
+ */
+function assertMergeOnMongoD(
+ {testName, pipeline, mergeType, batchSize, allowDiskUse, expectedCount}) {
+ assertMergeBehaviour({
+ testName: testName,
+ pipeline: pipeline,
+ mergeType: (mergeType || "anyShard"),
+ batchSize: (batchSize || 10),
+ allowDiskUse: allowDiskUse,
+ expectedCount: expectedCount
+ });
+}
- // Test that $limit is merged on mongoS.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_limit",
- pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$limit: 300}],
- allowDiskUse: allowDiskUse,
- expectedCount: 300
- });
+/**
+ * Runs a series of test cases which will consistently merge on mongoS or mongoD regardless of
+ * whether 'allowDiskUse' is true, false or omitted.
+ */
+function runTestCasesWhoseMergeLocationIsConsistentRegardlessOfAllowDiskUse(allowDiskUse) {
+ // Test that a $match pipeline with an empty merge stage is merged on mongoS.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_match_only",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 400
+ });
- // Test that $sample is merged on mongoS if it is the splitpoint, since this will result in
- // a merging $sort of presorted streams in the merge pipeline.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_sample_splitpoint",
- pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sample: {size: 300}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 300
- });
+ // Test that a $sort stage which merges pre-sorted streams is run on mongoS.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_sort_presorted",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 400
+ });
- // Test that $geoNear is merged on mongoS.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_geo_near",
- pipeline: [
- {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
- {$limit: 300}
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 300
- });
+ // Test that $skip is merged on mongoS.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_skip",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}, {$skip: 300}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 100
+ });
- // Test that $facet is merged on mongoS if all pipelines are mongoS-mergeable regardless of
- // 'allowDiskUse'.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_facet_all_pipes_eligible_for_mongos",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {
- $facet: {
- pipe1: [{$match: {_id: {$gt: 0}}}, {$skip: 10}, {$limit: 150}],
- pipe2: [{$match: {_id: {$lt: 0}}}, {$project: {_id: 0, a: 1}}]
- }
+ // Test that $limit is merged on mongoS.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_limit",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$limit: 300}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 300
+ });
+
+ // Test that $sample is merged on mongoS if it is the splitpoint, since this will result in
+ // a merging $sort of presorted streams in the merge pipeline.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_sample_splitpoint",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sample: {size: 300}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 300
+ });
+
+ // Test that $geoNear is merged on mongoS.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_geo_near",
+ pipeline:
+ [{$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}}, {$limit: 300}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 300
+ });
+
+ // Test that $facet is merged on mongoS if all pipelines are mongoS-mergeable regardless of
+ // 'allowDiskUse'.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_facet_all_pipes_eligible_for_mongos",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {
+ $facet: {
+ pipe1: [{$match: {_id: {$gt: 0}}}, {$skip: 10}, {$limit: 150}],
+ pipe2: [{$match: {_id: {$lt: 0}}}, {$project: {_id: 0, a: 1}}]
}
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 1
- });
+ }
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 1
+ });
- // Test that $facet is merged on mongoD if any pipeline requires a primary shard merge,
- // regardless of 'allowDiskUse'.
- assertMergeOnMongoD({
+ // Test that $facet is merged on mongoD if any pipeline requires a primary shard merge,
+ // regardless of 'allowDiskUse'.
+ assertMergeOnMongoD({
testName: "agg_mongos_merge_facet_pipe_needs_primary_shard_disk_use_" + allowDiskUse,
pipeline: [
{$match: {_id: {$gte: -200, $lte: 200}}},
@@ -263,43 +258,43 @@
expectedCount: 1
});
- // Test that a pipeline whose merging half can be run on mongos using only the mongos
- // execution machinery returns the correct results.
- // TODO SERVER-30882 Find a way to assert that all stages get absorbed by mongos.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_all_mongos_runnable_skip_and_limit_stages",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {$sort: {_id: -1}},
- {$skip: 150},
- {$limit: 150},
- {$skip: 5},
- {$limit: 1},
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 1
- });
+ // Test that a pipeline whose merging half can be run on mongos using only the mongos
+ // execution machinery returns the correct results.
+ // TODO SERVER-30882 Find a way to assert that all stages get absorbed by mongos.
+ assertMergeOnMongoS({
+ testName: "agg_mongos_merge_all_mongos_runnable_skip_and_limit_stages",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {$sort: {_id: -1}},
+ {$skip: 150},
+ {$limit: 150},
+ {$skip: 5},
+ {$limit: 1},
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 1
+ });
- // Test that a merge pipeline which needs to run on a shard is NOT merged on mongoS
- // regardless of 'allowDiskUse'.
- assertMergeOnMongoD({
- testName: "agg_mongos_merge_primary_shard_disk_use_" + allowDiskUse,
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {$_internalSplitPipeline: {mergeType: "anyShard"}}
- ],
- mergeType: "anyShard",
- allowDiskUse: allowDiskUse,
- expectedCount: 400
- });
+ // Test that a merge pipeline which needs to run on a shard is NOT merged on mongoS
+ // regardless of 'allowDiskUse'.
+ assertMergeOnMongoD({
+ testName: "agg_mongos_merge_primary_shard_disk_use_" + allowDiskUse,
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {$_internalSplitPipeline: {mergeType: "anyShard"}}
+ ],
+ mergeType: "anyShard",
+ allowDiskUse: allowDiskUse,
+ expectedCount: 400
+ });
- // Allow sharded $lookup.
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", true);
+ // Allow sharded $lookup.
+ setParameterOnAllHosts(
+ DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", true);
- // Test that $lookup is merged on the primary shard when the foreign collection is
- // unsharded.
- assertMergeOnMongoD({
+ // Test that $lookup is merged on the primary shard when the foreign collection is
+ // unsharded.
+ assertMergeOnMongoD({
testName: "agg_mongos_merge_lookup_unsharded_disk_use_" + allowDiskUse,
pipeline: [
{$match: {_id: {$gte: -200, $lte: 200}}},
@@ -317,8 +312,8 @@
expectedCount: 400
});
- // Test that $lookup is merged on mongoS when the foreign collection is sharded.
- assertMergeOnMongoS({
+ // Test that $lookup is merged on mongoS when the foreign collection is sharded.
+ assertMergeOnMongoS({
testName: "agg_mongos_merge_lookup_sharded_disk_use_" + allowDiskUse,
pipeline: [
{$match: {_id: {$gte: -200, $lte: 200}}},
@@ -336,192 +331,180 @@
expectedCount: 400
});
- // Disable sharded $lookup.
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", false);
- }
+ // Disable sharded $lookup.
+ setParameterOnAllHosts(
+ DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", false);
+}
- /**
- * Runs a series of test cases which will always merge on mongoD when 'allowDiskUse' is true,
- * and on mongoS when 'allowDiskUse' is false or omitted.
- */
- function runTestCasesWhoseMergeLocationDependsOnAllowDiskUse(allowDiskUse) {
- // All test cases should merge on mongoD if allowDiskUse is true, mongoS otherwise.
- const assertMergeOnMongoX = (allowDiskUse ? assertMergeOnMongoD : assertMergeOnMongoS);
-
- // Test that a blocking $sort is only merged on mongoS if 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_blocking_sort_no_disk_use",
- pipeline:
- [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}, {$sort: {a: 1}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 400
- });
+/**
+ * Runs a series of test cases which will always merge on mongoD when 'allowDiskUse' is true,
+ * and on mongoS when 'allowDiskUse' is false or omitted.
+ */
+function runTestCasesWhoseMergeLocationDependsOnAllowDiskUse(allowDiskUse) {
+ // All test cases should merge on mongoD if allowDiskUse is true, mongoS otherwise.
+ const assertMergeOnMongoX = (allowDiskUse ? assertMergeOnMongoD : assertMergeOnMongoS);
+
+ // Test that a blocking $sort is only merged on mongoS if 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_blocking_sort_no_disk_use",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sort: {_id: -1}}, {$sort: {a: 1}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 400
+ });
- // Test that $group is only merged on mongoS if 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_group_allow_disk_use",
- pipeline:
- [{$match: {_id: {$gte: -200, $lte: 200}}}, {$group: {_id: {$mod: ["$_id", 150]}}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 299
- });
+ // Test that $group is only merged on mongoS if 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_group_allow_disk_use",
+ pipeline:
+ [{$match: {_id: {$gte: -200, $lte: 200}}}, {$group: {_id: {$mod: ["$_id", 150]}}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 299
+ });
- // Test that a blocking $sample is only merged on mongoS if 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_blocking_sample_allow_disk_use",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {$sample: {size: 300}},
- {$sample: {size: 200}}
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 200
- });
+ // Test that a blocking $sample is only merged on mongoS if 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_blocking_sample_allow_disk_use",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {$sample: {size: 300}},
+ {$sample: {size: 200}}
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 200
+ });
- // Test that $facet is only merged on mongoS if all pipelines are mongoS-mergeable when
- // 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_facet_allow_disk_use",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {
- $facet: {
- pipe1: [{$match: {_id: {$gt: 0}}}, {$skip: 10}, {$limit: 150}],
- pipe2: [{$match: {_id: {$lt: 0}}}, {$sort: {a: -1}}]
- }
+ // Test that $facet is only merged on mongoS if all pipelines are mongoS-mergeable when
+ // 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_facet_allow_disk_use",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {
+ $facet: {
+ pipe1: [{$match: {_id: {$gt: 0}}}, {$skip: 10}, {$limit: 150}],
+ pipe2: [{$match: {_id: {$lt: 0}}}, {$sort: {a: -1}}]
}
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 1
- });
-
- // Test that $bucketAuto is only merged on mongoS if 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_bucket_auto_allow_disk_use",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {$bucketAuto: {groupBy: "$_id", buckets: 10}}
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 10
- });
-
- //
- // Test composite stages.
- //
+ }
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 1
+ });
- // Test that $bucket ($group->$sort) is merged on mongoS iff 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_bucket_allow_disk_use",
- pipeline: [
- {$match: {_id: {$gte: -200, $lte: 200}}},
- {
- $bucket: {
- groupBy: "$_id",
- boundaries: [-200, -150, -100, -50, 0, 50, 100, 150, 200]
- }
- }
- ],
- allowDiskUse: allowDiskUse,
- expectedCount: 8
- });
+ // Test that $bucketAuto is only merged on mongoS if 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_bucket_auto_allow_disk_use",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {$bucketAuto: {groupBy: "$_id", buckets: 10}}
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 10
+ });
- // Test that $sortByCount ($group->$sort) is merged on mongoS iff 'allowDiskUse' isn't set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_sort_by_count_allow_disk_use",
- pipeline:
- [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sortByCount: {$mod: ["$_id", 150]}}],
- allowDiskUse: allowDiskUse,
- expectedCount: 299
- });
+ //
+ // Test composite stages.
+ //
- // Test that $count ($group->$project) is merged on mongoS iff 'allowDiskUse' is not set.
- assertMergeOnMongoX({
- testName: "agg_mongos_merge_count_allow_disk_use",
- pipeline: [{$match: {_id: {$gte: -150, $lte: 1500}}}, {$count: "doc_count"}],
- allowDiskUse: allowDiskUse,
- expectedCount: 1
- });
- }
+ // Test that $bucket ($group->$sort) is merged on mongoS iff 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_bucket_allow_disk_use",
+ pipeline: [
+ {$match: {_id: {$gte: -200, $lte: 200}}},
+ {$bucket: {groupBy: "$_id", boundaries: [-200, -150, -100, -50, 0, 50, 100, 150, 200]}}
+ ],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 8
+ });
- // Run all test cases for each potential value of 'allowDiskUse'.
- for (let allowDiskUse of[false, undefined, true]) {
- // Reset the profiler and clear the list of tests that ran on the previous iteration.
- testNameHistory.clear();
- startProfiling();
+ // Test that $sortByCount ($group->$sort) is merged on mongoS iff 'allowDiskUse' isn't set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_sort_by_count_allow_disk_use",
+ pipeline: [{$match: {_id: {$gte: -200, $lte: 200}}}, {$sortByCount: {$mod: ["$_id", 150]}}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 299
+ });
- // Run all test cases.
- runTestCasesWhoseMergeLocationIsConsistentRegardlessOfAllowDiskUse(allowDiskUse);
- runTestCasesWhoseMergeLocationDependsOnAllowDiskUse(allowDiskUse);
- }
+ // Test that $count ($group->$project) is merged on mongoS iff 'allowDiskUse' is not set.
+ assertMergeOnMongoX({
+ testName: "agg_mongos_merge_count_allow_disk_use",
+ pipeline: [{$match: {_id: {$gte: -150, $lte: 1500}}}, {$count: "doc_count"}],
+ allowDiskUse: allowDiskUse,
+ expectedCount: 1
+ });
+}
- // Start a new profiling session before running the final few tests.
+// Run all test cases for each potential value of 'allowDiskUse'.
+for (let allowDiskUse of [false, undefined, true]) {
+ // Reset the profiler and clear the list of tests that ran on the previous iteration.
+ testNameHistory.clear();
startProfiling();
- // Test that merge pipelines containing all mongos-runnable stages produce the expected output.
- assertMergeOnMongoS({
- testName: "agg_mongos_merge_all_mongos_runnable_stages",
- pipeline: [
- {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
- {$sort: {a: 1}},
- {$skip: 150},
- {$limit: 150},
- {$addFields: {d: true}},
- {$unwind: "$a"},
- {$sample: {size: 100}},
- {$project: {c: 0, geo: 0, distance: 0}},
- {$group: {_id: "$_id", doc: {$push: "$$CURRENT"}}},
- {$unwind: "$doc"},
- {$replaceRoot: {newRoot: "$doc"}},
- {$facet: {facetPipe: [{$match: {_id: {$gte: -200, $lte: 200}}}]}},
- {$unwind: "$facetPipe"},
- {$replaceRoot: {newRoot: "$facetPipe"}},
- {
- $redact: {
- $cond:
- {if: {$eq: ["$redactThisDoc", true]}, then: "$$PRUNE", else: "$$DESCEND"}
- }
- },
- {
- $match: {
- _id: {$gte: -50, $lte: 100},
- a: {$type: "number", $gte: -50, $lte: 100},
- b: {$exists: false},
- c: {$exists: false},
- d: true,
- geo: {$exists: false},
- distance: {$exists: false},
- text: "txt"
- }
- }
- ],
- expectedCount: 100
- });
-
- // Test that metadata is not propagated to the user when a pipeline which produces metadata
- // fields merges on mongoS.
- const metaDataTests = [
- {pipeline: [{$sort: {_id: -1}}], verifyNoMetaData: (doc) => assert.isnull(doc.$sortKey)},
+ // Run all test cases.
+ runTestCasesWhoseMergeLocationIsConsistentRegardlessOfAllowDiskUse(allowDiskUse);
+ runTestCasesWhoseMergeLocationDependsOnAllowDiskUse(allowDiskUse);
+}
+
+// Start a new profiling session before running the final few tests.
+startProfiling();
+
+// Test that merge pipelines containing all mongos-runnable stages produce the expected output.
+assertMergeOnMongoS({
+ testName: "agg_mongos_merge_all_mongos_runnable_stages",
+ pipeline: [
+ {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
+ {$sort: {a: 1}},
+ {$skip: 150},
+ {$limit: 150},
+ {$addFields: {d: true}},
+ {$unwind: "$a"},
+ {$sample: {size: 100}},
+ {$project: {c: 0, geo: 0, distance: 0}},
+ {$group: {_id: "$_id", doc: {$push: "$$CURRENT"}}},
+ {$unwind: "$doc"},
+ {$replaceRoot: {newRoot: "$doc"}},
+ {$facet: {facetPipe: [{$match: {_id: {$gte: -200, $lte: 200}}}]}},
+ {$unwind: "$facetPipe"},
+ {$replaceRoot: {newRoot: "$facetPipe"}},
{
- pipeline: [{$match: {$text: {$search: "txt"}}}],
- verifyNoMetaData: (doc) => assert.isnull(doc.$textScore)
+ $redact:
+ {$cond: {if: {$eq: ["$redactThisDoc", true]}, then: "$$PRUNE", else: "$$DESCEND"}}
},
{
- pipeline: [{$sample: {size: 300}}],
- verifyNoMetaData: (doc) => assert.isnull(doc.$randVal)
- },
- {
- pipeline: [{$match: {$text: {$search: "txt"}}}, {$sort: {text: 1}}],
- verifyNoMetaData:
- (doc) => assert.docEq([doc.$textScore, doc.$sortKey], [undefined, undefined])
+ $match: {
+ _id: {$gte: -50, $lte: 100},
+ a: {$type: "number", $gte: -50, $lte: 100},
+ b: {$exists: false},
+ c: {$exists: false},
+ d: true,
+ geo: {$exists: false},
+ distance: {$exists: false},
+ text: "txt"
+ }
}
- ];
-
- for (let metaDataTest of metaDataTests) {
- assert.gte(mongosColl.aggregate(metaDataTest.pipeline).itcount(), 300);
- mongosColl.aggregate(metaDataTest.pipeline).forEach(metaDataTest.verifyNoMetaData);
+ ],
+ expectedCount: 100
+});
+
+// Test that metadata is not propagated to the user when a pipeline which produces metadata
+// fields merges on mongoS.
+const metaDataTests = [
+ {pipeline: [{$sort: {_id: -1}}], verifyNoMetaData: (doc) => assert.isnull(doc.$sortKey)},
+ {
+ pipeline: [{$match: {$text: {$search: "txt"}}}],
+ verifyNoMetaData: (doc) => assert.isnull(doc.$textScore)
+ },
+ {pipeline: [{$sample: {size: 300}}], verifyNoMetaData: (doc) => assert.isnull(doc.$randVal)},
+ {
+ pipeline: [{$match: {$text: {$search: "txt"}}}, {$sort: {text: 1}}],
+ verifyNoMetaData: (doc) =>
+ assert.docEq([doc.$textScore, doc.$sortKey], [undefined, undefined])
}
+];
+
+for (let metaDataTest of metaDataTests) {
+ assert.gte(mongosColl.aggregate(metaDataTest.pipeline).itcount(), 300);
+ mongosColl.aggregate(metaDataTest.pipeline).forEach(metaDataTest.verifyNoMetaData);
+}
- st.stop();
+st.stop();
})();
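The merge-location checks in this file reduce to one profiler query on the primary shard: a
shard-side merge appears there as an aggregate whose pipeline begins with $mergeCursors, while a
mongoS-side merge leaves no such entry. The helper's filter can be run by hand; a sketch, reusing
the 'primaryShardDB' and 'mongosColl' names defined in the test above:

    const mergeCursorOps = primaryShardDB.system.profile
                               .find({
                                   "command.aggregate": mongosColl.getName(),
                                   "command.comment": "agg_mongos_merge_match_only",
                                   "command.pipeline.$mergeCursors": {$exists: 1}
                               })
                               .itcount();
    // 0 is expected when the merge ran on mongoS, 1 when it ran on the primary shard.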
diff --git a/jstests/aggregation/mongos_slaveok.js b/jstests/aggregation/mongos_slaveok.js
index 24346e407f0..a0ccf2d1100 100644
--- a/jstests/aggregation/mongos_slaveok.js
+++ b/jstests/aggregation/mongos_slaveok.js
@@ -8,43 +8,41 @@
* ]
*/
(function() {
- load('jstests/replsets/rslib.js');
+load('jstests/replsets/rslib.js');
- var NODES = 2;
+var NODES = 2;
- var doTest = function(st, doSharded) {
- var testDB = st.s.getDB('test');
+var doTest = function(st, doSharded) {
+ var testDB = st.s.getDB('test');
- if (doSharded) {
- testDB.adminCommand({enableSharding: 'test'});
- testDB.adminCommand({shardCollection: 'test.user', key: {x: 1}});
- }
+ if (doSharded) {
+ testDB.adminCommand({enableSharding: 'test'});
+ testDB.adminCommand({shardCollection: 'test.user', key: {x: 1}});
+ }
- testDB.user.insert({x: 10}, {writeConcern: {w: NODES}});
- testDB.setSlaveOk(true);
+ testDB.user.insert({x: 10}, {writeConcern: {w: NODES}});
+ testDB.setSlaveOk(true);
- var secNode = st.rs0.getSecondary();
- secNode.getDB('test').setProfilingLevel(2);
+ var secNode = st.rs0.getSecondary();
+ secNode.getDB('test').setProfilingLevel(2);
- // wait for mongos to recognize that the slave is up
- awaitRSClientHosts(st.s, secNode, {ok: true});
+ // wait for mongos to recognize that the slave is up
+ awaitRSClientHosts(st.s, secNode, {ok: true});
- var res =
- testDB.runCommand({aggregate: 'user', pipeline: [{$project: {x: 1}}], cursor: {}});
- assert(res.ok, 'aggregate command failed: ' + tojson(res));
+ var res = testDB.runCommand({aggregate: 'user', pipeline: [{$project: {x: 1}}], cursor: {}});
+ assert(res.ok, 'aggregate command failed: ' + tojson(res));
- var profileQuery = {op: 'command', ns: 'test.user', 'command.aggregate': 'user'};
- var profileDoc = secNode.getDB('test').system.profile.findOne(profileQuery);
+ var profileQuery = {op: 'command', ns: 'test.user', 'command.aggregate': 'user'};
+ var profileDoc = secNode.getDB('test').system.profile.findOne(profileQuery);
- assert(profileDoc != null);
- testDB.dropDatabase();
- };
+ assert(profileDoc != null);
+ testDB.dropDatabase();
+};
- var st = new ShardingTest({shards: {rs0: {oplogSize: 10, nodes: NODES}}});
+var st = new ShardingTest({shards: {rs0: {oplogSize: 10, nodes: NODES}}});
- doTest(st, false);
- doTest(st, true);
-
- st.stop();
+doTest(st, false);
+doTest(st, true);
+st.stop();
})();
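The secondary read in doTest() relies on the legacy slaveOk flag set via setSlaveOk(true). The
same routing can be expressed with an explicit read preference; an equivalent sketch, not part of
the test:

    var testDB = st.s.getDB('test');
    testDB.getMongo().setReadPref('secondary');
    var res = testDB.runCommand({aggregate: 'user', pipeline: [{$project: {x: 1}}], cursor: {}});
    assert(res.ok, 'aggregate command failed: ' + tojson(res));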
diff --git a/jstests/aggregation/optimize_away_pipeline.js b/jstests/aggregation/optimize_away_pipeline.js
index 191a98023d4..8ca82dd3ed6 100644
--- a/jstests/aggregation/optimize_away_pipeline.js
+++ b/jstests/aggregation/optimize_away_pipeline.js
@@ -9,330 +9,322 @@
// sharded collections.
// @tags: [do_not_wrap_aggregations_in_facets, assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For 'orderedArrayEq' and 'arrayEq'.
- load("jstests/concurrency/fsm_workload_helpers/server_types.js"); // For isWiredTiger.
- load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
- load("jstests/libs/fixture_helpers.js"); // For 'isMongos' and 'isSharded'.
+load("jstests/aggregation/extras/utils.js"); // For 'orderedArrayEq' and 'arrayEq'.
+load("jstests/concurrency/fsm_workload_helpers/server_types.js"); // For isWiredTiger.
+load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
+load("jstests/libs/fixture_helpers.js"); // For 'isMongos' and 'isSharded'.
- const coll = db.optimize_away_pipeline;
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, x: 10}));
- assert.writeOK(coll.insert({_id: 2, x: 20}));
- assert.writeOK(coll.insert({_id: 3, x: 30}));
+const coll = db.optimize_away_pipeline;
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, x: 10}));
+assert.writeOK(coll.insert({_id: 2, x: 20}));
+assert.writeOK(coll.insert({_id: 3, x: 30}));
-    // Asserts that the given pipeline has *not* been optimized away and that the request is
-    // answered using the aggregation module. There should be pipeline stages present in the
-    // explain output. The function also asserts that the query stage passed in the
-    // 'expectedStage' argument is present in the explain output. If 'expectedResult' is
-    // provided, the pipeline is executed and the returned result is validated against the
-    // expected result without respecting the order of the documents. If 'preserveResultOrder'
-    // is 'true', the order is respected.
- function assertPipelineUsesAggregation({
- pipeline = [],
- pipelineOptions = {},
- expectedStage = null,
- expectedResult = null,
- preserveResultOrder = false
- } = {}) {
- const explainOutput = coll.explain().aggregate(pipeline, pipelineOptions);
+// Asserts that the given pipeline has *not* been optimized away and that the request is answered
+// using the aggregation module. There should be pipeline stages present in the explain output.
+// The function also asserts that the query stage passed in the 'expectedStage' argument is
+// present in the explain output. If 'expectedResult' is provided, the pipeline is executed and
+// the returned result is validated against the expected result without respecting the order of
+// the documents. If 'preserveResultOrder' is 'true', the order is respected.
+function assertPipelineUsesAggregation({
+ pipeline = [],
+ pipelineOptions = {},
+ expectedStage = null,
+ expectedResult = null,
+ preserveResultOrder = false
+} = {}) {
+ const explainOutput = coll.explain().aggregate(pipeline, pipelineOptions);
- assert(isAggregationPlan(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to use an aggregation framework in the explain output: " +
- tojson(explainOutput));
- assert(!isQueryPlan(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to use a query layer at the root level in the explain output: " +
- tojson(explainOutput));
+ assert(isAggregationPlan(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to use an aggregation framework in the explain output: " + tojson(explainOutput));
+ assert(!isQueryPlan(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to use a query layer at the root level in the explain output: " +
+ tojson(explainOutput));
- let cursor = getAggPlanStage(explainOutput, "$cursor");
- if (cursor) {
- cursor = cursor.$cursor;
- } else {
- cursor = getAggPlanStage(explainOutput, "$geoNearCursor").$geoNearCursor;
- }
-
- assert(cursor,
- "Expected pipeline " + tojsononeline(pipeline) + " to include a $cursor " +
- " stage in the explain output: " + tojson(explainOutput));
- assert(cursor.queryPlanner.optimizedPipeline === undefined,
- "Expected pipeline " + tojsononeline(pipeline) + " to *not* include an " +
- "'optimizedPipeline' field in the explain output: " + tojson(explainOutput));
- assert(aggPlanHasStage(explainOutput, expectedStage),
- "Expected pipeline " + tojsononeline(pipeline) + " to include a " + expectedStage +
- " stage in the explain output: " + tojson(explainOutput));
+ let cursor = getAggPlanStage(explainOutput, "$cursor");
+ if (cursor) {
+ cursor = cursor.$cursor;
+ } else {
+ cursor = getAggPlanStage(explainOutput, "$geoNearCursor").$geoNearCursor;
+ }
- if (expectedResult) {
- const actualResult = coll.aggregate(pipeline, pipelineOptions).toArray();
- assert(preserveResultOrder ? orderedArrayEq(actualResult, expectedResult)
- : arrayEq(actualResult, expectedResult));
- }
+ assert(cursor,
+ "Expected pipeline " + tojsononeline(pipeline) + " to include a $cursor " +
+ " stage in the explain output: " + tojson(explainOutput));
+ assert(cursor.queryPlanner.optimizedPipeline === undefined,
+ "Expected pipeline " + tojsononeline(pipeline) + " to *not* include an " +
+ "'optimizedPipeline' field in the explain output: " + tojson(explainOutput));
+ assert(aggPlanHasStage(explainOutput, expectedStage),
+ "Expected pipeline " + tojsononeline(pipeline) + " to include a " + expectedStage +
+ " stage in the explain output: " + tojson(explainOutput));
- return explainOutput;
+ if (expectedResult) {
+ const actualResult = coll.aggregate(pipeline, pipelineOptions).toArray();
+ assert(preserveResultOrder ? orderedArrayEq(actualResult, expectedResult)
+ : arrayEq(actualResult, expectedResult));
}
-    // Asserts that the given pipeline has been optimized away and that the request is answered
-    // using just the query module. There should be no pipeline stages present in the explain
-    // output. The function also asserts that the query stage passed in the 'expectedStage'
-    // argument is present in the explain output. If 'expectedResult' is provided, the pipeline
-    // is executed and the returned result is validated against the expected result without
-    // respecting the order of the documents. If 'preserveResultOrder' is 'true', the order is
-    // respected.
- function assertPipelineDoesNotUseAggregation({
- pipeline = [],
- pipelineOptions = {},
- expectedStage = null,
- expectedResult = null,
- preserveResultOrder = false
- } = {}) {
- const explainOutput = coll.explain().aggregate(pipeline, pipelineOptions);
-
- assert(!isAggregationPlan(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to use an aggregation framework in the explain output: " +
- tojson(explainOutput));
- assert(isQueryPlan(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to use a query layer at the root level in the explain output: " +
- tojson(explainOutput));
- if (explainOutput.hasOwnProperty("shards")) {
- Object.keys(explainOutput.shards)
- .forEach((shard) => assert(
- explainOutput.shards[shard].queryPlanner.optimizedPipeline === true,
- "Expected pipeline " + tojsononeline(pipeline) + " to include an " +
- "'optimizedPipeline' field in the explain output: " +
- tojson(explainOutput)));
- } else {
- assert(explainOutput.queryPlanner.optimizedPipeline === true,
- "Expected pipeline " + tojsononeline(pipeline) + " to include an " +
- "'optimizedPipeline' field in the explain output: " + tojson(explainOutput));
- }
- assert(planHasStage(db, explainOutput, expectedStage),
- "Expected pipeline " + tojsononeline(pipeline) + " to include a " + expectedStage +
- " stage in the explain output: " + tojson(explainOutput));
+ return explainOutput;
+}
- if (expectedResult) {
- const actualResult = coll.aggregate(pipeline, pipelineOptions).toArray();
- assert(preserveResultOrder ? orderedArrayEq(actualResult, expectedResult)
- : arrayEq(actualResult, expectedResult));
- }
+// Asserts that the given pipeline has been optimized away and that the request is answered using
+// just the query module. There should be no pipeline stages present in the explain output. The
+// function also asserts that the query stage passed in the 'expectedStage' argument is present
+// in the explain output. If 'expectedResult' is provided, the pipeline is executed and the
+// returned result is validated against the expected result without respecting the order of the
+// documents. If 'preserveResultOrder' is 'true', the order is respected.
+function assertPipelineDoesNotUseAggregation({
+ pipeline = [],
+ pipelineOptions = {},
+ expectedStage = null,
+ expectedResult = null,
+ preserveResultOrder = false
+} = {}) {
+ const explainOutput = coll.explain().aggregate(pipeline, pipelineOptions);
- return explainOutput;
+ assert(!isAggregationPlan(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to use an aggregation framework in the explain output: " +
+ tojson(explainOutput));
+ assert(isQueryPlan(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to use a query layer at the root level in the explain output: " +
+ tojson(explainOutput));
+ if (explainOutput.hasOwnProperty("shards")) {
+ Object.keys(explainOutput.shards)
+ .forEach((shard) =>
+ assert(explainOutput.shards[shard].queryPlanner.optimizedPipeline === true,
+ "Expected pipeline " + tojsononeline(pipeline) + " to include an " +
+ "'optimizedPipeline' field in the explain output: " +
+ tojson(explainOutput)));
+ } else {
+ assert(explainOutput.queryPlanner.optimizedPipeline === true,
+ "Expected pipeline " + tojsononeline(pipeline) + " to include an " +
+ "'optimizedPipeline' field in the explain output: " + tojson(explainOutput));
}
+ assert(planHasStage(db, explainOutput, expectedStage),
+ "Expected pipeline " + tojsononeline(pipeline) + " to include a " + expectedStage +
+ " stage in the explain output: " + tojson(explainOutput));
- // Test that getMore works with the optimized query.
- function testGetMore({command = null, expectedResult = null} = {}) {
- const documents =
- new DBCommandCursor(db, assert.commandWorked(db.runCommand(command)), 1 /* batchsize */)
- .toArray();
- assert(arrayEq(documents, expectedResult));
+ if (expectedResult) {
+ const actualResult = coll.aggregate(pipeline, pipelineOptions).toArray();
+ assert(preserveResultOrder ? orderedArrayEq(actualResult, expectedResult)
+ : arrayEq(actualResult, expectedResult));
}
- let explainOutput;
+ return explainOutput;
+}
- // Basic pipelines.
+// Test that getMore works with the optimized query.
+function testGetMore({command = null, expectedResult = null} = {}) {
+ const documents =
+ new DBCommandCursor(db, assert.commandWorked(db.runCommand(command)), 1 /* batchsize */)
+ .toArray();
+ assert(arrayEq(documents, expectedResult));
+}
- // Test basic scenarios when a pipeline has a single $cursor stage or can be collapsed into a
- // single cursor stage.
- assertPipelineDoesNotUseAggregation({
- pipeline: [],
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: 1, x: 10}, {_id: 2, x: 20}, {_id: 3, x: 30}]
- });
- assertPipelineDoesNotUseAggregation({
- pipeline: [{$match: {x: 20}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: 2, x: 20}]
- });
+let explainOutput;
- // Pipelines with a collation.
+// Basic pipelines.
- // Test a simple pipeline with a case-insensitive collation.
- assert.writeOK(coll.insert({_id: 4, x: 40, b: "abc"}));
- assertPipelineDoesNotUseAggregation({
- pipeline: [{$match: {b: "ABC"}}],
- pipelineOptions: {collation: {locale: "en_US", strength: 2}},
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: 4, x: 40, b: "abc"}]
- });
- assert.commandWorked(coll.deleteOne({_id: 4}));
+// Test basic scenarios when a pipeline has a single $cursor stage or can be collapsed into a
+// single cursor stage.
+assertPipelineDoesNotUseAggregation({
+ pipeline: [],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{_id: 1, x: 10}, {_id: 2, x: 20}, {_id: 3, x: 30}]
+});
+assertPipelineDoesNotUseAggregation(
+ {pipeline: [{$match: {x: 20}}], expectedStage: "COLLSCAN", expectedResult: [{_id: 2, x: 20}]});
- // Pipelines with covered queries.
+// Pipelines with a collation.
- // We can collapse a covered query into a single $cursor when $project and $sort are present and
- // the latter is near the front of the pipeline. Skip this test in sharded modes as we cannot
- // correctly handle explain output in plan analyzer helper functions.
- assert.commandWorked(coll.createIndex({x: 1}));
- assertPipelineDoesNotUseAggregation({
- pipeline: [{$sort: {x: 1}}, {$project: {x: 1, _id: 0}}],
- expectedStage: "IXSCAN",
- expectedResult: [{x: 10}, {x: 20}, {x: 30}],
- preserveResultOrder: true
- });
- assertPipelineDoesNotUseAggregation({
- pipeline: [{$match: {x: {$gte: 20}}}, {$sort: {x: 1}}, {$project: {x: 1, _id: 0}}],
- expectedStage: "IXSCAN",
- expectedResult: [{x: 20}, {x: 30}],
- preserveResultOrder: true
- });
- // TODO: SERVER-36723 We cannot collapse if there is a $limit stage though.
- assertPipelineUsesAggregation({
- pipeline:
- [{$match: {x: {$gte: 20}}}, {$sort: {x: 1}}, {$limit: 1}, {$project: {x: 1, _id: 0}}],
- expectedStage: "IXSCAN",
- expectedResult: [{x: 20}]
- });
- assert.commandWorked(coll.dropIndexes());
+// Test a simple pipeline with a case-insensitive collation.
+assert.writeOK(coll.insert({_id: 4, x: 40, b: "abc"}));
+assertPipelineDoesNotUseAggregation({
+ pipeline: [{$match: {b: "ABC"}}],
+ pipelineOptions: {collation: {locale: "en_US", strength: 2}},
+ expectedStage: "COLLSCAN",
+ expectedResult: [{_id: 4, x: 40, b: "abc"}]
+});
+assert.commandWorked(coll.deleteOne({_id: 4}));
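
The same strength-2 semantics hold for an equivalent find: at strength 2 the en_US collation compares base characters and diacritics but ignores case, so "ABC" matches "abc" (a sketch, not part of the test):

assert.writeOK(coll.insert({_id: 5, b: "abc"}));
assert.eq(1, coll.find({b: "ABC"}).collation({locale: "en_US", strength: 2}).itcount());
assert.commandWorked(coll.deleteOne({_id: 5}));
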
- // Pipelines which cannot be optimized away.
+// Pipelines with covered queries.
- // TODO SERVER-40254: Uncovered queries.
- assert.writeOK(coll.insert({_id: 4, x: 40, a: {b: "ab1"}}));
- assertPipelineUsesAggregation({
- pipeline: [{$project: {x: 1, _id: 0}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{x: 10}, {x: 20}, {x: 30}, {x: 40}]
- });
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: 20}}, {$project: {x: 1, _id: 0}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{x: 20}]
- });
- assertPipelineUsesAggregation({
- pipeline: [{$project: {x: 1, "a.b": 1, _id: 0}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{x: 10}, {x: 20}, {x: 30}, {x: 40, a: {b: "ab1"}}]
- });
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: 40}}, {$project: {"a.b": 1, _id: 0}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{a: {b: "ab1"}}]
- });
- assert.commandWorked(coll.deleteOne({_id: 4}));
+// We can collapse a covered query into a single $cursor when $project and $sort are present and
+// the latter is near the front of the pipeline. Skip this test in sharded modes as we cannot
+// correctly handle explain output in plan analyzer helper functions.
+assert.commandWorked(coll.createIndex({x: 1}));
+assertPipelineDoesNotUseAggregation({
+ pipeline: [{$sort: {x: 1}}, {$project: {x: 1, _id: 0}}],
+ expectedStage: "IXSCAN",
+ expectedResult: [{x: 10}, {x: 20}, {x: 30}],
+ preserveResultOrder: true
+});
+assertPipelineDoesNotUseAggregation({
+ pipeline: [{$match: {x: {$gte: 20}}}, {$sort: {x: 1}}, {$project: {x: 1, _id: 0}}],
+ expectedStage: "IXSCAN",
+ expectedResult: [{x: 20}, {x: 30}],
+ preserveResultOrder: true
+});
+// TODO SERVER-36723: We cannot collapse the pipeline if it contains a $limit stage.
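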
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: {$gte: 20}}}, {$sort: {x: 1}}, {$limit: 1}, {$project: {x: 1, _id: 0}}],
+ expectedStage: "IXSCAN",
+ expectedResult: [{x: 20}]
+});
+assert.commandWorked(coll.dropIndexes());
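
A covered plan is recognizable in explain output as an IXSCAN with no FETCH stage above it, since every projected field comes straight off the index keys. A sketch of checking that by hand, recreating the {x: 1} index for illustration:

assert.commandWorked(coll.createIndex({x: 1}));
const plan = coll.find({x: {$gte: 20}}, {x: 1, _id: 0}).sort({x: 1}).explain();
assert(planHasStage(db, plan, "IXSCAN"));   // The index supplies both filter and sort.
assert(!planHasStage(db, plan, "FETCH"));   // No document fetch: the query is covered.
assert.commandWorked(coll.dropIndexes());
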
- // TODO SERVER-36723: $limit stage is not supported yet.
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: 20}}, {$limit: 1}],
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: 2, x: 20}]
- });
- // TODO SERVER-36723: $skip stage is not supported yet.
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: {$gte: 20}}}, {$skip: 1}],
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: 3, x: 30}]
- });
- // We cannot collapse a $project stage if it has a complex pipeline expression.
- assertPipelineUsesAggregation(
- {pipeline: [{$project: {x: {$substr: ["$y", 0, 1]}, _id: 0}}], expectedStage: "COLLSCAN"});
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: 20}}, {$project: {x: {$substr: ["$y", 0, 1]}, _id: 0}}],
- expectedStage: "COLLSCAN"
- });
- // We cannot optimize away a pipeline if there are stages which have no equivalent in the
- // find command.
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: {$gte: 20}}}, {$count: "count"}],
- expectedStage: "COLLSCAN",
- expectedResult: [{count: 2}]
- });
- assertPipelineUsesAggregation({
- pipeline: [{$match: {x: {$gte: 20}}}, {$group: {_id: "null", s: {$sum: "$x"}}}],
- expectedStage: "COLLSCAN",
- expectedResult: [{_id: "null", s: 50}]
- });
- // TODO SERVER-40253: We cannot optimize away text search queries.
- assert.commandWorked(coll.createIndex({y: "text"}));
- assertPipelineUsesAggregation(
- {pipeline: [{$match: {$text: {$search: "abc"}}}], expectedStage: "IXSCAN"});
- assert.commandWorked(coll.dropIndexes());
- // We cannot optimize away geo near queries.
- assert.commandWorked(coll.createIndex({"y": "2d"}));
- assertPipelineUsesAggregation({
- pipeline: [{$geoNear: {near: [0, 0], distanceField: "y", spherical: true}}],
- expectedStage: "GEO_NEAR_2D"
- });
- assert.commandWorked(coll.dropIndexes());
+// Pipelines which cannot be optimized away.
+
+// TODO SERVER-40254: Uncovered queries.
+assert.writeOK(coll.insert({_id: 4, x: 40, a: {b: "ab1"}}));
+assertPipelineUsesAggregation({
+ pipeline: [{$project: {x: 1, _id: 0}}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{x: 10}, {x: 20}, {x: 30}, {x: 40}]
+});
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: 20}}, {$project: {x: 1, _id: 0}}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{x: 20}]
+});
+assertPipelineUsesAggregation({
+ pipeline: [{$project: {x: 1, "a.b": 1, _id: 0}}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{x: 10}, {x: 20}, {x: 30}, {x: 40, a: {b: "ab1"}}]
+});
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: 40}}, {$project: {"a.b": 1, _id: 0}}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{a: {b: "ab1"}}]
+});
+assert.commandWorked(coll.deleteOne({_id: 4}));
+
+// TODO SERVER-36723: $limit stage is not supported yet.
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: 20}}, {$limit: 1}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{_id: 2, x: 20}]
+});
+// TODO SERVER-36723: $skip stage is not supported yet.
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: {$gte: 20}}}, {$skip: 1}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{_id: 3, x: 30}]
+});
+// We cannot collapse a $project stage if it has a complex pipeline expression.
+assertPipelineUsesAggregation(
+ {pipeline: [{$project: {x: {$substr: ["$y", 0, 1]}, _id: 0}}], expectedStage: "COLLSCAN"});
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: 20}}, {$project: {x: {$substr: ["$y", 0, 1]}, _id: 0}}],
+ expectedStage: "COLLSCAN"
+});
+// We cannot optimize away a pipeline if there are stages which have no equivalent in the
+// find command.
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: {$gte: 20}}}, {$count: "count"}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{count: 2}]
+});
+assertPipelineUsesAggregation({
+ pipeline: [{$match: {x: {$gte: 20}}}, {$group: {_id: "null", s: {$sum: "$x"}}}],
+ expectedStage: "COLLSCAN",
+ expectedResult: [{_id: "null", s: 50}]
+});
+// TODO SERVER-40253: We cannot optimize away text search queries.
+assert.commandWorked(coll.createIndex({y: "text"}));
+assertPipelineUsesAggregation(
+ {pipeline: [{$match: {$text: {$search: "abc"}}}], expectedStage: "IXSCAN"});
+assert.commandWorked(coll.dropIndexes());
+// We cannot optimize away geo near queries.
+assert.commandWorked(coll.createIndex({"y": "2d"}));
+assertPipelineUsesAggregation({
+ pipeline: [{$geoNear: {near: [0, 0], distanceField: "y", spherical: true}}],
+ expectedStage: "GEO_NEAR_2D"
+});
+assert.commandWorked(coll.dropIndexes());
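
To make the "no find equivalent" point concrete: the $group sum above cannot be expressed through the find command at all, so reproducing it without the aggregation layer means folding on the client (a sketch):

let sum = 0;
coll.find({x: {$gte: 20}}, {x: 1, _id: 0}).forEach(doc => {
    sum += doc.x;  // Client-side fold; find has no server-side accumulator.
});
assert.eq(50, sum);
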
- // getMore cases.
+// getMore cases.
- // Test getMore on a collection with an optimized away pipeline.
+// Test getMore on a collection with an optimized away pipeline.
+testGetMore({
+ command: {aggregate: coll.getName(), pipeline: [], cursor: {batchSize: 1}},
+ expectedResult: [{_id: 1, x: 10}, {_id: 2, x: 20}, {_id: 3, x: 30}]
+});
+testGetMore({
+ command:
+ {aggregate: coll.getName(), pipeline: [{$match: {x: {$gte: 20}}}], cursor: {batchSize: 1}},
+ expectedResult: [{_id: 2, x: 20}, {_id: 3, x: 30}]
+});
+testGetMore({
+ command: {
+ aggregate: coll.getName(),
+ pipeline: [{$match: {x: {$gte: 20}}}, {$project: {x: 1, _id: 0}}],
+ cursor: {batchSize: 1}
+ },
+ expectedResult: [{x: 20}, {x: 30}]
+});
+// Test getMore on a view with an optimized away pipeline. Since views cannot be created when
+// implicitly sharded collection mode is on, this test is run only on a non-sharded
+// collection.
+let view;
+if (!FixtureHelpers.isSharded(coll)) {
+ view = db.optimize_away_pipeline_view;
+ view.drop();
+ assert.commandWorked(db.createView(view.getName(), coll.getName(), []));
testGetMore({
- command: {aggregate: coll.getName(), pipeline: [], cursor: {batchSize: 1}},
+ command: {find: view.getName(), filter: {}, batchSize: 1},
expectedResult: [{_id: 1, x: 10}, {_id: 2, x: 20}, {_id: 3, x: 30}]
});
+}
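
Views need not have an empty definition; a find against a filtered view is rewritten as an aggregate over the backing collection with the view pipeline prepended, and the combined pipeline may still be optimized away. A sketch on a non-sharded deployment (the view name is hypothetical):

assert.commandWorked(
    db.createView("x_at_least_20", coll.getName(), [{$match: {x: {$gte: 20}}}]));
assert.eq(2, db.x_at_least_20.find().itcount());
db.x_at_least_20.drop();
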
+// Test that getMore puts the correct namespace into profile data for a collection with an
+// optimized away pipeline. This cannot be run on mongos, as profiling can be enabled only on
+// mongod, and profiling is supported only on WiredTiger.
+if (!FixtureHelpers.isMongos(db) && isWiredTiger(db)) {
+ db.system.profile.drop();
+ db.setProfilingLevel(2);
testGetMore({
command: {
aggregate: coll.getName(),
- pipeline: [{$match: {x: {$gte: 20}}}],
- cursor: {batchSize: 1}
+ pipeline: [{$match: {x: 10}}],
+ cursor: {batchSize: 1},
+ comment: 'optimize_away_pipeline'
},
- expectedResult: [{_id: 2, x: 20}, {_id: 3, x: 30}]
+ expectedResult: [{_id: 1, x: 10}]
});
- testGetMore({
- command: {
- aggregate: coll.getName(),
- pipeline: [{$match: {x: {$gte: 20}}}, {$project: {x: 1, _id: 0}}],
- cursor: {batchSize: 1}
- },
- expectedResult: [{x: 20}, {x: 30}]
- });
- // Test getMore on a view with an optimized away pipeline. Since views cannot be created when
- // imlicit sharded collection mode is on, this test will be run only on a non-sharded
- // collection.
- let view;
+ db.setProfilingLevel(0);
+ let profile = db.system.profile.find({}, {op: 1, ns: 1, comment: 'optimize_away_pipeline'})
+ .sort({ts: 1})
+ .toArray();
+ assert(arrayEq(
+ profile,
+ [{op: "command", ns: coll.getFullName()}, {op: "getmore", ns: coll.getFullName()}]));
+    // Test that getMore puts the correct namespace into profile data for a view with an
+    // optimized away pipeline.
if (!FixtureHelpers.isSharded(coll)) {
- view = db.optimize_away_pipeline_view;
- view.drop();
- assert.commandWorked(db.createView(view.getName(), coll.getName(), []));
- testGetMore({
- command: {find: view.getName(), filter: {}, batchSize: 1},
- expectedResult: [{_id: 1, x: 10}, {_id: 2, x: 20}, {_id: 3, x: 30}]
- });
- }
- // Test getMore puts a correct namespace into profile data for a colletion with optimized away
- // pipeline. Cannot be run on mongos as profiling can be enabled only on mongod. Also profiling
- // is supported on WiredTiger only.
- if (!FixtureHelpers.isMongos(db) && isWiredTiger(db)) {
db.system.profile.drop();
db.setProfilingLevel(2);
testGetMore({
command: {
- aggregate: coll.getName(),
- pipeline: [{$match: {x: 10}}],
- cursor: {batchSize: 1},
+ find: view.getName(),
+ filter: {x: 10},
+ batchSize: 1,
comment: 'optimize_away_pipeline'
},
expectedResult: [{_id: 1, x: 10}]
});
db.setProfilingLevel(0);
- let profile = db.system.profile.find({}, {op: 1, ns: 1, comment: 'optimize_away_pipeline'})
- .sort({ts: 1})
- .toArray();
+ profile = db.system.profile.find({}, {op: 1, ns: 1, comment: 'optimize_away_pipeline'})
+ .sort({ts: 1})
+ .toArray();
assert(arrayEq(
profile,
- [{op: "command", ns: coll.getFullName()}, {op: "getmore", ns: coll.getFullName()}]));
- // Test getMore puts a correct namespace into profile data for a view with an optimized away
- // pipeline.
- if (!FixtureHelpers.isSharded(coll)) {
- db.system.profile.drop();
- db.setProfilingLevel(2);
- testGetMore({
- command: {
- find: view.getName(),
- filter: {x: 10},
- batchSize: 1,
- comment: 'optimize_away_pipeline'
- },
- expectedResult: [{_id: 1, x: 10}]
- });
- db.setProfilingLevel(0);
- profile = db.system.profile.find({}, {op: 1, ns: 1, comment: 'optimize_away_pipeline'})
- .sort({ts: 1})
- .toArray();
- assert(arrayEq(
- profile,
- [{op: "query", ns: view.getFullName()}, {op: "getmore", ns: view.getFullName()}]));
- }
+ [{op: "query", ns: view.getFullName()}, {op: "getmore", ns: view.getFullName()}]));
}
+}
}());
diff --git a/jstests/aggregation/pipeline_pass_through_from_mongos.js b/jstests/aggregation/pipeline_pass_through_from_mongos.js
index e98bbc8d854..3c3694e3931 100644
--- a/jstests/aggregation/pipeline_pass_through_from_mongos.js
+++ b/jstests/aggregation/pipeline_pass_through_from_mongos.js
@@ -5,49 +5,48 @@
* @tags: [requires_sharding]
*/
(function() {
- 'use strict';
+'use strict';
- load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
+load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
- const st = new ShardingTest({shards: 2});
- const mongosDB = st.s0.getDB(jsTestName());
- assert.commandWorked(st.s0.adminCommand({enableSharding: jsTestName()}));
- st.ensurePrimaryShard(jsTestName(), st.shard0.shardName);
- const mongosColl = mongosDB.test;
- const primaryShard = st.shard0.getDB(jsTestName());
- const shard1DB = st.shard1.getDB(jsTestName());
+const st = new ShardingTest({shards: 2});
+const mongosDB = st.s0.getDB(jsTestName());
+assert.commandWorked(st.s0.adminCommand({enableSharding: jsTestName()}));
+st.ensurePrimaryShard(jsTestName(), st.shard0.shardName);
+const mongosColl = mongosDB.test;
+const primaryShard = st.shard0.getDB(jsTestName());
+const shard1DB = st.shard1.getDB(jsTestName());
- assert.commandWorked(primaryShard.setProfilingLevel(2));
- assert.commandWorked(shard1DB.setProfilingLevel(2));
+assert.commandWorked(primaryShard.setProfilingLevel(2));
+assert.commandWorked(shard1DB.setProfilingLevel(2));
- // Verify that the $lookup is passed through to the primary shard when all its sub-pipeline
- // stages can be passed through.
- let testName = "sub_pipeline_can_be_passed_through";
- assert.commandWorked(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: [{
- $lookup:
- {pipeline: [{$match: {a: "val"}}], from: mongosDB.otherColl.getName(), as: "c"}
- }],
- cursor: {},
- comment: testName
- }));
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: primaryShard,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
+// Verify that the $lookup is passed through to the primary shard when all its sub-pipeline
+// stages can be passed through.
+let testName = "sub_pipeline_can_be_passed_through";
+assert.commandWorked(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: [
+ {$lookup: {pipeline: [{$match: {a: "val"}}], from: mongosDB.otherColl.getName(), as: "c"}}
+ ],
+ cursor: {},
+ comment: testName
+}));
+profilerHasSingleMatchingEntryOrThrow({
+ profileDB: primaryShard,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
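
The comment field is what ties these profiler checks to a single test case: it is recorded verbatim on each profile entry, so filtering on it isolates the dispatches for one command (a sketch):

const entries = primaryShard.system.profile.find({"command.comment": testName}, {op: 1, ns: 1})
                    .toArray();
printjson(entries);  // Expect exactly the one aggregate dispatched above.
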
- // Test to verify that the mongoS doesn't pass the pipeline through to the primary shard when
- // $lookup's sub-pipeline has one or more stages which don't allow passthrough. In this
- // sub-pipeline, the $merge stage is not allowed to pass through, which forces the pipeline to
- // be parsed on mongoS. Since $merge is not allowed within a $lookup, the command thus fails on
- // mongoS without ever reaching a shard. This test-case exercises the bug described in
- // SERVER-41290.
- const pipelineForLookup = [
+// Test to verify that the mongoS doesn't pass the pipeline through to the primary shard when
+// $lookup's sub-pipeline has one or more stages which don't allow passthrough. In this
+// sub-pipeline, the $merge stage is not allowed to pass through, which forces the pipeline to
+// be parsed on mongoS. Since $merge is not allowed within a $lookup, the command thus fails on
+// mongoS without ever reaching a shard. This test-case exercises the bug described in
+// SERVER-41290.
+const pipelineForLookup = [
{
$lookup: {
pipeline: [{$match: {a: "val"}}, {$merge: {into: "merge_collection"}}],
@@ -56,25 +55,25 @@
}
},
];
- testName = "lookup_with_merge_cannot_be_passed_through";
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: pipelineForLookup,
- cursor: {},
- comment: testName
- }),
- 51047);
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShard,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
+testName = "lookup_with_merge_cannot_be_passed_through";
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: pipelineForLookup,
+ cursor: {},
+ comment: testName
+}),
+ 51047);
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShard,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
- // Same test as the above with another level of nested $lookup.
- const pipelineForNestedLookup = [{
+// Same test as the above with another level of nested $lookup.
+const pipelineForNestedLookup = [{
$lookup: {
from: mongosDB.otherColl.getName(),
as: "field",
@@ -87,81 +86,80 @@
}]
}
}];
- testName = "nested_lookup_with_merge_cannot_be_passed_through";
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: pipelineForNestedLookup,
- cursor: {},
- comment: testName
- }),
- 51047);
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShard,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
+testName = "nested_lookup_with_merge_cannot_be_passed_through";
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: pipelineForNestedLookup,
+ cursor: {},
+ comment: testName
+}),
+ 51047);
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShard,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
- // Test to verify that the mongoS doesn't pass the pipeline through to the primary shard when
- // one or more of $facet's sub-pipelines have one or more stages which don't allow passthrough.
- // In this sub-pipeline, the $merge stage is not allowed to pass through, which forces the
- // pipeline to be parsed on mongoS. Since $merge is not allowed within a $facet, the command
- // thus fails on mongoS without ever reaching a shard. This test-case exercises the bug
- // described in SERVER-41290.
- const pipelineForFacet = [
- {
- $facet: {
- field0: [{$match: {a: "val"}}],
- field1: [{$match: {a: "val"}}, {$merge: {into: "merge_collection"}}],
- }
- },
- ];
- testName = "facet_with_merge_cannot_be_passed_through";
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: pipelineForFacet,
- cursor: {},
- comment: testName
- }),
- 40600);
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShard,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
+// Test to verify that the mongoS doesn't pass the pipeline through to the primary shard when
+// one or more of $facet's sub-pipelines have one or more stages which don't allow passthrough.
+// In this sub-pipeline, the $merge stage is not allowed to pass through, which forces the
+// pipeline to be parsed on mongoS. Since $merge is not allowed within a $facet, the command
+// thus fails on mongoS without ever reaching a shard. This test-case exercises the bug
+// described in SERVER-41290.
+const pipelineForFacet = [
+ {
+ $facet: {
+ field0: [{$match: {a: "val"}}],
+ field1: [{$match: {a: "val"}}, {$merge: {into: "merge_collection"}}],
+ }
+ },
+];
+testName = "facet_with_merge_cannot_be_passed_through";
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: pipelineForFacet,
+ cursor: {},
+ comment: testName
+}),
+ 40600);
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShard,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
- // Same test as the above with another level of nested $facet.
- const pipelineForNestedFacet = [
- {
- $facet: {
- field0: [{$match: {a: "val"}}],
- field1: [
- {$facet: {field2: [{$match: {a: "val"}}, {$merge: {into: "merge_collection"}}]}}
- ],
- }
- },
- ];
- testName = "facet_with_merge_cannot_be_passed_through";
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: pipelineForFacet,
- cursor: {},
- comment: testName
- }),
- 40600);
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShard,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
+// Same test as the above with another level of nested $facet.
+const pipelineForNestedFacet = [
+ {
+ $facet: {
+ field0: [{$match: {a: "val"}}],
+ field1:
+ [{$facet: {field2: [{$match: {a: "val"}}, {$merge: {into: "merge_collection"}}]}}],
+ }
+ },
+];
+testName = "facet_with_merge_cannot_be_passed_through";
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: pipelineForFacet,
+ cursor: {},
+ comment: testName
+}),
+ 40600);
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShard,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
+profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+});
- st.stop();
+st.stop();
})();
diff --git a/jstests/aggregation/shard_targeting.js b/jstests/aggregation/shard_targeting.js
index 800357ab324..1654c17760d 100644
--- a/jstests/aggregation/shard_targeting.js
+++ b/jstests/aggregation/shard_targeting.js
@@ -22,361 +22,358 @@
* ]
*/
(function() {
- load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
-
- const st = new ShardingTest({shards: 2, mongos: 2, config: 1});
-
- // mongosForAgg will be used to perform all aggregations.
- // mongosForMove does all chunk migrations, leaving mongosForAgg with stale config metadata.
- const mongosForAgg = st.s0;
- const mongosForMove = st.s1;
-
- const mongosDB = mongosForAgg.getDB(jsTestName());
- const mongosColl = mongosDB.test;
-
- const shard0DB = primaryShardDB = st.shard0.getDB(jsTestName());
- const shard1DB = st.shard1.getDB(jsTestName());
-
- // Turn off best-effort recipient metadata refresh post-migration commit on both shards because
- // it creates non-determinism for the profiler.
- assert.commandWorked(st.shard0.getDB('admin').runCommand(
- {configureFailPoint: 'doNotRefreshRecipientAfterCommit', mode: 'alwaysOn'}));
- assert.commandWorked(st.shard1.getDB('admin').runCommand(
- {configureFailPoint: 'doNotRefreshRecipientAfterCommit', mode: 'alwaysOn'}));
-
- // Turn off automatic shard refresh in mongos when a stale config error is thrown.
- assert.commandWorked(mongosForAgg.getDB('admin').runCommand(
- {configureFailPoint: 'doNotRefreshShardsOnRetargettingError', mode: 'alwaysOn'}));
-
- assert.commandWorked(mongosDB.dropDatabase());
-
- // Enable sharding on the test DB and ensure its primary is st.shard0.shardName.
- assert.commandWorked(mongosDB.adminCommand({enableSharding: mongosDB.getName()}));
- st.ensurePrimaryShard(mongosDB.getName(), st.shard0.shardName);
-
- // Shard the test collection on _id.
- assert.commandWorked(
- mongosDB.adminCommand({shardCollection: mongosColl.getFullName(), key: {_id: 1}}));
-
- // Split the collection into 4 chunks: [MinKey, -100), [-100, 0), [0, 100), [100, MaxKey).
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: -100}}));
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 0}}));
- assert.commandWorked(
- mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 100}}));
-
- // Move the [0, 100) and [100, MaxKey) chunks to st.shard1.shardName.
- assert.commandWorked(mongosDB.adminCommand(
- {moveChunk: mongosColl.getFullName(), find: {_id: 50}, to: st.shard1.shardName}));
- assert.commandWorked(mongosDB.adminCommand(
- {moveChunk: mongosColl.getFullName(), find: {_id: 150}, to: st.shard1.shardName}));
-
- // Write one document into each of the chunks.
- assert.writeOK(mongosColl.insert({_id: -150}));
- assert.writeOK(mongosColl.insert({_id: -50}));
- assert.writeOK(mongosColl.insert({_id: 50}));
- assert.writeOK(mongosColl.insert({_id: 150}));
-
- const shardExceptions =
- [ErrorCodes.StaleConfig, ErrorCodes.StaleShardVersion, ErrorCodes.StaleEpoch];
-
- // Create an $_internalSplitPipeline stage that forces the merge to occur on the Primary shard.
- const forcePrimaryMerge = [{$_internalSplitPipeline: {mergeType: "primaryShard"}}];
-
- function runAggShardTargetTest({splitPoint}) {
- // Ensure that both mongoS have up-to-date caches, and enable the profiler on both shards.
- assert.commandWorked(mongosForAgg.getDB("admin").runCommand({flushRouterConfig: 1}));
- assert.commandWorked(mongosForMove.getDB("admin").runCommand({flushRouterConfig: 1}));
-
- assert.commandWorked(shard0DB.setProfilingLevel(2));
- assert.commandWorked(shard1DB.setProfilingLevel(2));
-
- //
- // Test cases.
- //
-
- let testName, outColl;
-
- // Test that a range query is passed through if the chunks encompassed by the query all lie
- // on a single shard, in this case st.shard0.shardName.
- testName = "agg_shard_targeting_range_single_shard_all_chunks_on_same_shard";
- assert.eq(mongosColl
- .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}].concat(splitPoint),
- {comment: testName})
- .itcount(),
- 2);
-
- // We expect one aggregation on shard0, none on shard1, and no $mergeCursors on shard0 (the
- // primary shard).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard0DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShardDB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: 1}
- }
- });
-
- // Test that a range query with a stage that requires a primary shard merge ($out in this
- // case) is passed through if the chunk ranges encompassed by the query all lie on the
- // primary shard.
- testName = "agg_shard_targeting_range_all_chunks_on_primary_shard_out_no_merge";
- outColl = mongosDB[testName];
-
- assert.commandWorked(mongosDB.runCommand({
- aggregate: mongosColl.getName(),
- pipeline: [{$match: {_id: {$gte: -150, $lte: -50}}}].concat(splitPoint).concat([
- {$out: testName}
- ]),
- comment: testName,
- cursor: {}
- }));
-
- // We expect one aggregation on shard0, none on shard1, and no $mergeCursors on shard0 (the
- // primary shard).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard0DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: shard1DB,
- filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
- });
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShardDB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: 1}
- }
- });
-
- // Verify that the contents of the $out collection are as expected.
- assert.eq(outColl.find().sort({_id: 1}).toArray(), [{_id: -150}, {_id: -50}]);
-
- // Test that a passthrough will back out and split the pipeline if we try to target a single
- // shard, get a stale config exception, and find that more than one shard is now involved.
- // Move the _id: [-100, 0) chunk from st.shard0.shardName to st.shard1.shardName via
- // mongosForMove.
- assert.commandWorked(mongosForMove.getDB("admin").runCommand({
- moveChunk: mongosColl.getFullName(),
- find: {_id: -50},
- to: st.shard1.shardName,
- }));
-
- // Run the same aggregation that targeted a single shard via the now-stale mongoS. It should
- // attempt to send the aggregation to st.shard0.shardName, hit a stale config exception,
- // split the pipeline and redispatch. We append an $_internalSplitPipeline stage in order to
- // force a shard merge rather than a mongoS merge.
- testName = "agg_shard_targeting_backout_passthrough_and_split_if_cache_is_stale";
- assert.eq(mongosColl
- .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}]
- .concat(splitPoint)
- .concat(forcePrimaryMerge),
- {comment: testName})
- .itcount(),
- 2);
-
- // Before the first dispatch:
- // - mongosForMove and st.shard0.shardName (the donor shard) are up to date.
- // - mongosForAgg and st.shard1.shardName are stale. mongosForAgg incorrectly believes that
- // the necessary data is all on st.shard0.shardName.
- //
- // We therefore expect that:
- // - mongosForAgg will throw a stale config error when it attempts to establish a
- // single-shard cursor on st.shard0.shardName (attempt 1).
- // - mongosForAgg will back out, refresh itself, and redispatch to both shards.
- // - st.shard1.shardName will throw a stale config and refresh itself when the split
- // pipeline is sent to it (attempt 2).
- // - mongosForAgg will back out and redispatch (attempt 3).
- // - The aggregation will succeed on the third dispatch.
-
- // We confirm this behaviour via the following profiler results:
-
- // - One aggregation on st.shard0.shardName with a shard version exception (indicating that
- // the mongoS was stale).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard0DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$in: shardExceptions}
- }
- });
-
- // - One aggregation on st.shard1.shardName with a shard version exception (indicating that
- // the shard was stale).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard1DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$in: shardExceptions}
- }
- });
-
- // - At most two aggregations on st.shard0.shardName with no stale config exceptions. The
- // first, if present, is an aborted cursor created if the command reaches
- // st.shard0.shardName before st.shard1.shardName throws its stale config exception during
- // attempt 2. The second profiler entry is from the aggregation which succeeded.
- profilerHasAtLeastOneAtMostNumMatchingEntriesOrThrow({
- profileDB: shard0DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$exists: false}
- },
- maxExpectedMatches: 2
- });
-
- // - One aggregation on st.shard1.shardName with no stale config exception.
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard1DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$exists: false}
- }
- });
-
- // - One $mergeCursors aggregation on primary st.shard0.shardName, since we eventually
- // target both shards after backing out the passthrough and splitting the pipeline.
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: primaryShardDB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: true}
- }
- });
-
- // Move the _id: [-100, 0) chunk back from st.shard1.shardName to st.shard0.shardName via
- // mongosForMove. Shard0 and mongosForAgg are now stale.
- assert.commandWorked(mongosForMove.getDB("admin").runCommand({
- moveChunk: mongosColl.getFullName(),
- find: {_id: -50},
- to: st.shard0.shardName,
- _waitForDelete: true
- }));
-
- // Run the same aggregation via the now-stale mongoS. It should split the pipeline, hit a
- // stale config exception, and reset to the original single-shard pipeline upon refresh. We
- // append an $_internalSplitPipeline stage in order to force a shard merge rather than a
- // mongoS merge.
- testName = "agg_shard_targeting_backout_split_pipeline_and_reassemble_if_cache_is_stale";
- assert.eq(mongosColl
- .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}]
- .concat(splitPoint)
- .concat(forcePrimaryMerge),
- {comment: testName})
- .itcount(),
- 2);
-
- // Before the first dispatch:
- // - mongosForMove and st.shard1.shardName (the donor shard) are up to date.
- // - mongosForAgg and st.shard0.shardName are stale. mongosForAgg incorrectly believes that
- // the necessary data is spread across both shards.
- //
- // We therefore expect that:
- // - mongosForAgg will throw a stale config error when it attempts to establish a cursor on
- // st.shard1.shardName (attempt 1).
- // - mongosForAgg will back out, refresh itself, and redispatch to st.shard0.shardName.
- // - st.shard0.shardName will throw a stale config and refresh itself when the pipeline is
- // sent to it (attempt 2).
- // - mongosForAgg will back out, and redispatch (attempt 3).
- // - The aggregation will succeed on the third dispatch.
-
- // We confirm this behaviour via the following profiler results:
-
- // - One aggregation on st.shard1.shardName with a shard version exception (indicating that
- // the mongoS was stale).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard1DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$in: shardExceptions}
- }
- });
-
- // - One aggregation on st.shard0.shardName with a shard version exception (indicating that
- // the shard was stale).
- profilerHasSingleMatchingEntryOrThrow({
- profileDB: shard0DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$in: shardExceptions}
- }
- });
-
- // - At most two aggregations on st.shard0.shardName with no stale config exceptions. The
- // first, if present, is an aborted cursor created if the command reaches
- // st.shard0.shardName before st.shard1.shardName throws its stale config exception during
- // attempt 1. The second profiler entry is the aggregation which succeeded.
- profilerHasAtLeastOneAtMostNumMatchingEntriesOrThrow({
- profileDB: shard0DB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: false},
- errCode: {$exists: false}
- },
- maxExpectedMatches: 2
- });
-
- // No $mergeCursors aggregation on primary st.shard0.shardName, since after backing out the
- // split pipeline we eventually target only st.shard0.shardName.
- profilerHasZeroMatchingEntriesOrThrow({
- profileDB: primaryShardDB,
- filter: {
- "command.aggregate": mongosColl.getName(),
- "command.comment": testName,
- "command.pipeline.$mergeCursors": {$exists: true}
- }
- });
-
- // Clean up the test run by dropping the $out collection and resetting the profiler.
- assert(outColl.drop());
-
- assert.commandWorked(shard0DB.setProfilingLevel(0));
- assert.commandWorked(shard1DB.setProfilingLevel(0));
-
- assert(shard0DB.system.profile.drop());
- assert(shard1DB.system.profile.drop());
- }
-
- // Run tests with a variety of splitpoints, testing the pipeline split and re-assembly logic in
- // cases where the merge pipeline is empty, where the split stage is moved from shard to merge
- // pipe ($facet, $lookup), and where there are both shard and merge versions of the split source
- // ($sort, $group, $limit). Each test case will ultimately produce the same output.
- runAggShardTargetTest({splitPoint: []});
- runAggShardTargetTest({splitPoint: [{$sort: {_id: 1}}]});
- runAggShardTargetTest({splitPoint: [{$group: {_id: "$_id"}}]});
- runAggShardTargetTest({splitPoint: [{$limit: 4}]});
- runAggShardTargetTest({
- splitPoint: [
- {$facet: {facetPipe: [{$match: {_id: {$gt: MinKey}}}]}},
- {$unwind: "$facetPipe"},
- {$replaceRoot: {newRoot: "$facetPipe"}}
- ]
+load("jstests/libs/profiler.js"); // For profilerHas*OrThrow helper functions.
+
+const st = new ShardingTest({shards: 2, mongos: 2, config: 1});
+
+// mongosForAgg will be used to perform all aggregations.
+// mongosForMove does all chunk migrations, leaving mongosForAgg with stale config metadata.
+const mongosForAgg = st.s0;
+const mongosForMove = st.s1;
+
+const mongosDB = mongosForAgg.getDB(jsTestName());
+const mongosColl = mongosDB.test;
+
+const shard0DB = st.shard0.getDB(jsTestName());
+const primaryShardDB = shard0DB;
+const shard1DB = st.shard1.getDB(jsTestName());
+
+// Turn off best-effort recipient metadata refresh post-migration commit on both shards because
+// it creates non-determinism for the profiler.
+assert.commandWorked(st.shard0.getDB('admin').runCommand(
+ {configureFailPoint: 'doNotRefreshRecipientAfterCommit', mode: 'alwaysOn'}));
+assert.commandWorked(st.shard1.getDB('admin').runCommand(
+ {configureFailPoint: 'doNotRefreshRecipientAfterCommit', mode: 'alwaysOn'}));
+
+// Turn off automatic shard refresh in mongos when a stale config error is thrown.
+assert.commandWorked(mongosForAgg.getDB('admin').runCommand(
+ {configureFailPoint: 'doNotRefreshShardsOnRetargettingError', mode: 'alwaysOn'}));
+
+assert.commandWorked(mongosDB.dropDatabase());
+
+// Enable sharding on the test DB and ensure its primary is st.shard0.shardName.
+assert.commandWorked(mongosDB.adminCommand({enableSharding: mongosDB.getName()}));
+st.ensurePrimaryShard(mongosDB.getName(), st.shard0.shardName);
+
+// Shard the test collection on _id.
+assert.commandWorked(
+ mongosDB.adminCommand({shardCollection: mongosColl.getFullName(), key: {_id: 1}}));
+
+// Split the collection into 4 chunks: [MinKey, -100), [-100, 0), [0, 100), [100, MaxKey).
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: -100}}));
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 0}}));
+assert.commandWorked(mongosDB.adminCommand({split: mongosColl.getFullName(), middle: {_id: 100}}));
+
+// Move the [0, 100) and [100, MaxKey) chunks to st.shard1.shardName.
+assert.commandWorked(mongosDB.adminCommand(
+ {moveChunk: mongosColl.getFullName(), find: {_id: 50}, to: st.shard1.shardName}));
+assert.commandWorked(mongosDB.adminCommand(
+ {moveChunk: mongosColl.getFullName(), find: {_id: 150}, to: st.shard1.shardName}));
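
The resulting chunk layout can be read back from the config server's routing table (a sketch, assuming the 4.2-era config.chunks schema keyed by "ns"):

mongosDB.getSiblingDB("config")
    .chunks.find({ns: mongosColl.getFullName()}, {_id: 0, min: 1, max: 1, shard: 1})
    .sort({min: 1})
    .forEach(chunk => printjson(chunk));  // Four chunks, two per shard.
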
+
+// Write one document into each of the chunks.
+assert.writeOK(mongosColl.insert({_id: -150}));
+assert.writeOK(mongosColl.insert({_id: -50}));
+assert.writeOK(mongosColl.insert({_id: 50}));
+assert.writeOK(mongosColl.insert({_id: 150}));
+
+const shardExceptions =
+ [ErrorCodes.StaleConfig, ErrorCodes.StaleShardVersion, ErrorCodes.StaleEpoch];
+
+// Create an $_internalSplitPipeline stage that forces the merge to occur on the Primary shard.
+const forcePrimaryMerge = [{$_internalSplitPipeline: {mergeType: "primaryShard"}}];
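
$_internalSplitPipeline is a test-only stage whose mergeType argument pins where the merging half of a split pipeline runs; "mongos" and "anyShard" are assumed here to be the other accepted values (a sketch):

// Force the merge onto mongos rather than a shard:
mongosColl.aggregate([{$sort: {_id: 1}}, {$_internalSplitPipeline: {mergeType: "mongos"}}]);
// Or allow any shard to host the merge:
mongosColl.aggregate([{$sort: {_id: 1}}, {$_internalSplitPipeline: {mergeType: "anyShard"}}]);
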
+
+function runAggShardTargetTest({splitPoint}) {
+ // Ensure that both mongoS have up-to-date caches, and enable the profiler on both shards.
+ assert.commandWorked(mongosForAgg.getDB("admin").runCommand({flushRouterConfig: 1}));
+ assert.commandWorked(mongosForMove.getDB("admin").runCommand({flushRouterConfig: 1}));
+
+ assert.commandWorked(shard0DB.setProfilingLevel(2));
+ assert.commandWorked(shard1DB.setProfilingLevel(2));
+
+ //
+ // Test cases.
+ //
+
+ let testName, outColl;
+
+ // Test that a range query is passed through if the chunks encompassed by the query all lie
+ // on a single shard, in this case st.shard0.shardName.
+ testName = "agg_shard_targeting_range_single_shard_all_chunks_on_same_shard";
+ assert.eq(mongosColl
+ .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}].concat(splitPoint),
+ {comment: testName})
+ .itcount(),
+ 2);
+
+ // We expect one aggregation on shard0, none on shard1, and no $mergeCursors on shard0 (the
+ // primary shard).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard0DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+ });
+ profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+ });
+ profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShardDB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: 1}
+ }
+ });
+
+ // Test that a range query with a stage that requires a primary shard merge ($out in this
+ // case) is passed through if the chunk ranges encompassed by the query all lie on the
+ // primary shard.
+ testName = "agg_shard_targeting_range_all_chunks_on_primary_shard_out_no_merge";
+ outColl = mongosDB[testName];
+
+ assert.commandWorked(mongosDB.runCommand({
+ aggregate: mongosColl.getName(),
+ pipeline: [{$match: {_id: {$gte: -150, $lte: -50}}}].concat(splitPoint).concat([
+ {$out: testName}
+ ]),
+ comment: testName,
+ cursor: {}
+ }));
+
+ // We expect one aggregation on shard0, none on shard1, and no $mergeCursors on shard0 (the
+ // primary shard).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard0DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
});
- runAggShardTargetTest({
+ profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: shard1DB,
+ filter: {"command.aggregate": mongosColl.getName(), "command.comment": testName}
+ });
+ profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShardDB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: 1}
+ }
+ });
+
+ // Verify that the contents of the $out collection are as expected.
+ assert.eq(outColl.find().sort({_id: 1}).toArray(), [{_id: -150}, {_id: -50}]);
+
+ // Test that a passthrough will back out and split the pipeline if we try to target a single
+ // shard, get a stale config exception, and find that more than one shard is now involved.
+ // Move the _id: [-100, 0) chunk from st.shard0.shardName to st.shard1.shardName via
+ // mongosForMove.
+ assert.commandWorked(mongosForMove.getDB("admin").runCommand({
+ moveChunk: mongosColl.getFullName(),
+ find: {_id: -50},
+ to: st.shard1.shardName,
+ }));
+
+ // Run the same aggregation that targeted a single shard via the now-stale mongoS. It should
+ // attempt to send the aggregation to st.shard0.shardName, hit a stale config exception,
+ // split the pipeline and redispatch. We append an $_internalSplitPipeline stage in order to
+ // force a shard merge rather than a mongoS merge.
+ testName = "agg_shard_targeting_backout_passthrough_and_split_if_cache_is_stale";
+ assert.eq(mongosColl
+ .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}]
+ .concat(splitPoint)
+ .concat(forcePrimaryMerge),
+ {comment: testName})
+ .itcount(),
+ 2);
+
+ // Before the first dispatch:
+ // - mongosForMove and st.shard0.shardName (the donor shard) are up to date.
+ // - mongosForAgg and st.shard1.shardName are stale. mongosForAgg incorrectly believes that
+ // the necessary data is all on st.shard0.shardName.
+ //
+ // We therefore expect that:
+ // - mongosForAgg will throw a stale config error when it attempts to establish a
+ // single-shard cursor on st.shard0.shardName (attempt 1).
+ // - mongosForAgg will back out, refresh itself, and redispatch to both shards.
+ // - st.shard1.shardName will throw a stale config and refresh itself when the split
+ // pipeline is sent to it (attempt 2).
+ // - mongosForAgg will back out and redispatch (attempt 3).
+ // - The aggregation will succeed on the third dispatch.
+
+ // We confirm this behaviour via the following profiler results:
+
+ // - One aggregation on st.shard0.shardName with a shard version exception (indicating that
+ // the mongoS was stale).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard0DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$in: shardExceptions}
+ }
+ });
+
+ // - One aggregation on st.shard1.shardName with a shard version exception (indicating that
+ // the shard was stale).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard1DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$in: shardExceptions}
+ }
+ });
+
+ // - At most two aggregations on st.shard0.shardName with no stale config exceptions. The
+ // first, if present, is an aborted cursor created if the command reaches
+ // st.shard0.shardName before st.shard1.shardName throws its stale config exception during
+ // attempt 2. The second profiler entry is from the aggregation which succeeded.
+ profilerHasAtLeastOneAtMostNumMatchingEntriesOrThrow({
+ profileDB: shard0DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$exists: false}
+ },
+ maxExpectedMatches: 2
+ });
+
+ // - One aggregation on st.shard1.shardName with no stale config exception.
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard1DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$exists: false}
+ }
+ });
+
+ // - One $mergeCursors aggregation on primary st.shard0.shardName, since we eventually
+ // target both shards after backing out the passthrough and splitting the pipeline.
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: primaryShardDB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: true}
+ }
+ });
+
+ // Move the _id: [-100, 0) chunk back from st.shard1.shardName to st.shard0.shardName via
+ // mongosForMove. Shard0 and mongosForAgg are now stale.
+ assert.commandWorked(mongosForMove.getDB("admin").runCommand({
+ moveChunk: mongosColl.getFullName(),
+ find: {_id: -50},
+ to: st.shard0.shardName,
+ _waitForDelete: true
+ }));
+
+ // Run the same aggregation via the now-stale mongoS. It should split the pipeline, hit a
+ // stale config exception, and reset to the original single-shard pipeline upon refresh. We
+ // append an $_internalSplitPipeline stage in order to force a shard merge rather than a
+ // mongoS merge.
+ testName = "agg_shard_targeting_backout_split_pipeline_and_reassemble_if_cache_is_stale";
+ assert.eq(mongosColl
+ .aggregate([{$match: {_id: {$gte: -150, $lte: -50}}}]
+ .concat(splitPoint)
+ .concat(forcePrimaryMerge),
+ {comment: testName})
+ .itcount(),
+ 2);
+
+ // Before the first dispatch:
+ // - mongosForMove and st.shard1.shardName (the donor shard) are up to date.
+ // - mongosForAgg and st.shard0.shardName are stale. mongosForAgg incorrectly believes that
+ // the necessary data is spread across both shards.
+ //
+ // We therefore expect that:
+ // - mongosForAgg will throw a stale config error when it attempts to establish a cursor on
+ // st.shard1.shardName (attempt 1).
+ // - mongosForAgg will back out, refresh itself, and redispatch to st.shard0.shardName.
+ // - st.shard0.shardName will throw a stale config and refresh itself when the pipeline is
+ // sent to it (attempt 2).
+ // - mongosForAgg will back out, and redispatch (attempt 3).
+ // - The aggregation will succeed on the third dispatch.
+
+ // We confirm this behaviour via the following profiler results:
+
+ // - One aggregation on st.shard1.shardName with a shard version exception (indicating that
+ // the mongoS was stale).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard1DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$in: shardExceptions}
+ }
+ });
+
+ // - One aggregation on st.shard0.shardName with a shard version exception (indicating that
+ // the shard was stale).
+ profilerHasSingleMatchingEntryOrThrow({
+ profileDB: shard0DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$in: shardExceptions}
+ }
+ });
+
+ // - At most two aggregations on st.shard0.shardName with no stale config exceptions. The
+ // first, if present, is an aborted cursor created if the command reaches
+ // st.shard0.shardName before st.shard1.shardName throws its stale config exception during
+ // attempt 1. The second profiler entry is the aggregation which succeeded.
+ profilerHasAtLeastOneAtMostNumMatchingEntriesOrThrow({
+ profileDB: shard0DB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: false},
+ errCode: {$exists: false}
+ },
+ maxExpectedMatches: 2
+ });
+
+ // No $mergeCursors aggregation on primary st.shard0.shardName, since after backing out the
+ // split pipeline we eventually target only st.shard0.shardName.
+ profilerHasZeroMatchingEntriesOrThrow({
+ profileDB: primaryShardDB,
+ filter: {
+ "command.aggregate": mongosColl.getName(),
+ "command.comment": testName,
+ "command.pipeline.$mergeCursors": {$exists: true}
+ }
+ });
+
+ // Clean up the test run by dropping the $out collection and resetting the profiler.
+ assert(outColl.drop());
+
+ assert.commandWorked(shard0DB.setProfilingLevel(0));
+ assert.commandWorked(shard1DB.setProfilingLevel(0));
+
+ assert(shard0DB.system.profile.drop());
+ assert(shard1DB.system.profile.drop());
+}
+
+// Run tests with a variety of split points, testing the pipeline split and re-assembly logic in
+// cases where the merge pipeline is empty, where the split stage is moved from shard to merge
+// pipe ($facet, $lookup), and where there are both shard and merge versions of the split source
+// ($sort, $group, $limit). Each test case will ultimately produce the same output.
+runAggShardTargetTest({splitPoint: []});
+runAggShardTargetTest({splitPoint: [{$sort: {_id: 1}}]});
+runAggShardTargetTest({splitPoint: [{$group: {_id: "$_id"}}]});
+runAggShardTargetTest({splitPoint: [{$limit: 4}]});
+runAggShardTargetTest({
+ splitPoint: [
+ {$facet: {facetPipe: [{$match: {_id: {$gt: MinKey}}}]}},
+ {$unwind: "$facetPipe"},
+ {$replaceRoot: {newRoot: "$facetPipe"}}
+ ]
+});
+runAggShardTargetTest({
splitPoint: [
{
$lookup: {
@@ -390,5 +387,5 @@
]
});
- st.stop();
+st.stop();
})();
diff --git a/jstests/aggregation/sharded_agg_cleanup_on_error.js b/jstests/aggregation/sharded_agg_cleanup_on_error.js
index 56d2ae73f32..cbcb1f02e53 100644
--- a/jstests/aggregation/sharded_agg_cleanup_on_error.js
+++ b/jstests/aggregation/sharded_agg_cleanup_on_error.js
@@ -8,136 +8,135 @@
* @tags: [requires_sharding,do_not_wrap_aggregations_in_facets]
*/
(function() {
- "use strict";
-
- // For assertMergeFailsForAllModesWithCode.
- load("jstests/aggregation/extras/merge_helpers.js");
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
-
- const kFailPointName = "waitAfterPinningCursorBeforeGetMoreBatch";
- const kFailpointOptions = {shouldCheckForInterrupt: true};
-
- const st = new ShardingTest({shards: 2});
- const kDBName = "test";
- const kDivideByZeroErrCode = 16608;
- const mongosDB = st.s.getDB(kDBName);
- const shard0DB = st.shard0.getDB(kDBName);
- const shard1DB = st.shard1.getDB(kDBName);
-
- let coll = mongosDB.sharded_agg_cleanup_on_error;
-
- for (let i = 0; i < 10; i++) {
- assert.writeOK(coll.insert({_id: i}));
- }
-
- st.shardColl(coll, {_id: 1}, {_id: 5}, {_id: 6}, kDBName, false);
- st.ensurePrimaryShard(kDBName, st.shard0.name);
-
- function assertFailsAndCleansUpCursors({pipeline, errCode}) {
- let cmdRes = mongosDB.runCommand(
- {aggregate: coll.getName(), pipeline: pipeline, cursor: {batchSize: 0}});
- assert.commandWorked(cmdRes);
- assert.neq(0, cmdRes.cursor.id);
- assert.eq(coll.getFullName(), cmdRes.cursor.ns);
- assert.eq(0, cmdRes.cursor.firstBatch.length);
-
- cmdRes = mongosDB.runCommand({getMore: cmdRes.cursor.id, collection: coll.getName()});
- assert.commandFailedWithCode(cmdRes, errCode);
-
- // Neither mongos or the shards should leave cursors open. By the time we get here, the
- // cursor which was hanging on shard 1 will have been marked interrupted, but isn't
- // guaranteed to be deleted yet. Thus, we use an assert.soon().
- assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
- assert.eq(shard0DB.serverStatus().metrics.cursor.open.total, 0);
- assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.pinned == 0);
- }
-
- try {
- // Set up a fail point which causes getMore to hang on shard 1.
- assert.commandWorked(shard1DB.adminCommand(
- {configureFailPoint: kFailPointName, mode: "alwaysOn", data: kFailpointOptions}));
-
- // Issue an aggregregation that will fail during a getMore on shard 0, and make sure that
- // this correctly kills the hanging cursor on shard 1. Use $_internalSplitPipeline to ensure
- // that this pipeline merges on mongos.
- assertFailsAndCleansUpCursors({
- pipeline: [
- {$project: {out: {$divide: ["$_id", 0]}}},
- {$_internalSplitPipeline: {mergeType: "mongos"}}
- ],
- errCode: kDivideByZeroErrCode
- });
-
- // Repeat the test above, but this time use $_internalSplitPipeline to force the merge to
- // take place on a shard 0.
- assertFailsAndCleansUpCursors({
- pipeline: [
- {$project: {out: {$divide: ["$_id", 0]}}},
- {$_internalSplitPipeline: {mergeType: "primaryShard"}}
- ],
- errCode: kDivideByZeroErrCode
- });
- } finally {
- assert.commandWorked(
- shard0DB.adminCommand({configureFailPoint: kFailPointName, mode: "off"}));
- }
-
- // Test that aggregations which fail to establish a merging shard cursor also cleanup the open
- // shard cursors.
- try {
- // Enable the failpoint to fail on establishing a merging shard cursor.
- assert.commandWorked(mongosDB.adminCommand({
- configureFailPoint: "clusterAggregateFailToEstablishMergingShardCursor",
- mode: "alwaysOn"
- }));
-
- // Run an aggregation which requires a merging shard pipeline. This should fail because of
- // the failpoint.
- assertErrorCode(coll, [{$out: "target"}], ErrorCodes.FailPointEnabled);
-
- // Neither mongos or the shards should leave cursors open.
- assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
- assert.soon(() => shard0DB.serverStatus().metrics.cursor.open.total == 0);
- assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.total == 0);
-
- } finally {
- assert.commandWorked(mongosDB.adminCommand({
- configureFailPoint: "clusterAggregateFailToEstablishMergingShardCursor",
- mode: "off"
- }));
- }
-
- // Test that aggregations involving $exchange correctly clean up the producer cursors.
- try {
- assert.commandWorked(mongosDB.adminCommand({
- configureFailPoint: "clusterAggregateFailToDispatchExchangeConsumerPipeline",
- mode: "alwaysOn"
- }));
-
- // Run an aggregation which is eligible for $exchange. This should assert because of
- // the failpoint. Add a $group stage to force an exchange-eligible split of the pipeline
- // before the $merge. Without the $group we won't use the exchange optimization and instead
- // will send the $merge to each shard.
- st.shardColl(mongosDB.target, {_id: 1}, {_id: 0}, {_id: 1}, kDBName, false);
-
- assertMergeFailsForAllModesWithCode({
- source: coll,
- target: mongosDB.target,
- prevStages: [{$group: {_id: "$fakeShardKey"}}],
- errorCodes: ErrorCodes.FailPointEnabled
- });
-
- // Neither mongos or the shards should leave cursors open.
- assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
- assert.soon(() => shard0DB.serverStatus().metrics.cursor.open.total == 0);
- assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.total == 0);
-
- } finally {
- assert.commandWorked(mongosDB.adminCommand({
- configureFailPoint: "clusterAggregateFailToDispatchExchangeConsumerPipeline",
- mode: "off"
- }));
- }
-
- st.stop();
+"use strict";
+
+// For assertMergeFailsForAllModesWithCode.
+load("jstests/aggregation/extras/merge_helpers.js");
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+
+const kFailPointName = "waitAfterPinningCursorBeforeGetMoreBatch";
+const kFailpointOptions = {
+ shouldCheckForInterrupt: true
+};
+
+const st = new ShardingTest({shards: 2});
+const kDBName = "test";
+const kDivideByZeroErrCode = 16608;
+const mongosDB = st.s.getDB(kDBName);
+const shard0DB = st.shard0.getDB(kDBName);
+const shard1DB = st.shard1.getDB(kDBName);
+
+let coll = mongosDB.sharded_agg_cleanup_on_error;
+
+for (let i = 0; i < 10; i++) {
+ assert.writeOK(coll.insert({_id: i}));
+}
+
+st.shardColl(coll, {_id: 1}, {_id: 5}, {_id: 6}, kDBName, false);
+st.ensurePrimaryShard(kDBName, st.shard0.name);
+
+function assertFailsAndCleansUpCursors({pipeline, errCode}) {
+ let cmdRes = mongosDB.runCommand(
+ {aggregate: coll.getName(), pipeline: pipeline, cursor: {batchSize: 0}});
+ assert.commandWorked(cmdRes);
+ assert.neq(0, cmdRes.cursor.id);
+ assert.eq(coll.getFullName(), cmdRes.cursor.ns);
+ assert.eq(0, cmdRes.cursor.firstBatch.length);
+
+ cmdRes = mongosDB.runCommand({getMore: cmdRes.cursor.id, collection: coll.getName()});
+ assert.commandFailedWithCode(cmdRes, errCode);
+
+    // Neither mongos nor the shards should leave cursors open. By the time we get here, the
+ // cursor which was hanging on shard 1 will have been marked interrupted, but isn't
+ // guaranteed to be deleted yet. Thus, we use an assert.soon().
+ assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
+ assert.eq(shard0DB.serverStatus().metrics.cursor.open.total, 0);
+ assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.pinned == 0);
+}
+
+try {
+ // Set up a fail point which causes getMore to hang on shard 1.
+ assert.commandWorked(shard1DB.adminCommand(
+ {configureFailPoint: kFailPointName, mode: "alwaysOn", data: kFailpointOptions}));
+
+    // Issue an aggregation that will fail during a getMore on shard 0, and make sure that
+ // this correctly kills the hanging cursor on shard 1. Use $_internalSplitPipeline to ensure
+ // that this pipeline merges on mongos.
+ assertFailsAndCleansUpCursors({
+ pipeline: [
+ {$project: {out: {$divide: ["$_id", 0]}}},
+ {$_internalSplitPipeline: {mergeType: "mongos"}}
+ ],
+ errCode: kDivideByZeroErrCode
+ });
+
+ // Repeat the test above, but this time use $_internalSplitPipeline to force the merge to
+    // take place on shard 0.
+ assertFailsAndCleansUpCursors({
+ pipeline: [
+ {$project: {out: {$divide: ["$_id", 0]}}},
+ {$_internalSplitPipeline: {mergeType: "primaryShard"}}
+ ],
+ errCode: kDivideByZeroErrCode
+ });
+} finally {
+    assert.commandWorked(shard1DB.adminCommand({configureFailPoint: kFailPointName, mode: "off"}));
+}
+
+// Test that aggregations which fail to establish a merging shard cursor also clean up the open
+// shard cursors.
+try {
+ // Enable the failpoint to fail on establishing a merging shard cursor.
+ assert.commandWorked(mongosDB.adminCommand({
+ configureFailPoint: "clusterAggregateFailToEstablishMergingShardCursor",
+ mode: "alwaysOn"
+ }));
+
+ // Run an aggregation which requires a merging shard pipeline. This should fail because of
+ // the failpoint.
+ assertErrorCode(coll, [{$out: "target"}], ErrorCodes.FailPointEnabled);
+
+    // Neither mongos nor the shards should leave cursors open.
+ assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
+ assert.soon(() => shard0DB.serverStatus().metrics.cursor.open.total == 0);
+ assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.total == 0);
+
+} finally {
+ assert.commandWorked(mongosDB.adminCommand(
+ {configureFailPoint: "clusterAggregateFailToEstablishMergingShardCursor", mode: "off"}));
+}
+
+// Test that aggregations involving $exchange correctly clean up the producer cursors.
+try {
+ assert.commandWorked(mongosDB.adminCommand({
+ configureFailPoint: "clusterAggregateFailToDispatchExchangeConsumerPipeline",
+ mode: "alwaysOn"
+ }));
+
+ // Run an aggregation which is eligible for $exchange. This should assert because of
+ // the failpoint. Add a $group stage to force an exchange-eligible split of the pipeline
+ // before the $merge. Without the $group we won't use the exchange optimization and instead
+ // will send the $merge to each shard.
+ st.shardColl(mongosDB.target, {_id: 1}, {_id: 0}, {_id: 1}, kDBName, false);
+
+ assertMergeFailsForAllModesWithCode({
+ source: coll,
+ target: mongosDB.target,
+ prevStages: [{$group: {_id: "$fakeShardKey"}}],
+ errorCodes: ErrorCodes.FailPointEnabled
+ });
+
+    // Neither mongos nor the shards should leave cursors open.
+ assert.eq(mongosDB.serverStatus().metrics.cursor.open.total, 0);
+ assert.soon(() => shard0DB.serverStatus().metrics.cursor.open.total == 0);
+ assert.soon(() => shard1DB.serverStatus().metrics.cursor.open.total == 0);
+
+} finally {
+ assert.commandWorked(mongosDB.adminCommand({
+ configureFailPoint: "clusterAggregateFailToDispatchExchangeConsumerPipeline",
+ mode: "off"
+ }));
+}
+
+st.stop();
})();
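
A note on the pattern in this test: every failpoint it arms is disarmed in a finally block, so a failing assertion cannot leave the server wedged for later tests. A minimal sketch of that discipline (the handle and failpoint name are the ones used above; any failpoint follows the same shape):

    const fpName = "waitAfterPinningCursorBeforeGetMoreBatch";
    try {
        // Arm the failpoint on the node whose behavior we want to perturb.
        assert.commandWorked(
            shard1DB.adminCommand({configureFailPoint: fpName, mode: "alwaysOn"}));
        // ... run the operation that is expected to hit the failpoint ...
    } finally {
        // Always disarm on the same node, even if an assertion above threw.
        assert.commandWorked(
            shard1DB.adminCommand({configureFailPoint: fpName, mode: "off"}));
    }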
diff --git a/jstests/aggregation/single_stage_alias_error.js b/jstests/aggregation/single_stage_alias_error.js
index cdf440093cb..10426ea6245 100644
--- a/jstests/aggregation/single_stage_alias_error.js
+++ b/jstests/aggregation/single_stage_alias_error.js
@@ -4,38 +4,37 @@
*/
(function() {
- "use strict";
-
- // For assertErrMessageContains and assertErrMessageDoesNotContain.
- load("jstests/aggregation/extras/utils.js");
- const coll = db.single_stage_alias_error;
-
- coll.drop();
-
- // Assert that, despite the fact $set and $addFields are internally identical, error messages
- // use only the name used by the user.
- var pipeline = [{'$set': {}}];
- assertErrMsgContains(coll, pipeline, "$set");
- assertErrMsgDoesNotContain(coll, pipeline, "$addFields");
-
- pipeline = [{'$addFields': {}}];
- assertErrMsgContains(coll, pipeline, "$addFields");
- assertErrMsgDoesNotContain(coll, pipeline, "$set");
-
- // Assert that, despite the fact $unset is an alias for an exclusion projection, error messages
- // use only the name used by the user.
- pipeline = [{'$unset': [""]}];
- assertErrMsgContains(coll, pipeline, "$unset");
- assertErrMsgDoesNotContain(coll, pipeline, "$project");
-
- pipeline = [{'$project': [""]}];
- assertErrMsgContains(coll, pipeline, "$project");
- assertErrMsgDoesNotContain(coll, pipeline, "$unset");
-
- // Assert that, despite the fact that $replaceWith is just an alias for $replaceRoot, error
- // messages contain syntax that matches the documentation for whichever name the user inputs.
- var doc = {'_id': 0};
- coll.insert(doc);
- pipeline = [{'$replaceWith': "abc"}];
-
+"use strict";
+
+// For assertErrMsgContains and assertErrMsgDoesNotContain.
+load("jstests/aggregation/extras/utils.js");
+const coll = db.single_stage_alias_error;
+
+coll.drop();
+
+// Assert that, despite the fact that $set and $addFields are internally identical, error messages
+// use only the name used by the user.
+var pipeline = [{'$set': {}}];
+assertErrMsgContains(coll, pipeline, "$set");
+assertErrMsgDoesNotContain(coll, pipeline, "$addFields");
+
+pipeline = [{'$addFields': {}}];
+assertErrMsgContains(coll, pipeline, "$addFields");
+assertErrMsgDoesNotContain(coll, pipeline, "$set");
+
+// Assert that, despite the fact that $unset is an alias for an exclusion projection, error messages
+// use only the name used by the user.
+pipeline = [{'$unset': [""]}];
+assertErrMsgContains(coll, pipeline, "$unset");
+assertErrMsgDoesNotContain(coll, pipeline, "$project");
+
+pipeline = [{'$project': [""]}];
+assertErrMsgContains(coll, pipeline, "$project");
+assertErrMsgDoesNotContain(coll, pipeline, "$unset");
+
+// Assert that, despite the fact that $replaceWith is just an alias for $replaceRoot, error
+// messages contain syntax that matches the documentation for whichever name the user inputs.
+var doc = {'_id': 0};
+coll.insert(doc);
+pipeline = [{'$replaceWith': "abc"}];
})();
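
The test stops after building the $replaceWith pipeline without asserting on it. Assuming the alias-aware error behavior the preceding comment describes, a plausible (hypothetical) completion mirroring the earlier checks would be:

    // Hypothetical continuation: the error for a non-object replacement should
    // mention only the alias the user wrote, not its internal counterpart.
    assertErrMsgContains(coll, pipeline, "$replaceWith");
    assertErrMsgDoesNotContain(coll, pipeline, "$replaceRoot");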
diff --git a/jstests/aggregation/sources/addFields/use_cases.js b/jstests/aggregation/sources/addFields/use_cases.js
index b6f92fdb7b7..2f6f454ba5a 100644
--- a/jstests/aggregation/sources/addFields/use_cases.js
+++ b/jstests/aggregation/sources/addFields/use_cases.js
@@ -9,60 +9,59 @@
*/
(function() {
- "use strict";
+"use strict";
- // For arrayEq.
- load("jstests/aggregation/extras/utils.js");
+// For arrayEq.
+load("jstests/aggregation/extras/utils.js");
- const dbName = "test";
- const collName = jsTest.name();
+const dbName = "test";
+const collName = jsTest.name();
- function doExecutionTest(conn) {
- const coll = conn.getDB(dbName).getCollection(collName);
- coll.drop();
+function doExecutionTest(conn) {
+ const coll = conn.getDB(dbName).getCollection(collName);
+ coll.drop();
- // Insert a bunch of documents of the form above.
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.writeOK(coll.insert({"_id": i, "2i": i * 2, "3i": i * 3}));
- }
-
- // Add the minimum, maximum, and average temperatures, and make sure that doing the same
- // with addFields yields the correct answer.
- // First compute with $project, since we know all the fields in this document.
- let projectPipe = [{
- $project: {
- "2i": 1,
- "3i": 1,
- "6i^2": {"$multiply": ["$2i", "$3i"]},
- // _id is implicitly included.
- }
- }];
- let correct = coll.aggregate(projectPipe).toArray();
+ // Insert a bunch of documents of the form above.
+ const nDocs = 10;
+ for (let i = 0; i < nDocs; i++) {
+ assert.writeOK(coll.insert({"_id": i, "2i": i * 2, "3i": i * 3}));
+ }
- // Then compute the same results using $addFields.
- let addFieldsPipe = [{
- $addFields: {
- "6i^2": {"$multiply": ["$2i", "$3i"]},
- // All other fields are implicitly included.
- }
- }];
- let addFieldsResult = coll.aggregate(addFieldsPipe).toArray();
+    // Add a field computed from the existing ones, and make sure that doing the same with
+    // $addFields yields the correct answer.
+ // First compute with $project, since we know all the fields in this document.
+ let projectPipe = [{
+ $project: {
+ "2i": 1,
+ "3i": 1,
+ "6i^2": {"$multiply": ["$2i", "$3i"]},
+ // _id is implicitly included.
+ }
+ }];
+ let correct = coll.aggregate(projectPipe).toArray();
- // Then assert they are the same.
- assert(arrayEq(addFieldsResult, correct),
- "$addFields does not work the same as a $project with computed and included fields");
- }
+ // Then compute the same results using $addFields.
+ let addFieldsPipe = [{
+ $addFields: {
+ "6i^2": {"$multiply": ["$2i", "$3i"]},
+ // All other fields are implicitly included.
+ }
+ }];
+ let addFieldsResult = coll.aggregate(addFieldsPipe).toArray();
- // Test against the standalone started by resmoke.py.
- let conn = db.getMongo();
- doExecutionTest(conn);
- print("Success! Standalone execution use case test for $addFields passed.");
+ // Then assert they are the same.
+ assert(arrayEq(addFieldsResult, correct),
+ "$addFields does not work the same as a $project with computed and included fields");
+}
- // Test against a sharded cluster.
- let st = new ShardingTest({shards: 2});
- doExecutionTest(st.s0);
- st.stop();
- print("Success! Sharding use case test for $addFields passed.");
+// Test against the standalone started by resmoke.py.
+let conn = db.getMongo();
+doExecutionTest(conn);
+print("Success! Standalone execution use case test for $addFields passed.");
+// Test against a sharded cluster.
+let st = new ShardingTest({shards: 2});
+doExecutionTest(st.s0);
+st.stop();
+print("Success! Sharding use case test for $addFields passed.");
}());
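
The equivalence this test asserts is easy to see on a single document; a minimal sketch (the collection name is illustrative):

    // $addFields keeps every existing field and adds the computed one, so it
    // matches a $project that re-includes all known fields.
    const c = db.addfields_sketch;  // illustrative name
    c.drop();
    assert.writeOK(c.insert({_id: 1, "2i": 2, "3i": 3}));
    const out = c.aggregate([{$addFields: {"6i^2": {$multiply: ["$2i", "$3i"]}}}]).toArray()[0];
    assert.eq(out["2i"], 2);    // original fields survive
    assert.eq(out["3i"], 3);
    assert.eq(out["6i^2"], 6);  // computed field is added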
diff --git a/jstests/aggregation/sources/addFields/weather.js b/jstests/aggregation/sources/addFields/weather.js
index 79916ee13d6..16e570b843c 100644
--- a/jstests/aggregation/sources/addFields/weather.js
+++ b/jstests/aggregation/sources/addFields/weather.js
@@ -9,97 +9,96 @@
*/
(function() {
- "use strict";
+"use strict";
- // For arrayEq.
- load("jstests/aggregation/extras/utils.js");
+// For arrayEq.
+load("jstests/aggregation/extras/utils.js");
- const dbName = "test";
- const collName = jsTest.name();
+const dbName = "test";
+const collName = jsTest.name();
- Random.setRandomSeed();
+Random.setRandomSeed();
- /**
- * Helper to generate an array of specified length of numbers in the specified range.
- */
- function randomArray(length, minValue, maxValue) {
- let array = [];
- for (let i = 0; i < length; i++) {
- array.push((Random.rand() * (maxValue - minValue)) + minValue);
- }
- return array;
+/**
+ * Helper to generate an array of specified length of numbers in the specified range.
+ */
+function randomArray(length, minValue, maxValue) {
+ let array = [];
+ for (let i = 0; i < length; i++) {
+ array.push((Random.rand() * (maxValue - minValue)) + minValue);
}
+ return array;
+}
- /**
- * Helper to generate a randomized document with the following schema:
- * {
- * month: <integer month of year>,
- * day: <integer day of month>,
- * temperatures: <array of 24 decimal temperatures>
- * }
- */
- function generateRandomDocument() {
- const minTemp = -40;
- const maxTemp = 120;
-
- return {
- month: Random.randInt(12) + 1, // 1-12
- day: Random.randInt(31) + 1, // 1-31
- temperatures: randomArray(24, minTemp, maxTemp),
- };
- }
+/**
+ * Helper to generate a randomized document with the following schema:
+ * {
+ * month: <integer month of year>,
+ * day: <integer day of month>,
+ * temperatures: <array of 24 decimal temperatures>
+ * }
+ */
+function generateRandomDocument() {
+ const minTemp = -40;
+ const maxTemp = 120;
- function doExecutionTest(conn) {
- const coll = conn.getDB(dbName).getCollection(collName);
- coll.drop();
+ return {
+ month: Random.randInt(12) + 1, // 1-12
+ day: Random.randInt(31) + 1, // 1-31
+ temperatures: randomArray(24, minTemp, maxTemp),
+ };
+}
- // Insert a bunch of documents of the form above.
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.writeOK(coll.insert(generateRandomDocument()));
- }
+function doExecutionTest(conn) {
+ const coll = conn.getDB(dbName).getCollection(collName);
+ coll.drop();
- // Add the minimum, maximum, and average temperatures, and make sure that doing the same
- // with addFields yields the correct answer.
- // First compute with $project, since we know all the fields in this document.
- let projectWeatherPipe = [{
- $project: {
- "month": 1,
- "day": 1,
- "temperatures": 1,
- "minTemp": {"$min": "$temperatures"},
- "maxTemp": {"$max": "$temperatures"},
- "average": {"$avg": "$temperatures"},
- // _id is implicitly included.
- }
- }];
- let correctWeather = coll.aggregate(projectWeatherPipe).toArray();
+ // Insert a bunch of documents of the form above.
+ const nDocs = 10;
+ for (let i = 0; i < nDocs; i++) {
+ assert.writeOK(coll.insert(generateRandomDocument()));
+ }
- // Then compute the same results using $addFields.
- let addFieldsWeatherPipe = [{
- $addFields: {
- "minTemp": {"$min": "$temperatures"},
- "maxTemp": {"$max": "$temperatures"},
- "average": {"$avg": "$temperatures"},
- // All other fields are implicitly included.
- }
- }];
- let addFieldsResult = coll.aggregate(addFieldsWeatherPipe).toArray();
+ // Add the minimum, maximum, and average temperatures, and make sure that doing the same
+ // with addFields yields the correct answer.
+ // First compute with $project, since we know all the fields in this document.
+ let projectWeatherPipe = [{
+ $project: {
+ "month": 1,
+ "day": 1,
+ "temperatures": 1,
+ "minTemp": {"$min": "$temperatures"},
+ "maxTemp": {"$max": "$temperatures"},
+ "average": {"$avg": "$temperatures"},
+ // _id is implicitly included.
+ }
+ }];
+ let correctWeather = coll.aggregate(projectWeatherPipe).toArray();
- // Then assert they are the same.
- assert(arrayEq(addFieldsResult, correctWeather),
- "$addFields does not work the same as a $project with computed and included fields");
- }
+ // Then compute the same results using $addFields.
+ let addFieldsWeatherPipe = [{
+ $addFields: {
+ "minTemp": {"$min": "$temperatures"},
+ "maxTemp": {"$max": "$temperatures"},
+ "average": {"$avg": "$temperatures"},
+ // All other fields are implicitly included.
+ }
+ }];
+ let addFieldsResult = coll.aggregate(addFieldsWeatherPipe).toArray();
- // Test against the standalone started by resmoke.py.
- let conn = db.getMongo();
- doExecutionTest(conn);
- print("Success! Standalone execution weather test for $addFields passed.");
+ // Then assert they are the same.
+ assert(arrayEq(addFieldsResult, correctWeather),
+ "$addFields does not work the same as a $project with computed and included fields");
+}
- // Test against a sharded cluster.
- let st = new ShardingTest({shards: 2});
- doExecutionTest(st.s0);
- st.stop();
- print("Success! Sharding weather test for $addFields passed.");
+// Test against the standalone started by resmoke.py.
+let conn = db.getMongo();
+doExecutionTest(conn);
+print("Success! Standalone execution weather test for $addFields passed.");
+// Test against a sharded cluster.
+let st = new ShardingTest({shards: 2});
+doExecutionTest(st.s0);
+st.stop();
+print("Success! Sharding weather test for $addFields passed.");
}());
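
Worth calling out from the pipelines above: $min, $max, and $avg act as ordinary expressions over an array operand when used inside $project or $addFields, not only as $group accumulators. A small sketch (collection name illustrative):

    const w = db.weather_sketch;  // illustrative name
    w.drop();
    assert.writeOK(w.insert({_id: 1, temperatures: [50, 70, 60]}));
    const doc = w.aggregate([{
                     $addFields: {
                         minTemp: {$min: "$temperatures"},
                         maxTemp: {$max: "$temperatures"},
                         average: {$avg: "$temperatures"}
                     }
                 }]).toArray()[0];
    assert.eq(doc.minTemp, 50);
    assert.eq(doc.maxTemp, 70);
    assert.eq(doc.average, 60);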
diff --git a/jstests/aggregation/sources/bucket/collation_bucket.js b/jstests/aggregation/sources/bucket/collation_bucket.js
index 45f15402499..617bf8085f2 100644
--- a/jstests/aggregation/sources/bucket/collation_bucket.js
+++ b/jstests/aggregation/sources/bucket/collation_bucket.js
@@ -3,102 +3,100 @@
// Test that the $bucket stage defines and sorts buckets according to the collation.
(function() {
- "use strict";
+"use strict";
- var results;
- const numericOrdering = {collation: {locale: "en_US", numericOrdering: true}};
+var results;
+const numericOrdering = {
+ collation: {locale: "en_US", numericOrdering: true}
+};
- var coll = db.collation_bucket;
- coll.drop();
+var coll = db.collation_bucket;
+coll.drop();
- function insertData() {
- assert.writeOK(coll.insert({num: "1"}));
- assert.writeOK(coll.insert({num: "2"}));
- assert.writeOK(coll.insert({num: "5"}));
- assert.writeOK(coll.insert({num: "10"}));
- assert.writeOK(coll.insert({num: "20"}));
- assert.writeOK(coll.insert({num: "50"}));
- assert.writeOK(coll.insert({num: "100"}));
- assert.writeOK(coll.insert({num: "200"}));
- assert.writeOK(coll.insert({num: "500"}));
- }
+function insertData() {
+ assert.writeOK(coll.insert({num: "1"}));
+ assert.writeOK(coll.insert({num: "2"}));
+ assert.writeOK(coll.insert({num: "5"}));
+ assert.writeOK(coll.insert({num: "10"}));
+ assert.writeOK(coll.insert({num: "20"}));
+ assert.writeOK(coll.insert({num: "50"}));
+ assert.writeOK(coll.insert({num: "100"}));
+ assert.writeOK(coll.insert({num: "200"}));
+ assert.writeOK(coll.insert({num: "500"}));
+}
- insertData();
+insertData();
- // Test that $bucket respects an explicit collation.
- results = coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}],
- numericOrdering)
- .toArray();
- assert.eq(3, results.length);
- assert.eq({_id: "1", count: 3}, results[0]);
- assert.eq({_id: "10", count: 3}, results[1]);
- assert.eq({_id: "100", count: 3}, results[2]);
+// Test that $bucket respects an explicit collation.
+results = coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}],
+ numericOrdering)
+ .toArray();
+assert.eq(3, results.length);
+assert.eq({_id: "1", count: 3}, results[0]);
+assert.eq({_id: "10", count: 3}, results[1]);
+assert.eq({_id: "100", count: 3}, results[2]);
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
- insertData();
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
+insertData();
- // Test that $bucket respects the inherited collation.
- results = coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}])
- .toArray();
- assert.eq(3, results.length);
- assert.eq({_id: "1", count: 3}, results[0]);
- assert.eq({_id: "10", count: 3}, results[1]);
- assert.eq({_id: "100", count: 3}, results[2]);
+// Test that $bucket respects the inherited collation.
+results = coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}])
+ .toArray();
+assert.eq(3, results.length);
+assert.eq({_id: "1", count: 3}, results[0]);
+assert.eq({_id: "10", count: 3}, results[1]);
+assert.eq({_id: "100", count: 3}, results[2]);
- // Test that the collection default can be overridden with the simple collation. In this case,
- // the $bucket should fail, because under a lexicographical comparison strings like "2" or "5"
- // won't fall into any of the buckets.
- assert.throws(
- () => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}],
- {collation: {locale: "simple"}}));
+// Test that the collection default can be overridden with the simple collation. In this case,
+// the $bucket should fail, because under a lexicographical comparison strings like "2" or "5"
+// won't fall into any of the buckets.
+assert.throws(
+ () => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100", "1000"]}}],
+ {collation: {locale: "simple"}}));
- // Test that $bucket rejects boundaries that are not sorted according to the collation.
- assert.throws(
- () => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["100", "20", "4"]}}]));
+// Test that $bucket rejects boundaries that are not sorted according to the collation.
+assert.throws(() => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["100", "20", "4"]}}]));
- assert.throws(() =>
- coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["4", "20", "100"]}}],
- {collation: {locale: "simple"}}));
+assert.throws(() => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["4", "20", "100"]}}],
+ {collation: {locale: "simple"}}));
- // Test that $bucket rejects a default value that falls within the boundaries.
- assert.throws(
- () => coll.aggregate(
- [{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100"], default: "40"}}]));
+// Test that $bucket rejects a default value that falls within the boundaries.
+assert.throws(() => coll.aggregate(
+ [{$bucket: {groupBy: "$num", boundaries: ["1", "10", "100"], default: "40"}}]));
- assert.throws(() => coll.aggregate(
- [{$bucket: {groupBy: "$num", boundaries: ["100", "999"], default: "2"}}],
- {collation: {locale: "simple"}}));
+assert.throws(
+ () => coll.aggregate([{$bucket: {groupBy: "$num", boundaries: ["100", "999"], default: "2"}}],
+ {collation: {locale: "simple"}}));
- // Test that $bucket accepts a default value that falls outside the boundaries according to the
- // collation.
- results =
- coll.aggregate([{
+// Test that $bucket accepts a default value that falls outside the boundaries according to the
+// collation.
+results = coll.aggregate([{
+ $bucket: {
+ groupBy: "$num",
+ boundaries: ["100", "999"],
+ default: "2" // Would fall between 100 and 999 if using the simple collation.
+ }
+ }])
+ .toArray();
+assert.eq(2, results.length);
+assert.eq({_id: "2", count: 6}, results[0]);
+assert.eq({_id: "100", count: 3}, results[1]); // "100", "200", and "500".
+
+results =
+ coll.aggregate(
+ [{
$bucket: {
groupBy: "$num",
- boundaries: ["100", "999"],
- default: "2" // Would fall between 100 and 999 if using the simple collation.
+ boundaries: ["1", "19999"], // Will include all numbers that start with "1"
+ default: "2" // Would fall between boundaries if using the
+ // collection-default collation with numeric
+ // ordering.
}
- }])
- .toArray();
- assert.eq(2, results.length);
- assert.eq({_id: "2", count: 6}, results[0]);
- assert.eq({_id: "100", count: 3}, results[1]); // "100", "200", and "500".
-
- results =
- coll.aggregate(
- [{
- $bucket: {
- groupBy: "$num",
- boundaries: ["1", "19999"], // Will include all numbers that start with "1"
- default: "2" // Would fall between boundaries if using the
- // collection-default collation with numeric
- // ordering.
- }
- }],
- {collation: {locale: "simple"}})
- .toArray();
- assert.eq(2, results.length);
- assert.eq({_id: "1", count: 3}, results[0]); // "1", "10", and "100".
- assert.eq({_id: "2", count: 6}, results[1]);
+ }],
+ {collation: {locale: "simple"}})
+ .toArray();
+assert.eq(2, results.length);
+assert.eq({_id: "1", count: 3}, results[0]); // "1", "10", and "100".
+assert.eq({_id: "2", count: 6}, results[1]);
})();
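
The collation driving this test can be seen in isolation: with numericOrdering, strings of digits compare by numeric value, so "2" sorts before "10"; the simple collation compares code points, so "10" sorts before "2". A minimal sketch (collection name illustrative):

    const n = db.numeric_ordering_sketch;  // illustrative name
    n.drop();
    assert.writeOK(n.insert({num: "2"}));
    assert.writeOK(n.insert({num: "10"}));
    assert.writeOK(n.insert({num: "100"}));
    assert.eq(n.find({}, {_id: 0})
                  .sort({num: 1})
                  .collation({locale: "en_US", numericOrdering: true})
                  .toArray(),
              [{num: "2"}, {num: "10"}, {num: "100"}]);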
diff --git a/jstests/aggregation/sources/bucketauto/collation_bucketauto.js b/jstests/aggregation/sources/bucketauto/collation_bucketauto.js
index 4aafa6ea511..26b48951ab7 100644
--- a/jstests/aggregation/sources/bucketauto/collation_bucketauto.js
+++ b/jstests/aggregation/sources/bucketauto/collation_bucketauto.js
@@ -3,55 +3,56 @@
// Test that the $bucketAuto stage defines and sorts buckets according to the collation.
(function() {
- "use strict";
-
- var results;
- const numericOrdering = {collation: {locale: "en_US", numericOrdering: true}};
-
- var coll = db.collation_bucket;
- coll.drop();
-
- function insertData() {
- assert.writeOK(coll.insert({num: "1"}));
- assert.writeOK(coll.insert({num: "2"}));
- assert.writeOK(coll.insert({num: "5"}));
- assert.writeOK(coll.insert({num: "10"}));
- assert.writeOK(coll.insert({num: "20"}));
- assert.writeOK(coll.insert({num: "50"}));
- assert.writeOK(coll.insert({num: "100"}));
- assert.writeOK(coll.insert({num: "200"}));
- assert.writeOK(coll.insert({num: "500"}));
- }
-
- insertData();
-
- // Test that $bucketAuto respects an explicit collation.
- results =
- coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}], numericOrdering).toArray();
- assert.eq(3, results.length);
- assert.eq({_id: {min: "1", max: "10"}, count: 3}, results[0]);
- assert.eq({_id: {min: "10", max: "100"}, count: 3}, results[1]);
- assert.eq({_id: {min: "100", max: "500"}, count: 3}, results[2]);
-
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
- insertData();
-
- // Test that $bucketAuto respects the inherited collation.
- results = coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}]).toArray();
- assert.eq(3, results.length);
- assert.eq({_id: {min: "1", max: "10"}, count: 3}, results[0]);
- assert.eq({_id: {min: "10", max: "100"}, count: 3}, results[1]);
- assert.eq({_id: {min: "100", max: "500"}, count: 3}, results[2]);
-
- // Test that the collection default can be overridden with the simple collation. In this case,
- // the numbers will be sorted in lexicographical order, so the 3 buckets will be:
- // ["1", "10","100"], ["2", "20", "200"], and ["5", "50", "500"]
- results = coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}],
- {collation: {locale: "simple"}})
- .toArray();
- assert.eq(3, results.length);
- assert.eq({_id: {min: "1", max: "2"}, count: 3}, results[0]);
- assert.eq({_id: {min: "2", max: "5"}, count: 3}, results[1]);
- assert.eq({_id: {min: "5", max: "500"}, count: 3}, results[2]);
+"use strict";
+
+var results;
+const numericOrdering = {
+ collation: {locale: "en_US", numericOrdering: true}
+};
+
+var coll = db.collation_bucket;
+coll.drop();
+
+function insertData() {
+ assert.writeOK(coll.insert({num: "1"}));
+ assert.writeOK(coll.insert({num: "2"}));
+ assert.writeOK(coll.insert({num: "5"}));
+ assert.writeOK(coll.insert({num: "10"}));
+ assert.writeOK(coll.insert({num: "20"}));
+ assert.writeOK(coll.insert({num: "50"}));
+ assert.writeOK(coll.insert({num: "100"}));
+ assert.writeOK(coll.insert({num: "200"}));
+ assert.writeOK(coll.insert({num: "500"}));
+}
+
+insertData();
+
+// Test that $bucketAuto respects an explicit collation.
+results = coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}], numericOrdering).toArray();
+assert.eq(3, results.length);
+assert.eq({_id: {min: "1", max: "10"}, count: 3}, results[0]);
+assert.eq({_id: {min: "10", max: "100"}, count: 3}, results[1]);
+assert.eq({_id: {min: "100", max: "500"}, count: 3}, results[2]);
+
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
+insertData();
+
+// Test that $bucketAuto respects the inherited collation.
+results = coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}]).toArray();
+assert.eq(3, results.length);
+assert.eq({_id: {min: "1", max: "10"}, count: 3}, results[0]);
+assert.eq({_id: {min: "10", max: "100"}, count: 3}, results[1]);
+assert.eq({_id: {min: "100", max: "500"}, count: 3}, results[2]);
+
+// Test that the collection default can be overridden with the simple collation. In this case,
+// the numbers will be sorted in lexicographical order, so the 3 buckets will be:
+// ["1", "10","100"], ["2", "20", "200"], and ["5", "50", "500"]
+results =
+ coll.aggregate([{$bucketAuto: {groupBy: "$num", buckets: 3}}], {collation: {locale: "simple"}})
+ .toArray();
+assert.eq(3, results.length);
+assert.eq({_id: {min: "1", max: "2"}, count: 3}, results[0]);
+assert.eq({_id: {min: "2", max: "5"}, count: 3}, results[1]);
+assert.eq({_id: {min: "5", max: "500"}, count: 3}, results[2]);
})();
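
A worked reading of the assertions above: $bucketAuto sorts the groupBy values under the active collation and slices them into the requested number of roughly equal buckets, reporting each bucket as {min, max} with max exclusive, except that the last bucket's max is the overall maximum and is inclusive:

    // Nine strings in numeric order: "1","2","5","10","20","50","100","200","500".
    // With buckets: 3 the slices are exactly the groups asserted above:
    //   {min: "1",   max: "10"}   -> "1", "2", "5"
    //   {min: "10",  max: "100"}  -> "10", "20", "50"
    //   {min: "100", max: "500"}  -> "100", "200", "500"   (last max inclusive)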
diff --git a/jstests/aggregation/sources/collStats/count.js b/jstests/aggregation/sources/collStats/count.js
index 265805408a3..5eb96cd7146 100644
--- a/jstests/aggregation/sources/collStats/count.js
+++ b/jstests/aggregation/sources/collStats/count.js
@@ -1,71 +1,71 @@
// Test that count within a $collStats stage returns the correct number of documents.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
- load("jstests/libs/fixture_helpers.js"); // For "FixtureHelpers".
+load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
+load("jstests/libs/fixture_helpers.js"); // For "FixtureHelpers".
- let testDB = db.getSiblingDB("aggregation_count_db");
- let coll = testDB.aggregation_count;
- coll.drop();
+let testDB = db.getSiblingDB("aggregation_count_db");
+let coll = testDB.aggregation_count;
+coll.drop();
- let nDocs = 1000;
- for (var i = 0; i < nDocs; i++) {
- assert.writeOK(coll.insert({a: i}));
- }
+let nDocs = 1000;
+for (var i = 0; i < nDocs; i++) {
+ assert.writeOK(coll.insert({a: i}));
+}
- // Test that $collStats must be first stage.
- let pipeline = [{$match: {}}, {$collStats: {}}];
- assertErrorCode(coll, pipeline, 40602);
+// Test that $collStats must be first stage.
+let pipeline = [{$match: {}}, {$collStats: {}}];
+assertErrorCode(coll, pipeline, 40602);
- // Test that an error is returned if count is not an object.
- pipeline = [{$collStats: {count: 1}}];
- assertErrorCode(coll, pipeline, 40480, "count spec must be an object");
- pipeline = [{$collStats: {count: "1"}}];
- assertErrorCode(coll, pipeline, 40480, "count spec must be an object");
+// Test that an error is returned if count is not an object.
+pipeline = [{$collStats: {count: 1}}];
+assertErrorCode(coll, pipeline, 40480, "count spec must be an object");
+pipeline = [{$collStats: {count: "1"}}];
+assertErrorCode(coll, pipeline, 40480, "count spec must be an object");
- // Test the accuracy of the record count as a standalone option.
- pipeline = [{$collStats: {count: {}}}];
- let result = coll.aggregate(pipeline).next();
- assert.eq(nDocs, result.count);
+// Test the accuracy of the record count as a standalone option.
+pipeline = [{$collStats: {count: {}}}];
+let result = coll.aggregate(pipeline).next();
+assert.eq(nDocs, result.count);
- // Test the record count alongside latencyStats and storageStats.
- pipeline = [{$collStats: {count: {}, latencyStats: {}}}];
- result = coll.aggregate(pipeline).next();
- assert.eq(nDocs, result.count);
- assert(result.hasOwnProperty("latencyStats"));
- assert(result.latencyStats.hasOwnProperty("reads"));
- assert(result.latencyStats.hasOwnProperty("writes"));
- assert(result.latencyStats.hasOwnProperty("commands"));
+// Test the record count alongside latencyStats and storageStats.
+pipeline = [{$collStats: {count: {}, latencyStats: {}}}];
+result = coll.aggregate(pipeline).next();
+assert.eq(nDocs, result.count);
+assert(result.hasOwnProperty("latencyStats"));
+assert(result.latencyStats.hasOwnProperty("reads"));
+assert(result.latencyStats.hasOwnProperty("writes"));
+assert(result.latencyStats.hasOwnProperty("commands"));
- pipeline = [{$collStats: {count: {}, latencyStats: {}, storageStats: {}}}];
- result = coll.aggregate(pipeline).next();
- assert.eq(nDocs, result.count);
- assert(result.hasOwnProperty("latencyStats"));
- assert(result.latencyStats.hasOwnProperty("reads"));
- assert(result.latencyStats.hasOwnProperty("writes"));
- assert(result.latencyStats.hasOwnProperty("commands"));
- assert(result.hasOwnProperty("storageStats"));
- assert.eq(nDocs, result.storageStats.count);
+pipeline = [{$collStats: {count: {}, latencyStats: {}, storageStats: {}}}];
+result = coll.aggregate(pipeline).next();
+assert.eq(nDocs, result.count);
+assert(result.hasOwnProperty("latencyStats"));
+assert(result.latencyStats.hasOwnProperty("reads"));
+assert(result.latencyStats.hasOwnProperty("writes"));
+assert(result.latencyStats.hasOwnProperty("commands"));
+assert(result.hasOwnProperty("storageStats"));
+assert.eq(nDocs, result.storageStats.count);
- // Test the record count against an empty collection.
- assert.writeOK(coll.remove({}));
- pipeline = [{$collStats: {count: {}}}];
- result = coll.aggregate(pipeline).next();
- assert.eq(0, result.count);
+// Test the record count against an empty collection.
+assert.writeOK(coll.remove({}));
+pipeline = [{$collStats: {count: {}}}];
+result = coll.aggregate(pipeline).next();
+assert.eq(0, result.count);
- // Test that we error when the collection does not exist.
- coll.drop();
- assertErrorCode(coll, pipeline, 40481);
+// Test that we error when the collection does not exist.
+coll.drop();
+assertErrorCode(coll, pipeline, 40481);
- // Test that we error when the database does not exist.
- // TODO SERVER-33039 When running against a mongos, a non-existent database will cause all
- // aggregations to return an empty result set.
- assert.commandWorked(testDB.dropDatabase());
- if (FixtureHelpers.isMongos(testDB)) {
- assert.eq([], coll.aggregate(pipeline).toArray());
- } else {
- assertErrorCode(coll, pipeline, 40481);
- }
+// Test that we error when the database does not exist.
+// TODO SERVER-33039 When running against a mongos, a non-existent database will cause all
+// aggregations to return an empty result set.
+assert.commandWorked(testDB.dropDatabase());
+if (FixtureHelpers.isMongos(testDB)) {
+ assert.eq([], coll.aggregate(pipeline).toArray());
+} else {
+ assertErrorCode(coll, pipeline, 40481);
+}
}());
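
For reference, the shape these assertions rely on: $collStats emits a single document whose requested sub-objects are filled in alongside a top-level count. A minimal sketch (collection name illustrative):

    const s = db.collstats_count_sketch;  // illustrative name
    s.drop();
    assert.writeOK(s.insert({a: 1}));
    // Requesting count: {} adds a top-level "count" field to the stats document.
    const stats = s.aggregate([{$collStats: {count: {}}}]).next();
    assert.eq(1, stats.count);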
diff --git a/jstests/aggregation/sources/collStats/query_exec_stats.js b/jstests/aggregation/sources/collStats/query_exec_stats.js
index 920f3ed84a6..ac343692341 100644
--- a/jstests/aggregation/sources/collStats/query_exec_stats.js
+++ b/jstests/aggregation/sources/collStats/query_exec_stats.js
@@ -1,83 +1,83 @@
// Test that queryExecStats within a $collStats stage returns the correct execution stats.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- if (jsTest.options().storageEngine === "mobile") {
- print("Skipping test because storage engine isn't mobile");
- return;
- }
+if (jsTest.options().storageEngine === "mobile") {
+ print("Skipping test because storage engine isn't mobile");
+ return;
+}
- load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
- load("jstests/libs/fixture_helpers.js"); // For "FixtureHelpers".
+load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
+load("jstests/libs/fixture_helpers.js"); // For "FixtureHelpers".
- const nDocs = 32;
+const nDocs = 32;
- const testDB = db.getSiblingDB("aggregation_query_exec_stats");
- const coll = testDB.aggregation_query_exec_stats;
- coll.drop();
- assert.commandWorked(
- testDB.createCollection("aggregation_query_exec_stats", {capped: true, size: nDocs * 100}));
+const testDB = db.getSiblingDB("aggregation_query_exec_stats");
+const coll = testDB.aggregation_query_exec_stats;
+coll.drop();
+assert.commandWorked(
+ testDB.createCollection("aggregation_query_exec_stats", {capped: true, size: nDocs * 100}));
- for (let i = 0; i < nDocs; i++) {
- assert.commandWorked(coll.insert({a: i}));
- }
+for (let i = 0; i < nDocs; i++) {
+ assert.commandWorked(coll.insert({a: i}));
+}
- // Run a bunch of collection scans on the server.
- for (let i = 0; i < nDocs; i++) {
- assert.eq(coll.find({a: i}).itcount(), 1);
- }
+// Run a bunch of collection scans on the server.
+for (let i = 0; i < nDocs; i++) {
+ assert.eq(coll.find({a: i}).itcount(), 1);
+}
- // Test that an error is returned if queryExecStats is not an object.
- let pipeline = [{$collStats: {queryExecStats: 1}}];
- assertErrorCode(coll, pipeline, 31141, "queryExecStats spec must be an object");
- pipeline = [{$collStats: {queryExecStats: "1"}}];
- assertErrorCode(coll, pipeline, 31141, "queryExecStats spec must be an object");
+// Test that an error is returned if queryExecStats is not an object.
+let pipeline = [{$collStats: {queryExecStats: 1}}];
+assertErrorCode(coll, pipeline, 31141, "queryExecStats spec must be an object");
+pipeline = [{$collStats: {queryExecStats: "1"}}];
+assertErrorCode(coll, pipeline, 31141, "queryExecStats spec must be an object");
- // Test the accuracy of the result of queryExecStats as a standalone option.
- pipeline = [{$collStats: {queryExecStats: {}}}];
- let result = coll.aggregate(pipeline).next();
- assert.eq(nDocs, result.queryExecStats.collectionScans.total);
- assert.eq(nDocs, result.queryExecStats.collectionScans.nonTailable);
+// Test the accuracy of the result of queryExecStats as a standalone option.
+pipeline = [{$collStats: {queryExecStats: {}}}];
+let result = coll.aggregate(pipeline).next();
+assert.eq(nDocs, result.queryExecStats.collectionScans.total);
+assert.eq(nDocs, result.queryExecStats.collectionScans.nonTailable);
- // Test tailable collection scans update collectionScans counters appropriately.
- for (let i = 0; i < nDocs; i++) {
- assert.eq(coll.find({a: i}).tailable().itcount(), 1);
- }
- result = coll.aggregate(pipeline).next();
- assert.eq(nDocs * 2, result.queryExecStats.collectionScans.total);
- assert.eq(nDocs, result.queryExecStats.collectionScans.nonTailable);
+// Test that tailable collection scans update collectionScans counters appropriately.
+for (let i = 0; i < nDocs; i++) {
+ assert.eq(coll.find({a: i}).tailable().itcount(), 1);
+}
+result = coll.aggregate(pipeline).next();
+assert.eq(nDocs * 2, result.queryExecStats.collectionScans.total);
+assert.eq(nDocs, result.queryExecStats.collectionScans.nonTailable);
- // Run a query which will require the client to fetch multiple batches from the server. Ensure
- // that the getMore commands don't increment the counter of collection scans.
- assert.eq(coll.find({}).batchSize(2).itcount(), nDocs);
- result = coll.aggregate(pipeline).next();
- assert.eq((nDocs * 2) + 1, result.queryExecStats.collectionScans.total);
- assert.eq(nDocs + 1, result.queryExecStats.collectionScans.nonTailable);
+// Run a query which will require the client to fetch multiple batches from the server. Ensure
+// that the getMore commands don't increment the counter of collection scans.
+assert.eq(coll.find({}).batchSize(2).itcount(), nDocs);
+result = coll.aggregate(pipeline).next();
+assert.eq((nDocs * 2) + 1, result.queryExecStats.collectionScans.total);
+assert.eq(nDocs + 1, result.queryExecStats.collectionScans.nonTailable);
- // Create index to test that index scans don't up the collection scan counter.
- assert.commandWorked(coll.createIndex({a: 1}));
- // Run a bunch of index scans.
- for (let i = 0; i < nDocs; i++) {
- assert.eq(coll.find({a: i}).itcount(), 1);
- }
- result = coll.aggregate(pipeline).next();
- // Assert that the number of collection scans hasn't increased.
- assert.eq((nDocs * 2) + 1, result.queryExecStats.collectionScans.total);
- assert.eq(nDocs + 1, result.queryExecStats.collectionScans.nonTailable);
+// Create an index to test that index scans don't increment the collection scan counter.
+assert.commandWorked(coll.createIndex({a: 1}));
+// Run a bunch of index scans.
+for (let i = 0; i < nDocs; i++) {
+ assert.eq(coll.find({a: i}).itcount(), 1);
+}
+result = coll.aggregate(pipeline).next();
+// Assert that the number of collection scans hasn't increased.
+assert.eq((nDocs * 2) + 1, result.queryExecStats.collectionScans.total);
+assert.eq(nDocs + 1, result.queryExecStats.collectionScans.nonTailable);
- // Test that we error when the collection does not exist.
- coll.drop();
- pipeline = [{$collStats: {queryExecStats: {}}}];
- assertErrorCode(coll, pipeline, 31142);
+// Test that we error when the collection does not exist.
+coll.drop();
+pipeline = [{$collStats: {queryExecStats: {}}}];
+assertErrorCode(coll, pipeline, 31142);
- // Test that we error when the database does not exist.
- // TODO SERVER-33039 When running against a mongos, a non-existent database will cause all
- // aggregations to return an empty result set.
- assert.commandWorked(testDB.dropDatabase());
- if (FixtureHelpers.isMongos(testDB)) {
- assert.eq([], coll.aggregate(pipeline).toArray());
- } else {
- assertErrorCode(coll, pipeline, 31142);
- }
+// Test that we error when the database does not exist.
+// TODO SERVER-33039 When running against a mongos, a non-existent database will cause all
+// aggregations to return an empty result set.
+assert.commandWorked(testDB.dropDatabase());
+if (FixtureHelpers.isMongos(testDB)) {
+ assert.eq([], coll.aggregate(pipeline).toArray());
+} else {
+ assertErrorCode(coll, pipeline, 31142);
+}
}());
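
The result shape the assertions above consume is {queryExecStats: {collectionScans: {total, nonTailable}}}. A minimal sketch, assuming a fresh collection that nothing else has scanned (collection name illustrative):

    const q = db.query_exec_stats_sketch;  // illustrative name
    q.drop();
    assert.commandWorked(q.insert({a: 1}));
    assert.eq(q.find({a: 1}).itcount(), 1);  // one collection scan: no index on "a"
    const stats = q.aggregate([{$collStats: {queryExecStats: {}}}]).next();
    assert.eq(1, stats.queryExecStats.collectionScans.total);
    assert.eq(1, stats.queryExecStats.collectionScans.nonTailable);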
diff --git a/jstests/aggregation/sources/collStats/shard_host_info.js b/jstests/aggregation/sources/collStats/shard_host_info.js
index 34e1d8a195e..ced3f9bb47f 100644
--- a/jstests/aggregation/sources/collStats/shard_host_info.js
+++ b/jstests/aggregation/sources/collStats/shard_host_info.js
@@ -7,52 +7,52 @@
* ]
*/
(function() {
- "use strict";
-
- // Test mongoD behaviour using the standalone started by resmoke.py.
- let testDB = db.getSiblingDB(jsTestName());
- let testColl = testDB.test;
-
- // getHostName() doesn't include port, db.getMongo().host is 127.0.0.1:<port>
- const hostName = (getHostName() + ":" + db.getMongo().host.split(":")[1]);
-
- // Test that the shard field is absent and the host field is present when run on mongoD.
- assert.eq(testColl
- .aggregate([
- {$collStats: {latencyStats: {histograms: true}}},
- {$group: {_id: {shard: "$shard", host: "$host"}}}
- ])
- .toArray(),
- [{_id: {host: hostName}}]);
-
- // Test that both shard and hostname are present for $collStats results on a sharded cluster.
- const st = new ShardingTest({name: jsTestName(), shards: 2});
-
- testDB = st.s.getDB(jsTestName());
- testColl = testDB.test;
-
- assert.commandWorked(testDB.dropDatabase());
-
- // Enable sharding on the test database.
- assert.commandWorked(testDB.adminCommand({enableSharding: testDB.getName()}));
-
- // Shard 'testColl' on {_id: 'hashed'}. This will automatically presplit the collection and
- // place chunks on each shard.
- assert.commandWorked(
- testDB.adminCommand({shardCollection: testColl.getFullName(), key: {_id: "hashed"}}));
-
- // Group $collStats result by $shard and $host to confirm that both fields are present.
- assert.eq(testColl
- .aggregate([
- {$collStats: {latencyStats: {histograms: true}}},
- {$group: {_id: {shard: "$shard", host: "$host"}}},
- {$sort: {_id: 1}}
- ])
- .toArray(),
- [
- {_id: {shard: st.shard0.shardName, host: st.rs0.getPrimary().host}},
- {_id: {shard: st.shard1.shardName, host: st.rs1.getPrimary().host}},
- ]);
-
- st.stop();
+"use strict";
+
+// Test mongoD behaviour using the standalone started by resmoke.py.
+let testDB = db.getSiblingDB(jsTestName());
+let testColl = testDB.test;
+
+// getHostName() doesn't include the port; db.getMongo().host is 127.0.0.1:<port>.
+const hostName = (getHostName() + ":" + db.getMongo().host.split(":")[1]);
+
+// Test that the shard field is absent and the host field is present when run on mongoD.
+assert.eq(testColl
+ .aggregate([
+ {$collStats: {latencyStats: {histograms: true}}},
+ {$group: {_id: {shard: "$shard", host: "$host"}}}
+ ])
+ .toArray(),
+ [{_id: {host: hostName}}]);
+
+// Test that both shard and hostname are present for $collStats results on a sharded cluster.
+const st = new ShardingTest({name: jsTestName(), shards: 2});
+
+testDB = st.s.getDB(jsTestName());
+testColl = testDB.test;
+
+assert.commandWorked(testDB.dropDatabase());
+
+// Enable sharding on the test database.
+assert.commandWorked(testDB.adminCommand({enableSharding: testDB.getName()}));
+
+// Shard 'testColl' on {_id: 'hashed'}. This will automatically presplit the collection and
+// place chunks on each shard.
+assert.commandWorked(
+ testDB.adminCommand({shardCollection: testColl.getFullName(), key: {_id: "hashed"}}));
+
+// Group $collStats result by $shard and $host to confirm that both fields are present.
+assert.eq(testColl
+ .aggregate([
+ {$collStats: {latencyStats: {histograms: true}}},
+ {$group: {_id: {shard: "$shard", host: "$host"}}},
+ {$sort: {_id: 1}}
+ ])
+ .toArray(),
+ [
+ {_id: {shard: st.shard0.shardName, host: st.rs0.getPrimary().host}},
+ {_id: {shard: st.shard1.shardName, host: st.rs1.getPrimary().host}},
+ ]);
+
+st.stop();
})();
diff --git a/jstests/aggregation/sources/facet/inner_graphlookup.js b/jstests/aggregation/sources/facet/inner_graphlookup.js
index 340853f7721..9631b8878ef 100644
--- a/jstests/aggregation/sources/facet/inner_graphlookup.js
+++ b/jstests/aggregation/sources/facet/inner_graphlookup.js
@@ -7,21 +7,21 @@
* using the $graphLookup stage outside of the $facet stage.
*/
(function() {
- "use strict";
+"use strict";
- // We will only use one collection, the $graphLookup will look up from the same collection.
- var graphColl = db.facetGraphLookup;
+// We will only use one collection; the $graphLookup will look up from the same collection.
+var graphColl = db.facetGraphLookup;
- // The graph in ASCII form: 0 --- 1 --- 2 3
- graphColl.drop();
- assert.writeOK(graphColl.insert({_id: 0, edges: [1]}));
- assert.writeOK(graphColl.insert({_id: 1, edges: [0, 2]}));
- assert.writeOK(graphColl.insert({_id: 2, edges: [1]}));
- assert.writeOK(graphColl.insert({_id: 3}));
+// The graph in ASCII form: 0 --- 1 --- 2 3
+graphColl.drop();
+assert.writeOK(graphColl.insert({_id: 0, edges: [1]}));
+assert.writeOK(graphColl.insert({_id: 1, edges: [0, 2]}));
+assert.writeOK(graphColl.insert({_id: 2, edges: [1]}));
+assert.writeOK(graphColl.insert({_id: 3}));
- // For each document in the collection, this will compute all the other documents that are
- // reachable from this one.
- const graphLookupStage = {
+// For each document in the collection, this will compute all the other documents that are
+// reachable from this one.
+const graphLookupStage = {
$graphLookup: {
from: graphColl.getName(),
startWith: "$_id",
@@ -31,20 +31,24 @@
}
};
- const projectStage = {$project: {_id: 1, edges: 1, connected_length: {$size: "$connected"}}};
-
- const normalResults = graphColl.aggregate([graphLookupStage, projectStage]).toArray();
- const facetedResults =
- graphColl.aggregate([{$facet: {nested: [graphLookupStage, projectStage]}}]).toArray();
- assert.eq(facetedResults, [{nested: normalResults}]);
-
- const sortStage = {$sort: {_id: 1, "connected._id": 1}};
-
- const normalResultsUnwound =
- graphColl.aggregate([graphLookupStage, {$unwind: "$connected"}, sortStage]).toArray();
- const facetedResultsUnwound =
- graphColl
- .aggregate([{$facet: {nested: [graphLookupStage, {$unwind: "$connected"}, sortStage]}}])
- .toArray();
- assert.eq(facetedResultsUnwound, [{nested: normalResultsUnwound}]);
+const projectStage = {
+ $project: {_id: 1, edges: 1, connected_length: {$size: "$connected"}}
+};
+
+const normalResults = graphColl.aggregate([graphLookupStage, projectStage]).toArray();
+const facetedResults =
+ graphColl.aggregate([{$facet: {nested: [graphLookupStage, projectStage]}}]).toArray();
+assert.eq(facetedResults, [{nested: normalResults}]);
+
+const sortStage = {
+ $sort: {_id: 1, "connected._id": 1}
+};
+
+const normalResultsUnwound =
+ graphColl.aggregate([graphLookupStage, {$unwind: "$connected"}, sortStage]).toArray();
+const facetedResultsUnwound =
+ graphColl
+ .aggregate([{$facet: {nested: [graphLookupStage, {$unwind: "$connected"}, sortStage]}}])
+ .toArray();
+assert.eq(facetedResultsUnwound, [{nested: normalResultsUnwound}]);
}());
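
The traversal can be checked by hand on the four documents above. Assuming the usual connectFromField: "edges" / connectToField: "_id" spec (the elided lines of graphLookupStage), startWith: "$_id" seeds the search with the document itself:

    //   _id 0: 0 -> 1 -> {0, 2}, so connected = {0, 1, 2}, connected_length 3
    //   _id 1: 1 -> {0, 2},      so connected = {0, 1, 2}, connected_length 3
    //   _id 2: 2 -> 1 -> {0, 2}, so connected = {0, 1, 2}, connected_length 3
    //   _id 3: no edges,         so connected = {3},       connected_length 1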
diff --git a/jstests/aggregation/sources/facet/inner_lookup.js b/jstests/aggregation/sources/facet/inner_lookup.js
index 39f1b53f88f..0852f820869 100644
--- a/jstests/aggregation/sources/facet/inner_lookup.js
+++ b/jstests/aggregation/sources/facet/inner_lookup.js
@@ -7,41 +7,36 @@
* the $lookup stage outside of the $facet stage.
*/
(function() {
- "use strict";
+"use strict";
- var local = db.facetLookupLocal;
- var foreign = db.facetLookupForeign;
+var local = db.facetLookupLocal;
+var foreign = db.facetLookupForeign;
- local.drop();
- assert.writeOK(local.insert({_id: 0}));
- assert.writeOK(local.insert({_id: 1}));
+local.drop();
+assert.writeOK(local.insert({_id: 0}));
+assert.writeOK(local.insert({_id: 1}));
- foreign.drop();
- assert.writeOK(foreign.insert({_id: 0, foreignKey: 0}));
- assert.writeOK(foreign.insert({_id: 1, foreignKey: 1}));
- assert.writeOK(foreign.insert({_id: 2, foreignKey: 2}));
+foreign.drop();
+assert.writeOK(foreign.insert({_id: 0, foreignKey: 0}));
+assert.writeOK(foreign.insert({_id: 1, foreignKey: 1}));
+assert.writeOK(foreign.insert({_id: 2, foreignKey: 2}));
- function runTest(lookupStage) {
- const lookupResults = local.aggregate([lookupStage]).toArray();
- const facetedLookupResults = local.aggregate([{$facet: {nested: [lookupStage]}}]).toArray();
- assert.eq(facetedLookupResults, [{nested: lookupResults}]);
+function runTest(lookupStage) {
+ const lookupResults = local.aggregate([lookupStage]).toArray();
+ const facetedLookupResults = local.aggregate([{$facet: {nested: [lookupStage]}}]).toArray();
+ assert.eq(facetedLookupResults, [{nested: lookupResults}]);
- const lookupResultsUnwound = local.aggregate([lookupStage, {$unwind: "$joined"}]).toArray();
- const facetedLookupResultsUnwound =
- local.aggregate([{$facet: {nested: [lookupStage, {$unwind: "$joined"}]}}]).toArray();
- assert.eq(facetedLookupResultsUnwound, [{nested: lookupResultsUnwound}]);
- }
+ const lookupResultsUnwound = local.aggregate([lookupStage, {$unwind: "$joined"}]).toArray();
+ const facetedLookupResultsUnwound =
+ local.aggregate([{$facet: {nested: [lookupStage, {$unwind: "$joined"}]}}]).toArray();
+ assert.eq(facetedLookupResultsUnwound, [{nested: lookupResultsUnwound}]);
+}
- runTest({
- $lookup: {
- from: foreign.getName(),
- localField: "_id",
- foreignField: "foreignKey",
- as: "joined"
- }
- });
+runTest({
+ $lookup: {from: foreign.getName(), localField: "_id", foreignField: "foreignKey", as: "joined"}
+});
- runTest({
+runTest({
$lookup: {
from: foreign.getName(),
let : {id1: "$_id"},
diff --git a/jstests/aggregation/sources/facet/use_cases.js b/jstests/aggregation/sources/facet/use_cases.js
index a6f1def408e..83f5d58d4d8 100644
--- a/jstests/aggregation/sources/facet/use_cases.js
+++ b/jstests/aggregation/sources/facet/use_cases.js
@@ -6,168 +6,164 @@
* ]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
- load("jstests/libs/discover_topology.js"); // For findDataBearingNodes.
+load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
+load("jstests/libs/discover_topology.js"); // For findDataBearingNodes.
- const dbName = "test";
- const collName = jsTest.name();
- const testNs = dbName + "." + collName;
+const dbName = "test";
+const collName = jsTest.name();
+const testNs = dbName + "." + collName;
- Random.setRandomSeed();
+Random.setRandomSeed();
- /**
- * Helper to get a random entry out of an array.
- */
- function randomChoice(array) {
- return array[Random.randInt(array.length)];
- }
-
- /**
- * Helper to generate a randomized document with the following schema:
- * {
- * manufacturer: <string>,
- * price: <double>,
- * screenSize: <double>
- * }
- */
- function generateRandomDocument(docId) {
- const manufacturers =
- ["Sony", "Samsung", "LG", "Panasonic", "Mitsubishi", "Vizio", "Toshiba", "Sharp"];
- const minPrice = 100;
- const maxPrice = 4000;
- const minScreenSize = 18;
- const maxScreenSize = 40;
-
- return {
- _id: docId,
- manufacturer: randomChoice(manufacturers),
- price: Random.randInt(maxPrice - minPrice + 1) + minPrice,
- screenSize: Random.randInt(maxScreenSize - minScreenSize + 1) + minScreenSize,
- };
- }
+/**
+ * Helper to get a random entry out of an array.
+ */
+function randomChoice(array) {
+ return array[Random.randInt(array.length)];
+}
- /**
- * Inserts 'nDocs' documents into collection given by 'dbName' and 'collName'. Documents will
- * have _ids in the range [0, nDocs).
- */
- function populateData(conn, nDocs) {
- var coll = conn.getDB(dbName).getCollection(collName);
- coll.remove({}); // Don't drop the collection, since it might be sharded.
-
- var bulk = coll.initializeUnorderedBulkOp();
- for (var i = 0; i < nDocs; i++) {
- const doc = generateRandomDocument(i);
- bulk.insert(doc);
- }
- assert.writeOK(bulk.execute());
- }
+/**
+ * Helper to generate a randomized document with the following schema:
+ * {
+ * manufacturer: <string>,
+ * price: <double>,
+ * screenSize: <double>
+ * }
+ */
+function generateRandomDocument(docId) {
+ const manufacturers =
+ ["Sony", "Samsung", "LG", "Panasonic", "Mitsubishi", "Vizio", "Toshiba", "Sharp"];
+ const minPrice = 100;
+ const maxPrice = 4000;
+ const minScreenSize = 18;
+ const maxScreenSize = 40;
+
+ return {
+ _id: docId,
+ manufacturer: randomChoice(manufacturers),
+ price: Random.randInt(maxPrice - minPrice + 1) + minPrice,
+ screenSize: Random.randInt(maxScreenSize - minScreenSize + 1) + minScreenSize,
+ };
+}
- function doExecutionTest(conn) {
- var coll = conn.getDB(dbName).getCollection(collName);
- //
- // Compute the most common manufacturers, and the number of TVs in each price range.
- //
-
- // First compute each separately, to make sure we have the correct results.
- const manufacturerPipe = [
- {$sortByCount: "$manufacturer"},
- // Sort by count and then by _id in case there are two manufacturers with an equal
- // count.
- {$sort: {count: -1, _id: 1}},
- ];
- const bucketedPricePipe = [
- {
- $bucket: {groupBy: "$price", boundaries: [0, 500, 1000, 1500, 2000], default: 2000},
- },
- {$sort: {count: -1}}
- ];
- const automaticallyBucketedPricePipe = [{$bucketAuto: {groupBy: "$price", buckets: 5}}];
-
- const mostCommonManufacturers = coll.aggregate(manufacturerPipe).toArray();
- const numTVsBucketedByPriceRange = coll.aggregate(bucketedPricePipe).toArray();
- const numTVsAutomaticallyBucketedByPriceRange =
- coll.aggregate(automaticallyBucketedPricePipe).toArray();
-
- const facetPipe = [{
- $facet: {
- manufacturers: manufacturerPipe,
- bucketedPrices: bucketedPricePipe,
- autoBucketedPrices: automaticallyBucketedPricePipe
- }
- }];
-
- // Then compute the results using $facet.
- const facetResult = coll.aggregate(facetPipe).toArray();
- assert.eq(facetResult.length, 1);
- const facetManufacturers = facetResult[0].manufacturers;
- const facetBucketedPrices = facetResult[0].bucketedPrices;
- const facetAutoBucketedPrices = facetResult[0].autoBucketedPrices;
-
- // Then assert they are the same.
- assert.eq(facetManufacturers, mostCommonManufacturers);
- assert.eq(facetBucketedPrices, numTVsBucketedByPriceRange);
- assert.eq(facetAutoBucketedPrices, numTVsAutomaticallyBucketedByPriceRange);
+/**
+ * Inserts 'nDocs' documents into the collection given by 'dbName' and 'collName'. Documents
+ * will have _ids in the range [0, nDocs).
+ */
+function populateData(conn, nDocs) {
+ var coll = conn.getDB(dbName).getCollection(collName);
+ coll.remove({}); // Don't drop the collection, since it might be sharded.
+
+ var bulk = coll.initializeUnorderedBulkOp();
+ for (var i = 0; i < nDocs; i++) {
+ const doc = generateRandomDocument(i);
+ bulk.insert(doc);
}
-
- // Test against the standalone started by resmoke.py.
- const nDocs = 1000 * 10;
- const conn = db.getMongo();
- populateData(conn, nDocs);
- doExecutionTest(conn);
-
- // Test against a sharded cluster.
- const st = new ShardingTest({shards: 2});
- populateData(st.s0, nDocs);
- doExecutionTest(st.s0);
-
- const shardedDBName = "sharded";
- const shardedCollName = "collection";
- const shardedColl = st.getDB(shardedDBName).getCollection(shardedCollName);
- const unshardedColl = st.getDB(shardedDBName).getCollection(collName);
-
- assert.commandWorked(st.admin.runCommand({enableSharding: shardedDBName}));
- assert.commandWorked(
- st.admin.runCommand({shardCollection: shardedColl.getFullName(), key: {_id: 1}}));
-
- // Test $lookup inside a $facet stage on a sharded collection.
- // Enable sharded $lookup.
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", true);
- assert.commandWorked(unshardedColl.runCommand({
- aggregate: unshardedColl.getName(),
- pipeline: [{
- $facet: {
- a: [{
- $lookup: {
- from: shardedCollName,
- localField: "_id",
- foreignField: "_id",
- as: "results"
- }
- }]
- }
- }],
- cursor: {}
- }));
- // Disable sharded $lookup.
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", false);
-
- // Then run the assertions against a sharded collection.
- assert.commandWorked(st.admin.runCommand({enableSharding: dbName}));
- assert.commandWorked(st.admin.runCommand({shardCollection: testNs, key: {_id: 1}}));
-
- // Make sure there is a chunk on each shard, so that our aggregations are targeted to multiple
- // shards.
- assert.commandWorked(st.admin.runCommand({split: testNs, middle: {_id: nDocs / 2}}));
- assert.commandWorked(
- st.admin.runCommand({moveChunk: testNs, find: {_id: 0}, to: st.shard0.shardName}));
- assert.commandWorked(
- st.admin.runCommand({moveChunk: testNs, find: {_id: nDocs - 1}, to: st.shard1.shardName}));
-
- doExecutionTest(st.s0);
-
- st.stop();
+ assert.writeOK(bulk.execute());
+}
+
+function doExecutionTest(conn) {
+ var coll = conn.getDB(dbName).getCollection(collName);
+ //
+ // Compute the most common manufacturers, and the number of TVs in each price range.
+ //
+
+ // First compute each separately, to make sure we have the correct results.
+ const manufacturerPipe = [
+ {$sortByCount: "$manufacturer"},
+ // Sort by count and then by _id in case there are two manufacturers with an equal
+ // count.
+ {$sort: {count: -1, _id: 1}},
+ ];
+ const bucketedPricePipe = [
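+        // Prices of 2000 or more fall outside the boundaries and are collected into the
+        // "default" bucket, whose _id is 2000.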
+ {
+ $bucket: {groupBy: "$price", boundaries: [0, 500, 1000, 1500, 2000], default: 2000},
+ },
+ {$sort: {count: -1}}
+ ];
+ const automaticallyBucketedPricePipe = [{$bucketAuto: {groupBy: "$price", buckets: 5}}];
+
+ const mostCommonManufacturers = coll.aggregate(manufacturerPipe).toArray();
+ const numTVsBucketedByPriceRange = coll.aggregate(bucketedPricePipe).toArray();
+ const numTVsAutomaticallyBucketedByPriceRange =
+ coll.aggregate(automaticallyBucketedPricePipe).toArray();
+
+ const facetPipe = [{
+ $facet: {
+ manufacturers: manufacturerPipe,
+ bucketedPrices: bucketedPricePipe,
+ autoBucketedPrices: automaticallyBucketedPricePipe
+ }
+ }];
+
+ // Then compute the results using $facet.
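+    // $facet feeds the same input documents to every sub-pipeline, so each facet should
+    // reproduce the standalone results computed above.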
+ const facetResult = coll.aggregate(facetPipe).toArray();
+ assert.eq(facetResult.length, 1);
+ const facetManufacturers = facetResult[0].manufacturers;
+ const facetBucketedPrices = facetResult[0].bucketedPrices;
+ const facetAutoBucketedPrices = facetResult[0].autoBucketedPrices;
+
+ // Then assert they are the same.
+ assert.eq(facetManufacturers, mostCommonManufacturers);
+ assert.eq(facetBucketedPrices, numTVsBucketedByPriceRange);
+ assert.eq(facetAutoBucketedPrices, numTVsAutomaticallyBucketedByPriceRange);
+}
+
+// Test against the standalone started by resmoke.py.
+const nDocs = 1000 * 10;
+const conn = db.getMongo();
+populateData(conn, nDocs);
+doExecutionTest(conn);
+
+// Test against a sharded cluster.
+const st = new ShardingTest({shards: 2});
+populateData(st.s0, nDocs);
+doExecutionTest(st.s0);
+
+const shardedDBName = "sharded";
+const shardedCollName = "collection";
+const shardedColl = st.getDB(shardedDBName).getCollection(shardedCollName);
+const unshardedColl = st.getDB(shardedDBName).getCollection(collName);
+
+assert.commandWorked(st.admin.runCommand({enableSharding: shardedDBName}));
+assert.commandWorked(
+ st.admin.runCommand({shardCollection: shardedColl.getFullName(), key: {_id: 1}}));
+
+// Test $lookup inside a $facet stage on a sharded collection.
+// Enable sharded $lookup.
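+// ($lookup into a sharded collection is gated behind the internalQueryAllowShardedLookup
+// server parameter, which must be set on every non-config node.)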
+setParameterOnAllHosts(
+ DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", true);
+assert.commandWorked(unshardedColl.runCommand({
+ aggregate: unshardedColl.getName(),
+ pipeline: [{
+ $facet: {
+ a: [{
+ $lookup:
+ {from: shardedCollName, localField: "_id", foreignField: "_id", as: "results"}
+ }]
+ }
+ }],
+ cursor: {}
+}));
+// Disable sharded $lookup.
+setParameterOnAllHosts(
+ DiscoverTopology.findNonConfigNodes(st.s), "internalQueryAllowShardedLookup", false);
+
+// Then run the assertions against a sharded collection.
+assert.commandWorked(st.admin.runCommand({enableSharding: dbName}));
+assert.commandWorked(st.admin.runCommand({shardCollection: testNs, key: {_id: 1}}));
+
+// Make sure there is a chunk on each shard, so that our aggregations are targeted to multiple
+// shards.
+assert.commandWorked(st.admin.runCommand({split: testNs, middle: {_id: nDocs / 2}}));
+assert.commandWorked(
+ st.admin.runCommand({moveChunk: testNs, find: {_id: 0}, to: st.shard0.shardName}));
+assert.commandWorked(
+ st.admin.runCommand({moveChunk: testNs, find: {_id: nDocs - 1}, to: st.shard1.shardName}));
+
+doExecutionTest(st.s0);
+
+st.stop();
}());
diff --git a/jstests/aggregation/sources/geonear/collation_geonear.js b/jstests/aggregation/sources/geonear/collation_geonear.js
index 076e0a8bea8..d4c47c1aec0 100644
--- a/jstests/aggregation/sources/geonear/collation_geonear.js
+++ b/jstests/aggregation/sources/geonear/collation_geonear.js
@@ -3,79 +3,81 @@
// Test that the $geoNear stage's query predicate respects the collation.
(function() {
- "use strict";
+"use strict";
- const caseInsensitive = {collation: {locale: "en_US", strength: 2}};
+const caseInsensitive = {
+ collation: {locale: "en_US", strength: 2}
+};
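+// At strength 2 (secondary), string comparisons ignore case, so "a" matches "A".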
- var coll = db.collation_geonear;
- coll.drop();
- assert.commandWorked(coll.createIndex({loc: "2dsphere"}));
- assert.writeOK(coll.insert({loc: [0, 0], str: "A"}));
+var coll = db.collation_geonear;
+coll.drop();
+assert.commandWorked(coll.createIndex({loc: "2dsphere"}));
+assert.writeOK(coll.insert({loc: [0, 0], str: "A"}));
- // Test that the $geoNear agg stage respects an explicit collation.
- assert.eq(0,
- coll.aggregate([{
- $geoNear: {
- near: {type: "Point", coordinates: [0, 0]},
- distanceField: "distanceField",
- spherical: true,
- query: {str: "a"},
- }
- }])
- .itcount());
- assert.eq(1,
- coll.aggregate([{
- $geoNear: {
- near: {type: "Point", coordinates: [0, 0]},
- distanceField: "distanceField",
- spherical: true,
- query: {str: "a"},
- }
- }],
- caseInsensitive)
- .itcount());
+// Test that the $geoNear agg stage respects an explicit collation.
+assert.eq(0,
+ coll.aggregate([{
+ $geoNear: {
+ near: {type: "Point", coordinates: [0, 0]},
+ distanceField: "distanceField",
+ spherical: true,
+ query: {str: "a"},
+ }
+ }])
+ .itcount());
+assert.eq(1,
+ coll.aggregate([{
+ $geoNear: {
+ near: {type: "Point", coordinates: [0, 0]},
+ distanceField: "distanceField",
+ spherical: true,
+ query: {str: "a"},
+ }
+ }],
+ caseInsensitive)
+ .itcount());
- // Test that the collation parameter cannot be passed directly as a parameter of the $geoNear
- // stage.
- assert.throws(function() {
- coll.aggregate([{
- $geoNear: {
- near: {type: "Point", coordinates: [0, 0]},
- distanceField: "distanceField",
- spherical: true,
- query: {str: "a"},
- collation: {locale: "en_US", strength: 2},
- }
- }]);
- });
+// Test that the collation parameter cannot be passed directly as a parameter of the $geoNear
+// stage.
+assert.throws(function() {
+ coll.aggregate([{
+ $geoNear: {
+ near: {type: "Point", coordinates: [0, 0]},
+ distanceField: "distanceField",
+ spherical: true,
+ query: {str: "a"},
+ collation: {locale: "en_US", strength: 2},
+ }
+ }]);
+});
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
- assert.commandWorked(coll.createIndex({loc: "2dsphere"}));
- assert.writeOK(coll.insert({loc: [0, 0], str: "A"}));
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
+assert.commandWorked(coll.createIndex({loc: "2dsphere"}));
+assert.writeOK(coll.insert({loc: [0, 0], str: "A"}));
- // Test that the $geoNear agg stage respects an inherited collation.
- assert.eq(1,
- coll.aggregate([{
- $geoNear: {
- near: {type: "Point", coordinates: [0, 0]},
- distanceField: "distanceField",
- spherical: true,
- query: {str: "a"},
- }
- }])
- .itcount());
+// Test that the $geoNear agg stage respects an inherited collation.
+assert.eq(1,
+ coll.aggregate([{
+ $geoNear: {
+ near: {type: "Point", coordinates: [0, 0]},
+ distanceField: "distanceField",
+ spherical: true,
+ query: {str: "a"},
+ }
+ }])
+ .itcount());
-    // Test that the collection default can be overridden with the simple collation.
- assert.eq(0,
- coll.aggregate([{
- $geoNear: {
- near: {type: "Point", coordinates: [0, 0]},
- distanceField: "distanceField",
- spherical: true,
- query: {str: "a"},
- }
- }],
- {collation: {locale: "simple"}})
- .itcount());
+// Test that the collection default can be overridden with the simple collation.
+assert.eq(0,
+ coll.aggregate([{
+ $geoNear: {
+ near: {type: "Point", coordinates: [0, 0]},
+ distanceField: "distanceField",
+ spherical: true,
+ query: {str: "a"},
+ }
+ }],
+ {collation: {locale: "simple"}})
+ .itcount());
})();
diff --git a/jstests/aggregation/sources/geonear/distancefield_and_includelocs.js b/jstests/aggregation/sources/geonear/distancefield_and_includelocs.js
index 80e884c2c36..1ed2364ccb3 100644
--- a/jstests/aggregation/sources/geonear/distancefield_and_includelocs.js
+++ b/jstests/aggregation/sources/geonear/distancefield_and_includelocs.js
@@ -3,167 +3,164 @@
* (specifically, by specifying nested fields, overriding existing fields, and so on).
*/
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For 'customDocumentEq'.
-
- const coll = db.getCollection("geonear_distancefield_and_includelocs");
- coll.drop();
-
- /**
- * Runs an aggregation with a $geoNear stage using 'geoSpec' and an optional $project stage
- * using 'projSpec'. Returns the first result; that is, the result closest to the "near" point.
- */
- function firstGeoNearResult(geoSpec, projSpec) {
- geoSpec.spherical = true;
- const pipeline = [{$geoNear: geoSpec}, {$limit: 1}];
- if (projSpec) {
- pipeline.push({$project: projSpec});
- }
-
- const res = coll.aggregate(pipeline).toArray();
- assert.eq(1, res.length, tojson(res));
- return res[0];
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For 'customDocumentEq'.
+
+const coll = db.getCollection("geonear_distancefield_and_includelocs");
+coll.drop();
+
+/**
+ * Runs an aggregation with a $geoNear stage using 'geoSpec' and an optional $project stage
+ * using 'projSpec'. Returns the first result; that is, the result closest to the "near" point.
+ */
+function firstGeoNearResult(geoSpec, projSpec) {
+ geoSpec.spherical = true;
+ const pipeline = [{$geoNear: geoSpec}, {$limit: 1}];
+ if (projSpec) {
+ pipeline.push({$project: projSpec});
}
-    // Use documents with a variety of fields: scalars, arrays, legacy points, and GeoJSON
-    // objects.
- const docWithLegacyPoint = {
- _id: "legacy",
- geo: [1, 1],
- ptForNearQuery: [1, 1],
- scalar: "foo",
- arr: [{a: 1, b: 1}, {a: 2, b: 2}],
- };
- const docWithGeoPoint = {
- _id: "point",
- geo: {type: "Point", coordinates: [1, 0]},
- ptForNearQuery: [1, 0],
- scalar: "bar",
- arr: [{a: 3, b: 3}, {a: 4, b: 4}],
- };
- const docWithGeoLine = {
- _id: "linestring",
- geo: {type: "LineString", coordinates: [[0, 0], [-1, -1]]},
- ptForNearQuery: [-1, -1],
- scalar: "baz",
- arr: [{a: 5, b: 5}, {a: 6, b: 6}],
- };
-
- // We test with a 2dsphere index, since 2d indexes can't support GeoJSON objects.
- assert.commandWorked(coll.createIndex({geo: "2dsphere"}));
-
- // Populate the collection.
- assert.writeOK(coll.insert(docWithLegacyPoint));
- assert.writeOK(coll.insert(docWithGeoPoint));
- assert.writeOK(coll.insert(docWithGeoLine));
-
- // Define a custom way to compare documents since the results here might differ by insignificant
- // amounts.
- const assertCloseEnough = (left, right) =>
- assert(customDocumentEq({
- left: left,
- right: right,
- valueComparator: (a, b) => {
- if (typeof a !== "number") {
- return a === b;
- }
- // Allow some minor differences in the numbers.
- return Math.abs(a - b) < 1e-10;
- }
- }),
- () => `[${tojson(left)}] != [${tojson(right)}]`);
-
- [docWithLegacyPoint, docWithGeoPoint, docWithGeoLine].forEach(doc => {
- const docPlusNewFields = (newDoc) => Object.extend(Object.extend({}, doc), newDoc);
-
- //
- // Tests for "distanceField".
- //
- const expectedDistance = 0.0000000000000001;
-
- // Test that "distanceField" can be computed in a new field.
- assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "newField"}),
- docPlusNewFields({newField: expectedDistance}));
-
- // Test that "distanceField" can be computed in a new nested field.
- assertCloseEnough(
- firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "nested.field"}),
- docPlusNewFields({nested: {field: expectedDistance}}));
-
- // Test that "distanceField" can overwrite an existing scalar field.
- assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "scalar"}),
- docPlusNewFields({scalar: expectedDistance}));
-
- // Test that "distanceField" can completely overwrite an existing array field.
- assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "arr"}),
- docPlusNewFields({arr: expectedDistance}));
-
-        // TODO (SERVER-35561): When "distanceField" shares a path prefix with an existing field,
-        // the fields are overwritten, even if they could be preserved.
- assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "arr.b"}),
- docPlusNewFields({arr: {b: expectedDistance}}));
-
- //
- // Tests for both "includeLocs" and "distanceField".
- //
-
- // Test that "distanceField" and "includeLocs" can both be specified.
- assertCloseEnough(
- firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "dist", includeLocs: "loc"}),
- docPlusNewFields({dist: expectedDistance, loc: doc.geo}));
-
- // Test that "distanceField" and "includeLocs" can be the same path. The result is arbitrary
- // ("includeLocs" wins).
- assertCloseEnough(
- firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "newField", includeLocs: "newField"}),
- docPlusNewFields({newField: doc.geo}));
-
- // Test that "distanceField" and "includeLocs" are both preserved when their paths share a
- // prefix but do not conflict.
- assertCloseEnough(
- firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "comp.dist", includeLocs: "comp.loc"}),
- docPlusNewFields({comp: {dist: expectedDistance, loc: doc.geo}}));
-
- //
- // Tests for "includeLocs" only. Project out the distance field.
- //
- const removeDistFieldProj = {d: 0};
-
- // Test that "includeLocs" can be computed in a new field.
- assertCloseEnough(
- firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "d", includeLocs: "newField"},
- removeDistFieldProj),
- docPlusNewFields({newField: doc.geo}));
-
- // Test that "includeLocs" can be computed in a new nested field.
- assertCloseEnough(
- firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "d", includeLocs: "nested.field"},
- removeDistFieldProj),
- docPlusNewFields({nested: {field: doc.geo}}));
-
- // Test that "includeLocs" can overwrite an existing scalar field.
- assertCloseEnough(firstGeoNearResult(
- {near: doc.ptForNearQuery, distanceField: "d", includeLocs: "scalar"},
- removeDistFieldProj),
- docPlusNewFields({scalar: doc.geo}));
-
- // Test that "includeLocs" can completely overwrite an existing array field.
- assertCloseEnough(
- firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "arr"},
- removeDistFieldProj),
- docPlusNewFields({arr: doc.geo}));
-
- // TODO (SERVER-35561): When "includeLocs" shares a path prefix with an existing field, the
- // fields are overwritten, even if they could be preserved.
- assertCloseEnough(
- firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "arr.a"},
- removeDistFieldProj),
- docPlusNewFields({arr: {a: doc.geo}}));
- });
+ const res = coll.aggregate(pipeline).toArray();
+ assert.eq(1, res.length, tojson(res));
+ return res[0];
+}
+
+// Use documents with a variety of fields: scalars, arrays, legacy points, and GeoJSON
+// objects.
+const docWithLegacyPoint = {
+ _id: "legacy",
+ geo: [1, 1],
+ ptForNearQuery: [1, 1],
+ scalar: "foo",
+ arr: [{a: 1, b: 1}, {a: 2, b: 2}],
+};
+const docWithGeoPoint = {
+ _id: "point",
+ geo: {type: "Point", coordinates: [1, 0]},
+ ptForNearQuery: [1, 0],
+ scalar: "bar",
+ arr: [{a: 3, b: 3}, {a: 4, b: 4}],
+};
+const docWithGeoLine = {
+ _id: "linestring",
+ geo: {type: "LineString", coordinates: [[0, 0], [-1, -1]]},
+ ptForNearQuery: [-1, -1],
+ scalar: "baz",
+ arr: [{a: 5, b: 5}, {a: 6, b: 6}],
+};
+
+// We test with a 2dsphere index, since 2d indexes can't support GeoJSON objects.
+assert.commandWorked(coll.createIndex({geo: "2dsphere"}));
+
+// Populate the collection.
+assert.writeOK(coll.insert(docWithLegacyPoint));
+assert.writeOK(coll.insert(docWithGeoPoint));
+assert.writeOK(coll.insert(docWithGeoLine));
+
+// Define a custom way to compare documents since the results here might differ by insignificant
+// amounts.
+const assertCloseEnough = (left, right) => assert(customDocumentEq({
+ left: left,
+ right: right,
+ valueComparator: (a, b) => {
+ if (typeof a !== "number") {
+ return a === b;
+ }
+ // Allow some minor differences in the
+ // numbers.
+ return Math.abs(a - b) < 1e-10;
+ }
+ }),
+ () => `[${tojson(left)}] != [${tojson(right)}]`);
+
+[docWithLegacyPoint, docWithGeoPoint, docWithGeoLine].forEach(doc => {
+ const docPlusNewFields = (newDoc) => Object.extend(Object.extend({}, doc), newDoc);
+
+ //
+ // Tests for "distanceField".
+ //
+ const expectedDistance = 0.0000000000000001;
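+    // Effectively zero: each query point lies on the document's own geometry, and
+    // assertCloseEnough tolerates differences below 1e-10.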
+
+ // Test that "distanceField" can be computed in a new field.
+ assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "newField"}),
+ docPlusNewFields({newField: expectedDistance}));
+
+ // Test that "distanceField" can be computed in a new nested field.
+ assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "nested.field"}),
+ docPlusNewFields({nested: {field: expectedDistance}}));
+
+ // Test that "distanceField" can overwrite an existing scalar field.
+ assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "scalar"}),
+ docPlusNewFields({scalar: expectedDistance}));
+
+ // Test that "distanceField" can completely overwrite an existing array field.
+ assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "arr"}),
+ docPlusNewFields({arr: expectedDistance}));
+
+    // TODO (SERVER-35561): When "distanceField" shares a path prefix with an existing field,
+    // the fields are overwritten, even if they could be preserved.
+ assertCloseEnough(firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "arr.b"}),
+ docPlusNewFields({arr: {b: expectedDistance}}));
+
+ //
+ // Tests for both "includeLocs" and "distanceField".
+ //
+
+ // Test that "distanceField" and "includeLocs" can both be specified.
+ assertCloseEnough(
+ firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "dist", includeLocs: "loc"}),
+ docPlusNewFields({dist: expectedDistance, loc: doc.geo}));
+
+ // Test that "distanceField" and "includeLocs" can be the same path. The result is arbitrary
+ // ("includeLocs" wins).
+ assertCloseEnough(
+ firstGeoNearResult(
+ {near: doc.ptForNearQuery, distanceField: "newField", includeLocs: "newField"}),
+ docPlusNewFields({newField: doc.geo}));
+
+ // Test that "distanceField" and "includeLocs" are both preserved when their paths share a
+ // prefix but do not conflict.
+ assertCloseEnough(
+ firstGeoNearResult(
+ {near: doc.ptForNearQuery, distanceField: "comp.dist", includeLocs: "comp.loc"}),
+ docPlusNewFields({comp: {dist: expectedDistance, loc: doc.geo}}));
+
+ //
+ // Tests for "includeLocs" only. Project out the distance field.
+ //
+ const removeDistFieldProj = {d: 0};
+
+ // Test that "includeLocs" can be computed in a new field.
+ assertCloseEnough(
+ firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "newField"},
+ removeDistFieldProj),
+ docPlusNewFields({newField: doc.geo}));
+
+ // Test that "includeLocs" can be computed in a new nested field.
+ assertCloseEnough(
+ firstGeoNearResult(
+ {near: doc.ptForNearQuery, distanceField: "d", includeLocs: "nested.field"},
+ removeDistFieldProj),
+ docPlusNewFields({nested: {field: doc.geo}}));
+
+ // Test that "includeLocs" can overwrite an existing scalar field.
+ assertCloseEnough(
+ firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "scalar"},
+ removeDistFieldProj),
+ docPlusNewFields({scalar: doc.geo}));
+
+ // Test that "includeLocs" can completely overwrite an existing array field.
+ assertCloseEnough(
+ firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "arr"},
+ removeDistFieldProj),
+ docPlusNewFields({arr: doc.geo}));
+
+ // TODO (SERVER-35561): When "includeLocs" shares a path prefix with an existing field, the
+ // fields are overwritten, even if they could be preserved.
+ assertCloseEnough(
+ firstGeoNearResult({near: doc.ptForNearQuery, distanceField: "d", includeLocs: "arr.a"},
+ removeDistFieldProj),
+ docPlusNewFields({arr: {a: doc.geo}}));
+});
}());
diff --git a/jstests/aggregation/sources/geonear/mindistance_and_maxdistance.js b/jstests/aggregation/sources/geonear/mindistance_and_maxdistance.js
index 7c5e6c750f3..99262902d3e 100644
--- a/jstests/aggregation/sources/geonear/mindistance_and_maxdistance.js
+++ b/jstests/aggregation/sources/geonear/mindistance_and_maxdistance.js
@@ -2,98 +2,103 @@
* Tests the behavior of the $geoNear stage with varying values of 'minDistance' and 'maxDistance'.
*/
(function() {
- "use strict";
-
- const coll = db.getCollection("geonear_mindistance_maxdistance");
-
- const kMaxDistance = Math.PI * 2.0;
-
- // Test points that are exactly at the "near" point, close to the point, and far from the point.
- // Distances are purposely chosen to be small so that distances in meters and radians are close.
- const origin = {pt: [0, 0]};
- const near = {pt: [0.23, -0.32]};
- const far = {pt: [5.9, 0.0]};
-
- ["2d", "2dsphere"].forEach(geoType => {
- jsTestLog(`Testing $geoNear with index {pt: "${geoType}"}`);
- coll.drop();
-
- // Create the desired index type and populate the collection.
- assert.commandWorked(coll.createIndex({pt: geoType}));
- [origin, near, far].forEach(doc => {
- doc.distFromOrigin = (geoType === "2dsphere") ? Geo.sphereDistance(doc.pt, origin.pt)
- : Geo.distance(doc.pt, origin.pt);
- assert.commandWorked(coll.insert(doc));
- });
-
- /**
- * Helper function that runs a $geoNear aggregation near the origin, setting the minimum
- * and/or maximum search distance using the object 'minMaxOpts', and asserting that the
- * results match 'expected'.
- */
- function assertGeoNearResults(minMaxOpts, expected) {
- const geoNearStage = {
- $geoNear: Object.extend(
- {near: origin.pt, distanceField: "dist", spherical: (geoType === "2dsphere")},
- minMaxOpts)
- };
- const projStage = {$project: {_id: 0, dist: 0}};
- const res = coll.aggregate([geoNearStage, projStage]).toArray();
- assert.eq(
- res,
- expected,
- () => `Unexpected results from ${tojson(geoNearStage)} using a ${geoType} index`);
- }
-
-        // If neither a minimum nor a maximum distance is set, all points are returned.
- assertGeoNearResults({}, [origin, near, far]);
-
- //
- // Tests for minDistance.
- //
-
- // Negative values and non-numeric values are illegal.
- assert.throws(() => assertGeoNearResults({minDistance: -1.1}));
- assert.throws(() => assertGeoNearResults({minDistance: "3.2"}));
-
- // A minimum distance of 0 returns all points.
- assertGeoNearResults({minDistance: -0.0}, [origin, near, far]);
- assertGeoNearResults({minDistance: 0.0}, [origin, near, far]);
-
- // Larger minimum distances exclude closer points.
- assertGeoNearResults({minDistance: (near.distFromOrigin / 2)}, [near, far]);
- assertGeoNearResults({minDistance: (far.distFromOrigin / 2)}, [far]);
- assertGeoNearResults({minDistance: kMaxDistance}, []);
-
- //
- // Tests for maxDistance.
- //
-
- // Negative values and non-numeric values are illegal.
- assert.throws(() => assertGeoNearResults({maxDistance: -1.1}));
- assert.throws(() => assertGeoNearResults({maxDistance: "3.2"}));
-
- // A maximum distance of 0 returns only the origin.
- assertGeoNearResults({maxDistance: 0.0}, [origin]);
- assertGeoNearResults({maxDistance: -0.0}, [origin]);
-
- // Larger maximum distances include more points.
- assertGeoNearResults({maxDistance: (near.distFromOrigin + 0.01)}, [origin, near]);
- assertGeoNearResults({maxDistance: (far.distFromOrigin + 0.01)}, [origin, near, far]);
-
- //
- // Tests for minDistance and maxDistance together.
- //
-
- // Cast a wide net and all points should be returned.
- assertGeoNearResults({minDistance: 0.0, maxDistance: kMaxDistance}, [origin, near, far]);
-
- // A narrower range excludes the origin and the far point.
- assertGeoNearResults(
- {minDistance: (near.distFromOrigin / 2), maxDistance: (near.distFromOrigin + 0.01)},
- [near]);
-
- // An impossible range is legal but returns no results.
- assertGeoNearResults({minDistance: 3.0, maxDistance: 1.0}, []);
+"use strict";
+
+const coll = db.getCollection("geonear_mindistance_maxdistance");
+
+const kMaxDistance = Math.PI * 2.0;
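+// An "include everything" bound: spherical distances never exceed pi radians, and the farthest
+// flat (2d) test point is only 5.9 units from the origin.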
+
+// Test points that are exactly at the "near" point, close to the point, and far from the point.
+// Distances are purposely chosen to be small so that distances in meters and radians are close.
+const origin = {
+ pt: [0, 0]
+};
+const near = {
+ pt: [0.23, -0.32]
+};
+const far = {
+ pt: [5.9, 0.0]
+};
+
+["2d", "2dsphere"].forEach(geoType => {
+ jsTestLog(`Testing $geoNear with index {pt: "${geoType}"}`);
+ coll.drop();
+
+ // Create the desired index type and populate the collection.
+ assert.commandWorked(coll.createIndex({pt: geoType}));
+ [origin, near, far].forEach(doc => {
+ doc.distFromOrigin = (geoType === "2dsphere") ? Geo.sphereDistance(doc.pt, origin.pt)
+ : Geo.distance(doc.pt, origin.pt);
+ assert.commandWorked(coll.insert(doc));
});
+
+ /**
+ * Helper function that runs a $geoNear aggregation near the origin, setting the minimum
+ * and/or maximum search distance using the object 'minMaxOpts', and asserting that the
+ * results match 'expected'.
+ */
+ function assertGeoNearResults(minMaxOpts, expected) {
+ const geoNearStage = {
+ $geoNear: Object.extend(
+ {near: origin.pt, distanceField: "dist", spherical: (geoType === "2dsphere")},
+ minMaxOpts)
+ };
+ const projStage = {$project: {_id: 0, dist: 0}};
+ const res = coll.aggregate([geoNearStage, projStage]).toArray();
+ assert.eq(res,
+ expected,
+ () => `Unexpected results from ${tojson(geoNearStage)} using a ${geoType} index`);
+ }
+
+    // If neither a minimum nor a maximum distance is set, all points are returned.
+ assertGeoNearResults({}, [origin, near, far]);
+
+ //
+ // Tests for minDistance.
+ //
+
+ // Negative values and non-numeric values are illegal.
+ assert.throws(() => assertGeoNearResults({minDistance: -1.1}));
+ assert.throws(() => assertGeoNearResults({minDistance: "3.2"}));
+
+ // A minimum distance of 0 returns all points.
+ assertGeoNearResults({minDistance: -0.0}, [origin, near, far]);
+ assertGeoNearResults({minDistance: 0.0}, [origin, near, far]);
+
+ // Larger minimum distances exclude closer points.
+ assertGeoNearResults({minDistance: (near.distFromOrigin / 2)}, [near, far]);
+ assertGeoNearResults({minDistance: (far.distFromOrigin / 2)}, [far]);
+ assertGeoNearResults({minDistance: kMaxDistance}, []);
+
+ //
+ // Tests for maxDistance.
+ //
+
+ // Negative values and non-numeric values are illegal.
+ assert.throws(() => assertGeoNearResults({maxDistance: -1.1}));
+ assert.throws(() => assertGeoNearResults({maxDistance: "3.2"}));
+
+ // A maximum distance of 0 returns only the origin.
+ assertGeoNearResults({maxDistance: 0.0}, [origin]);
+ assertGeoNearResults({maxDistance: -0.0}, [origin]);
+
+ // Larger maximum distances include more points.
+ assertGeoNearResults({maxDistance: (near.distFromOrigin + 0.01)}, [origin, near]);
+ assertGeoNearResults({maxDistance: (far.distFromOrigin + 0.01)}, [origin, near, far]);
+
+ //
+ // Tests for minDistance and maxDistance together.
+ //
+
+ // Cast a wide net and all points should be returned.
+ assertGeoNearResults({minDistance: 0.0, maxDistance: kMaxDistance}, [origin, near, far]);
+
+ // A narrower range excludes the origin and the far point.
+ assertGeoNearResults(
+ {minDistance: (near.distFromOrigin / 2), maxDistance: (near.distFromOrigin + 0.01)},
+ [near]);
+
+ // An impossible range is legal but returns no results.
+ assertGeoNearResults({minDistance: 3.0, maxDistance: 1.0}, []);
+});
}());
diff --git a/jstests/aggregation/sources/geonear/requires_geo_index.js b/jstests/aggregation/sources/geonear/requires_geo_index.js
index e2c3a1d9706..f8380eb27a4 100644
--- a/jstests/aggregation/sources/geonear/requires_geo_index.js
+++ b/jstests/aggregation/sources/geonear/requires_geo_index.js
@@ -2,21 +2,21 @@
// TODO: Reenable test on passthroughs with sharded collections as part of SERVER-38995.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
+load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
- const coll = db.coll;
- const from = db.from;
+const coll = db.coll;
+const from = db.from;
- coll.drop();
- from.drop();
+coll.drop();
+from.drop();
- const geonearPipeline = [
- {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
- ];
+const geonearPipeline = [
+ {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
+];
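+// $geoNear requires a geospatial index on the collection it runs against; without one it
+// fails with IndexNotFound, as asserted below.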
- const geonearWithinLookupPipeline = [
+const geonearWithinLookupPipeline = [
{
$lookup: {
pipeline: geonearPipeline,
@@ -26,16 +26,16 @@
},
];
- assert.commandWorked(coll.insert({_id: 5, x: 5}));
- assert.commandWorked(from.insert({_id: 1, geo: [0, 0]}));
+assert.commandWorked(coll.insert({_id: 5, x: 5}));
+assert.commandWorked(from.insert({_id: 1, geo: [0, 0]}));
- // Fail without index.
- assertErrorCode(from, geonearPipeline, ErrorCodes.IndexNotFound);
- assertErrorCode(coll, geonearWithinLookupPipeline, ErrorCodes.IndexNotFound);
+// Fail without index.
+assertErrorCode(from, geonearPipeline, ErrorCodes.IndexNotFound);
+assertErrorCode(coll, geonearWithinLookupPipeline, ErrorCodes.IndexNotFound);
- assert.commandWorked(from.createIndex({geo: "2dsphere"}));
+assert.commandWorked(from.createIndex({geo: "2dsphere"}));
- // Run successfully when you have the geospatial index.
- assert.eq(from.aggregate(geonearPipeline).itcount(), 1);
- assert.eq(coll.aggregate(geonearWithinLookupPipeline).itcount(), 1);
+// Run successfully when you have the geospatial index.
+assert.eq(from.aggregate(geonearPipeline).itcount(), 1);
+assert.eq(coll.aggregate(geonearWithinLookupPipeline).itcount(), 1);
}());
diff --git a/jstests/aggregation/sources/graphLookup/airports.js b/jstests/aggregation/sources/graphLookup/airports.js
index 9254fd992fa..779678b07da 100644
--- a/jstests/aggregation/sources/graphLookup/airports.js
+++ b/jstests/aggregation/sources/graphLookup/airports.js
@@ -5,36 +5,36 @@
// In MongoDB 3.4, $graphLookup was introduced. In this file, we test some complex graphs.
(function() {
- "use strict";
+"use strict";
- var local = db.local;
- var foreign = db.foreign;
+var local = db.local;
+var foreign = db.foreign;
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- var airports = [
- {_id: "JFK", connects: ["PWM", "BOS", "LGA", "SFO"]},
- {_id: "PWM", connects: ["BOS", "JFK"]},
- {_id: "BOS", connects: ["PWM", "JFK", "LGA"]},
- {_id: "SFO", connects: ["JFK", "MIA"]},
- {_id: "LGA", connects: ["BOS", "JFK", "ORD"]},
- {_id: "ORD", connects: ["LGA"]},
- {_id: "ATL", connects: ["MIA"]},
- {_id: "MIA", connects: ["ATL", "SFO"]}
- ];
+var airports = [
+ {_id: "JFK", connects: ["PWM", "BOS", "LGA", "SFO"]},
+ {_id: "PWM", connects: ["BOS", "JFK"]},
+ {_id: "BOS", connects: ["PWM", "JFK", "LGA"]},
+ {_id: "SFO", connects: ["JFK", "MIA"]},
+ {_id: "LGA", connects: ["BOS", "JFK", "ORD"]},
+ {_id: "ORD", connects: ["LGA"]},
+ {_id: "ATL", connects: ["MIA"]},
+ {_id: "MIA", connects: ["ATL", "SFO"]}
+];
- var bulk = foreign.initializeUnorderedBulkOp();
- airports.forEach(function(a) {
- bulk.insert(a);
- });
- assert.writeOK(bulk.execute());
+var bulk = foreign.initializeUnorderedBulkOp();
+airports.forEach(function(a) {
+ bulk.insert(a);
+});
+assert.writeOK(bulk.execute());
- // Insert a dummy document so that something will flow through the pipeline.
- local.insert({});
+// Insert a dummy document so that something will flow through the pipeline.
+local.insert({});
- // Perform a simple $graphLookup and ensure it retrieves every result.
- var res = local
+// Perform a simple $graphLookup and ensure it retrieves every result.
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -46,12 +46,12 @@
})
.toArray()[0];
- // "foreign" represents a connected graph.
- assert.eq(res.connections.length, airports.length);
+// "foreign" represents a connected graph.
+assert.eq(res.connections.length, airports.length);
- // Perform a $graphLookup and ensure it correctly computes the shortest path to a node when more
- // than one path exists.
- res = local
+// Perform a $graphLookup and ensure it correctly computes the shortest path to a node when more
+// than one path exists.
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -66,17 +66,17 @@
{$project: {_id: "$connections._id", hops: "$connections.hops"}})
.toArray();
- var expectedDistances = {BOS: 0, PWM: 1, JFK: 1, LGA: 1, ORD: 2, SFO: 2, MIA: 3, ATL: 4};
+var expectedDistances = {BOS: 0, PWM: 1, JFK: 1, LGA: 1, ORD: 2, SFO: 2, MIA: 3, ATL: 4};
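+// For example, the shortest route to ATL is BOS -> JFK -> SFO -> MIA -> ATL: four hops.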
- assert.eq(res.length, airports.length);
- res.forEach(function(c) {
- assert.eq(c.hops, expectedDistances[c._id]);
- });
+assert.eq(res.length, airports.length);
+res.forEach(function(c) {
+ assert.eq(c.hops, expectedDistances[c._id]);
+});
- // Disconnect the graph, and ensure we don't find the other side.
- foreign.remove({_id: "JFK"});
+// Disconnect the graph, and ensure we don't find the other side.
+foreign.remove({_id: "JFK"});
- res = db.local
+res = db.local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -88,6 +88,6 @@
})
.toArray()[0];
- // ATL should now connect to itself, MIA, and SFO.
- assert.eq(res.connections.length, 3);
+// ATL should now connect to itself, MIA, and SFO.
+assert.eq(res.connections.length, 3);
}());
diff --git a/jstests/aggregation/sources/graphLookup/basic.js b/jstests/aggregation/sources/graphLookup/basic.js
index c0bcb1a8a53..ef44b9b60bb 100644
--- a/jstests/aggregation/sources/graphLookup/basic.js
+++ b/jstests/aggregation/sources/graphLookup/basic.js
@@ -6,16 +6,16 @@
// of the stage.
(function() {
- "use strict";
+"use strict";
- var local = db.local;
- var foreign = db.foreign;
+var local = db.local;
+var foreign = db.foreign;
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- // Ensure a $graphLookup works even if one of the involved collections doesn't exist.
- const basicGraphLookup = {
+// Ensure a $graphLookup works even if one of the involved collections doesn't exist.
+const basicGraphLookup = {
$graphLookup: {
from: "foreign",
startWith: "$starting",
@@ -25,40 +25,39 @@
}
};
- assert.eq(
- local.aggregate([basicGraphLookup]).toArray().length,
- 0,
- "expected an empty result set for a $graphLookup with non-existent local and foreign " +
- "collections");
+assert.eq(local.aggregate([basicGraphLookup]).toArray().length,
+ 0,
+ "expected an empty result set for a $graphLookup with non-existent local and foreign " +
+ "collections");
- assert.writeOK(foreign.insert({}));
+assert.writeOK(foreign.insert({}));
- assert.eq(local.aggregate([basicGraphLookup]).toArray().length,
- 0,
- "expected an empty result set for a $graphLookup on a non-existent local collection");
+assert.eq(local.aggregate([basicGraphLookup]).toArray().length,
+ 0,
+ "expected an empty result set for a $graphLookup on a non-existent local collection");
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- assert.writeOK(local.insert({_id: 0}));
+assert.writeOK(local.insert({_id: 0}));
- assert.eq(local.aggregate([basicGraphLookup]).toArray(),
- [{_id: 0, results: []}],
- "expected $graphLookup to succeed with a non-existent foreign collection");
+assert.eq(local.aggregate([basicGraphLookup]).toArray(),
+ [{_id: 0, results: []}],
+ "expected $graphLookup to succeed with a non-existent foreign collection");
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 100; i++) {
- bulk.insert({_id: i, neighbors: [i - 1, i + 1]});
- }
- assert.writeOK(bulk.execute());
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 100; i++) {
+ bulk.insert({_id: i, neighbors: [i - 1, i + 1]});
+}
+assert.writeOK(bulk.execute());
- assert.writeOK(local.insert({starting: 50}));
+assert.writeOK(local.insert({starting: 50}));
- // Perform a simple $graphLookup and ensure it retrieves every result.
- var res = local
+// Perform a simple $graphLookup and ensure it retrieves every result.
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -70,10 +69,10 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
- // Perform a $graphLookup and ensure it respects "maxDepth".
- res = local
+// Perform a $graphLookup and ensure it respects "maxDepth".
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -86,11 +85,11 @@
})
.toArray()[0];
- // At depth zero, we retrieve one integer, and two for every depth thereafter.
- assert.eq(res.integers.length, 11);
+// At depth zero, we retrieve one integer, and two for every depth thereafter.
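+// (With a maxDepth of 5, that is 1 + 2 * 5 = 11 integers.)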
+assert.eq(res.integers.length, 11);
- // Perform a $graphLookup and ensure it properly evaluates "startWith".
- res = local
+// Perform a $graphLookup and ensure it properly evaluates "startWith".
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -103,11 +102,11 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 1);
- assert.eq(res.integers[0]._id, 53);
+assert.eq(res.integers.length, 1);
+assert.eq(res.integers[0]._id, 53);
- // Perform a $graphLookup and ensure it properly expands "startWith".
- res = local
+// Perform a $graphLookup and ensure it properly expands "startWith".
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -120,17 +119,17 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 3);
+assert.eq(res.integers.length, 3);
- // $graphLookup should not recurse when the 'connectFromField' is missing. However, if it
- // mistakenly does, then it would look for a 'connectToField' value of null. In order to prevent
- // regressions, we insert a document with a 'connectToField' value of null, then perform a
- // $graphLookup, and ensure that we do not find the erroneous document.
- assert.writeOK(foreign.remove({_id: 51}));
- assert.writeOK(foreign.insert({_id: 51}));
- assert.writeOK(foreign.insert({_id: null, neighbors: [50, 52]}));
+// $graphLookup should not recurse when the 'connectFromField' is missing. However, if it
+// mistakenly does, then it would look for a 'connectToField' value of null. In order to prevent
+// regressions, we insert a document with a 'connectToField' value of null, then perform a
+// $graphLookup, and ensure that we do not find the erroneous document.
+assert.writeOK(foreign.remove({_id: 51}));
+assert.writeOK(foreign.insert({_id: 51}));
+assert.writeOK(foreign.insert({_id: null, neighbors: [50, 52]}));
- res = local
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -142,17 +141,17 @@
})
.toArray()[0];
- // Our result should be missing the values with _id from 52 to 99.
- assert.eq(res.integers.length, 52);
+// Our result should be missing the values with _id from 52 to 99.
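+// That leaves _ids 0 through 51, i.e. 52 documents.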
+assert.eq(res.integers.length, 52);
- // Perform a $graphLookup and ensure we don't go into an infinite loop when our graph is cyclic.
- assert.writeOK(foreign.remove({_id: {$in: [null, 51]}}));
- assert.writeOK(foreign.insert({_id: 51, neighbors: [50, 52]}));
+// Perform a $graphLookup and ensure we don't go into an infinite loop when our graph is cyclic.
+assert.writeOK(foreign.remove({_id: {$in: [null, 51]}}));
+assert.writeOK(foreign.insert({_id: 51, neighbors: [50, 52]}));
- assert.writeOK(foreign.update({_id: 99}, {$set: {neighbors: [98, 0]}}));
- assert.writeOK(foreign.update({_id: 0}, {$set: {neighbors: [99, 1]}}));
+assert.writeOK(foreign.update({_id: 99}, {$set: {neighbors: [98, 0]}}));
+assert.writeOK(foreign.update({_id: 0}, {$set: {neighbors: [99, 1]}}));
- res = local
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -164,10 +163,10 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
- // Perform a $graphLookup and ensure that "depthField" is properly populated.
- res = local
+// Perform a $graphLookup and ensure that "depthField" is properly populated.
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -180,9 +179,9 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
- res.integers.forEach(function(n) {
- assert.eq(n.distance, Math.abs(50 - n._id));
- });
+res.integers.forEach(function(n) {
+ assert.eq(n.distance, Math.abs(50 - n._id));
+});
}());
diff --git a/jstests/aggregation/sources/graphLookup/collation_graphlookup.js b/jstests/aggregation/sources/graphLookup/collation_graphlookup.js
index 7b457289cc6..f3fbcf2ee34 100644
--- a/jstests/aggregation/sources/graphLookup/collation_graphlookup.js
+++ b/jstests/aggregation/sources/graphLookup/collation_graphlookup.js
@@ -8,22 +8,26 @@
* set on the aggregation, or the default collation of the collection.
*/
(function() {
- "use strict";
+"use strict";
- var res;
- const caseInsensitiveUS = {collation: {locale: "en_US", strength: 2}};
- const caseSensitiveUS = {collation: {locale: "en_US", strength: 3}};
+var res;
+const caseInsensitiveUS = {
+ collation: {locale: "en_US", strength: 2}
+};
+const caseSensitiveUS = {
+ collation: {locale: "en_US", strength: 3}
+};
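+// Strength 2 (secondary) ignores case; strength 3 (tertiary) is case-sensitive.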
- var coll = db.collation_graphlookup;
- var foreignColl = db.collation_graphlookup_foreign;
+var coll = db.collation_graphlookup;
+var foreignColl = db.collation_graphlookup_foreign;
- // Test that $graphLookup respects the collation set on the aggregation pipeline. Case
- // insensitivity should mean that we find both "jeremy" and "jimmy" as friends.
- coll.drop();
- assert.writeOK(coll.insert({username: "erica", friends: ["jeremy", "jimmy"]}));
- assert.writeOK(coll.insert([{username: "JEREMY"}, {username: "JIMMY"}]));
+// Test that $graphLookup respects the collation set on the aggregation pipeline. Case
+// insensitivity should mean that we find both "jeremy" and "jimmy" as friends.
+coll.drop();
+assert.writeOK(coll.insert({username: "erica", friends: ["jeremy", "jimmy"]}));
+assert.writeOK(coll.insert([{username: "JEREMY"}, {username: "JIMMY"}]));
- res = coll.aggregate(
+res = coll.aggregate(
[
{$match: {username: "erica"}},
{
@@ -38,12 +42,12 @@
],
caseInsensitiveUS)
.toArray();
- assert.eq(1, res.length);
- assert.eq("erica", res[0].username);
- assert.eq(2, res[0].friendUsers.length);
+assert.eq(1, res.length);
+assert.eq("erica", res[0].username);
+assert.eq(2, res[0].friendUsers.length);
- // Negative test: ensure that we don't find any friends when the collation is simple.
- res = coll.aggregate([
+// Negative test: ensure that we don't find any friends when the collation is simple.
+res = coll.aggregate([
{$match: {username: "erica"}},
{
$graphLookup: {
@@ -56,20 +60,20 @@
}
])
.toArray();
- assert.eq(1, res.length);
- assert.eq("erica", res[0].username);
- assert.eq(0, res[0].friendUsers.length);
+assert.eq(1, res.length);
+assert.eq("erica", res[0].username);
+assert.eq(0, res[0].friendUsers.length);
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), caseInsensitiveUS));
- assert.writeOK(coll.insert({username: "erica", friends: ["jeremy", "jimmy"]}));
- foreignColl.drop();
- assert.commandWorked(db.createCollection(foreignColl.getName(), caseSensitiveUS));
- assert.writeOK(foreignColl.insert([{username: "JEREMY"}, {username: "JIMMY"}]));
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), caseInsensitiveUS));
+assert.writeOK(coll.insert({username: "erica", friends: ["jeremy", "jimmy"]}));
+foreignColl.drop();
+assert.commandWorked(db.createCollection(foreignColl.getName(), caseSensitiveUS));
+assert.writeOK(foreignColl.insert([{username: "JEREMY"}, {username: "JIMMY"}]));
- // Test that $graphLookup inherits the default collation of the collection on which it is run,
- // and that this collation is used instead of the default collation of the foreign collection.
- res = coll.aggregate([
+// Test that $graphLookup inherits the default collation of the collection on which it is run,
+// and that this collation is used instead of the default collation of the foreign collection.
+res = coll.aggregate([
{$match: {username: "erica"}},
{
$graphLookup: {
@@ -82,18 +86,18 @@
}
])
.toArray();
- assert.eq(1, res.length);
- assert.eq("erica", res[0].username);
- assert.eq(2, res[0].friendUsers.length);
+assert.eq(1, res.length);
+assert.eq("erica", res[0].username);
+assert.eq(2, res[0].friendUsers.length);
- // Test that we don't use the collation to dedup string _id values. This would cause us to miss
- // nodes in the graph that have distinct _id values which compare equal under the collation.
- coll.drop();
- assert.writeOK(coll.insert({username: "erica", friends: ["jeremy"]}));
- assert.writeOK(coll.insert({_id: "foo", username: "JEREMY", friends: ["jimmy"]}));
- assert.writeOK(coll.insert({_id: "FOO", username: "jimmy", friends: []}));
+// Test that we don't use the collation to dedup string _id values. This would cause us to miss
+// nodes in the graph that have distinct _id values which compare equal under the collation.
+coll.drop();
+assert.writeOK(coll.insert({username: "erica", friends: ["jeremy"]}));
+assert.writeOK(coll.insert({_id: "foo", username: "JEREMY", friends: ["jimmy"]}));
+assert.writeOK(coll.insert({_id: "FOO", username: "jimmy", friends: []}));
- res = coll.aggregate(
+res = coll.aggregate(
[
{$match: {username: "erica"}},
{
@@ -108,18 +112,18 @@
],
caseInsensitiveUS)
.toArray();
- assert.eq(1, res.length);
- assert.eq("erica", res[0].username);
- assert.eq(2, res[0].friendUsers.length);
+assert.eq(1, res.length);
+assert.eq("erica", res[0].username);
+assert.eq(2, res[0].friendUsers.length);
- // Test that the result set is not deduplicated under the collation. If two documents are
- // entirely equal under the collation, they should still both get returned in the "as" field.
- coll.drop();
- assert.writeOK(coll.insert({username: "erica", friends: ["jeremy"]}));
- assert.writeOK(coll.insert({_id: "foo", username: "jeremy"}));
- assert.writeOK(coll.insert({_id: "FOO", username: "JEREMY"}));
+// Test that the result set is not deduplicated under the collation. If two documents are
+// entirely equal under the collation, they should still both get returned in the "as" field.
+coll.drop();
+assert.writeOK(coll.insert({username: "erica", friends: ["jeremy"]}));
+assert.writeOK(coll.insert({_id: "foo", username: "jeremy"}));
+assert.writeOK(coll.insert({_id: "FOO", username: "JEREMY"}));
- res = coll.aggregate(
+res = coll.aggregate(
[
{$match: {username: "erica"}},
{
@@ -134,7 +138,7 @@
],
caseInsensitiveUS)
.toArray();
- assert.eq(1, res.length);
- assert.eq("erica", res[0].username);
- assert.eq(2, res[0].friendUsers.length);
+assert.eq(1, res.length);
+assert.eq("erica", res[0].username);
+assert.eq(2, res[0].friendUsers.length);
})();
diff --git a/jstests/aggregation/sources/graphLookup/error.js b/jstests/aggregation/sources/graphLookup/error.js
index 42d1203238c..b7360f3e9e8 100644
--- a/jstests/aggregation/sources/graphLookup/error.js
+++ b/jstests/aggregation/sources/graphLookup/error.js
@@ -6,18 +6,17 @@
load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
(function() {
- "use strict";
+"use strict";
- var local = db.local;
+var local = db.local;
- local.drop();
- assert.writeOK(local.insert({b: 0}));
+local.drop();
+assert.writeOK(local.insert({b: 0}));
- var pipeline = {$graphLookup: 4};
- assertErrorCode(
- local, pipeline, ErrorCodes.FailedToParse, "$graphLookup spec must be an object");
+var pipeline = {$graphLookup: 4};
+assertErrorCode(local, pipeline, ErrorCodes.FailedToParse, "$graphLookup spec must be an object");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -27,9 +26,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
maxDepth: "string"
}
};
- assertErrorCode(local, pipeline, 40100, "maxDepth must be numeric");
+assertErrorCode(local, pipeline, 40100, "maxDepth must be numeric");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -39,9 +38,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
maxDepth: -1
}
};
- assertErrorCode(local, pipeline, 40101, "maxDepth must be nonnegative");
+assertErrorCode(local, pipeline, 40101, "maxDepth must be nonnegative");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -51,9 +50,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
maxDepth: 2.3
}
};
- assertErrorCode(local, pipeline, 40102, "maxDepth must be representable as a long long");
+assertErrorCode(local, pipeline, 40102, "maxDepth must be representable as a long long");
- pipeline = {
+pipeline = {
$graphLookup: {
from: -1,
startWith: {$literal: 0},
@@ -62,9 +61,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, ErrorCodes.FailedToParse, "from must be a string");
+assertErrorCode(local, pipeline, ErrorCodes.FailedToParse, "from must be a string");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "",
startWith: {$literal: 0},
@@ -73,9 +72,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, ErrorCodes.InvalidNamespace, "from must be a valid namespace");
+assertErrorCode(local, pipeline, ErrorCodes.InvalidNamespace, "from must be a valid namespace");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -84,9 +83,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: 0
}
};
- assertErrorCode(local, pipeline, 40103, "as must be a string");
+assertErrorCode(local, pipeline, 40103, "as must be a string");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -95,9 +94,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "$output"
}
};
- assertErrorCode(local, pipeline, 16410, "as cannot be a fieldPath");
+assertErrorCode(local, pipeline, 16410, "as cannot be a fieldPath");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -106,9 +105,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, 40103, "connectFromField must be a string");
+assertErrorCode(local, pipeline, 40103, "connectFromField must be a string");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -117,9 +116,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, 16410, "connectFromField cannot be a fieldPath");
+assertErrorCode(local, pipeline, 16410, "connectFromField cannot be a fieldPath");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -128,9 +127,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, 40103, "connectToField must be a string");
+assertErrorCode(local, pipeline, 40103, "connectToField must be a string");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -139,9 +138,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "output"
}
};
- assertErrorCode(local, pipeline, 16410, "connectToField cannot be a fieldPath");
+assertErrorCode(local, pipeline, 16410, "connectToField cannot be a fieldPath");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -151,9 +150,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
depthField: 0
}
};
- assertErrorCode(local, pipeline, 40103, "depthField must be a string");
+assertErrorCode(local, pipeline, 40103, "depthField must be a string");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -163,9 +162,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
depthField: "$depth"
}
};
- assertErrorCode(local, pipeline, 16410, "depthField cannot be a fieldPath");
+assertErrorCode(local, pipeline, 16410, "depthField cannot be a fieldPath");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -175,9 +174,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: "notamatch"
}
};
- assertErrorCode(local, pipeline, 40185, "restrictSearchWithMatch must be an object");
+assertErrorCode(local, pipeline, 40185, "restrictSearchWithMatch must be an object");
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -187,43 +186,37 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
notAField: "foo"
}
};
- assertErrorCode(local, pipeline, 40104, "unknown argument");
-
- pipeline = {
- $graphLookup:
- {from: "foreign", startWith: {$literal: 0}, connectFromField: "b", as: "output"}
- };
- assertErrorCode(local, pipeline, 40105, "connectToField was not specified");
-
- pipeline = {
- $graphLookup:
- {from: "foreign", startWith: {$literal: 0}, connectToField: "a", as: "output"}
- };
- assertErrorCode(local, pipeline, 40105, "connectFromField was not specified");
-
- pipeline = {
- $graphLookup: {from: "foreign", connectToField: "a", connectFromField: "b", as: "output"}
- };
- assertErrorCode(local, pipeline, 40105, "startWith was not specified");
-
- pipeline = {
- $graphLookup: {
- from: "foreign",
- startWith: {$literal: 0},
- connectToField: "a",
- connectFromField: "b"
- }
- };
- assertErrorCode(local, pipeline, 40105, "as was not specified");
-
- pipeline = {
- $graphLookup:
- {startWith: {$literal: 0}, connectToField: "a", connectFromField: "b", as: "output"}
- };
- assertErrorCode(local, pipeline, ErrorCodes.FailedToParse, "from was not specified");
-
- // restrictSearchWithMatch must be a valid match expression.
- pipeline = {
+assertErrorCode(local, pipeline, 40104, "unknown argument");
+
+pipeline = {
+ $graphLookup: {from: "foreign", startWith: {$literal: 0}, connectFromField: "b", as: "output"}
+};
+assertErrorCode(local, pipeline, 40105, "connectToField was not specified");
+
+pipeline = {
+ $graphLookup: {from: "foreign", startWith: {$literal: 0}, connectToField: "a", as: "output"}
+};
+assertErrorCode(local, pipeline, 40105, "connectFromField was not specified");
+
+pipeline = {
+ $graphLookup: {from: "foreign", connectToField: "a", connectFromField: "b", as: "output"}
+};
+assertErrorCode(local, pipeline, 40105, "startWith was not specified");
+
+pipeline = {
+ $graphLookup:
+ {from: "foreign", startWith: {$literal: 0}, connectToField: "a", connectFromField: "b"}
+};
+assertErrorCode(local, pipeline, 40105, "as was not specified");
+
+pipeline = {
+ $graphLookup:
+ {startWith: {$literal: 0}, connectToField: "a", connectFromField: "b", as: "output"}
+};
+assertErrorCode(local, pipeline, ErrorCodes.FailedToParse, "from was not specified");
+
+// restrictSearchWithMatch must be a valid match expression.
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -233,10 +226,10 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: {$not: {a: 1}}
}
};
- assert.throws(() => local.aggregate(pipeline), [], "unable to parse match expression");
+assert.throws(() => local.aggregate(pipeline), [], "unable to parse match expression");
- // $where and $text cannot be used inside $graphLookup.
- pipeline = {
+// $where and $text cannot be used inside $graphLookup.
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -246,9 +239,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: {$where: "3 > 2"}
}
};
- assert.throws(() => local.aggregate(pipeline), [], "cannot use $where inside $graphLookup");
+assert.throws(() => local.aggregate(pipeline), [], "cannot use $where inside $graphLookup");
- pipeline = {
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -258,9 +251,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: {$text: {$search: "some text"}}
}
};
- assert.throws(() => local.aggregate(pipeline), [], "cannot use $text inside $graphLookup");
+assert.throws(() => local.aggregate(pipeline), [], "cannot use $text inside $graphLookup");
- pipeline = {
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -272,9 +265,9 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
}
}
};
- assert.throws(() => local.aggregate(pipeline), [], "cannot use $near inside $graphLookup");
+assert.throws(() => local.aggregate(pipeline), [], "cannot use $near inside $graphLookup");
- pipeline = {
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -293,15 +286,15 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
}
}
};
- assert.throws(
- () => local.aggregate(pipeline), [], "cannot use $near inside $graphLookup at any depth");
+assert.throws(
+ () => local.aggregate(pipeline), [], "cannot use $near inside $graphLookup at any depth");
- let foreign = db.foreign;
- foreign.drop();
- assert.writeOK(foreign.insert({a: 0, x: 0}));
+let foreign = db.foreign;
+foreign.drop();
+assert.writeOK(foreign.insert({a: 0, x: 0}));
- // Test a restrictSearchWithMatch expression that fails to parse.
- pipeline = {
+// Test a restrictSearchWithMatch expression that fails to parse.
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -311,10 +304,10 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: {$expr: {$eq: ["$x", "$$unbound"]}}
}
};
- assert.throws(() => local.aggregate(pipeline), [], "cannot use $expr with unbound variable");
+assert.throws(() => local.aggregate(pipeline), [], "cannot use $expr with unbound variable");
- // Test a restrictSearchWithMatch expression that throws at runtime.
- pipeline = {
+// Test a restrictSearchWithMatch expression that throws at runtime.
+pipeline = {
$graphLookup: {
from: 'foreign',
startWith: {$literal: 0},
@@ -324,25 +317,25 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
restrictSearchWithMatch: {$expr: {$divide: [1, "$x"]}}
}
};
- assertErrorCode(local, pipeline, 16608, "division by zero in $expr");
+assertErrorCode(local, pipeline, 16608, "division by zero in $expr");
- // $graphLookup can consume at most 100MB of memory.
- foreign.drop();
+// $graphLookup can consume at most 100MB of memory.
+foreign.drop();
- // Here, the visited set exceeds 100MB.
- var bulk = foreign.initializeUnorderedBulkOp();
+// Here, the visited set exceeds 100MB.
+var bulk = foreign.initializeUnorderedBulkOp();
- var initial = [];
- for (var i = 0; i < 8; i++) {
- var obj = {_id: i};
+var initial = [];
+for (var i = 0; i < 8; i++) {
+ var obj = {_id: i};
- obj['longString'] = new Array(14 * 1024 * 1024).join('x');
- initial.push(i);
- bulk.insert(obj);
- }
- assert.writeOK(bulk.execute());
+ obj['longString'] = new Array(14 * 1024 * 1024).join('x');
+ initial.push(i);
+ bulk.insert(obj);
+}
+assert.writeOK(bulk.execute());
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: initial},
@@ -351,21 +344,21 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
as: "graph"
}
};
- assertErrorCode(local, pipeline, 40099, "maximum memory usage reached");
+assertErrorCode(local, pipeline, 40099, "maximum memory usage reached");
- // Here, the visited set should grow to approximately 90MB, and the frontier should push
- // memory usage over 100MB.
- foreign.drop();
+// Here, the visited set should grow to approximately 90MB, and the frontier should push
+// memory usage over 100MB.
+foreign.drop();
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 14; i++) {
- var obj = {from: 0, to: 1};
- obj['s'] = new Array(7 * 1024 * 1024).join(' ');
- bulk.insert(obj);
- }
- assert.writeOK(bulk.execute());
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 14; i++) {
+ var obj = {from: 0, to: 1};
+ obj['s'] = new Array(7 * 1024 * 1024).join(' ');
+ bulk.insert(obj);
+}
+assert.writeOK(bulk.execute());
- pipeline = {
+pipeline = {
$graphLookup: {
from: "foreign",
startWith: {$literal: 0},
@@ -375,20 +368,20 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
}
};
- assertErrorCode(local, pipeline, 40099, "maximum memory usage reached");
+assertErrorCode(local, pipeline, 40099, "maximum memory usage reached");
- // Here, we test that the cache keeps memory usage under 100MB, and does not cause an error.
- foreign.drop();
+// Here, we test that the cache keeps memory usage under 100MB, and does not cause an error.
+foreign.drop();
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 13; i++) {
- var obj = {from: 0, to: 1};
- obj['s'] = new Array(7 * 1024 * 1024).join(' ');
- bulk.insert(obj);
- }
- assert.writeOK(bulk.execute());
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 13; i++) {
+ var obj = {from: 0, to: 1};
+ obj['s'] = new Array(7 * 1024 * 1024).join(' ');
+ bulk.insert(obj);
+}
+assert.writeOK(bulk.execute());
- var res = local
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -401,5 +394,5 @@ load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
{$unwind: {path: "$out"}})
.toArray();
- assert.eq(res.length, 13);
+assert.eq(res.length, 13);
}());
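The error cases above all omit or mistype one of $graphLookup's five required arguments. For contrast, a minimal well-formed invocation looks like the sketch below (collection names gl_local and gl_foreign are hypothetical, not part of the test above):

// Sketch: every required $graphLookup argument supplied.
db.gl_local.drop();
db.gl_foreign.drop();
assert.writeOK(db.gl_local.insert({start: 0}));
assert.writeOK(db.gl_foreign.insert({a: 0, b: 1}));
assert.writeOK(db.gl_foreign.insert({a: 1, b: 2}));
var chainDocs = db.gl_local
                    .aggregate([{
                        $graphLookup: {
                            from: "gl_foreign",     // required: foreign collection name
                            startWith: "$start",    // required: seeds the initial frontier
                            connectFromField: "b",  // required: field followed outward
                            connectToField: "a",    // required: field matched against the frontier
                            as: "chain"             // required: output array field
                        }
                    }])
                    .toArray();
assert.eq(chainDocs[0].chain.length, 2);  // traversal follows 0 -> 1 -> 2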
diff --git a/jstests/aggregation/sources/graphLookup/filter.js b/jstests/aggregation/sources/graphLookup/filter.js
index 69027500aae..4b46c843d9a 100644
--- a/jstests/aggregation/sources/graphLookup/filter.js
+++ b/jstests/aggregation/sources/graphLookup/filter.js
@@ -6,23 +6,23 @@
// we test the functionality and correctness of the option.
(function() {
- "use strict";
+"use strict";
- var local = db.local;
- var foreign = db.foreign;
+var local = db.local;
+var foreign = db.foreign;
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 100; i++) {
- bulk.insert({_id: i, neighbors: [i - 1, i + 1]});
- }
- assert.writeOK(bulk.execute());
- assert.writeOK(local.insert({starting: 0}));
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 100; i++) {
+ bulk.insert({_id: i, neighbors: [i - 1, i + 1]});
+}
+assert.writeOK(bulk.execute());
+assert.writeOK(local.insert({starting: 0}));
- // Assert that the graphLookup only retrieves ten documents, with _id from 0 to 9.
- var res = local
+// Assert that the graphLookup only retrieves ten documents, with _id from 0 to 9.
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -35,11 +35,11 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 10);
+assert.eq(res.integers.length, 10);
- // Assert that the graphLookup doesn't retrieve any documents, since doing so would require
- // traversing nodes in the graph that don't match the 'restrictSearchWithMatch' predicate.
- res = local
+// Assert that the graphLookup doesn't retrieve any documents, since doing so would require
+// traversing nodes in the graph that don't match the 'restrictSearchWithMatch' predicate.
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -52,16 +52,16 @@
})
.toArray()[0];
- assert.eq(res.integers.length, 0);
+assert.eq(res.integers.length, 0);
- foreign.drop();
- assert.writeOK(foreign.insert({from: 0, to: 1, shouldBeIncluded: true}));
- assert.writeOK(foreign.insert({from: 1, to: 2, shouldBeIncluded: false}));
- assert.writeOK(foreign.insert({from: 2, to: 3, shouldBeIncluded: true}));
+foreign.drop();
+assert.writeOK(foreign.insert({from: 0, to: 1, shouldBeIncluded: true}));
+assert.writeOK(foreign.insert({from: 1, to: 2, shouldBeIncluded: false}));
+assert.writeOK(foreign.insert({from: 2, to: 3, shouldBeIncluded: true}));
- // Assert that the $graphLookup stops exploring when it finds a document that doesn't match the
- // filter.
- res = local
+// Assert that the $graphLookup stops exploring when it finds a document that doesn't match the
+// filter.
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -74,10 +74,10 @@
})
.toArray()[0];
- assert.eq(res.results.length, 1);
+assert.eq(res.results.length, 1);
- // $expr is allowed inside the 'restrictSearchWithMatch' match expression.
- res = local
+// $expr is allowed inside the 'restrictSearchWithMatch' match expression.
+res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -90,5 +90,5 @@
})
.toArray()[0];
- assert.eq(res.results.length, 1);
+assert.eq(res.results.length, 1);
})();
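As the assertions above rely on, restrictSearchWithMatch filters candidate documents before they join the frontier, so an excluded node also cuts off everything reachable only through it. A compact sketch of that pruning behavior, using a hypothetical gl_nodes collection:

// Sketch: node 1 fails the predicate, so node 2 is never reached either.
db.gl_nodes.drop();
assert.writeOK(db.gl_nodes.insert({_id: 0, next: 1, active: true}));
assert.writeOK(db.gl_nodes.insert({_id: 1, next: 2, active: false}));
assert.writeOK(db.gl_nodes.insert({_id: 2, next: 3, active: true}));
var pruned = db.gl_nodes
                 .aggregate([
                     {$match: {_id: 0}},
                     {
                         $graphLookup: {
                             from: "gl_nodes",
                             startWith: "$next",
                             connectFromField: "next",
                             connectToField: "_id",
                             restrictSearchWithMatch: {active: true},
                             as: "reachable"
                         }
                     }
                 ])
                 .toArray()[0];
assert.eq(pruned.reachable.length, 0);  // node 1 is filtered, so node 2 is unreachable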
diff --git a/jstests/aggregation/sources/graphLookup/nested_objects.js b/jstests/aggregation/sources/graphLookup/nested_objects.js
index d40cced2ac4..43c81302ae4 100644
--- a/jstests/aggregation/sources/graphLookup/nested_objects.js
+++ b/jstests/aggregation/sources/graphLookup/nested_objects.js
@@ -6,24 +6,24 @@
// when the 'connectToField' is a nested array, or when the 'connectFromField' is a nested array.
(function() {
- "use strict";
+"use strict";
- var local = db.local;
- var foreign = db.foreign;
+var local = db.local;
+var foreign = db.foreign;
- local.drop();
- foreign.drop();
+local.drop();
+foreign.drop();
- // 'connectFromField' is an array of objects.
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 100; i++) {
- bulk.insert({_id: i, neighbors: [{id: i + 1}, {id: i + 2}]});
- }
- assert.writeOK(bulk.execute());
+// 'connectFromField' is an array of objects.
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 100; i++) {
+ bulk.insert({_id: i, neighbors: [{id: i + 1}, {id: i + 2}]});
+}
+assert.writeOK(bulk.execute());
- assert.writeOK(local.insert({starting: 0}));
+assert.writeOK(local.insert({starting: 0}));
- var res = local
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -34,18 +34,18 @@
}
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
- foreign.drop();
+foreign.drop();
- // 'connectToField' is an array of objects.
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 100; i++) {
- bulk.insert({previous: [{neighbor: i}, {neighbor: i - 1}], value: i + 1});
- }
- assert.writeOK(bulk.execute());
+// 'connectToField' is an array of objects.
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 100; i++) {
+ bulk.insert({previous: [{neighbor: i}, {neighbor: i - 1}], value: i + 1});
+}
+assert.writeOK(bulk.execute());
- var res = local
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -56,21 +56,21 @@
}
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
- foreign.drop();
+foreign.drop();
- // Both 'connectToField' and 'connectFromField' are arrays of objects.
- var bulk = foreign.initializeUnorderedBulkOp();
- for (var i = 0; i < 100; i++) {
- bulk.insert({
- previous: [{neighbor: i}, {neighbor: i - 1}],
- values: [{neighbor: i + 1}, {neighbor: i + 2}]
- });
- }
- assert.writeOK(bulk.execute());
+// Both 'connectToField' and 'connectFromField' are arrays of objects.
+var bulk = foreign.initializeUnorderedBulkOp();
+for (var i = 0; i < 100; i++) {
+ bulk.insert({
+ previous: [{neighbor: i}, {neighbor: i - 1}],
+ values: [{neighbor: i + 1}, {neighbor: i + 2}]
+ });
+}
+assert.writeOK(bulk.execute());
- var res = local
+var res = local
.aggregate({
$graphLookup: {
from: "foreign",
@@ -81,5 +81,5 @@
}
})
.toArray()[0];
- assert.eq(res.integers.length, 100);
+assert.eq(res.integers.length, 100);
}());
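The tests above depend on $graphLookup resolving a dotted 'connectFromField' across every element of an array of subdocuments, fanning the frontier out to all of them at once. A small sketch of that fan-out against a hypothetical gl_nested collection:

// Sketch: 'links.id' contributes one frontier value per array element.
db.gl_nested.drop();
assert.writeOK(db.gl_nested.insert({_id: 0, links: [{id: 1}, {id: 2}]}));
assert.writeOK(db.gl_nested.insert({_id: 1, links: []}));
assert.writeOK(db.gl_nested.insert({_id: 2, links: [{id: 1}]}));
var fanOut = db.gl_nested
                 .aggregate([
                     {$match: {_id: 0}},
                     {
                         $graphLookup: {
                             from: "gl_nested",
                             startWith: "$links.id",  // seeds the frontier with [1, 2]
                             connectFromField: "links.id",
                             connectToField: "_id",
                             as: "reachable"
                         }
                     }
                 ])
                 .toArray()[0];
assert.eq(fanOut.reachable.length, 2);  // nodes 1 and 2, each visited exactly once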
diff --git a/jstests/aggregation/sources/graphLookup/socialite.js b/jstests/aggregation/sources/graphLookup/socialite.js
index 228c0f56c0e..f38f6c2ffc0 100644
--- a/jstests/aggregation/sources/graphLookup/socialite.js
+++ b/jstests/aggregation/sources/graphLookup/socialite.js
@@ -6,35 +6,35 @@
// Socialite schema example available here: https://github.com/mongodb-labs/socialite
(function() {
- "use strict";
+"use strict";
- var follower = db.followers;
- var users = db.users;
+var follower = db.followers;
+var users = db.users;
- follower.drop();
- users.drop();
+follower.drop();
+users.drop();
- var userDocs = [
- {_id: "djw", fullname: "Darren", country: "Australia"},
- {_id: "bmw", fullname: "Bob", country: "Germany"},
- {_id: "jsr", fullname: "Jared", country: "USA"},
- {_id: "ftr", fullname: "Frank", country: "Canada"}
- ];
+var userDocs = [
+ {_id: "djw", fullname: "Darren", country: "Australia"},
+ {_id: "bmw", fullname: "Bob", country: "Germany"},
+ {_id: "jsr", fullname: "Jared", country: "USA"},
+ {_id: "ftr", fullname: "Frank", country: "Canada"}
+];
- userDocs.forEach(function(userDoc) {
- assert.writeOK(users.insert(userDoc));
- });
+userDocs.forEach(function(userDoc) {
+ assert.writeOK(users.insert(userDoc));
+});
- var followers = [{_f: "djw", _t: "jsr"}, {_f: "jsr", _t: "bmw"}, {_f: "ftr", _t: "bmw"}];
+var followers = [{_f: "djw", _t: "jsr"}, {_f: "jsr", _t: "bmw"}, {_f: "ftr", _t: "bmw"}];
- followers.forEach(function(f) {
- assert.writeOK(follower.insert(f));
- });
+followers.forEach(function(f) {
+ assert.writeOK(follower.insert(f));
+});
- // Find the social network of "Darren", that is, people Darren follows, and people who are
- // followed by someone Darren follows, etc.
+// Find the social network of "Darren", that is, people Darren follows, and people who are
+// followed by someone Darren follows, etc.
- var res = users
+var res = users
.aggregate({$match: {fullname: "Darren"}},
{
$graphLookup: {
@@ -49,6 +49,6 @@
{$project: {_id: "$network._t"}})
.toArray();
- // "djw" is followed, directly or indirectly, by "jsr" and "bmw".
- assert.eq(res.length, 2);
+// "djw" is followed, directly or indirectly, by "jsr" and "bmw".
+assert.eq(res.length, 2);
}());
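When exploring a follower graph like this one, it is often useful to know how many hops away each connection is; $graphLookup exposes that through 'depthField'. A sketch against a hypothetical gl_follow collection with the same edge shape:

// Sketch: 'depthField' labels each edge with its distance from the start.
db.gl_follow.drop();
assert.writeOK(db.gl_follow.insert({_f: "a", _t: "b"}));
assert.writeOK(db.gl_follow.insert({_f: "b", _t: "c"}));
var network = db.gl_follow
                  .aggregate([
                      {$match: {_f: "a"}},
                      {
                          $graphLookup: {
                              from: "gl_follow",
                              startWith: "$_f",
                              connectFromField: "_t",
                              connectToField: "_f",
                              depthField: "hops",  // 0 for direct follows, 1 for follows-of-follows
                              as: "network"
                          }
                      }
                  ])
                  .toArray()[0];
assert.eq(network.network.length, 2);  // the a->b edge at depth 0, b->c at depth 1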
diff --git a/jstests/aggregation/sources/graphLookup/variables.js b/jstests/aggregation/sources/graphLookup/variables.js
index 87e2c8b3975..63b1bbea244 100644
--- a/jstests/aggregation/sources/graphLookup/variables.js
+++ b/jstests/aggregation/sources/graphLookup/variables.js
@@ -2,17 +2,17 @@
* Tests to verify that $graphLookup can use the variables defined in an outer scope.
*/
(function() {
- "use strict";
+"use strict";
- let local = db.graph_lookup_var_local;
- let foreign = db.graph_lookup_var_foreign;
- local.drop();
- foreign.drop();
+let local = db.graph_lookup_var_local;
+let foreign = db.graph_lookup_var_foreign;
+local.drop();
+foreign.drop();
- foreign.insert({from: "b", to: "a", _id: 0});
- local.insert({});
+foreign.insert({from: "b", to: "a", _id: 0});
+local.insert({});
- const basicGraphLookup = {
+const basicGraphLookup = {
$graphLookup: {
from: "graph_lookup_var_foreign",
startWith: "$$var1",
@@ -22,7 +22,7 @@
}
};
- const lookup = {
+const lookup = {
$lookup: {
from: "graph_lookup_var_local",
let : {var1: "a"},
@@ -31,11 +31,10 @@
}
};
- // Verify that $graphLookup can use the variable 'var1', which is defined in the parent $lookup.
- let res = local.aggregate([lookup]).toArray();
- assert.eq(res.length, 1);
- assert.eq(res[0].resultsFromLookup.length, 1);
- assert.eq(res[0].resultsFromLookup[0].resultsFromGraphLookup.length, 1);
- assert.eq(res[0].resultsFromLookup[0].resultsFromGraphLookup[0], {_id: 0, from: "b", to: "a"});
-
+// Verify that $graphLookup can use the variable 'var1', which is defined in the parent $lookup.
+let res = local.aggregate([lookup]).toArray();
+assert.eq(res.length, 1);
+assert.eq(res[0].resultsFromLookup.length, 1);
+assert.eq(res[0].resultsFromLookup[0].resultsFromGraphLookup.length, 1);
+assert.eq(res[0].resultsFromLookup[0].resultsFromGraphLookup[0], {_id: 0, from: "b", to: "a"});
})();
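$graphLookup has no 'let' argument of its own, which is why the test drives the variable in through the enclosing $lookup. A sketch of the same wiring, with hypothetical collections:

// Sketch: the enclosing $lookup's 'let' binds $$start for the inner $graphLookup.
db.gl_var_outer.drop();
db.gl_var_edges.drop();
assert.writeOK(db.gl_var_outer.insert({_id: 1, root: "x"}));
assert.writeOK(db.gl_var_edges.insert({from: "x", to: "y"}));
var joined = db.gl_var_outer
                 .aggregate([{
                     $lookup: {
                         from: "gl_var_edges",
                         let : {start: "$root"},
                         pipeline: [{
                             $graphLookup: {
                                 from: "gl_var_edges",
                                 startWith: "$$start",  // resolved from the $lookup's 'let'
                                 connectFromField: "to",
                                 connectToField: "from",
                                 as: "walk"
                             }
                         }],
                         as: "joined"
                     }
                 }])
                 .toArray()[0];
assert.eq(joined.joined[0].walk.length, 1);  // the single edge x -> y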
diff --git a/jstests/aggregation/sources/group/collation_group.js b/jstests/aggregation/sources/group/collation_group.js
index c4977900078..94db6f15ed1 100644
--- a/jstests/aggregation/sources/group/collation_group.js
+++ b/jstests/aggregation/sources/group/collation_group.js
@@ -3,82 +3,80 @@
// Test that the $group stage and all accumulators respect the collation.
(function() {
- "use strict";
+"use strict";
- var coll = db.collation_group;
- coll.drop();
+var coll = db.collation_group;
+coll.drop();
- var results;
- var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
- var diacriticInsensitive = {collation: {locale: "en_US", strength: 1, caseLevel: true}};
- var numericOrdering = {collation: {locale: "en_US", numericOrdering: true}};
- var caseAndDiacriticInsensitive = {collation: {locale: "en_US", strength: 1}};
+var results;
+var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
+var diacriticInsensitive = {collation: {locale: "en_US", strength: 1, caseLevel: true}};
+var numericOrdering = {collation: {locale: "en_US", numericOrdering: true}};
+var caseAndDiacriticInsensitive = {collation: {locale: "en_US", strength: 1}};
- assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
+assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
- assert.writeOK(coll.insert({_id: 0, str: "A", str2: "á"}));
- assert.writeOK(coll.insert({_id: 1, str: "a", str2: "a"}));
- assert.writeOK(coll.insert({_id: 2, str: "B", str2: "é"}));
- assert.writeOK(coll.insert({_id: 3, str: "b", str2: "e"}));
+assert.writeOK(coll.insert({_id: 0, str: "A", str2: "á"}));
+assert.writeOK(coll.insert({_id: 1, str: "a", str2: "a"}));
+assert.writeOK(coll.insert({_id: 2, str: "B", str2: "é"}));
+assert.writeOK(coll.insert({_id: 3, str: "b", str2: "e"}));
- // Ensure that equality of groups respects the collation inherited from the collection default.
- assert.eq(2, coll.aggregate([{$group: {_id: "$str"}}]).itcount());
+// Ensure that equality of groups respects the collation inherited from the collection default.
+assert.eq(2, coll.aggregate([{$group: {_id: "$str"}}]).itcount());
- // Ensure that equality of groups respects an explicit collation.
- assert.eq(2, coll.aggregate([{$group: {_id: "$str2"}}], diacriticInsensitive).itcount());
+// Ensure that equality of groups respects an explicit collation.
+assert.eq(2, coll.aggregate([{$group: {_id: "$str2"}}], diacriticInsensitive).itcount());
- // Ensure that equality of groups created by $sortByCount respects the inherited collation.
- assert.eq(2, coll.aggregate([{$sortByCount: "$str"}]).itcount());
- assert.eq(4, coll.aggregate([{$sortByCount: "$str2"}]).itcount());
+// Ensure that equality of groups created by $sortByCount respects the inherited collation.
+assert.eq(2, coll.aggregate([{$sortByCount: "$str"}]).itcount());
+assert.eq(4, coll.aggregate([{$sortByCount: "$str2"}]).itcount());
- // Ensure that equality of groups created by $sortByCount respects an explicit collation.
- assert.eq(4, coll.aggregate([{$sortByCount: "$str"}], diacriticInsensitive).itcount());
- assert.eq(2, coll.aggregate([{$sortByCount: "$str2"}], diacriticInsensitive).itcount());
+// Ensure that equality of groups created by $sortByCount respects an explicit collation.
+assert.eq(4, coll.aggregate([{$sortByCount: "$str"}], diacriticInsensitive).itcount());
+assert.eq(2, coll.aggregate([{$sortByCount: "$str2"}], diacriticInsensitive).itcount());
- // Ensure that equality of groups inside $facet stage respects the inherited collation.
- results =
- coll.aggregate([{
- $facet:
- {facetStr: [{$group: {_id: "$str"}}], facetStr2: [{$group: {_id: "$str2"}}]}
- }])
- .toArray();
- assert.eq(1, results.length);
- assert.eq(2, results[0].facetStr.length);
- assert.eq(4, results[0].facetStr2.length);
+// Ensure that equality of groups inside $facet stage respects the inherited collation.
+results =
+ coll.aggregate([
+ {$facet: {facetStr: [{$group: {_id: "$str"}}], facetStr2: [{$group: {_id: "$str2"}}]}}
+ ])
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(2, results[0].facetStr.length);
+assert.eq(4, results[0].facetStr2.length);
- // Test that the $addToSet accumulator respects the inherited collation.
- results = coll.aggregate([{$group: {_id: null, set: {$addToSet: "$str"}}}]).toArray();
- assert.eq(1, results.length);
- assert.eq(2, results[0].set.length);
+// Test that the $addToSet accumulator respects the inherited collation.
+results = coll.aggregate([{$group: {_id: null, set: {$addToSet: "$str"}}}]).toArray();
+assert.eq(1, results.length);
+assert.eq(2, results[0].set.length);
- // Test that the $addToSet accumulator respects an explicit collation.
- results =
- coll.aggregate([{$group: {_id: null, set: {$addToSet: "$str2"}}}], diacriticInsensitive)
- .toArray();
- assert.eq(1, results.length);
- assert.eq(2, results[0].set.length);
+// Test that the $addToSet accumulator respects an explicit collation.
+results = coll.aggregate([{$group: {_id: null, set: {$addToSet: "$str2"}}}], diacriticInsensitive)
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(2, results[0].set.length);
- // Ensure that a subexpression inside $push respects the collation.
- results = coll.aggregate(
- [
- {$match: {_id: 0}},
- {$group: {_id: null, areEqual: {$push: {$eq: ["$str", "$str2"]}}}}
- ],
- caseAndDiacriticInsensitive)
- .toArray();
- assert.eq(1, results.length);
- assert.eq(1, results[0].areEqual.length);
- assert.eq(true, results[0].areEqual[0]);
+// Ensure that a subexpression inside $push respects the collation.
+results = coll.aggregate(
+ [
+ {$match: {_id: 0}},
+ {$group: {_id: null, areEqual: {$push: {$eq: ["$str", "$str2"]}}}}
+ ],
+ caseAndDiacriticInsensitive)
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(1, results[0].areEqual.length);
+assert.eq(true, results[0].areEqual[0]);
- // Test that the $min and $max accumulators respect the inherited collation.
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
- assert.writeOK(coll.insert({num: "100"}));
- assert.writeOK(coll.insert({num: "2"}));
- results = coll.aggregate([{$group: {_id: null, min: {$min: "$num"}}}]).toArray();
- assert.eq(1, results.length);
- assert.eq("2", results[0].min);
- results = coll.aggregate([{$group: {_id: null, max: {$max: "$num"}}}]).toArray();
- assert.eq(1, results.length);
- assert.eq("100", results[0].max);
+// Test that the $min and $max accumulators respect the inherited collation.
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), numericOrdering));
+assert.writeOK(coll.insert({num: "100"}));
+assert.writeOK(coll.insert({num: "2"}));
+results = coll.aggregate([{$group: {_id: null, min: {$min: "$num"}}}]).toArray();
+assert.eq(1, results.length);
+assert.eq("2", results[0].min);
+results = coll.aggregate([{$group: {_id: null, max: {$max: "$num"}}}]).toArray();
+assert.eq(1, results.length);
+assert.eq("100", results[0].max);
})();
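The strength settings above are what drive the differing group counts: primary strength (1) compares base letters only, secondary strength (2) adds diacritics, and the default tertiary strength (3) adds case. A condensed sketch with a hypothetical collection:

// Sketch: one field, three collation strengths, three different group counts.
db.coll_strength.drop();
assert.writeOK(db.coll_strength.insert({s: "a"}));
assert.writeOK(db.coll_strength.insert({s: "A"}));
assert.writeOK(db.coll_strength.insert({s: "á"}));
// Default (tertiary): case and diacritics are both significant.
assert.eq(3, db.coll_strength.aggregate([{$group: {_id: "$s"}}]).itcount());
// Secondary: "a" and "A" merge, "á" stays distinct.
assert.eq(2,
          db.coll_strength
              .aggregate([{$group: {_id: "$s"}}], {collation: {locale: "en_US", strength: 2}})
              .itcount());
// Primary: all three merge.
assert.eq(1,
          db.coll_strength
              .aggregate([{$group: {_id: "$s"}}], {collation: {locale: "en_US", strength: 1}})
              .itcount());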
diff --git a/jstests/aggregation/sources/group/group_by_variable.js b/jstests/aggregation/sources/group/group_by_variable.js
index 1d5e203caea..322b260297a 100644
--- a/jstests/aggregation/sources/group/group_by_variable.js
+++ b/jstests/aggregation/sources/group/group_by_variable.js
@@ -3,24 +3,24 @@
* SERVER-37459.
*/
(function() {
- "use strict";
+"use strict";
- const coll = db.group_by_system_var;
- coll.drop();
+const coll = db.group_by_system_var;
+coll.drop();
- assert.commandWorked(coll.insert({_id: 1, x: 1}));
- assert.commandWorked(coll.insert({_id: 2, x: 2}));
+assert.commandWorked(coll.insert({_id: 1, x: 1}));
+assert.commandWorked(coll.insert({_id: 2, x: 2}));
- function checkPipeline(pipeline, expectedResults) {
- const res = coll.aggregate(pipeline).toArray();
- assert.eq(res, expectedResults, pipeline);
- }
+function checkPipeline(pipeline, expectedResults) {
+ const res = coll.aggregate(pipeline).toArray();
+ assert.eq(res, expectedResults, pipeline);
+}
- const wholeCollUnderId = [{_id: {_id: 1, x: 1}}, {_id: {_id: 2, x: 2}}];
- checkPipeline([{$group: {_id: "$$ROOT"}}, {$sort: {"_id": 1}}], wholeCollUnderId);
- checkPipeline([{$group: {_id: "$$CURRENT"}}, {$sort: {"_id": 1}}], wholeCollUnderId);
+const wholeCollUnderId = [{_id: {_id: 1, x: 1}}, {_id: {_id: 2, x: 2}}];
+checkPipeline([{$group: {_id: "$$ROOT"}}, {$sort: {"_id": 1}}], wholeCollUnderId);
+checkPipeline([{$group: {_id: "$$CURRENT"}}, {$sort: {"_id": 1}}], wholeCollUnderId);
- const collIds = [{_id: 1}, {_id: 2}];
- checkPipeline([{$group: {_id: "$$ROOT.x"}}, {$sort: {"_id": 1}}], collIds);
- checkPipeline([{$group: {_id: "$$CURRENT.x"}}, {$sort: {"_id": 1}}], collIds);
+const collIds = [{_id: 1}, {_id: 2}];
+checkPipeline([{$group: {_id: "$$ROOT.x"}}, {$sort: {"_id": 1}}], collIds);
+checkPipeline([{$group: {_id: "$$CURRENT.x"}}, {$sort: {"_id": 1}}], collIds);
})();
diff --git a/jstests/aggregation/sources/group/numeric_grouping.js b/jstests/aggregation/sources/group/numeric_grouping.js
index c55dee564de..a7b9f22d979 100644
--- a/jstests/aggregation/sources/group/numeric_grouping.js
+++ b/jstests/aggregation/sources/group/numeric_grouping.js
@@ -2,24 +2,24 @@
* Tests that numbers that are equivalent but have different types are grouped together.
*/
(function() {
- "use strict";
- const coll = db.numeric_grouping;
+"use strict";
+const coll = db.numeric_grouping;
- coll.drop();
+coll.drop();
- assert.writeOK(coll.insert({key: new NumberInt(24), value: 17}));
- assert.writeOK(coll.insert({key: new NumberLong(24), value: 8}));
- assert.writeOK(coll.insert({key: 24, value: 5}));
+assert.writeOK(coll.insert({key: new NumberInt(24), value: 17}));
+assert.writeOK(coll.insert({key: new NumberLong(24), value: 8}));
+assert.writeOK(coll.insert({key: 24, value: 5}));
- assert.writeOK(coll.insert({key: new NumberInt(42), value: 11}));
- assert.writeOK(coll.insert({key: new NumberLong(42), value: 13}));
- assert.writeOK(coll.insert({key: 42, value: 6}));
+assert.writeOK(coll.insert({key: new NumberInt(42), value: 11}));
+assert.writeOK(coll.insert({key: new NumberLong(42), value: 13}));
+assert.writeOK(coll.insert({key: 42, value: 6}));
- const results = coll.aggregate({$group: {_id: "$key", s: {$sum: "$value"}}}).toArray();
+const results = coll.aggregate({$group: {_id: "$key", s: {$sum: "$value"}}}).toArray();
- assert.eq(results.length, 2, tojson(results));
+assert.eq(results.length, 2, tojson(results));
- // Both groups should sum to 30.
- assert.eq(results[0].s, 30, tojson(results));
- assert.eq(results[1].s, 30, tojson(results));
+// Both groups should sum to 30.
+assert.eq(results[0].s, 30, tojson(results));
+assert.eq(results[1].s, 30, tojson(results));
}());
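That both sums land on 30 follows from BSON's type-bridging numeric comparison: $group maps int, long, and double representations of the same value to one key. The same collapse can be seen in isolation (hypothetical collection name):

// Sketch: 1 (double), NumberInt(1), and NumberLong(1) share one group key.
db.num_types.drop();
assert.writeOK(db.num_types.insert({k: 1}));              // double
assert.writeOK(db.num_types.insert({k: NumberInt(1)}));   // 32-bit integer
assert.writeOK(db.num_types.insert({k: NumberLong(1)}));  // 64-bit integer
var groups = db.num_types.aggregate([{$group: {_id: "$k", n: {$sum: 1}}}]).toArray();
assert.eq(groups.length, 1);  // one group...
assert.eq(groups[0].n, 3);    // ...containing all three documents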
diff --git a/jstests/aggregation/sources/group/text_score_grouping.js b/jstests/aggregation/sources/group/text_score_grouping.js
index bb65d77fd00..2952602ee46 100644
--- a/jstests/aggregation/sources/group/text_score_grouping.js
+++ b/jstests/aggregation/sources/group/text_score_grouping.js
@@ -2,30 +2,29 @@
* Tests that a user can group on the text score.
*/
(function() {
- "use strict";
- const coll = db.text_score_grouping;
+"use strict";
+const coll = db.text_score_grouping;
- coll.drop();
+coll.drop();
- assert.writeOK(coll.insert({"_id": 1, "title": "cakes"}));
- assert.writeOK(coll.insert({"_id": 2, "title": "cookies and cakes"}));
+assert.writeOK(coll.insert({"_id": 1, "title": "cakes"}));
+assert.writeOK(coll.insert({"_id": 2, "title": "cookies and cakes"}));
- assert.commandWorked(coll.createIndex({title: "text"}));
+assert.commandWorked(coll.createIndex({title: "text"}));
- // Make sure there are two distinct groups for a text search with no other dependencies.
- var results = coll.aggregate([
- {$match: {$text: {$search: "cake cookies"}}},
- {$group: {_id: {$meta: "textScore"}, count: {$sum: 1}}}
- ])
- .toArray();
- assert.eq(results.length, 2);
-
- // Make sure there are two distinct groups if there are other fields required by the group.
- results = coll.aggregate([
+// Make sure there are two distinct groups for a text search with no other dependencies.
+var results = coll.aggregate([
{$match: {$text: {$search: "cake cookies"}}},
- {$group: {_id: {$meta: "textScore"}, firstId: {$first: "$_id"}}}
+ {$group: {_id: {$meta: "textScore"}, count: {$sum: 1}}}
])
.toArray();
- assert.eq(results.length, 2);
+assert.eq(results.length, 2);
+// Make sure there are two distinct groups if there are other fields required by the group.
+results = coll.aggregate([
+ {$match: {$text: {$search: "cake cookies"}}},
+ {$group: {_id: {$meta: "textScore"}, firstId: {$first: "$_id"}}}
+ ])
+ .toArray();
+assert.eq(results.length, 2);
}());
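Grouping is only one consumer of this metadata; once a pipeline begins with $text, {$meta: "textScore"} is available to downstream stages such as $sort as well. A sketch (hypothetical collection) ranking matches by relevance:

// Sketch: the same textScore metadata can drive $sort.
db.ts_rank.drop();
assert.writeOK(db.ts_rank.insert({_id: 1, body: "coffee"}));
assert.writeOK(db.ts_rank.insert({_id: 2, body: "coffee coffee"}));
assert.commandWorked(db.ts_rank.createIndex({body: "text"}));
var ranked = db.ts_rank
                 .aggregate([
                     {$match: {$text: {$search: "coffee"}}},
                     {$sort: {score: {$meta: "textScore"}}},  // sorts by descending relevance
                     {$project: {score: {$meta: "textScore"}}}
                 ])
                 .toArray();
assert.eq(ranked[0]._id, 2);  // the document mentioning "coffee" twice ranks first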
diff --git a/jstests/aggregation/sources/lookup/lookup_absorb_match.js b/jstests/aggregation/sources/lookup/lookup_absorb_match.js
index 1a6aea31b16..1d85817970f 100644
--- a/jstests/aggregation/sources/lookup/lookup_absorb_match.js
+++ b/jstests/aggregation/sources/lookup/lookup_absorb_match.js
@@ -7,22 +7,22 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- let testDB = db.getSiblingDB("lookup_absorb_match");
- testDB.dropDatabase();
+let testDB = db.getSiblingDB("lookup_absorb_match");
+testDB.dropDatabase();
- let locations = testDB.getCollection("locations");
- assert.writeOK(locations.insert({_id: "doghouse", coordinates: [25.0, 60.0]}));
- assert.writeOK(locations.insert({_id: "bullpen", coordinates: [-25.0, -60.0]}));
+let locations = testDB.getCollection("locations");
+assert.writeOK(locations.insert({_id: "doghouse", coordinates: [25.0, 60.0]}));
+assert.writeOK(locations.insert({_id: "bullpen", coordinates: [-25.0, -60.0]}));
- let animals = testDB.getCollection("animals");
- assert.writeOK(animals.insert({_id: "dog", locationId: "doghouse"}));
- assert.writeOK(animals.insert({_id: "bull", locationId: "bullpen"}));
+let animals = testDB.getCollection("animals");
+assert.writeOK(animals.insert({_id: "dog", locationId: "doghouse"}));
+assert.writeOK(animals.insert({_id: "bull", locationId: "bullpen"}));
- // Test that a $match with $geoWithin works properly when performed directly on an absorbed
- // lookup field.
- let result = testDB.animals
+// Test that a $match with $geoWithin works properly when performed directly on an absorbed
+// lookup field.
+let result = testDB.animals
.aggregate([
{
$lookup: {
@@ -53,15 +53,12 @@
}
])
.toArray();
- let expected = [{
- _id: "dog",
- locationId: "doghouse",
- location: {_id: "doghouse", coordinates: [25.0, 60.0]}
- }];
- assert.eq(result, expected);
+let expected =
+ [{_id: "dog", locationId: "doghouse", location: {_id: "doghouse", coordinates: [25.0, 60.0]}}];
+assert.eq(result, expected);
- // Test that a $match with $geoIntersects works as expected when absorbed by a $lookup.
- result = testDB.animals
+// Test that a $match with $geoIntersects works as expected when absorbed by a $lookup.
+result = testDB.animals
.aggregate([
{
$lookup: {
@@ -92,10 +89,7 @@
}
])
.toArray();
- expected = [{
- _id: "bull",
- locationId: "bullpen",
- location: {_id: "bullpen", coordinates: [-25.0, -60.0]}
- }];
- assert.eq(result, expected);
+expected =
+ [{_id: "bull", locationId: "bullpen", location: {_id: "bullpen", coordinates: [-25.0, -60.0]}}];
+assert.eq(result, expected);
}());
diff --git a/jstests/aggregation/sources/lookup/lookup_contains_text.js b/jstests/aggregation/sources/lookup/lookup_contains_text.js
index 2e96054735e..0ecbb60d683 100644
--- a/jstests/aggregation/sources/lookup/lookup_contains_text.js
+++ b/jstests/aggregation/sources/lookup/lookup_contains_text.js
@@ -3,49 +3,46 @@
// TODO: Reenable test on passthroughs with sharded collections as part of SERVER-38996.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
+load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
- const outer = db.outer;
- const inner = db.inner;
+const outer = db.outer;
+const inner = db.inner;
- outer.drop();
- inner.drop();
+outer.drop();
+inner.drop();
- const kNoTextScoreAvailableErrCode = 40218;
+const kNoTextScoreAvailableErrCode = 40218;
- // This pipeline is never legal, because the subpipeline projects out a textScore but does not
- // begin with a $text search.
- let pipeline = [
- {$match: {$text: {$search: "foo"}}},
- {
- $lookup:
- {from: "inner", as: "as", pipeline: [{$project: {score: {$meta: "textScore"}}}]}
- }
- ];
+// This pipeline is never legal, because the subpipeline projects out a textScore but does not
+// begin with a $text search.
+let pipeline = [
+ {$match: {$text: {$search: "foo"}}},
+ {$lookup: {from: "inner", as: "as", pipeline: [{$project: {score: {$meta: "textScore"}}}]}}
+];
- assert.commandWorked(outer.insert({_id: 100, a: "foo"}));
- assert.commandWorked(inner.insert({_id: 100, a: "bar apple banana"}));
+assert.commandWorked(outer.insert({_id: 100, a: "foo"}));
+assert.commandWorked(inner.insert({_id: 100, a: "bar apple banana"}));
- // Neither 'outer' nor 'inner' have text indexes.
- assertErrorCode(outer, pipeline, ErrorCodes.IndexNotFound);
+// Neither 'outer' nor 'inner' have text indexes.
+assertErrorCode(outer, pipeline, ErrorCodes.IndexNotFound);
- // Only 'outer' has a text index.
- assert.commandWorked(outer.createIndex({a: "text"}, {name: "outer_first_index"}));
- assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
+// Only 'outer' has a text index.
+assert.commandWorked(outer.createIndex({a: "text"}, {name: "outer_first_index"}));
+assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
- // Only 'inner' has a text index.
- assert.commandWorked(outer.dropIndex("outer_first_index"));
- assert.commandWorked(inner.createIndex({a: "text"}));
- assertErrorCode(outer, pipeline, ErrorCodes.IndexNotFound);
+// Only 'inner' has a text index.
+assert.commandWorked(outer.dropIndex("outer_first_index"));
+assert.commandWorked(inner.createIndex({a: "text"}));
+assertErrorCode(outer, pipeline, ErrorCodes.IndexNotFound);
- // Both 'outer' and 'inner' have a text index.
- assert.commandWorked(outer.createIndex({a: "text"}));
- assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
+// Both 'outer' and 'inner' have a text index.
+assert.commandWorked(outer.createIndex({a: "text"}));
+assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
- // A pipeline with two text searches, one within a $lookup, will work.
- pipeline = [
+// A pipeline with two text searches, one within a $lookup, will work.
+pipeline = [
{$match: {$text: {$search: "foo"}}},
{
$lookup: {
@@ -59,24 +56,24 @@
}
];
- let expected = [{"_id": 100, "a": "foo", "as": [{"_id": 100, "score": 2}]}];
- assert.eq(outer.aggregate(pipeline).toArray(), expected);
+let expected = [{"_id": 100, "a": "foo", "as": [{"_id": 100, "score": 2}]}];
+assert.eq(outer.aggregate(pipeline).toArray(), expected);
- // A lookup with a text search in the subpipeline will correctly perform that search on the
- // 'from' collection.
- pipeline = [{
+// A lookup with a text search in the subpipeline will correctly perform that search on the
+// 'from' collection.
+pipeline = [{
$lookup: {
from: "inner",
as: "as",
pipeline: [{$match: {$text: {$search: "bar apple banana hello"}}}]
}
}];
- expected = [{"_id": 100, "a": "foo", "as": [{"_id": 100, "a": "bar apple banana"}]}];
+expected = [{"_id": 100, "a": "foo", "as": [{"_id": 100, "a": "bar apple banana"}]}];
- assert.eq(outer.aggregate(pipeline).toArray(), expected);
+assert.eq(outer.aggregate(pipeline).toArray(), expected);
- // A lookup with two text searches and two text score $projects will have the text scores
- // reference the relevant text search.
- pipeline = [
+// A lookup with two text searches and two text score $projects will have the text scores
+// reference the relevant text search.
+pipeline = [
{$match: {$text: {$search: "foo"}}},
{
$lookup: {
@@ -91,13 +88,13 @@
{$project: {score: {$meta: "textScore"}, as: 1}},
];
- expected = [{"_id": 100, "as": [{"_id": 100, "score": 2}], "score": 1.1}];
+expected = [{"_id": 100, "as": [{"_id": 100, "score": 2}], "score": 1.1}];
- assert.eq(outer.aggregate(pipeline).toArray(), expected);
+assert.eq(outer.aggregate(pipeline).toArray(), expected);
- // Given a $text stage in the 'from' pipeline, the outer pipeline will not be able to access
- // this $text stage's text score.
- pipeline = [
+// Given a $text stage in the 'from' pipeline, the outer pipeline will not be able to access
+// this $text stage's text score.
+pipeline = [
{
$lookup: {
from: "inner",
@@ -108,5 +105,5 @@
{$project: {score: {$meta: "textScore"}, as: 1}},
];
- assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
+assertErrorCode(outer, pipeline, kNoTextScoreAvailableErrCode);
}());
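The rule these cases encode is that a textScore belongs to the pipeline that ran the $text search. When the subpipeline genuinely needs the outer score, one workaround is to materialize it into a field first and hand it across through 'let' (a sketch; collection names are hypothetical):

// Sketch: project the outer score into a field, then pass it via 'let'.
db.txt_outer2.drop();
db.txt_inner2.drop();
assert.commandWorked(db.txt_outer2.createIndex({a: "text"}));
assert.commandWorked(db.txt_outer2.insert({_id: 1, a: "foo"}));
assert.commandWorked(db.txt_inner2.insert({_id: 1}));
var scored = db.txt_outer2
                 .aggregate([
                     {$match: {$text: {$search: "foo"}}},
                     {$project: {score: {$meta: "textScore"}}},  // materialize the score here
                     {
                         $lookup: {
                             from: "txt_inner2",
                             let : {outerScore: "$score"},
                             pipeline: [{$project: {fromOuter: "$$outerScore"}}],
                             as: "as"
                         }
                     }
                 ])
                 .toArray()[0];
assert.eq(scored.as[0].fromOuter, scored.score);  // the outer score crossed into the subpipeline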
diff --git a/jstests/aggregation/sources/lookup/lookup_non_correlated.js b/jstests/aggregation/sources/lookup/lookup_non_correlated.js
index 523eb37f8d4..d7323d861c1 100644
--- a/jstests/aggregation/sources/lookup/lookup_non_correlated.js
+++ b/jstests/aggregation/sources/lookup/lookup_non_correlated.js
@@ -6,59 +6,59 @@
* Confirms that $lookup with a non-correlated foreign pipeline returns expected results.
*/
(function() {
- "use strict";
+"use strict";
- const testDB = db.getSiblingDB("lookup_non_correlated");
- const localName = "local";
- const localColl = testDB.getCollection(localName);
- localColl.drop();
- const foreignName = "foreign";
- const foreignColl = testDB.getCollection(foreignName);
- foreignColl.drop();
+const testDB = db.getSiblingDB("lookup_non_correlated");
+const localName = "local";
+const localColl = testDB.getCollection(localName);
+localColl.drop();
+const foreignName = "foreign";
+const foreignColl = testDB.getCollection(foreignName);
+foreignColl.drop();
- assert.writeOK(localColl.insert({_id: "A"}));
- assert.writeOK(localColl.insert({_id: "B"}));
- assert.writeOK(localColl.insert({_id: "C"}));
+assert.writeOK(localColl.insert({_id: "A"}));
+assert.writeOK(localColl.insert({_id: "B"}));
+assert.writeOK(localColl.insert({_id: "C"}));
- assert.writeOK(foreignColl.insert({_id: 1}));
- assert.writeOK(foreignColl.insert({_id: 2}));
- assert.writeOK(foreignColl.insert({_id: 3}));
+assert.writeOK(foreignColl.insert({_id: 1}));
+assert.writeOK(foreignColl.insert({_id: 2}));
+assert.writeOK(foreignColl.insert({_id: 3}));
- // Basic non-correlated lookup returns expected results.
- let cursor = localColl.aggregate([
- {$match: {_id: {$in: ["B", "C"]}}},
- {$sort: {_id: 1}},
- {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
- ]);
+// Basic non-correlated lookup returns expected results.
+let cursor = localColl.aggregate([
+ {$match: {_id: {$in: ["B", "C"]}}},
+ {$sort: {_id: 1}},
+ {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
+]);
- assert(cursor.hasNext());
- assert.docEq({_id: "B", foreignDocs: [{_id: 2}, {_id: 3}]}, cursor.next());
- assert(cursor.hasNext());
- assert.docEq({_id: "C", foreignDocs: [{_id: 2}, {_id: 3}]}, cursor.next());
- assert(!cursor.hasNext());
+assert(cursor.hasNext());
+assert.docEq({_id: "B", foreignDocs: [{_id: 2}, {_id: 3}]}, cursor.next());
+assert(cursor.hasNext());
+assert.docEq({_id: "C", foreignDocs: [{_id: 2}, {_id: 3}]}, cursor.next());
+assert(!cursor.hasNext());
- // Non-correlated lookup followed by unwind on 'as' returns expected results.
- cursor = localColl.aggregate([
- {$match: {_id: "A"}},
- {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
- {$unwind: "$foreignDocs"}
- ]);
+// Non-correlated lookup followed by unwind on 'as' returns expected results.
+cursor = localColl.aggregate([
+ {$match: {_id: "A"}},
+ {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
+ {$unwind: "$foreignDocs"}
+]);
- assert(cursor.hasNext());
- assert.docEq({_id: "A", foreignDocs: {_id: 2}}, cursor.next());
- assert(cursor.hasNext());
- assert.docEq({_id: "A", foreignDocs: {_id: 3}}, cursor.next());
- assert(!cursor.hasNext());
+assert(cursor.hasNext());
+assert.docEq({_id: "A", foreignDocs: {_id: 2}}, cursor.next());
+assert(cursor.hasNext());
+assert.docEq({_id: "A", foreignDocs: {_id: 3}}, cursor.next());
+assert(!cursor.hasNext());
- // Non-correlated lookup followed by unwind and filter on 'as' returns expected results.
- cursor = localColl.aggregate([
- {$match: {_id: "A"}},
- {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
- {$unwind: "$foreignDocs"},
- {$match: {"foreignDocs._id": 2}}
- ]);
+// Non-correlated lookup followed by unwind and filter on 'as' returns expected results.
+cursor = localColl.aggregate([
+ {$match: {_id: "A"}},
+ {$lookup: {from: foreignName, as: "foreignDocs", pipeline: [{$match: {_id: {"$gte": 2}}}]}},
+ {$unwind: "$foreignDocs"},
+ {$match: {"foreignDocs._id": 2}}
+]);
- assert(cursor.hasNext());
- assert.docEq({_id: "A", foreignDocs: {_id: 2}}, cursor.next());
- assert(!cursor.hasNext());
+assert(cursor.hasNext());
+assert.docEq({_id: "A", foreignDocs: {_id: 2}}, cursor.next());
+assert(!cursor.hasNext());
})();
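Because none of these subpipelines reference a 'let' variable or the local document, their results do not depend on which local document is being joined; observably, every local document receives an identical array, which is what makes the foreign pipeline safe to evaluate once and cache. A sketch with hypothetical collections:

// Sketch: a non-correlated subpipeline yields the same joined array for every local doc.
db.nc_local.drop();
db.nc_foreign.drop();
assert.writeOK(db.nc_local.insert([{_id: 1}, {_id: 2}]));
assert.writeOK(db.nc_foreign.insert([{v: 10}, {v: 20}]));
var docs = db.nc_local
               .aggregate([
                   {
                       $lookup: {
                           from: "nc_foreign",
                           as: "joined",
                           pipeline: [{$project: {_id: 0, v: 1}}, {$sort: {v: 1}}]
                       }
                   },
                   {$sort: {_id: 1}}
               ])
               .toArray();
assert.eq(docs[0].joined, docs[1].joined);  // identical arrays for both local docs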
diff --git a/jstests/aggregation/sources/lookup/lookup_non_correlated_prefix.js b/jstests/aggregation/sources/lookup/lookup_non_correlated_prefix.js
index 66442ffab86..2a67c79a262 100644
--- a/jstests/aggregation/sources/lookup/lookup_non_correlated_prefix.js
+++ b/jstests/aggregation/sources/lookup/lookup_non_correlated_prefix.js
@@ -6,75 +6,75 @@
* Confirms that $lookup with a non-correlated prefix returns expected results.
*/
(function() {
- "use strict";
+"use strict";
- const testColl = db.lookup_non_correlated_prefix;
- testColl.drop();
- const joinColl = db.lookup_non_correlated_prefix_join;
- joinColl.drop();
+const testColl = db.lookup_non_correlated_prefix;
+testColl.drop();
+const joinColl = db.lookup_non_correlated_prefix_join;
+joinColl.drop();
- const users = [
- {
- _id: "user_1",
- },
- {
- _id: "user_2",
- },
- ];
- let res = assert.commandWorked(testColl.insert(users));
+const users = [
+ {
+ _id: "user_1",
+ },
+ {
+ _id: "user_2",
+ },
+];
+let res = assert.commandWorked(testColl.insert(users));
- const items = [
- {_id: "item_1", owner: "user_1"},
- {_id: "item_2", owner: "user_2"},
- ];
- res = assert.commandWorked(joinColl.insert(items));
+const items = [
+ {_id: "item_1", owner: "user_1"},
+ {_id: "item_2", owner: "user_2"},
+];
+res = assert.commandWorked(joinColl.insert(items));
- // $lookup with a non-correlated prefix followed by a correlated pipeline suffix containing
- // $facet returns correct results. This test confirms the fix for SERVER-41714.
- let cursor = testColl.aggregate([
- {
- $lookup: {
- as: 'items_check',
- from: joinColl.getName(),
- let : {id: '$_id'},
- pipeline: [
- {$addFields: {id: '$_id'}},
- {$match: {$expr: {$eq: ['$$id', '$owner']}}},
- {
+// $lookup with a non-correlated prefix followed by a correlated pipeline suffix containing
+// $facet returns correct results. This test confirms the fix for SERVER-41714.
+let cursor = testColl.aggregate([
+ {
+ $lookup: {
+ as: 'items_check',
+ from: joinColl.getName(),
+ let : {id: '$_id'},
+ pipeline: [
+ {$addFields: {id: '$_id'}},
+ {$match: {$expr: {$eq: ['$$id', '$owner']}}},
+ {
$facet: {
all: [{$match: {}}],
},
- },
- ],
- },
+ },
+ ],
},
- ]);
- assert(cursor.hasNext());
- cursor.toArray().forEach(user => {
- const joinedDocs = user['items_check'][0]['all'];
- assert.neq(null, joinedDocs);
- assert.eq(1, joinedDocs.length);
- assert.eq(user['_id'], joinedDocs[0].owner);
- });
+ },
+]);
+assert(cursor.hasNext());
+cursor.toArray().forEach(user => {
+ const joinedDocs = user['items_check'][0]['all'];
+ assert.neq(null, joinedDocs);
+ assert.eq(1, joinedDocs.length);
+ assert.eq(user['_id'], joinedDocs[0].owner);
+});
- cursor = testColl.aggregate([
- {
- $lookup: {
- as: 'items_check',
- from: joinColl.getName(),
- let : {id: '$_id'},
- pipeline: [
- {$addFields: {id: '$_id'}},
- {$match: {$expr: {$eq: ['$$id', '$owner']}}},
- ],
- },
+cursor = testColl.aggregate([
+ {
+ $lookup: {
+ as: 'items_check',
+ from: joinColl.getName(),
+ let : {id: '$_id'},
+ pipeline: [
+ {$addFields: {id: '$_id'}},
+ {$match: {$expr: {$eq: ['$$id', '$owner']}}},
+ ],
},
- ]);
- assert(cursor.hasNext());
- cursor.toArray().forEach(user => {
- const joinedDocs = user['items_check'];
- assert.neq(null, joinedDocs);
- assert.eq(1, joinedDocs.length);
- assert.eq(user['_id'], joinedDocs[0].owner);
- });
+ },
+]);
+assert(cursor.hasNext());
+cursor.toArray().forEach(user => {
+ const joinedDocs = user['items_check'];
+ assert.neq(null, joinedDocs);
+ assert.eq(1, joinedDocs.length);
+ assert.eq(user['_id'], joinedDocs[0].owner);
+});
})();
diff --git a/jstests/aggregation/sources/lookup/lookup_sort_limit.js b/jstests/aggregation/sources/lookup/lookup_sort_limit.js
index 3633852615b..121a3c43f04 100644
--- a/jstests/aggregation/sources/lookup/lookup_sort_limit.js
+++ b/jstests/aggregation/sources/lookup/lookup_sort_limit.js
@@ -5,24 +5,24 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
- const testDB = db.getSiblingDB("lookup_sort_limit");
- testDB.dropDatabase();
+const testDB = db.getSiblingDB("lookup_sort_limit");
+testDB.dropDatabase();
- const localColl = testDB.getCollection("local");
- const fromColl = testDB.getCollection("from");
+const localColl = testDB.getCollection("local");
+const fromColl = testDB.getCollection("from");
- const bulk = fromColl.initializeUnorderedBulkOp();
- for (let i = 0; i < 10; i++) {
- bulk.insert({_id: i, foreignField: i});
- }
- assert.commandWorked(bulk.execute());
- assert.commandWorked(localColl.insert({_id: 0}));
+const bulk = fromColl.initializeUnorderedBulkOp();
+for (let i = 0; i < 10; i++) {
+ bulk.insert({_id: i, foreignField: i});
+}
+assert.commandWorked(bulk.execute());
+assert.commandWorked(localColl.insert({_id: 0}));
- let res = localColl
+let res = localColl
.aggregate([{
$lookup: {
from: fromColl.getName(),
@@ -33,10 +33,10 @@
}])
.toArray();
- assert.eq({_id: 0, result: [{_id: 0, foreignField: 0}]}, res[0]);
+assert.eq({_id: 0, result: [{_id: 0, foreignField: 0}]}, res[0]);
- // Run a similar test except with a sort that cannot be covered with an index scan.
- res = localColl
+// Run a similar test except with a sort that cannot be covered with an index scan.
+res = localColl
.aggregate([{
$lookup: {
from: fromColl.getName(),
@@ -47,6 +47,5 @@
}])
.toArray();
- assert.eq({_id: 0, result: [{_id: 9, foreignField: 9}]}, res[0]);
-
+assert.eq({_id: 0, result: [{_id: 9, foreignField: 9}]}, res[0]);
}());
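A $sort followed by $limit inside the subpipeline is effectively a per-document "top-N" join, which is what makes it a tempting target for index-assisted optimization. A sketch of the top-2 variant (hypothetical collections):

// Sketch: each local document receives only the two largest foreign values.
db.topn_local.drop();
db.topn_from.drop();
assert.commandWorked(db.topn_local.insert({_id: 0}));
assert.commandWorked(db.topn_from.insert([{v: 1}, {v: 3}, {v: 2}]));
var topTwo = db.topn_local
                 .aggregate([{
                     $lookup: {
                         from: "topn_from",
                         as: "top2",
                         pipeline: [{$sort: {v: -1}}, {$limit: 2}, {$project: {_id: 0, v: 1}}]
                     }
                 }])
                 .toArray();
assert.eq(topTwo[0].top2, [{v: 3}, {v: 2}]);  // sorted descending, truncated to two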
diff --git a/jstests/aggregation/sources/lookup/lookup_subpipeline.js b/jstests/aggregation/sources/lookup/lookup_subpipeline.js
index 39d2ff0d850..d9933c869cf 100644
--- a/jstests/aggregation/sources/lookup/lookup_subpipeline.js
+++ b/jstests/aggregation/sources/lookup/lookup_subpipeline.js
@@ -1,58 +1,58 @@
// Tests for the $lookup stage with a sub-pipeline.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const testName = "lookup_subpipeline";
+const testName = "lookup_subpipeline";
- const coll = db.lookUp;
- const from = db.from;
- const thirdColl = db.thirdColl;
- const fourthColl = db.fourthColl;
+const coll = db.lookUp;
+const from = db.from;
+const thirdColl = db.thirdColl;
+const fourthColl = db.fourthColl;
- // Used by testPipeline to sort result documents. All _ids must be primitives.
- function compareId(a, b) {
- if (a._id < b._id) {
- return -1;
- }
- if (a._id > b._id) {
- return 1;
- }
- return 0;
+// Used by testPipeline to sort result documents. All _ids must be primitives.
+function compareId(a, b) {
+ if (a._id < b._id) {
+ return -1;
}
-
- function generateNestedPipeline(foreignCollName, numLevels) {
- let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
-
- for (let level = 1; level < numLevels; level++) {
- pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
- }
-
- return pipeline;
+ if (a._id > b._id) {
+ return 1;
}
+ return 0;
+}
- // Helper for testing that pipeline returns correct set of results.
- function testPipeline(pipeline, expectedResult, collection) {
- assert.eq(collection.aggregate(pipeline).toArray().sort(compareId),
- expectedResult.sort(compareId));
+function generateNestedPipeline(foreignCollName, numLevels) {
+ let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
+
+ for (let level = 1; level < numLevels; level++) {
+ pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
}
- //
- // Pipeline syntax using 'let' variables.
- //
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, x: 1}));
- assert.writeOK(coll.insert({_id: 2, x: 2}));
- assert.writeOK(coll.insert({_id: 3, x: 3}));
-
- from.drop();
- assert.writeOK(from.insert({_id: 1}));
- assert.writeOK(from.insert({_id: 2}));
- assert.writeOK(from.insert({_id: 3}));
-
- // Basic non-equi theta join via $project.
- let pipeline = [
+ return pipeline;
+}
+
+// Helper for testing that pipeline returns correct set of results.
+function testPipeline(pipeline, expectedResult, collection) {
+ assert.eq(collection.aggregate(pipeline).toArray().sort(compareId),
+ expectedResult.sort(compareId));
+}
+
+//
+// Pipeline syntax using 'let' variables.
+//
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, x: 1}));
+assert.writeOK(coll.insert({_id: 2, x: 2}));
+assert.writeOK(coll.insert({_id: 3, x: 3}));
+
+from.drop();
+assert.writeOK(from.insert({_id: 1}));
+assert.writeOK(from.insert({_id: 2}));
+assert.writeOK(from.insert({_id: 3}));
+
+// Basic non-equi theta join via $project.
+let pipeline = [
{
$lookup: {
let : {var1: "$_id"},
@@ -67,23 +67,23 @@
},
];
- let expectedResults = [
- {"_id": 1, x: 1, "c": []},
- {"_id": 2, x: 2, "c": [{"_id": 1}]},
- {
- "_id": 3,
- x: 3,
- "c": [
- {"_id": 1},
- {
+let expectedResults = [
+ {"_id": 1, x: 1, "c": []},
+ {"_id": 2, x: 2, "c": [{"_id": 1}]},
+ {
+ "_id": 3,
+ x: 3,
+ "c": [
+ {"_id": 1},
+ {
"_id": 2,
- }
- ]
- }
- ];
- testPipeline(pipeline, expectedResults, coll);
- // Basic non-equi theta join via $match.
- pipeline = [
+ }
+ ]
+ }
+];
+testPipeline(pipeline, expectedResults, coll);
+// Basic non-equi theta join via $match.
+pipeline = [
{
$lookup: {
let : {var1: "$_id"},
@@ -96,24 +96,24 @@
},
];
- expectedResults = [
- {"_id": 1, x: 1, "c": []},
- {"_id": 2, x: 2, "c": [{"_id": 1}]},
- {
- "_id": 3,
- x: 3,
- "c": [
- {"_id": 1},
- {
+expectedResults = [
+ {"_id": 1, x: 1, "c": []},
+ {"_id": 2, x: 2, "c": [{"_id": 1}]},
+ {
+ "_id": 3,
+ x: 3,
+ "c": [
+ {"_id": 1},
+ {
"_id": 2,
- }
- ]
- }
- ];
- testPipeline(pipeline, expectedResults, coll);
+ }
+ ]
+ }
+];
+testPipeline(pipeline, expectedResults, coll);
- // Multi-level join using $match.
- pipeline = [
+// Multi-level join using $match.
+pipeline = [
{
$lookup: {
let : {var1: "$_id"},
@@ -134,15 +134,15 @@
},
];
- expectedResults = [
- {"_id": 1, "x": 1, "c": [{"_id": 1, "d": [{"_id": 2}, {"_id": 3}]}]},
- {"_id": 2, "x": 2, "c": [{"_id": 2, "d": [{"_id": 3}]}]},
- {"_id": 3, "x": 3, "c": [{"_id": 3, "d": []}]}
- ];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {"_id": 1, "x": 1, "c": [{"_id": 1, "d": [{"_id": 2}, {"_id": 3}]}]},
+ {"_id": 2, "x": 2, "c": [{"_id": 2, "d": [{"_id": 3}]}]},
+ {"_id": 3, "x": 3, "c": [{"_id": 3, "d": []}]}
+];
+testPipeline(pipeline, expectedResults, coll);
- // Equijoin with $match that can't be delegated to the query subsystem.
- pipeline = [
+// Equijoin with $match that can't be delegated to the query subsystem.
+pipeline = [
{
$lookup: {
let : {var1: "$x"},
@@ -158,15 +158,15 @@
},
];
- expectedResults = [
- {"_id": 1, "x": 1, "c": []},
- {"_id": 2, "x": 2, "c": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
- {"_id": 3, "x": 3, "c": []}
- ];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {"_id": 1, "x": 1, "c": []},
+ {"_id": 2, "x": 2, "c": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
+ {"_id": 3, "x": 3, "c": []}
+];
+testPipeline(pipeline, expectedResults, coll);
- // Multiple variables.
- pipeline = [
+// Multiple variables.
+pipeline = [
{
$lookup: {
let : {var1: "$_id", var2: "$x"},
@@ -187,15 +187,15 @@
{$project: {x: 1, c: 1}}
];
- expectedResults = [
- {"_id": 1, x: 1, "c": []},
- {"_id": 2, x: 2, "c": [{"_id": 1, var2Times2: 4}]},
- {"_id": 3, x: 3, "c": [{"_id": 1, var2Times2: 6}, {"_id": 2, var2Times2: 6}]}
- ];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {"_id": 1, x: 1, "c": []},
+ {"_id": 2, x: 2, "c": [{"_id": 1, var2Times2: 4}]},
+ {"_id": 3, x: 3, "c": [{"_id": 1, var2Times2: 6}, {"_id": 2, var2Times2: 6}]}
+];
+testPipeline(pipeline, expectedResults, coll);
- // Let var as complex expression object.
- pipeline = [
+// Let var as complex expression object.
+pipeline = [
{
$lookup: {
let : {var1: {$mod: ["$x", 3]}},
@@ -208,39 +208,39 @@
},
];
- expectedResults = [
- {
- "_id": 1,
- x: 1,
- "c": [
- {_id: 1, var1Mod3TimesForeignId: 1},
- {_id: 2, var1Mod3TimesForeignId: 2},
- {_id: 3, var1Mod3TimesForeignId: 3}
- ]
- },
- {
- "_id": 2,
- x: 2,
- "c": [
- {_id: 1, var1Mod3TimesForeignId: 2},
- {_id: 2, var1Mod3TimesForeignId: 4},
- {_id: 3, var1Mod3TimesForeignId: 6}
- ]
- },
- {
- "_id": 3,
- x: 3,
- "c": [
- {_id: 1, var1Mod3TimesForeignId: 0},
- {_id: 2, var1Mod3TimesForeignId: 0},
- {_id: 3, var1Mod3TimesForeignId: 0}
- ]
- }
- ];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {
+ "_id": 1,
+ x: 1,
+ "c": [
+ {_id: 1, var1Mod3TimesForeignId: 1},
+ {_id: 2, var1Mod3TimesForeignId: 2},
+ {_id: 3, var1Mod3TimesForeignId: 3}
+ ]
+ },
+ {
+ "_id": 2,
+ x: 2,
+ "c": [
+ {_id: 1, var1Mod3TimesForeignId: 2},
+ {_id: 2, var1Mod3TimesForeignId: 4},
+ {_id: 3, var1Mod3TimesForeignId: 6}
+ ]
+ },
+ {
+ "_id": 3,
+ x: 3,
+ "c": [
+ {_id: 1, var1Mod3TimesForeignId: 0},
+ {_id: 2, var1Mod3TimesForeignId: 0},
+ {_id: 3, var1Mod3TimesForeignId: 0}
+ ]
+ }
+];
+testPipeline(pipeline, expectedResults, coll);
- // 'let' defined variables are available to all nested sub-pipelines.
- pipeline = [
+// 'let' defined variables are available to all nested sub-pipelines.
+pipeline = [
{$match: {_id: 1}},
{
$lookup: {
@@ -277,26 +277,26 @@
}
];
- expectedResults = [{
+expectedResults = [{
+ "_id": 1,
+ "x": 1,
+ "join1": [{
"_id": 1,
- "x": 1,
- "join1": [{
- "_id": 1,
- "join2": [{
- "_id": 2,
- "letVar1": "ABC",
- "join3": [
- {"_id": 1, "mergedLetVars": "ABC123XYZ"},
- {"_id": 2, "mergedLetVars": "ABC123XYZ"},
- {"_id": 3, "mergedLetVars": "ABC123XYZ"}
- ]
- }]
+ "join2": [{
+ "_id": 2,
+ "letVar1": "ABC",
+ "join3": [
+ {"_id": 1, "mergedLetVars": "ABC123XYZ"},
+ {"_id": 2, "mergedLetVars": "ABC123XYZ"},
+ {"_id": 3, "mergedLetVars": "ABC123XYZ"}
+ ]
}]
- }];
- testPipeline(pipeline, expectedResults, coll);
+ }]
+}];
+testPipeline(pipeline, expectedResults, coll);
- // 'let' variable shadowed by foreign pipeline variable.
- pipeline = [
+// 'let' variable shadowed by foreign pipeline variable.
+pipeline = [
{$match: {_id: 2}},
{
$lookup: {
@@ -327,46 +327,46 @@
}
];
- expectedResults = [{
- "_id": 2,
- "x": 2,
- "c": [
- {
- "_id": 1,
- "shadowedVar": "abc",
- "originalVar": 2,
- "d": [
- {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
- ]
- },
- {
- "_id": 2,
- "shadowedVar": "abc",
- "originalVar": 2,
- "d": [
- {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
- ]
- },
- {
- "_id": 3,
- "shadowedVar": "abc",
- "originalVar": 2,
- "d": [
- {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
- {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
- ]
- }
- ]
- }];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [{
+ "_id": 2,
+ "x": 2,
+ "c": [
+ {
+ "_id": 1,
+ "shadowedVar": "abc",
+ "originalVar": 2,
+ "d": [
+ {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
+ ]
+ },
+ {
+ "_id": 2,
+ "shadowedVar": "abc",
+ "originalVar": 2,
+ "d": [
+ {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
+ ]
+ },
+ {
+ "_id": 3,
+ "shadowedVar": "abc",
+ "originalVar": 2,
+ "d": [
+ {"_id": 1, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 2, "shadowedVar": "xyz", "originalVar": 2},
+ {"_id": 3, "shadowedVar": "xyz", "originalVar": 2}
+ ]
+ }
+ ]
+}];
+testPipeline(pipeline, expectedResults, coll);
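The shadowing rule exercised above, sketched outside the test harness: when a nested $lookup rebinds a variable name in its own 'let', the innermost binding wins within that sub-pipeline. Collection names here are illustrative:

// Sketch: the inner 'let' rebinds 'myVar'; its sub-pipeline sees "inner".
db.a.aggregate([{
    $lookup: {
        from: "b",
        let: {myVar: "outer"},
        pipeline: [{
            $lookup: {
                from: "c",
                let: {myVar: "inner"},                   // shadows the outer binding
                pipeline: [{$project: {v: "$$myVar"}}],  // evaluates to "inner"
                as: "innerJoin"
            }
        }],
        as: "outerJoin"
    }
}]);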
- // Use of undefined variable fails.
- assertErrorCode(coll,
+// Use of undefined variable fails.
+assertErrorCode(coll,
[{
$lookup: {
from: "from",
@@ -376,11 +376,11 @@
}
}],
17276);
- assertErrorCode(
- coll,
- [{$lookup: {let : {var1: 1, var2: "$$var1"}, pipeline: [], from: "from", as: "as"}}],
- 17276);
- assertErrorCode(coll,
+assertErrorCode(
+ coll,
+ [{$lookup: {let : {var1: 1, var2: "$$var1"}, pipeline: [], from: "from", as: "as"}}],
+ 17276);
+assertErrorCode(coll,
[{
$lookup: {
let : {
@@ -394,9 +394,9 @@
}],
17276);
-    // The dotted path offset of a non-object variable is equivalent to referencing an undefined
-    // field.
- pipeline = [
+// The dotted path offset of a non-object variable is equivalent to referencing an undefined
+// field.
+pipeline = [
{
$lookup: {
let : {var1: "$x"},
@@ -416,18 +416,18 @@
{$sort: {x: 1}}
];
- expectedResults = [
- {"x": 1, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
- {"x": 2, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
- {"x": 3, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]}
- ];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {"x": 1, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
+ {"x": 2, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]},
+ {"x": 3, "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]}
+];
+testPipeline(pipeline, expectedResults, coll);
- // Comparison where a 'let' variable references an array.
- coll.drop();
- assert.writeOK(coll.insert({x: [1, 2, 3]}));
+// Comparison where a 'let' variable references an array.
+coll.drop();
+assert.writeOK(coll.insert({x: [1, 2, 3]}));
- pipeline = [
+pipeline = [
{
$lookup: {
let : {var1: "$x"},
@@ -441,17 +441,17 @@
{$project: {_id: 0}}
];
- expectedResults = [{"x": [1, 2, 3], "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]}];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [{"x": [1, 2, 3], "as": [{"_id": 1}, {"_id": 2}, {"_id": 3}]}];
+testPipeline(pipeline, expectedResults, coll);
- //
- // Pipeline syntax with nested object.
- //
- coll.drop();
- assert.writeOK(coll.insert({x: {y: {z: 10}}}));
+//
+// Pipeline syntax with nested object.
+//
+coll.drop();
+assert.writeOK(coll.insert({x: {y: {z: 10}}}));
- // Subfields of 'let' variables can be referenced via dotted path.
- pipeline = [
+// Subfields of 'let' variables can be referenced via dotted path.
+pipeline = [
{
$lookup: {
let : {var1: "$x"},
@@ -465,14 +465,13 @@
{$project: {_id: 0}}
];
- expectedResults = [{
- "x": {"y": {"z": 10}},
- "as": [{"_id": 1, "z": 10}, {"_id": 2, "z": 10}, {"_id": 3, "z": 10}]
- }];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [
+ {"x": {"y": {"z": 10}}, "as": [{"_id": 1, "z": 10}, {"_id": 2, "z": 10}, {"_id": 3, "z": 10}]}
+];
+testPipeline(pipeline, expectedResults, coll);
- // 'let' variable with dotted field path off of $$ROOT.
- pipeline = [
+// 'let' variable with dotted field path off of $$ROOT.
+pipeline = [
{
$lookup: {
let : {var1: "$$ROOT.x.y.z"},
@@ -485,11 +484,11 @@
{$project: {_id: 0}}
];
- expectedResults = [{"x": {"y": {"z": 10}}, "as": [{"x": {"y": {"z": 10}}}]}];
- testPipeline(pipeline, expectedResults, coll);
+expectedResults = [{"x": {"y": {"z": 10}}, "as": [{"x": {"y": {"z": 10}}}]}];
+testPipeline(pipeline, expectedResults, coll);
- // 'let' variable with dotted field path off of $$CURRENT.
- pipeline = [
+// 'let' variable with dotted field path off of $$CURRENT.
+pipeline = [
{
$lookup: {
let : {var1: "$$CURRENT.x.y.z"},
@@ -502,34 +501,34 @@
{$project: {_id: 0}}
];
- expectedResults = [{"x": {"y": {"z": 10}}, "as": [{"x": {"y": {"z": 10}}}]}];
- testPipeline(pipeline, expectedResults, coll);
-
- //
- // Pipeline syntax with nested $lookup.
- //
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, w: 1}));
- assert.writeOK(coll.insert({_id: 2, w: 2}));
- assert.writeOK(coll.insert({_id: 3, w: 3}));
-
- from.drop();
- assert.writeOK(from.insert({_id: 1, x: 1}));
- assert.writeOK(from.insert({_id: 2, x: 2}));
- assert.writeOK(from.insert({_id: 3, x: 3}));
-
- thirdColl.drop();
- assert.writeOK(thirdColl.insert({_id: 1, y: 1}));
- assert.writeOK(thirdColl.insert({_id: 2, y: 2}));
- assert.writeOK(thirdColl.insert({_id: 3, y: 3}));
-
- fourthColl.drop();
- assert.writeOK(fourthColl.insert({_id: 1, z: 1}));
- assert.writeOK(fourthColl.insert({_id: 2, z: 2}));
- assert.writeOK(fourthColl.insert({_id: 3, z: 3}));
-
- // Nested $lookup pipeline.
- pipeline = [
+expectedResults = [{"x": {"y": {"z": 10}}, "as": [{"x": {"y": {"z": 10}}}]}];
+testPipeline(pipeline, expectedResults, coll);
+
+//
+// Pipeline syntax with nested $lookup.
+//
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, w: 1}));
+assert.writeOK(coll.insert({_id: 2, w: 2}));
+assert.writeOK(coll.insert({_id: 3, w: 3}));
+
+from.drop();
+assert.writeOK(from.insert({_id: 1, x: 1}));
+assert.writeOK(from.insert({_id: 2, x: 2}));
+assert.writeOK(from.insert({_id: 3, x: 3}));
+
+thirdColl.drop();
+assert.writeOK(thirdColl.insert({_id: 1, y: 1}));
+assert.writeOK(thirdColl.insert({_id: 2, y: 2}));
+assert.writeOK(thirdColl.insert({_id: 3, y: 3}));
+
+fourthColl.drop();
+assert.writeOK(fourthColl.insert({_id: 1, z: 1}));
+assert.writeOK(fourthColl.insert({_id: 2, z: 2}));
+assert.writeOK(fourthColl.insert({_id: 3, z: 3}));
+
+// Nested $lookup pipeline.
+pipeline = [
{$match: {_id: 1}},
{
$lookup: {
@@ -560,61 +559,55 @@
}
];
- expectedResults = [{
- "_id": 1,
- "w": 1,
- "firstLookup": [{
- "_id": 2,
- x: 2, "secondLookup": [{"_id": 3, y: 3, "thirdLookup": [{_id: 1, z: 1}]}]
- }]
- }];
- testPipeline(pipeline, expectedResults, coll);
-
- // Deeply nested $lookup pipeline. Confirm that we can execute an aggregation with nested
- // $lookup sub-pipelines up to the maximum depth, but not beyond.
- let nestedPipeline = generateNestedPipeline("lookup", 20);
- assert.commandWorked(
- coll.getDB().runCommand({aggregate: coll.getName(), pipeline: nestedPipeline, cursor: {}}));
-
- nestedPipeline = generateNestedPipeline("lookup", 21);
- assertErrorCode(coll, nestedPipeline, ErrorCodes.MaxSubPipelineDepthExceeded);
-
- // Confirm that maximum $lookup sub-pipeline depth is respected when aggregating views whose
- // combined nesting depth exceeds the limit.
- nestedPipeline = generateNestedPipeline("lookup", 10);
- coll.getDB().view1.drop();
- assert.commandWorked(
- coll.getDB().runCommand({create: "view1", viewOn: "lookup", pipeline: nestedPipeline}));
-
- nestedPipeline = generateNestedPipeline("view1", 10);
- coll.getDB().view2.drop();
- assert.commandWorked(
- coll.getDB().runCommand({create: "view2", viewOn: "view1", pipeline: nestedPipeline}));
-
- // Confirm that a composite sub-pipeline depth of 20 is allowed.
- assert.commandWorked(coll.getDB().runCommand({aggregate: "view2", pipeline: [], cursor: {}}));
-
- const pipelineWhichExceedsNestingLimit = generateNestedPipeline("view2", 1);
- coll.getDB().view3.drop();
- assert.commandWorked(coll.getDB().runCommand(
- {create: "view3", viewOn: "view2", pipeline: pipelineWhichExceedsNestingLimit}));
-
- //
- // Error cases.
- //
-
- // Confirm that a composite sub-pipeline depth greater than 20 fails.
- assertErrorCode(coll.getDB().view3, [], ErrorCodes.MaxSubPipelineDepthExceeded);
-
- // 'pipeline' and 'let' must be of expected type.
- assertErrorCode(
- coll, [{$lookup: {pipeline: 1, from: "from", as: "as"}}], ErrorCodes.TypeMismatch);
- assertErrorCode(
- coll, [{$lookup: {pipeline: {}, from: "from", as: "as"}}], ErrorCodes.TypeMismatch);
- assertErrorCode(coll,
- [{$lookup: {let : 1, pipeline: [], from: "from", as: "as"}}],
- ErrorCodes.FailedToParse);
- assertErrorCode(coll,
- [{$lookup: {let : [], pipeline: [], from: "from", as: "as"}}],
- ErrorCodes.FailedToParse);
+expectedResults = [{
+ "_id": 1,
+ "w": 1,
+ "firstLookup":
+ [{"_id": 2, x: 2, "secondLookup": [{"_id": 3, y: 3, "thirdLookup": [{_id: 1, z: 1}]}]}]
+}];
+testPipeline(pipeline, expectedResults, coll);
+
+// Deeply nested $lookup pipeline. Confirm that we can execute an aggregation with nested
+// $lookup sub-pipelines up to the maximum depth, but not beyond.
+let nestedPipeline = generateNestedPipeline("lookup", 20);
+assert.commandWorked(
+ coll.getDB().runCommand({aggregate: coll.getName(), pipeline: nestedPipeline, cursor: {}}));
+
+nestedPipeline = generateNestedPipeline("lookup", 21);
+assertErrorCode(coll, nestedPipeline, ErrorCodes.MaxSubPipelineDepthExceeded);
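generateNestedPipeline is a helper defined earlier in this test file, outside this hunk. A plausible reconstruction of its shape, for reference only and not the verbatim helper:

// Sketch (assumed shape): wraps 'depth' levels of pipeline-syntax $lookup
// stages around an empty innermost sub-pipeline.
function generateNestedPipeline(fromColl, depth) {
    let pipeline = [{$lookup: {from: fromColl, pipeline: [], as: "as"}}];
    for (let level = 1; level < depth; level++) {
        pipeline = [{$lookup: {from: fromColl, pipeline: pipeline, as: "as"}}];
    }
    return pipeline;
}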
+
+// Confirm that maximum $lookup sub-pipeline depth is respected when aggregating views whose
+// combined nesting depth exceeds the limit.
+nestedPipeline = generateNestedPipeline("lookup", 10);
+coll.getDB().view1.drop();
+assert.commandWorked(
+ coll.getDB().runCommand({create: "view1", viewOn: "lookup", pipeline: nestedPipeline}));
+
+nestedPipeline = generateNestedPipeline("view1", 10);
+coll.getDB().view2.drop();
+assert.commandWorked(
+ coll.getDB().runCommand({create: "view2", viewOn: "view1", pipeline: nestedPipeline}));
+
+// Confirm that a composite sub-pipeline depth of 20 is allowed.
+assert.commandWorked(coll.getDB().runCommand({aggregate: "view2", pipeline: [], cursor: {}}));
+
+const pipelineWhichExceedsNestingLimit = generateNestedPipeline("view2", 1);
+coll.getDB().view3.drop();
+assert.commandWorked(coll.getDB().runCommand(
+ {create: "view3", viewOn: "view2", pipeline: pipelineWhichExceedsNestingLimit}));
+
+//
+// Error cases.
+//
+
+// Confirm that a composite sub-pipeline depth greater than 20 fails.
+assertErrorCode(coll.getDB().view3, [], ErrorCodes.MaxSubPipelineDepthExceeded);
+
+// 'pipeline' and 'let' must be of expected type.
+assertErrorCode(coll, [{$lookup: {pipeline: 1, from: "from", as: "as"}}], ErrorCodes.TypeMismatch);
+assertErrorCode(coll, [{$lookup: {pipeline: {}, from: "from", as: "as"}}], ErrorCodes.TypeMismatch);
+assertErrorCode(
+ coll, [{$lookup: {let : 1, pipeline: [], from: "from", as: "as"}}], ErrorCodes.FailedToParse);
+assertErrorCode(
+ coll, [{$lookup: {let : [], pipeline: [], from: "from", as: "as"}}], ErrorCodes.FailedToParse);
}());
diff --git a/jstests/aggregation/sources/lookup/lookup_subpipeline_geonear.js b/jstests/aggregation/sources/lookup/lookup_subpipeline_geonear.js
index 185e46bfb10..2043a298779 100644
--- a/jstests/aggregation/sources/lookup/lookup_subpipeline_geonear.js
+++ b/jstests/aggregation/sources/lookup/lookup_subpipeline_geonear.js
@@ -2,29 +2,29 @@
// TODO: Reenable test on passthroughs with sharded collections as part of SERVER-38995.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- const coll = db.lookup_subpipeline_geonear;
- const from = db.from;
+const coll = db.lookup_subpipeline_geonear;
+const from = db.from;
- coll.drop();
- assert.commandWorked(coll.insert({_id: 4, x: 4}));
+coll.drop();
+assert.commandWorked(coll.insert({_id: 4, x: 4}));
- from.drop();
+from.drop();
- // Create geospatial index for field 'geo' on 'from'.
- assert.commandWorked(from.createIndex({geo: "2dsphere"}));
+// Create geospatial index for field 'geo' on 'from'.
+assert.commandWorked(from.createIndex({geo: "2dsphere"}));
- // Insert one matching document in 'from'.
- assert.commandWorked(from.insert({_id: 1, geo: [0, 0]}));
+// Insert one matching document in 'from'.
+assert.commandWorked(from.insert({_id: 1, geo: [0, 0]}));
- const geonearPipeline = [
- {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
- ];
+const geonearPipeline = [
+ {$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}},
+];
- assert.eq(from.aggregate(geonearPipeline).itcount(), 1);
+assert.eq(from.aggregate(geonearPipeline).itcount(), 1);
- let pipeline = [
+let pipeline = [
{
$lookup: {
pipeline: geonearPipeline,
@@ -34,6 +34,6 @@
},
];
- assert.eq(coll.aggregate(pipeline).toArray(),
- [{"_id": 4, "x": 4, "c": [{"_id": 1, "geo": [0, 0], "distance": 0}]}]);
+assert.eq(coll.aggregate(pipeline).toArray(),
+ [{"_id": 4, "x": 4, "c": [{"_id": 1, "geo": [0, 0], "distance": 0}]}]);
}());
\ No newline at end of file
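The constraint this test exercises, as a standalone sketch with illustrative names: $geoNear must be the first stage of the $lookup sub-pipeline and requires a geospatial index on the foreign collection.

// Sketch: 2dsphere index on the foreign side, then $geoNear inside $lookup.
db.places.createIndex({geo: "2dsphere"});
db.places.insert({_id: 1, geo: [0, 0]});
db.trips.insert({_id: 1});
db.trips.aggregate([{
    $lookup: {
        from: "places",
        pipeline: [{$geoNear: {near: [0, 0], distanceField: "distance", spherical: true}}],
        as: "nearby"
    }
}]);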
diff --git a/jstests/aggregation/sources/lookup/profile_lookup.js b/jstests/aggregation/sources/lookup/profile_lookup.js
index a6c07b910a0..f2c9df8331c 100644
--- a/jstests/aggregation/sources/lookup/profile_lookup.js
+++ b/jstests/aggregation/sources/lookup/profile_lookup.js
@@ -3,38 +3,38 @@
// Tests that profiled $lookups contain the correct namespace and that Top is updated accordingly.
(function() {
- "use strict";
+"use strict";
- const localColl = db.local;
- const foreignColl = db.foreign;
- localColl.drop();
- foreignColl.drop();
+const localColl = db.local;
+const foreignColl = db.foreign;
+localColl.drop();
+foreignColl.drop();
- assert.commandWorked(localColl.insert([{a: 1}, {b: 1}, {a: 2}]));
- assert.commandWorked(foreignColl.insert({a: 1}));
+assert.commandWorked(localColl.insert([{a: 1}, {b: 1}, {a: 2}]));
+assert.commandWorked(foreignColl.insert({a: 1}));
- db.system.profile.drop();
- db.setProfilingLevel(2);
+db.system.profile.drop();
+db.setProfilingLevel(2);
- let oldTop = db.adminCommand("top");
+let oldTop = db.adminCommand("top");
- localColl.aggregate(
- [{$lookup: {from: foreignColl.getName(), as: "res", localField: "a", foreignField: "a"}}]);
+localColl.aggregate(
+ [{$lookup: {from: foreignColl.getName(), as: "res", localField: "a", foreignField: "a"}}]);
- db.setProfilingLevel(0);
+db.setProfilingLevel(0);
- // Confirm that namespace is the local rather than foreign collection.
- let profileDoc = db.system.profile.findOne();
- assert.eq("test.local", profileDoc.ns);
+// Confirm that namespace is the local rather than foreign collection.
+let profileDoc = db.system.profile.findOne();
+assert.eq("test.local", profileDoc.ns);
- // Confirm that the local collection had one command added to Top.
- let newTop = db.adminCommand("top");
- assert.eq(1,
- newTop.totals[localColl.getFullName()].commands.count -
- oldTop.totals[localColl.getFullName()].commands.count);
+// Confirm that the local collection had one command added to Top.
+let newTop = db.adminCommand("top");
+assert.eq(1,
+ newTop.totals[localColl.getFullName()].commands.count -
+ oldTop.totals[localColl.getFullName()].commands.count);
- // Confirm that for each document in local, the foreign collection had one entry added to Top.
- assert.eq(3,
- newTop.totals[foreignColl.getFullName()].commands.count -
- oldTop.totals[foreignColl.getFullName()].commands.count);
+// Confirm that for each document in local, the foreign collection had one entry added to Top.
+assert.eq(3,
+ newTop.totals[foreignColl.getFullName()].commands.count -
+ oldTop.totals[foreignColl.getFullName()].commands.count);
}());
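The profiling machinery used above, sketched in isolation (collection names are illustrative, and exact system.profile fields vary by server version):

// Sketch: profile everything, run a $lookup, then read system.profile and Top.
db.setProfilingLevel(2);   // level 2 profiles all operations
db.localColl.aggregate(
    [{$lookup: {from: "foreignColl", localField: "a", foreignField: "a", as: "res"}}]);
db.setProfilingLevel(0);   // turn profiling back off
printjson(db.system.profile.findOne().ns);           // namespace of the profiled op
const top = db.adminCommand("top");                  // per-collection usage counters
printjson(top.totals[db.localColl.getFullName()]);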
diff --git a/jstests/aggregation/sources/match/collation_match.js b/jstests/aggregation/sources/match/collation_match.js
index bc9456898f9..8c8c225f66d 100644
--- a/jstests/aggregation/sources/match/collation_match.js
+++ b/jstests/aggregation/sources/match/collation_match.js
@@ -3,47 +3,46 @@
// Test that the $match stage respects the collation.
(function() {
- "use strict";
-
- var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
-
- var coll = db.collation_match;
- coll.drop();
- assert.writeOK(coll.insert({a: "a"}));
-
- // Test that the $match respects an explicit collation when it can be pushed down into the query
- // layer.
- assert.eq(1, coll.aggregate([{$match: {a: "A"}}], caseInsensitive).itcount());
-
- // Test that the $match respects an explicit collation when it cannot be pushed down into the
- // query layer.
- assert.eq(
- 1, coll.aggregate([{$project: {b: "B"}}, {$match: {b: "b"}}], caseInsensitive).itcount());
-
- // Test that $match inside a $facet respects the collation.
- assert.eq(1,
- coll.aggregate([{$facet: {fct: [{$match: {a: "A"}}]}}], caseInsensitive)
- .toArray()[0]
- .fct.length);
-
- // Test that when a $match can be split to be part before the $unwind and part after, both
- // pieces of the split respect the collation.
- coll.drop();
- assert.writeOK(coll.insert({a: "foo", b: ["bar"]}));
- assert.eq(1,
- coll.aggregate([{$limit: 1}, {$unwind: "$b"}, {$match: {a: "FOO", b: "BAR"}}],
- caseInsensitive)
- .itcount());
-
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
- assert.writeOK(coll.insert({a: "a"}));
-
- // Test that the $match respects the inherited collation when it can be pushed down into the
- // query layer.
- assert.eq(1, coll.aggregate([{$match: {a: "A"}}]).itcount());
-
- // Test that the $match respects the inherited collation when it cannot be pushed down into the
- // query layer.
- assert.eq(1, coll.aggregate([{$project: {b: "B"}}, {$match: {b: "b"}}]).itcount());
+"use strict";
+
+var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
+
+var coll = db.collation_match;
+coll.drop();
+assert.writeOK(coll.insert({a: "a"}));
+
+// Test that the $match respects an explicit collation when it can be pushed down into the query
+// layer.
+assert.eq(1, coll.aggregate([{$match: {a: "A"}}], caseInsensitive).itcount());
+
+// Test that the $match respects an explicit collation when it cannot be pushed down into the
+// query layer.
+assert.eq(1, coll.aggregate([{$project: {b: "B"}}, {$match: {b: "b"}}], caseInsensitive).itcount());
+
+// Test that $match inside a $facet respects the collation.
+assert.eq(1,
+ coll.aggregate([{$facet: {fct: [{$match: {a: "A"}}]}}], caseInsensitive)
+ .toArray()[0]
+ .fct.length);
+
+// Test that when a $match can be split to be part before the $unwind and part after, both
+// pieces of the split respect the collation.
+coll.drop();
+assert.writeOK(coll.insert({a: "foo", b: ["bar"]}));
+assert.eq(
+ 1,
+ coll.aggregate([{$limit: 1}, {$unwind: "$b"}, {$match: {a: "FOO", b: "BAR"}}], caseInsensitive)
+ .itcount());
+
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
+assert.writeOK(coll.insert({a: "a"}));
+
+// Test that the $match respects the inherited collation when it can be pushed down into the
+// query layer.
+assert.eq(1, coll.aggregate([{$match: {a: "A"}}]).itcount());
+
+// Test that the $match respects the inherited collation when it cannot be pushed down into the
+// query layer.
+assert.eq(1, coll.aggregate([{$project: {b: "B"}}, {$match: {b: "b"}}]).itcount());
})();
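The two collation paths the test distinguishes, sketched standalone with illustrative names: a per-operation collation passed to aggregate(), versus a collection-default collation inherited by every operation on that collection.

// Sketch: strength 2 makes string comparisons case-insensitive.
const ci = {collation: {locale: "en_US", strength: 2}};
db.c1.insert({a: "a"});
db.c1.aggregate([{$match: {a: "A"}}], ci).itcount();  // 1: explicit collation
db.createCollection("c2", ci);                        // default collation on c2
db.c2.insert({a: "a"});
db.c2.aggregate([{$match: {a: "A"}}]).itcount();      // 1: inherited collation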
diff --git a/jstests/aggregation/sources/match/expr_match.js b/jstests/aggregation/sources/match/expr_match.js
index b2627c963cc..8e2541958db 100644
--- a/jstests/aggregation/sources/match/expr_match.js
+++ b/jstests/aggregation/sources/match/expr_match.js
@@ -1,49 +1,44 @@
// Basic testing to confirm that the $match stage handles $expr correctly.
(function() {
- "use strict";
-
- const coll = db.expr_match;
- coll.drop();
- assert.writeOK(coll.insert({x: 0}));
- assert.writeOK(coll.insert({x: 1, y: 1}));
- assert.writeOK(coll.insert({x: 2, y: 4}));
- assert.writeOK(coll.insert({x: 3, y: 9}));
-
- // $match with $expr representing local document field path reference.
- assert.eq(1, coll.aggregate([{$match: {$expr: {$eq: ["$x", 2]}}}]).itcount());
- assert.eq(1, coll.aggregate([{$match: {$expr: {$eq: ["$x", "$y"]}}}]).itcount());
- assert.eq(3, coll.aggregate([{$match: {$expr: {$eq: ["$x", {$sqrt: "$y"}]}}}]).itcount());
-
- // $match with $expr containing $or and $and.
- assert.eq(
- 2,
- coll.aggregate([{
- $match: {
- $expr:
- {$or: [{$eq: ["$x", 3]}, {$and: [{$eq: ["$x", 2]}, {$eq: ["$y", 4]}]}]}
- }
- }])
- .itcount());
-
-    // $match with $expr containing $in.
- assert.eq(3,
- coll.aggregate([{$match: {$expr: {$in: ["$x", [1, {$mod: [4, 2]}, 3]]}}}]).itcount());
-
- // $match with constant expression and field path.
- assert.eq(1,
- coll.aggregate([{$match: {$expr: {$gte: ["$y", {$multiply: [3, 3]}]}}}]).itcount());
-
- // $match with constant expression and no field path.
- assert.eq(4, coll.aggregate([{$match: {$expr: {$gte: [10, 5]}}}]).itcount());
- assert.eq(0, coll.aggregate([{$match: {$expr: {$gte: [5, 10]}}}]).itcount());
-
- // $match with $expr works inside a $or.
- assert.eq(4,
- coll.aggregate([{$match: {$or: [{$expr: {$eq: ["$foo", "$bar"]}}, {b: {$gt: 3}}]}}])
- .itcount());
-
- // $match with $expr works inside a $and.
- assert.eq(2,
- coll.aggregate([{$match: {$and: [{$expr: {$eq: ["$foo", "$bar"]}}, {x: {$lt: 2}}]}}])
- .itcount());
+"use strict";
+
+const coll = db.expr_match;
+coll.drop();
+assert.writeOK(coll.insert({x: 0}));
+assert.writeOK(coll.insert({x: 1, y: 1}));
+assert.writeOK(coll.insert({x: 2, y: 4}));
+assert.writeOK(coll.insert({x: 3, y: 9}));
+
+// $match with $expr representing local document field path reference.
+assert.eq(1, coll.aggregate([{$match: {$expr: {$eq: ["$x", 2]}}}]).itcount());
+assert.eq(1, coll.aggregate([{$match: {$expr: {$eq: ["$x", "$y"]}}}]).itcount());
+assert.eq(3, coll.aggregate([{$match: {$expr: {$eq: ["$x", {$sqrt: "$y"}]}}}]).itcount());
+
+// $match with $expr containing $or and $and.
+assert.eq(
+ 2,
+ coll.aggregate([{
+ $match: {$expr: {$or: [{$eq: ["$x", 3]}, {$and: [{$eq: ["$x", 2]}, {$eq: ["$y", 4]}]}]}}
+ }])
+ .itcount());
+
+// $match with $expr containing $in.
+assert.eq(3, coll.aggregate([{$match: {$expr: {$in: ["$x", [1, {$mod: [4, 2]}, 3]]}}}]).itcount());
+
+// $match with constant expression and field path.
+assert.eq(1, coll.aggregate([{$match: {$expr: {$gte: ["$y", {$multiply: [3, 3]}]}}}]).itcount());
+
+// $match with constant expression and no field path.
+assert.eq(4, coll.aggregate([{$match: {$expr: {$gte: [10, 5]}}}]).itcount());
+assert.eq(0, coll.aggregate([{$match: {$expr: {$gte: [5, 10]}}}]).itcount());
+
+// $match with $expr works inside a $or.
+assert.eq(
+ 4,
+ coll.aggregate([{$match: {$or: [{$expr: {$eq: ["$foo", "$bar"]}}, {b: {$gt: 3}}]}}]).itcount());
+
+// $match with $expr works inside a $and.
+assert.eq(2,
+ coll.aggregate([{$match: {$and: [{$expr: {$eq: ["$foo", "$bar"]}}, {x: {$lt: 2}}]}}])
+ .itcount());
})();
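What $expr adds to $match, sketched standalone (collection name is illustrative): it admits aggregation expressions, so a predicate can compare two fields of the same document, which a plain query predicate cannot do.

// Sketch: field-to-field comparison via $expr.
db.nums.insert([{x: 2, y: 4}, {x: 3, y: 3}]);
db.nums.aggregate([{$match: {$expr: {$eq: ["$x", "$y"]}}}]);           // {x: 3, y: 3}
db.nums.aggregate([{$match: {$expr: {$eq: ["$x", {$sqrt: "$y"}]}}}]);  // {x: 2, y: 4}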
diff --git a/jstests/aggregation/sources/match/text_search_requires_index.js b/jstests/aggregation/sources/match/text_search_requires_index.js
index 431b5185d0e..ff62fa1bea2 100644
--- a/jstests/aggregation/sources/match/text_search_requires_index.js
+++ b/jstests/aggregation/sources/match/text_search_requires_index.js
@@ -2,19 +2,19 @@
// TODO: Reenable test on passthroughs with sharded collections as part of SERVER-38996.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
+load("jstests/aggregation/extras/utils.js"); // For "assertErrorCode".
- const coll = db.coll;
- const from = db.from;
+const coll = db.coll;
+const from = db.from;
- coll.drop();
- from.drop();
+coll.drop();
+from.drop();
- const textPipeline = [{$match: {$text: {$search: "foo"}}}];
+const textPipeline = [{$match: {$text: {$search: "foo"}}}];
- const pipeline = [
+const pipeline = [
{
$lookup: {
pipeline: textPipeline,
@@ -24,16 +24,16 @@
},
];
- assert.commandWorked(coll.insert({_id: 1}));
- assert.commandWorked(from.insert({_id: 100, a: "foo"}));
+assert.commandWorked(coll.insert({_id: 1}));
+assert.commandWorked(from.insert({_id: 100, a: "foo"}));
- // Fail without index.
- assertErrorCode(from, textPipeline, ErrorCodes.IndexNotFound);
- assertErrorCode(coll, pipeline, ErrorCodes.IndexNotFound);
+// Fail without index.
+assertErrorCode(from, textPipeline, ErrorCodes.IndexNotFound);
+assertErrorCode(coll, pipeline, ErrorCodes.IndexNotFound);
- assert.commandWorked(from.createIndex({a: "text"}));
+assert.commandWorked(from.createIndex({a: "text"}));
- // Should run when you have the text index.
- assert.eq(from.aggregate(textPipeline).itcount(), 1);
- assert.eq(coll.aggregate(pipeline).itcount(), 1);
+// Should run when you have the text index.
+assert.eq(from.aggregate(textPipeline).itcount(), 1);
+assert.eq(coll.aggregate(pipeline).itcount(), 1);
}());
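The indexing requirement being tested, sketched standalone with illustrative names: $text fails with IndexNotFound until a text index exists, whether run directly or inside a $lookup sub-pipeline.

// Sketch: the same $text predicate before and after creating the text index.
db.articles.insert({_id: 1, body: "foo"});
// Fails with IndexNotFound while no text index exists:
// db.articles.aggregate([{$match: {$text: {$search: "foo"}}}]);
db.articles.createIndex({body: "text"});
db.articles.aggregate([{$match: {$text: {$search: "foo"}}}]).itcount();  // 1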
diff --git a/jstests/aggregation/sources/merge/all_modes.js b/jstests/aggregation/sources/merge/all_modes.js
index 83e0192530f..3854008072c 100644
--- a/jstests/aggregation/sources/merge/all_modes.js
+++ b/jstests/aggregation/sources/merge/all_modes.js
@@ -4,312 +4,290 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- const source = db.all_modes_source;
- const target = db.all_modes_target;
+const source = db.all_modes_source;
+const target = db.all_modes_target;
- (function setup() {
- source.drop();
- target.drop();
+(function setup() {
+ source.drop();
+ target.drop();
- // All tests use the same data in the source collection.
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]));
+ // All tests use the same data in the source collection.
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]));
+})();
- })();
+// Test 'whenMatched=replace whenNotMatched=insert' mode. This is an equivalent of a
+// replacement-style update with upsert=true.
+(function testWhenMatchedReplaceWhenNotMatchedInsert() {
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 40}
+ ]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=insert' mode. This is an equivalent of a
- // replacement-style update with upsert=true.
- (function testWhenMatchedReplaceWhenNotMatchedInsert() {
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 40}
- ]
- });
- })();
+// Test 'whenMatched=replace whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedReplaceWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
-    // processed; it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedReplaceWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=replace whenNotMatched=discard' mode. Documents in the target
+// collection without a match in the source collection should not be modified as a result
+// of the merge operation.
+(function testWhenMatchedReplaceWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=discard' mode. Documents in the target
- // collection without a match in the source collection should not be modified as a result
- // of the merge operation.
- (function testWhenMatchedReplaceWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=fail whenNotMatched=insert' mode. For matched documents the update should
+// be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document with a match.
+(function testWhenMatchedFailWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert(
+ [{_id: 10, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ // Besides ensuring that a DuplicateKey error is raised when we find a matching document,
+ // this test also verifies that this $merge mode does perform an unordered insert and all
+ // documents in the batch without a matching document get inserted into the target
+ // collection. There is a special case when we can bail out early without processing all
+ // documents which fit into a single batch. Namely, if we have a sharded cluster with two
+ // shards, and shard documents by {_id: "hashed"}, we will end up with the document {_id: 3}
+    // landing on shard0, and {_id: 1} and {_id: 2} on shard1 in the source collection. Note
+    // that {_id: 3} has a duplicate key with the document in the target collection. For this
+    // particular case, the entire pipeline is sent to each shard. Let's assume that shard0 has
+ // processed its single document with {_id: 3} and raised a DuplicateKey error, whilst
+ // shard1 hasn't performed any writes yet (or even hasn't started reading from the cursor).
+ // The mongos, after receiving the DuplicateKey, will stop pulling data from the shards
+ // and will kill the cursors open on the remaining shards. Shard1, eventually, will throw
+ // a CursorKilled during an interrupt check, and so no writes will be done into the target
+    // collection. To work around this scenario and guarantee that the writes will always be
+ // performed, we will sort the documents by _id in ascending order. In this case, the
+ // pipeline will be split and we will pull everything to mongos before doing the $merge.
+    // This also ensures that documents with {_id: 1} and {_id: 2} will be inserted first
+ // before the DuplicateKey error is raised.
+ const error = assert.throws(() => source.aggregate([
+ {$sort: {_id: 1}},
+ {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ]));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 30, c: "y"},
+ {_id: 4, a: 40, c: "z"},
+ {_id: 10, a: 10, c: "x"}
+ ]
+ });
+})();
- // Test 'whenMatched=fail whenNotMatched=insert' mode. For matched documents the update should
- // be unordered and report an error at the end when all documents in a batch have been
-    // processed; it will not fail as soon as we hit the first document with a match.
- (function testWhenMatchedFailWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 10, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- // Besides ensuring that a DuplicateKey error is raised when we find a matching document,
- // this test also verifies that this $merge mode does perform an unordered insert and all
- // documents in the batch without a matching document get inserted into the target
- // collection. There is a special case when we can bail out early without processing all
- // documents which fit into a single batch. Namely, if we have a sharded cluster with two
- // shards, and shard documents by {_id: "hashed"}, we will end up with the document {_id: 3}
-        // landing on shard0, and {_id: 1} and {_id: 2} on shard1 in the source collection. Note
-        // that {_id: 3} has a duplicate key with the document in the target collection. For this
-        // particular case, the entire pipeline is sent to each shard. Let's assume that shard0 has
- // processed its single document with {_id: 3} and raised a DuplicateKey error, whilst
- // shard1 hasn't performed any writes yet (or even hasn't started reading from the cursor).
- // The mongos, after receiving the DuplicateKey, will stop pulling data from the shards
- // and will kill the cursors open on the remaining shards. Shard1, eventually, will throw
- // a CursorKilled during an interrupt check, and so no writes will be done into the target
-        // collection. To work around this scenario and guarantee that the writes will always be
- // performed, we will sort the documents by _id in ascending order. In this case, the
- // pipeline will be split and we will pull everything to mongos before doing the $merge.
-        // This also ensures that documents with {_id: 1} and {_id: 2} will be inserted first
- // before the DuplicateKey error is raised.
- const error = assert.throws(() => source.aggregate([
- {$sort: {_id: 1}},
- {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 30, c: "y"},
- {_id: 4, a: 40, c: "z"},
- {_id: 10, a: 10, c: "x"}
- ]
- });
- })();
+// Test 'whenMatched=fail whenNotMatched=fail' mode. This mode is not supported and should fail.
+(function testWhenMatchedFailWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=fail whenNotMatched=fail' mode. This mode is not supported and should fail.
- (function testWhenMatchedFailWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(
- () => source.aggregate(
- [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "fail"}}]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=fail whenNotMatched=discard' mode. This mode is not supported and should
+// fail.
+(function testWhenMatchedFailWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "discard"}}]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=fail whenNotMatched=discard' mode. This mode is not supported and should
- // fail.
- (function testWhenMatchedFailWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "discard"}}
- ]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=merge whenNotMatched=insert' mode. This is an equivalent of an update
+// with a $set operator and upsert=true.
+(function testWhenMatchedMergeWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "z"}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "z", b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 40}
+ ]
+ });
+})();
- // Test 'whenMatched=merge whenNotMatched=insert' mode. This is an equivalent of an update
- // with a $set operator and upsert=true.
- (function testWhenMatchedMergeWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, a: 10, c: "z"}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "z", b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 40}
- ]
- });
- })();
+// Test 'whenMatched=merge whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedMergeWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 3, a: 3, b: "c", c: "y"},
+ {_id: 4, a: 40, c: "z"}
+ ]
+ });
+})();
- // Test 'whenMatched=merge whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
-    // processed; it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedMergeWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 3, a: 3, b: "c", c: "y"},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=merge whenNotMatched=discard' mode. Documents in the target collection
+// without a match in the source collection should not be modified as a result of the merge
+// operation.
+(function testWhenMatchedMergeWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 3, a: 3, b: "c", c: "y"},
+ {_id: 4, a: 40, c: "z"}
+ ]
+ });
+})();
-    // Test 'whenMatched=merge whenNotMatched=discard' mode. Documents in the target collection
-    // without a match in the source collection should not be modified as a result of the merge
-    // operation.
- (function testWhenMatchedMergeWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 3, a: 3, b: "c", c: "y"},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=insert' mode. This is an equivalent of a
+// pipeline-style update with upsert=true.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "insert"}
+ }]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: 2}, {_id: 3, x: 2}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=insert' mode. This is an equivalent of a
- // pipeline-style update with upsert=true.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: 1}));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "insert"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: 2}, {_id: 3, x: 2}]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ const error = assert.throws(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
+ }]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: 10, c: "x", x: 2}, {_id: 3, a: 30, c: "y", x: 2}, {_id: 4, a: 40, c: "z"}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
-    // processed; it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- const error = assert.throws(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "fail"
- }
- }]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 10, c: "x", x: 2},
- {_id: 3, a: 30, c: "y", x: 2},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=discard' mode. Documents in the target collection
+// without a match in the source collection should not be modified as a result of the merge
+// operation.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "discard"}
+ }]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: 10, c: "x", x: 2}, {_id: 3, a: 30, c: "y", x: 2}, {_id: 4, a: 40, c: "z"}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=discard' mode. Documents in the target collection
- // without a match in the source collection should not be modified as a result of the merge
- // operation.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "discard"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 10, c: "x", x: 2},
- {_id: 3, a: 30, c: "y", x: 2},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=insert' mode. Existing documents in the target
+// collection which have a matching document in the source collection must not be updated, only
+// documents without a match must be inserted into the target collection.
+(function testWhenMatchedKeepExistingWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 10}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=keepExisting whenNotMatched=insert' mode. Existing documents in the target
- // collection which have a matching document in the source collection must not be updated, only
- // documents without a match must be inserted into the target collection.
- (function testWhenMatchedKeepExistingWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge:
- {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 10}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=fail' mode. This mode is not supported and
+// should fail.
+(function testWhenMatchedKeepExistingWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "fail"}}
+ ]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=keepExisting whenNotMatched=fail' mode. This mode is not supported and
- // should fail.
- (function testWhenMatchedKeepExistingWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([{
- $merge:
- {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "fail"}
- }]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
-
- // Test 'whenMatched=keepExisting whenNotMatched=discard' mode. This mode is not supported and
- // should fail.
- (function testWhenMatchedKeepExistingWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "keepExisting",
- whenNotMatched: "discard"
- }
- }]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=discard' mode. This mode is not supported and
+// should fail.
+(function testWhenMatchedKeepExistingWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "discard"}}
+ ]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
}());
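A compressed view of the mode matrix the tests above walk through, as a standalone sketch (collection names are illustrative; 51181 is the error code the tests assert for the unsupported combinations):

// Sketch: documents are matched on _id by default; the two knobs control what
// happens on a match and on a miss.
db.src.insert({_id: 1, a: 1});
db.dst.insert({_id: 1, a: 10});
// replace/insert: behaves like a replacement-style upsert.
db.src.aggregate([{$merge: {into: "dst", whenMatched: "replace", whenNotMatched: "insert"}}]);
// merge/insert: behaves like a $set-style upsert.
db.src.aggregate([{$merge: {into: "dst", whenMatched: "merge", whenNotMatched: "insert"}}]);
// [pipeline]/insert: arbitrary pipeline-style update of matched documents.
db.src.aggregate(
    [{$merge: {into: "dst", whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "insert"}}]);
// fail/fail, fail/discard, keepExisting/fail and keepExisting/discard are
// rejected with error code 51181 before any writes happen.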
diff --git a/jstests/aggregation/sources/merge/batch_writes.js b/jstests/aggregation/sources/merge/batch_writes.js
index 3dc6455161e..1d0c5502391 100644
--- a/jstests/aggregation/sources/merge/batch_writes.js
+++ b/jstests/aggregation/sources/merge/batch_writes.js
@@ -3,69 +3,69 @@
// nothing horrendous happens and to characterize the current behavior.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode.
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const coll = db.batch_writes;
- const outColl = db.batch_writes_out;
- coll.drop();
- outColl.drop();
+const coll = db.batch_writes;
+const outColl = db.batch_writes_out;
+coll.drop();
+outColl.drop();
- // Test with 2 very large documents that do not fit into a single batch.
- const kSize15MB = 15 * 1024 * 1024;
- const largeArray = new Array(kSize15MB).join("a");
- assert.commandWorked(coll.insert({_id: 0, a: largeArray}));
- assert.commandWorked(coll.insert({_id: 1, a: largeArray}));
+// Test with 2 very large documents that do not fit into a single batch.
+const kSize15MB = 15 * 1024 * 1024;
+const largeArray = new Array(kSize15MB).join("a");
+assert.commandWorked(coll.insert({_id: 0, a: largeArray}));
+assert.commandWorked(coll.insert({_id: 1, a: largeArray}));
- // Make sure the $merge succeeds without any duplicate keys.
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- // Skip the combination of merge modes which will fail depending on the contents of the
- // source and target collection, as this will cause the aggregation to fail.
- if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
- return;
+// Make sure the $merge succeeds without any duplicate keys.
+withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ // Skip the combination of merge modes which will fail depending on the contents of the
+ // source and target collection, as this will cause the aggregation to fail.
+ if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
+ return;
- coll.aggregate([{
- $merge: {
- into: outColl.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]);
- assert.eq(whenNotMatchedMode == "discard" ? 0 : 2, outColl.find().itcount());
- outColl.drop();
- });
+ coll.aggregate([{
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]);
+ assert.eq(whenNotMatchedMode == "discard" ? 0 : 2, outColl.find().itcount());
+ outColl.drop();
+});
- coll.drop();
- for (let i = 0; i < 10; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
+coll.drop();
+for (let i = 0; i < 10; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
- // Create a unique index on 'a' in the output collection to create a unique key violation when
- // running the $merge. The second document to be written ({_id: 1, a: 1}) will conflict with the
- // existing document in the output collection. We use a unique index on a field other than _id
- // because whenMatched: "replace" will not change _id when one already exists.
- outColl.drop();
- assert.commandWorked(outColl.insert({_id: 2, a: 1}));
- assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
+// Create a unique index on 'a' in the output collection to create a unique key violation when
+// running the $merge. The second document to be written ({_id: 1, a: 1}) will conflict with the
+// existing document in the output collection. We use a unique index on a field other than _id
+// because whenMatched: "replace" will not change _id when one already exists.
+outColl.drop();
+assert.commandWorked(outColl.insert({_id: 2, a: 1}));
+assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
- // Test that the writes for $merge are unordered, meaning the operation continues even if it
- // encounters a duplicate key error. We don't guarantee any particular behavior in this case,
- // but this test is meant to characterize the current behavior.
- assertErrorCode(
- coll,
- [{$merge: {into: outColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}],
- ErrorCodes.DuplicateKey);
- assert.soon(() => {
- return outColl.find().itcount() == 9;
- });
+// Test that the writes for $merge are unordered, meaning the operation continues even if it
+// encounters a duplicate key error. We don't guarantee any particular behavior in this case,
+// but this test is meant to characterize the current behavior.
+assertErrorCode(
+ coll,
+ [{$merge: {into: outColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}],
+ ErrorCodes.DuplicateKey);
+assert.soon(() => {
+ return outColl.find().itcount() == 9;
+});
- assertErrorCode(
- coll,
- [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}],
- ErrorCodes.DuplicateKey);
- assert.soon(() => {
- return outColl.find().itcount() == 9;
- });
+assertErrorCode(
+ coll,
+ [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}],
+ ErrorCodes.DuplicateKey);
+assert.soon(() => {
+ return outColl.find().itcount() == 9;
+});
}());
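The unordered-write behavior characterized above, sketched standalone with illustrative names:

// Sketch: one document hits a unique-index conflict, but the other is still
// written because $merge issues unordered writes.
db.batchSrc.insert([{_id: 0, a: 0}, {_id: 1, a: 1}]);
db.batchOut.insert({_id: 9, a: 1});                 // will conflict with {_id: 1, a: 1}
db.batchOut.createIndex({a: 1}, {unique: true});
try {
    db.batchSrc.aggregate(
        [{$merge: {into: "batchOut", whenMatched: "fail", whenNotMatched: "insert"}}]);
} catch (e) {
    print("expected DuplicateKey error: " + e);
}
db.batchOut.find().itcount();  // 2: {_id: 0, a: 0} was inserted despite the error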
diff --git a/jstests/aggregation/sources/merge/bypass_doc_validation.js b/jstests/aggregation/sources/merge/bypass_doc_validation.js
index d43b624ba91..957fcc9a2df 100644
--- a/jstests/aggregation/sources/merge/bypass_doc_validation.js
+++ b/jstests/aggregation/sources/merge/bypass_doc_validation.js
@@ -4,186 +4,150 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const testDB = db.getSiblingDB("out_bypass_doc_val");
- const sourceColl = testDB.getCollection("source");
- const targetColl = testDB.getCollection("target");
+const testDB = db.getSiblingDB("out_bypass_doc_val");
+const sourceColl = testDB.getCollection("source");
+const targetColl = testDB.getCollection("target");
+targetColl.drop();
+assert.commandWorked(testDB.createCollection(targetColl.getName(), {validator: {a: 2}}));
+
+sourceColl.drop();
+assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
+
+// Test that the bypassDocumentValidation flag is passed through to the writes on the output
+// collection.
+(function testBypassDocValidationTrue() {
+ sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
+}());
+
+// Test that whenMatched: "replace" passes without the bypassDocumentValidation flag if the
+// updated doc is valid.
+(function testReplacementStyleUpdateWithoutBypass() {
+ sourceColl.aggregate([
+ {$addFields: {a: 2}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 2}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: false});
+ assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
+}());
+
+function assertDocValidationFailure(cmdOptions) {
+ assert.commandWorked(targetColl.remove({}));
+ assertErrorCode(sourceColl,
+ [{$merge: targetColl.getName()}],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+
+ assertErrorCode(
+ sourceColl,
+ [
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+
+ assertErrorCode(
+ sourceColl,
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+ assert.eq(0, targetColl.find().itcount());
+}
+
+// Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
+// flag is not set.
+assertDocValidationFailure({});
+
+// Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
+// flag is explicitly set to false.
+assertDocValidationFailure({bypassDocumentValidation: false});
+
+// Test that bypassDocumentValidation is *not* needed if the source collection has a
+// validator but the output collection does not.
+(function testDocValidatorOnSourceCollection() {
targetColl.drop();
- assert.commandWorked(testDB.createCollection(targetColl.getName(), {validator: {a: 2}}));
+ assert.commandWorked(testDB.runCommand({collMod: sourceColl.getName(), validator: {a: 1}}));
+
+ sourceColl.aggregate([{$merge: targetColl.getName()}]);
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+
+ sourceColl.aggregate([
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+
+ sourceColl.aggregate([
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
+}());
+// Test that the bypassDocumentValidation flag is coerced to true if the value is non-boolean.
+(function testNonBooleanBypassDocValidationFlag() {
+ assert.commandWorked(targetColl.remove({}));
+ assert.commandWorked(testDB.runCommand({collMod: targetColl.getName(), validator: {a: 1}}));
sourceColl.drop();
assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
- // Test that the bypassDocumentValidation flag is passed through to the writes on the output
- // collection.
- (function testBypassDocValidationTrue() {
- sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+ sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: 5});
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
- }());
-
-    // Test that whenMatched: "replace" passes without the bypassDocumentValidation flag if the
- // updated doc is valid.
- (function testReplacementStyleUpdateWithoutBypass() {
- sourceColl.aggregate([
- {$addFields: {a: 2}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}
- }
- ]);
- assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
- sourceColl.aggregate(
- [
- {$addFields: {a: 2}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: false});
- assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
- }());
-
- function assertDocValidationFailure(cmdOptions) {
- assert.commandWorked(targetColl.remove({}));
- assertErrorCode(sourceColl,
- [{$merge: targetColl.getName()}],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
-
- assertErrorCode(sourceColl,
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
-
- assertErrorCode(
- sourceColl,
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}
- }
- ],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
- assert.eq(0, targetColl.find().itcount());
- }
-
- // Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
- // flag is not set.
- assertDocValidationFailure({});
-
- // Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
- // flag is explicitly set to false.
- assertDocValidationFailure({bypassDocumentValidation: false});
-
- // Test that bypassDocumentValidation is *not* needed if the source collection has a
- // validator but the output collection does not.
- (function testDocValidatorOnSourceCollection() {
- targetColl.drop();
- assert.commandWorked(testDB.runCommand({collMod: sourceColl.getName(), validator: {a: 1}}));
-
- sourceColl.aggregate([{$merge: targetColl.getName()}]);
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
-
- sourceColl.aggregate([
+ sourceColl.aggregate(
+ [
{$addFields: {a: 3}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}
- }
- ]);
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: "false"});
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+}());
- sourceColl.aggregate([
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
- ]);
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
- }());
-
-    // Test that the bypassDocumentValidation flag is coerced to true if the value is non-boolean.
- (function testNonBooleanBypassDocValidationFlag() {
- assert.commandWorked(targetColl.remove({}));
- assert.commandWorked(testDB.runCommand({collMod: targetColl.getName(), validator: {a: 1}}));
- sourceColl.drop();
- assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
-
- sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: 5});
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: "false"});
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
- }());
-
- // Test bypassDocumentValidation with $merge to a collection in a foreign database.
- (function testForeignDb() {
- const foreignDB = db.getSiblingDB("foreign_db");
- const foreignColl = foreignDB.foreign_coll;
- foreignColl.drop();
- assert.commandWorked(
- foreignDB.createCollection(foreignColl.getName(), {validator: {a: 2}}));
-
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
+// Test bypassDocumentValidation with $merge to a collection in a foreign database.
+(function testForeignDb() {
+ const foreignDB = db.getSiblingDB("foreign_db");
+ const foreignColl = foreignDB.foreign_coll;
+ foreignColl.drop();
+ assert.commandWorked(foreignDB.createCollection(foreignColl.getName(), {validator: {a: 2}}));
+
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 3}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -192,15 +156,15 @@
whenMatched: "replace",
whenNotMatched: "insert"
}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}], foreignColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
+ }
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}], foreignColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -209,16 +173,16 @@
whenMatched: "fail",
whenNotMatched: "insert"
}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], foreignColl.find().sort({_id: 1}).toArray());
-
- assert.commandWorked(foreignColl.remove({}));
- assertErrorCode(sourceColl,
- [
- {$addFields: {a: 3}},
- {
+ }
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], foreignColl.find().sort({_id: 1}).toArray());
+
+ assert.commandWorked(foreignColl.remove({}));
+ assertErrorCode(sourceColl,
+ [
+ {$addFields: {a: 3}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -227,14 +191,14 @@
whenMatched: "replace",
whenNotMatched: "insert"
}
- }
- ],
- ErrorCodes.DocumentValidationFailure);
-
- assertErrorCode(sourceColl,
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
+ }
+ ],
+ ErrorCodes.DocumentValidationFailure);
+
+ assertErrorCode(sourceColl,
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -243,9 +207,9 @@
whenMatched: "fail",
whenNotMatched: "insert"
}
- }
- ],
- ErrorCodes.DocumentValidationFailure);
- assert.eq(0, foreignColl.find().itcount());
- }());
+ }
+ ],
+ ErrorCodes.DocumentValidationFailure);
+ assert.eq(0, foreignColl.find().itcount());
+}());
}());
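For comparison, a sketch of the same flag on a plain insert command, using a hypothetical namespace; $merge forwards bypassDocumentValidation to its writes on the output collection with exactly this effect.

const sketch = db.bypass_doc_val_insert_sketch;
sketch.drop();
assert.commandWorked(db.createCollection(sketch.getName(), {validator: {a: 2}}));
// Without the flag, the write is rejected by the collection validator.
assert.commandFailedWithCode(
    db.runCommand({insert: sketch.getName(), documents: [{a: 1}]}),
    ErrorCodes.DocumentValidationFailure);
// With the flag, the same write succeeds.
assert.commandWorked(db.runCommand(
    {insert: sketch.getName(), documents: [{a: 1}], bypassDocumentValidation: true}));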
diff --git a/jstests/aggregation/sources/merge/disallowed_in_lookup.js b/jstests/aggregation/sources/merge/disallowed_in_lookup.js
index 3731055f6b9..19f37305dbe 100644
--- a/jstests/aggregation/sources/merge/disallowed_in_lookup.js
+++ b/jstests/aggregation/sources/merge/disallowed_in_lookup.js
@@ -1,28 +1,28 @@
// Tests that $merge cannot be used within a $lookup pipeline.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
- load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
- load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
- load("jstests/libs/fixture_helpers.js"); // For isSharded.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
+load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
+load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
+load("jstests/libs/fixture_helpers.js"); // For isSharded.
- const kErrorCodeMergeBannedInLookup = 51047;
- const kErrorCodeMergeLastStageOnly = 40601;
- const coll = db.merge_in_lookup_not_allowed;
- coll.drop();
+const kErrorCodeMergeBannedInLookup = 51047;
+const kErrorCodeMergeLastStageOnly = 40601;
+const coll = db.merge_in_lookup_not_allowed;
+coll.drop();
- const from = db.merge_in_lookup_not_allowed_from;
- from.drop();
+const from = db.merge_in_lookup_not_allowed_from;
+from.drop();
- if (FixtureHelpers.isSharded(from)) {
- setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
- "internalQueryAllowShardedLookup",
- true);
- }
+if (FixtureHelpers.isSharded(from)) {
+ setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
+ "internalQueryAllowShardedLookup",
+ true);
+}
- let pipeline = [
+let pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}],
@@ -31,9 +31,9 @@
}
},
];
- assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
+assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$project: {x: 0}}, {$merge: {into: "out_collection", on: "_id"}}],
@@ -42,9 +42,9 @@
}
},
];
- assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
+assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}, {$match: {x: true}}],
@@ -53,14 +53,14 @@
}
},
];
- // Pipeline will fail because $merge is not last in the subpipeline.
- // Validation for $merge in a $lookup's subpipeline occurs at a later point.
- assertErrorCode(coll, pipeline, kErrorCodeMergeLastStageOnly);
+// Pipeline will fail because $merge is not last in the subpipeline.
+// Validation for $merge in a $lookup's subpipeline occurs at a later point.
+assertErrorCode(coll, pipeline, kErrorCodeMergeLastStageOnly);
- // Create view which contains $merge within $lookup.
- assertDropCollection(coll.getDB(), "view1");
+// Create view which contains $merge within $lookup.
+assertDropCollection(coll.getDB(), "view1");
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}],
@@ -69,14 +69,14 @@
}
},
];
- // Pipeline will fail because $merge is not allowed to exist within a $lookup.
- // Validation for $merge in a view occurs at a later point.
- const cmdRes =
- coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
- assert.commandFailedWithCode(cmdRes, kErrorCodeMergeBannedInLookup);
+// Pipeline will fail because $merge is not allowed to exist within a $lookup.
+// Validation for $merge in a view occurs at a later point.
+const cmdRes =
+ coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
+assert.commandFailedWithCode(cmdRes, kErrorCodeMergeBannedInLookup);
- // Test that a $merge without an explicit "on" field still fails within a $lookup.
- pipeline = [
+// Test that a $merge without an explicit "on" field still fails within a $lookup.
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection"}}],
@@ -85,7 +85,7 @@
}
},
];
- assert.commandFailedWithCode(
- db.runCommand({aggregate: coll.getName(), pipeline: pipeline, cursor: {}}),
- kErrorCodeMergeBannedInLookup);
+assert.commandFailedWithCode(
+ db.runCommand({aggregate: coll.getName(), pipeline: pipeline, cursor: {}}),
+ kErrorCodeMergeBannedInLookup);
}());
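The supported arrangement, sketched with hypothetical names and assuming an unsharded target: since $merge is banned inside a $lookup subpipeline, the join runs first and $merge sits at the top level as the final stage.

const srcColl = db.merge_after_lookup_sketch;
srcColl.drop();
assert.commandWorked(srcColl.insert({_id: 0, f_id: 0}));
// $lookup performs the join; $merge then writes the joined result as the last stage.
assert.doesNotThrow(() => srcColl.aggregate([
    {$lookup: {from: "other_coll", localField: "f_id", foreignField: "_id", as: "joined"}},
    {$merge: {into: "out_collection", on: "_id"}}
]));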
diff --git a/jstests/aggregation/sources/merge/exchange_explain.js b/jstests/aggregation/sources/merge/exchange_explain.js
index 362af97ed46..23bed99973d 100644
--- a/jstests/aggregation/sources/merge/exchange_explain.js
+++ b/jstests/aggregation/sources/merge/exchange_explain.js
@@ -6,173 +6,169 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- "use strict";
-
- const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
-
- const mongosDB = st.s.getDB("test_db");
-
- const inColl = mongosDB["inColl"];
- const targetCollRange = mongosDB["targetCollRange"];
- const targetCollRangeOtherField = mongosDB["targetCollRangeOtherField"];
- const targetCollHash = mongosDB["targetCollHash"];
-
- const numDocs = 1000;
-
- function runExplainQuery(targetColl) {
- return inColl.explain("allPlansExecution").aggregate([
- {$group: {_id: "$a", a: {$avg: "$a"}}},
- {
- $merge: {
- into: {
- db: targetColl.getDB().getName(),
- coll: targetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ]);
- }
+"use strict";
- function runRealQuery(targetColl) {
- return inColl.aggregate([
- {$group: {_id: "$a", a: {$avg: "$a"}}},
- {
- $merge: {
- into: {
- db: targetColl.getDB().getName(),
- coll: targetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ]);
- }
+const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
- function getExchangeSpec(explain) {
- assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
- assert(explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+const mongosDB = st.s.getDB("test_db");
- return explain.splitPipeline.exchange;
- }
+const inColl = mongosDB["inColl"];
+const targetCollRange = mongosDB["targetCollRange"];
+const targetCollRangeOtherField = mongosDB["targetCollRangeOtherField"];
+const targetCollHash = mongosDB["targetCollHash"];
- // Shard the input collection.
- st.shardColl(inColl, {a: 1}, {a: 500}, {a: 500}, mongosDB.getName());
+const numDocs = 1000;
-    // Insert some data into the input collection.
- let bulk = inColl.initializeUnorderedBulkOp();
- for (let i = 0; i < numDocs; i++) {
-        bulk.insert({a: i, b: [0, 1, 2, 3, i]});
- }
- assert.commandWorked(bulk.execute());
-
- // Shard the output collections.
- st.shardColl(targetCollRange, {_id: 1}, {_id: 500}, {_id: 500}, mongosDB.getName());
- st.shardColl(targetCollRangeOtherField, {b: 1}, {b: 500}, {b: 500}, mongosDB.getName());
- st.shardColl(targetCollHash, {_id: "hashed"}, false, false, mongosDB.getName());
-
- // Run the explain. We expect to see the range based exchange here.
- let explain = runExplainQuery(targetCollRange);
-
- // Make sure we see the exchange in the explain output.
- assert.eq(explain.mergeType, "exchange", tojson(explain));
- let exchangeSpec = getExchangeSpec(explain);
- assert.eq(exchangeSpec.policy, "keyRange");
- assert.eq(exchangeSpec.key, {_id: 1});
-
- // Run the real query.
- runRealQuery(targetCollRange);
- let results = targetCollRange.aggregate([{'$count': "count"}]).next().count;
- assert.eq(results, numDocs);
-
- // Rerun the same query with the hash based exchange.
- explain = runExplainQuery(targetCollHash);
-
- // Make sure we see the exchange in the explain output.
- assert.eq(explain.mergeType, "exchange", tojson(explain));
- exchangeSpec = getExchangeSpec(explain);
- assert.eq(exchangeSpec.policy, "keyRange");
- assert.eq(exchangeSpec.key, {_id: "hashed"});
-
- // Run the real query.
- runRealQuery(targetCollHash);
- results = targetCollHash.aggregate([{'$count': "count"}]).next().count;
- assert.eq(results, numDocs);
-
-    // This should fail because the "on" field ('b' in this case, the shard key of the target
-    // collection) must be present in every result document (and may not be null or an array),
-    // but the preceding $group stage never outputs a 'b' field.
- assertErrorCode(inColl,
- [{
- $merge: {
- into: {
- db: targetCollRangeOtherField.getDB().getName(),
- coll: targetCollRangeOtherField.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }],
- 51132);
-
- // Turn off the exchange and rerun the query.
- assert.commandWorked(mongosDB.adminCommand({setParameter: 1, internalQueryDisableExchange: 1}));
- explain = runExplainQuery(targetCollRange);
-
- // Make sure there is no exchange.
- assert.eq(explain.mergeType, "anyShard", tojson(explain));
- assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
- assert(!explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
-
-    // This should fail in the same way even when we are not running the exchange.
- assertErrorCode(inColl,
- [{
- $merge: {
- into: {
- db: targetCollRangeOtherField.getDB().getName(),
- coll: targetCollRangeOtherField.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }],
- 51132);
-
- // SERVER-38349 Make sure mongos rejects specifying exchange directly.
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: inColl.getName(),
- pipeline: [],
- cursor: {},
- exchange: {
- policy: "keyRange",
- bufferSize: NumberInt(1024),
- boundaries: [{_id: 0}],
- consumers: NumberInt(2),
- consumerIds: [NumberInt(0), NumberInt(1)]
+function runExplainQuery(targetColl) {
+ return inColl.explain("allPlansExecution").aggregate([
+ {$group: {_id: "$a", a: {$avg: "$a"}}},
+ {
+ $merge: {
+ into: {
+ db: targetColl.getDB().getName(),
+ coll: targetColl.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
}
- }),
- 51028);
+ ]);
+}
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: inColl.getName(),
- pipeline: [{
+function runRealQuery(targetColl) {
+ return inColl.aggregate([
+ {$group: {_id: "$a", a: {$avg: "$a"}}},
+ {
$merge: {
- into: targetCollRange.getName(),
+ into: {
+ db: targetColl.getDB().getName(),
+ coll: targetColl.getName(),
+ },
whenMatched: "replace",
whenNotMatched: "insert"
}
- }],
- cursor: {},
- exchange: {
- policy: "keyRange",
- bufferSize: NumberInt(1024),
- boundaries: [{_id: 0}],
- consumers: NumberInt(2),
- consumerIds: [NumberInt(0), NumberInt(1)]
}
- }),
- 51028);
+ ]);
+}
+
+function getExchangeSpec(explain) {
+ assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
+ assert(explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+
+ return explain.splitPipeline.exchange;
+}
+
+// Shard the input collection.
+st.shardColl(inColl, {a: 1}, {a: 500}, {a: 500}, mongosDB.getName());
+
+// Insert some data into the input collection.
+let bulk = inColl.initializeUnorderedBulkOp();
+for (let i = 0; i < numDocs; i++) {
+    bulk.insert({a: i, b: [0, 1, 2, 3, i]});
+}
+assert.commandWorked(bulk.execute());
+
+// Shard the output collections.
+st.shardColl(targetCollRange, {_id: 1}, {_id: 500}, {_id: 500}, mongosDB.getName());
+st.shardColl(targetCollRangeOtherField, {b: 1}, {b: 500}, {b: 500}, mongosDB.getName());
+st.shardColl(targetCollHash, {_id: "hashed"}, false, false, mongosDB.getName());
+
+// Run the explain. We expect to see the range based exchange here.
+let explain = runExplainQuery(targetCollRange);
+
+// Make sure we see the exchange in the explain output.
+assert.eq(explain.mergeType, "exchange", tojson(explain));
+let exchangeSpec = getExchangeSpec(explain);
+assert.eq(exchangeSpec.policy, "keyRange");
+assert.eq(exchangeSpec.key, {_id: 1});
+
+// Run the real query.
+runRealQuery(targetCollRange);
+let results = targetCollRange.aggregate([{'$count': "count"}]).next().count;
+assert.eq(results, numDocs);
+
+// Rerun the same query with the hash based exchange.
+explain = runExplainQuery(targetCollHash);
+
+// Make sure we see the exchange in the explain output.
+assert.eq(explain.mergeType, "exchange", tojson(explain));
+exchangeSpec = getExchangeSpec(explain);
+assert.eq(exchangeSpec.policy, "keyRange");
+assert.eq(exchangeSpec.key, {_id: "hashed"});
+
+// Run the real query.
+runRealQuery(targetCollHash);
+results = targetCollHash.aggregate([{'$count': "count"}]).next().count;
+assert.eq(results, numDocs);
+
+// This should fail because the "on" field ('b' in this case, the shard key of the target
+// collection) must be present in every result document (and may not be null or an array),
+// but the preceding $group stage never outputs a 'b' field.
+assertErrorCode(inColl,
+ [{
+ $merge: {
+ into: {
+ db: targetCollRangeOtherField.getDB().getName(),
+ coll: targetCollRangeOtherField.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
+ }],
+ 51132);
+
+// Turn off the exchange and rerun the query.
+assert.commandWorked(mongosDB.adminCommand({setParameter: 1, internalQueryDisableExchange: 1}));
+explain = runExplainQuery(targetCollRange);
+
+// Make sure there is no exchange.
+assert.eq(explain.mergeType, "anyShard", tojson(explain));
+assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
+assert(!explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+
+// This should fail in the same way even when we are not running the exchange.
+assertErrorCode(inColl,
+ [{
+ $merge: {
+ into: {
+ db: targetCollRangeOtherField.getDB().getName(),
+ coll: targetCollRangeOtherField.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
+ }],
+ 51132);
+
+// SERVER-38349 Make sure mongos rejects specifying exchange directly.
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: inColl.getName(),
+ pipeline: [],
+ cursor: {},
+ exchange: {
+ policy: "keyRange",
+ bufferSize: NumberInt(1024),
+ boundaries: [{_id: 0}],
+ consumers: NumberInt(2),
+ consumerIds: [NumberInt(0), NumberInt(1)]
+ }
+}),
+ 51028);
+
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: inColl.getName(),
+ pipeline: [{
+ $merge: {into: targetCollRange.getName(), whenMatched: "replace", whenNotMatched: "insert"}
+ }],
+ cursor: {},
+ exchange: {
+ policy: "keyRange",
+ bufferSize: NumberInt(1024),
+ boundaries: [{_id: 0}],
+ consumers: NumberInt(2),
+ consumerIds: [NumberInt(0), NumberInt(1)]
+ }
+}),
+ 51028);
- st.stop();
+st.stop();
}());
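For reference, the shape of the explain output that getExchangeSpec() walks, with purely illustrative values; the assertions above depend only on the top-level mergeType and the exchange object nested under splitPipeline.

const exampleExplain = {
    mergeType: "exchange",
    splitPipeline: {
        exchange: {
            policy: "keyRange",
            key: {_id: 1},
            // Illustrative boundaries for two consumers split at {_id: 500}.
            boundaries: [{_id: MinKey}, {_id: 500}, {_id: MaxKey}],
            consumers: NumberInt(2)
        }
    }
};
assert.eq("keyRange", exampleExplain.splitPipeline.exchange.policy);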
diff --git a/jstests/aggregation/sources/merge/merge_to_referenced_collection.js b/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
index c6a82bab79d..a9060f58b0a 100644
--- a/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
+++ b/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
@@ -9,64 +9,55 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- 'use strict';
+'use strict';
- load('jstests/aggregation/extras/merge_helpers.js'); // For 'withEachMergeMode'.
- load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'.
+load('jstests/aggregation/extras/merge_helpers.js'); // For 'withEachMergeMode'.
+load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'.
- const testDB = db.getSiblingDB("merge_to_referenced_coll");
- const coll = testDB.test;
+const testDB = db.getSiblingDB("merge_to_referenced_coll");
+const coll = testDB.test;
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- coll.drop();
+withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ coll.drop();
- // Seed the collection to ensure each pipeline will actually do something.
- assert.commandWorked(coll.insert({_id: 0}));
+ // Seed the collection to ensure each pipeline will actually do something.
+ assert.commandWorked(coll.insert({_id: 0}));
- // Each of the following assertions will somehow use $merge to write to a namespace that is
- // being read from elsewhere in the pipeline.
- const assertFailsWithCode = ((fn) => {
- const error = assert.throws(fn);
- assert.contains(error.code, [51188, 51079]);
- });
+ // Each of the following assertions will somehow use $merge to write to a namespace that is
+ // being read from elsewhere in the pipeline.
+ const assertFailsWithCode = ((fn) => {
+ const error = assert.throws(fn);
+ assert.contains(error.code, [51188, 51079]);
+ });
- // Test $merge to the aggregate command's source collection.
- assertFailsWithCode(() => coll.aggregate([{
+ // Test $merge to the aggregate command's source collection.
+ assertFailsWithCode(() => coll.aggregate([{
+ $merge:
+ {into: coll.getName(), whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}
+ }]));
+
+ // Test $merge to the same namespace as a $lookup which is the same as the aggregate
+ // command's source collection.
+ assertFailsWithCode(() => coll.aggregate([
+ {$lookup: {from: coll.getName(), as: "x", localField: "f_id", foreignField: "_id"}},
+ {
$merge: {
into: coll.getName(),
whenMatched: whenMatchedMode,
whenNotMatched: whenNotMatchedMode
}
- }]));
+ }
+ ]));
- // Test $merge to the same namespace as a $lookup which is the same as the aggregate
- // command's source collection.
- assertFailsWithCode(() => coll.aggregate([
- {$lookup: {from: coll.getName(), as: "x", localField: "f_id", foreignField: "_id"}},
- {
- $merge: {
- into: coll.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
+ // Test $merge to the same namespace as a $lookup which is *not* the same as the aggregate
+ // command's source collection.
+ assertFailsWithCode(() => coll.aggregate([
+ {$lookup: {from: "bar", as: "x", localField: "f_id", foreignField: "_id"}},
+ {$merge: {into: "bar", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
- // Test $merge to the same namespace as a $lookup which is *not* the same as the aggregate
- // command's source collection.
- assertFailsWithCode(() => coll.aggregate([
- {$lookup: {from: "bar", as: "x", localField: "f_id", foreignField: "_id"}},
- {
- $merge: {
- into: "bar",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
-
- // Test $merge to the same namespace as a $graphLookup.
- assertFailsWithCode(() => coll.aggregate([
+ // Test $merge to the same namespace as a $graphLookup.
+ assertFailsWithCode(() => coll.aggregate([
{
$graphLookup: {
from: "bar",
@@ -85,8 +76,8 @@
}
]));
- // Test $merge to the same namespace as a $lookup which is nested within another $lookup.
- assertFailsWithCode(() => coll.aggregate([
+ // Test $merge to the same namespace as a $lookup which is nested within another $lookup.
+ assertFailsWithCode(() => coll.aggregate([
{
$lookup: {
from: "bar",
@@ -103,49 +94,33 @@
}
}
]));
- // Test $merge to the same namespace as a $lookup which is nested within a $facet.
- assertFailsWithCode(() => coll.aggregate([
- {
- $facet: {
- y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
- }
- },
- {
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
- assertFailsWithCode(() => coll.aggregate([
- {
- $facet: {
- x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
- y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
- }
- },
- {
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
+ // Test $merge to the same namespace as a $lookup which is nested within a $facet.
+ assertFailsWithCode(() => coll.aggregate([
+ {
+ $facet: {
+ y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
}
- ]));
-
-        // Test that we use the resolved namespace of a view to detect this sort of Halloween
-        // problem.
- assert.commandWorked(
- testDB.runCommand({create: "view_on_TARGET", viewOn: "TARGET", pipeline: []}));
- assertFailsWithCode(() => testDB.view_on_TARGET.aggregate([{
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
+ },
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+ assertFailsWithCode(() => coll.aggregate([
+ {
+ $facet: {
+ x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
+ y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
}
- }]));
- assertFailsWithCode(() => coll.aggregate([
+ },
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+
+    // Test that we use the resolved namespace of a view to detect this sort of Halloween
+    // problem.
+ assert.commandWorked(
+ testDB.runCommand({create: "view_on_TARGET", viewOn: "TARGET", pipeline: []}));
+ assertFailsWithCode(() => testDB.view_on_TARGET.aggregate([
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+ assertFailsWithCode(() => coll.aggregate([
{
$facet: {
x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
@@ -167,25 +142,21 @@
}
]));
- function generateNestedPipeline(foreignCollName, numLevels) {
- let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
-
- for (let level = 1; level < numLevels; level++) {
- pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
- }
+ function generateNestedPipeline(foreignCollName, numLevels) {
+ let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
- return pipeline;
+ for (let level = 1; level < numLevels; level++) {
+ pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
}
- const nestedPipeline = generateNestedPipeline("lookup", 20).concat([{
- $merge: {
- into: "lookup",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]);
- assertFailsWithCode(() => coll.aggregate(nestedPipeline));
+ return pipeline;
+ }
- testDB.dropDatabase();
- });
+ const nestedPipeline = generateNestedPipeline("lookup", 20).concat([
+ {$merge: {into: "lookup", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]);
+ assertFailsWithCode(() => coll.aggregate(nestedPipeline));
+
+ testDB.dropDatabase();
+});
}());
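By contrast, a sketch of the supported pattern with hypothetical namespaces: a $merge whose target is not read anywhere else in the pipeline does not trip the 51188/51079 checks above.

const readColl = db.merge_not_referenced_sketch;
readColl.drop();
assert.commandWorked(readColl.insert({_id: 0, f_id: 0}));
// The pipeline reads 'readColl' and 'lookup_ns' but writes to a third namespace.
assert.doesNotThrow(() => readColl.aggregate([
    {$lookup: {from: "lookup_ns", as: "x", localField: "f_id", foreignField: "_id"}},
    {$merge: {into: "separate_target", whenMatched: "replace", whenNotMatched: "insert"}}
]));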
diff --git a/jstests/aggregation/sources/merge/merge_to_same_collection.js b/jstests/aggregation/sources/merge/merge_to_same_collection.js
index 2e26a26965a..51435696fdd 100644
--- a/jstests/aggregation/sources/merge/merge_to_same_collection.js
+++ b/jstests/aggregation/sources/merge/merge_to_same_collection.js
@@ -2,19 +2,19 @@
* Tests that $merge fails when the target collection is the aggregation collection.
*
* @tags: [assumes_unsharded_collection]
-*/
+ */
(function() {
- "use strict";
+"use strict";
- // For assertMergeFailsForAllModesWithCode.
- load("jstests/aggregation/extras/merge_helpers.js");
+// For assertMergeFailsForAllModesWithCode.
+load("jstests/aggregation/extras/merge_helpers.js");
- const coll = db.name;
- coll.drop();
+const coll = db.name;
+coll.drop();
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
- assertMergeFailsForAllModesWithCode({source: coll, target: coll, errorCodes: 51188});
+const nDocs = 10;
+for (let i = 0; i < nDocs; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
+assertMergeFailsForAllModesWithCode({source: coll, target: coll, errorCodes: 51188});
}());
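The working counterpart, sketched with hypothetical names and assuming an unsharded target: the same $merge succeeds once the target differs from the aggregation collection.

const sketchSrc = db.merge_same_coll_sketch;
sketchSrc.drop();
assert.commandWorked(sketchSrc.insert({_id: 0, a: 0}));
assert.doesNotThrow(() => sketchSrc.aggregate([
    {$merge: {into: "merge_same_coll_sketch_out", whenMatched: "replace", whenNotMatched: "insert"}}
]));
assert.eq(1, db.merge_same_coll_sketch_out.find().itcount());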
diff --git a/jstests/aggregation/sources/merge/mode_fail_insert.js b/jstests/aggregation/sources/merge/mode_fail_insert.js
index 7cfd6aee02e..9363c42b12d 100644
--- a/jstests/aggregation/sources/merge/mode_fail_insert.js
+++ b/jstests/aggregation/sources/merge/mode_fail_insert.js
@@ -1,147 +1,149 @@
// Tests the behavior of $merge with whenMatched: "fail" and whenNotMatched: "insert".
// @tags: [assumes_unsharded_collection, assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const coll = db.merge_insert_only;
- coll.drop();
-
- const targetColl = db.merge_insert_only_out;
- targetColl.drop();
-
- const pipeline =
- [{$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}];
-
- //
- // Test $merge with a non-existent output collection.
- //
- assert.commandWorked(coll.insert({_id: 0}));
-
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Test $merge with an existing output collection.
- //
- assert.commandWorked(coll.remove({_id: 0}));
- assert.commandWorked(coll.insert({_id: 1}));
- coll.aggregate(pipeline);
- assert.eq(2, targetColl.find().itcount());
-
- //
- // Test that $merge fails if there's a duplicate key error.
- //
- assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
-
- //
- // Test that $merge will preserve the indexes and options of the output collection.
- //
- const validator = {a: {$gt: 0}};
- targetColl.drop();
- assert.commandWorked(db.createCollection(targetColl.getName(), {validator: validator}));
- assert.commandWorked(targetColl.createIndex({a: 1}));
-
- coll.drop();
- assert.commandWorked(coll.insert({a: 1}));
-
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
- assert.eq(2, targetColl.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
-
- //
- // Test that $merge fails if it violates a unique index constraint.
- //
- coll.drop();
- assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
- targetColl.drop();
- assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
-
- assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
-
- //
- // Test that a $merge aggregation succeeds even if the _id is stripped out and the "unique key"
- // is the document key, which will be _id for a new collection.
- //
- coll.drop();
- assert.commandWorked(coll.insert({a: 0}));
- targetColl.drop();
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}},
- ]));
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Test that a $merge aggregation succeeds even if the _id is stripped out and _id is included
- // in the "on" fields.
- //
- coll.drop();
- assert.commandWorked(coll.insert([{_id: "should be projected away", name: "kyle"}]));
- targetColl.drop();
- assert.commandWorked(targetColl.createIndex({_id: 1, name: -1}, {unique: true}));
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "fail",
- whenNotMatched: "insert",
- on: ["_id", "name"]
- }
- },
- ]));
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Tests for $merge to a database that differs from the aggregation database.
- //
- const foreignDb = db.getSiblingDB("merge_insert_only_foreign");
- const foreignTargetColl = foreignDb.merge_insert_only_out;
- const pipelineDifferentOutputDb = [
- {$project: {_id: 0}},
- {
- $merge: {
- into: {
- db: foreignDb.getName(),
- coll: foreignTargetColl.getName(),
- },
- whenMatched: "fail",
- whenNotMatched: "insert",
- }
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const coll = db.merge_insert_only;
+coll.drop();
+
+const targetColl = db.merge_insert_only_out;
+targetColl.drop();
+
+const pipeline =
+ [{$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}];
+
+//
+// Test $merge with a non-existent output collection.
+//
+assert.commandWorked(coll.insert({_id: 0}));
+
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Test $merge with an existing output collection.
+//
+assert.commandWorked(coll.remove({_id: 0}));
+assert.commandWorked(coll.insert({_id: 1}));
+coll.aggregate(pipeline);
+assert.eq(2, targetColl.find().itcount());
+
+//
+// Test that $merge fails if there's a duplicate key error.
+//
+assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
+
+//
+// Test that $merge will preserve the indexes and options of the output collection.
+//
+const validator = {
+ a: {$gt: 0}
+};
+targetColl.drop();
+assert.commandWorked(db.createCollection(targetColl.getName(), {validator: validator}));
+assert.commandWorked(targetColl.createIndex({a: 1}));
+
+coll.drop();
+assert.commandWorked(coll.insert({a: 1}));
+
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+assert.eq(2, targetColl.getIndexes().length);
+
+const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
+assert.commandWorked(listColl);
+assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+
+//
+// Test that $merge fails if it violates a unique index constraint.
+//
+coll.drop();
+assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
+targetColl.drop();
+assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
+
+assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
+
+//
+// Test that a $merge aggregation succeeds even if the _id is stripped out and the "unique key"
+// is the document key, which will be _id for a new collection.
+//
+coll.drop();
+assert.commandWorked(coll.insert({a: 0}));
+targetColl.drop();
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}},
+]));
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Test that a $merge aggregation succeeds even if the _id is stripped out and _id is included
+// in the "on" fields.
+//
+coll.drop();
+assert.commandWorked(coll.insert([{_id: "should be projected away", name: "kyle"}]));
+targetColl.drop();
+assert.commandWorked(targetColl.createIndex({_id: 1, name: -1}, {unique: true}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {
+ $merge: {
+ into: targetColl.getName(),
+ whenMatched: "fail",
+ whenNotMatched: "insert",
+ on: ["_id", "name"]
+ }
+ },
+]));
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Tests for $merge to a database that differs from the aggregation database.
+//
+const foreignDb = db.getSiblingDB("merge_insert_only_foreign");
+const foreignTargetColl = foreignDb.merge_insert_only_out;
+const pipelineDifferentOutputDb = [
+ {$project: {_id: 0}},
+ {
+ $merge: {
+ into: {
+ db: foreignDb.getName(),
+ coll: foreignTargetColl.getName(),
+ },
+ whenMatched: "fail",
+ whenNotMatched: "insert",
}
- ];
-
- foreignDb.dropDatabase();
- coll.drop();
- assert.commandWorked(coll.insert({a: 1}));
-
- if (!FixtureHelpers.isMongos(db)) {
- //
- // Test that $merge implicitly creates a new database when the output collection's database
- // doesn't exist.
- //
- coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().itcount(), 1);
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Explicitly create the collection and database, then fall through to the test below.
- assert.commandWorked(foreignTargetColl.insert({val: "forcing database creation"}));
}
-
- //
- // Re-run the $merge aggregation, which should merge with the existing contents of the
- // collection. We rely on implicit _id generation to give us unique _id values.
- //
- assert.doesNotThrow(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.eq(foreignTargetColl.find().itcount(), 2);
+];
+
+foreignDb.dropDatabase();
+coll.drop();
+assert.commandWorked(coll.insert({a: 1}));
+
+if (!FixtureHelpers.isMongos(db)) {
+ //
+ // Test that $merge implicitly creates a new database when the output collection's database
+ // doesn't exist.
+ //
+ coll.aggregate(pipelineDifferentOutputDb);
+ assert.eq(foreignTargetColl.find().itcount(), 1);
+} else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Explicitly create the collection and database, then fall through to the test below.
+ assert.commandWorked(foreignTargetColl.insert({val: "forcing database creation"}));
+}
+
+//
+// Re-run the $merge aggregation, which should merge with the existing contents of the
+// collection. We rely on implicit _id generation to give us unique _id values.
+//
+assert.doesNotThrow(() => coll.aggregate(pipelineDifferentOutputDb));
+assert.eq(foreignTargetColl.find().itcount(), 2);
}());
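A compact sketch of the insert-only contract exercised above, with hypothetical namespaces: the first run inserts because nothing matches, and an identical second run fails because whenMatched: "fail" forbids touching the now-matching document.

const failSrc = db.merge_fail_insert_sketch_src;
const failDst = db.merge_fail_insert_sketch_dst;
failSrc.drop();
failDst.drop();
assert.commandWorked(failSrc.insert({_id: 0}));
const failPipeline =
    [{$merge: {into: failDst.getName(), whenMatched: "fail", whenNotMatched: "insert"}}];
failSrc.aggregate(failPipeline);  // No match yet, so {_id: 0} is inserted.
assert.eq(1, failDst.find().itcount());
// The second run matches {_id: 0} in the target and therefore raises DuplicateKey.
const error = assert.throws(() => failSrc.aggregate(failPipeline));
assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);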
diff --git a/jstests/aggregation/sources/merge/mode_keep_existing_insert.js b/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
index b76fb9d20e9..3f146adbcb5 100644
--- a/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
+++ b/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
@@ -4,372 +4,367 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
-    load("jstests/libs/fixture_helpers.js");  // For FixtureHelpers.isSharded.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge into a non-existent collection.
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- ]
- });
- })();
-
- // Test $merge into an existing collection.
- (function testMergeIntoExistentCollection() {
- assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge does not update documents in the target collection if they were not modified
- // in the source collection.
- (function testMergeDoesNotUpdateUnmodifiedDocuments() {
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge doesn't update documents in the target collection if they were modified in the
- // source collection.
- (function testMergeDoesNotUpdateModifiedDocuments() {
- // Update and merge a single document.
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
-
- // Update and merge multiple documents.
- assert.commandWorked(source.update({_id: 1}, {a: 11}));
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if it was inserted into the
- // source collection.
- (function testMergeInsertsNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]
- });
- assert.commandWorked(source.deleteOne({_id: 3}));
- assert.commandWorked(target.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 4, c: "d"}
- ]
- });
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
-            // key of {_id: 1} will not be covered by the unique index created in this test,
-            // which is not allowed.
- return;
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge into a non-existent collection.
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ ]
+ });
+})();
+
+// Test $merge into an existing collection.
+(function testMergeIntoExistentCollection() {
+ assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge does not update documents in the target collection if they were not modified
+// in the source collection.
+(function testMergeDoesNotUpdateUnmodifiedDocuments() {
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge doesn't update documents in the target collection if they were modified in the
+// source collection.
+(function testMergeDoesNotUpdateModifiedDocuments() {
+ // Update and merge a single document.
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+
+ // Update and merge multiple documents.
+ assert.commandWorked(source.update({_id: 1}, {a: 11}));
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge inserts a new document into the target collection if it was inserted into the
+// source collection.
+(function testMergeInsertsNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]
+ });
+ assert.commandWorked(source.deleteOne({_id: 3}));
+ assert.commandWorked(target.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 4, c: "d"}
+ ]
+ });
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+ assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
+ // The 'on' fields contains a single document field.
+ let error = assert.throws(
+ () => source.aggregate([{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+ // The 'on' fields contains multiple document fields.
+ error = assert.throws(
+ () => source.aggregate(
+ [{$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ // The 'on' fields contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+ // The 'on' fields contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: {b: "b"}, c: "x"}, {_id: 2, a: {b: "c"}}, {_id: 3, a: {b: 30}, b: "c"}]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51185);
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+    // The _id is a single 'on' field (the default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({
+ // Remove the _id field from the projection as the arrayEq function cannot ignore
+ // mismatches in the ObjectId. The target collection should contain all elements from
+        // the source and the target even though they had the same _ids and would have been
+        // merged had we not removed the _id field from the aggregate projection.
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({
+ // Remove the _id field from the projection as the arrayEq function cannot ignore
+ // mismatches in the ObjectId. The target collection should contain all elements from
+        // the source and the target even though they had the same _ids and would have been
+        // merged had we not removed the _id field from the aggregate projection.
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
+
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePreservesIndexesAndOptions() {
+ const validator = {a: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = [{
+ $merge: {
+ into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ whenMatched: "keepExisting",
+ whenNotMatched: "insert"
}
+ }];
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 4, a: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
-    (function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
-        // The 'on' field contains a single document field.
- let error =
- assert.throws(() => source.aggregate(
- [{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
-        // The 'on' field contains multiple document fields.
- error = assert.throws(() => source.aggregate([
- {$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}
- ]));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
-    // Test $merge with an explicit 'on' field consisting of one or more document fields
-    // which differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
-        // The 'on' field contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
-        // The 'on' field contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}},
- {_id: 3, a: {b: 30}, b: "c"}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
-        // The _id is a single 'on' field (the default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({
- // Remove the _id field from the projection as the arrayEq function cannot ignore
- // mismatches in the ObjectId. The target collection should contain all elements from
-            // the source and the target even though they had the same _ids and would have been
-            // merged had we not removed the _id field from the aggregate projection.
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- // Remove the _id field from the projection as the arrayEq function cannot ignore
- // mismatches in the ObjectId. The target collection should contain all elements from
-            // the source and the target even though they had the same _ids and would have been
-            // merged had we not removed the _id field from the aggregate projection.
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
-    (function testMergePreservesIndexesAndOptions() {
- const validator = {a: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = [{
- $merge: {
- into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- whenMatched: "keepExisting",
- whenNotMatched: "insert"
- }
- }];
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
-            // Force creation of the database and collection, then fall through to the test below.
- assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
- }
-
- assert.commandWorked(source.update({_id: 1}, {a: 1, b: "a"}));
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+        // Force creation of the database and collection, then fall through to the test below.
+ assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ }
+
+ assert.commandWorked(source.update({_id: 1}, {a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
}());
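The tests above build each $merge spec by layering an explicit 'on' field onto a shared base stage with Object.assign, after giving both collections a unique index over the 'on' fields. A minimal standalone sketch of that pattern (the 'people_source'/'people_target' names and the 'email' field are illustrative, not part of the suite):

    // Base spec shared across tests; Object.assign({on: ...}, base) layers the 'on' field on top.
    const base = {into: "people_target", whenMatched: "keepExisting", whenNotMatched: "insert"};
    // The 'on' fields must be covered by a unique index (the tests above create one on both
    // collections); without it the stage fails with error code 51190 or 51183.
    db.people_source.createIndex({email: 1}, {unique: true});
    db.people_target.createIndex({email: 1}, {unique: true});
    db.people_source.aggregate(
        [{$project: {_id: 0}}, {$merge: Object.assign({on: "email"}, base)}]);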
diff --git a/jstests/aggregation/sources/merge/mode_merge_discard.js b/jstests/aggregation/sources/merge/mode_merge_discard.js
index 401210c1d4d..cc9fff93691 100644
--- a/jstests/aggregation/sources/merge/mode_merge_discard.js
+++ b/jstests/aggregation/sources/merge/mode_merge_discard.js
@@ -4,237 +4,227 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge when some documents in the source collection don't have a matching document in
-    // the target collection. The merge operation should succeed and discard unmatched documents.
- (function testMergeIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]
- });
-
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
- })();
-
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, b: 3}]
- });
- })();
-
-    // Test $merge when a field is present in both the source and the target and contains a
- // sub-document value.
- (function testMergeSubdocuments() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 1, a: {b: 1}}));
- assert.commandWorked(target.insert([{_id: 1, a: {c: 2}}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: {b: 1}}, {_id: 3, b: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: {b: 1}}, {_id: 2, a: {b: 2}}]));
- assert.commandWorked(target.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: {b: 1}, b: 1}, {_id: 2, a: {b: 2}}, {_id: 3, b: 3}]
- });
- })();
-
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
-    // Test $merge does not insert a new document into the target collection even if it was
-    // inserted into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
-        // The 'on' field contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
-        // The 'on' field contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert([
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge operation should succeed and discard unmatched documents.
+(function testMergeIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
+})();
+
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when a field is present in both the source and the target and contains a
+// sub-document value.
+(function testMergeSubdocuments() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 1, a: {b: 1}}));
+ assert.commandWorked(target.insert([{_id: 1, a: {c: 2}}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: {b: 1}}, {_id: 3, b: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: {b: 1}}, {_id: 2, a: {b: 2}}]));
+ assert.commandWorked(target.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: {b: 1}, b: 1}, {_id: 2, a: {b: 2}}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge does not insert a new document into the target collection even if it was
+// inserted into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge with an explicit 'on' field consisting of one or more document fields
+// which differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+    // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 30, b: "c", c: "x"}
- ]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c", c: "x"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, d: "z"}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, c: "y", d: "z"},
- ]
- });
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
-        // The _id is a single 'on' field (the default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+    // The 'on' field contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c", c: "x"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 4, a: 30, b: "c", c: "x"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, d: "z"}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 2, a: {b: "c"}, c: "y", d: "z"},
+ ]
+ });
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+    // The _id is a single 'on' field (the default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
}());
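For the mode exercised in this file, whenMatched: "merge" overlays the source document's fields onto the matched target document, while whenNotMatched: "discard" silently drops source documents without a match. A minimal sketch of that combination (the 'src' and 'tgt' collection names are illustrative, not part of the suite):

    db.src.drop();
    db.tgt.drop();
    assert.commandWorked(db.src.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));  // _id: 2 has no match
    assert.commandWorked(db.tgt.insert({_id: 1, b: 1}));
    db.src.aggregate([{$merge: {into: "tgt", whenMatched: "merge", whenNotMatched: "discard"}}]);
    // tgt now holds only {_id: 1, a: 1, b: 1}; the unmatched {_id: 2, a: 2} was discarded.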
diff --git a/jstests/aggregation/sources/merge/mode_merge_fail.js b/jstests/aggregation/sources/merge/mode_merge_fail.js
index 9bf0eadb148..de0842a02b9 100644
--- a/jstests/aggregation/sources/merge/mode_merge_fail.js
+++ b/jstests/aggregation/sources/merge/mode_merge_fail.js
@@ -4,116 +4,113 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]
- });
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, a: 3, b: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, a: 3, b: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
-    // Test $merge uses unordered batch updates. When a mismatch is detected in a batch, the error
- // should be returned once the batch is processed and no further documents should be processed
- // and updated.
- (function testMergeUnorderedBatchUpdate() {
- const maxBatchSize = 16 * 1024 * 1024; // 16MB
- const docSize = 1024 * 1024; // 1MB
- const numDocs = 20;
- const maxDocsInBatch = maxBatchSize / docSize;
+// Test $merge uses unordered batch updates. When a mismatch is detected in a batch, the error
+// should be returned once the batch is processed and no further documents should be processed
+// and updated.
+(function testMergeUnorderedBatchUpdate() {
+ const maxBatchSize = 16 * 1024 * 1024; // 16MB
+ const docSize = 1024 * 1024; // 1MB
+ const numDocs = 20;
+ const maxDocsInBatch = maxBatchSize / docSize;
- assert(source.drop());
- assert(target.drop());
+ assert(source.drop());
+ assert(target.drop());
- // Insert 'numDocs' documents of size 'docSize' into the source collection.
- generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
+ // Insert 'numDocs' documents of size 'docSize' into the source collection.
+ generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
- // Copy over documents from the source collection into the target and remove the 'padding'
- // field from the projection, so we can distinguish which documents have been modified by
- // the $merge stage.
- assert.doesNotThrow(
- () => source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
+ // Copy over documents from the source collection into the target and remove the 'padding'
+ // field from the projection, so we can distinguish which documents have been modified by
+ // the $merge stage.
+ assert.doesNotThrow(() =>
+ source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
- // Remove one document from the target collection so that $merge fails. This document should
- // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
- // field in ascending order. Since each document in the source collection is 1MB, and the
- // max batch size is 16MB, the first batch will contain documents with the _id in the range
- // of [0, 15].
- assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
+ // Remove one document from the target collection so that $merge fails. This document should
+ // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
+ // field in ascending order. Since each document in the source collection is 1MB, and the
+ // max batch size is 16MB, the first batch will contain documents with the _id in the range
+ // of [0, 15].
+ assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
- // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
- assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
+ // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
+ assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
- // Run the $merge pipeline and ensure it fails, as there is one document in the source
- // collection without a match in the target.
- const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ // Run the $merge pipeline and ensure it fails, as there is one document in the source
+ // collection without a match in the target.
+ const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- // There will be maxDocsInBatch documents in the batch, one without a match.
- const numDocsModified = maxDocsInBatch - 1;
- // All remaining documents except those in the first batch must be left unmodified.
- const numDocsUnmodified = numDocs - maxDocsInBatch;
- assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
- assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
- })();
+ // There will be maxDocsInBatch documents in the batch, one without a match.
+ const numDocsModified = maxDocsInBatch - 1;
+ // All remaining documents except those in the first batch must be left unmodified.
+ const numDocsUnmodified = numDocs - maxDocsInBatch;
+ assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
+ assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
+})();
}());
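The batch arithmetic in testMergeUnorderedBatchUpdate follows from the 16MB write-batch limit: with 1MB documents, the first batch holds 16 documents, and because the writes are unordered, the other 15 documents in the failing batch are still merged. Restated with the test's own values:

    const maxBatchSize = 16 * 1024 * 1024;               // 16MB write-batch limit
    const docSize = 1024 * 1024;                          // 1MB per padded document
    const numDocs = 20;
    const maxDocsInBatch = maxBatchSize / docSize;        // 16 documents fit in the first batch
    const numDocsModified = maxDocsInBatch - 1;           // 15 merged despite the batch error
    const numDocsUnmodified = numDocs - maxDocsInBatch;   // 4 documents never processed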
diff --git a/jstests/aggregation/sources/merge/mode_merge_insert.js b/jstests/aggregation/sources/merge/mode_merge_insert.js
index 370963a24d2..577479f7a46 100644
--- a/jstests/aggregation/sources/merge/mode_merge_insert.js
+++ b/jstests/aggregation/sources/merge/mode_merge_insert.js
@@ -4,368 +4,365 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge into a non-existent collection.
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- ]
- });
- })();
-
- // Test $merge into an existing collection.
- (function testMergeIntoExistentCollection() {
- assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge does not update documents in the target collection if they were not modified
- // in the source collection.
- (function testMergeDoesNotUpdateUnmodifiedDocuments() {
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge updates documents in the target collection if they were modified in the source
- // collection.
- (function testMergeUpdatesModifiedDocuments() {
- // Update and merge a single document.
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 22, b: "b", c: "c"}]
- });
-
- // Update and merge multiple documents.
- assert.commandWorked(source.update({_id: 1}, {a: 11}));
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 11, b: "a"}, {_id: 2, a: 22, b: "b", c: "c", d: "d"}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if it was inserted into the
- // source collection.
- (function testMergeInsertsNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- {_id: 3, a: 3, b: "c"}
- ]
- });
- assert.commandWorked(source.deleteOne({_id: 3}));
- assert.commandWorked(target.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 4, c: "d"}
- ]
- });
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 4, a: 11}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
-    (function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
-        // The 'on' field contains a single document field.
- let error =
- assert.throws(() => source.aggregate(
- [{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
-        // The 'on' field contains multiple document fields.
- error = assert.throws(() => source.aggregate([
- {$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}
- ]));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
-    // Test $merge with an explicit 'on' field consisting of one or more document fields
-    // which differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
-        // The 'on' field contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
-        // The 'on' field contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
-            // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge into a non-existent collection.
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ ]
+ });
+})();
+
+// Test $merge into an existing collection.
+(function testMergeIntoExistentCollection() {
+ assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge does not update documents in the target collection if they were not modified
+// in the source collection.
+(function testMergeDoesNotUpdateUnmodifiedDocuments() {
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge updates documents in the target collection if they were modified in the source
+// collection.
+(function testMergeUpdatesModifiedDocuments() {
+ // Update and merge a single document.
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 22, b: "b", c: "c"}]
+ });
+
+ // Update and merge multiple documents.
+ assert.commandWorked(source.update({_id: 1}, {a: 11}));
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 11, b: "a"}, {_id: 2, a: 22, b: "b", c: "c", d: "d"}]
+ });
+})();
+
+// Test $merge inserts a new document into the target collection if it was inserted into the
+// source collection.
+(function testMergeInsertsNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ {_id: 3, a: 3, b: "c"}
+ ]
+ });
+ assert.commandWorked(source.deleteOne({_id: 3}));
+ assert.commandWorked(target.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 4, c: "d"}
+ ]
+ });
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+ assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 11}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
+    // The 'on' field contains a single document field.
+ let error = assert.throws(
+ () => source.aggregate([{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+    // The 'on' field contains multiple document fields.
+ error = assert.throws(
+ () => source.aggregate(
+ [{$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
+
+// Test $merge with an explicit 'on' field over one or more document fields which differ
+// from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+    // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+    // The 'on' field contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 1, a: {b: "b"}, c: "x"},
{_id: 2, a: {b: "c"}, c: "y"},
{_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
- (function testMergePresrvesIndexesAndOptions() {
- const validator = {a: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = [{
- $merge: {
- into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- whenMatched: "merge",
- whenNotMatched: "insert"
- }
- }];
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq(
- {actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force a creation of the database and collection, then fall through the test below.
- assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ ]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51185);
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
+
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePreservesIndexesAndOptions() {
+ const validator = {a: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = [{
+ $merge: {
+ into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ whenMatched: "merge",
+ whenNotMatched: "insert"
}
+ }];
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+        // Force creation of the database and collection, then fall through to the test below.
+ assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ }
+
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_discard.js b/jstests/aggregation/sources/merge/mode_pipeline_discard.js
index 12b556b8384..0c9333ca2af 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_discard.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_discard.js
@@ -4,279 +4,271 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
- // A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
- // for the whenMatched mode. If 'initialStages' array is specified, the $merge stage will be
- // appended to this array and the result returned to the caller, otherwise an array with a
- // single $merge stage is returned. An output collection for the $merge stage is specified
- // in the 'target', and the $merge stage 'on' fields in the 'on' parameter.
- function makeMergePipeline(
- {target = "", initialStages = [], updatePipeline = [], on = "_id"} = {}) {
- return initialStages.concat([{
- $merge:
- {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "discard"}
- }]);
- }
+// A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
+// for the whenMatched mode. If the 'initialStages' array is specified, the $merge stage is
+// appended to this array and the result is returned to the caller; otherwise an array with a
+// single $merge stage is returned. The output collection for the $merge stage is specified
+// in the 'target' parameter, and the $merge stage 'on' fields in the 'on' parameter.
+function makeMergePipeline(
+ {target = "", initialStages = [], updatePipeline = [], on = "_id"} = {}) {
+ return initialStages.concat(
+ [{$merge: {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "discard"}}]);
+}
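+
+// For example (the values here are illustrative, not part of the tests below), the call
+//   makeMergePipeline({initialStages: [{$project: {_id: 0}}], target: "t", on: "a",
+//                      updatePipeline: [{$set: {x: 1}}]})
+// returns
+//   [{$project: {_id: 0}},
+//    {$merge: {into: "t", on: "a", whenMatched: [{$set: {x: 1}}], whenNotMatched: "discard"}}].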
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection. The merge operation should succeed and unmatched documents discarded.
- (function testMergeIfMatchingDocumentNotFound() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge operation should succeed, with unmatched documents discarded.
+(function testMergeIfMatchingDocumentNotFound() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 3, b: 3, x: 1, y: 2}]
- });
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 3, b: 3, x: 1, y: 2}]
+ });
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 1, y: 2}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 1, y: 2}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 1, y: 2}]
- });
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 1, y: 2}]});
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 2, b: 2, x: 1, y: 2}, {_id: 3, b: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 2, b: 2, x: 1, y: 2}, {_id: 3, b: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge does not insert a new document into the target collection if it was inserted
- // into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge does not insert a new document into the target collection if one was inserted
+// into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
+// Test $merge with an explicit 'on' field over one or more document fields which differ
+// from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a",
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a",
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "x", x: 1, y: 2},
- {_id: 4, a: 30, c: "y", x: 1, y: 2},
- {_id: 5, a: 40, c: "z"}
- ]
- });
+    // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "x", x: 1, y: 2},
+ {_id: 4, a: 30, c: "y", x: 1, y: 2},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: ["a", "b"],
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: ["a", "b"],
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", x: 1, y: 2},
- {_id: 4, a: 30, b: "c", c: "y", x: 1, y: 2},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
+    // The 'on' field contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", x: 1, y: 2},
+ {_id: 4, a: 30, b: "c", c: "y", x: 1, y: 2},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
- const pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a.b",
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ const pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a.b",
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, x: 1, y: 2},
- ]
- });
- })();
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 2, a: {b: "c"}, x: 1, y: 2},
+ ]
+ });
+})();
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- on: ["_id", "a"],
- target: target.getName(),
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ on: ["_id", "a"],
+ target: target.getName(),
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert([{_id: 1, c: 1}]));
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, c: 1}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- let : {x: "$a", y: "$b"},
- whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
- whenNotMatched: "discard"
- }
- }]));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}]});
- })();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ let : {x: "$a", y: "$b"},
+ whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
+ whenNotMatched: "discard"
+ }
+ }]));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}]});
+})();
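+
+// In the test above, the matched target document {_id: 1} gets z = $$x + $$y, i.e. the source
+// document's a + b = 1 + 1 = 2, while the unmatched source document {_id: 2} is discarded.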
}());
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_fail.js b/jstests/aggregation/sources/merge/mode_pipeline_fail.js
index 7d8d2337949..60c46ce8708 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_fail.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_fail.js
@@ -4,95 +4,89 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge:
- {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
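+
+// Note: with whenNotMatched: "fail", a source document without a match in the target fails the
+// aggregate with ErrorCodes.MergeStageNoMatchingDocument; as the first test below shows,
+// documents processed before the failure may already have been merged into the target.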
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 3, b: 3, x: 2}]
- });
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 3, b: 3, x: 2}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 2}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 2}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 2, b: 2, x: 2}, {_id: 3, b: 3, x: 2}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 2}, {_id: 2, b: 2, x: 2}, {_id: 3, b: 3, x: 2}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert([{_id: 1, c: 1}, {_id: 2, c: 2}]));
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, c: 1}, {_id: 2, c: 2}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- let : {x: "$a", y: "$b"},
- whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
- whenNotMatched: "fail"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, z: 2}, {_id: 2, c: 2, z: 4}]
- });
- })();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ let : {x: "$a", y: "$b"},
+ whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
+ whenNotMatched: "fail"
+ }
+ }]));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, c: 2, z: 4}]});
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_insert.js b/jstests/aggregation/sources/merge/mode_pipeline_insert.js
index b8f8374cfc9..df3414e0950 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_insert.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_insert.js
@@ -4,644 +4,624 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- // A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
- // for the whenMatched mode. If 'initialStages' array is specified, the $merge stage will be
- // appended to this array and the result returned to the caller, otherwise an array with a
- // single $merge stage is returned. An output collection for the $merge stage is specified
- // in the 'target', and the $merge stage 'on' fields in the 'on' parameter. The 'letVars'
- // parameter describes the 'let' argument of the $merge stage and holds variables that can be
- // referenced in the pipeline.
- function makeMergePipeline({target = "",
- initialStages = [],
- updatePipeline = [],
- on = "_id",
- letVars = undefined} = {}) {
- const baseObj = letVars !== undefined ? {let : letVars} : {};
- return initialStages.concat([{
- $merge: Object.assign(
- baseObj,
- {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "insert"})
- }]);
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+// A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
+// for the whenMatched mode. If the 'initialStages' array is specified, the $merge stage is
+// appended to this array and the result is returned to the caller; otherwise an array with a
+// single $merge stage is returned. The output collection for the $merge stage is specified
+// in the 'target' parameter, and the $merge stage 'on' fields in the 'on' parameter. The
+// 'letVars' parameter describes the 'let' argument of the $merge stage and holds variables
+// that can be referenced in the update pipeline.
+function makeMergePipeline(
+ {target = "", initialStages = [], updatePipeline = [], on = "_id", letVars = undefined} = {}) {
+ const baseObj = letVars !== undefined ? {let : letVars} : {};
+ return initialStages.concat([{
+ $merge: Object.assign(
+ baseObj, {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "insert"})
+ }]);
+}
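+
+// For example (illustrative values), passing 'letVars' adds a 'let' binding to the stage:
+//   makeMergePipeline({target: "t", letVars: {v: "$a"}, updatePipeline: [{$set: {x: "$$v"}}]})
+// returns
+//   [{$merge: {let: {v: "$a"}, into: "t", on: "_id", whenMatched: [{$set: {x: "$$v"}}],
+//              whenNotMatched: "insert"}}].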
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ ]
+ });
+})();
+
+// Test $merge inserts a document into an existing target collection if no matching document
+// is found.
+(function testMergeInsertsDocumentIfMatchNotFound() {
+ assert.commandWorked(target.deleteMany({}));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1, y: 2}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 2}]});
+})();
+
+// Test $merge updates an existing document in the target collection by applying a
+// pipeline-style update.
+(function testMergeUpdatesDocumentIfMatchFound() {
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$project: {x: {$add: ["$x", 1]}, y: {$add: ["$y", 2]}}}]
+ })));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2, y: 4}]});
+})();
+
+// Test $merge with various pipeline stages which are currently supported by the pipeline-style
+// update.
+(function testMergeWithSupportedUpdatePipelineStages() {
+ assert(source.drop());
+ assert(target.drop());
+
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+
+ // Test $addFields stage.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: {$add: ["$b", 1]}}}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: null}]});
+
+ // Test $project stage.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$project: {x: {$add: ["$b", 1]}}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2}, {_id: 2, x: null}]});
+
+ // Test $replaceWith stage.
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$replaceWith: "$c"}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]});
+
+ // Test $replaceRoot stage.
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$replaceRoot: {newRoot: "$c"}}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]});
+})();
+
+// Test $merge inserts a new document into the target collection if no matching document is
+// found, by applying a pipeline-style update with upsert=true semantics.
+(function testMergeInsertDocumentIfMatchNotFound() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1}));
+ assert.commandWorked(target.insert({_id: 2, a: 2}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1}, {_id: 2, a: 2}]});
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$project: {x: {$add: ["$x", 1]}, a: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ {_id: 2, a: 2},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
}
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
-
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- ]
- });
- })();
-
- // Test $merge inserts a document into an existing target collection if no matching document
- // is found.
- (function testMergeInsertsDocumentIfMatchNotFound() {
- assert.commandWorked(target.deleteMany({}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1, y: 2}}]})));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 2}]});
- })();
-
- // Test $merge updates an existing document in the target collection by applying a
- // pipeline-style update.
- (function testMergeUpdatesDocumentIfMatchFound() {
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$project: {x: {$add: ["$x", 1]}, y: {$add: ["$y", 2]}}}]
- })));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2, y: 4}]});
- })();
-
- // Test $merge with various pipeline stages which are currently supported by the pipeline-style
- // update.
- (function testMergeWithSupportedUpdatePipelineStages() {
- assert(source.drop());
- assert(target.drop());
-
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert({_id: 1, b: 1}));
-
- // Test $addFields stage.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$addFields: {x: {$add: ["$b", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: null}]});
-
- // Test $project stage.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: 1}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$project: {x: {$add: ["$b", 1]}}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, x: 2}, {_id: 2, x: null}]});
-
- // Test $replaceWith stage.
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$replaceWith: "$c"}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]
- });
-
- // Test $replaceRoot stage.
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$replaceRoot: {newRoot: "$c"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if not matching document is
- // found by applying a pipeline-style update with upsert=true semantics.
- (function testMergeInsertDocumentIfMatchNotFound() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1}));
- assert.commandWorked(target.insert({_id: 2, a: 2}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, x: 1}, {_id: 2, a: 2}]});
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$project: {x: {$add: ["$x", 1]}, a: 1}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- {_id: 2, a: 2},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 4, a: 2}));
- assert.commandWorked(target.insert([{_id: 1, x: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$project: {x: 1, a: 1}}]})));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- {_id: 2, a: 2},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
- (function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
- // The 'on' field contains a single document field.
- let error = assert.throws(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- on: "nonexistent",
- updatePipeline: [{$project: {x: 1, a: 1}}]
- })));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
- // The 'on' field contains multiple document fields.
- error = assert.throws(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- on: ["nonexistent1", "nonexistent2"],
- updatePipeline: [{$project: {x: 1, a: 1}}]
- })));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' field contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 30}]));
- assert.commandWorked(
- target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a",
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{a: 1, b: 1, z: 1}, {a: 2, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
- });
-
- // The 'on' field contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 4}, {_id: 3, a: 30, b: 2}]));
- assert.commandWorked(
- target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: ["a", "b"],
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected:
- [{a: 1, b: 1, z: 1}, {a: 2, b: 4, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, c: "y"}));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a.b",
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, z: 1},
- {_id: 2, a: {b: "c"}, c: "y", z: 1},
- {_id: 3, a: {b: 30}, z: 1}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- const pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "z",
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field. When the _id is missing, the $merge stage will create a new ObjectId in
- // its place before performing the insert or update.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- // There is a matching document in the target with {_id: 1}, but since we cannot match
- // it (no _id in projection), we just insert two new documents from the source
- // collection by applying a pipeline-style update.
- expected: [{b: "c"}, {z: 1}, {z: 1}]
- });
-
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- on: ["_id", "a"],
- target: target.getName(),
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, z: 1}, {a: 2, z: 1}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
- (function testMergePreservesIndexesAndOptions() {
- const validator = {z: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {z: 1}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, z: 1}, {_id: 2, z: 1}]});
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = makeMergePipeline({
- target: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force creation of the database and collection, then fall through to the test
- // below.
- assert.commandWorked(foreignTarget.insert({_id: 1}));
- }
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 2}));
+ assert.commandWorked(target.insert([{_id: 1, x: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error =
+ assert.throws(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$project: {x: 1, a: 1}}]})));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ {_id: 2, a: 2},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
+ // The 'on' field contains a single document field.
+ let error = assert.throws(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ on: "nonexistent",
+ updatePipeline: [{$project: {x: 1, a: 1}}]
+ })));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+ // The 'on' field contains multiple document fields.
+ error = assert.throws(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ on: ["nonexistent1", "nonexistent2"],
+ updatePipeline: [{$project: {x: 1, a: 1}}]
+ })));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
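+
+// For context, a sketch (not executed here) of what resolves the 51190/51183 errors
+// above: the 'on' fields must be backed by a unique index on the target collection, so
+// something like
+//
+//   assert.commandWorked(target.createIndex({nonexistent: 1}, {unique: true}));
+//
+// would satisfy this check, though the merge may still fail for other reasons, such as
+// missing 'on' values (see testMergeFailsIfOnFieldIsInvalid below).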
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+ // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by the unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 30}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a",
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{a: 1, b: 1, z: 1}, {a: 2, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
+ });
+
+ // The 'on' field contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 4}, {_id: 3, a: 30, b: 2}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: ["a", "b"],
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{a: 1, b: 1, z: 1}, {a: 2, b: 4, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
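+
+// For reference, a pipeline built by makeMergePipeline above presumably resolves to a
+// raw stage of this shape (a sketch; the exact defaults depend on the helper):
+//
+//   [{$project: {_id: 0}},
+//    {$merge: {
+//        into: target.getName(),
+//        on: ["a", "b"],
+//        whenMatched: [{$addFields: {z: 1}}],
+//        whenNotMatched: "insert"
+//    }}]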
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+ // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by the unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, c: "y"}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a.b",
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: {b: "b"}, z: 1},
+ {_id: 2, a: {b: "c"}, c: "y", z: 1},
+ {_id: 3, a: {b: 30}, z: 1}
+ ]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+ // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by the unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ const pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "z",
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51185);
+})();
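+
+// The array case above fails because an 'on' value must identify exactly one target
+// document through the unique index; an array value such as {z: [1, 2]} is multikey and
+// therefore ambiguous as a merge key.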
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field. When the _id is missing, the $merge stage will create a new ObjectId in
+// its place before performing the insert or update.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ // There is a matching document in the target with {_id: 1}, but since we cannot match
+ // it (no _id in projection), we just insert two new documents from the source
+ // collection by applying a pipeline-style update.
+ expected: [{b: "c"}, {z: 1}, {z: 1}]
+ });
+
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ on: ["_id", "a"],
+ target: target.getName(),
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, z: 1}, {a: 2, z: 1}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
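+
+// A quick way to observe the generated keys (a sketch, not part of this test): the
+// documents merged without an _id get a fresh ObjectId, e.g.
+//
+//   target.find({b: {$exists: false}}).toArray().forEach(
+//       doc => assert(doc._id instanceof ObjectId));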
+
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePreservesIndexesAndOptions() {
+ const validator = {z: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {z: 1}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, z: 1}, {_id: 2, z: 1}]});
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
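+
+// For reference, the listCollections reply consulted above has roughly this shape
+// (a sketch, trimmed to the fields this test reads):
+//
+//   {cursor: {firstBatch: [{name: "...", options: {validator: {z: {$gt: 0}}}}]}, ok: 1}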
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = makeMergePipeline({
+ target: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
-
- // Test that $merge can reference the default 'let' variable 'new' which holds the entire
- // document from the source collection.
- (function testMergeWithDefaultLetVariable() {
- assert(source.drop());
- assert(target.drop());
-
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 4}]});
- })();
-
- // Test that the default 'let' variable 'new' is not available once the 'let' argument to the
- // $merge stage is specified explicitly.
- (function testMergeCannotUseDefaultLetVariableIfLetIsSpecified() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- const error = assert.throws(() => source.aggregate(makeMergePipeline({
- letVars: {foo: "bar"},
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assert.commandFailedWithCode(error, 17276);
- })();
-
- // Test that $merge can accept an empty object holding no variables, in which case the
- // default 'new' variable is not available.
- (function testMergeWithEmptyLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // Can use an empty object.
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {letVars: {}, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
- });
-
- // No default variable 'new' is available.
- const error = assert.throws(() => source.aggregate(makeMergePipeline({
- letVars: {},
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assert.commandFailedWithCode(error, 17276);
- })();
-
- // Test that $merge can accept a null value as the 'let' argument and the default variable 'new'
- // can be used.
- // Note that this is not a desirable behaviour but rather a limitation in the IDL parser,
- // which cannot differentiate between an optional field explicitly specified as 'null' and
- // one not specified at all. In both cases it will treat the field as if it wasn't
- // specified. So, this test ensures that we're aware of this limitation. Once the
- // limitation is addressed in SERVER-41272, this test should be updated accordingly.
- (function testMergeWithNullLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // Can use a null 'let' argument.
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {letVars: null, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
- });
-
- // Can use the default 'new' variable.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: null,
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: 1, y: 1}, {_id: 2, x: 2, y: 2}]
- });
- })();
-
- // Test that constant values can be specified in the 'let' argument and referenced in the update
- // pipeline.
- (function testMergeWithConstantLetVariable() {
- // Non-array constants.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {a: 1, b: "foo", c: true},
- target: target.getName(),
- updatePipeline: [{$set: {x: "$$a", y: "$$b", z: "$$c"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected:
- [{_id: 1, c: 1, x: 1, y: "foo", z: true}, {_id: 2, x: 1, y: "foo", z: true}]
- });
-
- // Constant array.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {a: [1, 2, 3]},
- target: target.getName(),
- updatePipeline: [{$set: {x: {$arrayElemAt: ["$$a", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 2}]});
- })();
-
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithNonConstantLetVariables() {
- // Non-array fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a", y: "$b"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$add: ["$$x", "$$y"]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 4}]});
-
- // Array field with expressions in the pipeline.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: [1, 2, 3]}, {_id: 2, a: [4, 5, 6]}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$arrayElemAt: ["$$x", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 5}]});
-
- // Array field with expressions in the 'let' argument.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: {$arrayElemAt: ["$a", 2]}},
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$x"}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 3}, {_id: 2, z: 6}]});
- })();
-
- // Test that variables using a dotted path can be specified in the 'let' argument and
- // referenced in the update pipeline.
- (function testMergeWithDottedPathLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: {b: {c: 2}}}, {_id: 2, a: {b: {c: 3}}}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a.b.c"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$pow: ["$$x", 2]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 4}, {_id: 2, z: 9}]});
- })();
-
- // Test that 'let' variables refer to the computed document in the aggregation pipeline,
- // not to the original document in the source collection.
- (function testMergeLetVariablesHoldsComputedValues() {
- // Test the default 'new' variable.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // In the $group stage the computed total field 'a' intentionally uses the same name as
- // in the source collection, to make sure that even when a referenced field is present
- // in the source collection under the same name, the value for the variable is picked up
- // from the computed document.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$new"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, z: {_id: 1, a: 3}}, {_id: 2, z: {_id: 2, a: 3}}]
- });
-
- // Test custom 'let' variables.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 5}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
- letVars: {x: {$pow: ["$a", 2]}},
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$x"}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 49}, {_id: 2, z: 9}]});
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Force creation of the database and collection, then fall through to the test
+ // below.
+ assert.commandWorked(foreignTarget.insert({_id: 1}));
+ }
+
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
+
+// Test that $merge can reference the default 'let' variable 'new' which holds the entire
+// document from the source collection.
+(function testMergeWithDefaultLetVariable() {
+ assert(source.drop());
+ assert(target.drop());
+
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 4}]});
+})();
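+
+// A minimal sketch of the equivalent raw stage (assuming makeMergePipeline defaults to
+// inserting unmatched documents): no 'let' is needed because 'new' is bound implicitly.
+//
+//   {$merge: {
+//       into: target.getName(),
+//       whenMatched: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}],
+//       whenNotMatched: "insert"
+//   }}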
+
+// Test that the default 'let' variable 'new' is not available once the 'let' argument to the
+// $merge stage is specified explicitly.
+(function testMergeCannotUseDefaultLetVariableIfLetIsSpecified() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ const error = assert.throws(() => source.aggregate(makeMergePipeline({
+ letVars: {foo: "bar"},
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assert.commandFailedWithCode(error, 17276);
+})();
+
+// Test that $merge can accept an empty object holding no variables, in which case the
+// default 'new' variable is not available.
+(function testMergeWithEmptyLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // Can use an empty object.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {letVars: {}, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
+ });
+
+ // No default variable 'new' is available.
+ const error = assert.throws(() => source.aggregate(makeMergePipeline({
+ letVars: {},
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assert.commandFailedWithCode(error, 17276);
+})();
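+
+// If access to the source document is still needed alongside custom variables, 'new' can
+// be re-declared explicitly (a sketch, not exercised here; 'threshold' is hypothetical):
+//
+//   makeMergePipeline({
+//       letVars: {new: "$$ROOT", threshold: 5},
+//       target: target.getName(),
+//       updatePipeline: [{$set: {big: {$gt: ["$$new.a", "$$threshold"]}}}]
+//   })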
+
+// Test that $merge can accept a null value as the 'let' argument and the default variable 'new'
+// can be used.
+// Note that this is not a desirable behaviour but rather a limitation in the IDL parser,
+// which cannot differentiate between an optional field explicitly specified as 'null' and
+// one not specified at all. In both cases it will treat the field as if it wasn't
+// specified. So, this test ensures that we're aware of this limitation. Once the
+// limitation is addressed in SERVER-41272, this test should be updated accordingly.
+(function testMergeWithNullLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // Can use a null 'let' argument.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {letVars: null, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
+ });
+
+ // Can use the default 'new' variable.
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: null,
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 1}, {_id: 2, x: 2, y: 2}]});
+})();
+
+// Test that constant values can be specified in the 'let' argument and referenced in the update
+// pipeline.
+(function testMergeWithConstantLetVariable() {
+ // Non-array constants.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {a: 1, b: "foo", c: true},
+ target: target.getName(),
+ updatePipeline: [{$set: {x: "$$a", y: "$$b", z: "$$c"}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: 1, y: "foo", z: true}, {_id: 2, x: 1, y: "foo", z: true}]
+ });
+
+ // Constant array.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {a: [1, 2, 3]},
+ target: target.getName(),
+ updatePipeline: [{$set: {x: {$arrayElemAt: ["$$a", 1]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 2}]});
+})();
+
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithNonConstantLetVariables() {
+ // Non-array fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a", y: "$b"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$add: ["$$x", "$$y"]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 4}]});
+
+ // Array field with expressions in the pipeline.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: [1, 2, 3]}, {_id: 2, a: [4, 5, 6]}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$arrayElemAt: ["$$x", 1]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 5}]});
+
+ // Array field with expressions in the 'let' argument.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: {$arrayElemAt: ["$a", 2]}},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$x"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 3}, {_id: 2, z: 6}]});
+})();
+
+// Test that variables using a dotted path can be specified in the 'let' argument and
+// referenced in the update pipeline.
+(function testMergeWithDottedPathLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: {b: {c: 2}}}, {_id: 2, a: {b: {c: 3}}}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a.b.c"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$pow: ["$$x", 2]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 4}, {_id: 2, z: 9}]});
+})();
+
+// Test that 'let' variables refer to the computed document in the aggregation pipeline,
+// not to the original document in the source collection.
+(function testMergeLetVariablesHoldsComputedValues() {
+ // Test the default 'new' variable.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // In the $group stage the computed total field 'a' intentionally uses the same name as
+ // in the source collection, to make sure that even when a referenced field is present
+ // in the source collection under the same name, the value for the variable is picked up
+ // from the computed document.
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$new"}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, z: {_id: 1, a: 3}}, {_id: 2, z: {_id: 2, a: 3}}]
+ });
+
+ // Test custom 'let' variables.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 5}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
+ letVars: {x: {$pow: ["$a", 2]}},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$x"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 49}, {_id: 2, z: 9}]});
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_replace_discard.js b/jstests/aggregation/sources/merge/mode_replace_discard.js
index 5a0aa6eeb79..aba69a27d28 100644
--- a/jstests/aggregation/sources/merge/mode_replace_discard.js
+++ b/jstests/aggregation/sources/merge/mode_replace_discard.js
@@ -5,204 +5,193 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection. The merge operation should succeed and unmatched documents should be
- // discarded.
- (function testMergeIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
-
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
- })();
-
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]
- });
- })();
-
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test that $merge does not insert into the target collection a document that was newly
- // inserted into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' field contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
- });
-
- // The 'on' field contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection is sharded, because the implicitly created shard
- // key of {_id: 1} will not be covered by the unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge operation should succeed and unmatched documents should be
+// discarded.
+(function testMergeIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
+
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
+})();
+
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test that $merge does not insert into the target collection a document that was newly
+// inserted into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+ // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by the unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
+ });
+
+ // The 'on' field contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
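+
+// Note that Object.assign above only splices the 'on' specification into a copy of the
+// base stage, so the composed stage is equivalent to (sketch):
+//
+//   {$merge: {into: target.getName(), on: ["a", "b"],
+//             whenMatched: "replace", whenNotMatched: "discard"}}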
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+ // Skip this test if the collection is sharded, because the implicitly created shard
+ // key of {_id: 1} will not be covered by the unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, c: "y"},
- ]
- });
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ ]
+ });
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_replace_fail.js b/jstests/aggregation/sources/merge/mode_replace_fail.js
index 7afdb6579dc..19e74e58536 100644
--- a/jstests/aggregation/sources/merge/mode_replace_fail.js
+++ b/jstests/aggregation/sources/merge/mode_replace_fail.js
@@ -4,114 +4,111 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test that $merge uses an unordered batch update. When a mismatch is detected in a batch,
- // the error should be returned once the batch is processed, and no further documents should
- // be processed or updated.
- (function testMergeUnorderedBatchUpdate() {
- const maxBatchSize = 16 * 1024 * 1024; // 16MB
- const docSize = 1024 * 1024; // 1MB
- const numDocs = 20;
- const maxDocsInBatch = maxBatchSize / docSize;
+// Test that $merge uses an unordered batch update. When a mismatch is detected in a batch, the
+// error should be returned once the batch is processed, and no further documents should be
+// processed or updated.
+(function testMergeUnorderedBatchUpdate() {
+ const maxBatchSize = 16 * 1024 * 1024; // 16MB
+ const docSize = 1024 * 1024; // 1MB
+ const numDocs = 20;
+ const maxDocsInBatch = maxBatchSize / docSize;
- assert(source.drop());
- assert(target.drop());
+ assert(source.drop());
+ assert(target.drop());
- // Insert 'numDocs' documents of size 'docSize' into the source collection.
- generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
+ // Insert 'numDocs' documents of size 'docSize' into the source collection.
+ generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
- // Copy over documents from the source collection into the target and remove the 'padding'
- // field from the projection, so we can distinguish which documents have been modified by
- // the $merge stage.
- assert.doesNotThrow(
- () => source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
+ // Copy over documents from the source collection into the target and remove the 'padding'
+ // field from the projection, so we can distinguish which documents have been modified by
+ // the $merge stage.
+ assert.doesNotThrow(() =>
+ source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
- // Remove one document from the target collection so that $merge fails. This document should
- // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
- // field in ascending order. Since each document in the source collection is 1MB, and the
- // max batch size is 16MB, the first batch will contain documents with the _id in the range
- // of [0, 15].
- assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
+ // Remove one document from the target collection so that $merge fails. This document should
+ // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
+ // field in ascending order. Since each document in the source collection is 1MB, and the
+ // max batch size is 16MB, the first batch will contain documents with the _id in the range
+ // of [0, 15].
+ assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
- // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
- assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
+ // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
+ assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
- // Run the $merge pipeline and ensure it fails, as there is one document in the source
- // collection without a match in the target.
- const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ // Run the $merge pipeline and ensure it fails, as there is one document in the source
+ // collection without a match in the target.
+ const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- // There will be maxDocsInBatch documents in the batch, one without a match.
- const numDocsModified = maxDocsInBatch - 1;
- // All remaining documents except those in the first batch must be left unmodified.
- const numDocsUnmodified = numDocs - maxDocsInBatch;
- assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
- assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
- })();
+ // There will be maxDocsInBatch documents in the batch, one without a match.
+ const numDocsModified = maxDocsInBatch - 1;
+ // All remaining documents except those in the first batch must be left unmodified.
+ const numDocsUnmodified = numDocs - maxDocsInBatch;
+ assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
+ assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
+})();
}());
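A note on the batching arithmetic in testMergeUnorderedBatchUpdate above: with 1MB documents and a 16MB write batch, the first batch spans _id 0 through 15, so maxDocsInBatch - 1 = 15 documents are replaced before the missing match aborts the merge, and the remaining numDocs - maxDocsInBatch = 4 documents are never touched. A hedged sketch (hypothetical `src`/`tgt` names) of how such a failure surfaces in the shell:

// Sketch only, not part of the commit: whenNotMatched: "fail" raises
// ErrorCodes.MergeStageNoMatchingDocument once the offending batch is flushed.
const err = assert.throws(() => db.src.aggregate([
    {$sort: {_id: 1}},
    {$merge: {into: "tgt", whenMatched: "replace", whenNotMatched: "fail"}}
]));
assert.commandFailedWithCode(err, ErrorCodes.MergeStageNoMatchingDocument);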
diff --git a/jstests/aggregation/sources/merge/mode_replace_insert.js b/jstests/aggregation/sources/merge/mode_replace_insert.js
index bb1e407ea7b..e81ac857dc4 100644
--- a/jstests/aggregation/sources/merge/mode_replace_insert.js
+++ b/jstests/aggregation/sources/merge/mode_replace_insert.js
@@ -1,225 +1,214 @@
// Tests for the $merge stage with whenMatched: "replace" and whenNotMatched: "insert".
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const coll = db.merge_replace_insert;
- const outColl = db.merge_replace_insert_out;
- coll.drop();
- outColl.drop();
-
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
-
- // Test that a $merge with whenMatched: "replace" and whenNotMatched: "insert" mode will
- // default the "on" fields to "_id".
- coll.aggregate(
- [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}]);
- assert.eq(nDocs, outColl.find().itcount());
-
- // Test that $merge will update existing documents that match the "on" fields.
- const nDocsReplaced = 5;
- coll.aggregate([
- {$project: {_id: {$mod: ["$_id", nDocsReplaced]}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "_id"
- }
- }
- ]);
- assert.eq(nDocsReplaced, outColl.find({a: {$exists: false}}).itcount());
-
- // Test $merge with dotted path "on" fields.
- coll.drop();
- outColl.drop();
- assert.commandWorked(coll.insert([{_id: 0, a: {b: 1}}, {_id: 1, a: {b: 1}, c: 1}]));
- assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
- coll.aggregate([
- {$addFields: {_id: 0}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["_id", "a.b"]
- }
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const coll = db.merge_replace_insert;
+const outColl = db.merge_replace_insert_out;
+coll.drop();
+outColl.drop();
+
+const nDocs = 10;
+for (let i = 0; i < nDocs; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
+
+// Test that a $merge with whenMatched: "replace" and whenNotMatched: "insert" mode will
+// default the "on" fields to "_id".
+coll.aggregate(
+ [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}]);
+assert.eq(nDocs, outColl.find().itcount());
+
+// Test that $merge will update existing documents that match the "on" fields.
+const nDocsReplaced = 5;
+coll.aggregate([
+ {$project: {_id: {$mod: ["$_id", nDocsReplaced]}}},
+ {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "_id"}}
+]);
+assert.eq(nDocsReplaced, outColl.find({a: {$exists: false}}).itcount());
+
+// Test $merge with dotted path "on" fields.
+coll.drop();
+outColl.drop();
+assert.commandWorked(coll.insert([{_id: 0, a: {b: 1}}, {_id: 1, a: {b: 1}, c: 1}]));
+assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
+coll.aggregate([
+ {$addFields: {_id: 0}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "a.b"]
}
- ]);
- assert.eq([{_id: 0, a: {b: 1}, c: 1}], outColl.find().toArray());
-
- // Test that $merge will automatically generate a missing "_id" for the "on" field.
- coll.drop();
- outColl.drop();
- assert.commandWorked(coll.insert({field: "will be removed"}));
- assert.doesNotThrow(() => coll.aggregate([
- {$replaceRoot: {newRoot: {}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- }
+ }
+]);
+assert.eq([{_id: 0, a: {b: 1}, c: 1}], outColl.find().toArray());
+
+// Test that $merge will automatically generate a missing "_id" for the "on" field.
+coll.drop();
+outColl.drop();
+assert.commandWorked(coll.insert({field: "will be removed"}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$replaceRoot: {newRoot: {}}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
}
- ]));
- assert.eq(1, outColl.find({field: {$exists: false}}).itcount());
-
- // Test that $merge will automatically generate a missing "_id", and the aggregation succeeds
- // with multiple "on" fields.
- outColl.drop();
- assert.commandWorked(outColl.createIndex({name: -1, _id: 1}, {unique: true, sparse: true}));
- assert.doesNotThrow(() => coll.aggregate([
- {$replaceRoot: {newRoot: {name: "jungsoo"}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["_id", "name"]
- }
+ }
+]));
+assert.eq(1, outColl.find({field: {$exists: false}}).itcount());
+
+// Test that $merge will automatically generate a missing "_id", and the aggregation succeeds
+// with multiple "on" fields.
+outColl.drop();
+assert.commandWorked(outColl.createIndex({name: -1, _id: 1}, {unique: true, sparse: true}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$replaceRoot: {newRoot: {name: "jungsoo"}}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "name"]
}
- ]));
- assert.eq(1, outColl.find().itcount());
-
- // Test that we will not attempt to modify the _id of an existing document if the _id is
- // projected away but the "on" field does not involve _id.
- coll.drop();
- assert.commandWorked(coll.insert({name: "kyle"}));
- assert.commandWorked(coll.insert({name: "nick"}));
- outColl.drop();
- assert.commandWorked(outColl.createIndex({name: 1}, {unique: true}));
- assert.commandWorked(outColl.insert({_id: "must be unchanged", name: "kyle"}));
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {$addFields: {newField: 1}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "name"
- }
+ }
+]));
+assert.eq(1, outColl.find().itcount());
+
+// Test that we will not attempt to modify the _id of an existing document if the _id is
+// projected away but the "on" field does not involve _id.
+coll.drop();
+assert.commandWorked(coll.insert({name: "kyle"}));
+assert.commandWorked(coll.insert({name: "nick"}));
+outColl.drop();
+assert.commandWorked(outColl.createIndex({name: 1}, {unique: true}));
+assert.commandWorked(outColl.insert({_id: "must be unchanged", name: "kyle"}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {$addFields: {newField: 1}},
+ {
+ $merge:
+ {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "name"}
+ }
+]));
+const outResult = outColl.find().sort({name: 1}).toArray();
+const errmsgFn = () => tojson(outResult);
+assert.eq(2, outResult.length, errmsgFn);
+assert.docEq({_id: "must be unchanged", name: "kyle", newField: 1}, outResult[0], errmsgFn);
+assert.eq("nick", outResult[1].name, errmsgFn);
+assert.eq(1, outResult[1].newField, errmsgFn);
+assert.neq(null, outResult[1]._id, errmsgFn);
+
+// Test that $merge with a missing non-id "on" field fails.
+outColl.drop();
+assert.commandWorked(outColl.createIndex({missing: 1}, {unique: true}));
+assertErrorCode(
+ coll,
+ [{
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "missing"
}
- ]));
- const outResult = outColl.find().sort({name: 1}).toArray();
- const errmsgFn = () => tojson(outResult);
- assert.eq(2, outResult.length, errmsgFn);
- assert.docEq({_id: "must be unchanged", name: "kyle", newField: 1}, outResult[0], errmsgFn);
- assert.eq("nick", outResult[1].name, errmsgFn);
- assert.eq(1, outResult[1].newField, errmsgFn);
- assert.neq(null, outResult[1]._id, errmsgFn);
-
- // Test that $merge with a missing non-id "on" field fails.
- outColl.drop();
- assert.commandWorked(outColl.createIndex({missing: 1}, {unique: true}));
- assertErrorCode(
- coll,
- [{
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "missing"
- }
- }],
- 51132 // This attempt should fail because there's no field 'missing' in the document.
- );
-
- // Test that a replace fails to insert a document if it violates a unique index constraint. In
- // this example, $merge will attempt to insert multiple documents with {a: 0} which is not
- // allowed with the unique index on {a: 1}.
- coll.drop();
- assert.commandWorked(coll.insert([{_id: 0}, {_id: 1}]));
-
- outColl.drop();
- assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
- assertErrorCode(
- coll,
- [
- {$addFields: {a: 0}},
- {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
- ],
- ErrorCodes.DuplicateKey);
-
- // Test that $merge fails if any of the "on" fields contains an array.
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0, a: [1, 2]}));
- assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
- assertErrorCode(coll,
- [
- {$addFields: {_id: 0}},
- {
+ }],
+ 51132 // This attempt should fail because there's no field 'missing' in the document.
+);
+
+// Test that a replace fails to insert a document if it violates a unique index constraint. In
+// this example, $merge will attempt to insert multiple documents with {a: 0} which is not
+// allowed with the unique index on {a: 1}.
+coll.drop();
+assert.commandWorked(coll.insert([{_id: 0}, {_id: 1}]));
+
+outColl.drop();
+assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
+assertErrorCode(
+ coll,
+ [
+ {$addFields: {a: 0}},
+ {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DuplicateKey);
+
+// Test that $merge fails if any of the "on" fields contains an array.
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0, a: [1, 2]}));
+assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
+assertErrorCode(coll,
+ [
+ {$addFields: {_id: 0}},
+ {
$merge: {
into: outColl.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
on: ["_id", "a.b"]
}
- }
- ],
- 51132);
-
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0, a: [{b: 1}]}));
- assertErrorCode(coll,
- [
- {$addFields: {_id: 0}},
- {
+ }
+ ],
+ 51132);
+
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0, a: [{b: 1}]}));
+assertErrorCode(coll,
+ [
+ {$addFields: {_id: 0}},
+ {
$merge: {
into: outColl.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
on: ["_id", "a.b"]
}
- }
- ],
- 51132);
-
- // Tests for $merge to a database that differs from the aggregation database.
- const foreignDb = db.getSiblingDB("merge_replace_insert_foreign");
- const foreignTargetColl = foreignDb.out;
- const pipelineDifferentOutputDb = [{
- $merge: {
- into: {
- db: foreignDb.getName(),
- coll: foreignTargetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert",
- }
- }];
-
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0}));
- foreignDb.dropDatabase();
-
- if (!FixtureHelpers.isMongos(db)) {
- // Test that $merge implicitly creates a new database when the output collection's database
- // doesn't exist.
- coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().itcount(), 1);
- } else {
- // Implicit database creation is prohibited in a cluster.
- let error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force creation of the database and collection, then fall through to the test below.
- assert.commandWorked(foreignTargetColl.insert({_id: 0}));
+ }
+ ],
+ 51132);
+
+// Tests for $merge to a database that differs from the aggregation database.
+const foreignDb = db.getSiblingDB("merge_replace_insert_foreign");
+const foreignTargetColl = foreignDb.out;
+const pipelineDifferentOutputDb = [{
+ $merge: {
+ into: {
+ db: foreignDb.getName(),
+ coll: foreignTargetColl.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert",
}
+}];
+
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0}));
+foreignDb.dropDatabase();
- // Insert a new document into the source collection, then test that running the same
- // aggregation will replace existing documents in the foreign output collection when
- // applicable.
- coll.drop();
- const newDocuments = [{_id: 0, newField: 1}, {_id: 1}];
- assert.commandWorked(coll.insert(newDocuments));
+if (!FixtureHelpers.isMongos(db)) {
+ // Test that $merge implicitly creates a new database when the output collection's database
+ // doesn't exist.
coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().sort({_id: 1}).toArray(), newDocuments);
+ assert.eq(foreignTargetColl.find().itcount(), 1);
+} else {
+ // Implicit database creation is prohibited in a cluster.
+ let error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Force creation of the database and collection, then fall through to the test below.
+ assert.commandWorked(foreignTargetColl.insert({_id: 0}));
+}
+
+// Insert a new document into the source collection, then test that running the same
+// aggregation will replace existing documents in the foreign output collection when
+// applicable.
+coll.drop();
+const newDocuments = [{_id: 0, newField: 1}, {_id: 1}];
+assert.commandWorked(coll.insert(newDocuments));
+coll.aggregate(pipelineDifferentOutputDb);
+assert.eq(foreignTargetColl.find().sort({_id: 1}).toArray(), newDocuments);
}());
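For reference, the cross-database form of 'into' exercised at the end of this file takes a {db, coll} subdocument rather than a bare collection name. A minimal sketch, with hypothetical database and collection names:

// Sketch only: merge the pipeline output into a collection in another database.
// On a sharded cluster the target database must already exist; implicit
// database creation fails there with ErrorCodes.NamespaceNotFound.
db.src.aggregate([{
    $merge: {
        into: {db: "otherDB", coll: "out"},
        whenMatched: "replace",
        whenNotMatched: "insert"
    }
}]);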
diff --git a/jstests/aggregation/sources/merge/on_fields_validation.js b/jstests/aggregation/sources/merge/on_fields_validation.js
index 78c7dd4eb41..ae911689cdf 100644
--- a/jstests/aggregation/sources/merge/on_fields_validation.js
+++ b/jstests/aggregation/sources/merge/on_fields_validation.js
@@ -7,137 +7,133 @@
* @tags: [cannot_create_unique_index_when_using_hashed_shard_key]
*/
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
-
- const source = db.unique_key_validation_source;
- const target = db.unique_key_validation_target;
-
- [source, target].forEach(coll => coll.drop());
- assert.commandWorked(source.insert({_id: 0}));
-
- //
- // Tests for invalid "on" fields specifications.
- //
- function assertOnFieldsIsInvalid(onFields, expectedErrorCode) {
- const stage = {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: onFields
- }
- };
- assertErrorCode(source, stage, expectedErrorCode);
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+
+const source = db.unique_key_validation_source;
+const target = db.unique_key_validation_target;
+
+[source, target].forEach(coll => coll.drop());
+assert.commandWorked(source.insert({_id: 0}));
+
+//
+// Tests for invalid "on" fields specifications.
+//
+function assertOnFieldsIsInvalid(onFields, expectedErrorCode) {
+ const stage = {
+ $merge:
+ {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: onFields}
+ };
+ assertErrorCode(source, stage, expectedErrorCode);
+}
+
+// An "on" fields value that is neither an array nor a string is prohibited.
+assertOnFieldsIsInvalid(3.14, 51186);
+assertOnFieldsIsInvalid({_id: 1}, 51186);
+
+// Explicitly specifying an empty-array "on" fields is invalid.
+assertOnFieldsIsInvalid([], 51187);
+
+// The "on" fields array won't be accepted if any element is not a string.
+assertOnFieldsIsInvalid(["hashed", 1], 51134);
+assertOnFieldsIsInvalid([["_id"]], 51134);
+assertOnFieldsIsInvalid([null], 51134);
+assertOnFieldsIsInvalid([true, "a"], 51134);
+
+//
+// An error is raised if $merge encounters a document that is missing one or more of the
+// "on" fields.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({name: 1, team: -1}, {unique: true}));
+const pipelineNameTeam = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["name", "team"]
}
+}];
+
+// Missing both "name" and "team".
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// Missing "name".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, team: "query"}));
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// Missing "team".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas"}));
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// A document with both "name" and "team" will be accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas", team: "query"}));
+assert.doesNotThrow(() => source.aggregate(pipelineNameTeam));
+assert.eq(target.find().toArray(), [{_id: 0, name: "nicholas", team: "query"}]);
+
+//
+// An error is raised if $merge encounters a document where one of the "on" fields is a nullish
+// value.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({"song.artist": 1}, {unique: 1}));
+const pipelineSongDotArtist = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["song.artist"]
+ }
+}];
+
+// Explicit null "song" (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: null}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit undefined "song" (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: undefined}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit null "song.artist".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: null}}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit undefined "song.artist".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: undefined}}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// A valid "artist" will be accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: "Illenium"}}));
+assert.doesNotThrow(() => source.aggregate(pipelineSongDotArtist));
+assert.eq(target.find().toArray(), [{_id: 0, song: {artist: "Illenium"}}]);
+
+//
+// An error is raised if $merge encounters a document where one of the "on" fields (or a prefix
+// of an "on" field) is an array.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({"address.street": 1}, {unique: 1}));
+const pipelineAddressDotStreet = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["address.street"]
+ }
+}];
+
+// "address.street" is an array.
+assert.commandWorked(
+ source.update({_id: 0}, {_id: 0, address: {street: ["West 43rd St", "1633 Broadway"]}}));
+assertErrorCode(source, pipelineAddressDotStreet, 51185);
+
+// "address" is an array (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, address: [{street: "1633 Broadway"}]}));
+assertErrorCode(source, pipelineAddressDotStreet, 51132);
- // An "on" fields value that is neither an array nor a string is prohibited.
- assertOnFieldsIsInvalid(3.14, 51186);
- assertOnFieldsIsInvalid({_id: 1}, 51186);
-
- // Explicitly specifying an empty-array "on" fields is invalid.
- assertOnFieldsIsInvalid([], 51187);
-
- // The "on" fields array won't be accepted if any element is not a string.
- assertOnFieldsIsInvalid(["hashed", 1], 51134);
- assertOnFieldsIsInvalid([["_id"]], 51134);
- assertOnFieldsIsInvalid([null], 51134);
- assertOnFieldsIsInvalid([true, "a"], 51134);
-
- //
- // An error is raised if $merge encounters a document that is missing one or more of the
- // "on" fields.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({name: 1, team: -1}, {unique: true}));
- const pipelineNameTeam = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["name", "team"]
- }
- }];
-
- // Missing both "name" and "team".
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // Missing "name".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, team: "query"}));
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // Missing "team".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas"}));
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // A document with both "name" and "team" will be accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas", team: "query"}));
- assert.doesNotThrow(() => source.aggregate(pipelineNameTeam));
- assert.eq(target.find().toArray(), [{_id: 0, name: "nicholas", team: "query"}]);
-
- //
- // An error is raised if $merge encounters a document where one of the "on" fields is a nullish
- // value.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({"song.artist": 1}, {unique: 1}));
- const pipelineSongDotArtist = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["song.artist"]
- }
- }];
-
- // Explicit null "song" (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: null}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit undefined "song" (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: undefined}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit null "song.artist".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: null}}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit undefined "song.artist".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: undefined}}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // A valid "artist" will be accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: "Illenium"}}));
- assert.doesNotThrow(() => source.aggregate(pipelineSongDotArtist));
- assert.eq(target.find().toArray(), [{_id: 0, song: {artist: "Illenium"}}]);
-
- //
- // An error is raised if $merge encounters a document where one of the "on" fields (or a prefix
- // of an "on" field) is an array.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({"address.street": 1}, {unique: 1}));
- const pipelineAddressDotStreet = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["address.street"]
- }
- }];
-
- // "address.street" is an array.
- assert.commandWorked(
- source.update({_id: 0}, {_id: 0, address: {street: ["West 43rd St", "1633 Broadway"]}}));
- assertErrorCode(source, pipelineAddressDotStreet, 51185);
-
- // "address" is an array (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, address: [{street: "1633 Broadway"}]}));
- assertErrorCode(source, pipelineAddressDotStreet, 51132);
-
- // A scalar "address.street" is accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, address: {street: "1633 Broadway"}}));
- assert.doesNotThrow(() => source.aggregate(pipelineAddressDotStreet));
- assert.eq(target.find().toArray(), [{_id: 0, address: {street: "1633 Broadway"}}]);
+// A scalar "address.street" is accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, address: {street: "1633 Broadway"}}));
+assert.doesNotThrow(() => source.aggregate(pipelineAddressDotStreet));
+assert.eq(target.find().toArray(), [{_id: 0, address: {street: "1633 Broadway"}}]);
}());
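To keep the error codes in this file straight: 51186 rejects an 'on' value that is neither a string nor an array, 51187 an empty array, 51134 a non-string array element, 51132 a document that is missing (or has a nullish value for) an 'on' field, and 51185 an array-valued 'on' field. A hedged sketch of triggering the last two, reusing assertErrorCode from the extras and hypothetical collection names:

// Sketch only, not part of the commit. Assumes a unique index on the "on" field.
db.tgt.createIndex({k: 1}, {unique: true});
db.src.insert({_id: 0});  // no "k" field
const mergeOnK =
    [{$merge: {into: "tgt", whenMatched: "replace", whenNotMatched: "insert", on: "k"}}];
assertErrorCode(db.src, mergeOnK, 51132);  // missing "on" field
db.src.update({_id: 0}, {_id: 0, k: [1, 2]});
assertErrorCode(db.src, mergeOnK, 51185);  // array-valued "on" field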
diff --git a/jstests/aggregation/sources/merge/requires_unique_index.js b/jstests/aggregation/sources/merge/requires_unique_index.js
index a316d239321..38f8aa27f64 100644
--- a/jstests/aggregation/sources/merge/requires_unique_index.js
+++ b/jstests/aggregation/sources/merge/requires_unique_index.js
@@ -6,407 +6,365 @@
// manually. This is to avoid implicit creation and sharding of the $merge target collections in the
// passthrough suites.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode,
- // assertMergeFailsWithoutUniqueIndex.
+load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode,
+ // assertMergeFailsWithoutUniqueIndex.
- const testDB = db.getSiblingDB("merge_requires_unique_index");
- assert.commandWorked(testDB.dropDatabase());
+const testDB = db.getSiblingDB("merge_requires_unique_index");
+assert.commandWorked(testDB.dropDatabase());
- const source = testDB.source;
- assert.commandWorked(source.insert([{_id: 0, a: 0}, {_id: 1, a: 1}]));
+const source = testDB.source;
+assert.commandWorked(source.insert([{_id: 0, a: 0}, {_id: 1, a: 1}]));
- // Helper to drop a collection without using the shell helper, and thus avoiding the implicit
- // recreation in the passthrough suites.
- function dropWithoutImplicitRecreate(coll) {
- testDB.runCommand({drop: coll.getName()});
+// Helper to drop a collection without using the shell helper, and thus avoiding the implicit
+// recreation in the passthrough suites.
+function dropWithoutImplicitRecreate(coll) {
+ testDB.runCommand({drop: coll.getName()});
+}
+
+// Test that using {_id: 1} or not providing a unique key does not require any special indexes.
+(function simpleIdOnFieldsOrDefaultShouldNotRequireIndexes() {
+ function assertDefaultOnFieldsSucceeds({setupCallback, collName}) {
+ withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ // Skip the combination of merge modes which will fail depending on the contents of
+ // the source and target collection, as this will cause the assertion below to trip.
+ if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
+ return;
+
+ setupCallback();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: collName,
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]));
+ setupCallback();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: collName,
+ on: "_id",
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]));
+ });
}
- // Test that using {_id: 1} or not providing a unique key does not require any special indexes.
- (function simpleIdOnFieldsOrDefaultShouldNotRequireIndexes() {
- function assertDefaultOnFieldsSucceeds({setupCallback, collName}) {
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- // Skip the combination of merge modes which will fail depending on the contents of
- // the source and target collection, as this will cause the assertion below to trip.
- if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
- return;
-
- setupCallback();
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: collName,
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]));
- setupCallback();
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: collName,
- on: "_id",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]));
- });
+ // Test that using "_id" or not specifying "on" fields works for a collection which does
+ // not exist.
+ const non_existent = testDB.non_existent;
+ assertDefaultOnFieldsSucceeds({
+ setupCallback: () => dropWithoutImplicitRecreate(non_existent),
+ collName: non_existent.getName()
+ });
+
+ const unindexed = testDB.unindexed;
+ assertDefaultOnFieldsSucceeds({
+ setupCallback: () => {
+ dropWithoutImplicitRecreate(unindexed);
+ assert.commandWorked(testDB.runCommand({create: unindexed.getName()}));
+ },
+ collName: unindexed.getName()
+ });
+}());
+
+// Test that a unique index on the "on" fields can be used to satisfy the requirement.
+(function basicUniqueIndexWorks() {
+ const target = testDB.regular_unique;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+
+ assert.commandWorked(testDB.runCommand({create: target.getName()}));
+ assert.commandWorked(target.createIndex({a: 1, _id: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "a"]
}
+ }]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["a", "_id"]
+ }
+ }]));
+
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: ["_id", "a", "b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a"], target: target});
+
+ assert.commandWorked(target.dropIndex({a: 1, _id: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+
+ // Create a non-unique index and make sure that doesn't work.
+ assert.commandWorked(target.dropIndex({a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+}());
- // Test that using "_id" or not specifying "on" fields works for a collection which does
- // not exist.
- const non_existent = testDB.non_existent;
- assertDefaultOnFieldsSucceeds({
- setupCallback: () => dropWithoutImplicitRecreate(non_existent),
- collName: non_existent.getName()
- });
+// Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
+// is a partial index.
+(function uniqueButPartialShouldNotWork() {
+ const target = testDB.unique_but_partial_indexes;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+
+ assert.commandWorked(
+ target.createIndex({a: 1}, {unique: true, partialFilterExpression: {a: {$gte: 2}}}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+}());
- const unindexed = testDB.unindexed;
- assertDefaultOnFieldsSucceeds({
- setupCallback: () => {
- dropWithoutImplicitRecreate(unindexed);
- assert.commandWorked(testDB.runCommand({create: unindexed.getName()}));
- },
- collName: unindexed.getName()
- });
- }());
-
- // Test that a unique index on the "on" fields can be used to satisfy the requirement.
- (function basicUniqueIndexWorks() {
- const target = testDB.regular_unique;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
-
- assert.commandWorked(testDB.runCommand({create: target.getName()}));
- assert.commandWorked(target.createIndex({a: 1, _id: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
+// Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
+// has a different collation.
+(function indexMustMatchCollationOfOperation() {
+ const target = testDB.collation_indexes;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true, collation: {locale: "en_US"}}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en"}}});
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "simple"}}});
+ assertMergeFailsWithoutUniqueIndex({
+ source: source,
+ onFields: "a",
+ target: target,
+ options: {collation: {locale: "en_US", strength: 1}}
+ });
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "en_US"}}));
+
+ // Test that a non-unique index with the same collation cannot be used.
+ assert.commandWorked(target.dropIndex({a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {collation: {locale: "en_US"}}));
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en_US"}}});
+
+ // Test that a collection-default collation will be applied to the index, but not the
+ // $merge's update or insert into that collection. The pipeline will inherit a
+ // collection-default collation, but from the source collection, not the $merge's target
+ // collection.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(
+ testDB.runCommand({create: target.getName(), collation: {locale: "en_US"}}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({
+ source: source,
+ onFields: "a",
+ target: target,
+ });
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "en_US"}}));
+
+ // Test that when the source collection and foreign collection have the same default
+ // collation, a unique index on the foreign collection can be used.
+ const newSourceColl = testDB.new_source;
+ dropWithoutImplicitRecreate(newSourceColl);
+ assert.commandWorked(
+ testDB.runCommand({create: newSourceColl.getName(), collation: {locale: "en_US"}}));
+ assert.commandWorked(newSourceColl.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ // This aggregate does not specify a collation, but it should inherit the default collation
+ // from 'newSourceColl', and therefore the index on 'target' should be eligible for use
+ // since it has the same collation.
+ assert.doesNotThrow(() => newSourceColl.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+
+ // Test that an explicit "simple" collation can be used with an index without a collation.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "simple"}}));
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en_US"}}});
+}());
+
+// Test that a unique index which is not simply ascending/descending fields cannot be used for
+// the "on" fields.
+(function testSpecialIndexTypes() {
+ const target = testDB.special_index_types;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1, text: "text"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "text"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "text", target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({a: 1, geo: "2dsphere"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["geo", "a"], target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({geo: "2d"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "geo", target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(
+ target.createIndex({geo: "geoHaystack", a: 1}, {unique: true, bucketSize: 5}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["geo", "a"], target: target});
+
+ dropWithoutImplicitRecreate(target);
+ // MongoDB does not support unique hashed indexes.
+ assert.commandFailedWithCode(target.createIndex({a: "hashed"}, {unique: true}), 16764);
+ assert.commandWorked(target.createIndex({a: "hashed"}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+}());
+
+// Test that a unique index with dotted field names can be used.
+(function testDottedFieldNames() {
+ const target = testDB.dotted_field_paths;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1, "b.c.d": -1}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 1, a: 1, b: {c: {d: "x"}}}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: ["_id", "a"]
+ on: ["a", "b.c.d"]
}
- }]));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({"id.x": 1, "id.y": -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$group: {_id: {x: "$_id", y: "$a"}}},
+ {$project: {id: "$_id"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: ["a", "_id"]
+ on: ["id.x", "id.y"]
}
- }]));
-
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a", "b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a"], target: target});
-
- assert.commandWorked(target.dropIndex({a: 1, _id: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$group: {_id: {x: "$_id", y: "$a"}}},
+ {$project: {id: "$_id"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: ["id.y", "id.x"]
}
- }]));
-
- // Create a non-unique index and make sure that doesn't work.
- assert.commandWorked(target.dropIndex({a: 1}));
- assert.commandWorked(target.createIndex({a: 1}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
- }());
-
- // Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
- // is a partial index.
- (function uniqueButPartialShouldNotWork() {
- const target = testDB.unique_but_partial_indexes;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
-
- assert.commandWorked(
- target.createIndex({a: 1}, {unique: true, partialFilterExpression: {a: {$gte: 2}}}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
- }());
-
- // Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
- // has a different collation.
- (function indexMustMatchCollationOfOperation() {
- const target = testDB.collation_indexes;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
-
- assert.commandWorked(
- target.createIndex({a: 1}, {unique: true, collation: {locale: "en_US"}}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: "a", target: target, options: {collation: {locale: "en"}}});
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "simple"}}
- });
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US", strength: 1}}
- });
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "en_US"}}));
-
- // Test that a non-unique index with the same collation cannot be used.
- assert.commandWorked(target.dropIndex({a: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {collation: {locale: "en_US"}}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US"}}
- });
+ }
+ ]));
+
+ // Test that we cannot use arrays with a dotted path within a $merge.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({"b.c": 1}, {unique: true}));
+ withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ assert.commandFailedWithCode(testDB.runCommand({
+ aggregate: source.getName(),
+ pipeline: [
+ {$replaceRoot: {newRoot: {b: [{c: 1}, {c: 2}]}}},
+ {
+ $merge: {
+ into: target.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode,
+ on: "b.c"
+ }
+ }
+ ],
+ cursor: {}
+ }),
+ [50905, 51132]);
+ });
+}());
- // Test that a collection-default collation will be applied to the index, but not the
- // $merge's update or insert into that collection. The pipeline will inherit a
- // collection-default collation, but from the source collection, not the $merge's target
- // collection.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(
- testDB.runCommand({create: target.getName(), collation: {locale: "en_US"}}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- });
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "en_US"}}));
-
- // Test that when the source collection and foreign collection have the same default
- // collation, a unique index on the foreign collection can be used.
- const newSourceColl = testDB.new_source;
- dropWithoutImplicitRecreate(newSourceColl);
- assert.commandWorked(
- testDB.runCommand({create: newSourceColl.getName(), collation: {locale: "en_US"}}));
- assert.commandWorked(newSourceColl.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- // This aggregate does not specify a collation, but it should inherit the default collation
- // from 'newSourceColl', and therefore the index on 'target' should be eligible for use
- // since it has the same collation.
- assert.doesNotThrow(() => newSourceColl.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }]));
-
- // Test that an explicit "simple" collation can be used with an index without a collation.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "simple"}}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US"}}
- });
- }());
-
- // Test that a unique index which is not simply ascending/descending fields cannot be used for
- // the "on" fields.
- (function testSpecialIndexTypes() {
- const target = testDB.special_index_types;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1, text: "text"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "text"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "text", target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({a: 1, geo: "2dsphere"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["geo", "a"], target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({geo: "2d"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "geo", target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(
- target.createIndex({geo: "geoHaystack", a: 1}, {unique: true, bucketSize: 5}));
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["geo", "a"], target: target});
-
- dropWithoutImplicitRecreate(target);
- // MongoDB does not support unique hashed indexes.
- assert.commandFailedWithCode(target.createIndex({a: "hashed"}, {unique: true}), 16764);
- assert.commandWorked(target.createIndex({a: "hashed"}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- }());
-
- // Test that a unique index with dotted field names can be used.
- (function testDottedFieldNames() {
- const target = testDB.dotted_field_paths;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1, "b.c.d": -1}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 1, a: 1, b: {c: {d: "x"}}}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["a", "b.c.d"]
- }
- }
- ]));
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({"id.x": 1, "id.y": -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$group: {_id: {x: "$_id", y: "$a"}}},
- {$project: {id: "$_id"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["id.x", "id.y"]
- }
- }
- ]));
- assert.doesNotThrow(() => source.aggregate([
- {$group: {_id: {x: "$_id", y: "$a"}}},
- {$project: {id: "$_id"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["id.y", "id.x"]
- }
- }
- ]));
+// Test that a unique index that is multikey can still be used.
+(function testMultikeyIndex() {
+ const target = testDB.multikey_index;
+ dropWithoutImplicitRecreate(target);
- // Test that we cannot use arrays with a dotted path within a $merge.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({"b.c": 1}, {unique: true}));
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- assert.commandFailedWithCode(testDB.runCommand({
- aggregate: source.getName(),
- pipeline: [
- {$replaceRoot: {newRoot: {b: [{c: 1}, {c: 2}]}}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode,
- on: "b.c"
- }
- }
- ],
- cursor: {}
- }),
- [50905, 51132]);
- });
- }());
-
- // Test that a unique index that is multikey can still be used.
- (function testMultikeyIndex() {
- const target = testDB.multikey_index;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 1, "a.b": "$a"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a.b"
- }
- }
- ]));
- assert.commandWorked(target.insert({_id: "TARGET", a: [{b: "hi"}, {b: "hello"}]}));
- assert.commandWorked(source.insert({a: "hi", proofOfUpdate: "PROOF"}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0, proofOfUpdate: "PROOF", "a.b": "$a"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a.b"
- }
- }
- ]));
- assert.docEq(target.findOne({"a.b": "hi", proofOfUpdate: "PROOF"}),
- {_id: "TARGET", a: {b: "hi"}, proofOfUpdate: "PROOF"});
- }());
-
- // Test that a unique index that is sparse can still be used.
- (function testSparseIndex() {
- const target = testDB.multikey_index;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1}, {unique: true, sparse: true}));
- assert.doesNotThrow(() => source.aggregate([{
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 1, "a.b": "$a"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: "a.b"
}
- }]));
- assert.commandWorked(target.insert([{b: 1, c: 1}, {a: null}, {d: 4}]));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+ assert.commandWorked(target.insert({_id: "TARGET", a: [{b: "hi"}, {b: "hello"}]}));
+ assert.commandWorked(source.insert({a: "hi", proofOfUpdate: "PROOF"}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0, proofOfUpdate: "PROOF", "a.b": "$a"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: "a.b"
}
- }]));
- }());
+ }
+ ]));
+ assert.docEq(target.findOne({"a.b": "hi", proofOfUpdate: "PROOF"}),
+ {_id: "TARGET", a: {b: "hi"}, proofOfUpdate: "PROOF"});
+}());
+
+// Test that a unique index that is sparse can still be used.
+(function testSparseIndex() {
+ const target = testDB.multikey_index;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true, sparse: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+ assert.commandWorked(target.insert([{b: 1, c: 1}, {a: null}, {d: 4}]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+}());
}());
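
Taken together, these cases pin down the rule that the $merge "on" fields must be covered by a unique index on the target whose keys are plain ascending/descending fields. A minimal sketch of that rule, using invented collection names that are not part of this patch:

    // Hypothetical illustration of the "on" field rule; names are invented.
    const src = db.merge_on_sketch_src;
    const dst = db.merge_on_sketch_dst;
    src.drop();
    dst.drop();
    assert.commandWorked(src.insert({_id: 1, a: 1}));
    assert.commandWorked(dst.createIndex({a: 1}, {unique: true}));
    // Works: {a: 1} is a plain unique index covering the "on" field.
    assert.doesNotThrow(() => src.aggregate([
        {$merge: {into: dst.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}}
    ]));
    // Fails: there is no unique index on "b", so matches cannot be identified safely.
    assert.throws(() => src.aggregate([
        {$merge: {into: dst.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "b"}}
    ]));
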
diff --git a/jstests/aggregation/sources/merge/use_cases.js b/jstests/aggregation/sources/merge/use_cases.js
index 5bce8006656..6c1c71b9419 100644
--- a/jstests/aggregation/sources/merge/use_cases.js
+++ b/jstests/aggregation/sources/merge/use_cases.js
@@ -5,112 +5,109 @@
* @tags: [requires_sharding]
*/
(function() {
- "use strict";
+"use strict";
- Random.setRandomSeed();
+Random.setRandomSeed();
- const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
+const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
- const mongosDB = st.s.getDB("use_cases");
+const mongosDB = st.s.getDB("use_cases");
- const metricsColl = mongosDB["metrics"];
- const rollupColl = mongosDB["rollup"];
+const metricsColl = mongosDB["metrics"];
+const rollupColl = mongosDB["rollup"];
- function incDateByMinutes(date, mins) {
- return new Date(date.getTime() + (60 * 1000 * mins));
- }
-
- // Inserts 'nSamples' worth of random data starting at 'date'.
- function insertRandomData(coll, date, nSamples) {
- let ticksSum = 0, tempSum = 0;
- let bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < nSamples; i++) {
- const randTick = Random.randInt(100);
- const randTemp = Random.randInt(100);
- ticksSum += randTick;
- tempSum += randTemp;
- bulk.insert({
- _id: incDateByMinutes(date, i * (60 / nSamples)),
- ticks: randTick,
- temp: randTemp
- });
- }
- assert.commandWorked(bulk.execute());
+function incDateByMinutes(date, mins) {
+ return new Date(date.getTime() + (60 * 1000 * mins));
+}
- return [ticksSum, tempSum];
+// Inserts 'nSamples' worth of random data starting at 'date'.
+function insertRandomData(coll, date, nSamples) {
+ let ticksSum = 0, tempSum = 0;
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let i = 0; i < nSamples; i++) {
+ const randTick = Random.randInt(100);
+ const randTemp = Random.randInt(100);
+ ticksSum += randTick;
+ tempSum += randTemp;
+ bulk.insert(
+ {_id: incDateByMinutes(date, i * (60 / nSamples)), ticks: randTick, temp: randTemp});
}
-
-    // Runs a $merge aggregation from the metrics collection into the rollup collection, grouping
-    // by hour, summing the ticks, and averaging the temps.
- function runAggregate({startDate, whenMatchedMode, whenNotMatchedMode}) {
- metricsColl.aggregate([
- {$match: {_id: {$gte: startDate}}},
- {
- $group: {
- _id: {$dateToString: {format: "%Y-%m-%dT%H", date: "$_id"}},
- ticks: {$sum: "$ticks"},
- avgTemp: {$avg: "$temp"},
- }
- },
- {
- $merge: {
- into: {db: rollupColl.getDB().getName(), coll: rollupColl.getName()},
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
+ assert.commandWorked(bulk.execute());
+
+ return [ticksSum, tempSum];
+}
+
+// Runs a $merge aggregation from the metrics collection into the rollup collection, grouping by
+// hour, summing the ticks, and averaging the temps.
+function runAggregate({startDate, whenMatchedMode, whenNotMatchedMode}) {
+ metricsColl.aggregate([
+ {$match: {_id: {$gte: startDate}}},
+ {
+ $group: {
+ _id: {$dateToString: {format: "%Y-%m-%dT%H", date: "$_id"}},
+ ticks: {$sum: "$ticks"},
+ avgTemp: {$avg: "$temp"},
}
- ]);
- }
+ },
+ {
+ $merge: {
+ into: {db: rollupColl.getDB().getName(), coll: rollupColl.getName()},
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }
+ ]);
+}
- // Shard the metrics (source) collection on _id, which is the date of the sample.
- const hourZero = new ISODate("2018-08-15T00:00:00.000Z");
- const hourOne = incDateByMinutes(hourZero, 60);
- st.shardColl(metricsColl, {_id: 1}, {_id: hourOne}, {_id: hourOne}, mongosDB.getName());
+// Shard the metrics (source) collection on _id, which is the date of the sample.
+const hourZero = new ISODate("2018-08-15T00:00:00.000Z");
+const hourOne = incDateByMinutes(hourZero, 60);
+st.shardColl(metricsColl, {_id: 1}, {_id: hourOne}, {_id: hourOne}, mongosDB.getName());
- // Insert sample documents into the metrics collection.
- const samplesPerHour = 10;
- let [ticksSum, tempSum] = insertRandomData(metricsColl, hourZero, samplesPerHour);
+// Insert sample documents into the metrics collection.
+const samplesPerHour = 10;
+let [ticksSum, tempSum] = insertRandomData(metricsColl, hourZero, samplesPerHour);
- runAggregate({startDate: hourZero, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourZero, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- // Verify the results of the $merge in the rollup collection.
- let res = rollupColl.find().sort({_id: 1});
- assert.eq([{_id: "2018-08-15T00", ticks: ticksSum, avgTemp: tempSum / samplesPerHour}],
- res.toArray());
+// Verify the results of the $merge in the rollup collection.
+let res = rollupColl.find().sort({_id: 1});
+assert.eq([{_id: "2018-08-15T00", ticks: ticksSum, avgTemp: tempSum / samplesPerHour}],
+ res.toArray());
- // Insert another hour's worth of data, and verify that the $merge will append the result to the
- // output collection.
- [ticksSum, tempSum] = insertRandomData(metricsColl, hourOne, samplesPerHour);
+// Insert another hour's worth of data, and verify that the $merge will append the result to the
+// output collection.
+[ticksSum, tempSum] = insertRandomData(metricsColl, hourOne, samplesPerHour);
- runAggregate({startDate: hourOne, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourOne, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(2, res.length);
- assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(2, res.length);
+assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
-    // Whoops, there was a mistake in the last hour of data. Let's re-run the aggregation and
-    // update the rollup collection using "replace" mode.
- assert.commandWorked(metricsColl.update({_id: hourOne}, {$inc: {ticks: 10}}));
- ticksSum += 10;
+// Whoops, there was a mistake in the last hour of data. Let's re-run the aggregation and update
+// the rollup collection using "replace" mode.
+assert.commandWorked(metricsColl.update({_id: hourOne}, {$inc: {ticks: 10}}));
+ticksSum += 10;
- runAggregate({startDate: hourOne, whenMatchedMode: "replace", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourOne, whenMatchedMode: "replace", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(2, res.length);
- assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(2, res.length);
+assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
- // Shard the output collection into 2 chunks, and make the split hour 6.
- const hourSix = incDateByMinutes(hourZero, 60 * 6);
- st.shardColl(rollupColl, {_id: 1}, {_id: hourSix}, {_id: hourSix}, mongosDB.getName());
+// Shard the output collection into 2 chunks, and make the split hour 6.
+const hourSix = incDateByMinutes(hourZero, 60 * 6);
+st.shardColl(rollupColl, {_id: 1}, {_id: hourSix}, {_id: hourSix}, mongosDB.getName());
-    // Insert hour 6 data into the metrics collection and re-run the aggregation.
- [ticksSum, tempSum] = insertRandomData(metricsColl, hourSix, samplesPerHour);
+// Insert hour 6 data into the metrics collection and re-run the aggregation.
+[ticksSum, tempSum] = insertRandomData(metricsColl, hourSix, samplesPerHour);
- runAggregate({startDate: hourSix, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourSix, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(3, res.length, tojson(res));
- assert.eq(res[2], {_id: "2018-08-15T06", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(3, res.length, tojson(res));
+assert.eq(res[2], {_id: "2018-08-15T06", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
- st.stop();
+st.stop();
}());
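
The rollup pattern above hinges on the two whenMatched modes. A minimal sketch of the difference, using invented collection names that are not part of this patch ($merge joins on _id by default, which always has a unique index):

    // Hypothetical illustration; collection names are invented.
    const srcColl = db.rollup_sketch_src;
    const outColl = db.rollup_sketch_out;
    srcColl.drop();
    outColl.drop();
    assert.commandWorked(srcColl.insert({_id: "2018-08-15T01", ticks: 100}));
    assert.commandWorked(outColl.insert({_id: "2018-08-15T01", ticks: 90}));
    // whenMatched: "fail" treats an existing _id in the target as an error.
    assert.throws(() => srcColl.aggregate([
        {$merge: {into: outColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
    ]));
    // whenMatched: "replace" overwrites the stale rollup document in place.
    srcColl.aggregate([
        {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
    ]);
    assert.eq(100, outColl.findOne().ticks);
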
diff --git a/jstests/aggregation/sources/out/out_in_lookup_not_allowed.js b/jstests/aggregation/sources/out/out_in_lookup_not_allowed.js
index d81eaaaab83..9e97363233c 100644
--- a/jstests/aggregation/sources/out/out_in_lookup_not_allowed.js
+++ b/jstests/aggregation/sources/out/out_in_lookup_not_allowed.js
@@ -1,28 +1,28 @@
// Tests that $out cannot be used within a $lookup pipeline.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
- load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
- load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
- load("jstests/libs/fixture_helpers.js"); // For isSharded.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
+load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
+load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
+load("jstests/libs/fixture_helpers.js"); // For isSharded.
- const ERROR_CODE_OUT_BANNED_IN_LOOKUP = 51047;
- const ERROR_CODE_OUT_LAST_STAGE_ONLY = 40601;
- const coll = db.out_in_lookup_not_allowed;
- coll.drop();
+const ERROR_CODE_OUT_BANNED_IN_LOOKUP = 51047;
+const ERROR_CODE_OUT_LAST_STAGE_ONLY = 40601;
+const coll = db.out_in_lookup_not_allowed;
+coll.drop();
- const from = db.out_in_lookup_not_allowed_from;
- from.drop();
+const from = db.out_in_lookup_not_allowed_from;
+from.drop();
- if (FixtureHelpers.isSharded(from)) {
- setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
- "internalQueryAllowShardedLookup",
- true);
- }
+if (FixtureHelpers.isSharded(from)) {
+ setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
+ "internalQueryAllowShardedLookup",
+ true);
+}
- let pipeline = [
+let pipeline = [
{
$lookup: {
pipeline: [{$out: "out_collection"}],
@@ -31,9 +31,9 @@
}
},
];
- assertErrorCode(coll, pipeline, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
+assertErrorCode(coll, pipeline, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$project: {x: 0}}, {$out: "out_collection"}],
@@ -43,9 +43,9 @@
},
];
- assertErrorCode(coll, pipeline, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
+assertErrorCode(coll, pipeline, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$out: "out_collection"}, {$match: {x: true}}],
@@ -55,14 +55,14 @@
},
];
- // Pipeline will fail because $out is not last in the subpipeline.
- // Validation for $out in a $lookup's subpipeline occurs at a later point.
- assertErrorCode(coll, pipeline, ERROR_CODE_OUT_LAST_STAGE_ONLY);
+// Pipeline will fail because $out is not last in the subpipeline.
+// Validation for $out in a $lookup's subpipeline occurs at a later point.
+assertErrorCode(coll, pipeline, ERROR_CODE_OUT_LAST_STAGE_ONLY);
-    // Create a view that contains $out within $lookup.
- assertDropCollection(coll.getDB(), "view1");
+// Create a view that contains $out within $lookup.
+assertDropCollection(coll.getDB(), "view1");
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$out: "out_collection"}],
@@ -72,9 +72,9 @@
},
];
- // Pipeline will fail because $out is not allowed to exist within a $lookup.
- // Validation for $out in a view occurs at a later point.
- const cmdRes =
- coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
- assert.commandFailedWithCode(cmdRes, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
+// Pipeline will fail because $out is not allowed to exist within a $lookup.
+// Validation for $out in a view occurs at a later point.
+const cmdRes =
+ coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
+assert.commandFailedWithCode(cmdRes, ERROR_CODE_OUT_BANNED_IN_LOOKUP);
}());
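
The essence of the test is that the server rejects the pipeline at validation time, before touching any documents. A minimal sketch of the failing shape, with invented collection names that are not part of this patch:

    // Hypothetical illustration; names are invented. The command fails with
    // 51047 (ERROR_CODE_OUT_BANNED_IN_LOOKUP above) even on empty collections.
    assert.commandFailedWithCode(db.runCommand({
        aggregate: "lookup_out_sketch",
        pipeline: [{
            $lookup: {
                from: "lookup_out_sketch_from",
                pipeline: [{$out: "somewhere"}],
                as: "joined"
            }
        }],
        cursor: {}
    }),
                                 51047);
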
diff --git a/jstests/aggregation/sources/out/replace_collection.js b/jstests/aggregation/sources/out/replace_collection.js
index 63204485a7c..b614e5bc486 100644
--- a/jstests/aggregation/sources/out/replace_collection.js
+++ b/jstests/aggregation/sources/out/replace_collection.js
@@ -6,69 +6,69 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const coll = db.source;
- coll.drop();
+const coll = db.source;
+coll.drop();
- const targetColl = db.target;
- targetColl.drop();
+const targetColl = db.target;
+targetColl.drop();
- const pipeline = [{$out: targetColl.getName()}];
+const pipeline = [{$out: targetColl.getName()}];
- //
- // Test $out with a non-existent output collection.
- //
- assert.commandWorked(coll.insert({_id: 0}));
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
+//
+// Test $out with a non-existent output collection.
+//
+assert.commandWorked(coll.insert({_id: 0}));
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
- //
- // Test $out with an existing output collection.
- //
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
+//
+// Test $out with an existing output collection.
+//
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
- //
- // Test that $out will preserve the indexes and options of the output collection.
- //
- targetColl.drop();
- assert.commandWorked(db.createCollection(targetColl.getName(), {validator: {a: {$gt: 0}}}));
- assert.commandWorked(targetColl.createIndex({a: 1}));
+//
+// Test that $out will preserve the indexes and options of the output collection.
+//
+targetColl.drop();
+assert.commandWorked(db.createCollection(targetColl.getName(), {validator: {a: {$gt: 0}}}));
+assert.commandWorked(targetColl.createIndex({a: 1}));
- coll.drop();
- assert.commandWorked(coll.insert({a: 1}));
+coll.drop();
+assert.commandWorked(coll.insert({a: 1}));
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
- assert.eq(2, targetColl.getIndexes().length);
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+assert.eq(2, targetColl.getIndexes().length);
- const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
- assert.commandWorked(listColl);
- assert.eq({a: {$gt: 0}}, listColl.cursor.firstBatch[0].options["validator"]);
+const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
+assert.commandWorked(listColl);
+assert.eq({a: {$gt: 0}}, listColl.cursor.firstBatch[0].options["validator"]);
- //
- // Test that $out fails if it violates a unique index constraint.
- //
- coll.drop();
- assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
- targetColl.drop();
- assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
+//
+// Test that $out fails if it violates a unique index constraint.
+//
+coll.drop();
+assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
+targetColl.drop();
+assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
- assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
+assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
-    // Rerun a similar test, except populate the target collection with a document that conflicts
-    // with one produced by the pipeline. In this case, there is no unique key violation since the
-    // target collection will be dropped before renaming the source collection.
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0, a: 0}));
- targetColl.remove({});
- assert.commandWorked(targetColl.insert({_id: 1, a: 0}));
+// Rerun a similar test, except populate the target collection with a document that conflicts
+// with one produced by the pipeline. In this case, there is no unique key violation since the
+// target collection will be dropped before renaming the source collection.
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0, a: 0}));
+targetColl.remove({});
+assert.commandWorked(targetColl.insert({_id: 1, a: 0}));
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
- assert.eq(2, targetColl.getIndexes().length);
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+assert.eq(2, targetColl.getIndexes().length);
}());
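
The behavior under test is that $out swaps in the new contents while keeping the target's metadata. A minimal sketch, with invented collection names that are not part of this patch:

    // Hypothetical illustration; names are invented.
    const outSrc = db.out_sketch_src;
    const outTarget = db.out_sketch_target;
    outSrc.drop();
    outTarget.drop();
    assert.commandWorked(
        db.createCollection(outTarget.getName(), {validator: {a: {$gt: 0}}}));
    assert.commandWorked(outTarget.createIndex({a: 1}));
    assert.commandWorked(outSrc.insert({a: 5}));
    outSrc.aggregate([{$out: outTarget.getName()}]);
    // The target keeps both its indexes (_id plus {a: 1}) and its validator.
    assert.eq(2, outTarget.getIndexes().length);
    assert.eq(1, outTarget.find({a: 5}).itcount());
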
diff --git a/jstests/aggregation/sources/out/required_last_position.js b/jstests/aggregation/sources/out/required_last_position.js
index e3a861aaf20..97ba01e7b04 100644
--- a/jstests/aggregation/sources/out/required_last_position.js
+++ b/jstests/aggregation/sources/out/required_last_position.js
@@ -1,17 +1,16 @@
// Tests that $out can only be used as the last stage.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const coll = db.require_out_last;
- coll.drop();
+const coll = db.require_out_last;
+coll.drop();
- // Test that $out is allowed as the last (and only) stage.
- assert.doesNotThrow(() => coll.aggregate([{$out: "out_collection"}]));
+// Test that $out is allowed as the last (and only) stage.
+assert.doesNotThrow(() => coll.aggregate([{$out: "out_collection"}]));
- // Test that $out is not allowed to have a stage after it.
- assertErrorCode(coll, [{$out: "out_collection"}, {$match: {x: true}}], 40601);
- assertErrorCode(
- coll, [{$project: {x: 0}}, {$out: "out_collection"}, {$match: {x: true}}], 40601);
+// Test that $out is not allowed to have a stage after it.
+assertErrorCode(coll, [{$out: "out_collection"}, {$match: {x: true}}], 40601);
+assertErrorCode(coll, [{$project: {x: 0}}, {$out: "out_collection"}, {$match: {x: true}}], 40601);
}());
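
A minimal sketch of the invariant, with an invented collection name that is not part of this patch; error 40601 is raised during validation regardless of the collection's contents:

    // Hypothetical illustration; the collection name is invented.
    assert.commandFailedWithCode(db.runCommand({
        aggregate: "out_last_sketch",
        pipeline: [{$out: "out_collection"}, {$limit: 1}],
        cursor: {}
    }),
                                 40601);
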
diff --git a/jstests/aggregation/sources/project/remove_redundant_projects.js b/jstests/aggregation/sources/project/remove_redundant_projects.js
index e3c7af08573..f1a21264c7e 100644
--- a/jstests/aggregation/sources/project/remove_redundant_projects.js
+++ b/jstests/aggregation/sources/project/remove_redundant_projects.js
@@ -2,150 +2,151 @@
// pipeline that can be covered by a normal query.
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq.
- load('jstests/libs/analyze_plan.js'); // For planHasStage().
-
- let coll = db.remove_redundant_projects;
- coll.drop();
-
- assert.writeOK(coll.insert({_id: {a: 1, b: 1}, a: 1, c: {d: 1}, e: ['elem1']}));
-
- let indexSpec = {a: 1, 'c.d': 1, 'e.0': 1};
-
- /**
-     * Helper to test that for a given pipeline, the same results are returned whether or not an
-     * index is present. Also tests whether the $project is pushed down to the query system
-     * ('expectProjectToCoalesce') and that the corresponding $project stage ('removedProjectStage')
-     * no longer appears in the explain output.
- */
- function assertResultsMatch({pipeline = [],
- expectProjectToCoalesce = false,
- removedProjectStage = null,
- index = indexSpec,
- pipelineOptimizedAway = false} = {}) {
- // Add a match stage to ensure index scans are considered for planning (workaround for
- // SERVER-20066).
- pipeline = [{$match: {a: {$gte: 0}}}].concat(pipeline);
-
- // Once with an index.
- assert.commandWorked(coll.createIndex(index));
- let explain = coll.explain().aggregate(pipeline);
- let resultsWithIndex = coll.aggregate(pipeline).toArray();
-
- // Projection does not get pushed down when sharding filter is used.
- if (!explain.hasOwnProperty("shards")) {
- let result;
-
- if (pipelineOptimizedAway) {
- assert(isQueryPlan(explain));
- result = explain.queryPlanner.winningPlan;
- } else {
- assert(isAggregationPlan(explain));
- result = explain.stages[0].$cursor.queryPlanner.winningPlan;
- }
-
- // Check that $project uses the query system.
- assert.eq(expectProjectToCoalesce,
- planHasStage(db, result, "PROJECTION_DEFAULT") ||
- planHasStage(db, result, "PROJECTION_COVERED") ||
- planHasStage(db, result, "PROJECTION_SIMPLE"));
-
- if (!pipelineOptimizedAway) {
- // Check that $project was removed from pipeline and pushed to the query system.
- explain.stages.forEach(function(stage) {
- if (stage.hasOwnProperty("$project"))
- assert.neq(removedProjectStage, stage["$project"]);
- });
- }
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For orderedArrayEq.
+load('jstests/libs/analyze_plan.js'); // For planHasStage().
+
+let coll = db.remove_redundant_projects;
+coll.drop();
+
+assert.writeOK(coll.insert({_id: {a: 1, b: 1}, a: 1, c: {d: 1}, e: ['elem1']}));
+
+let indexSpec = {a: 1, 'c.d': 1, 'e.0': 1};
+
+/**
+ * Helper to test that for a given pipeline, the same results are returned whether or not an
+ * index is present. Also tests whether the $project is pushed down to the query system
+ * ('expectProjectToCoalesce') and that the corresponding $project stage ('removedProjectStage')
+ * no longer appears in the explain output.
+ */
+function assertResultsMatch({
+ pipeline = [],
+ expectProjectToCoalesce = false,
+ removedProjectStage = null,
+ index = indexSpec,
+ pipelineOptimizedAway = false
+} = {}) {
+ // Add a match stage to ensure index scans are considered for planning (workaround for
+ // SERVER-20066).
+ pipeline = [{$match: {a: {$gte: 0}}}].concat(pipeline);
+
+ // Once with an index.
+ assert.commandWorked(coll.createIndex(index));
+ let explain = coll.explain().aggregate(pipeline);
+ let resultsWithIndex = coll.aggregate(pipeline).toArray();
+
+ // Projection does not get pushed down when sharding filter is used.
+ if (!explain.hasOwnProperty("shards")) {
+ let result;
+
+ if (pipelineOptimizedAway) {
+ assert(isQueryPlan(explain));
+ result = explain.queryPlanner.winningPlan;
+ } else {
+ assert(isAggregationPlan(explain));
+ result = explain.stages[0].$cursor.queryPlanner.winningPlan;
}
- // Again without an index.
- assert.commandWorked(coll.dropIndex(index));
- let resultsWithoutIndex = coll.aggregate(pipeline).toArray();
-
- assert(orderedArrayEq(resultsWithIndex, resultsWithoutIndex));
+ // Check that $project uses the query system.
+ assert.eq(expectProjectToCoalesce,
+ planHasStage(db, result, "PROJECTION_DEFAULT") ||
+ planHasStage(db, result, "PROJECTION_COVERED") ||
+ planHasStage(db, result, "PROJECTION_SIMPLE"));
+
+ if (!pipelineOptimizedAway) {
+ // Check that $project was removed from pipeline and pushed to the query system.
+ explain.stages.forEach(function(stage) {
+ if (stage.hasOwnProperty("$project"))
+ assert.neq(removedProjectStage, stage["$project"]);
+ });
+ }
}
- // Test that covered projections correctly use the query system for projection and the $project
- // stage is removed from the pipeline.
- assertResultsMatch({
- pipeline: [{$project: {_id: 0, a: 1}}],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, a: 1},
- pipelineOptimizedAway: true
- });
- assertResultsMatch({
- pipeline: [{$project: {_id: 0, a: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, a: 1}
- });
- assertResultsMatch({
- pipeline: [{$sort: {a: -1}}, {$project: {_id: 0, a: 1}}],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, a: 1},
- pipelineOptimizedAway: true
- });
- assertResultsMatch({
- pipeline: [
- {$sort: {a: 1, 'c.d': 1}},
- {$project: {_id: 0, a: 1}},
- {$group: {_id: "$a", arr: {$push: "$a"}}}
- ],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, a: 1}
- });
- assertResultsMatch({
- pipeline: [{$project: {_id: 0, c: {d: 1}}}],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, c: {d: 1}},
- pipelineOptimizedAway: true
- });
-
-    // Test that projections with renamed fields are not removed from the pipeline; however, an
-    // inclusion projection is still pushed to the query system.
- assertResultsMatch({pipeline: [{$project: {_id: 0, f: "$a"}}], expectProjectToCoalesce: true});
- assertResultsMatch(
- {pipeline: [{$project: {_id: 0, a: 1, f: "$a"}}], expectProjectToCoalesce: true});
-
- // Test that uncovered projections include the $project stage in the pipeline.
- assertResultsMatch(
- {pipeline: [{$sort: {a: 1}}, {$project: {_id: 1, b: 1}}], expectProjectToCoalesce: false});
- assertResultsMatch({
- pipeline:
- [{$sort: {a: 1}}, {$group: {_id: "$_id", arr: {$push: "$a"}}}, {$project: {arr: 1}}],
- expectProjectToCoalesce: false
- });
-
- // Test that projections with computed fields are kept in the pipeline.
- assertResultsMatch(
- {pipeline: [{$project: {computedField: {$sum: "$a"}}}], expectProjectToCoalesce: false});
- assertResultsMatch({pipeline: [{$project: {a: ["$a", "$b"]}}], expectProjectToCoalesce: false});
- assertResultsMatch({
- pipeline: [{
- $project:
- {e: {$filter: {input: "$e", as: "item", cond: {"$eq": ["$$item", "elem0"]}}}}
- }],
- expectProjectToCoalesce: false
- });
-
- // Test that only the first projection is removed from the pipeline.
- assertResultsMatch({
- pipeline: [
- {$project: {_id: 0, a: 1}},
- {$group: {_id: "$a", arr: {$push: "$a"}, a: {$sum: "$a"}}},
- {$project: {_id: 0}}
- ],
- expectProjectToCoalesce: true,
- removedProjectStage: {_id: 0, a: 1}
- });
-
-    // Test that projections on _id with nested fields are not removed from the pipeline. Due to
-    // SERVER-7502, the dependency analysis does not generate a covered projection for nested
-    // fields in _id and thus we cannot remove the stage.
- indexSpec = {'_id.a': 1, a: 1};
- assertResultsMatch(
- {pipeline: [{$project: {'_id.a': 1}}], expectProjectToCoalesce: false, index: indexSpec});
-
+ // Again without an index.
+ assert.commandWorked(coll.dropIndex(index));
+ let resultsWithoutIndex = coll.aggregate(pipeline).toArray();
+
+ assert(orderedArrayEq(resultsWithIndex, resultsWithoutIndex));
+}
+
+// Test that covered projections correctly use the query system for projection and the $project
+// stage is removed from the pipeline.
+assertResultsMatch({
+ pipeline: [{$project: {_id: 0, a: 1}}],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, a: 1},
+ pipelineOptimizedAway: true
+});
+assertResultsMatch({
+ pipeline: [{$project: {_id: 0, a: 1}}, {$group: {_id: null, a: {$sum: "$a"}}}],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, a: 1}
+});
+assertResultsMatch({
+ pipeline: [{$sort: {a: -1}}, {$project: {_id: 0, a: 1}}],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, a: 1},
+ pipelineOptimizedAway: true
+});
+assertResultsMatch({
+ pipeline: [
+ {$sort: {a: 1, 'c.d': 1}},
+ {$project: {_id: 0, a: 1}},
+ {$group: {_id: "$a", arr: {$push: "$a"}}}
+ ],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, a: 1}
+});
+assertResultsMatch({
+ pipeline: [{$project: {_id: 0, c: {d: 1}}}],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, c: {d: 1}},
+ pipelineOptimizedAway: true
+});
+
+// Test that projections with renamed fields are not removed from the pipeline; however, an
+// inclusion projection is still pushed to the query system.
+assertResultsMatch({pipeline: [{$project: {_id: 0, f: "$a"}}], expectProjectToCoalesce: true});
+assertResultsMatch(
+ {pipeline: [{$project: {_id: 0, a: 1, f: "$a"}}], expectProjectToCoalesce: true});
+
+// Test that uncovered projections include the $project stage in the pipeline.
+assertResultsMatch(
+ {pipeline: [{$sort: {a: 1}}, {$project: {_id: 1, b: 1}}], expectProjectToCoalesce: false});
+assertResultsMatch({
+ pipeline: [{$sort: {a: 1}}, {$group: {_id: "$_id", arr: {$push: "$a"}}}, {$project: {arr: 1}}],
+ expectProjectToCoalesce: false
+});
+
+// Test that projections with computed fields are kept in the pipeline.
+assertResultsMatch(
+ {pipeline: [{$project: {computedField: {$sum: "$a"}}}], expectProjectToCoalesce: false});
+assertResultsMatch({pipeline: [{$project: {a: ["$a", "$b"]}}], expectProjectToCoalesce: false});
+assertResultsMatch({
+ pipeline:
+ [{$project: {e: {$filter: {input: "$e", as: "item", cond: {"$eq": ["$$item", "elem0"]}}}}}],
+ expectProjectToCoalesce: false
+});
+
+// Test that only the first projection is removed from the pipeline.
+assertResultsMatch({
+ pipeline: [
+ {$project: {_id: 0, a: 1}},
+ {$group: {_id: "$a", arr: {$push: "$a"}, a: {$sum: "$a"}}},
+ {$project: {_id: 0}}
+ ],
+ expectProjectToCoalesce: true,
+ removedProjectStage: {_id: 0, a: 1}
+});
+
+// Test that projections on _id with nested fields are not removed from the pipeline. Due to
+// SERVER-7502, the dependency analysis does not generate a covered projection for nested
+// fields in _id and thus we cannot remove the stage.
+indexSpec = {
+ '_id.a': 1,
+ a: 1
+};
+assertResultsMatch(
+ {pipeline: [{$project: {'_id.a': 1}}], expectProjectToCoalesce: false, index: indexSpec});
}());
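
The assertions above boil down to reading the explain output. A minimal sketch, with an invented collection name that is not part of this patch, of what a coalesced projection looks like:

    // Hypothetical illustration; the collection name is invented. With a
    // covering index, a leading $project is answered by the query layer, so
    // explain reports a PROJECTION_COVERED plan stage rather than a $project
    // pipeline stage.
    const pushdown = db.pushdown_sketch;
    pushdown.drop();
    assert.commandWorked(pushdown.insert({a: 1}));
    assert.commandWorked(pushdown.createIndex({a: 1}));
    const explainOut = pushdown.explain().aggregate(
        [{$match: {a: {$gte: 0}}}, {$project: {_id: 0, a: 1}}]);
    printjson(explainOut);  // Inspect queryPlanner.winningPlan for the stage.
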
diff --git a/jstests/aggregation/sources/redact/collation_redact.js b/jstests/aggregation/sources/redact/collation_redact.js
index 36304e9a7f2..7ff1e1ad4f1 100644
--- a/jstests/aggregation/sources/redact/collation_redact.js
+++ b/jstests/aggregation/sources/redact/collation_redact.js
@@ -3,38 +3,37 @@
// Test that the $redact stage respects the collation.
(function() {
- "use strict";
-
- var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
-
- var coll = db.collation_redact;
- coll.drop();
- assert.writeOK(coll.insert({a: "a"}));
-
- // Test that $redact respects an explicit collation. Since the top-level of the document gets
- // pruned, we end up redacting the entire document and returning no results.
- assert.eq(0,
- coll.aggregate([{$redact: {$cond: [{$eq: ["A", "a"]}, "$$PRUNE", "$$KEEP"]}}],
- caseInsensitive)
- .itcount());
-
- coll.drop();
- assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
- assert.writeOK(coll.insert({a: "a"}));
-
- // Test that $redact respects the inherited collation. Since the top-level of the document gets
- // pruned, we end up redacting the entire document and returning no results.
- assert.eq(
- 0,
- coll.aggregate([{$redact: {$cond: [{$eq: ["A", "a"]}, "$$PRUNE", "$$KEEP"]}}]).itcount());
-
- // Test that a $match which can be optimized to be pushed before the $redact respects the
- // collation.
- assert.eq(1, coll.aggregate([{$redact: "$$KEEP"}, {$match: {a: "A"}}]).itcount());
-
-    // Comparisons to the internal constants bound to the $$KEEP, $$PRUNE, and $$DESCEND variables
-    // should not respect the collation.
- assert.throws(() => coll.aggregate([{$redact: "KEEP"}], caseInsensitive));
- assert.throws(() => coll.aggregate([{$redact: "PRUNE"}], caseInsensitive));
- assert.throws(() => coll.aggregate([{$redact: "REDACT"}], caseInsensitive));
+"use strict";
+
+var caseInsensitive = {collation: {locale: "en_US", strength: 2}};
+
+var coll = db.collation_redact;
+coll.drop();
+assert.writeOK(coll.insert({a: "a"}));
+
+// Test that $redact respects an explicit collation. Since the top-level of the document gets
+// pruned, we end up redacting the entire document and returning no results.
+assert.eq(
+ 0,
+ coll.aggregate([{$redact: {$cond: [{$eq: ["A", "a"]}, "$$PRUNE", "$$KEEP"]}}], caseInsensitive)
+ .itcount());
+
+coll.drop();
+assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive));
+assert.writeOK(coll.insert({a: "a"}));
+
+// Test that $redact respects the inherited collation. Since the top-level of the document gets
+// pruned, we end up redacting the entire document and returning no results.
+assert.eq(0,
+ coll.aggregate([{$redact: {$cond: [{$eq: ["A", "a"]}, "$$PRUNE", "$$KEEP"]}}]).itcount());
+
+// Test that a $match which can be optimized to be pushed before the $redact respects the
+// collation.
+assert.eq(1, coll.aggregate([{$redact: "$$KEEP"}, {$match: {a: "A"}}]).itcount());
+
+// Comparisons to the internal constants bound to the $$KEEP, $$PRUNE, and $$DESCEND variables
+// should not respect the collation.
+assert.throws(() => coll.aggregate([{$redact: "KEEP"}], caseInsensitive));
+assert.throws(() => coll.aggregate([{$redact: "PRUNE"}], caseInsensitive));
+assert.throws(() => coll.aggregate([{$redact: "REDACT"}], caseInsensitive));
})();
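
A minimal sketch of the contrast the test relies on, with an invented collection name that is not part of this patch: the same $redact expression keeps the document under the default simple collation and prunes it under a case-insensitive one.

    // Hypothetical illustration; the collection name is invented.
    const redactSketch = db.redact_sketch;
    redactSketch.drop();
    assert.commandWorked(redactSketch.insert({a: "a"}));
    const pipe = [{$redact: {$cond: [{$eq: ["A", "a"]}, "$$PRUNE", "$$KEEP"]}}];
    // Simple collation: "A" != "a", so $$KEEP retains the document.
    assert.eq(1, redactSketch.aggregate(pipe).itcount());
    // Case-insensitive collation: "A" == "a", so $$PRUNE removes it.
    assert.eq(
        0,
        redactSketch.aggregate(pipe, {collation: {locale: "en_US", strength: 2}}).itcount());
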
diff --git a/jstests/aggregation/sources/replaceRoot/address.js b/jstests/aggregation/sources/replaceRoot/address.js
index 32ac3df2626..537ec7d50ac 100644
--- a/jstests/aggregation/sources/replaceRoot/address.js
+++ b/jstests/aggregation/sources/replaceRoot/address.js
@@ -7,99 +7,98 @@
*/
(function() {
- "use strict";
+"use strict";
- // For arrayEq.
- load("jstests/aggregation/extras/utils.js");
+// For arrayEq.
+load("jstests/aggregation/extras/utils.js");
- const dbName = "test";
- const collName = jsTest.name();
+const dbName = "test";
+const collName = jsTest.name();
- Random.setRandomSeed();
+Random.setRandomSeed();
- /**
- * Helper to get a random entry out of an array.
- */
- function randomChoice(array) {
- return array[Random.randInt(array.length)];
- }
-
- /**
- * Helper to generate a randomized document with the following schema:
- * {
- * name: <string>,
- * address: {number: <3-digit int>, street: <string>, city: <string>, zip: <5-digit int>}
- * }
- */
- function generateRandomDocument() {
- let names = ["Asya", "Charlie", "Dan", "Geert", "Kyle"];
- const minNumber = 1;
- const maxNumber = 999;
- let streets = ["3rd", "4th", "5th", "6th", "7th", "8th", "9th"];
- let cities = ["New York", "Palo Alto", "Sydney", "Dublin"];
- const minZip = 10000;
- const maxZip = 99999;
+/**
+ * Helper to get a random entry out of an array.
+ */
+function randomChoice(array) {
+ return array[Random.randInt(array.length)];
+}
- return {
- names: randomChoice(names),
- address: {
- number: Random.randInt(maxNumber - minNumber + 1) + minNumber,
- street: randomChoice(streets),
- city: randomChoice(cities),
- zip: Random.randInt(maxZip - minZip + 1) + minZip,
- },
- };
- }
+/**
+ * Helper to generate a randomized document with the following schema:
+ * {
+ * name: <string>,
+ * address: {number: <3-digit int>, street: <string>, city: <string>, zip: <5-digit int>}
+ * }
+ */
+function generateRandomDocument() {
+ let names = ["Asya", "Charlie", "Dan", "Geert", "Kyle"];
+ const minNumber = 1;
+ const maxNumber = 999;
+ let streets = ["3rd", "4th", "5th", "6th", "7th", "8th", "9th"];
+ let cities = ["New York", "Palo Alto", "Sydney", "Dublin"];
+ const minZip = 10000;
+ const maxZip = 99999;
- function doExecutionTest(conn) {
- const coll = conn.getDB(dbName).getCollection(collName);
- coll.drop();
+ return {
+ names: randomChoice(names),
+ address: {
+ number: Random.randInt(maxNumber - minNumber + 1) + minNumber,
+ street: randomChoice(streets),
+ city: randomChoice(cities),
+ zip: Random.randInt(maxZip - minZip + 1) + minZip,
+ },
+ };
+}
- // Insert a bunch of documents of the form above.
- const nDocs = 10;
- let bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < nDocs; i++) {
- bulk.insert(generateRandomDocument());
- }
- assert.writeOK(bulk.execute());
+function doExecutionTest(conn) {
+ const coll = conn.getDB(dbName).getCollection(collName);
+ coll.drop();
-        // Extract the contents of the address field, and make sure that doing the same
-        // with $replaceRoot yields the correct answer.
-        // First compute the expected results with an explicit $project, since we know all
-        // of the fields in the address, then check that $replaceRoot matches them.
- let addressPipe = [{
- $project: {
- "_id": 0,
- "number": "$address.number",
- "street": "$address.street",
- "city": "$address.city",
- "zip": "$address.zip"
- }
- }];
- let correctAddresses = coll.aggregate(addressPipe).toArray();
+ // Insert a bunch of documents of the form above.
+ const nDocs = 10;
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let i = 0; i < nDocs; i++) {
+ bulk.insert(generateRandomDocument());
+ }
+ assert.writeOK(bulk.execute());
- // Then compute the same results using $replaceRoot.
- let replaceWithResult = coll.aggregate([
- {$replaceRoot: {newRoot: "$address"}},
- {$sort: {city: 1, zip: 1, street: 1, number: 1}}
- ])
- .toArray();
+    // Extract the contents of the address field, and make sure that doing the same
+    // with $replaceRoot yields the correct answer.
+    // First compute the expected results with an explicit $project, since we know all
+    // of the fields in the address, then check that $replaceRoot matches them.
+ let addressPipe = [{
+ $project: {
+ "_id": 0,
+ "number": "$address.number",
+ "street": "$address.street",
+ "city": "$address.city",
+ "zip": "$address.zip"
+ }
+ }];
+ let correctAddresses = coll.aggregate(addressPipe).toArray();
- // Then assert they are the same.
- assert(
- arrayEq(replaceWithResult, correctAddresses),
- "$replaceRoot does not work the same as $project-ing the relevant fields to the top level");
- }
+ // Then compute the same results using $replaceRoot.
+ let replaceWithResult = coll.aggregate([
+ {$replaceRoot: {newRoot: "$address"}},
+ {$sort: {city: 1, zip: 1, street: 1, number: 1}}
+ ])
+ .toArray();
- // Test against the standalone started by resmoke.py.
- let conn = db.getMongo();
- doExecutionTest(conn);
- print("Success! Standalone execution test for $replaceRoot passed.");
+ // Then assert they are the same.
+ assert(
+ arrayEq(replaceWithResult, correctAddresses),
+ "$replaceRoot does not work the same as $project-ing the relevant fields to the top level");
+}
- // Test against a sharded cluster.
- let st = new ShardingTest({shards: 2});
- doExecutionTest(st.s0);
- st.stop();
- print("Success! Sharding test for $replaceRoot passed.");
+// Test against the standalone started by resmoke.py.
+let conn = db.getMongo();
+doExecutionTest(conn);
+print("Success! Standalone execution test for $replaceRoot passed.");
+// Test against a sharded cluster.
+let st = new ShardingTest({shards: 2});
+doExecutionTest(st.s0);
+st.stop();
+print("Success! Sharding test for $replaceRoot passed.");
}());
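
The equivalence this test randomizes over can be seen on a single document. A minimal sketch, with an invented collection name that is not part of this patch:

    // Hypothetical illustration; the collection name is invented. Promoting a
    // subdocument with $replaceRoot yields the same result as projecting each
    // of its (known) fields to the top level.
    const rrSketch = db.replace_root_sketch;
    rrSketch.drop();
    assert.commandWorked(rrSketch.insert({_id: 1, address: {number: 10, street: "3rd"}}));
    assert.eq([{number: 10, street: "3rd"}],
              rrSketch.aggregate([{$replaceRoot: {newRoot: "$address"}}]).toArray());
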
diff --git a/jstests/aggregation/sources/replaceRoot/use_cases.js b/jstests/aggregation/sources/replaceRoot/use_cases.js
index d66129df2e9..cb58ddac5c8 100644
--- a/jstests/aggregation/sources/replaceRoot/use_cases.js
+++ b/jstests/aggregation/sources/replaceRoot/use_cases.js
@@ -1,25 +1,22 @@
// Basic integration tests for $replaceRoot and its alias $replaceWith.
(function() {
- "use strict";
+"use strict";
- const coll = db.replaceWith_use_cases;
- coll.drop();
+const coll = db.replaceWith_use_cases;
+coll.drop();
- assert.commandWorked(coll.insert([
- {_id: 0, comments: [{user_id: "x", comment: "foo"}, {user_id: "y", comment: "bar"}]},
- {_id: 1, comments: [{user_id: "y", comment: "bar again"}]}
- ]));
+assert.commandWorked(coll.insert([
+ {_id: 0, comments: [{user_id: "x", comment: "foo"}, {user_id: "y", comment: "bar"}]},
+ {_id: 1, comments: [{user_id: "y", comment: "bar again"}]}
+]));
- // Test computing the most frequent commenters using $replaceRoot.
- let pipeline = [
- {$unwind: "$comments"},
- {$replaceRoot: {newRoot: "$comments"}},
- {$sortByCount: "$user_id"}
- ];
- const expectedResults = [{_id: "y", count: 2}, {_id: "x", count: 1}];
- assert.eq(coll.aggregate(pipeline).toArray(), expectedResults);
+// Test computing the most frequent commenters using $replaceRoot.
+let pipeline =
+ [{$unwind: "$comments"}, {$replaceRoot: {newRoot: "$comments"}}, {$sortByCount: "$user_id"}];
+const expectedResults = [{_id: "y", count: 2}, {_id: "x", count: 1}];
+assert.eq(coll.aggregate(pipeline).toArray(), expectedResults);
- // Test the same thing but using the $replaceWith alias.
- pipeline = [{$unwind: "$comments"}, {$replaceWith: "$comments"}, {$sortByCount: "$user_id"}];
- assert.eq(coll.aggregate(pipeline).toArray(), expectedResults);
+// Test the same thing but using the $replaceWith alias.
+pipeline = [{$unwind: "$comments"}, {$replaceWith: "$comments"}, {$sortByCount: "$user_id"}];
+assert.eq(coll.aggregate(pipeline).toArray(), expectedResults);
}());
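
One step of the pipeline deserves a note: $sortByCount is shorthand for a $group that counts per key, followed by a descending $sort on the count. A minimal sketch, with an invented collection name that is not part of this patch:

    // Hypothetical illustration; the collection name is invented.
    const sbcSketch = db.sort_by_count_sketch;
    sbcSketch.drop();
    assert.commandWorked(
        sbcSketch.insert([{user_id: "y"}, {user_id: "y"}, {user_id: "x"}]));
    assert.eq(sbcSketch.aggregate([{$sortByCount: "$user_id"}]).toArray(),
              sbcSketch
                  .aggregate([
                      {$group: {_id: "$user_id", count: {$sum: 1}}},
                      {$sort: {count: -1}}
                  ])
                  .toArray());
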
diff --git a/jstests/aggregation/sources/sort/collation_sort.js b/jstests/aggregation/sources/sort/collation_sort.js
index 8febbafb857..6d8b20f9ab2 100644
--- a/jstests/aggregation/sources/sort/collation_sort.js
+++ b/jstests/aggregation/sources/sort/collation_sort.js
@@ -1,95 +1,95 @@
// Test that the $sort stage respects the collation.
(function() {
- "use strict";
+"use strict";
- // In French, words are sometimes ordered on the secondary level (a.k.a. at the level of
- // diacritical marks) by the *last* accent difference rather than the first. This is specified
- // by the {backwards: true} option.
- //
- // For example, côte < coté, since the last accent difference is "e" < "é". Without the reverse
- // accent weighting turned on, these two words would sort in the opposite order, since "ô" >
- // "o".
- var frenchAccentOrdering = {collation: {locale: "fr", backwards: true}};
+// In French, words are sometimes ordered on the secondary level (a.k.a. at the level of
+// diacritical marks) by the *last* accent difference rather than the first. This is specified
+// by the {backwards: true} option.
+//
+// For example, côte < coté, since the last accent difference is "e" < "é". Without the reverse
+// accent weighting turned on, these two words would sort in the opposite order, since "ô" >
+// "o".
+var frenchAccentOrdering = {collation: {locale: "fr", backwards: true}};
- var coll = db.collation_sort;
- coll.drop();
- assert.writeOK(coll.insert({_id: 1, word1: "pêche", word2: "côté"}));
- assert.writeOK(coll.insert({_id: 2, word1: "pêche", word2: "coté"}));
- assert.writeOK(coll.insert({_id: 3, word1: "pêche", word2: "côte"}));
- assert.writeOK(coll.insert({_id: 4, word1: "pèché", word2: "côté"}));
- assert.writeOK(coll.insert({_id: 5, word1: "pèché", word2: "coté"}));
- assert.writeOK(coll.insert({_id: 6, word1: "pèché", word2: "côte"}));
- assert.writeOK(coll.insert({_id: 7, word1: "pêché", word2: "côté"}));
- assert.writeOK(coll.insert({_id: 8, word1: "pêché", word2: "coté"}));
- assert.writeOK(coll.insert({_id: 9, word1: "pêché", word2: "côte"}));
+var coll = db.collation_sort;
+coll.drop();
+assert.writeOK(coll.insert({_id: 1, word1: "pêche", word2: "côté"}));
+assert.writeOK(coll.insert({_id: 2, word1: "pêche", word2: "coté"}));
+assert.writeOK(coll.insert({_id: 3, word1: "pêche", word2: "côte"}));
+assert.writeOK(coll.insert({_id: 4, word1: "pèché", word2: "côté"}));
+assert.writeOK(coll.insert({_id: 5, word1: "pèché", word2: "coté"}));
+assert.writeOK(coll.insert({_id: 6, word1: "pèché", word2: "côte"}));
+assert.writeOK(coll.insert({_id: 7, word1: "pêché", word2: "côté"}));
+assert.writeOK(coll.insert({_id: 8, word1: "pêché", word2: "coté"}));
+assert.writeOK(coll.insert({_id: 9, word1: "pêché", word2: "côte"}));
- // Test that ascending sort respects the collation.
- assert.eq([{_id: "pèché"}, {_id: "pêche"}, {_id: "pêché"}],
- coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: 1}}]).toArray());
- assert.eq([{_id: "pêche"}, {_id: "pèché"}, {_id: "pêché"}],
- coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: 1}}], frenchAccentOrdering)
- .toArray());
+// Test that ascending sort respects the collation.
+assert.eq([{_id: "pèché"}, {_id: "pêche"}, {_id: "pêché"}],
+ coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: 1}}]).toArray());
+assert.eq(
+ [{_id: "pêche"}, {_id: "pèché"}, {_id: "pêché"}],
+ coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: 1}}], frenchAccentOrdering).toArray());
- // Test that descending sort respects the collation.
- assert.eq([{_id: "pêché"}, {_id: "pêche"}, {_id: "pèché"}],
- coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: -1}}]).toArray());
- assert.eq([{_id: "pêché"}, {_id: "pèché"}, {_id: "pêche"}],
- coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: -1}}], frenchAccentOrdering)
- .toArray());
+// Test that descending sort respects the collation.
+assert.eq([{_id: "pêché"}, {_id: "pêche"}, {_id: "pèché"}],
+ coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: -1}}]).toArray());
+assert.eq([{_id: "pêché"}, {_id: "pèché"}, {_id: "pêche"}],
+ coll.aggregate([{$group: {_id: "$word1"}}, {$sort: {_id: -1}}], frenchAccentOrdering)
+ .toArray());
- // Test that compound, mixed ascending/descending sort respects the collation.
- assert.eq([4, 6, 5, 1, 3, 2, 7, 9, 8],
- coll.aggregate([
+// Test that compound, mixed ascending/descending sort respects the collation.
+assert.eq([4, 6, 5, 1, 3, 2, 7, 9, 8],
+ coll.aggregate([
+ {$sort: {word1: 1, word2: -1}},
+ {$project: {_id: 1}},
+ {$group: {_id: null, out: {$push: "$_id"}}}
+ ])
+ .toArray()[0]
+ .out);
+assert.eq([1, 2, 3, 4, 5, 6, 7, 8, 9],
+ coll.aggregate(
+ [
{$sort: {word1: 1, word2: -1}},
{$project: {_id: 1}},
{$group: {_id: null, out: {$push: "$_id"}}}
- ])
- .toArray()[0]
- .out);
- assert.eq([1, 2, 3, 4, 5, 6, 7, 8, 9],
- coll.aggregate(
- [
- {$sort: {word1: 1, word2: -1}},
- {$project: {_id: 1}},
- {$group: {_id: null, out: {$push: "$_id"}}}
- ],
- frenchAccentOrdering)
- .toArray()[0]
- .out);
+ ],
+ frenchAccentOrdering)
+ .toArray()[0]
+ .out);
- // Test that compound, mixed descending/ascending sort respects the collation.
- assert.eq([8, 9, 7, 2, 3, 1, 5, 6, 4],
- coll.aggregate([
+// Test that compound, mixed descending/ascending sort respects the collation.
+assert.eq([8, 9, 7, 2, 3, 1, 5, 6, 4],
+ coll.aggregate([
+ {$sort: {word1: -1, word2: 1}},
+ {$project: {_id: 1}},
+ {$group: {_id: null, out: {$push: "$_id"}}}
+ ])
+ .toArray()[0]
+ .out);
+assert.eq([9, 8, 7, 6, 5, 4, 3, 2, 1],
+ coll.aggregate(
+ [
{$sort: {word1: -1, word2: 1}},
{$project: {_id: 1}},
{$group: {_id: null, out: {$push: "$_id"}}}
- ])
- .toArray()[0]
- .out);
- assert.eq([9, 8, 7, 6, 5, 4, 3, 2, 1],
- coll.aggregate(
- [
- {$sort: {word1: -1, word2: 1}},
- {$project: {_id: 1}},
- {$group: {_id: null, out: {$push: "$_id"}}}
- ],
- frenchAccentOrdering)
- .toArray()[0]
- .out);
+ ],
+ frenchAccentOrdering)
+ .toArray()[0]
+ .out);
- // Test that sort inside a $facet respects the collation.
- const results = coll.aggregate([{
- $facet: {
- fct: [
- {$sort: {word1: -1, word2: 1}},
- {$project: {_id: 1}},
- {$group: {_id: null, out: {$push: "$_id"}}}
- ]
- }
- }],
- frenchAccentOrdering)
- .toArray();
- assert.eq(1, results.length);
- assert.eq(1, results[0].fct.length);
- assert.eq([9, 8, 7, 6, 5, 4, 3, 2, 1], results[0].fct[0].out);
+// Test that sort inside a $facet respects the collation.
+const results = coll.aggregate([{
+ $facet: {
+ fct: [
+ {$sort: {word1: -1, word2: 1}},
+ {$project: {_id: 1}},
+ {$group: {_id: null, out: {$push: "$_id"}}}
+ ]
+ }
+ }],
+ frenchAccentOrdering)
+ .toArray();
+assert.eq(1, results.length);
+assert.eq(1, results[0].fct.length);
+assert.eq([9, 8, 7, 6, 5, 4, 3, 2, 1], results[0].fct[0].out);
})();
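
The core of the backwards-accent rule can be shown with two words. A minimal sketch, with an invented collection name that is not part of this patch:

    // Hypothetical illustration; the collection name is invented. Under
    // {backwards: true} the *last* accent difference decides the secondary-
    // level order, so "côte" < "coté" (because "e" < "é"), even though
    // "ô" > "o" in a plain binary comparison.
    const frSketch = db.french_sort_sketch;
    frSketch.drop();
    assert.commandWorked(frSketch.insert([{w: "coté"}, {w: "côte"}]));
    assert.eq([{w: "côte"}, {w: "coté"}],
              frSketch
                  .aggregate([{$sort: {w: 1}}, {$project: {_id: 0, w: 1}}],
                             {collation: {locale: "fr", backwards: true}})
                  .toArray());
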
diff --git a/jstests/aggregation/sources/sort/collation_sort_japanese.js b/jstests/aggregation/sources/sort/collation_sort_japanese.js
index 5bfad05af31..9051ed45aa7 100644
--- a/jstests/aggregation/sources/sort/collation_sort_japanese.js
+++ b/jstests/aggregation/sources/sort/collation_sort_japanese.js
@@ -4,143 +4,148 @@
* aggregation_sharded_collections_passthrough.)
*/
(function() {
- "use strict";
-
- Random.setRandomSeed();
- const coll = db.getCollection("collation_sort_japanese");
-
- // In Japanese, the order of vowels is a, i, u, e, o. The sorting of mixed katakana and hiragana
- // vowels differs depending on the collation:
- //
- // - With the simple collation, hiragana vowels come first (in order), followed by katakana.
- // - In the Japanese locale, vowels with the same sound sort together. Whether hiragana or
- // katakana comes first depends on the strength level of the collation.
- const data = [
- {kana: "ア", val: 0, name: "katakana a"},
- {kana: "イ", val: 1, name: "katakana i"},
- {kana: "ウ", val: 2, name: "katakana u"},
- {kana: "エ", val: 3, name: "katakana e"},
- {kana: "オ", val: 4, name: "katakana o"},
- {kana: "あ", val: 5, name: "hiragana a"},
- {kana: "い", val: 6, name: "hiragana i"},
- {kana: "う", val: 7, name: "hiragana u"},
- {kana: "え", val: 8, name: "hiragana e"},
- {kana: "お", val: 9, name: "hiragana o"},
- ];
-
- const simpleCollation = {locale: "simple"};
- const jaCollationStr3 = {locale: "ja"};
- const jaCollationStr4 = {locale: "ja", strength: 4};
-
- /**
- * Inserts each doc of 'docs' into the collection in no specified order before running tests.
- */
- function runTests(docs) {
- let bulk = coll.initializeUnorderedBulkOp();
- for (let doc of docs) {
- bulk.insert(doc);
- }
- assert.writeOK(bulk.execute());
-
- let sortOrder;
-
- function assertAggregationSortOrder(collation, expectedVals) {
- let expectedDocs = expectedVals.map(val => ({val: val}));
- let result = coll.aggregate([{$sort: sortOrder}, {$project: {_id: 0, val: 1}}],
- {collation: collation})
- .toArray();
- assert.eq(result,
- expectedDocs,
- "sort returned wrong order with sort pattern " + tojson(sortOrder) +
- " and collation " + tojson(collation));
-
- // Run the same aggregation, but in a sharded cluster, force the merging to be performed
- // on a shard instead of on mongos.
- result = coll.aggregate(
- [
- {$_internalSplitPipeline: {mergeType: "anyShard"}},
- {$sort: sortOrder},
- {$project: {_id: 0, val: 1}}
- ],
- {collation: collation})
- .toArray();
- assert.eq(result,
- expectedDocs,
- "sort returned wrong order with sort pattern " + tojson(sortOrder) +
- " and collation " + tojson(collation) + " when merging on a shard");
- }
-
- // Start with a sort on a single key.
- sortOrder = {kana: 1};
-
- // With the binary collation, hiragana codepoints sort before katakana codepoints.
- assertAggregationSortOrder(simpleCollation, [5, 6, 7, 8, 9, 0, 1, 2, 3, 4]);
-
- // With the Japanese collation at strength 4, a hiragana codepoint always sorts before its
- // equivalent katakana.
- assertAggregationSortOrder(jaCollationStr4, [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]);
+"use strict";
+
+Random.setRandomSeed();
+const coll = db.getCollection("collation_sort_japanese");
+
+// In Japanese, the order of vowels is a, i, u, e, o. The sorting of mixed katakana and hiragana
+// vowels differs depending on the collation:
+//
+// - With the simple collation, hiragana vowels come first (in order), followed by katakana.
+// - In the Japanese locale, vowels with the same sound sort together. Whether hiragana or
+// katakana comes first depends on the strength level of the collation.
+const data = [
+ {kana: "ア", val: 0, name: "katakana a"},
+ {kana: "イ", val: 1, name: "katakana i"},
+ {kana: "ウ", val: 2, name: "katakana u"},
+ {kana: "エ", val: 3, name: "katakana e"},
+ {kana: "オ", val: 4, name: "katakana o"},
+ {kana: "あ", val: 5, name: "hiragana a"},
+ {kana: "い", val: 6, name: "hiragana i"},
+ {kana: "う", val: 7, name: "hiragana u"},
+ {kana: "え", val: 8, name: "hiragana e"},
+ {kana: "お", val: 9, name: "hiragana o"},
+];
+
+const simpleCollation = {
+ locale: "simple"
+};
+const jaCollationStr3 = {
+ locale: "ja"
+};
+const jaCollationStr4 = {
+ locale: "ja",
+ strength: 4
+};
- // Test a sort on a compound key.
- sortOrder = {kana: 1, val: 1};
-
- // With the binary collation, hiragana codepoints sort before katakana codepoints.
- assertAggregationSortOrder(simpleCollation, [5, 6, 7, 8, 9, 0, 1, 2, 3, 4]);
+/**
+ * Inserts each doc of 'docs' into the collection in no specified order before running tests.
+ */
+function runTests(docs) {
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let doc of docs) {
+ bulk.insert(doc);
+ }
+ assert.writeOK(bulk.execute());
- // With the default Japanese collation, hiragana and katakana with the same pronunciation
- // sort together but with no specified order. The compound sort on "val" breaks the tie and
- // puts the katakana first.
- assertAggregationSortOrder(jaCollationStr3, [0, 5, 1, 6, 2, 7, 3, 8, 4, 9]);
+ let sortOrder;
- // With the Japanese collation at strength 4, a hiragana codepoint always sorts before its
- // equivalent katakana.
- assertAggregationSortOrder(jaCollationStr4, [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]);
+ function assertAggregationSortOrder(collation, expectedVals) {
+ let expectedDocs = expectedVals.map(val => ({val: val}));
+ let result = coll.aggregate([{$sort: sortOrder}, {$project: {_id: 0, val: 1}}],
+ {collation: collation})
+ .toArray();
+ assert.eq(result,
+ expectedDocs,
+ "sort returned wrong order with sort pattern " + tojson(sortOrder) +
+ " and collation " + tojson(collation));
+
+ // Run the same aggregation, but in a sharded cluster, force the merging to be performed
+ // on a shard instead of on mongos.
+ result = coll.aggregate(
+ [
+ {$_internalSplitPipeline: {mergeType: "anyShard"}},
+ {$sort: sortOrder},
+ {$project: {_id: 0, val: 1}}
+ ],
+ {collation: collation})
+ .toArray();
+ assert.eq(result,
+ expectedDocs,
+ "sort returned wrong order with sort pattern " + tojson(sortOrder) +
+ " and collation " + tojson(collation) + " when merging on a shard");
}
- // Test sorting documents with only scalar values.
- coll.drop();
- runTests(data);
-
- // Test sorting documents containing singleton arrays.
- assert(coll.drop());
- runTests(data.map(doc => {
- let copy = Object.extend({}, doc);
+ // Start with a sort on a single key.
+ sortOrder = {kana: 1};
+
+ // With the binary collation, hiragana codepoints sort before katakana codepoints.
+ assertAggregationSortOrder(simpleCollation, [5, 6, 7, 8, 9, 0, 1, 2, 3, 4]);
+
+ // With the Japanese collation at strength 4, a hiragana codepoint always sorts before its
+ // equivalent katakana.
+ assertAggregationSortOrder(jaCollationStr4, [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]);
+
+ // Test a sort on a compound key.
+ sortOrder = {kana: 1, val: 1};
+
+ // With the binary collation, hiragana codepoints sort before katakana codepoints.
+ assertAggregationSortOrder(simpleCollation, [5, 6, 7, 8, 9, 0, 1, 2, 3, 4]);
+
+ // With the default Japanese collation, hiragana and katakana with the same pronunciation
+ // sort together but with no specified order. The compound sort on "val" breaks the tie and
+ // puts the katakana first.
+ assertAggregationSortOrder(jaCollationStr3, [0, 5, 1, 6, 2, 7, 3, 8, 4, 9]);
+
+ // With the Japanese collation at strength 4, a hiragana codepoint always sorts before its
+ // equivalent katakana.
+ assertAggregationSortOrder(jaCollationStr4, [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]);
+}
+
+// Test sorting documents with only scalar values.
+coll.drop();
+runTests(data);
+
+// Test sorting documents containing singleton arrays.
+assert(coll.drop());
+runTests(data.map(doc => {
+ let copy = Object.extend({}, doc);
+ copy.kana = [copy.kana];
+ return copy;
+}));
+
+// Test sorting documents containing arrays with multiple elements.
+assert(coll.drop());
+runTests(data.map(doc => {
+ let copy = Object.extend({}, doc);
+ copy.kana = [copy.kana, copy.kana, copy.kana];
+ return copy;
+}));
+
+// Test sorting documents where some values are scalars and others are arrays.
+assert(coll.drop());
+runTests(data.map(doc => {
+ let copy = Object.extend({}, doc);
+ if (Math.random() < 0.5) {
copy.kana = [copy.kana];
- return copy;
- }));
-
- // Test sorting documents containing arrays with multiple elements.
- assert(coll.drop());
- runTests(data.map(doc => {
- let copy = Object.extend({}, doc);
- copy.kana = [copy.kana, copy.kana, copy.kana];
- return copy;
- }));
-
- // Test sorting documents where some values are scalars and others are arrays.
- assert(coll.drop());
- runTests(data.map(doc => {
- let copy = Object.extend({}, doc);
- if (Math.random() < 0.5) {
- copy.kana = [copy.kana];
- }
- return copy;
- }));
-
- // Create indexes that provide sorts and assert that the results are equivalent.
- assert(coll.drop());
- assert.commandWorked(
- coll.createIndex({kana: 1}, {name: "k1_jaStr3", collation: jaCollationStr3}));
- assert.commandWorked(
- coll.createIndex({kana: 1}, {name: "k1_jaStr4", collation: jaCollationStr4}));
- assert.commandWorked(
- coll.createIndex({kana: 1, val: 1}, {name: "k1v1_jaStr3", collation: jaCollationStr3}));
- assert.commandWorked(
- coll.createIndex({kana: 1, val: 1}, {name: "k1v1_jaStr4", collation: jaCollationStr4}));
- runTests(data.map(doc => {
- let copy = Object.extend({}, doc);
- if (Math.random() < 0.5) {
- copy.kana = [copy.kana];
- }
- return copy;
- }));
+ }
+ return copy;
+}));
+
+// Create indexes that provide sorts and assert that the results are equivalent.
+assert(coll.drop());
+assert.commandWorked(coll.createIndex({kana: 1}, {name: "k1_jaStr3", collation: jaCollationStr3}));
+assert.commandWorked(coll.createIndex({kana: 1}, {name: "k1_jaStr4", collation: jaCollationStr4}));
+assert.commandWorked(
+ coll.createIndex({kana: 1, val: 1}, {name: "k1v1_jaStr3", collation: jaCollationStr3}));
+assert.commandWorked(
+ coll.createIndex({kana: 1, val: 1}, {name: "k1v1_jaStr4", collation: jaCollationStr4}));
+runTests(data.map(doc => {
+ let copy = Object.extend({}, doc);
+ if (Math.random() < 0.5) {
+ copy.kana = [copy.kana];
+ }
+ return copy;
+}));
}());
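
The collation behavior exercised above can be sketched in isolation. A minimal mongo-shell
fragment, assuming a scratch collection named "kana_demo" (an illustrative name, not part of
this patch):

    // Minimal sketch: how collation strength changes kana ordering.
    const demo = db.getCollection("kana_demo");
    demo.drop();
    assert.writeOK(demo.insert([{kana: "ア"}, {kana: "あ"}]));  // katakana a, hiragana a

    // Default Japanese collation (strength 3): the two kana compare equal, so
    // their relative order under this sort is unspecified.
    demo.find().sort({kana: 1}).collation({locale: "ja"}).toArray();

    // Strength 4 adds a level that distinguishes hiragana from katakana, so
    // hiragana "あ" reliably sorts before katakana "ア".
    demo.find().sort({kana: 1}).collation({locale: "ja", strength: 4}).toArray();
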
diff --git a/jstests/aggregation/sources/sort/explain_sort.js b/jstests/aggregation/sources/sort/explain_sort.js
index 19d4fb0c7ba..d519ea323c7 100644
--- a/jstests/aggregation/sources/sort/explain_sort.js
+++ b/jstests/aggregation/sources/sort/explain_sort.js
@@ -2,60 +2,60 @@
// designed to reproduce SERVER-33084.
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
-
- load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
-
- const coll = db.explain_sort;
- coll.drop();
-
- const kNumDocs = 10;
-
- // Return whether or not explain() was successful and contained the appropriate fields given the
- // requested verbosity.
- function checkResults(results, verbosity) {
- let cursorSubdocs = getAggPlanStages(results, "$cursor");
- let nReturned = 0;
- let nExamined = 0;
- assert.gt(cursorSubdocs.length, 0);
- for (let stageResult of cursorSubdocs) {
- const result = stageResult.$cursor;
- if (verbosity === "queryPlanner") {
- assert(!result.hasOwnProperty("executionStats"), tojson(results));
- } else {
- nReturned += result.executionStats.nReturned;
- nExamined += result.executionStats.totalDocsExamined;
- }
- }
- if (verbosity != "queryPlanner") {
- assert.eq(nReturned, kNumDocs, tojson(results));
- assert.eq(nExamined, kNumDocs, tojson(results));
+"use strict";
+
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStages().
+
+const coll = db.explain_sort;
+coll.drop();
+
+const kNumDocs = 10;
+
+// Asserts that explain() was successful and that its output contains the appropriate fields for
+// the requested verbosity.
+function checkResults(results, verbosity) {
+ let cursorSubdocs = getAggPlanStages(results, "$cursor");
+ let nReturned = 0;
+ let nExamined = 0;
+ assert.gt(cursorSubdocs.length, 0);
+ for (let stageResult of cursorSubdocs) {
+ const result = stageResult.$cursor;
+ if (verbosity === "queryPlanner") {
+ assert(!result.hasOwnProperty("executionStats"), tojson(results));
+ } else {
+ nReturned += result.executionStats.nReturned;
+ nExamined += result.executionStats.totalDocsExamined;
}
}
-
- for (let i = 0; i < kNumDocs; i++) {
- assert.writeOK(coll.insert({a: i}));
+ if (verbosity != "queryPlanner") {
+ assert.eq(nReturned, kNumDocs, tojson(results));
+ assert.eq(nExamined, kNumDocs, tojson(results));
}
+}
- // Execute several aggregations with a sort stage combined with various single document
- // transformation stages.
- for (let verbosity of["queryPlanner", "executionStats", "allPlansExecution"]) {
- let pipeline = [{$project: {a: 1}}, {$sort: {a: 1}}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+for (let i = 0; i < kNumDocs; i++) {
+ assert.writeOK(coll.insert({a: i}));
+}
- pipeline = [{$project: {a: 0}}, {$sort: {a: 1}}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+// Execute several aggregations with a sort stage combined with various single document
+// transformation stages.
+for (let verbosity of ["queryPlanner", "executionStats", "allPlansExecution"]) {
+ let pipeline = [{$project: {a: 1}}, {$sort: {a: 1}}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
- pipeline = [{$addFields: {b: 1}}, {$sort: {a: 1}}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+ pipeline = [{$project: {a: 0}}, {$sort: {a: 1}}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
- pipeline = [{$sort: {a: 1}}, {$project: {_id: 1}}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+ pipeline = [{$addFields: {b: 1}}, {$sort: {a: 1}}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
- pipeline = [{$project: {a: 1}}, {$limit: 5}, {$sort: {a: 1}}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+ pipeline = [{$sort: {a: 1}}, {$project: {_id: 1}}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
- pipeline = [{$project: {_id: 1}}, {$limit: 5}];
- checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
- }
+ pipeline = [{$project: {a: 1}}, {$limit: 5}, {$sort: {a: 1}}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+
+ pipeline = [{$project: {_id: 1}}, {$limit: 5}];
+ checkResults(coll.explain(verbosity).aggregate(pipeline), verbosity);
+}
})();
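
For reference, the verbosities being looped over differ only in whether runtime statistics are
attached to the plan. A hedged sketch of the distinction checkResults() relies on, using an
illustrative collection name:

    // Sketch: explain verbosity levels on an aggregation.
    const demoColl = db.explain_demo;  // illustrative, not used by the test
    demoColl.drop();
    assert.writeOK(demoColl.insert({a: 1}));

    // With "queryPlanner", the $cursor stage carries no executionStats...
    let explained = demoColl.explain("queryPlanner").aggregate([{$sort: {a: 1}}]);

    // ...while "executionStats" and "allPlansExecution" attach nReturned,
    // totalDocsExamined, etc., which checkResults() sums and verifies.
    explained = demoColl.explain("executionStats").aggregate([{$sort: {a: 1}}]);
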
diff --git a/jstests/aggregation/sources/unset/unset.js b/jstests/aggregation/sources/unset/unset.js
index be20a69b362..c11f97598f5 100644
--- a/jstests/aggregation/sources/unset/unset.js
+++ b/jstests/aggregation/sources/unset/unset.js
@@ -1,39 +1,38 @@
// Basic testing for the $unset aggregation stage.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- const coll = db.agg_stage_unset;
- coll.drop();
+const coll = db.agg_stage_unset;
+coll.drop();
- assert.commandWorked(coll.insert(
- [{_id: 0, a: 10}, {_id: 1, a: {b: 20, c: 30, 0: 40}}, {_id: 2, a: [{b: 50, c: 60}]}]));
+assert.commandWorked(coll.insert(
+ [{_id: 0, a: 10}, {_id: 1, a: {b: 20, c: 30, 0: 40}}, {_id: 2, a: [{b: 50, c: 60}]}]));
- // unset single field.
- let result = coll.aggregate([{$unset: ["a"]}]).toArray();
- assertArrayEq({actual: result, expected: [{_id: 0}, {_id: 1}, {_id: 2}]});
+// unset single field.
+let result = coll.aggregate([{$unset: ["a"]}]).toArray();
+assertArrayEq({actual: result, expected: [{_id: 0}, {_id: 1}, {_id: 2}]});
- // unset should work with string directive.
- result = coll.aggregate([{$unset: "a"}]).toArray();
- assertArrayEq({actual: result, expected: [{_id: 0}, {_id: 1}, {_id: 2}]});
+// unset should work with string directive.
+result = coll.aggregate([{$unset: "a"}]).toArray();
+assertArrayEq({actual: result, expected: [{_id: 0}, {_id: 1}, {_id: 2}]});
- // unset multiple fields.
- result = coll.aggregate([{$unset: ["_id", "a"]}]).toArray();
- assertArrayEq({actual: result, expected: [{}, {}, {}]});
+// unset multiple fields.
+result = coll.aggregate([{$unset: ["_id", "a"]}]).toArray();
+assertArrayEq({actual: result, expected: [{}, {}, {}]});
- // unset with dotted field path.
- result = coll.aggregate([{$unset: ["a.b"]}]).toArray();
- assertArrayEq({
- actual: result,
- expected: [{_id: 0, a: 10}, {_id: 1, a: {0: 40, c: 30}}, {_id: 2, a: [{c: 60}]}]
- });
-
- // Numeric field paths in aggregation represent field name only and not array offset.
- result = coll.aggregate([{$unset: ["a.0"]}]).toArray();
- assertArrayEq({
- actual: result,
- expected: [{_id: 0, a: 10}, {_id: 1, a: {b: 20, c: 30}}, {_id: 2, a: [{b: 50, c: 60}]}]
- });
+// unset with dotted field path.
+result = coll.aggregate([{$unset: ["a.b"]}]).toArray();
+assertArrayEq({
+ actual: result,
+ expected: [{_id: 0, a: 10}, {_id: 1, a: {0: 40, c: 30}}, {_id: 2, a: [{c: 60}]}]
+});
+// Numeric field paths in aggregation refer to field names only, never to array offsets.
+result = coll.aggregate([{$unset: ["a.0"]}]).toArray();
+assertArrayEq({
+ actual: result,
+ expected: [{_id: 0, a: 10}, {_id: 1, a: {b: 20, c: 30}}, {_id: 2, a: [{b: 50, c: 60}]}]
+});
})();
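
The dotted-path cases above behave the way they do because $unset acts as shorthand for an
exclusion $project. A small sketch of the equivalence, reusing the test's collection:

    // Sketch: these two pipelines produce the same documents; $unset is
    // shorthand for excluding the named fields with $project.
    db.agg_stage_unset.aggregate([{$unset: ["a.b"]}]).toArray();
    db.agg_stage_unset.aggregate([{$project: {"a.b": 0}}]).toArray();
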
diff --git a/jstests/aggregation/stages/skip_with_limit.js b/jstests/aggregation/stages/skip_with_limit.js
index 161ac931e88..d0bad0ed03f 100644
--- a/jstests/aggregation/stages/skip_with_limit.js
+++ b/jstests/aggregation/stages/skip_with_limit.js
@@ -4,47 +4,47 @@
* especially in a sharded cluster - which we intend to stress with this test.
*/
(function() {
- "use strict";
+"use strict";
- const coll = db.skip_with_limit;
- coll.drop();
+const coll = db.skip_with_limit;
+coll.drop();
- // Insert twenty documents: {x: 4, y: 0}, {x: 4, y: 1}, ..., {x: 4, y: 19}.
- const bulk = coll.initializeOrderedBulkOp();
- Array.from({length: 20}, (_, i) => ({x: 4, y: i})).forEach(doc => bulk.insert(doc));
- assert.commandWorked(bulk.execute());
+// Insert twenty documents: {x: 4, y: 0}, {x: 4, y: 1}, ..., {x: 4, y: 19}.
+const bulk = coll.initializeOrderedBulkOp();
+Array.from({length: 20}, (_, i) => ({x: 4, y: i})).forEach(doc => bulk.insert(doc));
+assert.commandWorked(bulk.execute());
- var count = coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$limit: 5}]).itcount();
- assert.eq(count, 5);
+var count = coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$limit: 5}]).itcount();
+assert.eq(count, 5);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 7}, {$skip: 3}, {$limit: 5}]).itcount();
- assert.eq(count, 5);
+count = coll.aggregate([{$match: {x: 4}}, {$skip: 7}, {$skip: 3}, {$limit: 5}]).itcount();
+assert.eq(count, 5);
- count = coll.aggregate([{$match: {x: 4}}, {$limit: 10}, {$skip: 5}]).itcount();
- assert.eq(count, 5);
+count = coll.aggregate([{$match: {x: 4}}, {$limit: 10}, {$skip: 5}]).itcount();
+assert.eq(count, 5);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$addFields: {y: 1}}, {$limit: 5}])
- .itcount();
- assert.eq(count, 5);
+count =
+ coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$addFields: {y: 1}}, {$limit: 5}]).itcount();
+assert.eq(count, 5);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$group: {_id: '$y'}}, {$limit: 5}])
- .itcount();
- assert.eq(count, 5);
+count =
+ coll.aggregate([{$match: {x: 4}}, {$skip: 10}, {$group: {_id: '$y'}}, {$limit: 5}]).itcount();
+assert.eq(count, 5);
- // For the pipelines with a $skip before the $limit, repeat the tests with larger skip values to
- // ensure that the skip is actually working. The large skips exhaust our 20 documents, so we get
- // fewer results.
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$limit: 5}]).itcount();
- assert.eq(count, 2);
+// For the pipelines with a $skip before the $limit, repeat the tests with larger skip values to
+// ensure that the skip is actually working. The large skips exhaust our 20 documents, so we get
+// fewer results.
+count = coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$limit: 5}]).itcount();
+assert.eq(count, 2);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 11}, {$skip: 7}, {$limit: 5}]).itcount();
- assert.eq(count, 2);
+count = coll.aggregate([{$match: {x: 4}}, {$skip: 11}, {$skip: 7}, {$limit: 5}]).itcount();
+assert.eq(count, 2);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$addFields: {y: 1}}, {$limit: 5}])
- .itcount();
- assert.eq(count, 2);
+count =
+ coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$addFields: {y: 1}}, {$limit: 5}]).itcount();
+assert.eq(count, 2);
- count = coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$group: {_id: '$y'}}, {$limit: 5}])
- .itcount();
- assert.eq(count, 2);
+count =
+ coll.aggregate([{$match: {x: 4}}, {$skip: 18}, {$group: {_id: '$y'}}, {$limit: 5}]).itcount();
+assert.eq(count, 2);
}());
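
The rewrite this test stresses can be stated arithmetically: a $skip of n followed by a $limit
of m may be replaced by a $limit of n + m followed by a $skip of n, which lets the limit move
ahead of the merge point in a sharded cluster. A quick check against the same twenty documents:

    // Sketch: [{$skip: 10}, {$limit: 5}] and [{$limit: 15}, {$skip: 10}]
    // both select documents 11 through 15 of the input stream.
    const swapped = db.skip_with_limit.aggregate([{$limit: 15}, {$skip: 10}]).itcount();
    assert.eq(swapped, 5);
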
diff --git a/jstests/aggregation/testall.js b/jstests/aggregation/testall.js
index 33dd09a0463..a58a1bb00f2 100644
--- a/jstests/aggregation/testall.js
+++ b/jstests/aggregation/testall.js
@@ -1,938 +1,917 @@
(function() {
- "use strict";
-
- // Loads data into the namespace 'aggdb.articles'.
- load('jstests/aggregation/data/articles.js');
- load('jstests/aggregation/extras/utils.js');
-
- const testDB = db.getSiblingDB("aggdb");
-
- // just passing through fields
- let p1 = testDB.runCommand({
- aggregate: "article",
- pipeline: [{$project: {tags: 1, pageViews: 1}}, {$sort: {_id: 1}}],
- cursor: {}
- });
-
- let p1result = [
- {"_id": 1, "pageViews": 5, "tags": ["fun", "good", "fun"]},
- {"_id": 2, "pageViews": 7, "tags": ["fun", "nasty"]},
- {"_id": 3, "pageViews": 6, "tags": ["nasty", "filthy"]}
- ];
-
- assert.docEq(p1.cursor.firstBatch, p1result, 'p1 failed');
-
- // a simple array unwinding
- let u1 = testDB.runCommand({aggregate: "article", pipeline: [{$unwind: "$tags"}], cursor: {}});
-
- let u1result = [
- {
- "_id": 1,
- "title": "this is my title",
- "author": "bob",
- "posted": ISODate("2004-03-21T18:59:54Z"),
- "pageViews": 5,
- "tags": "fun",
- "comments":
- [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
- "other": {"foo": 5}
- },
- {
- "_id": 1,
- "title": "this is my title",
- "author": "bob",
- "posted": ISODate("2004-03-21T18:59:54Z"),
- "pageViews": 5,
- "tags": "good",
- "comments":
- [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
- "other": {"foo": 5}
- },
- {
- "_id": 1,
- "title": "this is my title",
- "author": "bob",
- "posted": ISODate("2004-03-21T18:59:54Z"),
- "pageViews": 5,
- "tags": "fun",
- "comments":
- [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
- "other": {"foo": 5}
- },
- {
- "_id": 2,
- "title": "this is your title",
- "author": "dave",
- "posted": ISODate("2030-08-08T04:11:10Z"),
- "pageViews": 7,
- "tags": "fun",
- "comments": [
- {"author": "barbara", "text": "this is interesting"},
- {"author": "jenny", "text": "i like to play pinball", "votes": 10}
- ],
- "other": {"bar": 14}
- },
- {
- "_id": 2,
- "title": "this is your title",
- "author": "dave",
- "posted": ISODate("2030-08-08T04:11:10Z"),
- "pageViews": 7,
- "tags": "nasty",
- "comments": [
- {"author": "barbara", "text": "this is interesting"},
- {"author": "jenny", "text": "i like to play pinball", "votes": 10}
- ],
- "other": {"bar": 14}
- },
- {
- "_id": 3,
- "title": "this is some other title",
- "author": "jane",
- "posted": ISODate("2000-12-31T05:17:14Z"),
- "pageViews": 6,
- "tags": "nasty",
- "comments": [
- {"author": "will", "text": "i don't like the color"},
- {"author": "jenny", "text": "can i get that in green?"}
- ],
- "other": {"bar": 14}
- },
- {
- "_id": 3,
- "title": "this is some other title",
- "author": "jane",
- "posted": ISODate("2000-12-31T05:17:14Z"),
- "pageViews": 6,
- "tags": "filthy",
- "comments": [
- {"author": "will", "text": "i don't like the color"},
- {"author": "jenny", "text": "can i get that in green?"}
- ],
- "other": {"bar": 14}
- }
- ];
-
- let firstBatch = u1.cursor.firstBatch;
- assert(arrayEq(firstBatch, u1result), tojson({got: firstBatch, expected: u1result}));
-
- // unwind an array at the end of a dotted path
- testDB.ut.drop();
- assert.writeOK(testDB.ut.insert({_id: 4, a: 1, b: {e: 7, f: [4, 3, 2, 1]}, c: 12, d: 17}));
- let u2 = testDB.runCommand(
- {aggregate: "ut", pipeline: [{$unwind: "$b.f"}, {$sort: {"b.f": -1}}], cursor: {}});
-
- let u2result = [
- {"_id": 4, "a": 1, "b": {"e": 7, "f": 4}, "c": 12, "d": 17},
- {"_id": 4, "a": 1, "b": {"e": 7, "f": 3}, "c": 12, "d": 17},
- {"_id": 4, "a": 1, "b": {"e": 7, "f": 2}, "c": 12, "d": 17},
- {"_id": 4, "a": 1, "b": {"e": 7, "f": 1}, "c": 12, "d": 17}
- ];
-
- assert.docEq(u2.cursor.firstBatch, u2result, 'u2 failed');
-
- // combining a projection with unwinding an array
- let p2 = testDB.runCommand({
- aggregate: "article",
- pipeline: [{$project: {author: 1, tags: 1, pageViews: 1}}, {$unwind: "$tags"}],
- cursor: {}
- });
-
- let p2result = [
- {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"},
- {"_id": 1, "author": "bob", "pageViews": 5, "tags": "good"},
- {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"},
- {"_id": 2, "author": "dave", "pageViews": 7, "tags": "fun"},
- {"_id": 2, "author": "dave", "pageViews": 7, "tags": "nasty"},
- {"_id": 3, "author": "jane", "pageViews": 6, "tags": "nasty"},
- {"_id": 3, "author": "jane", "pageViews": 6, "tags": "filthy"}
- ];
-
- firstBatch = p2.cursor.firstBatch;
- assert(arrayEq(firstBatch, p2result), tojson({got: firstBatch, expected: p2result}));
-
- // pulling values out of subdocuments
- let p3 = testDB.runCommand({
- aggregate: "article",
- pipeline: [{$project: {otherfoo: "$other.foo", otherbar: "$other.bar"}}, {$sort: {_id: 1}}],
- cursor: {}
- });
-
- let p3result =
- [{"_id": 1, "otherfoo": 5}, {"_id": 2, "otherbar": 14}, {"_id": 3, "otherbar": 14}];
-
- assert.docEq(p3.cursor.firstBatch, p3result, 'p3 failed');
-
- // projection includes a computed value
- let p4 = testDB.runCommand({
- aggregate: "article",
- pipeline:
- [{$project: {author: 1, daveWroteIt: {$eq: ["$author", "dave"]}}}, {$sort: {_id: 1}}],
- cursor: {}
- });
-
- let p4result = [
- {"_id": 1, "author": "bob", "daveWroteIt": false},
- {"_id": 2, "author": "dave", "daveWroteIt": true},
- {"_id": 3, "author": "jane", "daveWroteIt": false}
- ];
-
- assert.docEq(p4.cursor.firstBatch, p4result, 'p4 failed');
-
- // projection includes a virtual (fabricated) document
- let p5 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: 1, pageViews: 1, tags: 1}},
- {$unwind: "$tags"},
- {$project: {author: 1, subDocument: {foo: "$pageViews", bar: "$tags"}}}
+"use strict";
+
+// Loads data into the namespace 'aggdb.articles'.
+load('jstests/aggregation/data/articles.js');
+load('jstests/aggregation/extras/utils.js');
+
+const testDB = db.getSiblingDB("aggdb");
+
+// just passing through fields
+let p1 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {tags: 1, pageViews: 1}}, {$sort: {_id: 1}}],
+ cursor: {}
+});
+
+let p1result = [
+ {"_id": 1, "pageViews": 5, "tags": ["fun", "good", "fun"]},
+ {"_id": 2, "pageViews": 7, "tags": ["fun", "nasty"]},
+ {"_id": 3, "pageViews": 6, "tags": ["nasty", "filthy"]}
+];
+
+assert.docEq(p1.cursor.firstBatch, p1result, 'p1 failed');
+
+// a simple array unwinding
+let u1 = testDB.runCommand({aggregate: "article", pipeline: [{$unwind: "$tags"}], cursor: {}});
+
+let u1result = [
+ {
+ "_id": 1,
+ "title": "this is my title",
+ "author": "bob",
+ "posted": ISODate("2004-03-21T18:59:54Z"),
+ "pageViews": 5,
+ "tags": "fun",
+ "comments":
+ [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
+ "other": {"foo": 5}
+ },
+ {
+ "_id": 1,
+ "title": "this is my title",
+ "author": "bob",
+ "posted": ISODate("2004-03-21T18:59:54Z"),
+ "pageViews": 5,
+ "tags": "good",
+ "comments":
+ [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
+ "other": {"foo": 5}
+ },
+ {
+ "_id": 1,
+ "title": "this is my title",
+ "author": "bob",
+ "posted": ISODate("2004-03-21T18:59:54Z"),
+ "pageViews": 5,
+ "tags": "fun",
+ "comments":
+ [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
+ "other": {"foo": 5}
+ },
+ {
+ "_id": 2,
+ "title": "this is your title",
+ "author": "dave",
+ "posted": ISODate("2030-08-08T04:11:10Z"),
+ "pageViews": 7,
+ "tags": "fun",
+ "comments": [
+ {"author": "barbara", "text": "this is interesting"},
+ {"author": "jenny", "text": "i like to play pinball", "votes": 10}
],
- cursor: {}
- });
-
- let p5result = [
- {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}},
- {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "good"}},
- {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}},
- {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "fun"}},
- {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "nasty"}},
- {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "nasty"}},
- {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "filthy"}}
- ];
-
- firstBatch = p5.cursor.firstBatch;
- assert(arrayEq(firstBatch, p5result), tojson({got: firstBatch, expected: p5result}));
-
- // multi-step aggregate
- // nested expressions in computed fields
- let p6 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: 1, tags: 1, pageViews: 1}},
- {$unwind: "$tags"},
- {
- $project: {
- author: 1,
- tag: "$tags",
- pageViews: 1,
- daveWroteIt: {$eq: ["$author", "dave"]},
- weLikeIt: {$or: [{$eq: ["$author", "dave"]}, {$eq: ["$tags", "good"]}]}
- }
- }
+ "other": {"bar": 14}
+ },
+ {
+ "_id": 2,
+ "title": "this is your title",
+ "author": "dave",
+ "posted": ISODate("2030-08-08T04:11:10Z"),
+ "pageViews": 7,
+ "tags": "nasty",
+ "comments": [
+ {"author": "barbara", "text": "this is interesting"},
+ {"author": "jenny", "text": "i like to play pinball", "votes": 10}
],
- cursor: {}
- });
-
- let p6result = [
- {
- "_id": 1,
- "author": "bob",
- "pageViews": 5,
- "tag": "fun",
- "daveWroteIt": false,
- "weLikeIt": false
- },
- {
- "_id": 1,
- "author": "bob",
- "pageViews": 5,
- "tag": "good",
- "daveWroteIt": false,
- "weLikeIt": true
- },
- {
- "_id": 1,
- "author": "bob",
- "pageViews": 5,
- "tag": "fun",
- "daveWroteIt": false,
- "weLikeIt": false
- },
- {
- "_id": 2,
- "author": "dave",
- "pageViews": 7,
- "tag": "fun",
- "daveWroteIt": true,
- "weLikeIt": true
- },
- {
- "_id": 2,
- "author": "dave",
- "pageViews": 7,
- "tag": "nasty",
- "daveWroteIt": true,
- "weLikeIt": true
- },
- {
- "_id": 3,
- "author": "jane",
- "pageViews": 6,
- "tag": "nasty",
- "daveWroteIt": false,
- "weLikeIt": false
- },
+ "other": {"bar": 14}
+ },
+ {
+ "_id": 3,
+ "title": "this is some other title",
+ "author": "jane",
+ "posted": ISODate("2000-12-31T05:17:14Z"),
+ "pageViews": 6,
+ "tags": "nasty",
+ "comments": [
+ {"author": "will", "text": "i don't like the color"},
+ {"author": "jenny", "text": "can i get that in green?"}
+ ],
+ "other": {"bar": 14}
+ },
+ {
+ "_id": 3,
+ "title": "this is some other title",
+ "author": "jane",
+ "posted": ISODate("2000-12-31T05:17:14Z"),
+ "pageViews": 6,
+ "tags": "filthy",
+ "comments": [
+ {"author": "will", "text": "i don't like the color"},
+ {"author": "jenny", "text": "can i get that in green?"}
+ ],
+ "other": {"bar": 14}
+ }
+];
+
+let firstBatch = u1.cursor.firstBatch;
+assert(arrayEq(firstBatch, u1result), tojson({got: firstBatch, expected: u1result}));
+
+// unwind an array at the end of a dotted path
+testDB.ut.drop();
+assert.writeOK(testDB.ut.insert({_id: 4, a: 1, b: {e: 7, f: [4, 3, 2, 1]}, c: 12, d: 17}));
+let u2 = testDB.runCommand(
+ {aggregate: "ut", pipeline: [{$unwind: "$b.f"}, {$sort: {"b.f": -1}}], cursor: {}});
+
+let u2result = [
+ {"_id": 4, "a": 1, "b": {"e": 7, "f": 4}, "c": 12, "d": 17},
+ {"_id": 4, "a": 1, "b": {"e": 7, "f": 3}, "c": 12, "d": 17},
+ {"_id": 4, "a": 1, "b": {"e": 7, "f": 2}, "c": 12, "d": 17},
+ {"_id": 4, "a": 1, "b": {"e": 7, "f": 1}, "c": 12, "d": 17}
+];
+
+assert.docEq(u2.cursor.firstBatch, u2result, 'u2 failed');
+
+// combining a projection with unwinding an array
+let p2 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {author: 1, tags: 1, pageViews: 1}}, {$unwind: "$tags"}],
+ cursor: {}
+});
+
+let p2result = [
+ {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"},
+ {"_id": 1, "author": "bob", "pageViews": 5, "tags": "good"},
+ {"_id": 1, "author": "bob", "pageViews": 5, "tags": "fun"},
+ {"_id": 2, "author": "dave", "pageViews": 7, "tags": "fun"},
+ {"_id": 2, "author": "dave", "pageViews": 7, "tags": "nasty"},
+ {"_id": 3, "author": "jane", "pageViews": 6, "tags": "nasty"},
+ {"_id": 3, "author": "jane", "pageViews": 6, "tags": "filthy"}
+];
+
+firstBatch = p2.cursor.firstBatch;
+assert(arrayEq(firstBatch, p2result), tojson({got: firstBatch, expected: p2result}));
+
+// pulling values out of subdocuments
+let p3 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {otherfoo: "$other.foo", otherbar: "$other.bar"}}, {$sort: {_id: 1}}],
+ cursor: {}
+});
+
+let p3result = [{"_id": 1, "otherfoo": 5}, {"_id": 2, "otherbar": 14}, {"_id": 3, "otherbar": 14}];
+
+assert.docEq(p3.cursor.firstBatch, p3result, 'p3 failed');
+
+// projection includes a computed value
+let p4 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {author: 1, daveWroteIt: {$eq: ["$author", "dave"]}}}, {$sort: {_id: 1}}],
+ cursor: {}
+});
+
+let p4result = [
+ {"_id": 1, "author": "bob", "daveWroteIt": false},
+ {"_id": 2, "author": "dave", "daveWroteIt": true},
+ {"_id": 3, "author": "jane", "daveWroteIt": false}
+];
+
+assert.docEq(p4.cursor.firstBatch, p4result, 'p4 failed');
+
+// projection includes a virtual (fabricated) document
+let p5 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: 1, pageViews: 1, tags: 1}},
+ {$unwind: "$tags"},
+ {$project: {author: 1, subDocument: {foo: "$pageViews", bar: "$tags"}}}
+ ],
+ cursor: {}
+});
+
+let p5result = [
+ {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}},
+ {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "good"}},
+ {"_id": 1, "author": "bob", "subDocument": {"foo": 5, "bar": "fun"}},
+ {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "fun"}},
+ {"_id": 2, "author": "dave", "subDocument": {"foo": 7, "bar": "nasty"}},
+ {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "nasty"}},
+ {"_id": 3, "author": "jane", "subDocument": {"foo": 6, "bar": "filthy"}}
+];
+
+firstBatch = p5.cursor.firstBatch;
+assert(arrayEq(firstBatch, p5result), tojson({got: firstBatch, expected: p5result}));
+
+// multi-step aggregate
+// nested expressions in computed fields
+let p6 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: 1, tags: 1, pageViews: 1}},
+ {$unwind: "$tags"},
{
- "_id": 3,
- "author": "jane",
- "pageViews": 6,
- "tag": "filthy",
- "daveWroteIt": false,
- "weLikeIt": false
+ $project: {
+ author: 1,
+ tag: "$tags",
+ pageViews: 1,
+ daveWroteIt: {$eq: ["$author", "dave"]},
+ weLikeIt: {$or: [{$eq: ["$author", "dave"]}, {$eq: ["$tags", "good"]}]}
+ }
}
- ];
-
- firstBatch = p6.cursor.firstBatch;
- assert(arrayEq(firstBatch, p6result), tojson({got: firstBatch, expected: p6result}));
-
- // slightly more complex computed expression; $ifNull
- let p7 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {theSum: {$add: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}}},
- {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p6result = [
+ {
+ "_id": 1,
+ "author": "bob",
+ "pageViews": 5,
+ "tag": "fun",
+ "daveWroteIt": false,
+ "weLikeIt": false
+ },
+ {
+ "_id": 1,
+ "author": "bob",
+ "pageViews": 5,
+ "tag": "good",
+ "daveWroteIt": false,
+ "weLikeIt": true
+ },
+ {
+ "_id": 1,
+ "author": "bob",
+ "pageViews": 5,
+ "tag": "fun",
+ "daveWroteIt": false,
+ "weLikeIt": false
+ },
+ {
+ "_id": 2,
+ "author": "dave",
+ "pageViews": 7,
+ "tag": "fun",
+ "daveWroteIt": true,
+ "weLikeIt": true
+ },
+ {
+ "_id": 2,
+ "author": "dave",
+ "pageViews": 7,
+ "tag": "nasty",
+ "daveWroteIt": true,
+ "weLikeIt": true
+ },
+ {
+ "_id": 3,
+ "author": "jane",
+ "pageViews": 6,
+ "tag": "nasty",
+ "daveWroteIt": false,
+ "weLikeIt": false
+ },
+ {
+ "_id": 3,
+ "author": "jane",
+ "pageViews": 6,
+ "tag": "filthy",
+ "daveWroteIt": false,
+ "weLikeIt": false
+ }
+];
+
+firstBatch = p6.cursor.firstBatch;
+assert(arrayEq(firstBatch, p6result), tojson({got: firstBatch, expected: p6result}));
+
+// slightly more complex computed expression; $ifNull
+let p7 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {theSum: {$add: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}}},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p7result = [{"_id": 1, "theSum": 10}, {"_id": 2, "theSum": 21}, {"_id": 3, "theSum": 20}];
+
+assert.docEq(p7.cursor.firstBatch, p7result, 'p7 failed');
+
+// dotted path inclusion; _id exclusion
+let p8 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {_id: 0, author: 1, tags: 1, "comments.author": 1}}, {$unwind: "$tags"}],
+ cursor: {}
+});
+
+let p8result = [
+ {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]},
+ {"author": "bob", "tags": "good", "comments": [{"author": "joe"}, {"author": "sam"}]},
+ {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]},
+ {"author": "dave", "tags": "fun", "comments": [{"author": "barbara"}, {"author": "jenny"}]},
+ {"author": "dave", "tags": "nasty", "comments": [{"author": "barbara"}, {"author": "jenny"}]},
+ {"author": "jane", "tags": "nasty", "comments": [{"author": "will"}, {"author": "jenny"}]},
+ {"author": "jane", "tags": "filthy", "comments": [{"author": "will"}, {"author": "jenny"}]}
+];
+
+firstBatch = p8.cursor.firstBatch;
+assert(arrayEq(firstBatch, p8result), tojson({got: firstBatch, expected: p8result}));
+
+// collapse a dotted path with an intervening array
+let p9 = testDB.runCommand({
+ aggregate: "article",
+ pipeline:
+ [{$project: {_id: 0, author: 1, commentsAuthor: "$comments.author"}}, {$sort: {author: 1}}],
+ cursor: {}
+});
+
+let p9result = [
+ {"author": "bob", "commentsAuthor": ["joe", "sam"]},
+ {"author": "dave", "commentsAuthor": ["barbara", "jenny"]},
+ {"author": "jane", "commentsAuthor": ["will", "jenny"]}
+];
+
+assert.docEq(p9.cursor.firstBatch, p9result, 'p9 failed');
+
+// simple sort
+let p10 = testDB.runCommand({aggregate: "article", pipeline: [{$sort: {title: 1}}], cursor: {}});
+
+let p10result = [
+ {
+ "_id": 1,
+ "title": "this is my title",
+ "author": "bob",
+ "posted": ISODate("2004-03-21T18:59:54Z"),
+ "pageViews": 5,
+ "tags": ["fun", "good", "fun"],
+ "comments":
+ [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
+ "other": {"foo": 5}
+ },
+ {
+ "_id": 3,
+ "title": "this is some other title",
+ "author": "jane",
+ "posted": ISODate("2000-12-31T05:17:14Z"),
+ "pageViews": 6,
+ "tags": ["nasty", "filthy"],
+ "comments": [
+ {"author": "will", "text": "i don't like the color"},
+ {"author": "jenny", "text": "can i get that in green?"}
],
- cursor: {}
- });
-
- let p7result = [{"_id": 1, "theSum": 10}, {"_id": 2, "theSum": 21}, {"_id": 3, "theSum": 20}];
-
- assert.docEq(p7.cursor.firstBatch, p7result, 'p7 failed');
-
- // dotted path inclusion; _id exclusion
- let p8 = testDB.runCommand({
- aggregate: "article",
- pipeline:
- [{$project: {_id: 0, author: 1, tags: 1, "comments.author": 1}}, {$unwind: "$tags"}],
- cursor: {}
- });
-
- let p8result = [
- {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]},
- {"author": "bob", "tags": "good", "comments": [{"author": "joe"}, {"author": "sam"}]},
- {"author": "bob", "tags": "fun", "comments": [{"author": "joe"}, {"author": "sam"}]},
- {"author": "dave", "tags": "fun", "comments": [{"author": "barbara"}, {"author": "jenny"}]},
+ "other": {"bar": 14}
+ },
+ {
+ "_id": 2,
+ "title": "this is your title",
+ "author": "dave",
+ "posted": ISODate("2030-08-08T04:11:10Z"),
+ "pageViews": 7,
+ "tags": ["fun", "nasty"],
+ "comments": [
+ {"author": "barbara", "text": "this is interesting"},
+ {"author": "jenny", "text": "i like to play pinball", "votes": 10}
+ ],
+ "other": {"bar": 14}
+ }
+];
+
+assert.docEq(p10.cursor.firstBatch, p10result, 'p10 failed');
+
+// unwind on nested array
+testDB.p11.drop();
+testDB.p11.save({
+ _id: 5,
+ name: 'MongoDB',
+ items: {authors: ['jay', 'vivek', 'bjornar'], dbg: [17, 42]},
+ favorites: ['pickles', 'ice cream', 'kettle chips']
+});
+
+let p11 = testDB.runCommand({
+ aggregate: "p11",
+ pipeline: [
+ {$unwind: "$items.authors"},
+ {$project: {name: 1, author: "$items.authors"}},
+ {$sort: {author: 1}}
+
+ ],
+ cursor: {}
+});
+
+let p11result = [
+ {"_id": 5, "name": "MongoDB", "author": "bjornar"},
+ {"_id": 5, "name": "MongoDB", "author": "jay"},
+ {"_id": 5, "name": "MongoDB", "author": "vivek"},
+];
+
+assert.docEq(p11.cursor.firstBatch, p11result, 'p11 failed');
+
+// multiply test
+let p12 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "author": "dave",
- "tags": "nasty",
- "comments": [{"author": "barbara"}, {"author": "jenny"}]
+ $project:
+ {theProduct: {$multiply: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}},
},
- {"author": "jane", "tags": "nasty", "comments": [{"author": "will"}, {"author": "jenny"}]},
- {
- "author": "jane",
- "tags": "filthy",
- "comments": [{"author": "will"}, {"author": "jenny"}]
- }
- ];
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
- firstBatch = p8.cursor.firstBatch;
- assert(arrayEq(firstBatch, p8result), tojson({got: firstBatch, expected: p8result}));
+let p12result =
+ [{"_id": 1, "theProduct": 25}, {"_id": 2, "theProduct": 98}, {"_id": 3, "theProduct": 84}];
- // collapse a dotted path with an intervening array
- let p9 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {_id: 0, author: 1, commentsAuthor: "$comments.author"}},
- {$sort: {author: 1}}
- ],
- cursor: {}
- });
-
- let p9result = [
- {"author": "bob", "commentsAuthor": ["joe", "sam"]},
- {"author": "dave", "commentsAuthor": ["barbara", "jenny"]},
- {"author": "jane", "commentsAuthor": ["will", "jenny"]}
- ];
-
- assert.docEq(p9.cursor.firstBatch, p9result, 'p9 failed');
+assert.docEq(p12.cursor.firstBatch, p12result, 'p12 failed');
- // simple sort
- let p10 =
- testDB.runCommand({aggregate: "article", pipeline: [{$sort: {title: 1}}], cursor: {}});
-
- let p10result = [
+// subtraction test
+let p13 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 1,
- "title": "this is my title",
- "author": "bob",
- "posted": ISODate("2004-03-21T18:59:54Z"),
- "pageViews": 5,
- "tags": ["fun", "good", "fun"],
- "comments":
- [{"author": "joe", "text": "this is cool"}, {"author": "sam", "text": "this is bad"}],
- "other": {"foo": 5}
+ $project: {
+ theDifference:
+ {$subtract: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}
+ }
},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p13result = [
+ {"_id": 1, "theDifference": 0},
+ {"_id": 2, "theDifference": -7},
+ {"_id": 3, "theDifference": -8}
+];
+
+assert.docEq(p13.cursor.firstBatch, p13result, 'p13 failed');
+
+// mod test
+let p14 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 3,
- "title": "this is some other title",
- "author": "jane",
- "posted": ISODate("2000-12-31T05:17:14Z"),
- "pageViews": 6,
- "tags": ["nasty", "filthy"],
- "comments": [
- {"author": "will", "text": "i don't like the color"},
- {"author": "jenny", "text": "can i get that in green?"}
- ],
- "other": {"bar": 14}
+ $project: {
+ theRemainder: {
+ $mod: [
+ {$ifNull: ["$other.foo", "$other.bar"]},
+ "$pageViews",
+ ]
+ }
+ }
},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p14result =
+ [{"_id": 1, "theRemainder": 0}, {"_id": 2, "theRemainder": 0}, {"_id": 3, "theRemainder": 2}];
+
+assert.docEq(p14.cursor.firstBatch, p14result, 'p14 failed');
+
+// toUpper test
+let p15 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [{$project: {author: {$toUpper: "$author"}, pageViews: 1}}, {$sort: {_id: 1}}],
+ cursor: {}
+});
+
+let p15result = [
+ {"_id": 1, "author": "BOB", "pageViews": 5},
+ {"_id": 2, "author": "DAVE", "pageViews": 7},
+ {"_id": 3, "author": "JANE", "pageViews": 6}
+];
+
+assert.docEq(p15.cursor.firstBatch, p15result, 'p15 failed');
+
+// toLower test
+let p16 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: {$toUpper: "$author"}, pageViews: 1}},
+ {$project: {author: {$toLower: "$author"}, pageViews: 1}},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p16result = [
+ {
+ "_id": 1,
+ "author": "bob",
+ "pageViews": 5,
+ },
+ {
+ "_id": 2,
+ "author": "dave",
+ "pageViews": 7,
+ },
+ {
+ "_id": 3,
+ "author": "jane",
+ "pageViews": 6,
+ }
+];
+
+assert.docEq(p16.cursor.firstBatch, p16result, 'p16 failed');
+
+// substr test
+let p17 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 2,
- "title": "this is your title",
- "author": "dave",
- "posted": ISODate("2030-08-08T04:11:10Z"),
- "pageViews": 7,
- "tags": ["fun", "nasty"],
- "comments": [
- {"author": "barbara", "text": "this is interesting"},
- {"author": "jenny", "text": "i like to play pinball", "votes": 10}
- ],
- "other": {"bar": 14}
- }
- ];
-
- assert.docEq(p10.cursor.firstBatch, p10result, 'p10 failed');
-
- // unwind on nested array
- testDB.p11.drop();
- testDB.p11.save({
- _id: 5,
- name: 'MongoDB',
- items: {authors: ['jay', 'vivek', 'bjornar'], dbg: [17, 42]},
- favorites: ['pickles', 'ice cream', 'kettle chips']
- });
-
- let p11 = testDB.runCommand({
- aggregate: "p11",
- pipeline: [
- {$unwind: "$items.authors"},
- {$project: {name: 1, author: "$items.authors"}},
- {$sort: {author: 1}}
+ $project: {
+ author: {$substrBytes: ["$author", 1, 2]},
+ }
+ },
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
- ],
- cursor: {}
- });
-
- let p11result = [
- {"_id": 5, "name": "MongoDB", "author": "bjornar"},
- {"_id": 5, "name": "MongoDB", "author": "jay"},
- {"_id": 5, "name": "MongoDB", "author": "vivek"},
- ];
-
- assert.docEq(p11.cursor.firstBatch, p11result, 'p11 failed');
-
- // multiply test
- let p12 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- theProduct: {$multiply: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}
- },
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let p12result =
- [{"_id": 1, "theProduct": 25}, {"_id": 2, "theProduct": 98}, {"_id": 3, "theProduct": 84}];
-
- assert.docEq(p12.cursor.firstBatch, p12result, 'p12 failed');
-
- // subtraction test
- let p13 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- theDifference:
- {$subtract: ["$pageViews", {$ifNull: ["$other.foo", "$other.bar"]}]}
- }
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let p13result = [
- {"_id": 1, "theDifference": 0},
- {"_id": 2, "theDifference": -7},
- {"_id": 3, "theDifference": -8}
- ];
-
- assert.docEq(p13.cursor.firstBatch, p13result, 'p13 failed');
-
- // mod test
- let p14 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- theRemainder: {
- $mod: [
- {$ifNull: ["$other.foo", "$other.bar"]},
- "$pageViews",
- ]
- }
- }
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let p14result = [
- {"_id": 1, "theRemainder": 0},
- {"_id": 2, "theRemainder": 0},
- {"_id": 3, "theRemainder": 2}
- ];
-
- assert.docEq(p14.cursor.firstBatch, p14result, 'p14 failed');
-
- // toUpper test
- let p15 = testDB.runCommand({
- aggregate: "article",
- pipeline: [{$project: {author: {$toUpper: "$author"}, pageViews: 1}}, {$sort: {_id: 1}}],
- cursor: {}
- });
-
- let p15result = [
- {"_id": 1, "author": "BOB", "pageViews": 5},
- {"_id": 2, "author": "DAVE", "pageViews": 7},
- {"_id": 3, "author": "JANE", "pageViews": 6}
- ];
-
- assert.docEq(p15.cursor.firstBatch, p15result, 'p15 failed');
-
- // toLower test
- let p16 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: {$toUpper: "$author"}, pageViews: 1}},
- {$project: {author: {$toLower: "$author"}, pageViews: 1}},
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
+let p17result =
+ [{"_id": 1, "author": "ob"}, {"_id": 2, "author": "av"}, {"_id": 3, "author": "an"}];
- let p16result = [
- {
- "_id": 1,
- "author": "bob",
- "pageViews": 5,
- },
- {
- "_id": 2,
- "author": "dave",
- "pageViews": 7,
- },
- {
- "_id": 3,
- "author": "jane",
- "pageViews": 6,
- }
- ];
-
- assert.docEq(p16.cursor.firstBatch, p16result, 'p16 failed');
-
- // substr test
- let p17 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- author: {$substrBytes: ["$author", 1, 2]},
- }
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let p17result =
- [{"_id": 1, "author": "ob"}, {"_id": 2, "author": "av"}, {"_id": 3, "author": "an"}];
-
- assert.docEq(p17.cursor.firstBatch, p17result, 'p17 failed');
-
- // strcasecmp test
- let p18 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- tags: 1,
- thisisalametest: {$strcasecmp: ["foo", "bar"]},
- thisisalamepass: {$strcasecmp: ["foo", "foo"]}
- }
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let p18result = [
- {"_id": 1, "tags": ["fun", "good", "fun"], "thisisalametest": 1, "thisisalamepass": 0},
- {"_id": 2, "tags": ["fun", "nasty"], "thisisalametest": 1, "thisisalamepass": 0},
- {"_id": 3, "tags": ["nasty", "filthy"], "thisisalametest": 1, "thisisalamepass": 0}
- ];
-
- assert.docEq(p18.cursor.firstBatch, p18result, 'p18 failed');
-
- // date tests
- let p19 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- authors: 1,
- posted: 1,
- seconds: {$second: "$posted"},
- minutes: {$minute: "$posted"},
- hour: {$hour: "$posted"},
- dayOfYear: {$dayOfYear: "$posted"},
- dayOfMonth: {$dayOfMonth: "$posted"},
- dayOfWeek: {$dayOfWeek: "$posted"},
- month: {$month: "$posted"},
- week: {$week: "$posted"},
- year: {$year: "$posted"}
- }
- },
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
+assert.docEq(p17.cursor.firstBatch, p17result, 'p17 failed');
- let p19result = [
+// strcasecmp test
+let p18 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 1,
- "posted": ISODate("2004-03-21T18:59:54Z"),
- "seconds": 54,
- "minutes": 59,
- "hour": 18,
- "dayOfYear": 81,
- "dayOfMonth": 21,
- "dayOfWeek": 1,
- "month": 3,
- "week": 12,
- "year": 2004,
+ $project: {
+ tags: 1,
+ thisisalametest: {$strcasecmp: ["foo", "bar"]},
+ thisisalamepass: {$strcasecmp: ["foo", "foo"]}
+ }
},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p18result = [
+ {"_id": 1, "tags": ["fun", "good", "fun"], "thisisalametest": 1, "thisisalamepass": 0},
+ {"_id": 2, "tags": ["fun", "nasty"], "thisisalametest": 1, "thisisalamepass": 0},
+ {"_id": 3, "tags": ["nasty", "filthy"], "thisisalametest": 1, "thisisalamepass": 0}
+];
+
+assert.docEq(p18.cursor.firstBatch, p18result, 'p18 failed');
+
+// date tests
+let p19 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 2,
- "posted": ISODate("2030-08-08T04:11:10Z"),
- "seconds": 10,
- "minutes": 11,
- "hour": 4,
- "dayOfYear": 220,
- "dayOfMonth": 8,
- "dayOfWeek": 5,
- "month": 8,
- "week": 31,
- "year": 2030,
+ $project: {
+ authors: 1,
+ posted: 1,
+ seconds: {$second: "$posted"},
+ minutes: {$minute: "$posted"},
+ hour: {$hour: "$posted"},
+ dayOfYear: {$dayOfYear: "$posted"},
+ dayOfMonth: {$dayOfMonth: "$posted"},
+ dayOfWeek: {$dayOfWeek: "$posted"},
+ month: {$month: "$posted"},
+ week: {$week: "$posted"},
+ year: {$year: "$posted"}
+ }
},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let p19result = [
+ {
+ "_id": 1,
+ "posted": ISODate("2004-03-21T18:59:54Z"),
+ "seconds": 54,
+ "minutes": 59,
+ "hour": 18,
+ "dayOfYear": 81,
+ "dayOfMonth": 21,
+ "dayOfWeek": 1,
+ "month": 3,
+ "week": 12,
+ "year": 2004,
+ },
+ {
+ "_id": 2,
+ "posted": ISODate("2030-08-08T04:11:10Z"),
+ "seconds": 10,
+ "minutes": 11,
+ "hour": 4,
+ "dayOfYear": 220,
+ "dayOfMonth": 8,
+ "dayOfWeek": 5,
+ "month": 8,
+ "week": 31,
+ "year": 2030,
+ },
+ {
+ "_id": 3,
+ "posted": ISODate("2000-12-31T05:17:14Z"),
+ "seconds": 14,
+ "minutes": 17,
+ "hour": 5,
+ "dayOfYear": 366,
+ "dayOfMonth": 31,
+ "dayOfWeek": 1,
+ "month": 12,
+ "week": 53,
+ "year": 2000,
+ }
+];
+
+assert.docEq(p19.cursor.firstBatch, p19result, 'p19 failed');
+
+testDB.lettype.drop();
+testDB.lettype.save({x: 17, y: "foo"});
+
+// ternary conditional operator
+let p21 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": 3,
- "posted": ISODate("2000-12-31T05:17:14Z"),
- "seconds": 14,
- "minutes": 17,
- "hour": 5,
- "dayOfYear": 366,
- "dayOfMonth": 31,
- "dayOfWeek": 1,
- "month": 12,
- "week": 53,
- "year": 2000,
- }
- ];
-
- assert.docEq(p19.cursor.firstBatch, p19result, 'p19 failed');
-
- testDB.lettype.drop();
- testDB.lettype.save({x: 17, y: "foo"});
-
- // ternary conditional operator
- let p21 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- _id: 0,
- author: 1,
- pageViews: {
- $cond: [
- {$eq: ["$author", "dave"]},
- {$add: ["$pageViews", 1000]},
- "$pageViews"
- ]
- }
- }
- },
- {$sort: {author: 1}}
- ],
- cursor: {}
- });
-
- let p21result = [
- {"author": "bob", "pageViews": 5},
- {"author": "dave", "pageViews": 1007},
- {"author": "jane", "pageViews": 6}
- ];
-
- assert.docEq(p21.cursor.firstBatch, p21result, 'p21 failed');
-
- // simple matching
- let m1 = testDB.runCommand(
- {aggregate: "article", pipeline: [{$match: {author: "dave"}}], cursor: {}});
-
- let m1result = [{
+ $project: {
+ _id: 0,
+ author: 1,
+ pageViews: {
+ $cond:
+ [{$eq: ["$author", "dave"]}, {$add: ["$pageViews", 1000]}, "$pageViews"]
+ }
+ }
+ },
+ {$sort: {author: 1}}
+ ],
+ cursor: {}
+});
+
+let p21result = [
+ {"author": "bob", "pageViews": 5},
+ {"author": "dave", "pageViews": 1007},
+ {"author": "jane", "pageViews": 6}
+];
+
+assert.docEq(p21.cursor.firstBatch, p21result, 'p21 failed');
+
+// simple matching
+let m1 =
+ testDB.runCommand({aggregate: "article", pipeline: [{$match: {author: "dave"}}], cursor: {}});
+
+let m1result = [{
+ "_id": 2,
+ "title": "this is your title",
+ "author": "dave",
+ "posted": ISODate("2030-08-08T04:11:10Z"),
+ "pageViews": 7,
+ "tags": ["fun", "nasty"],
+ "comments": [
+ {"author": "barbara", "text": "this is interesting"},
+ {"author": "jenny", "text": "i like to play pinball", "votes": 10}
+ ],
+ "other": {"bar": 14}
+}];
+
+assert.docEq(m1.cursor.firstBatch, m1result, 'm1 failed');
+
+// combining matching with a projection
+let m2 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {title: 1, author: 1, pageViews: 1, tags: 1, comments: 1}},
+ {$unwind: "$tags"},
+ {$match: {tags: "nasty"}},
+ {$sort: {_id: 1}}
+ ],
+ cursor: {}
+});
+
+let m2result = [
+ {
"_id": 2,
"title": "this is your title",
"author": "dave",
- "posted": ISODate("2030-08-08T04:11:10Z"),
"pageViews": 7,
- "tags": ["fun", "nasty"],
+ "tags": "nasty",
"comments": [
{"author": "barbara", "text": "this is interesting"},
{"author": "jenny", "text": "i like to play pinball", "votes": 10}
- ],
- "other": {"bar": 14}
- }];
-
- assert.docEq(m1.cursor.firstBatch, m1result, 'm1 failed');
-
- // combining matching with a projection
- let m2 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {title: 1, author: 1, pageViews: 1, tags: 1, comments: 1}},
- {$unwind: "$tags"},
- {$match: {tags: "nasty"}},
- {$sort: {_id: 1}}
- ],
- cursor: {}
- });
-
- let m2result = [
+ ]
+ },
+ {
+ "_id": 3,
+ "title": "this is some other title",
+ "author": "jane",
+ "pageViews": 6,
+ "tags": "nasty",
+ "comments": [
+ {"author": "will", "text": "i don't like the color"},
+ {"author": "jenny", "text": "can i get that in green?"}
+ ]
+ }
+];
+
+assert.docEq(m2.cursor.firstBatch, m2result, 'm2 failed');
+
+// group by tag, _id is a field reference
+let g1 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: 1, tags: 1, pageViews: 1}},
+ {$unwind: "$tags"},
+ {$group: {_id: "$tags", docsByTag: {$sum: 1}, viewsByTag: {$sum: "$pageViews"}}},
+ {$sort: {'_id': 1}}
+ ],
+ cursor: {}
+});
+
+let g1result = [
+ {"_id": "filthy", "docsByTag": 1, "viewsByTag": 6},
+ {"_id": "fun", "docsByTag": 3, "viewsByTag": 17},
+ {"_id": "good", "docsByTag": 1, "viewsByTag": 5},
+ {"_id": "nasty", "docsByTag": 2, "viewsByTag": 13},
+];
+
+assert.docEq(g1.cursor.firstBatch, g1result, 'g1 failed');
+
+// $max, and averaging in a final projection; _id is structured
+let g2 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: 1, tags: 1, pageViews: 1}},
+ {$unwind: "$tags"},
{
- "_id": 2,
- "title": "this is your title",
- "author": "dave",
- "pageViews": 7,
- "tags": "nasty",
- "comments": [
- {"author": "barbara", "text": "this is interesting"},
- {"author": "jenny", "text": "i like to play pinball", "votes": 10}
- ]
+ $group: {
+ _id: {tags: "$tags"},
+ docsByTag: {$sum: 1},
+ viewsByTag: {$sum: "$pageViews"},
+ mostViewsByTag: {$max: "$pageViews"},
+ }
},
{
- "_id": 3,
- "title": "this is some other title",
- "author": "jane",
- "pageViews": 6,
- "tags": "nasty",
- "comments": [
- {"author": "will", "text": "i don't like the color"},
- {"author": "jenny", "text": "can i get that in green?"}
- ]
- }
- ];
-
- assert.docEq(m2.cursor.firstBatch, m2result, 'm2 failed');
-
- // group by tag, _id is a field reference
- let g1 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: 1, tags: 1, pageViews: 1}},
- {$unwind: "$tags"},
- {$group: {_id: "$tags", docsByTag: {$sum: 1}, viewsByTag: {$sum: "$pageViews"}}},
- {$sort: {'_id': 1}}
- ],
- cursor: {}
- });
-
- let g1result = [
- {"_id": "filthy", "docsByTag": 1, "viewsByTag": 6},
- {"_id": "fun", "docsByTag": 3, "viewsByTag": 17},
- {"_id": "good", "docsByTag": 1, "viewsByTag": 5},
- {"_id": "nasty", "docsByTag": 2, "viewsByTag": 13},
- ];
-
- assert.docEq(g1.cursor.firstBatch, g1result, 'g1 failed');
-
- // $max, and averaging in a final projection; _id is structured
- let g2 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: 1, tags: 1, pageViews: 1}},
- {$unwind: "$tags"},
- {
- $group: {
- _id: {tags: "$tags"},
- docsByTag: {$sum: 1},
- viewsByTag: {$sum: "$pageViews"},
- mostViewsByTag: {$max: "$pageViews"},
- }
- },
- {
- $project: {
- _id: false,
- tag: "$_id.tags",
- mostViewsByTag: 1,
- docsByTag: 1,
- viewsByTag: 1,
- avgByTag: {$divide: ["$viewsByTag", "$docsByTag"]}
- }
- },
- {$sort: {'docsByTag': 1, 'viewsByTag': 1}}
- ],
- cursor: {}
- });
-
- let g2result = [
- {"docsByTag": 1, "viewsByTag": 5, "mostViewsByTag": 5, "tag": "good", "avgByTag": 5},
- {"docsByTag": 1, "viewsByTag": 6, "mostViewsByTag": 6, "tag": "filthy", "avgByTag": 6},
- {"docsByTag": 2, "viewsByTag": 13, "mostViewsByTag": 7, "tag": "nasty", "avgByTag": 6.5},
+ $project: {
+ _id: false,
+ tag: "$_id.tags",
+ mostViewsByTag: 1,
+ docsByTag: 1,
+ viewsByTag: 1,
+ avgByTag: {$divide: ["$viewsByTag", "$docsByTag"]}
+ }
+ },
+ {$sort: {'docsByTag': 1, 'viewsByTag': 1}}
+ ],
+ cursor: {}
+});
+
+let g2result = [
+ {"docsByTag": 1, "viewsByTag": 5, "mostViewsByTag": 5, "tag": "good", "avgByTag": 5},
+ {"docsByTag": 1, "viewsByTag": 6, "mostViewsByTag": 6, "tag": "filthy", "avgByTag": 6},
+ {"docsByTag": 2, "viewsByTag": 13, "mostViewsByTag": 7, "tag": "nasty", "avgByTag": 6.5},
+ {
+ "docsByTag": 3,
+ "viewsByTag": 17,
+ "mostViewsByTag": 7,
+ "tag": "fun",
+ "avgByTag": 5.666666666666667
+ }
+];
+
+assert.docEq(g2.cursor.firstBatch, g2result, 'g2 failed');
+
+// $push as an accumulator; can pivot data
+let g3 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "docsByTag": 3,
- "viewsByTag": 17,
- "mostViewsByTag": 7,
- "tag": "fun",
- "avgByTag": 5.666666666666667
- }
- ];
-
- assert.docEq(g2.cursor.firstBatch, g2result, 'g2 failed');
-
- // $push as an accumulator; can pivot data
- let g3 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- author: 1,
- tags: 1,
- }
- },
- {$unwind: "$tags"},
- {$sort: {author: 1}},
- {$group: {_id: {tags: "$tags"}, authors: {$push: "$author"}}},
- {$sort: {'_id': 1}}
- ],
- cursor: {}
- });
-
- let g3result = [
- {"_id": {"tags": "filthy"}, "authors": ["jane"]},
- {"_id": {"tags": "fun"}, "authors": ["bob", "bob", "dave"]},
- {"_id": {"tags": "good"}, "authors": ["bob"]},
- {"_id": {"tags": "nasty"}, "authors": ["dave", "jane"]}
- ];
-
- assert.docEq(g3.cursor.firstBatch, g3result, 'g3 failed');
-
- // $avg, and averaging in a final projection
- let g4 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$project: {author: 1, tags: 1, pageViews: 1}},
- {$unwind: "$tags"},
- {
- $group: {
- _id: {tags: "$tags"},
- docsByTag: {$sum: 1},
- viewsByTag: {$sum: "$pageViews"},
- avgByTag: {$avg: "$pageViews"},
- }
- },
- {$sort: {'_id': 1}}
- ],
- cursor: {}
- });
-
- let g4result = [
- {"_id": {"tags": "filthy"}, "docsByTag": 1, "viewsByTag": 6, "avgByTag": 6},
- {"_id": {"tags": "fun"}, "docsByTag": 3, "viewsByTag": 17, "avgByTag": 5.666666666666667},
- {"_id": {"tags": "good"}, "docsByTag": 1, "viewsByTag": 5, "avgByTag": 5},
- {"_id": {"tags": "nasty"}, "docsByTag": 2, "viewsByTag": 13, "avgByTag": 6.5}
- ];
-
- assert.docEq(g4.cursor.firstBatch, g4result, 'g4 failed');
-
- // $addToSet as an accumulator; can pivot data
- let g5 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {
- $project: {
- author: 1,
- tags: 1,
- }
- },
- {$unwind: "$tags"},
- {$group: {_id: {tags: "$tags"}, authors: {$addToSet: "$author"}}},
- {$sort: {'_id': 1}}
- ],
- cursor: {}
- });
-
- // $addToSet doesn't guarantee order so we shouldn't test for it.
- g5.cursor.firstBatch.forEach(function(obj) {
- obj.authors.sort();
- });
-
- let g5result = [
- {"_id": {"tags": "filthy"}, "authors": ["jane"]},
+ $project: {
+ author: 1,
+ tags: 1,
+ }
+ },
+ {$unwind: "$tags"},
+ {$sort: {author: 1}},
+ {$group: {_id: {tags: "$tags"}, authors: {$push: "$author"}}},
+ {$sort: {'_id': 1}}
+ ],
+ cursor: {}
+});
+
+let g3result = [
+ {"_id": {"tags": "filthy"}, "authors": ["jane"]},
+ {"_id": {"tags": "fun"}, "authors": ["bob", "bob", "dave"]},
+ {"_id": {"tags": "good"}, "authors": ["bob"]},
+ {"_id": {"tags": "nasty"}, "authors": ["dave", "jane"]}
+];
+
+assert.docEq(g3.cursor.firstBatch, g3result, 'g3 failed');
+
+// $avg, and averaging in a final projection
+let g4 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$project: {author: 1, tags: 1, pageViews: 1}},
+ {$unwind: "$tags"},
{
- "_id": {"tags": "fun"},
- "authors": [
- "bob",
- "dave",
- ]
+ $group: {
+ _id: {tags: "$tags"},
+ docsByTag: {$sum: 1},
+ viewsByTag: {$sum: "$pageViews"},
+ avgByTag: {$avg: "$pageViews"},
+ }
},
- {"_id": {"tags": "good"}, "authors": ["bob"]},
+ {$sort: {'_id': 1}}
+ ],
+ cursor: {}
+});
+
+let g4result = [
+ {"_id": {"tags": "filthy"}, "docsByTag": 1, "viewsByTag": 6, "avgByTag": 6},
+ {"_id": {"tags": "fun"}, "docsByTag": 3, "viewsByTag": 17, "avgByTag": 5.666666666666667},
+ {"_id": {"tags": "good"}, "docsByTag": 1, "viewsByTag": 5, "avgByTag": 5},
+ {"_id": {"tags": "nasty"}, "docsByTag": 2, "viewsByTag": 13, "avgByTag": 6.5}
+];
+
+assert.docEq(g4.cursor.firstBatch, g4result, 'g4 failed');
+
+// $addToSet as an accumulator; can pivot data
+let g5 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
{
- "_id": {"tags": "nasty"},
- "authors": [
- "dave",
- "jane",
- ]
- }
- ];
-
- assert.docEq(g5.cursor.firstBatch, g5result, 'g5 failed');
-
- // $first and $last accumulators, constant _id
- let g6 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$sort: {author: -1}},
- {
- $group: {
- _id: "authors", /* constant string, *not* a field reference */
- firstAuthor: {$last: "$author"}, /* note reverse sort above */
- lastAuthor: {$first: "$author"}, /* note reverse sort above */
- count: {$sum: 1}
- }
+ $project: {
+ author: 1,
+ tags: 1,
}
- ],
- cursor: {}
- });
-
- let g6result = [{"_id": "authors", firstAuthor: "bob", lastAuthor: "jane", count: 3}];
-
- // Test unwind on an unused field
- let g7 = testDB.runCommand({
- aggregate: "article",
- pipeline: [
- {$unwind: '$tags'},
- {
- $group: {
- _id: "tag_count", /* constant string, *not* a field reference */
- count: {$sum: 1}
- }
+ },
+ {$unwind: "$tags"},
+ {$group: {_id: {tags: "$tags"}, authors: {$addToSet: "$author"}}},
+ {$sort: {'_id': 1}}
+ ],
+ cursor: {}
+});
+
+// $addToSet doesn't guarantee order so we shouldn't test for it.
+g5.cursor.firstBatch.forEach(function(obj) {
+ obj.authors.sort();
+});
+
+let g5result = [
+ {"_id": {"tags": "filthy"}, "authors": ["jane"]},
+ {
+ "_id": {"tags": "fun"},
+ "authors": [
+ "bob",
+ "dave",
+ ]
+ },
+ {"_id": {"tags": "good"}, "authors": ["bob"]},
+ {
+ "_id": {"tags": "nasty"},
+ "authors": [
+ "dave",
+ "jane",
+ ]
+ }
+];
+
+assert.docEq(g5.cursor.firstBatch, g5result, 'g5 failed');
+
+// $first and $last accumulators, constant _id
+let g6 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$sort: {author: -1}},
+ {
+ $group: {
+ _id: "authors", /* constant string, *not* a field reference */
+ firstAuthor: {$last: "$author"}, /* note reverse sort above */
+ lastAuthor: {$first: "$author"}, /* note reverse sort above */
+ count: {$sum: 1}
}
- ],
- cursor: {}
- });
- assert.eq(g7.cursor.firstBatch[0].count, 7);
+ }
+ ],
+ cursor: {}
+});
+
+let g6result = [{"_id": "authors", firstAuthor: "bob", lastAuthor: "jane", count: 3}];
+assert.docEq(g6.cursor.firstBatch, g6result, 'g6 failed');
+
+// Test unwind on an unused field
+let g7 = testDB.runCommand({
+ aggregate: "article",
+ pipeline: [
+ {$unwind: '$tags'},
+ {
+ $group: {
+ _id: "tag_count", /* constant string, *not* a field reference */
+ count: {$sum: 1}
+ }
+ }
+ ],
+ cursor: {}
+});
+assert.eq(g7.cursor.firstBatch[0].count, 7);
}());
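
The hunk above reformats the accumulator tests ($push, $avg, $addToSet, $first/$last, and a bare $unwind count). For reference, a minimal standalone sketch of the same pivot pattern; the collection name and data here are illustrative, not part of the commit:

// Pivot authors by tag: $push keeps duplicates in encounter order, while
// $addToSet deduplicates but does not guarantee any order.
const pivot = db.accumulator_sketch;
pivot.drop();
assert.writeOK(pivot.insert({author: "bob", tags: ["fun", "good"]}));
assert.writeOK(pivot.insert({author: "dave", tags: ["fun"]}));
const byTag = pivot.aggregate([
    {$unwind: "$tags"},
    {$group: {_id: "$tags", all: {$push: "$author"}, uniq: {$addToSet: "$author"}}},
    {$sort: {_id: 1}}
]).toArray();
// Sort before comparing, since accumulator order is not guaranteed here.
assert.eq(["bob", "dave"], byTag[0].all.sort());  // _id: "fun"
assert.eq(["bob"], byTag[1].uniq);                // _id: "good"
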
diff --git a/jstests/aggregation/testutils.js b/jstests/aggregation/testutils.js
index f4c5c1e296a..33c681d5dd9 100644
--- a/jstests/aggregation/testutils.js
+++ b/jstests/aggregation/testutils.js
@@ -1,143 +1,143 @@
// Tests the test utilities themselves.
(function() {
- load("jstests/aggregation/extras/utils.js");
-
- const verbose = false;
-
- const example = [
- {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun", "good"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]}
- ];
-
- assert(arrayEq(example, example, verbose));
- assert(resultsEq(example, example, verbose));
-
- const exampleDifferentOrder = [
- {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun", "good"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
- ];
-
- assert(resultsEq(exampleDifferentOrder, example, verbose));
- assert(resultsEq(example, exampleDifferentOrder, verbose));
- assert(!orderedArrayEq(example, exampleDifferentOrder, verbose));
-
- const exampleFewerEntries = [
- {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]}
- ];
-
- assert(!resultsEq(example, exampleFewerEntries, verbose));
- assert(!resultsEq(exampleFewerEntries, example, verbose));
-
- const exampleNoIds = [
- {pageViews: 5, tags: ["fun", "good"]},
- {pageViews: 7, tags: ["fun", "nasty"]},
- {pageViews: 6, tags: ["nasty", "filthy"]}
- ];
-
- assert(!resultsEq(example, exampleNoIds, verbose));
- assert(!resultsEq(exampleNoIds, example, verbose));
-
- const exampleMissingTags = [
- {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
- {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["filthy"]}
- ];
-
- assert(!resultsEq(example, exampleMissingTags, verbose));
- assert(!resultsEq(exampleMissingTags, example, verbose));
-
- const exampleDifferentIds = [
- {_id: 0, pageViews: 5, tags: ["fun", "good"]},
- {_id: 1, pageViews: 7, tags: ["fun", "nasty"]},
- {_id: 2, pageViews: 6, tags: ["nasty", "filthy"]}
- ];
- assert(resultsEq(example, exampleDifferentIds));
- assert(resultsEq(exampleDifferentIds, example));
-
- // Test using a custom comparator.
- assert(customDocumentEq({
- left: {a: 1, b: 3},
- right: {a: "ignore", b: 3},
- verbose: verbose,
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+load("jstests/aggregation/extras/utils.js");
+
+const verbose = false;
+
+const example = [
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun", "good"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]}
+];
+
+assert(arrayEq(example, example, verbose));
+assert(resultsEq(example, example, verbose));
+
+const exampleDifferentOrder = [
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun", "good"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
+];
+
+assert(resultsEq(exampleDifferentOrder, example, verbose));
+assert(resultsEq(example, exampleDifferentOrder, verbose));
+assert(!orderedArrayEq(example, exampleDifferentOrder, verbose));
+
+const exampleFewerEntries = [
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["nasty", "filthy"]}
+];
+
+assert(!resultsEq(example, exampleFewerEntries, verbose));
+assert(!resultsEq(exampleFewerEntries, example, verbose));
+
+const exampleNoIds = [
+ {pageViews: 5, tags: ["fun", "good"]},
+ {pageViews: 7, tags: ["fun", "nasty"]},
+ {pageViews: 6, tags: ["nasty", "filthy"]}
+];
+
+assert(!resultsEq(example, exampleNoIds, verbose));
+assert(!resultsEq(exampleNoIds, example, verbose));
+
+const exampleMissingTags = [
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066d"), pageViews: 5, tags: ["fun"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066e"), pageViews: 7, tags: ["fun", "nasty"]},
+ {_id: ObjectId("4dc07fedd8420ab8d0d4066f"), pageViews: 6, tags: ["filthy"]}
+];
+
+assert(!resultsEq(example, exampleMissingTags, verbose));
+assert(!resultsEq(exampleMissingTags, example, verbose));
+
+const exampleDifferentIds = [
+ {_id: 0, pageViews: 5, tags: ["fun", "good"]},
+ {_id: 1, pageViews: 7, tags: ["fun", "nasty"]},
+ {_id: 2, pageViews: 6, tags: ["nasty", "filthy"]}
+];
+assert(resultsEq(example, exampleDifferentIds));
+assert(resultsEq(exampleDifferentIds, example));
+
+// Test using a custom comparator.
+assert(customDocumentEq({
+ left: {a: 1, b: 3},
+ right: {a: "ignore", b: 3},
+ verbose: verbose,
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
- assert(!customDocumentEq({
- left: {a: 1, b: 3},
- right: {a: 3, b: 3},
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+ return l == r;
+ }
+}));
+assert(!customDocumentEq({
+ left: {a: 1, b: 3},
+ right: {a: 3, b: 3},
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
-
- // Test using a custom comparator with arrays.
- assert(customDocumentEq({
- left: {a: [1, 2], b: 3},
- right: {a: [2, "ignore"], b: 3},
- verbose: verbose,
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+ return l == r;
+ }
+}));
+
+// Test using a custom comparator with arrays.
+assert(customDocumentEq({
+ left: {a: [1, 2], b: 3},
+ right: {a: [2, "ignore"], b: 3},
+ verbose: verbose,
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
- assert(!customDocumentEq({
- left: {a: [1, 2], b: 3},
- right: {a: [3, "ignore"], b: 3},
- verbose: verbose,
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+ return l == r;
+ }
+}));
+assert(!customDocumentEq({
+ left: {a: [1, 2], b: 3},
+ right: {a: [3, "ignore"], b: 3},
+ verbose: verbose,
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
-
- // Test using a custom comparator with arrays of objects.
- assert(customDocumentEq({
- left: {a: [{b: 1}, {b: 2}, {b: 3}]},
- right: {a: [{b: "ignore"}, {b: 2}, {b: 3}]},
- verbose: verbose,
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+ return l == r;
+ }
+}));
+
+// Test using a custom comparator with arrays of objects.
+assert(customDocumentEq({
+ left: {a: [{b: 1}, {b: 2}, {b: 3}]},
+ right: {a: [{b: "ignore"}, {b: 2}, {b: 3}]},
+ verbose: verbose,
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
- assert(!customDocumentEq({
- left: {a: [{b: 1}, {b: 2}, {b: 1}]},
- right: {a: [{b: "ignore"}, {b: 2}, {b: 3}]},
- verbose: verbose,
- valueComparator: (l, r) => {
- if (l == "ignore" || r == "ignore") {
- return true;
- }
- return l == r;
+ return l == r;
+ }
+}));
+assert(!customDocumentEq({
+ left: {a: [{b: 1}, {b: 2}, {b: 1}]},
+ right: {a: [{b: "ignore"}, {b: 2}, {b: 3}]},
+ verbose: verbose,
+ valueComparator: (l, r) => {
+ if (l == "ignore" || r == "ignore") {
+ return true;
}
- }));
-
- assert(!anyEq(5, [5], verbose));
- assert(!anyEq([5], 5, verbose));
- assert(!anyEq("5", 5, verbose));
- assert(!anyEq(5, "5", verbose));
-
- assert(arrayEq([{c: 6}, [5], [4, 5], 2, undefined, 3, null, 4, 5],
- [undefined, null, 2, 3, 4, 5, {c: 6}, [4, 5], [5]],
- verbose));
-
- assert(arrayEq([undefined, null, 2, 3, 4, 5, {c: 6}, [4, 5], [5]],
- [{c: 6}, [5], [4, 5], 2, undefined, 3, null, 4, 5],
- verbose));
+ return l == r;
+ }
+}));
+
+assert(!anyEq(5, [5], verbose));
+assert(!anyEq([5], 5, verbose));
+assert(!anyEq("5", 5, verbose));
+assert(!anyEq(5, "5", verbose));
+
+assert(arrayEq([{c: 6}, [5], [4, 5], 2, undefined, 3, null, 4, 5],
+ [undefined, null, 2, 3, 4, 5, {c: 6}, [4, 5], [5]],
+ verbose));
+
+assert(arrayEq([undefined, null, 2, 3, 4, 5, {c: 6}, [4, 5], [5]],
+ [{c: 6}, [5], [4, 5], 2, undefined, 3, null, 4, 5],
+ verbose));
}());
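
The self-tests above pin down the semantics of the comparison helpers. A minimal usage sketch, assuming the same extras/utils.js helpers:

load("jstests/aggregation/extras/utils.js");

// resultsEq is insensitive to document order and to _id values, unlike
// orderedArrayEq.
assert(resultsEq([{_id: 1, a: 1}, {_id: 2, a: 2}], [{_id: 9, a: 2}, {_id: 8, a: 1}]));

// A custom valueComparator can treat a sentinel value as a wildcard.
assert(customDocumentEq({
    left: {a: 1, b: 2},
    right: {a: "ignore", b: 2},
    valueComparator: (l, r) => l == "ignore" || r == "ignore" || l == r
}));
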
diff --git a/jstests/aggregation/use_query_project_and_sort.js b/jstests/aggregation/use_query_project_and_sort.js
index 4d3c4a7a45a..191b4d78d3f 100644
--- a/jstests/aggregation/use_query_project_and_sort.js
+++ b/jstests/aggregation/use_query_project_and_sort.js
@@ -6,64 +6,55 @@
// in $facet stages:
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
+load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
- const coll = db.use_query_project_and_sort;
- coll.drop();
+const coll = db.use_query_project_and_sort;
+coll.drop();
- const bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < 100; ++i) {
- bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
- }
- assert.writeOK(bulk.execute());
+const bulk = coll.initializeUnorderedBulkOp();
+for (let i = 0; i < 100; ++i) {
+ bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
+}
+assert.writeOK(bulk.execute());
- function assertQueryCoversProjectionAndSort(pipeline) {
- const explainOutput = coll.explain().aggregate(pipeline);
- assert(isQueryPlan(explainOutput));
- assert(!planHasStage(db, explainOutput, "FETCH"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a FETCH stage in the explain output: " +
- tojson(explainOutput));
- assert(!planHasStage(db, explainOutput, "SORT"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a SORT stage in the explain output: " +
- tojson(explainOutput));
- assert(planHasStage(db, explainOutput, "IXSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include an index scan in the explain output: " + tojson(explainOutput));
- assert(!hasRejectedPlans(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- return explainOutput;
- }
+function assertQueryCoversProjectionAndSort(pipeline) {
+ const explainOutput = coll.explain().aggregate(pipeline);
+ assert(isQueryPlan(explainOutput));
+ assert(!planHasStage(db, explainOutput, "FETCH"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a FETCH stage in the explain output: " + tojson(explainOutput));
+ assert(!planHasStage(db, explainOutput, "SORT"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a SORT stage in the explain output: " + tojson(explainOutput));
+ assert(planHasStage(db, explainOutput, "IXSCAN"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include an index scan in the explain output: " + tojson(explainOutput));
+ assert(!hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
+ return explainOutput;
+}
- assert.commandWorked(coll.createIndex({x: 1, a: -1, _id: 1}));
+assert.commandWorked(coll.createIndex({x: 1, a: -1, _id: 1}));
- // Test that a pipeline requiring a subset of the fields in a compound index can use that index
- // to cover the query.
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: 1}}, {$project: {_id: 0, x: 1}}]);
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: 1}}, {$project: {_id: 1, x: 1}}]);
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: -1, a: 1}}, {$project: {_id: 1, x: 1}}]);
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1}}]);
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1, x: 1}}]);
- assertQueryCoversProjectionAndSort(
- [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1, a: 1}}]);
- assertQueryCoversProjectionAndSort([
- {$match: {x: "string"}},
- {$sort: {x: 1, a: -1, _id: 1}},
- {$project: {_id: 0, a: 1, x: 1}}
- ]);
- assertQueryCoversProjectionAndSort([
- {$match: {x: "string"}},
- {$sort: {x: 1, a: -1, _id: 1}},
- {$project: {_id: 1, x: 1, a: 1}}
- ]);
+// Test that a pipeline requiring a subset of the fields in a compound index can use that index
+// to cover the query.
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1}}, {$project: {_id: 0, x: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1}}, {$project: {_id: 1, x: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: -1, a: 1}}, {$project: {_id: 1, x: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1, x: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1, a: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 0, a: 1, x: 1}}]);
+assertQueryCoversProjectionAndSort(
+ [{$match: {x: "string"}}, {$sort: {x: 1, a: -1, _id: 1}}, {$project: {_id: 1, x: 1, a: 1}}]);
}());
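
These assertions all reduce to one check: the index alone supplies the match, the sort order, and every projected field. A condensed sketch of that check, with an illustrative collection name, assuming the analyze_plan.js helpers and the optimizer behavior these tests rely on:

load("jstests/libs/analyze_plan.js");

const covered = db.covered_sketch;
covered.drop();
assert.commandWorked(covered.createIndex({x: 1, a: -1, _id: 1}));
assert.writeOK(covered.insert({_id: 0, x: "string", a: 0}));

const explainOutput = covered.explain().aggregate(
    [{$match: {x: "string"}}, {$sort: {x: 1, a: -1}}, {$project: {_id: 0, x: 1, a: 1}}]);
assert(planHasStage(db, explainOutput, "IXSCAN"));  // index supplies keys and order
assert(!planHasStage(db, explainOutput, "FETCH"));  // no document fetch: covered
assert(!planHasStage(db, explainOutput, "SORT"));   // no blocking sort: index order
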
diff --git a/jstests/aggregation/use_query_projection.js b/jstests/aggregation/use_query_projection.js
index e86a357b782..dccc24f58b9 100644
--- a/jstests/aggregation/use_query_projection.js
+++ b/jstests/aggregation/use_query_projection.js
@@ -6,122 +6,114 @@
// consider an index scan, so the pipelines cannot be wrapped in facet stages.
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
+load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
- const coll = db.use_query_projection;
- coll.drop();
+const coll = db.use_query_projection;
+coll.drop();
- const bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < 100; ++i) {
- bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
- }
- assert.writeOK(bulk.execute());
+const bulk = coll.initializeUnorderedBulkOp();
+for (let i = 0; i < 100; ++i) {
+ bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
+}
+assert.writeOK(bulk.execute());
- function assertQueryCoversProjection({pipeline = [], pipelineOptimizedAway = true} = {}) {
- const explainOutput = coll.explain().aggregate(pipeline);
+function assertQueryCoversProjection({pipeline = [], pipelineOptimizedAway = true} = {}) {
+ const explainOutput = coll.explain().aggregate(pipeline);
- if (pipelineOptimizedAway) {
- assert(isQueryPlan(explainOutput));
- assert(!planHasStage(db, explainOutput, "FETCH"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a FETCH stage in the explain output: " +
- tojson(explainOutput));
- assert(planHasStage(db, explainOutput, "IXSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include an index scan in the explain output: " + tojson(explainOutput));
- } else {
- assert(isAggregationPlan(explainOutput));
- assert(!aggPlanHasStage(explainOutput, "FETCH"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a FETCH stage in the explain output: " +
- tojson(explainOutput));
- assert(aggPlanHasStage(explainOutput, "IXSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include an index scan in the explain output: " + tojson(explainOutput));
- }
- assert(!hasRejectedPlans(explainOutput),
+ if (pipelineOptimizedAway) {
+ assert(isQueryPlan(explainOutput));
+ assert(
+ !planHasStage(db, explainOutput, "FETCH"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a FETCH stage in the explain output: " + tojson(explainOutput));
+ assert(planHasStage(db, explainOutput, "IXSCAN"),
"Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- return explainOutput;
+ " to include an index scan in the explain output: " + tojson(explainOutput));
+ } else {
+ assert(isAggregationPlan(explainOutput));
+ assert(
+ !aggPlanHasStage(explainOutput, "FETCH"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a FETCH stage in the explain output: " + tojson(explainOutput));
+ assert(aggPlanHasStage(explainOutput, "IXSCAN"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include an index scan in the explain output: " + tojson(explainOutput));
}
+ assert(!hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
+ return explainOutput;
+}
- function assertQueryDoesNotCoverProjection({pipeline = [], pipelineOptimizedAway = true} = {}) {
- const explainOutput = coll.explain().aggregate(pipeline);
+function assertQueryDoesNotCoverProjection({pipeline = [], pipelineOptimizedAway = true} = {}) {
+ const explainOutput = coll.explain().aggregate(pipeline);
- if (pipelineOptimizedAway) {
- assert(isQueryPlan(explainOutput));
- assert(planHasStage(db, explainOutput, "FETCH") || aggPlanHasStage("COLLSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include a FETCH or COLLSCAN stage in the explain output: " +
- tojson(explainOutput));
- assert(!hasRejectedPlans(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- } else {
- assert(isAggregationPlan(explainOutput));
- assert(aggPlanHasStage(explainOutput, "FETCH") || aggPlanHasStage("COLLSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include a FETCH or COLLSCAN stage in the explain output: " +
- tojson(explainOutput));
- assert(!hasRejectedPlans(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- }
-
- return explainOutput;
+ if (pipelineOptimizedAway) {
+ assert(isQueryPlan(explainOutput));
+        assert(planHasStage(db, explainOutput, "FETCH") || planHasStage(db, explainOutput, "COLLSCAN"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include a FETCH or COLLSCAN stage in the explain output: " +
+ tojson(explainOutput));
+ assert(
+ !hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
+ } else {
+ assert(isAggregationPlan(explainOutput));
+        assert(aggPlanHasStage(explainOutput, "FETCH") || aggPlanHasStage(explainOutput, "COLLSCAN"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include a FETCH or COLLSCAN stage in the explain output: " +
+ tojson(explainOutput));
+ assert(
+ !hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
}
- assert.commandWorked(coll.createIndex({x: 1, a: -1, _id: 1}));
+ return explainOutput;
+}
+
+assert.commandWorked(coll.createIndex({x: 1, a: -1, _id: 1}));
- // Test that a pipeline requiring a subset of the fields in a compound index can use that index
- // to cover the query.
- assertQueryCoversProjection({pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1}}]});
- assertQueryCoversProjection({pipeline: [{$match: {x: "string"}}, {$project: {_id: 0, x: 1}}]});
- assertQueryCoversProjection(
- {pipeline: [{$match: {x: "string"}}, {$project: {_id: 0, x: 1, a: 1}}]});
- assertQueryCoversProjection(
- {pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}]});
- assertQueryCoversProjection(
- {pipeline: [{$match: {_id: 0, x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}]});
+// Test that a pipeline requiring a subset of the fields in a compound index can use that index
+// to cover the query.
+assertQueryCoversProjection({pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1}}]});
+assertQueryCoversProjection({pipeline: [{$match: {x: "string"}}, {$project: {_id: 0, x: 1}}]});
+assertQueryCoversProjection(
+ {pipeline: [{$match: {x: "string"}}, {$project: {_id: 0, x: 1, a: 1}}]});
+assertQueryCoversProjection(
+ {pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}]});
+assertQueryCoversProjection(
+ {pipeline: [{$match: {_id: 0, x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}]});
- // Test that a pipeline requiring a field that is not in the index cannot use a covered plan.
- assertQueryDoesNotCoverProjection({
- pipeline: [{$match: {x: "string"}}, {$project: {notThere: 1}}],
- pipelineOptimizedAway: false
- });
+// Test that a pipeline requiring a field that is not in the index cannot use a covered plan.
+assertQueryDoesNotCoverProjection(
+ {pipeline: [{$match: {x: "string"}}, {$project: {notThere: 1}}], pipelineOptimizedAway: false});
- // Test that a covered plan is the only plan considered, even if another plan would be equally
- // selective. Add an equally selective index, then rely on assertQueryCoversProjection() to
- // assert that there is only one considered plan, and it is a covered plan.
- assert.commandWorked(coll.createIndex({x: 1}));
- assertQueryCoversProjection({
- pipeline: [
- {$match: {_id: 0, x: "string"}},
- {
- $sort: {
- x: 1,
- a: 1
- }
- }, // Note: not indexable, but doesn't add any additional dependencies.
- {$project: {_id: 1, x: 1, a: 1}},
- ],
- pipelineOptimizedAway: false
- });
+// Test that a covered plan is the only plan considered, even if another plan would be equally
+// selective. Add an equally selective index, then rely on assertQueryCoversProjection() to
+// assert that there is only one considered plan, and it is a covered plan.
+assert.commandWorked(coll.createIndex({x: 1}));
+assertQueryCoversProjection({
+ pipeline: [
+ {$match: {_id: 0, x: "string"}},
+ {$sort: {x: 1, a: 1}}, // Note: not indexable, but doesn't add any additional dependencies.
+ {$project: {_id: 1, x: 1, a: 1}},
+ ],
+ pipelineOptimizedAway: false
+});
- // Test that a multikey index will prevent a covered plan.
- assert.commandWorked(coll.dropIndex({x: 1})); // Make sure there is only one plan considered.
- assert.writeOK(coll.insert({x: ["an", "array!"]}));
- assertQueryDoesNotCoverProjection({
- pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1}}],
- pipelineOptimizedAway: false
- });
- assertQueryDoesNotCoverProjection({
- pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}],
- pipelineOptimizedAway: false
- });
+// Test that a multikey index will prevent a covered plan.
+assert.commandWorked(coll.dropIndex({x: 1})); // Make sure there is only one plan considered.
+assert.writeOK(coll.insert({x: ["an", "array!"]}));
+assertQueryDoesNotCoverProjection({
+ pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1}}],
+ pipelineOptimizedAway: false
+});
+assertQueryDoesNotCoverProjection({
+ pipeline: [{$match: {x: "string"}}, {$project: {_id: 1, x: 1, a: 1}}],
+ pipelineOptimizedAway: false
+});
}());
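
The last hunk hinges on multikey-ness: once any indexed value is an array, index entries are stored per array element, so the index can no longer reconstruct the stored field and a covered plan is off the table. A sketch of just that transition, with illustrative names and the same helpers assumed:

load("jstests/libs/analyze_plan.js");

const mk = db.multikey_sketch;
mk.drop();
assert.commandWorked(mk.createIndex({x: 1}));

assert.writeOK(mk.insert({x: "scalar"}));          // index not yet multikey
assert.writeOK(mk.insert({x: ["an", "array!"]}));  // now multikey

const explainOutput =
    mk.explain().aggregate([{$match: {x: "scalar"}}, {$project: {_id: 0, x: 1}}]);
// The projection can no longer be covered; expect a FETCH (or a COLLSCAN).
assert(aggPlanHasStage(explainOutput, "FETCH") || aggPlanHasStage(explainOutput, "COLLSCAN"));
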
diff --git a/jstests/aggregation/use_query_sort.js b/jstests/aggregation/use_query_sort.js
index 26542252ff4..af9338be79e 100644
--- a/jstests/aggregation/use_query_sort.js
+++ b/jstests/aggregation/use_query_sort.js
@@ -5,80 +5,76 @@
// in $facet stages:
// @tags: [do_not_wrap_aggregations_in_facets]
(function() {
- "use strict";
+"use strict";
- load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
+load("jstests/libs/analyze_plan.js"); // For 'aggPlanHasStage' and other explain helpers.
- const coll = db.use_query_sort;
- coll.drop();
+const coll = db.use_query_sort;
+coll.drop();
- const bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < 100; ++i) {
- bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
- }
- assert.writeOK(bulk.execute());
+const bulk = coll.initializeUnorderedBulkOp();
+for (let i = 0; i < 100; ++i) {
+ bulk.insert({_id: i, x: "string", a: -i, y: i % 2});
+}
+assert.writeOK(bulk.execute());
- function assertHasNonBlockingQuerySort(pipeline) {
- const explainOutput = coll.explain().aggregate(pipeline);
- assert(isQueryPlan(explainOutput));
- assert(!planHasStage(db, explainOutput, "SORT"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a SORT stage in the explain output: " +
- tojson(explainOutput));
- assert(planHasStage(db, explainOutput, "IXSCAN"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include an index scan in the explain output: " + tojson(explainOutput));
- assert(!hasRejectedPlans(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- return explainOutput;
- }
+function assertHasNonBlockingQuerySort(pipeline) {
+ const explainOutput = coll.explain().aggregate(pipeline);
+ assert(isQueryPlan(explainOutput));
+ assert(!planHasStage(db, explainOutput, "SORT"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a SORT stage in the explain output: " + tojson(explainOutput));
+ assert(planHasStage(db, explainOutput, "IXSCAN"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include an index scan in the explain output: " + tojson(explainOutput));
+ assert(!hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
+ return explainOutput;
+}
- function assertDoesNotHaveQuerySort(pipeline) {
- const explainOutput = coll.explain().aggregate(pipeline);
- assert(isAggregationPlan(explainOutput));
- assert(aggPlanHasStage(explainOutput, "$sort"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " to include a $sort stage in the explain output: " + tojson(explainOutput));
- assert(!aggPlanHasStage(explainOutput, "SORT"),
- "Expected pipeline " + tojsononeline(pipeline) +
- " *not* to include a SORT stage in the explain output: " +
- tojson(explainOutput));
- assert(!hasRejectedPlans(explainOutput),
- "Expected pipeline " + tojsononeline(pipeline) +
- " not to have any rejected plans in the explain output: " +
- tojson(explainOutput));
- return explainOutput;
- }
+function assertDoesNotHaveQuerySort(pipeline) {
+ const explainOutput = coll.explain().aggregate(pipeline);
+ assert(isAggregationPlan(explainOutput));
+ assert(aggPlanHasStage(explainOutput, "$sort"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " to include a $sort stage in the explain output: " + tojson(explainOutput));
+ assert(!aggPlanHasStage(explainOutput, "SORT"),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " *not* to include a SORT stage in the explain output: " + tojson(explainOutput));
+ assert(!hasRejectedPlans(explainOutput),
+ "Expected pipeline " + tojsononeline(pipeline) +
+ " not to have any rejected plans in the explain output: " + tojson(explainOutput));
+ return explainOutput;
+}
- // Test that a sort on the _id can use the query system to provide the sort.
- assertHasNonBlockingQuerySort([{$sort: {_id: -1}}]);
- assertHasNonBlockingQuerySort([{$sort: {_id: 1}}]);
- assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {_id: 1}}]);
- assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {_id: -1}}]);
+// Test that a sort on the _id can use the query system to provide the sort.
+assertHasNonBlockingQuerySort([{$sort: {_id: -1}}]);
+assertHasNonBlockingQuerySort([{$sort: {_id: 1}}]);
+assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {_id: 1}}]);
+assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {_id: -1}}]);
- // Test that a sort on a field not in any index cannot use a query system sort, and thus still
- // has a $sort stage.
- assertDoesNotHaveQuerySort([{$sort: {x: -1}}]);
- assertDoesNotHaveQuerySort([{$sort: {x: 1}}]);
- assertDoesNotHaveQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {x: 1}}]);
+// Test that a sort on a field not in any index cannot use a query system sort, and thus still
+// has a $sort stage.
+assertDoesNotHaveQuerySort([{$sort: {x: -1}}]);
+assertDoesNotHaveQuerySort([{$sort: {x: 1}}]);
+assertDoesNotHaveQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {x: 1}}]);
- assert.commandWorked(coll.createIndex({x: 1, y: -1}));
+assert.commandWorked(coll.createIndex({x: 1, y: -1}));
- assertHasNonBlockingQuerySort([{$sort: {x: 1, y: -1}}]);
- assertHasNonBlockingQuerySort([{$sort: {x: 1}}]);
- assertDoesNotHaveQuerySort([{$sort: {y: 1}}]);
- assertDoesNotHaveQuerySort([{$sort: {x: 1, y: 1}}]);
+assertHasNonBlockingQuerySort([{$sort: {x: 1, y: -1}}]);
+assertHasNonBlockingQuerySort([{$sort: {x: 1}}]);
+assertDoesNotHaveQuerySort([{$sort: {y: 1}}]);
+assertDoesNotHaveQuerySort([{$sort: {x: 1, y: 1}}]);
- // Test that a $match on a field not present in the same index eligible to provide a sort can
- // still result in a index scan on the sort field (SERVER-7568).
- assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {x: 1}}]);
+// Test that a $match on a field not present in the index that is eligible to provide a sort can
+// still result in an index scan on the sort field (SERVER-7568).
+assertHasNonBlockingQuerySort([{$match: {_id: {$gte: 50}}}, {$sort: {x: 1}}]);
- // Test that a sort on the text score does not use the query system to provide the sort, since
- // it would need to be a blocking sort, and we prefer the $sort stage to the query system's sort
- // implementation.
- assert.commandWorked(coll.createIndex({x: "text"}));
- assertDoesNotHaveQuerySort(
- [{$match: {$text: {$search: "test"}}}, {$sort: {key: {$meta: "textScore"}}}]);
+// Test that a sort on the text score does not use the query system to provide the sort, since
+// it would need to be a blocking sort, and we prefer the $sort stage to the query system's sort
+// implementation.
+assert.commandWorked(coll.createIndex({x: "text"}));
+assertDoesNotHaveQuerySort(
+ [{$match: {$text: {$search: "test"}}}, {$sort: {key: {$meta: "textScore"}}}]);
}());
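
The distinction being exercised: a $sort whose pattern matches an index prefix with compatible directions is absorbed into the query layer as a non-blocking index scan; anything else stays an in-memory agg $sort. Condensed, with illustrative names:

load("jstests/libs/analyze_plan.js");

const qs = db.query_sort_sketch;
qs.drop();
assert.commandWorked(qs.createIndex({x: 1, y: -1}));
assert.writeOK(qs.insert({x: 1, y: 1}));

// Matches the index: the sort is free, provided by an index scan.
let explainOutput = qs.explain().aggregate([{$sort: {x: 1, y: -1}}]);
assert(planHasStage(db, explainOutput, "IXSCAN"));
assert(!planHasStage(db, explainOutput, "SORT"));

// {y: 1} is not an index prefix, so the pipeline keeps a $sort stage.
explainOutput = qs.explain().aggregate([{$sort: {y: 1}}]);
assert(aggPlanHasStage(explainOutput, "$sort"));
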
diff --git a/jstests/aggregation/variables/layered_variables.js b/jstests/aggregation/variables/layered_variables.js
index e0e10494b29..95e2d535402 100644
--- a/jstests/aggregation/variables/layered_variables.js
+++ b/jstests/aggregation/variables/layered_variables.js
@@ -1,14 +1,14 @@
// Tests that a pipeline with a blend of variable-using expressions reports correct results.
(function() {
- "use strict";
- const testDB = db.getSiblingDB("layered_variables");
- assert.commandWorked(testDB.dropDatabase());
- const coll = testDB.getCollection("test");
+"use strict";
+const testDB = db.getSiblingDB("layered_variables");
+assert.commandWorked(testDB.dropDatabase());
+const coll = testDB.getCollection("test");
- assert.writeOK(coll.insert({_id: 1, has_permissions: 1, my_array: [2, 3]}));
+assert.writeOK(coll.insert({_id: 1, has_permissions: 1, my_array: [2, 3]}));
- const res = assert.commandWorked(testDB.runCommand({
+const res = assert.commandWorked(testDB.runCommand({
aggregate: "test",
pipeline: [
{
@@ -51,8 +51,7 @@
cursor: {}
}));
- assert.eq(
- {_id: 1, has_permissions: 1, my_array: [2, 3], a: 1, b: 6, c: [2, 3], d: 3000, e: [3, 4]},
- res.cursor.firstBatch[0],
- tojson(res));
+assert.eq({_id: 1, has_permissions: 1, my_array: [2, 3], a: 1, b: 6, c: [2, 3], d: 3000, e: [3, 4]},
+ res.cursor.firstBatch[0],
+ tojson(res));
})();
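
The layering being tested comes from $let scoping: a variable's initializer is evaluated in the enclosing scope, and inner bindings shadow outer ones. A self-contained sketch of that rule, with an illustrative collection and values:

const lv = db.layered_sketch;
lv.drop();
assert.writeOK(lv.insert({_id: 1}));

const out = lv.aggregate([{
    $project: {
        _id: 0,
        v: {
            $let: {
                vars: {a: 1},
                // Inner vars are evaluated in the outer scope, so b captures
                // the outer $$a (1) before a is rebound to 10.
                in : {$let: {vars: {a: 10, b: "$$a"}, in : {$add: ["$$a", "$$b"]}}}
            }
        }
    }
}]).toArray();
assert.eq(11, out[0].v);
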
diff --git a/jstests/aggregation/variables/remove_system_variable.js b/jstests/aggregation/variables/remove_system_variable.js
index 803c826af4f..5dd0cda9525 100644
--- a/jstests/aggregation/variables/remove_system_variable.js
+++ b/jstests/aggregation/variables/remove_system_variable.js
@@ -2,72 +2,68 @@
* Tests for the $$REMOVE system variable.
*/
(function() {
- "use strict";
+"use strict";
- let coll = db[jsTest.name()];
- coll.drop();
+let coll = db[jsTest.name()];
+coll.drop();
- assert.writeOK(coll.insert({_id: 1, a: 2, b: 3}));
- assert.writeOK(coll.insert({_id: 2, a: 3, b: 4}));
- assert.writeOK(coll.insert({_id: 3, a: {b: 98, c: 99}}));
+assert.writeOK(coll.insert({_id: 1, a: 2, b: 3}));
+assert.writeOK(coll.insert({_id: 2, a: 3, b: 4}));
+assert.writeOK(coll.insert({_id: 3, a: {b: 98, c: 99}}));
- let projectStage = {
- $project: {_id: 0, a: 1, b: {$cond: {if: {$eq: ["$b", 4]}, then: "$$REMOVE", else: "$b"}}}
- };
+let projectStage = {
+ $project: {_id: 0, a: 1, b: {$cond: {if: {$eq: ["$b", 4]}, then: "$$REMOVE", else: "$b"}}}
+};
- // Test that we can conditionally remove a field in $project.
- assert.eq([{a: 2, b: 3}], coll.aggregate([{$match: {_id: 1}}, projectStage]).toArray());
- assert.eq([{a: 3}], coll.aggregate([{$match: {_id: 2}}, projectStage]).toArray());
+// Test that we can conditionally remove a field in $project.
+assert.eq([{a: 2, b: 3}], coll.aggregate([{$match: {_id: 1}}, projectStage]).toArray());
+assert.eq([{a: 3}], coll.aggregate([{$match: {_id: 2}}, projectStage]).toArray());
- // Test removal of a nested field, using $project.
- assert.eq([{a: {b: 98}}],
- coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, "a.b": 1}}]).toArray());
- assert.eq(
- [{a: {}}],
- coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, "a.b": "$$REMOVE"}}]).toArray());
- assert.eq(
- [{a: {}}],
- coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, a: {b: "$$REMOVE"}}}]).toArray());
+// Test removal of a nested field, using $project.
+assert.eq([{a: {b: 98}}],
+ coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, "a.b": 1}}]).toArray());
+assert.eq([{a: {}}],
+ coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, "a.b": "$$REMOVE"}}]).toArray());
+assert.eq([{a: {}}],
+ coll.aggregate([{$match: {_id: 3}}, {$project: {_id: 0, a: {b: "$$REMOVE"}}}]).toArray());
- // Test removal of a nested field, using $addFields.
- assert.eq([{_id: 3, a: {c: 99}}],
- coll.aggregate([{$match: {_id: 3}}, {$addFields: {"a.b": "$$REMOVE"}}]).toArray());
+// Test removal of a nested field, using $addFields.
+assert.eq([{_id: 3, a: {c: 99}}],
+ coll.aggregate([{$match: {_id: 3}}, {$addFields: {"a.b": "$$REMOVE"}}]).toArray());
- // Test that any field path following "$$REMOVE" also evaluates to missing.
- assert.eq([{_id: 3}],
- coll.aggregate([{$match: {_id: 3}}, {$addFields: {"a": "$$REMOVE.a.c"}}]).toArray());
+// Test that any field path following "$$REMOVE" also evaluates to missing.
+assert.eq([{_id: 3}],
+ coll.aggregate([{$match: {_id: 3}}, {$addFields: {"a": "$$REMOVE.a.c"}}]).toArray());
- // Test that $$REMOVE can be used together with user-defined variables in a $let.
- assert.eq([{a: {b: 3, d: 4}}],
- coll.aggregate([
- {$match: {_id: 3}},
- {
- $project: {
- _id: 0,
- a: {
- $let: {
- vars: {bar: 3, foo: 4},
- in : {b: "$$bar", c: "$$REMOVE", d: "$$foo"}
- }
- }
- }
- }
- ])
- .toArray());
-
- // Test that $$REMOVE cannot be assigned in a $let.
- assert.commandFailedWithCode(db.runCommand({
- aggregate: coll.getName(),
- cursor: {},
- pipeline: [
+// Test that $$REMOVE can be used together with user-defined variables in a $let.
+assert.eq(
+ [{a: {b: 3, d: 4}}],
+ coll.aggregate([
{$match: {_id: 3}},
- {$project: {_id: 0, a: {$let: {vars: {"REMOVE": 3}, in : {b: "$$REMOVE", c: 2}}}}}
- ]
- }),
- 16867);
+ {
+ $project: {
+ _id: 0,
+ a: {
+ $let: {vars: {bar: 3, foo: 4}, in : {b: "$$bar", c: "$$REMOVE", d: "$$foo"}}
+ }
+ }
+ }
+ ])
+ .toArray());
+
+// Test that $$REMOVE cannot be assigned in a $let.
+assert.commandFailedWithCode(db.runCommand({
+ aggregate: coll.getName(),
+ cursor: {},
+ pipeline: [
+ {$match: {_id: 3}},
+ {$project: {_id: 0, a: {$let: {vars: {"REMOVE": 3}, in : {b: "$$REMOVE", c: 2}}}}}
+ ]
+}),
+ 16867);
- // Test that $$REMOVE, $$CURRENT, $$ROOT, and user-defined variables can all be used together.
- assert.eq(
+// Test that $$REMOVE, $$CURRENT, $$ROOT, and user-defined variables can all be used together.
+assert.eq(
[{a: {b: 3, d: {_id: 1, a: 2, b: 3}, e: {_id: 1, a: 2, b: 3}}}],
coll.aggregate([
{$match: {_id: 1}},