Diffstat (limited to 'jstests/aggregation/sources/merge')
-rw-r--r--  jstests/aggregation/sources/merge/all_modes.js | 556
-rw-r--r--  jstests/aggregation/sources/merge/batch_writes.js | 112
-rw-r--r--  jstests/aggregation/sources/merge/bypass_doc_validation.js | 366
-rw-r--r--  jstests/aggregation/sources/merge/disallowed_in_lookup.js | 76
-rw-r--r--  jstests/aggregation/sources/merge/exchange_explain.js | 308
-rw-r--r--  jstests/aggregation/sources/merge/merge_to_referenced_collection.js | 181
-rw-r--r--  jstests/aggregation/sources/merge/merge_to_same_collection.js | 22
-rw-r--r--  jstests/aggregation/sources/merge/mode_fail_insert.js | 284
-rw-r--r--  jstests/aggregation/sources/merge/mode_keep_existing_insert.js | 723
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_discard.js | 454
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_fail.js | 191
-rw-r--r--  jstests/aggregation/sources/merge/mode_merge_insert.js | 711
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_discard.js | 482
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_fail.js | 158
-rw-r--r--  jstests/aggregation/sources/merge/mode_pipeline_insert.js | 1250
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_discard.js | 387
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_fail.js | 187
-rw-r--r--  jstests/aggregation/sources/merge/mode_replace_insert.js | 393
-rw-r--r--  jstests/aggregation/sources/merge/on_fields_validation.js | 258
-rw-r--r--  jstests/aggregation/sources/merge/requires_unique_index.js | 692
-rw-r--r--  jstests/aggregation/sources/merge/use_cases.js | 167
21 files changed, 3870 insertions(+), 4088 deletions(-)
diff --git a/jstests/aggregation/sources/merge/all_modes.js b/jstests/aggregation/sources/merge/all_modes.js
index 83e0192530f..3854008072c 100644
--- a/jstests/aggregation/sources/merge/all_modes.js
+++ b/jstests/aggregation/sources/merge/all_modes.js
@@ -4,312 +4,290 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- const source = db.all_modes_source;
- const target = db.all_modes_target;
+const source = db.all_modes_source;
+const target = db.all_modes_target;
- (function setup() {
- source.drop();
- target.drop();
+(function setup() {
+ source.drop();
+ target.drop();
- // All tests use the same data in the source collection.
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]));
+ // All tests use the same data in the source collection.
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]));
+})();
- })();
+// Test 'whenMatched=replace whenNotMatched=insert' mode. This is an equivalent of a
+// replacement-style update with upsert=true.
+(function testWhenMatchedReplaceWhenNotMatchedInsert() {
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 40}
+ ]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=insert' mode. This is an equivalent of a
- // replacement-style update with upsert=true.
- (function testWhenMatchedReplaceWhenNotMatchedInsert() {
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 40}
- ]
- });
- })();
+// Test 'whenMatched=replace whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedReplaceWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
- // processed, it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedReplaceWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=replace whenNotMatched=discard' mode. Documents in the target
+// collection without a match in the source collection should not be modified as a result
+// of the merge operation.
+(function testWhenMatchedReplaceWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=replace whenNotMatched=discard' mode. Documents in the target
- // collection without a match in the source collection should not be modified as a result
- // of the merge operation.
- (function testWhenMatchedReplaceWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 3, a: 3, b: "c"}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=fail whenNotMatched=insert' mode. For matched documents the update should
+// be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document with a match.
+(function testWhenMatchedFailWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert(
+ [{_id: 10, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ // Besides ensuring that a DuplicateKey error is raised when we find a matching document,
+ // this test also verifies that this $merge mode does perform an unordered insert and all
+ // documents in the batch without a matching document get inserted into the target
+ // collection. There is a special case when we can bail out early without processing all
+ // documents which fit into a single batch. Namely, if we have a sharded cluster with two
+ // shards, and shard documents by {_id: "hashed"}, we will end up with the document {_id: 3}
+ // landing on shard0, and {_id: 1} and {_id: 2} on shard1 in the source collection. Note
+ // that {_id: 3} has a duplicate key with the document in the target collection. For this
+ // particular case, the entire pipeline is sent to each shard. Let's assume that shard0 has
+ // processed its single document with {_id: 3} and raised a DuplicateKey error, whilst
+ // shard1 hasn't performed any writes yet (or even hasn't started reading from the cursor).
+ // The mongos, after receiving the DuplicateKey, will stop pulling data from the shards
+ // and will kill the cursors open on the remaining shards. Shard1, eventually, will throw
+ // a CursorKilled during an interrupt check, and so no writes will be done into the target
+ // collection. To work around this scenario and guarantee that the writes will always be
+ // performed, we will sort the documents by _id in ascending order. In this case, the
+ // pipeline will be split and we will pull everything to mongos before doing the $merge.
+ // This also ensures that documents with {_id: 1} and {_id: 2} will be inserted before
+ // the DuplicateKey error is raised.
+ const error = assert.throws(() => source.aggregate([
+ {$sort: {_id: 1}},
+ {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ]));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 30, c: "y"},
+ {_id: 4, a: 40, c: "z"},
+ {_id: 10, a: 10, c: "x"}
+ ]
+ });
+})();
- // Test 'whenMatched=fail whenNotMatched=insert' mode. For matched documents the update should
- // be unordered and report an error at the end when all documents in a batch have been
- // processed, it will not fail as soon as we hit the first document with a match.
- (function testWhenMatchedFailWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 10, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- // Besides ensuring that a DuplicateKey error is raised when we find a matching document,
- // this test also verifies that this $merge mode does perform an unordered insert and all
- // documents in the batch without a matching document get inserted into the target
- // collection. There is a special case when we can bail out early without processing all
- // documents which fit into a single batch. Namely, if we have a sharded cluster with two
- // shards, and shard documents by {_id: "hashed"}, we will end up with the document {_id: 3}
- // landed on shard0, and {_id: 1} and {_id: 2} on shard1 in the source collection. Note
- // that {_id: 3} has a duplicate key with the document in the target collection. For this
- // particlar case, the entire pipeline is sent to each shard. Lets assume that shard0 has
- // processed its single document with {_id: 3} and raised a DuplicateKey error, whilst
- // shard1 hasn't performed any writes yet (or even hasn't started reading from the cursor).
- // The mongos, after receiving the DuplicateKey, will stop pulling data from the shards
- // and will kill the cursors open on the remaining shards. Shard1, eventually, will throw
- // a CursorKilled during an interrupt check, and so no writes will be done into the target
- // collection. To workaround this scenario and guarantee that the writes will always be
- // performed, we will sort the documents by _id in ascending order. In this case, the
- // pipeline will be split and we will pull everything to mongos before doing the $merge.
- // This also ensures that documents with {_id: 1 } and {_id: 2} will be inserted first
- // before the DuplicateKey error is raised.
- const error = assert.throws(() => source.aggregate([
- {$sort: {_id: 1}},
- {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 30, c: "y"},
- {_id: 4, a: 40, c: "z"},
- {_id: 10, a: 10, c: "x"}
- ]
- });
- })();
+// Test 'whenMatched=fail whenNotMatched=fail' mode. This mode is not supported and should fail.
+(function testWhenMatchedFailWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=fail whenNotMatched=fail' mode. This mode is not supported and should fail.
- (function testWhenMatchedFailWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(
- () => source.aggregate(
- [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "fail"}}]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=fail whenNotMatched=discard' mode. This mode is not supported and should
+// fail.
+(function testWhenMatchedFailWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "discard"}}]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=fail whenNotMatched=discard' mode. This mode is not supported and should
- // fail.
- (function testWhenMatchedFailWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "fail", whenNotMatched: "discard"}}
- ]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=merge whenNotMatched=insert' mode. This is an equivalent of an update
+// with a $set operator and upsert=true.
+(function testWhenMatchedMergeWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "z"}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "z", b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 40}
+ ]
+ });
+})();
- // Test 'whenMatched=merge whenNotMatched=insert' mode. This is an equivalent of an update
- // with a $set operator and upsert=true.
- (function testWhenMatchedMergeWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, a: 10, c: "z"}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "z", b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 40}
- ]
- });
- })();
+// Test 'whenMatched=merge whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedMergeWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ const error = assert.throws(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}}]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 3, a: 3, b: "c", c: "y"},
+ {_id: 4, a: 40, c: "z"}
+ ]
+ });
+})();
- // Test 'whenMatched=merge whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
- // processed, it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedMergeWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- const error = assert.throws(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}}
- ]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 3, a: 3, b: "c", c: "y"},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=merge whenNotMatched=discard' mode. Documents in the target collection
+// without a match in the source collection should not be modified as a result of the merge
+// operation.
+(function testWhenMatchedMergeWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 3, a: 3, b: "c", c: "y"},
+ {_id: 4, a: 40, c: "z"}
+ ]
+ });
+})();
- // Test 'whenMatched=merge whenNotMatched=discard' mode. Documents in the target collection
- // without
- // a match in the source collection should not be modified as a result of the merge operation.
- (function testWhenMatchedMergeWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 3, a: 3, b: "c", c: "y"},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=insert' mode. This is an equivalent of a
+// pipeline-style update with upsert=true.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "insert"}
+ }]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: 2}, {_id: 3, x: 2}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=insert' mode. This is an equivalent of a
- // pipeline-style update with upsert=true.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: 1}));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "insert"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: 2}, {_id: 3, x: 2}]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=fail' mode. For matched documents the update
+// should be unordered and report an error at the end when all documents in a batch have been
+// processed; it will not fail as soon as we hit the first document without a match.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ const error = assert.throws(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
+ }]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: 10, c: "x", x: 2}, {_id: 3, a: 30, c: "y", x: 2}, {_id: 4, a: 40, c: "z"}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=fail' mode. For matched documents the update
- // should be unordered and report an error at the end when all documents in a batch have been
- // processed, it will not fail as soon as we hit the first document without a match.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- const error = assert.throws(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "fail"
- }
- }]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 10, c: "x", x: 2},
- {_id: 3, a: 30, c: "y", x: 2},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=[pipeline] whenNotMatched=discard' mode. Documents in the target collection
+// without a match in the source collection should not be modified as a result of the merge
+// operation.
+(function testWhenMatchedPipelineUpdateWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge:
+ {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "discard"}
+ }]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: 10, c: "x", x: 2}, {_id: 3, a: 30, c: "y", x: 2}, {_id: 4, a: 40, c: "z"}]
+ });
+})();
- // Test 'whenMatched=[pipeline] whenNotMatched=discard' mode. Documents in the target collection
- // without a match in the source collection should not be modified as a result of the merge
- // operation.
- (function testWhenMatchedPipelineUpdateWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert(
- [{_id: 1, a: 10, c: "x"}, {_id: 3, a: 30, c: "y"}, {_id: 4, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: [{$addFields: {x: 2}}],
- whenNotMatched: "discard"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 10, c: "x", x: 2},
- {_id: 3, a: 30, c: "y", x: 2},
- {_id: 4, a: 40, c: "z"}
- ]
- });
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=insert' mode. Existing documents in the target
+// collection which have a matching document in the source collection must not be updated, only
+// documents without a match must be inserted into the target collection.
+(function testWhenMatchedKeepExistingWhenNotMatchedInsert() {
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}}
+ ]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 10}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30}, {_id: 4, a: 40}]
+ });
+})();
- // Test 'whenMatched=keepExisting whenNotMatched=insert' mode. Existing documents in the target
- // collection which have a matching document in the source collection must not be updated, only
- // documents without a match must be inserted into the target collection.
- (function testWhenMatchedKeepExistingWhenNotMatchedInsert() {
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, a: 10}, {_id: 3, a: 30}, {_id: 4, a: 40}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge:
- {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 10}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30}, {_id: 4, a: 40}]
- });
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=fail' mode. This mode is not supported and
+// should fail.
+(function testWhenMatchedKeepExistingWhenNotMatchedFail() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "fail"}}
+ ]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
- // Test 'whenMatched=keepExisting whenNotMatched=fail' mode. This mode is not supported and
- // should fail.
- (function testWhenMatchedKeepExistingWhenNotMatchedFail() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([{
- $merge:
- {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "fail"}
- }]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
-
- // Test 'whenMatched=keepExisting whenNotMatched=discard' mode. This mode is not supported and
- // should fail.
- (function testWhenMatchedKeepExistingWhenNotMatchedDiscard() {
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, a: 10}));
- const error = assert.throws(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "keepExisting",
- whenNotMatched: "discard"
- }
- }]));
- assert.commandFailedWithCode(error, 51181);
- // Ensure the target collection has not been modified.
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
- })();
+// Test 'whenMatched=keepExisting whenNotMatched=discard' mode. This mode is not supported and
+// should fail.
+(function testWhenMatchedKeepExistingWhenNotMatchedDiscard() {
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, a: 10}));
+ const error = assert.throws(() => source.aggregate([
+ {$merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "discard"}}
+ ]));
+ assert.commandFailedWithCode(error, 51181);
+ // Ensure the target collection has not been modified.
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 10}]});
+})();
}());
diff --git a/jstests/aggregation/sources/merge/batch_writes.js b/jstests/aggregation/sources/merge/batch_writes.js
index 3dc6455161e..1d0c5502391 100644
--- a/jstests/aggregation/sources/merge/batch_writes.js
+++ b/jstests/aggregation/sources/merge/batch_writes.js
@@ -3,69 +3,69 @@
// nothing horrendous happens and to characterize the current behavior.
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode.
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const coll = db.batch_writes;
- const outColl = db.batch_writes_out;
- coll.drop();
- outColl.drop();
+const coll = db.batch_writes;
+const outColl = db.batch_writes_out;
+coll.drop();
+outColl.drop();
- // Test with 2 very large documents that do not fit into a single batch.
- const kSize15MB = 15 * 1024 * 1024;
- const largeArray = new Array(kSize15MB).join("a");
- assert.commandWorked(coll.insert({_id: 0, a: largeArray}));
- assert.commandWorked(coll.insert({_id: 1, a: largeArray}));
+// Test with 2 very large documents that do not fit into a single batch.
+const kSize15MB = 15 * 1024 * 1024;
+const largeArray = new Array(kSize15MB).join("a");
+assert.commandWorked(coll.insert({_id: 0, a: largeArray}));
+assert.commandWorked(coll.insert({_id: 1, a: largeArray}));
- // Make sure the $merge succeeds without any duplicate keys.
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- // Skip the combination of merge modes which will fail depending on the contents of the
- // source and target collection, as this will cause the aggregation to fail.
- if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
- return;
+// Make sure the $merge succeeds without any duplicate keys.
+withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ // Skip the combination of merge modes which will fail depending on the contents of the
+ // source and target collection, as this will cause the aggregation to fail.
+ if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
+ return;
- coll.aggregate([{
- $merge: {
- into: outColl.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]);
- assert.eq(whenNotMatchedMode == "discard" ? 0 : 2, outColl.find().itcount());
- outColl.drop();
- });
+ coll.aggregate([{
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]);
+ assert.eq(whenNotMatchedMode == "discard" ? 0 : 2, outColl.find().itcount());
+ outColl.drop();
+});
- coll.drop();
- for (let i = 0; i < 10; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
+coll.drop();
+for (let i = 0; i < 10; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
- // Create a unique index on 'a' in the output collection to create a unique key violation when
- // running the $merge. The second document to be written ({_id: 1, a: 1}) will conflict with the
- // existing document in the output collection. We use a unique index on a field other than _id
- // because whenMatched: "replace" will not change _id when one already exists.
- outColl.drop();
- assert.commandWorked(outColl.insert({_id: 2, a: 1}));
- assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
+// Create a unique index on 'a' in the output collection to create a unique key violation when
+// running the $merge. The second document to be written ({_id: 1, a: 1}) will conflict with the
+// existing document in the output collection. We use a unique index on a field other than _id
+// because whenMatched: "replace" will not change _id when one already exists.
+outColl.drop();
+assert.commandWorked(outColl.insert({_id: 2, a: 1}));
+assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
- // Test that the writes for $merge are unordered, meaning the operation continues even if it
- // encounters a duplicate key error. We don't guarantee any particular behavior in this case,
- // but this test is meant to characterize the current behavior.
- assertErrorCode(
- coll,
- [{$merge: {into: outColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}],
- ErrorCodes.DuplicateKey);
- assert.soon(() => {
- return outColl.find().itcount() == 9;
- });
+// Test that the writes for $merge are unordered, meaning the operation continues even if it
+// encounters a duplicate key error. We don't guarantee any particular behavior in this case,
+// but this test is meant to characterize the current behavior.
+assertErrorCode(
+ coll,
+ [{$merge: {into: outColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}],
+ ErrorCodes.DuplicateKey);
+assert.soon(() => {
+ return outColl.find().itcount() == 9;
+});
- assertErrorCode(
- coll,
- [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}],
- ErrorCodes.DuplicateKey);
- assert.soon(() => {
- return outColl.find().itcount() == 9;
- });
+assertErrorCode(
+ coll,
+ [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}],
+ ErrorCodes.DuplicateKey);
+assert.soon(() => {
+ return outColl.find().itcount() == 9;
+});
}());
diff --git a/jstests/aggregation/sources/merge/bypass_doc_validation.js b/jstests/aggregation/sources/merge/bypass_doc_validation.js
index d43b624ba91..957fcc9a2df 100644
--- a/jstests/aggregation/sources/merge/bypass_doc_validation.js
+++ b/jstests/aggregation/sources/merge/bypass_doc_validation.js
@@ -4,186 +4,150 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- const testDB = db.getSiblingDB("out_bypass_doc_val");
- const sourceColl = testDB.getCollection("source");
- const targetColl = testDB.getCollection("target");
+const testDB = db.getSiblingDB("out_bypass_doc_val");
+const sourceColl = testDB.getCollection("source");
+const targetColl = testDB.getCollection("target");
+targetColl.drop();
+assert.commandWorked(testDB.createCollection(targetColl.getName(), {validator: {a: 2}}));
+
+sourceColl.drop();
+assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
+
+// Test that the bypassDocumentValidation flag is passed through to the writes on the output
+// collection.
+(function testBypassDocValidationTrue() {
+ sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
+}());
+
+// Test that mode "replace" passes without the bypassDocumentValidation flag if the
+// updated doc is valid.
+(function testReplacementStyleUpdateWithoutBypass() {
+ sourceColl.aggregate([
+ {$addFields: {a: 2}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 2}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: false});
+ assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
+}());
+
+function assertDocValidationFailure(cmdOptions) {
+ assert.commandWorked(targetColl.remove({}));
+ assertErrorCode(sourceColl,
+ [{$merge: targetColl.getName()}],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+
+ assertErrorCode(
+ sourceColl,
+ [
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+
+ assertErrorCode(
+ sourceColl,
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DocumentValidationFailure,
+ "Expected failure without bypass set",
+ cmdOptions);
+ assert.eq(0, targetColl.find().itcount());
+}
+
+// Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
+// flag is not set.
+assertDocValidationFailure({});
+
+// Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
+// flag is explicitly set to false.
+assertDocValidationFailure({bypassDocumentValidation: false});
+
+// Test that bypassDocumentValidation is *not* needed if the source collection has a
+// validator but the output collection does not.
+(function testDocValidatorOnSourceCollection() {
targetColl.drop();
- assert.commandWorked(testDB.createCollection(targetColl.getName(), {validator: {a: 2}}));
+ assert.commandWorked(testDB.runCommand({collMod: sourceColl.getName(), validator: {a: 1}}));
+
+ sourceColl.aggregate([{$merge: targetColl.getName()}]);
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+
+ sourceColl.aggregate([
+ {$addFields: {a: 3}},
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+
+ sourceColl.aggregate([
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
+ ]);
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
+}());
+// Test that the bypassDocumentValidation flag is cast to true if the value is non-boolean.
+(function testNonBooleanBypassDocValidationFlag() {
+ assert.commandWorked(targetColl.remove({}));
+ assert.commandWorked(testDB.runCommand({collMod: targetColl.getName(), validator: {a: 1}}));
sourceColl.drop();
assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
- // Test that the bypassDocumentValidation flag is passed through to the writes on the output
- // collection.
- (function testBypassDocValidationTrue() {
- sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
+ sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: 5});
+ assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
- }());
-
- // Test that mode "replaceDocuments" passes without the bypassDocumentValidation flag if the
- // updated doc is valid.
- (function testReplacementStyleUpdateWithoutBypass() {
- sourceColl.aggregate([
- {$addFields: {a: 2}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}
- }
- ]);
- assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
- sourceColl.aggregate(
- [
- {$addFields: {a: 2}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: false});
- assert.eq([{_id: 0, a: 2}], targetColl.find({_id: 0}).toArray());
- }());
-
- function assertDocValidationFailure(cmdOptions) {
- assert.commandWorked(targetColl.remove({}));
- assertErrorCode(sourceColl,
- [{$merge: targetColl.getName()}],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
-
- assertErrorCode(sourceColl,
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
-
- assertErrorCode(
- sourceColl,
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}
- }
- ],
- ErrorCodes.DocumentValidationFailure,
- "Expected failure without bypass set",
- cmdOptions);
- assert.eq(0, targetColl.find().itcount());
- }
-
- // Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
- // flag is not set.
- assertDocValidationFailure({});
-
- // Test that $merge fails if the output document is not valid, and the bypassDocumentValidation
- // flag is explicitly set to false.
- assertDocValidationFailure({bypassDocumentValidation: false});
-
- // Test that bypassDocumentValidation is *not* needed if the source collection has a
- // validator but the output collection does not.
- (function testDocValidatorOnSourceCollection() {
- targetColl.drop();
- assert.commandWorked(testDB.runCommand({collMod: sourceColl.getName(), validator: {a: 1}}));
-
- sourceColl.aggregate([{$merge: targetColl.getName()}]);
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
-
- sourceColl.aggregate([
+ sourceColl.aggregate(
+ [
{$addFields: {a: 3}},
- {
- $merge:
- {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}
- }
- ]);
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+ {$merge: {into: targetColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ {bypassDocumentValidation: "false"});
+ assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
+}());
- sourceColl.aggregate([
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}
- ]);
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], targetColl.find().sort({_id: 1}).toArray());
- }());
-
- // Test that the bypassDocumentValidation is casted to true if the value is non-boolean.
- (function testNonBooleanBypassDocValidationFlag() {
- assert.commandWorked(targetColl.remove({}));
- assert.commandWorked(testDB.runCommand({collMod: targetColl.getName(), validator: {a: 1}}));
- sourceColl.drop();
- assert.commandWorked(sourceColl.insert({_id: 0, a: 1}));
-
- sourceColl.aggregate([{$merge: targetColl.getName()}], {bypassDocumentValidation: 5});
- assert.eq([{_id: 0, a: 1}], targetColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ],
- {bypassDocumentValidation: "false"});
- assert.eq([{_id: 0, a: 3}], targetColl.find().toArray());
- }());
-
- // Test bypassDocumentValidation with $merge to a collection in a foreign database.
- (function testForeignDb() {
- const foreignDB = db.getSiblingDB("foreign_db");
- const foreignColl = foreignDB.foreign_coll;
- foreignColl.drop();
- assert.commandWorked(
- foreignDB.createCollection(foreignColl.getName(), {validator: {a: 2}}));
-
- sourceColl.aggregate(
- [
- {$addFields: {a: 3}},
- {
+// Test bypassDocumentValidation with $merge to a collection in a foreign database.
+(function testForeignDb() {
+ const foreignDB = db.getSiblingDB("foreign_db");
+ const foreignColl = foreignDB.foreign_coll;
+ foreignColl.drop();
+ assert.commandWorked(foreignDB.createCollection(foreignColl.getName(), {validator: {a: 2}}));
+
+ sourceColl.aggregate(
+ [
+ {$addFields: {a: 3}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -192,15 +156,15 @@
whenMatched: "replace",
whenNotMatched: "insert"
}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}], foreignColl.find().toArray());
-
- sourceColl.aggregate(
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
+ }
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}], foreignColl.find().toArray());
+
+ sourceColl.aggregate(
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -209,16 +173,16 @@
whenMatched: "fail",
whenNotMatched: "insert"
}
- }
- ],
- {bypassDocumentValidation: true});
- assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], foreignColl.find().sort({_id: 1}).toArray());
-
- assert.commandWorked(foreignColl.remove({}));
- assertErrorCode(sourceColl,
- [
- {$addFields: {a: 3}},
- {
+ }
+ ],
+ {bypassDocumentValidation: true});
+ assert.eq([{_id: 0, a: 3}, {_id: 1, a: 4}], foreignColl.find().sort({_id: 1}).toArray());
+
+ assert.commandWorked(foreignColl.remove({}));
+ assertErrorCode(sourceColl,
+ [
+ {$addFields: {a: 3}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -227,14 +191,14 @@
whenMatched: "replace",
whenNotMatched: "insert"
}
- }
- ],
- ErrorCodes.DocumentValidationFailure);
-
- assertErrorCode(sourceColl,
- [
- {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
- {
+ }
+ ],
+ ErrorCodes.DocumentValidationFailure);
+
+ assertErrorCode(sourceColl,
+ [
+ {$replaceRoot: {newRoot: {_id: 1, a: 4}}},
+ {
$merge: {
into: {
db: foreignDB.getName(),
@@ -243,9 +207,9 @@
whenMatched: "fail",
whenNotMatched: "insert"
}
- }
- ],
- ErrorCodes.DocumentValidationFailure);
- assert.eq(0, foreignColl.find().itcount());
- }());
+ }
+ ],
+ ErrorCodes.DocumentValidationFailure);
+ assert.eq(0, foreignColl.find().itcount());
+}());
}());
diff --git a/jstests/aggregation/sources/merge/disallowed_in_lookup.js b/jstests/aggregation/sources/merge/disallowed_in_lookup.js
index 3731055f6b9..19f37305dbe 100644
--- a/jstests/aggregation/sources/merge/disallowed_in_lookup.js
+++ b/jstests/aggregation/sources/merge/disallowed_in_lookup.js
@@ -1,28 +1,28 @@
// Tests that $merge cannot be used within a $lookup pipeline.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
- load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
- load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
- load("jstests/libs/fixture_helpers.js"); // For isSharded.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/collection_drop_recreate.js"); // For assertDropCollection.
+load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
+load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
+load("jstests/libs/fixture_helpers.js"); // For isSharded.
- const kErrorCodeMergeBannedInLookup = 51047;
- const kErrorCodeMergeLastStageOnly = 40601;
- const coll = db.merge_in_lookup_not_allowed;
- coll.drop();
+const kErrorCodeMergeBannedInLookup = 51047;
+const kErrorCodeMergeLastStageOnly = 40601;
+const coll = db.merge_in_lookup_not_allowed;
+coll.drop();
- const from = db.merge_in_lookup_not_allowed_from;
- from.drop();
+const from = db.merge_in_lookup_not_allowed_from;
+from.drop();
- if (FixtureHelpers.isSharded(from)) {
- setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
- "internalQueryAllowShardedLookup",
- true);
- }
+if (FixtureHelpers.isSharded(from)) {
+ setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
+ "internalQueryAllowShardedLookup",
+ true);
+}
- let pipeline = [
+let pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}],
@@ -31,9 +31,9 @@
}
},
];
- assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
+assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$project: {x: 0}}, {$merge: {into: "out_collection", on: "_id"}}],
@@ -42,9 +42,9 @@
}
},
];
- assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
+assertErrorCode(coll, pipeline, kErrorCodeMergeBannedInLookup);
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}, {$match: {x: true}}],
@@ -53,14 +53,14 @@
}
},
];
- // Pipeline will fail because $merge is not last in the subpipeline.
- // Validation for $merge in a $lookup's subpipeline occurs at a later point.
- assertErrorCode(coll, pipeline, kErrorCodeMergeLastStageOnly);
+// Pipeline will fail because $merge is not last in the subpipeline.
+// Validation for $merge in a $lookup's subpipeline occurs at a later point.
+assertErrorCode(coll, pipeline, kErrorCodeMergeLastStageOnly);
- // Create view which contains $merge within $lookup.
- assertDropCollection(coll.getDB(), "view1");
+// Create view which contains $merge within $lookup.
+assertDropCollection(coll.getDB(), "view1");
- pipeline = [
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection", on: "_id"}}],
@@ -69,14 +69,14 @@
}
},
];
- // Pipeline will fail because $merge is not allowed to exist within a $lookup.
- // Validation for $merge in a view occurs at a later point.
- const cmdRes =
- coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
- assert.commandFailedWithCode(cmdRes, kErrorCodeMergeBannedInLookup);
+// Pipeline will fail because $merge is not allowed to exist within a $lookup.
+// Validation for $merge in a view occurs at a later point.
+const cmdRes =
+ coll.getDB().runCommand({create: "view1", viewOn: coll.getName(), pipeline: pipeline});
+assert.commandFailedWithCode(cmdRes, kErrorCodeMergeBannedInLookup);
- // Test that a $merge without an explicit "on" field still fails within a $lookup.
- pipeline = [
+// Test that a $merge without an explicit "on" field still fails within a $lookup.
+pipeline = [
{
$lookup: {
pipeline: [{$merge: {into: "out_collection"}}],
@@ -85,7 +85,7 @@
}
},
];
- assert.commandFailedWithCode(
- db.runCommand({aggregate: coll.getName(), pipeline: pipeline, cursor: {}}),
- kErrorCodeMergeBannedInLookup);
+assert.commandFailedWithCode(
+ db.runCommand({aggregate: coll.getName(), pipeline: pipeline, cursor: {}}),
+ kErrorCodeMergeBannedInLookup);
}());
diff --git a/jstests/aggregation/sources/merge/exchange_explain.js b/jstests/aggregation/sources/merge/exchange_explain.js
index 362af97ed46..23bed99973d 100644
--- a/jstests/aggregation/sources/merge/exchange_explain.js
+++ b/jstests/aggregation/sources/merge/exchange_explain.js
@@ -6,173 +6,169 @@
load('jstests/aggregation/extras/utils.js');
(function() {
- "use strict";
-
- const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
-
- const mongosDB = st.s.getDB("test_db");
-
- const inColl = mongosDB["inColl"];
- const targetCollRange = mongosDB["targetCollRange"];
- const targetCollRangeOtherField = mongosDB["targetCollRangeOtherField"];
- const targetCollHash = mongosDB["targetCollHash"];
-
- const numDocs = 1000;
-
- function runExplainQuery(targetColl) {
- return inColl.explain("allPlansExecution").aggregate([
- {$group: {_id: "$a", a: {$avg: "$a"}}},
- {
- $merge: {
- into: {
- db: targetColl.getDB().getName(),
- coll: targetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ]);
- }
+"use strict";
- function runRealQuery(targetColl) {
- return inColl.aggregate([
- {$group: {_id: "$a", a: {$avg: "$a"}}},
- {
- $merge: {
- into: {
- db: targetColl.getDB().getName(),
- coll: targetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }
- ]);
- }
+const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
- function getExchangeSpec(explain) {
- assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
- assert(explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+const mongosDB = st.s.getDB("test_db");
- return explain.splitPipeline.exchange;
- }
+const inColl = mongosDB["inColl"];
+const targetCollRange = mongosDB["targetCollRange"];
+const targetCollRangeOtherField = mongosDB["targetCollRangeOtherField"];
+const targetCollHash = mongosDB["targetCollHash"];
- // Shard the input collection.
- st.shardColl(inColl, {a: 1}, {a: 500}, {a: 500}, mongosDB.getName());
+const numDocs = 1000;
- // Insert some data to the input collection.
- let bulk = inColl.initializeUnorderedBulkOp();
- for (let i = 0; i < numDocs; i++) {
- bulk.insert({a: i}, {b: [0, 1, 2, 3, i]});
- }
- assert.commandWorked(bulk.execute());
-
- // Shard the output collections.
- st.shardColl(targetCollRange, {_id: 1}, {_id: 500}, {_id: 500}, mongosDB.getName());
- st.shardColl(targetCollRangeOtherField, {b: 1}, {b: 500}, {b: 500}, mongosDB.getName());
- st.shardColl(targetCollHash, {_id: "hashed"}, false, false, mongosDB.getName());
-
- // Run the explain. We expect to see the range based exchange here.
- let explain = runExplainQuery(targetCollRange);
-
- // Make sure we see the exchange in the explain output.
- assert.eq(explain.mergeType, "exchange", tojson(explain));
- let exchangeSpec = getExchangeSpec(explain);
- assert.eq(exchangeSpec.policy, "keyRange");
- assert.eq(exchangeSpec.key, {_id: 1});
-
- // Run the real query.
- runRealQuery(targetCollRange);
- let results = targetCollRange.aggregate([{'$count': "count"}]).next().count;
- assert.eq(results, numDocs);
-
- // Rerun the same query with the hash based exchange.
- explain = runExplainQuery(targetCollHash);
-
- // Make sure we see the exchange in the explain output.
- assert.eq(explain.mergeType, "exchange", tojson(explain));
- exchangeSpec = getExchangeSpec(explain);
- assert.eq(exchangeSpec.policy, "keyRange");
- assert.eq(exchangeSpec.key, {_id: "hashed"});
-
- // Run the real query.
- runRealQuery(targetCollHash);
- results = targetCollHash.aggregate([{'$count': "count"}]).next().count;
- assert.eq(results, numDocs);
-
- // This should fail because the "on" field ('b' in this case, the shard key of the target
- // collection) cannot be an array.
- assertErrorCode(inColl,
- [{
- $merge: {
- into: {
- db: targetCollRangeOtherField.getDB().getName(),
- coll: targetCollRangeOtherField.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }],
- 51132);
-
- // Turn off the exchange and rerun the query.
- assert.commandWorked(mongosDB.adminCommand({setParameter: 1, internalQueryDisableExchange: 1}));
- explain = runExplainQuery(targetCollRange);
-
- // Make sure there is no exchange.
- assert.eq(explain.mergeType, "anyShard", tojson(explain));
- assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
- assert(!explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
-
- // This should fail similar to before even if we are not running the exchange.
- assertErrorCode(inColl,
- [{
- $merge: {
- into: {
- db: targetCollRangeOtherField.getDB().getName(),
- coll: targetCollRangeOtherField.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert"
- }
- }],
- 51132);
-
- // SERVER-38349 Make sure mongos rejects specifying exchange directly.
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: inColl.getName(),
- pipeline: [],
- cursor: {},
- exchange: {
- policy: "keyRange",
- bufferSize: NumberInt(1024),
- boundaries: [{_id: 0}],
- consumers: NumberInt(2),
- consumerIds: [NumberInt(0), NumberInt(1)]
+function runExplainQuery(targetColl) {
+ return inColl.explain("allPlansExecution").aggregate([
+ {$group: {_id: "$a", a: {$avg: "$a"}}},
+ {
+ $merge: {
+ into: {
+ db: targetColl.getDB().getName(),
+ coll: targetColl.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
}
- }),
- 51028);
+ ]);
+}
- assert.commandFailedWithCode(mongosDB.runCommand({
- aggregate: inColl.getName(),
- pipeline: [{
+function runRealQuery(targetColl) {
+ return inColl.aggregate([
+ {$group: {_id: "$a", a: {$avg: "$a"}}},
+ {
$merge: {
- into: targetCollRange.getName(),
+ into: {
+ db: targetColl.getDB().getName(),
+ coll: targetColl.getName(),
+ },
whenMatched: "replace",
whenNotMatched: "insert"
}
- }],
- cursor: {},
- exchange: {
- policy: "keyRange",
- bufferSize: NumberInt(1024),
- boundaries: [{_id: 0}],
- consumers: NumberInt(2),
- consumerIds: [NumberInt(0), NumberInt(1)]
}
- }),
- 51028);
+ ]);
+}
+
+function getExchangeSpec(explain) {
+ assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
+ assert(explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+
+ return explain.splitPipeline.exchange;
+}
+
+// Shard the input collection.
+st.shardColl(inColl, {a: 1}, {a: 500}, {a: 500}, mongosDB.getName());
+
+// Insert some data into the input collection.
+let bulk = inColl.initializeUnorderedBulkOp();
+for (let i = 0; i < numDocs; i++) {
+ bulk.insert({a: i}, {b: [0, 1, 2, 3, i]});
+}
+assert.commandWorked(bulk.execute());
+
+// Shard the output collections.
+st.shardColl(targetCollRange, {_id: 1}, {_id: 500}, {_id: 500}, mongosDB.getName());
+st.shardColl(targetCollRangeOtherField, {b: 1}, {b: 500}, {b: 500}, mongosDB.getName());
+st.shardColl(targetCollHash, {_id: "hashed"}, false, false, mongosDB.getName());
+
+// Run the explain. We expect to see the range based exchange here.
+let explain = runExplainQuery(targetCollRange);
+
+// Make sure we see the exchange in the explain output.
+assert.eq(explain.mergeType, "exchange", tojson(explain));
+let exchangeSpec = getExchangeSpec(explain);
+assert.eq(exchangeSpec.policy, "keyRange");
+assert.eq(exchangeSpec.key, {_id: 1});
+
+// Run the real query.
+runRealQuery(targetCollRange);
+let results = targetCollRange.aggregate([{'$count': "count"}]).next().count;
+assert.eq(results, numDocs);
+
+// Rerun the same query with the hash based exchange.
+explain = runExplainQuery(targetCollHash);
+
+// Make sure we see the exchange in the explain output.
+assert.eq(explain.mergeType, "exchange", tojson(explain));
+exchangeSpec = getExchangeSpec(explain);
+assert.eq(exchangeSpec.policy, "keyRange");
+assert.eq(exchangeSpec.key, {_id: "hashed"});
+
+// Run the real query.
+runRealQuery(targetCollHash);
+results = targetCollHash.aggregate([{'$count': "count"}]).next().count;
+assert.eq(results, numDocs);
+
+// This should fail because the "on" field ('b' in this case, the shard key of the target
+// collection) cannot be an array.
+assertErrorCode(inColl,
+ [{
+ $merge: {
+ into: {
+ db: targetCollRangeOtherField.getDB().getName(),
+ coll: targetCollRangeOtherField.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
+ }],
+ 51132);
+
+// Turn off the exchange and rerun the query.
+assert.commandWorked(mongosDB.adminCommand({setParameter: 1, internalQueryDisableExchange: 1}));
+explain = runExplainQuery(targetCollRange);
+
+// Make sure there is no exchange.
+assert.eq(explain.mergeType, "anyShard", tojson(explain));
+assert(explain.hasOwnProperty("splitPipeline"), tojson(explain));
+assert(!explain.splitPipeline.hasOwnProperty("exchange"), tojson(explain));
+
+// This should fail in the same way as before, even though the exchange is not being used.
+assertErrorCode(inColl,
+ [{
+ $merge: {
+ into: {
+ db: targetCollRangeOtherField.getDB().getName(),
+ coll: targetCollRangeOtherField.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert"
+ }
+ }],
+ 51132);
+
+// SERVER-38349 Make sure mongos rejects specifying exchange directly.
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: inColl.getName(),
+ pipeline: [],
+ cursor: {},
+ exchange: {
+ policy: "keyRange",
+ bufferSize: NumberInt(1024),
+ boundaries: [{_id: 0}],
+ consumers: NumberInt(2),
+ consumerIds: [NumberInt(0), NumberInt(1)]
+ }
+}),
+ 51028);
+
+assert.commandFailedWithCode(mongosDB.runCommand({
+ aggregate: inColl.getName(),
+ pipeline: [{
+ $merge: {into: targetCollRange.getName(), whenMatched: "replace", whenNotMatched: "insert"}
+ }],
+ cursor: {},
+ exchange: {
+ policy: "keyRange",
+ bufferSize: NumberInt(1024),
+ boundaries: [{_id: 0}],
+ consumers: NumberInt(2),
+ consumerIds: [NumberInt(0), NumberInt(1)]
+ }
+}),
+ 51028);
- st.stop();
+st.stop();
}());
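As a point of contrast with the SERVER-38349 checks above, a hedged companion snippet (not part of the test, reusing the test's mongosDB/inColl/targetCollRange handles): when the raw 'exchange' field is omitted, the same request is an ordinary aggregate and mongos is expected to accept it, just as runRealQuery() does earlier in the test.

// Not part of the test: without a top-level 'exchange' field this is an ordinary
// aggregate request and should be accepted by mongos.
assert.commandWorked(mongosDB.runCommand({
    aggregate: inColl.getName(),
    pipeline: [{
        $merge:
            {into: targetCollRange.getName(), whenMatched: "replace", whenNotMatched: "insert"}
    }],
    cursor: {}
}));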
diff --git a/jstests/aggregation/sources/merge/merge_to_referenced_collection.js b/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
index c6a82bab79d..a9060f58b0a 100644
--- a/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
+++ b/jstests/aggregation/sources/merge/merge_to_referenced_collection.js
@@ -9,64 +9,55 @@
* @tags: [assumes_unsharded_collection]
*/
(function() {
- 'use strict';
+'use strict';
- load('jstests/aggregation/extras/merge_helpers.js'); // For 'withEachMergeMode'.
- load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'.
+load('jstests/aggregation/extras/merge_helpers.js'); // For 'withEachMergeMode'.
+load('jstests/libs/fixture_helpers.js'); // For 'FixtureHelpers'.
- const testDB = db.getSiblingDB("merge_to_referenced_coll");
- const coll = testDB.test;
+const testDB = db.getSiblingDB("merge_to_referenced_coll");
+const coll = testDB.test;
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- coll.drop();
+withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ coll.drop();
- // Seed the collection to ensure each pipeline will actually do something.
- assert.commandWorked(coll.insert({_id: 0}));
+ // Seed the collection to ensure each pipeline will actually do something.
+ assert.commandWorked(coll.insert({_id: 0}));
- // Each of the following assertions will somehow use $merge to write to a namespace that is
- // being read from elsewhere in the pipeline.
- const assertFailsWithCode = ((fn) => {
- const error = assert.throws(fn);
- assert.contains(error.code, [51188, 51079]);
- });
+ // Each of the following assertions will somehow use $merge to write to a namespace that is
+ // being read from elsewhere in the pipeline.
+ const assertFailsWithCode = ((fn) => {
+ const error = assert.throws(fn);
+ assert.contains(error.code, [51188, 51079]);
+ });
- // Test $merge to the aggregate command's source collection.
- assertFailsWithCode(() => coll.aggregate([{
+ // Test $merge to the aggregate command's source collection.
+ assertFailsWithCode(() => coll.aggregate([{
+ $merge:
+ {into: coll.getName(), whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}
+ }]));
+
+ // Test $merge to the same namespace as a $lookup which is the same as the aggregate
+ // command's source collection.
+ assertFailsWithCode(() => coll.aggregate([
+ {$lookup: {from: coll.getName(), as: "x", localField: "f_id", foreignField: "_id"}},
+ {
$merge: {
into: coll.getName(),
whenMatched: whenMatchedMode,
whenNotMatched: whenNotMatchedMode
}
- }]));
+ }
+ ]));
- // Test $merge to the same namespace as a $lookup which is the same as the aggregate
- // command's source collection.
- assertFailsWithCode(() => coll.aggregate([
- {$lookup: {from: coll.getName(), as: "x", localField: "f_id", foreignField: "_id"}},
- {
- $merge: {
- into: coll.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
+ // Test $merge to the same namespace as a $lookup which is *not* the same as the aggregate
+ // command's source collection.
+ assertFailsWithCode(() => coll.aggregate([
+ {$lookup: {from: "bar", as: "x", localField: "f_id", foreignField: "_id"}},
+ {$merge: {into: "bar", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
- // Test $merge to the same namespace as a $lookup which is *not* the same as the aggregate
- // command's source collection.
- assertFailsWithCode(() => coll.aggregate([
- {$lookup: {from: "bar", as: "x", localField: "f_id", foreignField: "_id"}},
- {
- $merge: {
- into: "bar",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
-
- // Test $merge to the same namespace as a $graphLookup.
- assertFailsWithCode(() => coll.aggregate([
+ // Test $merge to the same namespace as a $graphLookup.
+ assertFailsWithCode(() => coll.aggregate([
{
$graphLookup: {
from: "bar",
@@ -85,8 +76,8 @@
}
]));
- // Test $merge to the same namespace as a $lookup which is nested within another $lookup.
- assertFailsWithCode(() => coll.aggregate([
+ // Test $merge to the same namespace as a $lookup which is nested within another $lookup.
+ assertFailsWithCode(() => coll.aggregate([
{
$lookup: {
from: "bar",
@@ -103,49 +94,33 @@
}
}
]));
- // Test $merge to the same namespace as a $lookup which is nested within a $facet.
- assertFailsWithCode(() => coll.aggregate([
- {
- $facet: {
- y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
- }
- },
- {
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }
- ]));
- assertFailsWithCode(() => coll.aggregate([
- {
- $facet: {
- x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
- y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
- }
- },
- {
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
+ // Test $merge to the same namespace as a $lookup which is nested within a $facet.
+ assertFailsWithCode(() => coll.aggregate([
+ {
+ $facet: {
+ y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
}
- ]));
-
- // Test that we use the resolved namespace of a view to detect this sort of halloween
- // problem.
- assert.commandWorked(
- testDB.runCommand({create: "view_on_TARGET", viewOn: "TARGET", pipeline: []}));
- assertFailsWithCode(() => testDB.view_on_TARGET.aggregate([{
- $merge: {
- into: "TARGET",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
+ },
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+ assertFailsWithCode(() => coll.aggregate([
+ {
+ $facet: {
+ x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
+ y: [{$lookup: {from: "TARGET", as: "y", pipeline: []}}],
}
- }]));
- assertFailsWithCode(() => coll.aggregate([
+ },
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+
+    // Test that we use the resolved namespace of a view to detect this sort of Halloween
+    // problem.
+ assert.commandWorked(
+ testDB.runCommand({create: "view_on_TARGET", viewOn: "TARGET", pipeline: []}));
+ assertFailsWithCode(() => testDB.view_on_TARGET.aggregate([
+ {$merge: {into: "TARGET", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]));
+ assertFailsWithCode(() => coll.aggregate([
{
$facet: {
x: [{$lookup: {from: "other", as: "y", pipeline: []}}],
@@ -167,25 +142,21 @@
}
]));
- function generateNestedPipeline(foreignCollName, numLevels) {
- let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
-
- for (let level = 1; level < numLevels; level++) {
- pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
- }
+ function generateNestedPipeline(foreignCollName, numLevels) {
+ let pipeline = [{"$lookup": {pipeline: [], from: foreignCollName, as: "same"}}];
- return pipeline;
+ for (let level = 1; level < numLevels; level++) {
+ pipeline = [{"$lookup": {pipeline: pipeline, from: foreignCollName, as: "same"}}];
}
- const nestedPipeline = generateNestedPipeline("lookup", 20).concat([{
- $merge: {
- into: "lookup",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]);
- assertFailsWithCode(() => coll.aggregate(nestedPipeline));
+ return pipeline;
+ }
- testDB.dropDatabase();
- });
+ const nestedPipeline = generateNestedPipeline("lookup", 20).concat([
+ {$merge: {into: "lookup", whenMatched: whenMatchedMode, whenNotMatched: whenNotMatchedMode}}
+ ]);
+ assertFailsWithCode(() => coll.aggregate(nestedPipeline));
+
+ testDB.dropDatabase();
+});
}());
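For readers unfamiliar with the helper, a minimal sketch of what withEachMergeMode() might look like follows; the real implementation lives in jstests/aggregation/extras/merge_helpers.js and may differ (for instance, whenMatched also accepts a custom pipeline, exercised by the mode_pipeline_* tests).

// Hypothetical sketch only: iterate every combination of the string-valued modes and
// hand them to the callback under the property names used by the test above.
function withEachMergeModeSketch(callback) {
    const whenMatchedModes = ["replace", "merge", "keepExisting", "fail"];
    const whenNotMatchedModes = ["insert", "discard", "fail"];
    whenMatchedModes.forEach(
        whenMatchedMode => whenNotMatchedModes.forEach(
            whenNotMatchedMode => callback({whenMatchedMode, whenNotMatchedMode})));
}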
diff --git a/jstests/aggregation/sources/merge/merge_to_same_collection.js b/jstests/aggregation/sources/merge/merge_to_same_collection.js
index 2e26a26965a..51435696fdd 100644
--- a/jstests/aggregation/sources/merge/merge_to_same_collection.js
+++ b/jstests/aggregation/sources/merge/merge_to_same_collection.js
@@ -2,19 +2,19 @@
* Tests that $merge fails when the target collection is the aggregation collection.
*
* @tags: [assumes_unsharded_collection]
-*/
+ */
(function() {
- "use strict";
+"use strict";
- // For assertMergeFailsForAllModesWithCode.
- load("jstests/aggregation/extras/merge_helpers.js");
+// For assertMergeFailsForAllModesWithCode.
+load("jstests/aggregation/extras/merge_helpers.js");
- const coll = db.name;
- coll.drop();
+const coll = db.name;
+coll.drop();
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
- assertMergeFailsForAllModesWithCode({source: coll, target: coll, errorCodes: 51188});
+const nDocs = 10;
+for (let i = 0; i < nDocs; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
+assertMergeFailsForAllModesWithCode({source: coll, target: coll, errorCodes: 51188});
}());
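A minimal sketch of what a helper like assertMergeFailsForAllModesWithCode() could look like, assuming it builds on withEachMergeMode(); the real helper in jstests/aggregation/extras/merge_helpers.js may differ.

// Hypothetical sketch only: run $merge from 'source' into 'target' under every mode
// combination and require each attempt to fail with one of the expected error codes.
function assertMergeFailsForAllModesWithCodeSketch({source, target, errorCodes}) {
    withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
        const error = assert.throws(() => source.aggregate([{
            $merge: {
                into: target.getName(),
                whenMatched: whenMatchedMode,
                whenNotMatched: whenNotMatchedMode
            }
        }]));
        assert.commandFailedWithCode(error, errorCodes);
    });
}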
diff --git a/jstests/aggregation/sources/merge/mode_fail_insert.js b/jstests/aggregation/sources/merge/mode_fail_insert.js
index 7cfd6aee02e..9363c42b12d 100644
--- a/jstests/aggregation/sources/merge/mode_fail_insert.js
+++ b/jstests/aggregation/sources/merge/mode_fail_insert.js
@@ -1,147 +1,149 @@
// Tests the behavior of $merge with whenMatched: "fail" and whenNotMatched: "insert".
// @tags: [assumes_unsharded_collection, assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const coll = db.merge_insert_only;
- coll.drop();
-
- const targetColl = db.merge_insert_only_out;
- targetColl.drop();
-
- const pipeline =
- [{$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}];
-
- //
- // Test $merge with a non-existent output collection.
- //
- assert.commandWorked(coll.insert({_id: 0}));
-
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Test $merge with an existing output collection.
- //
- assert.commandWorked(coll.remove({_id: 0}));
- assert.commandWorked(coll.insert({_id: 1}));
- coll.aggregate(pipeline);
- assert.eq(2, targetColl.find().itcount());
-
- //
- // Test that $merge fails if there's a duplicate key error.
- //
- assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
-
- //
- // Test that $merge will preserve the indexes and options of the output collection.
- //
- const validator = {a: {$gt: 0}};
- targetColl.drop();
- assert.commandWorked(db.createCollection(targetColl.getName(), {validator: validator}));
- assert.commandWorked(targetColl.createIndex({a: 1}));
-
- coll.drop();
- assert.commandWorked(coll.insert({a: 1}));
-
- coll.aggregate(pipeline);
- assert.eq(1, targetColl.find().itcount());
- assert.eq(2, targetColl.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
-
- //
- // Test that $merge fails if it violates a unique index constraint.
- //
- coll.drop();
- assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
- targetColl.drop();
- assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
-
- assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
-
- //
- // Test that a $merge aggregation succeeds even if the _id is stripped out and the "unique key"
- // is the document key, which will be _id for a new collection.
- //
- coll.drop();
- assert.commandWorked(coll.insert({a: 0}));
- targetColl.drop();
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}},
- ]));
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Test that a $merge aggregation succeeds even if the _id is stripped out and _id is included
- // in the "on" fields.
- //
- coll.drop();
- assert.commandWorked(coll.insert([{_id: "should be projected away", name: "kyle"}]));
- targetColl.drop();
- assert.commandWorked(targetColl.createIndex({_id: 1, name: -1}, {unique: true}));
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {
- $merge: {
- into: targetColl.getName(),
- whenMatched: "fail",
- whenNotMatched: "insert",
- on: ["_id", "name"]
- }
- },
- ]));
- assert.eq(1, targetColl.find().itcount());
-
- //
- // Tests for $merge to a database that differs from the aggregation database.
- //
- const foreignDb = db.getSiblingDB("merge_insert_only_foreign");
- const foreignTargetColl = foreignDb.merge_insert_only_out;
- const pipelineDifferentOutputDb = [
- {$project: {_id: 0}},
- {
- $merge: {
- into: {
- db: foreignDb.getName(),
- coll: foreignTargetColl.getName(),
- },
- whenMatched: "fail",
- whenNotMatched: "insert",
- }
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const coll = db.merge_insert_only;
+coll.drop();
+
+const targetColl = db.merge_insert_only_out;
+targetColl.drop();
+
+const pipeline =
+ [{$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}}];
+
+//
+// Test $merge with a non-existent output collection.
+//
+assert.commandWorked(coll.insert({_id: 0}));
+
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Test $merge with an existing output collection.
+//
+assert.commandWorked(coll.remove({_id: 0}));
+assert.commandWorked(coll.insert({_id: 1}));
+coll.aggregate(pipeline);
+assert.eq(2, targetColl.find().itcount());
+
+//
+// Test that $merge fails if there's a duplicate key error.
+//
+assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
+
+//
+// Test that $merge will preserve the indexes and options of the output collection.
+//
+const validator = {
+ a: {$gt: 0}
+};
+targetColl.drop();
+assert.commandWorked(db.createCollection(targetColl.getName(), {validator: validator}));
+assert.commandWorked(targetColl.createIndex({a: 1}));
+
+coll.drop();
+assert.commandWorked(coll.insert({a: 1}));
+
+coll.aggregate(pipeline);
+assert.eq(1, targetColl.find().itcount());
+assert.eq(2, targetColl.getIndexes().length);
+
+const listColl = db.runCommand({listCollections: 1, filter: {name: targetColl.getName()}});
+assert.commandWorked(listColl);
+assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+
+//
+// Test that $merge fails if it violates a unique index constraint.
+//
+coll.drop();
+assert.commandWorked(coll.insert([{_id: 0, a: 0}, {_id: 1, a: 0}]));
+targetColl.drop();
+assert.commandWorked(targetColl.createIndex({a: 1}, {unique: true}));
+
+assertErrorCode(coll, pipeline, ErrorCodes.DuplicateKey);
+
+//
+// Test that a $merge aggregation succeeds even if the _id is stripped out and the "unique key"
+// is the document key, which will be _id for a new collection.
+//
+coll.drop();
+assert.commandWorked(coll.insert({a: 0}));
+targetColl.drop();
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {$merge: {into: targetColl.getName(), whenMatched: "fail", whenNotMatched: "insert"}},
+]));
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Test that a $merge aggregation succeeds even if the _id is stripped out and _id is included
+// in the "on" fields.
+//
+coll.drop();
+assert.commandWorked(coll.insert([{_id: "should be projected away", name: "kyle"}]));
+targetColl.drop();
+assert.commandWorked(targetColl.createIndex({_id: 1, name: -1}, {unique: true}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {
+ $merge: {
+ into: targetColl.getName(),
+ whenMatched: "fail",
+ whenNotMatched: "insert",
+ on: ["_id", "name"]
+ }
+ },
+]));
+assert.eq(1, targetColl.find().itcount());
+
+//
+// Tests for $merge to a database that differs from the aggregation database.
+//
+const foreignDb = db.getSiblingDB("merge_insert_only_foreign");
+const foreignTargetColl = foreignDb.merge_insert_only_out;
+const pipelineDifferentOutputDb = [
+ {$project: {_id: 0}},
+ {
+ $merge: {
+ into: {
+ db: foreignDb.getName(),
+ coll: foreignTargetColl.getName(),
+ },
+ whenMatched: "fail",
+ whenNotMatched: "insert",
}
- ];
-
- foreignDb.dropDatabase();
- coll.drop();
- assert.commandWorked(coll.insert({a: 1}));
-
- if (!FixtureHelpers.isMongos(db)) {
- //
- // Test that $merge implicitly creates a new database when the output collection's database
- // doesn't exist.
- //
- coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().itcount(), 1);
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Explicitly create the collection and database, then fall through to the test below.
- assert.commandWorked(foreignTargetColl.insert({val: "forcing database creation"}));
}
-
- //
- // Re-run the $merge aggregation, which should merge with the existing contents of the
- // collection. We rely on implicit _id generation to give us unique _id values.
- //
- assert.doesNotThrow(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.eq(foreignTargetColl.find().itcount(), 2);
+];
+
+foreignDb.dropDatabase();
+coll.drop();
+assert.commandWorked(coll.insert({a: 1}));
+
+if (!FixtureHelpers.isMongos(db)) {
+ //
+ // Test that $merge implicitly creates a new database when the output collection's database
+ // doesn't exist.
+ //
+ coll.aggregate(pipelineDifferentOutputDb);
+ assert.eq(foreignTargetColl.find().itcount(), 1);
+} else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Explicitly create the collection and database, then fall through to the test below.
+ assert.commandWorked(foreignTargetColl.insert({val: "forcing database creation"}));
+}
+
+//
+// Re-run the $merge aggregation, which should merge with the existing contents of the
+// collection. We rely on implicit _id generation to give us unique _id values.
+//
+assert.doesNotThrow(() => coll.aggregate(pipelineDifferentOutputDb));
+assert.eq(foreignTargetColl.find().itcount(), 2);
}());
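Conceptually, whenMatched: "fail" with whenNotMatched: "insert" treats every result document as a plain insert keyed on the 'on' fields. A rough client-side analogue of the first pipeline in this test, using the same coll/targetColl names (illustrative only; $merge is not implemented this way and the analogue is not transactional):

// Illustrative analogue only: inserting each aggregation result directly reproduces the
// DuplicateKey failure whenever the default 'on' key (_id) already exists in the target.
coll.aggregate([]).forEach(doc => assert.commandWorked(targetColl.insert(doc)));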
diff --git a/jstests/aggregation/sources/merge/mode_keep_existing_insert.js b/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
index b76fb9d20e9..3f146adbcb5 100644
--- a/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
+++ b/jstests/aggregation/sources/merge/mode_keep_existing_insert.js
@@ -4,372 +4,367 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge into a non-existent collection.
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- ]
- });
- })();
-
- // Test $merge into an existing collection.
- (function testMergeIntoExistentCollection() {
- assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge does not update documents in the target collection if they were not modified
- // in the source collection.
- (function testMergeDoesNotUpdateUnmodifiedDocuments() {
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge doesn't update documents in the target collection if they were modified in the
- // source collection.
- (function testMergeDoesNotUpdateModifiedDocuments() {
- // Update and merge a single document.
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
-
- // Update and merge multiple documents.
- assert.commandWorked(source.update({_id: 1}, {a: 11}));
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if it was inserted into the
- // source collection.
- (function testMergeInsertsNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]
- });
- assert.commandWorked(source.deleteOne({_id: 3}));
- assert.commandWorked(target.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 4, c: "d"}
- ]
- });
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "keepExisting", whenNotMatched: "insert"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge into a non-existent collection.
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ ]
+ });
+})();
+
+// Test $merge into an existing collection.
+(function testMergeIntoExistentCollection() {
+ assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge does not update documents in the target collection if they were not modified
+// in the source collection.
+(function testMergeDoesNotUpdateUnmodifiedDocuments() {
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge doesn't update documents in the target collection if they were modified in the
+// source collection.
+(function testMergeDoesNotUpdateModifiedDocuments() {
+ // Update and merge a single document.
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+
+ // Update and merge multiple documents.
+ assert.commandWorked(source.update({_id: 1}, {a: 11}));
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge inserts a new document into the target collection if it was inserted into the
+// source collection.
+(function testMergeInsertsNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 3, b: "c"}]
+ });
+ assert.commandWorked(source.deleteOne({_id: 3}));
+ assert.commandWorked(target.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 4, c: "d"}
+ ]
+ });
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+ assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created
+        // shard key of {_id: 1} will not be covered by the unique index created in this
+        // test, which is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniqueness() {
+ // The 'on' fields contains a single document field.
+ let error = assert.throws(
+ () => source.aggregate([{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+ // The 'on' fields contains multiple document fields.
+ error = assert.throws(
+ () => source.aggregate(
+ [{$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created
+        // shard key of {_id: 1} will not be covered by the unique index created in this
+        // test, which is not allowed.
+ return;
+ }
+
+ // The 'on' fields contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+ // The 'on' fields contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created
+        // shard key of {_id: 1} will not be covered by the unique index created in this
+        // test, which is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected:
+ [{_id: 1, a: {b: "b"}, c: "x"}, {_id: 2, a: {b: "c"}}, {_id: 3, a: {b: 30}, b: "c"}]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created
+        // shard key of {_id: 1} will not be covered by the unique index created in this
+        // test, which is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51185);
+})();
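// Illustrative workaround, not part of this test: the 51132/51185 failures above can be
// avoided by filtering out documents whose 'on' value is missing, null, or an array
// before they ever reach $merge. A pipeline sketch under that assumption:
const onFieldGuardPipeline = [
    {$match: {z: {$exists: true, $ne: null, $not: {$type: "array"}}}},
    {$project: {_id: 0}},
    {$merge: Object.assign({on: "z"}, mergeStage.$merge)}
];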
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({
+        // Remove the _id field from the projection as the arrayEq function cannot ignore
+        // mismatches in the generated ObjectIds. The target collection should contain all
+        // elements from the source and the target even though they had the same _ids and
+        // would have been merged had we not removed the _id field from the projection.
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({
+        // Remove the _id field from the projection as the arrayEq function cannot ignore
+        // mismatches in the generated ObjectIds. The target collection should contain all
+        // elements from the source and the target even though they had the same _ids and
+        // would have been merged had we not removed the _id field from the projection.
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
+
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePreservesIndexesAndOptions() {
+ const validator = {a: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = [{
+ $merge: {
+ into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ whenMatched: "keepExisting",
+ whenNotMatched: "insert"
}
+ }];
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 4, a: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
- (function testMergeFailsIfOnFieldCannotBeVerifiedForUniquness() {
- // The 'on' fields contains a single document field.
- let error =
- assert.throws(() => source.aggregate(
- [{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
- // The 'on' fields contains multiple document fields.
- error = assert.throws(() => source.aggregate([
- {$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}
- ]));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}},
- {_id: 3, a: {b: 30}, b: "c"}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({
- // Remove the _id field from the projection as the arrayEq function cannot ignore
- // mismatches in the ObjectId. The target collection should contain all elements from
- // the source and the target even though they had the same _id's and would have been
- // merged should we not remove the _id field from the aggregate projection.
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- // Remove the _id field from the projection as the arrayEq function cannot ignore
- // mismatches in the ObjectId. The target collection should contain all elements from
- // the source and the target even though they had the same _id's and would have been
- // merged should we not remove the _id field from the aggregate projection.
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
- (function testMergePresrvesIndexesAndOptions() {
- const validator = {a: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = [{
- $merge: {
- into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- whenMatched: "keepExisting",
- whenNotMatched: "insert"
- }
- }];
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force a creation of the database and collection, then fall through the test below.
- assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
- }
-
- assert.commandWorked(source.update({_id: 1}, {a: 1, b: "a"}));
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+        // Force creation of the database and collection, then fall through to the test below.
+ assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ }
+
+ assert.commandWorked(source.update({_id: 1}, {a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
}());
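For intuition, whenMatched: "keepExisting" with whenNotMatched: "insert" behaves much like an upsert that only writes when no match exists. A rough client-side analogue keyed on the default 'on' field _id, using hypothetical sourceColl/targetColl handles (illustrative only; this is not how $merge is implemented):

// Rough analogue only: $setOnInsert writes the document when no match exists and leaves
// matched target documents untouched, mirroring keepExisting/insert semantics.
sourceColl.find().forEach(function(doc) {
    const onInsertDoc = Object.assign({}, doc);
    delete onInsertDoc._id;  // the _id already comes from the filter below
    targetColl.updateOne({_id: doc._id}, {$setOnInsert: onInsertDoc}, {upsert: true});
});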
diff --git a/jstests/aggregation/sources/merge/mode_merge_discard.js b/jstests/aggregation/sources/merge/mode_merge_discard.js
index 401210c1d4d..cc9fff93691 100644
--- a/jstests/aggregation/sources/merge/mode_merge_discard.js
+++ b/jstests/aggregation/sources/merge/mode_merge_discard.js
@@ -4,237 +4,227 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection. The merge operation should succeed and unmatched documents discarded.
- (function testMergeIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]
- });
-
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
- })();
-
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, b: 3}]
- });
- })();
-
- // Test $merge when a field is presented in the source and the target and contains a
- // sub-document value.
- (function testMergeSubdocuments() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 1, a: {b: 1}}));
- assert.commandWorked(target.insert([{_id: 1, a: {c: 2}}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: {b: 1}}, {_id: 3, b: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: {b: 1}}, {_id: 2, a: {b: 2}}]));
- assert.commandWorked(target.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: {b: 1}, b: 1}, {_id: 2, a: {b: 2}}, {_id: 3, b: 3}]
- });
- })();
-
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge does not insert a new document into the target collection if it was inserted
- // into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert([
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "discard"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge should succeed and unmatched documents should be discarded.
+(function testMergeIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
+})();
+
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when a field is present in both the source and the target and contains a
+// sub-document value.
+(function testMergeSubdocuments() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 1, a: {b: 1}}));
+ assert.commandWorked(target.insert([{_id: 1, a: {c: 2}}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: {b: 1}}, {_id: 3, b: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: {b: 1}}, {_id: 2, a: {b: 2}}]));
+ assert.commandWorked(target.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: {b: 1}, b: 1}, {_id: 2, a: {b: 2}}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge does not insert a new document into the target collection if it was inserted
+// into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+    // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 3, a: 30, b: "c", c: "x"}
- ]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c", c: "x"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, d: "z"}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, c: "y", d: "z"},
- ]
- });
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+    // The 'on' fields contain multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c", c: "x"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 4, a: 30, b: "c", c: "x"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, d: "z"}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 2, a: {b: "c"}, c: "y", d: "z"},
+ ]
+ });
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
}());
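For orientation, a minimal mongo shell sketch of the {whenMatched: "merge", whenNotMatched: "discard"} mode exercised above (the collection names src and tgt are placeholders, not part of the test suite):

    // Matched target documents have the source fields merged in; source documents
    // with no match in the target are silently discarded (nothing is inserted).
    db.src.insertMany([{_id: 1, a: 1}, {_id: 2, a: 2}]);
    db.tgt.insertOne({_id: 1, b: 1});
    db.src.aggregate(
        [{$merge: {into: "tgt", whenMatched: "merge", whenNotMatched: "discard"}}]);
    // db.tgt.find() now returns only {_id: 1, a: 1, b: 1}; the source document
    // {_id: 2, a: 2} had no match and was dropped.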
diff --git a/jstests/aggregation/sources/merge/mode_merge_fail.js b/jstests/aggregation/sources/merge/mode_merge_fail.js
index 9bf0eadb148..de0842a02b9 100644
--- a/jstests/aggregation/sources/merge/mode_merge_fail.js
+++ b/jstests/aggregation/sources/merge/mode_merge_fail.js
@@ -4,116 +4,113 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]
- });
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1, b: 1}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3, b: 3}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, a: 3, b: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}, {_id: 3, a: 3, b: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge uses unorderded batch update. When a mismatch is detected in a batch, the error
- // should be returned once the batch is processed and no further documents should be processed
- // and updated.
- (function testMergeUnorderedBatchUpdate() {
- const maxBatchSize = 16 * 1024 * 1024; // 16MB
- const docSize = 1024 * 1024; // 1MB
- const numDocs = 20;
- const maxDocsInBatch = maxBatchSize / docSize;
+// Test $merge uses unordered batch updates. When a mismatch is detected in a batch, the error
+// should be returned once the batch is processed and no further documents should be processed
+// and updated.
+(function testMergeUnorderedBatchUpdate() {
+ const maxBatchSize = 16 * 1024 * 1024; // 16MB
+ const docSize = 1024 * 1024; // 1MB
+ const numDocs = 20;
+ const maxDocsInBatch = maxBatchSize / docSize;
- assert(source.drop());
- assert(target.drop());
+ assert(source.drop());
+ assert(target.drop());
- // Insert 'numDocs' documents of size 'docSize' into the source collection.
- generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
+ // Insert 'numDocs' documents of size 'docSize' into the source collection.
+ generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
- // Copy over documents from the source collection into the target and remove the 'padding'
- // field from the projection, so we can distinguish which documents have been modified by
- // the $merge stage.
- assert.doesNotThrow(
- () => source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
+ // Copy over documents from the source collection into the target and remove the 'padding'
+ // field from the projection, so we can distinguish which documents have been modified by
+ // the $merge stage.
+ assert.doesNotThrow(() =>
+ source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
- // Remove one document from the target collection so that $merge fails. This document should
- // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
- // field in ascending order. Since each document in the source collection is 1MB, and the
- // max batch size is 16MB, the first batch will contain documents with the _id in the range
- // of [0, 15].
- assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
+ // Remove one document from the target collection so that $merge fails. This document should
+ // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
+ // field in ascending order. Since each document in the source collection is 1MB, and the
+ // max batch size is 16MB, the first batch will contain documents with the _id in the range
+ // of [0, 15].
+ assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
- // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
- assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
+ // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
+ assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
- // Run the $merge pipeline and ensure it fails, as there is one document in the source
- // collection without a match in the target.
- const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ // Run the $merge pipeline and ensure it fails, as there is one document in the source
+ // collection without a match in the target.
+ const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- // There will be maxDocsInBatch documents in the batch, one without a match.
- const numDocsModified = maxDocsInBatch - 1;
- // All remaining documents except those in the first batch must be left unmodified.
- const numDocsUnmodified = numDocs - maxDocsInBatch;
- assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
- assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
- })();
+ // There will be maxDocsInBatch documents in the batch, one without a match.
+ const numDocsModified = maxDocsInBatch - 1;
+ // All remaining documents except those in the first batch must be left unmodified.
+ const numDocsUnmodified = numDocs - maxDocsInBatch;
+ assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
+ assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
+})();
}());
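A similar sketch for the {whenMatched: "merge", whenNotMatched: "fail"} mode covered by this file (again with placeholder collection names):

    // A source document with no match in the target aborts the aggregation with
    // MergeStageNoMatchingDocument; because writes are batched and unordered,
    // documents handled before the failure may already have been updated.
    db.src.insertMany([{_id: 1, a: 1}, {_id: 2, a: 2}]);
    db.tgt.insertOne({_id: 1, b: 1});
    db.src.aggregate(
        [{$merge: {into: "tgt", whenMatched: "merge", whenNotMatched: "fail"}}]);
    // Throws: the source document {_id: 2, a: 2} has no matching target document.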
diff --git a/jstests/aggregation/sources/merge/mode_merge_insert.js b/jstests/aggregation/sources/merge/mode_merge_insert.js
index 370963a24d2..577479f7a46 100644
--- a/jstests/aggregation/sources/merge/mode_merge_insert.js
+++ b/jstests/aggregation/sources/merge/mode_merge_insert.js
@@ -4,368 +4,365 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge into a non-existent collection.
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a"},
- ]
- });
- })();
-
- // Test $merge into an existing collection.
- (function testMergeIntoExistentCollection() {
- assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge does not update documents in the target collection if they were not modified
- // in the source collection.
- (function testMergeDoesNotUpdateUnmodifiedDocuments() {
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- })();
-
- // Test $merge updates documents in the target collection if they were modified in the source
- // collection.
- (function testMergeUpdatesModifiedDocuments() {
- // Update and merge a single document.
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 22, b: "b", c: "c"}]
- });
-
- // Update and merge multiple documents.
- assert.commandWorked(source.update({_id: 1}, {a: 11}));
- assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 11, b: "a"}, {_id: 2, a: 22, b: "b", c: "c", d: "d"}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if it was inserted into the
- // source collection.
- (function testMergeInsertsNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- {_id: 3, a: 3, b: "c"}
- ]
- });
- assert.commandWorked(source.deleteOne({_id: 3}));
- assert.commandWorked(target.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- {_id: 3, a: 3, b: "c"},
- {_id: 4, a: 4, c: "d"}
- ]
- });
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 4, a: 11}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 11, b: "a"},
- {_id: 2, a: 22, b: "b", c: "c", d: "d"},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
- (function testMergeFailsIfOnFieldCannotBeVerifiedForUniquness() {
- // The 'on' fields contains a single document field.
- let error =
- assert.throws(() => source.aggregate(
- [{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
- // The 'on' fields contains multiple document fields.
- error = assert.throws(() => source.aggregate([
- {$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}
- ]));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
-
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 2, a: 2, b: "b"},
- {_id: 4, a: 30, b: "c", c: "y"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "merge", whenNotMatched: "insert"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge into a non-existent collection.
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a"},
+ ]
+ });
+})();
+
+// Test $merge into an existing collection.
+(function testMergeIntoExistentCollection() {
+ assert.commandWorked(source.insert({_id: 2, a: 2, b: "b"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge does not update documents in the target collection if they were not modified
+// in the source collection.
+(function testMergeDoesNotUpdateUnmodifiedDocuments() {
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+})();
+
+// Test $merge updates documents in the target collection if they were modified in the source
+// collection.
+(function testMergeUpdatesModifiedDocuments() {
+ // Update and merge a single document.
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 22, b: "b", c: "c"}]
+ });
+
+ // Update and merge multiple documents.
+ assert.commandWorked(source.update({_id: 1}, {a: 11}));
+ assert.commandWorked(source.update({_id: 2}, {a: 22, c: "c", d: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 11, b: "a"}, {_id: 2, a: 22, b: "b", c: "c", d: "d"}]
+ });
+})();
+
+// Test $merge inserts a new document into the target collection if it was inserted into the
+// source collection.
+(function testMergeInsertsNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ {_id: 3, a: 3, b: "c"}
+ ]
+ });
+ assert.commandWorked(source.deleteOne({_id: 3}));
+ assert.commandWorked(target.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ {_id: 3, a: 3, b: "c"},
+ {_id: 4, a: 4, c: "d"}
+ ]
+ });
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+ assert.commandWorked(target.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 11}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 11, b: "a"},
+ {_id: 2, a: 22, b: "b", c: "c", d: "d"},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniquness() {
+    // The 'on' field contains a single document field.
+ let error = assert.throws(
+ () => source.aggregate([{$merge: Object.assign({on: "nonexistent"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+    // The 'on' fields contain multiple document fields.
+ error = assert.throws(
+ () => source.aggregate(
+ [{$merge: Object.assign({on: ["nonexistent1", "nonexistent2"]}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+    // The 'on' field contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+
+    // The 'on' fields contain multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", c: "x"},
+ {_id: 2, a: 2, b: "b"},
+ {_id: 4, a: 30, b: "c", c: "y"},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 1, a: {b: "b"}, c: "x"},
{_id: 2, a: {b: "c"}, c: "y"},
{_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
- (function testMergePresrvesIndexesAndOptions() {
- const validator = {a: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
- });
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = [{
- $merge: {
- into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- whenMatched: "merge",
- whenNotMatched: "insert"
- }
- }];
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq(
- {actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force a creation of the database and collection, then fall through the test below.
- assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ ]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because an implicitly created shard
+ // key of {_id: 1} will not be covered by a unique index created in this test, which
+ // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "z"}, mergeStage.$merge)}]));
+ assert.commandFailedWithCode(error, 51185);
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, b: "a"}, {a: 2, b: "b"}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
+
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePresrvesIndexesAndOptions() {
+ const validator = {a: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]
+ });
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = [{
+ $merge: {
+ into: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ whenMatched: "merge",
+ whenNotMatched: "insert"
}
+ }];
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Force a creation of the database and collection, then fall through the test below.
+ assert.commandWorked(foreignTarget.insert({_id: 1, a: 1}));
+ }
+
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, a: 1, b: "a"}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
}());
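And a sketch of the {whenMatched: "merge", whenNotMatched: "insert"} mode with a non-_id 'on' field, as tested above; the unique index and the $project stage mirror the test setup, and the collection names are illustrative only:

    // The 'on' fields must be backed by a unique index on the target collection.
    db.tgt.createIndex({a: 1}, {unique: true});
    db.tgt.insertOne({a: 1, y: "old"});
    db.src.insertMany([{a: 1, x: "new"}, {a: 2, x: "also new"}]);
    // _id is projected away so that documents are matched purely on 'a'.
    db.src.aggregate([
        {$project: {_id: 0}},
        {$merge: {into: "tgt", on: "a", whenMatched: "merge", whenNotMatched: "insert"}}
    ]);
    // tgt now holds {a: 1, x: "new", y: "old"} plus a newly inserted {a: 2, x: "also new"}.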
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_discard.js b/jstests/aggregation/sources/merge/mode_pipeline_discard.js
index 12b556b8384..0c9333ca2af 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_discard.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_discard.js
@@ -4,279 +4,271 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
- // A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
- // for the whenMatched mode. If 'initialStages' array is specified, the $merge stage will be
- // appended to this array and the result returned to the caller, otherwise an array with a
- // single $merge stage is returned. An output collection for the $merge stage is specified
- // in the 'target', and the $merge stage 'on' fields in the 'on' parameter.
- function makeMergePipeline(
- {target = "", initialStages = [], updatePipeline = [], on = "_id"} = {}) {
- return initialStages.concat([{
- $merge:
- {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "discard"}
- }]);
- }
+// A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
+// for the whenMatched mode. If the 'initialStages' array is specified, the $merge stage will be
+// appended to it and the result returned to the caller; otherwise an array with a single $merge
+// stage is returned. The output collection for the $merge stage is specified in the 'target'
+// parameter, and the $merge stage's 'on' fields in the 'on' parameter.
+function makeMergePipeline(
+ {target = "", initialStages = [], updatePipeline = [], on = "_id"} = {}) {
+ return initialStages.concat(
+ [{$merge: {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "discard"}}]);
+}
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection. The merge operation should succeed and unmatched documents discarded.
- (function testMergeIfMatchingDocumentNotFound() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge should succeed and unmatched documents should be discarded.
+(function testMergeIfMatchingDocumentNotFound() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 3, b: 3, x: 1, y: 2}]
- });
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 3, b: 3, x: 1, y: 2}]
+ });
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 1, y: 2}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 1, y: 2}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 1, y: 2}]
- });
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 1, y: 2}]});
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 2, b: 2, x: 1, y: 2}, {_id: 3, b: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 1, y: 2}, {_id: 2, b: 2, x: 1, y: 2}, {_id: 3, b: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge does not insert a new document into the target collection if it was inserted
- // into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test that $merge does not insert into the target collection a document that was newly
+// inserted into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- const pipeline =
- makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ const pipeline =
+ makeMergePipeline({target: target.getName(), updatePipeline: [{$set: {x: 1, y: 2}}]});
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a",
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a",
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, c: "x", x: 1, y: 2},
- {_id: 4, a: 30, c: "y", x: 1, y: 2},
- {_id: 5, a: 40, c: "z"}
- ]
- });
+    // The 'on' parameter contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, c: "x", x: 1, y: 2},
+ {_id: 4, a: 30, c: "y", x: 1, y: 2},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: ["a", "b"],
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: ["a", "b"],
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", x: 1, y: 2},
- {_id: 4, a: 30, b: "c", c: "y", x: 1, y: 2},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
+    // The 'on' parameter contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: 1, b: "a", x: 1, y: 2},
+ {_id: 4, a: 30, b: "c", c: "y", x: 1, y: 2},
+ {_id: 5, a: 40, c: "z"}
+ ]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
- const pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a.b",
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ const pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a.b",
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, x: 1, y: 2},
- ]
- });
- })();
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 2, a: {b: "c"}, x: 1, y: 2},
+ ]
+ });
+})();
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+    // The _id is the single 'on' field (the default).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- on: ["_id", "a"],
- target: target.getName(),
- updatePipeline: [{$set: {x: 1, y: 2}}]
- });
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ on: ["_id", "a"],
+ target: target.getName(),
+ updatePipeline: [{$set: {x: 1, y: 2}}]
+ });
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert([{_id: 1, c: 1}]));
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, c: 1}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- let : {x: "$a", y: "$b"},
- whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
- whenNotMatched: "discard"
- }
- }]));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}]});
- })();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ let : {x: "$a", y: "$b"},
+ whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
+ whenNotMatched: "discard"
+ }
+ }]));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}]});
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_fail.js b/jstests/aggregation/sources/merge/mode_pipeline_fail.js
index 7d8d2337949..60c46ce8708 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_fail.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_fail.js
@@ -4,95 +4,89 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge:
- {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: [{$addFields: {x: 2}}], whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
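+
+// Illustrative note (an assumption inferred from the assertions below, not a guarantee stated
+// here): when whenNotMatched is "fail", the aggregate throws
+// ErrorCodes.MergeStageNoMatchingDocument once an unmatched source document is encountered, but
+// updates already applied to matched target documents are not rolled back, e.g.
+//   assert.throws(() => source.aggregate(pipeline));  // may leave some target docs with {x: 2}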
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 3, b: 3, x: 2}]
- });
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 3, b: 3, x: 2}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 2}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, b: 3, x: 2}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, b: 1, x: 2}, {_id: 2, b: 2, x: 2}, {_id: 3, b: 3, x: 2}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, b: 1, x: 2}, {_id: 2, b: 2, x: 2}, {_id: 3, b: 3, x: 2}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert([{_id: 1, c: 1}, {_id: 2, c: 2}]));
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, c: 1}, {_id: 2, c: 2}]));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- let : {x: "$a", y: "$b"},
- whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
- whenNotMatched: "fail"
- }
- }]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, z: 2}, {_id: 2, c: 2, z: 4}]
- });
- })();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ let : {x: "$a", y: "$b"},
+ whenMatched: [{$set: {z: {$add: ["$$x", "$$y"]}}}],
+ whenNotMatched: "fail"
+ }
+ }]));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, c: 2, z: 4}]});
+})();
}());
diff --git a/jstests/aggregation/sources/merge/mode_pipeline_insert.js b/jstests/aggregation/sources/merge/mode_pipeline_insert.js
index b8f8374cfc9..df3414e0950 100644
--- a/jstests/aggregation/sources/merge/mode_pipeline_insert.js
+++ b/jstests/aggregation/sources/merge/mode_pipeline_insert.js
@@ -4,644 +4,624 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- // A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
- // for the whenMatched mode. If 'initialStages' array is specified, the $merge stage will be
- // appended to this array and the result returned to the caller, otherwise an array with a
- // single $merge stage is returned. An output collection for the $merge stage is specified
- // in the 'target', and the $merge stage 'on' fields in the 'on' parameter. The 'letVars'
- // parameter describes the 'let' argument of the $merge stage and holds variables that can be
- // referenced in the pipeline.
- function makeMergePipeline({target = "",
- initialStages = [],
- updatePipeline = [],
- on = "_id",
- letVars = undefined} = {}) {
- const baseObj = letVars !== undefined ? {let : letVars} : {};
- return initialStages.concat([{
- $merge: Object.assign(
- baseObj,
- {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "insert"})
- }]);
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+// A helper function to create a pipeline with a $merge stage using a custom 'updatePipeline'
+// for the whenMatched mode. If an 'initialStages' array is specified, the $merge stage is
+// appended to it and the resulting array is returned to the caller; otherwise an array with a
+// single $merge stage is returned. The output collection for the $merge stage is specified in
+// the 'target' parameter, and the $merge stage 'on' fields in the 'on' parameter. The 'letVars'
+// parameter describes the 'let' argument of the $merge stage and holds variables that can be
+// referenced in the update pipeline.
+function makeMergePipeline(
+ {target = "", initialStages = [], updatePipeline = [], on = "_id", letVars = undefined} = {}) {
+ const baseObj = letVars !== undefined ? {let : letVars} : {};
+ return initialStages.concat([{
+ $merge: Object.assign(
+ baseObj, {into: target, on: on, whenMatched: updatePipeline, whenNotMatched: "insert"})
+ }]);
+}
+
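+// Example (for illustration; not used by the tests below): with the defaults above, a call such as
+//   makeMergePipeline({target: "out", updatePipeline: [{$set: {x: 1}}]})
+// returns
+//   [{$merge: {into: "out", on: "_id", whenMatched: [{$set: {x: 1}}], whenNotMatched: "insert"}}]
+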
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+
+(function testMergeIntoNonExistentCollection() {
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ ]
+ });
+})();
+
+// Test $merge inserts a document into an existing target collection if no matching document
+// is found.
+(function testMergeInsertsDocumentIfMatchNotFound() {
+ assert.commandWorked(target.deleteMany({}));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1, y: 2}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 2}]});
+})();
+
+// Test $merge updates an existing document in the target collection by applying a
+// pipeline-style update.
+(function testMergeUpdatesDocumentIfMatchFound() {
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$project: {x: {$add: ["$x", 1]}, y: {$add: ["$y", 2]}}}]
+ })));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2, y: 4}]});
+})();
+
+// Test $merge with various pipeline stages which are currently supported by the pipeline-style
+// update.
+(function testMergeWithSupportedUpdatePipelineStages() {
+ assert(source.drop());
+ assert(target.drop());
+
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+
+ // Test $addFields stage.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: {$add: ["$b", 1]}}}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: null}]});
+
+ // Test $project stage.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: 1}));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$project: {x: {$add: ["$b", 1]}}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2}, {_id: 2, x: null}]});
+
+ // Test $replaceWith stage.
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$replaceWith: "$c"}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]});
+
+ // Test $replaceRoot stage.
+ assert(target.drop());
+ assert.commandWorked(
+ target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$replaceRoot: {newRoot: "$c"}}]})));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]});
+})();
+
+// Test $merge inserts a new document into the target collection if no matching document is
+// found, by applying a pipeline-style update with upsert=true semantics.
+(function testMergeInsertDocumentIfMatchNotFound() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1}));
+ assert.commandWorked(target.insert({_id: 2, a: 2}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1}, {_id: 2, a: 2}]});
+})();
+
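+// Note (illustrative assumption, not part of the original test): the behaviour exercised above is
+// roughly equivalent to a pipeline-style update with upsert enabled, e.g.
+//   target.updateOne({_id: 1}, [{$addFields: {x: 1}}], {upsert: true})
+// where the unmatched document is built by applying the update pipeline to the match criteria,
+// which is why the inserted document is {_id: 1, x: 1} rather than the full source document.
+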
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$project: {x: {$add: ["$x", 1]}, a: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ {_id: 2, a: 2},
+ ]
+ });
+})();
+
+// Test $merge fails if a unique index constraint in the target collection is violated.
+(function testMergeFailsIfTargetUniqueKeyIsViolated() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
}
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
-
- (function testMergeIntoNonExistentCollection() {
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- ]
- });
- })();
-
- // Test $merge inserts a document into an existing target collection if no matching document
- // is found.
- (function testMergeInsertsDocumentIfMatchNotFound() {
- assert.commandWorked(target.deleteMany({}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1, y: 2}}]})));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 2}]});
- })();
-
- // Test $merge updates an existing document in the target collection by applying a
- // pipeline-style update.
- (function testMergeUpdatesDocumentIfMatchFound() {
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$project: {x: {$add: ["$x", 1]}, y: {$add: ["$y", 2]}}}]
- })));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, x: 2, y: 4}]});
- })();
-
- // Test $merge with various pipeline stages which are currently supported by the pipeline-style
- // update.
- (function testMergeWithSupportedUpdatePipelineStages() {
- assert(source.drop());
- assert(target.drop());
-
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert({_id: 1, b: 1}));
-
- // Test $addFields stage.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$addFields: {x: {$add: ["$b", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1, x: 2}, {_id: 2, x: null}]});
-
- // Test $project stage.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: 1}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$project: {x: {$add: ["$b", 1]}}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, x: 2}, {_id: 2, x: null}]});
-
- // Test $replaceWith stage.
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$replaceWith: "$c"}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]
- });
-
- // Test $replaceRoot stage.
- assert(target.drop());
- assert.commandWorked(
- target.insert([{_id: 1, b: 1, c: {x: {y: 1}}}, {_id: 2, b: 2, c: {x: {y: 2}}}]));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$replaceRoot: {newRoot: "$c"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: {y: 1}}, {_id: 2, x: {y: 2}}]
- });
- })();
-
- // Test $merge inserts a new document into the target collection if not matching document is
- // found by applying a pipeline-style update with upsert=true semantics.
- (function testMergeInsertDocumentIfMatchNotFound() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1}));
- assert.commandWorked(target.insert({_id: 2, a: 2}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {x: 1}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, x: 1}, {_id: 2, a: 2}]});
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$project: {x: {$add: ["$x", 1]}, a: 1}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- {_id: 2, a: 2},
- ]
- });
- })();
-
- // Test $merge fails if a unique index constraint in the target collection is violated.
- (function testMergeFailsIfTargetUniqueKeyIsViolated() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 4, a: 2}));
- assert.commandWorked(target.insert([{_id: 1, x: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- const error = assert.throws(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$project: {x: 1, a: 1}}]})));
- assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, x: 1},
- {_id: 2, a: 2},
- ]
- });
- assert.commandWorked(target.dropIndex({a: 1}));
- })();
-
- // Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
- (function testMergeFailsIfOnFieldCannotBeVerifiedForUniquness() {
- // The 'on' fields contains a single document field.
- let error = assert.throws(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- on: "nonexistent",
- updatePipeline: [{$project: {x: 1, a: 1}}]
- })));
- assert.commandFailedWithCode(error, [51190, 51183]);
-
- // The 'on' fields contains multiple document fields.
- error = assert.throws(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- on: ["nonexistent1", "nonexistent2"],
- updatePipeline: [{$project: {x: 1, a: 1}}]
- })));
- assert.commandFailedWithCode(error, [51190, 51183]);
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 30}]));
- assert.commandWorked(
- target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a",
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{a: 1, b: 1, z: 1}, {a: 2, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
- });
-
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 4}, {_id: 3, a: 30, b: 2}]));
- assert.commandWorked(
- target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: ["a", "b"],
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected:
- [{a: 1, b: 1, z: 1}, {a: 2, b: 4, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
- {_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, c: "y"}));
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "a.b",
- updatePipeline: [{$addFields: {z: 1}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: {b: "b"}, z: 1},
- {_id: 2, a: {b: "c"}, c: "y", z: 1},
- {_id: 3, a: {b: 30}, z: 1}
- ]
- });
- })();
-
- // Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
- // null or an array.
- (function testMergeFailsIfOnFieldIsInvalid() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
-
- const pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- on: "z",
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The 'on' field is missing.
- assert.commandWorked(source.insert({_id: 1}));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is null.
- assert.commandWorked(source.update({_id: 1}, {z: null}));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51132);
-
- // The 'on' field is an array.
- assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, 51185);
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field. When the _id is missing, the $merge stage will create a new ObjectId in
- // its place before performing the insert or update.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- let pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- target: target.getName(),
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- // There is a matching document in the target with {_id: 1}, but since we cannot match
- // it (no _id in projection), we just insert two new documents from the source
- // collection by applying a pipeline-style update.
- expected: [{b: "c"}, {z: 1}, {z: 1}]
- });
-
- pipeline = makeMergePipeline({
- initialStages: [{$project: {_id: 0}}],
- on: ["_id", "a"],
- target: target.getName(),
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find({}, {_id: 0}).toArray(),
- expected: [{b: "c"}, {a: 1, z: 1}, {a: 2, z: 1}]
- });
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
-
- // Test $merge preserves indexes and options of the existing target collection.
- (function testMergePresrvesIndexesAndOptions() {
- const validator = {z: {$gt: 0}};
- assert(target.drop());
- assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
- assert.commandWorked(target.createIndex({a: 1}));
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {target: target.getName(), updatePipeline: [{$addFields: {z: 1}}]})));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, z: 1}, {_id: 2, z: 1}]});
- assert.eq(2, target.getIndexes().length);
-
- const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
- assert.commandWorked(listColl);
- assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
- })();
-
- // Test $merge implicitly creates a new database when the target collection's database doesn't
- // exist.
- (function testMergeImplicitlyCreatesTargetDatabase() {
- assert(source.drop());
- assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
-
- const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
- assert.commandWorked(foreignDb.dropDatabase());
- const foreignTarget = foreignDb[`${jsTest.name()}_target`];
- const foreignPipeline = makeMergePipeline({
- target: {db: foreignDb.getName(), coll: foreignTarget.getName()},
- updatePipeline: [{$addFields: {z: 1}}]
- });
-
- if (!FixtureHelpers.isMongos(db)) {
- assert.doesNotThrow(() => source.aggregate(foreignPipeline));
- assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
- } else {
- // Implicit database creation is prohibited in a cluster.
- const error = assert.throws(() => source.aggregate(foreignPipeline));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force a creation of the database and collection, then fall through the test
- // below.
- assert.commandWorked(foreignTarget.insert({_id: 1}));
- }
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 4, a: 2}));
+ assert.commandWorked(target.insert([{_id: 1, x: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ const error =
+ assert.throws(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$project: {x: 1, a: 1}}]})));
+ assert.commandFailedWithCode(error, ErrorCodes.DuplicateKey);
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, x: 1},
+ {_id: 2, a: 2},
+ ]
+ });
+ assert.commandWorked(target.dropIndex({a: 1}));
+})();
+
+// Test $merge fails if it cannot find an index to verify that the 'on' fields will be unique.
+(function testMergeFailsIfOnFieldCannotBeVerifiedForUniquness() {
+    // The 'on' parameter contains a single document field.
+ let error = assert.throws(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ on: "nonexistent",
+ updatePipeline: [{$project: {x: 1, a: 1}}]
+ })));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+
+    // The 'on' parameter contains multiple document fields.
+ error = assert.throws(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ on: ["nonexistent1", "nonexistent2"],
+ updatePipeline: [{$project: {x: 1, a: 1}}]
+ })));
+ assert.commandFailedWithCode(error, [51190, 51183]);
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+    // The 'on' parameter contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 30}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a",
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{a: 1, b: 1, z: 1}, {a: 2, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
+ });
+
+    // The 'on' parameter contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 4}, {_id: 3, a: 30, b: 2}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, b: 1}, {_id: 4, a: 30, b: 2}, {_id: 5, a: 40, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: ["a", "b"],
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{a: 1, b: 1, z: 1}, {a: 2, b: 4, z: 1}, {a: 30, b: 2, z: 1}, {a: 40, b: 3}]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}, c: "y"}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "a.b",
+ updatePipeline: [{$addFields: {z: 1}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
+ {_id: 1, a: {b: "b"}, z: 1},
+ {_id: 2, a: {b: "c"}, c: "y", z: 1},
+ {_id: 3, a: {b: 30}, z: 1}
+ ]
+ });
+})();
+
+// Test $merge fails if the value of the 'on' field in a document is invalid, e.g. missing,
+// null or an array.
+(function testMergeFailsIfOnFieldIsInvalid() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by the unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"z": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"z": 1}, {unique: true}));
+
+ const pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ on: "z",
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ // The 'on' field is missing.
+ assert.commandWorked(source.insert({_id: 1}));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is null.
+ assert.commandWorked(source.update({_id: 1}, {z: null}));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51132);
+
+ // The 'on' field is an array.
+ assert.commandWorked(source.update({_id: 1}, {z: [1, 2]}));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, 51185);
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field. When the _id is missing, the $merge stage will create a new ObjectId in
+// its place before performing the insert or update.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ let pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ target: target.getName(),
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+    // The _id is the single 'on' field (the default).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ // There is a matching document in the target with {_id: 1}, but since we cannot match
+ // it (no _id in projection), we just insert two new documents from the source
+ // collection by applying a pipeline-style update.
+ expected: [{b: "c"}, {z: 1}, {z: 1}]
+ });
+
+ pipeline = makeMergePipeline({
+ initialStages: [{$project: {_id: 0}}],
+ on: ["_id", "a"],
+ target: target.getName(),
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find({}, {_id: 0}).toArray(),
+ expected: [{b: "c"}, {a: 1, z: 1}, {a: 2, z: 1}]
+ });
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
+
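+// Illustrative note (an assumption, not asserted by the test above): because the projection strips
+// _id and the default 'on' field is _id, each unmatched source document is inserted with a freshly
+// generated ObjectId, which could be observed in the shell with something like
+//   target.find({z: 1}).toArray().forEach(doc => assert(doc._id instanceof ObjectId));
+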
+// Test $merge preserves indexes and options of the existing target collection.
+(function testMergePresrvesIndexesAndOptions() {
+ const validator = {z: {$gt: 0}};
+ assert(target.drop());
+ assert.commandWorked(db.createCollection(target.getName(), {validator: validator}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline(
+ {target: target.getName(), updatePipeline: [{$addFields: {z: 1}}]})));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, z: 1}, {_id: 2, z: 1}]});
+ assert.eq(2, target.getIndexes().length);
+
+ const listColl = db.runCommand({listCollections: 1, filter: {name: target.getName()}});
+ assert.commandWorked(listColl);
+ assert.eq(validator, listColl.cursor.firstBatch[0].options["validator"]);
+})();
+
+// Test $merge implicitly creates a new database when the target collection's database doesn't
+// exist.
+(function testMergeImplicitlyCreatesTargetDatabase() {
+ assert(source.drop());
+ assert.commandWorked(source.insert({_id: 1, a: 1, b: "a"}));
+
+ const foreignDb = db.getSiblingDB(`${jsTest.name()}_foreign_db`);
+ assert.commandWorked(foreignDb.dropDatabase());
+ const foreignTarget = foreignDb[`${jsTest.name()}_target`];
+ const foreignPipeline = makeMergePipeline({
+ target: {db: foreignDb.getName(), coll: foreignTarget.getName()},
+ updatePipeline: [{$addFields: {z: 1}}]
+ });
+
+ if (!FixtureHelpers.isMongos(db)) {
assert.doesNotThrow(() => source.aggregate(foreignPipeline));
assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
- assert.commandWorked(foreignDb.dropDatabase());
- })();
-
- // Test that $merge can reference the default 'let' variable 'new' which holds the entire
- // document from the source collection.
- (function testMergeWithDefaultLetVariable() {
- assert(source.drop());
- assert(target.drop());
-
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- target: target.getName(),
- updatePipeline: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 4}]});
- })();
-
- // Test that the default 'let' variable 'new' is not available once the 'let' argument to the
- // $merge stage is specified explicitly.
- (function testMergeCannotUseDefaultLetVariableIfLetIsSpecified() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- const error = assert.throws(() => source.aggregate(makeMergePipeline({
- letVars: {foo: "bar"},
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assert.commandFailedWithCode(error, 17276);
- })();
-
- // Test that $merge can accept an empty object holding no variables and the default 'new'
- // variable is not available.
- (function testMergeWithEmptyLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // Can use an empty object.
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {letVars: {}, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
- });
-
- // No default variable 'new' is available.
- const error = assert.throws(() => source.aggregate(makeMergePipeline({
- letVars: {},
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assert.commandFailedWithCode(error, 17276);
- })();
-
- // Test that $merge can accept a null value as the 'let' argument and the default variable 'new'
- // can be used.
- // Note that this is not a desirable behaviour but rather a limitation in the IDL parser which
- // cannot differentiate between an optional field specified explicitly as 'null', or not
- // specified at all. In both cases it will treat the field like it wasn't specified. So, this
- // test ensures that we're aware of this limitation. Once the limitation is addressed in
- // SERVER-41272, this test should be updated to accordingly.
- (function testMergeWithNullLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // Can use a null 'let' argument.
- assert.doesNotThrow(
- () => source.aggregate(makeMergePipeline(
- {letVars: null, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
- });
-
- // Can use the default 'new' variable.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: null,
- target: target.getName(),
- updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, x: 1, y: 1}, {_id: 2, x: 2, y: 2}]
- });
- })();
-
- // Test that constant values can be specified in the 'let' argument and referenced in the update
- // pipeline.
- (function testMergeWithConstantLetVariable() {
- // Non-array constants.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {a: 1, b: "foo", c: true},
- target: target.getName(),
- updatePipeline: [{$set: {x: "$$a", y: "$$b", z: "$$c"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected:
- [{_id: 1, c: 1, x: 1, y: "foo", z: true}, {_id: 2, x: 1, y: "foo", z: true}]
- });
-
- // Constant array.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {a: [1, 2, 3]},
- target: target.getName(),
- updatePipeline: [{$set: {x: {$arrayElemAt: ["$$a", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 2}]});
- })();
-
- // Test that variables referencing the fields in the source document can be specified in the
- // 'let' argument and referenced in the update pipeline.
- (function testMergeWithNonConstantLetVariables() {
- // Non-array fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a", y: "$b"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$add: ["$$x", "$$y"]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 4}]});
-
- // Array field with expressions in the pipeline.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: [1, 2, 3]}, {_id: 2, a: [4, 5, 6]}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$arrayElemAt: ["$$x", 1]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 5}]});
-
- // Array field with expressions in the 'let' argument.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: {$arrayElemAt: ["$a", 2]}},
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$x"}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 3}, {_id: 2, z: 6}]});
- })();
-
- // Test that variables using the dotted path can be specified in the 'let' argument and
- // referenced in the update pipeline.
- (function testMergeWithDottedPathLetVariables() {
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: {b: {c: 2}}}, {_id: 2, a: {b: {c: 3}}}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- letVars: {x: "$a.b.c"},
- target: target.getName(),
- updatePipeline: [{$set: {z: {$pow: ["$$x", 2]}}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 4}, {_id: 2, z: 9}]});
- })();
-
- // Test that 'let' variables are referred to the computed document in the aggregation pipeline,
- // not the original document in the source collection.
- (function testMergeLetVariablesHoldsComputedValues() {
- // Test the default 'new' variable.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- // In the $group stage the total field 'a' uses the same name as in the source collection
- // intentionally, to make sure that even when a referenced field is present in the source
- // collection under the same name, the actual value for the variable will be picked up from
- // the computed document.
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$new"}}]
- })));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, c: 1, z: {_id: 1, a: 3}}, {_id: 2, z: {_id: 2, a: 3}}]
- });
-
- // Test custom 'let' variables.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(
- source.insert([{_id: 1, a: 1, b: 5}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
- assert.commandWorked(target.insert({_id: 1, c: 1}));
-
- assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
- initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
- letVars: {x: {$pow: ["$a", 2]}},
- target: target.getName(),
- updatePipeline: [{$set: {z: "$$x"}}]
- })));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 49}, {_id: 2, z: 9}]});
- })();
+ } else {
+ // Implicit database creation is prohibited in a cluster.
+ const error = assert.throws(() => source.aggregate(foreignPipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+        // Force creation of the database and collection, then fall through to the test
+        // below.
+ assert.commandWorked(foreignTarget.insert({_id: 1}));
+ }
+
+ assert.doesNotThrow(() => source.aggregate(foreignPipeline));
+ assertArrayEq({actual: foreignTarget.find().toArray(), expected: [{_id: 1, z: 1}]});
+ assert.commandWorked(foreignDb.dropDatabase());
+})();
+
+// Test that $merge can reference the default 'let' variable 'new' which holds the entire
+// document from the source collection.
+(function testMergeWithDefaultLetVariable() {
+ assert(source.drop());
+ assert(target.drop());
+
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ target: target.getName(),
+ updatePipeline: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 4}]});
+})();
+
+// Test that the default 'let' variable 'new' is not available once the 'let' argument to the
+// $merge stage is specified explicitly.
+(function testMergeCannotUseDefaultLetVariableIfLetIsSpecified() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ const error = assert.throws(() => source.aggregate(makeMergePipeline({
+ letVars: {foo: "bar"},
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assert.commandFailedWithCode(error, 17276);
+})();
+
+// Test that $merge can accept an empty object holding no variables, in which case the default
+// 'new' variable is not available.
+(function testMergeWithEmptyLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // Can use an empty object.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {letVars: {}, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
+ });
+
+ // No default variable 'new' is available.
+ const error = assert.throws(() => source.aggregate(makeMergePipeline({
+ letVars: {},
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assert.commandFailedWithCode(error, 17276);
+})();
+
+// Test that $merge can accept a null value as the 'let' argument and the default variable 'new'
+// can be used.
+// Note that this is not desirable behaviour but rather a limitation of the IDL parser, which
+// cannot differentiate between an optional field explicitly specified as 'null' and one not
+// specified at all. In both cases it will treat the field as if it wasn't specified. So, this
+// test ensures that we're aware of this limitation. Once the limitation is addressed in
+// SERVER-41272, this test should be updated accordingly.
+(function testMergeWithNullLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // Can use a null 'let' argument.
+ assert.doesNotThrow(
+ () => source.aggregate(makeMergePipeline(
+ {letVars: null, target: target.getName(), updatePipeline: [{$set: {x: "foo"}}]})));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: "foo"}, {_id: 2, x: "foo"}]
+ });
+
+ // Can use the default 'new' variable.
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: null,
+ target: target.getName(),
+ updatePipeline: [{$project: {x: "$$new.a", y: "$$new.b"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, x: 1, y: 1}, {_id: 2, x: 2, y: 2}]});
+})();
+
+// Test that constant values can be specified in the 'let' argument and referenced in the update
+// pipeline.
+(function testMergeWithConstantLetVariable() {
+ // Non-array constants.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {a: 1, b: "foo", c: true},
+ target: target.getName(),
+ updatePipeline: [{$set: {x: "$$a", y: "$$b", z: "$$c"}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, x: 1, y: "foo", z: true}, {_id: 2, x: 1, y: "foo", z: true}]
+ });
+
+ // Constant array.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {a: [1, 2, 3]},
+ target: target.getName(),
+ updatePipeline: [{$set: {x: {$arrayElemAt: ["$$a", 1]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, x: 2}, {_id: 2, x: 2}]});
+})();
+
+// Test that variables referencing the fields in the source document can be specified in the
+// 'let' argument and referenced in the update pipeline.
+(function testMergeWithNonConstantLetVariables() {
+ // Non-array fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 2, b: 2}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a", y: "$b"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$add: ["$$x", "$$y"]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 4}]});
+
+ // Array field with expressions in the pipeline.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: [1, 2, 3]}, {_id: 2, a: [4, 5, 6]}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$arrayElemAt: ["$$x", 1]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 2}, {_id: 2, z: 5}]});
+
+ // Array field with expressions in the 'let' argument.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: {$arrayElemAt: ["$a", 2]}},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$x"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 3}, {_id: 2, z: 6}]});
+})();
+
+// Test that variables using a dotted path can be specified in the 'let' argument and
+// referenced in the update pipeline.
+(function testMergeWithDottedPathLetVariables() {
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: {b: {c: 2}}}, {_id: 2, a: {b: {c: 3}}}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ letVars: {x: "$a.b.c"},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: {$pow: ["$$x", 2]}}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 4}, {_id: 2, z: 9}]});
+})();
+
+// Test that 'let' variables refer to the computed document produced by the aggregation
+// pipeline, not to the original document in the source collection.
+(function testMergeLetVariablesHoldsComputedValues() {
+ // Test the default 'new' variable.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 1}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ // In the $group stage the total field 'a' uses the same name as in the source collection
+ // intentionally, to make sure that even when a referenced field is present in the source
+ // collection under the same name, the actual value for the variable will be picked up from
+ // the computed document.
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$new"}}]
+ })));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, c: 1, z: {_id: 1, a: 3}}, {_id: 2, z: {_id: 2, a: 3}}]
+ });
+
+ // Test custom 'let' variables.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: 5}, {_id: 2, a: 1, b: 2}, {_id: 3, a: 2, b: 3}]));
+ assert.commandWorked(target.insert({_id: 1, c: 1}));
+
+ assert.doesNotThrow(() => source.aggregate(makeMergePipeline({
+ initialStages: [{$group: {_id: "$a", a: {$sum: "$b"}}}],
+ letVars: {x: {$pow: ["$a", 2]}},
+ target: target.getName(),
+ updatePipeline: [{$set: {z: "$$x"}}]
+ })));
+ assertArrayEq(
+ {actual: target.find().toArray(), expected: [{_id: 1, c: 1, z: 49}, {_id: 2, z: 9}]});
+})();
}());
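For reference, the makeMergePipeline() helper exercised throughout the hunk above is defined earlier in the file and is not shown in this diff; the stage it builds is assumed to reduce to a pipeline-style $merge roughly like the sketch below, which uses the default 'new' variable and placeholder collection names rather than anything taken from the helper itself.

// Hypothetical hand-written equivalent of the pipeline-style $merge tested above. The 'src'
// and 'dst' names are placeholders; whenNotMatched: "insert" matches the behaviour asserted
// on above, where unmatched source documents end up in the target collection.
db.src.aggregate([{
    $merge: {
        into: "dst",
        // '$$new' holds the document coming out of the source pipeline.
        whenMatched: [{$set: {x: {$add: ["$$new.a", "$$new.b"]}}}],
        whenNotMatched: "insert"
    }
}]);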
diff --git a/jstests/aggregation/sources/merge/mode_replace_discard.js b/jstests/aggregation/sources/merge/mode_replace_discard.js
index 5a0aa6eeb79..aba69a27d28 100644
--- a/jstests/aggregation/sources/merge/mode_replace_discard.js
+++ b/jstests/aggregation/sources/merge/mode_replace_discard.js
@@ -5,204 +5,193 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
-
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
- };
- const pipeline = [mergeStage];
-
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection. The merge operation should succeed and unmatched documents discarded.
- (function testMergeIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
-
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
- })();
-
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
-
- // Source has multiple documents with matches in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]
- });
- })();
-
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge does not insert a new document into the target collection if it was inserted
- // into the source collection.
- (function testMergeDoesNotInsertNewDocument() {
- // Insert and merge a single document.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteOne({_id: 3}));
-
- // Insert and merge multiple documents.
- assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
- assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
- })();
-
- // Test $merge doesn't modify the target collection if a document has been removed from the
- // source collection.
- (function testMergeDoesNotUpdateDeletedDocument() {
- assert.commandWorked(source.deleteOne({_id: 1}));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
-
- // Test $merge with an explicit 'on' field over a single or multiple document fields which
- // differ from the _id field.
- (function testMergeWithOnFields() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- // The 'on' fields contains a single document field.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
- });
-
- // The 'on' fields contains multiple document fields.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
- assert.commandWorked(source.insert(
- [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
- assert.commandWorked(target.insert(
- [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 1, a: 1, b: "a", c: "x"},
- {_id: 4, a: 30, b: "c"},
- {_id: 5, a: 40, c: "z"}
- ]
- });
- assert.commandWorked(source.dropIndex({a: 1, b: 1}));
- assert.commandWorked(target.dropIndex({a: 1, b: 1}));
- })();
-
- // Test $merge with a dotted path in the 'on' field.
- (function testMergeWithDottedOnField() {
- if (FixtureHelpers.isSharded(source)) {
- // Skip this test if the collection sharded, because an implicitly created sharded
- // key of {_id: 1} will not be covered by a unique index created in this test, which
- // is not allowed.
- return;
- }
-
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.commandWorked(source.insert([
- {_id: 1, a: {b: "b"}, c: "x"},
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isSharded.
+
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "discard"}
+};
+const pipeline = [mergeStage];
+
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection. The merge should succeed and unmatched documents should be discarded.
+(function testMergeIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
+
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
+})();
+
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
+
+ // Source has multiple documents with matches in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, b: 3}]
+ });
+})();
+
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test that $merge does not insert into the target collection a document that was newly
+// inserted into the source collection.
+(function testMergeDoesNotInsertNewDocument() {
+ // Insert and merge a single document.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteOne({_id: 3}));
+
+ // Insert and merge multiple documents.
+ assert.commandWorked(source.insert({_id: 3, a: 3, b: "c"}));
+ assert.commandWorked(source.insert({_id: 4, a: 4, c: "d"}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+ assert.commandWorked(source.deleteMany({_id: {$in: [3, 4]}}));
+})();
+
+// Test $merge doesn't modify the target collection if a document has been removed from the
+// source collection.
+(function testMergeDoesNotUpdateDeletedDocument() {
+ assert.commandWorked(source.deleteOne({_id: 1}));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
+
+// Test $merge with an explicit 'on' field over a single or multiple document fields which
+// differ from the _id field.
+(function testMergeWithOnFields() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by a unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+    // The 'on' fields specification contains a single document field.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.commandWorked(
+ source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(
+ target.insert([{_id: 1, a: 1, c: "x"}, {_id: 4, a: 30, c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
+ });
+
+    // The 'on' fields specification contains multiple document fields.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({a: 1, b: 1}, {unique: true}));
+ assert.commandWorked(source.insert(
+ [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 2, a: 2, b: "b"}, {_id: 3, a: 30, b: "c"}]));
+ assert.commandWorked(target.insert(
+ [{_id: 1, a: 1, b: "a"}, {_id: 4, a: 30, b: "c", c: "y"}, {_id: 5, a: 40, c: "z"}]));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: ["a", "b"]}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1, b: "a", c: "x"}, {_id: 4, a: 30, b: "c"}, {_id: 5, a: 40, c: "z"}]
+ });
+ assert.commandWorked(source.dropIndex({a: 1, b: 1}));
+ assert.commandWorked(target.dropIndex({a: 1, b: 1}));
+})();
+
+// Test $merge with a dotted path in the 'on' field.
+(function testMergeWithDottedOnField() {
+ if (FixtureHelpers.isSharded(source)) {
+        // Skip this test if the collection is sharded, because the implicitly created shard
+        // key of {_id: 1} will not be covered by a unique index created in this test, which
+        // is not allowed.
+ return;
+ }
+
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.commandWorked(source.insert([
+ {_id: 1, a: {b: "b"}, c: "x"},
+ {_id: 2, a: {b: "c"}, c: "y"},
+ {_id: 3, a: {b: 30}, b: "c"}
+ ]));
+ assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
+ assert.doesNotThrow(
+ () => source.aggregate(
+ [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [
{_id: 2, a: {b: "c"}, c: "y"},
- {_id: 3, a: {b: 30}, b: "c"}
- ]));
- assert.commandWorked(target.insert({_id: 2, a: {b: "c"}}));
- assert.doesNotThrow(
- () => source.aggregate(
- [{$project: {_id: 0}}, {$merge: Object.assign({on: "a.b"}, mergeStage.$merge)}]));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [
- {_id: 2, a: {b: "c"}, c: "y"},
- ]
- });
- })();
-
- // Test $merge when the _id field is removed from the aggregate projection but is used in the
- // $merge's 'on' field.
- (function testMergeWhenDocIdIsRemovedFromProjection() {
- // The _id is a single 'on' field (a default one).
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
-
- // The _id is part of the compound 'on' field.
- assert(target.drop());
- assert.commandWorked(target.insert({_id: 1, b: "c"}));
- assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0}},
- {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
- ]));
- assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
- assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
- assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
- })();
+ ]
+ });
+})();
+
+// Test $merge when the _id field is removed from the aggregate projection but is used in the
+// $merge's 'on' field.
+(function testMergeWhenDocIdIsRemovedFromProjection() {
+ // The _id is a single 'on' field (a default one).
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1, b: "a"}, {_id: 2, a: 2, b: "b"}]));
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.doesNotThrow(() => source.aggregate([{$project: {_id: 0}}, mergeStage]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+
+ // The _id is part of the compound 'on' field.
+ assert(target.drop());
+ assert.commandWorked(target.insert({_id: 1, b: "c"}));
+ assert.commandWorked(source.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.commandWorked(target.createIndex({_id: 1, a: -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0}},
+ {$merge: Object.assign({on: ["_id", "a"]}, mergeStage.$merge)}
+ ]));
+ assertArrayEq({actual: target.find({}, {_id: 0}).toArray(), expected: [{b: "c"}]});
+ assert.commandWorked(source.dropIndex({_id: 1, a: -1}));
+ assert.commandWorked(target.dropIndex({_id: 1, a: -1}));
+})();
}());
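A note on the testMergeWithOnFields and testMergeWithDottedOnField cases above: an explicit 'on' specification is only accepted when the named fields are backed by a unique index, which is why the tests create one before merging. Written without the mergeStage helper, such a run looks roughly like the sketch below; 'src' and 'dst' are placeholder names, and indexing both collections simply mirrors the test setup above.

// Sketch only: replace/discard $merge keyed on a non-_id field.
db.src.createIndex({a: 1}, {unique: true});
db.dst.createIndex({a: 1}, {unique: true});
db.src.aggregate([
    {$project: {_id: 0}},  // drop _id so only 'a' is used to match target documents
    {$merge: {into: "dst", on: "a", whenMatched: "replace", whenNotMatched: "discard"}}
]);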
diff --git a/jstests/aggregation/sources/merge/mode_replace_fail.js b/jstests/aggregation/sources/merge/mode_replace_fail.js
index 7afdb6579dc..19e74e58536 100644
--- a/jstests/aggregation/sources/merge/mode_replace_fail.js
+++ b/jstests/aggregation/sources/merge/mode_replace_fail.js
@@ -4,114 +4,111 @@
// exists when none is expected.
// @tags: [assumes_no_implicit_collection_creation_after_drop]
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+load("jstests/aggregation/extras/utils.js"); // For assertArrayEq.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
- const source = db[`${jsTest.name()}_source`];
- source.drop();
- const target = db[`${jsTest.name()}_target`];
- target.drop();
- const mergeStage = {
- $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}
- };
- const pipeline = [mergeStage];
+const source = db[`${jsTest.name()}_source`];
+source.drop();
+const target = db[`${jsTest.name()}_target`];
+target.drop();
+const mergeStage = {
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "fail"}
+};
+const pipeline = [mergeStage];
- // Test $merge when some documents in the source collection don't have a matching document in
- // the target collection.
- (function testMergeFailsIfMatchingDocumentNotFound() {
- // Single document without a match.
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- let error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
+// Test $merge when some documents in the source collection don't have a matching document in
+// the target collection.
+(function testMergeFailsIfMatchingDocumentNotFound() {
+ // Single document without a match.
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ let error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}, {_id: 3, a: 3}]});
- // Multiple documents without a match.
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}]));
- error = assert.throws(() => source.aggregate(pipeline));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
- })();
+ // Multiple documents without a match.
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}]));
+ error = assert.throws(() => source.aggregate(pipeline));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, a: 1}]});
+})();
- // Test $merge when all documents in the source collection have a matching document in the
- // target collection.
- (function testMergeWhenAllDocumentsHaveMatch() {
- // Source has a single element with a match in the target.
- assert(source.drop());
- assert(target.drop());
- assert.commandWorked(source.insert({_id: 3, a: 3}));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
+// Test $merge when all documents in the source collection have a matching document in the
+// target collection.
+(function testMergeWhenAllDocumentsHaveMatch() {
+ // Source has a single element with a match in the target.
+ assert(source.drop());
+ assert(target.drop());
+ assert.commandWorked(source.insert({_id: 3, a: 3}));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 3, a: 3}]});
- // Source has multiple documents with matches in the target.
- assert(target.drop());
- assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq({
- actual: target.find().toArray(),
- expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]
- });
- })();
+ // Source has multiple documents with matches in the target.
+ assert(target.drop());
+ assert.commandWorked(source.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}, {_id: 3, b: 3}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({
+ actual: target.find().toArray(),
+ expected: [{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]
+ });
+})();
- // Test $merge when the source collection is empty. The target collection should not be
- // modified.
- (function testMergeWhenSourceIsEmpty() {
- assert.commandWorked(source.deleteMany({}));
- assert(target.drop());
- assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
- assert.doesNotThrow(() => source.aggregate(pipeline));
- assertArrayEq(
- {actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
- })();
+// Test $merge when the source collection is empty. The target collection should not be
+// modified.
+(function testMergeWhenSourceIsEmpty() {
+ assert.commandWorked(source.deleteMany({}));
+ assert(target.drop());
+ assert.commandWorked(target.insert([{_id: 1, b: 1}, {_id: 2, b: 2}]));
+ assert.doesNotThrow(() => source.aggregate(pipeline));
+ assertArrayEq({actual: target.find().toArray(), expected: [{_id: 1, b: 1}, {_id: 2, b: 2}]});
+})();
- // Test $merge uses unorderded batch update. When a mismatch is detected in a batch, the error
- // should be returned once the batch is processed and no further documents should be processed
- // and updated.
- (function testMergeUnorderedBatchUpdate() {
- const maxBatchSize = 16 * 1024 * 1024; // 16MB
- const docSize = 1024 * 1024; // 1MB
- const numDocs = 20;
- const maxDocsInBatch = maxBatchSize / docSize;
+// Test $merge uses unordered batch updates. When a mismatch is detected in a batch, the error
+// should be returned once the batch is processed and no further documents should be processed
+// and updated.
+(function testMergeUnorderedBatchUpdate() {
+ const maxBatchSize = 16 * 1024 * 1024; // 16MB
+ const docSize = 1024 * 1024; // 1MB
+ const numDocs = 20;
+ const maxDocsInBatch = maxBatchSize / docSize;
- assert(source.drop());
- assert(target.drop());
+ assert(source.drop());
+ assert(target.drop());
- // Insert 'numDocs' documents of size 'docSize' into the source collection.
- generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
+ // Insert 'numDocs' documents of size 'docSize' into the source collection.
+ generateCollection({coll: source, numDocs: numDocs, docSize: docSize});
- // Copy over documents from the source collection into the target and remove the 'padding'
- // field from the projection, so we can distinguish which documents have been modified by
- // the $merge stage.
- assert.doesNotThrow(
- () => source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
+    // Copy over documents from the source collection into the target, excluding the 'padding'
+    // field via the projection, so we can distinguish which documents have been modified by
+    // the $merge stage.
+ assert.doesNotThrow(() =>
+ source.aggregate([{$project: {padding: 0}}, {$out: target.getName()}]));
- // Remove one document from the target collection so that $merge fails. This document should
- // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
- // field in ascending order. Since each document in the source collection is 1MB, and the
- // max batch size is 16MB, the first batch will contain documents with the _id in the range
- // of [0, 15].
- assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
+ // Remove one document from the target collection so that $merge fails. This document should
+ // be in the first batch of the aggregation pipeline below, which sorts documents by the _id
+ // field in ascending order. Since each document in the source collection is 1MB, and the
+ // max batch size is 16MB, the first batch will contain documents with the _id in the range
+ // of [0, 15].
+ assert.commandWorked(target.deleteOne({_id: Math.floor(Math.random() * maxDocsInBatch)}));
- // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
- assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
+ // Ensure the target collection has 'numDocs' - 1 documents without the 'padding' field.
+ assert.eq(numDocs - 1, target.find({padding: {$exists: false}}).itcount());
- // Run the $merge pipeline and ensure it fails, as there is one document in the source
- // collection without a match in the target.
- const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
- assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
+ // Run the $merge pipeline and ensure it fails, as there is one document in the source
+ // collection without a match in the target.
+ const error = assert.throws(() => source.aggregate([{$sort: {_id: 1}}, mergeStage]));
+ assert.commandFailedWithCode(error, ErrorCodes.MergeStageNoMatchingDocument);
- // There will be maxDocsInBatch documents in the batch, one without a match.
- const numDocsModified = maxDocsInBatch - 1;
- // All remaining documents except those in the first batch must be left unmodified.
- const numDocsUnmodified = numDocs - maxDocsInBatch;
- assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
- assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
- })();
+ // There will be maxDocsInBatch documents in the batch, one without a match.
+ const numDocsModified = maxDocsInBatch - 1;
+ // All remaining documents except those in the first batch must be left unmodified.
+ const numDocsUnmodified = numDocs - maxDocsInBatch;
+ assert.eq(numDocsModified, target.find({padding: {$exists: true}}).itcount());
+ assert.eq(numDocsUnmodified, target.find({padding: {$exists: false}}).itcount());
+})();
}());
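For contrast with the discard mode above, the error these whenNotMatched: "fail" tests assert on can be reproduced with the stage written out directly; a minimal sketch with placeholder collection names follows.

// Any source document lacking an _id match in the target aborts the merge once its batch
// is flushed, and the shell surfaces ErrorCodes.MergeStageNoMatchingDocument.
const err = assert.throws(() => db.src.aggregate([
    {$merge: {into: "dst", whenMatched: "replace", whenNotMatched: "fail"}}
]));
assert.commandFailedWithCode(err, ErrorCodes.MergeStageNoMatchingDocument);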
diff --git a/jstests/aggregation/sources/merge/mode_replace_insert.js b/jstests/aggregation/sources/merge/mode_replace_insert.js
index bb1e407ea7b..e81ac857dc4 100644
--- a/jstests/aggregation/sources/merge/mode_replace_insert.js
+++ b/jstests/aggregation/sources/merge/mode_replace_insert.js
@@ -1,225 +1,214 @@
// Tests for the $merge stage with whenMatched: "replace" and whenNotMatched: "insert".
// @tags: [assumes_unsharded_collection]
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
- load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
-
- const coll = db.merge_replace_insert;
- const outColl = db.merge_replace_insert_out;
- coll.drop();
- outColl.drop();
-
- const nDocs = 10;
- for (let i = 0; i < nDocs; i++) {
- assert.commandWorked(coll.insert({_id: i, a: i}));
- }
-
- // Test that a $merge with whenMatched: "replace" and whenNotMatched: "insert" mode will
- // default the "on" fields to "_id".
- coll.aggregate(
- [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}]);
- assert.eq(nDocs, outColl.find().itcount());
-
- // Test that $merge will update existing documents that match the "on" fields.
- const nDocsReplaced = 5;
- coll.aggregate([
- {$project: {_id: {$mod: ["$_id", nDocsReplaced]}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "_id"
- }
- }
- ]);
- assert.eq(nDocsReplaced, outColl.find({a: {$exists: false}}).itcount());
-
- // Test $merge with a dotted path "on" fields.
- coll.drop();
- outColl.drop();
- assert.commandWorked(coll.insert([{_id: 0, a: {b: 1}}, {_id: 1, a: {b: 1}, c: 1}]));
- assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
- coll.aggregate([
- {$addFields: {_id: 0}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["_id", "a.b"]
- }
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.isMongos.
+
+const coll = db.merge_replace_insert;
+const outColl = db.merge_replace_insert_out;
+coll.drop();
+outColl.drop();
+
+const nDocs = 10;
+for (let i = 0; i < nDocs; i++) {
+ assert.commandWorked(coll.insert({_id: i, a: i}));
+}
+
+// Test that a $merge with whenMatched: "replace" and whenNotMatched: "insert" mode will
+// default the "on" fields to "_id".
+coll.aggregate(
+ [{$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}]);
+assert.eq(nDocs, outColl.find().itcount());
+
+// Test that $merge will update existing documents that match the "on" fields.
+const nDocsReplaced = 5;
+coll.aggregate([
+ {$project: {_id: {$mod: ["$_id", nDocsReplaced]}}},
+ {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "_id"}}
+]);
+assert.eq(nDocsReplaced, outColl.find({a: {$exists: false}}).itcount());
+
+// Test $merge with a dotted path "on" fields.
+coll.drop();
+outColl.drop();
+assert.commandWorked(coll.insert([{_id: 0, a: {b: 1}}, {_id: 1, a: {b: 1}, c: 1}]));
+assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
+coll.aggregate([
+ {$addFields: {_id: 0}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "a.b"]
}
- ]);
- assert.eq([{_id: 0, a: {b: 1}, c: 1}], outColl.find().toArray());
-
- // Test that $merge will automatically generate a missing "_id" for the "on" field.
- coll.drop();
- outColl.drop();
- assert.commandWorked(coll.insert({field: "will be removed"}));
- assert.doesNotThrow(() => coll.aggregate([
- {$replaceRoot: {newRoot: {}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- }
+ }
+]);
+assert.eq([{_id: 0, a: {b: 1}, c: 1}], outColl.find().toArray());
+
+// Test that $merge will automatically generate a missing "_id" for the "on" field.
+coll.drop();
+outColl.drop();
+assert.commandWorked(coll.insert({field: "will be removed"}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$replaceRoot: {newRoot: {}}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
}
- ]));
- assert.eq(1, outColl.find({field: {$exists: false}}).itcount());
-
- // Test that $merge will automatically generate a missing "_id", and the aggregation succeeds
- // with multiple "on" fields.
- outColl.drop();
- assert.commandWorked(outColl.createIndex({name: -1, _id: 1}, {unique: true, sparse: true}));
- assert.doesNotThrow(() => coll.aggregate([
- {$replaceRoot: {newRoot: {name: "jungsoo"}}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["_id", "name"]
- }
+ }
+]));
+assert.eq(1, outColl.find({field: {$exists: false}}).itcount());
+
+// Test that $merge will automatically generate a missing "_id", and the aggregation succeeds
+// with multiple "on" fields.
+outColl.drop();
+assert.commandWorked(outColl.createIndex({name: -1, _id: 1}, {unique: true, sparse: true}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$replaceRoot: {newRoot: {name: "jungsoo"}}},
+ {
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "name"]
}
- ]));
- assert.eq(1, outColl.find().itcount());
-
- // Test that we will not attempt to modify the _id of an existing document if the _id is
- // projected away but the "on" field does not involve _id.
- coll.drop();
- assert.commandWorked(coll.insert({name: "kyle"}));
- assert.commandWorked(coll.insert({name: "nick"}));
- outColl.drop();
- assert.commandWorked(outColl.createIndex({name: 1}, {unique: true}));
- assert.commandWorked(outColl.insert({_id: "must be unchanged", name: "kyle"}));
- assert.doesNotThrow(() => coll.aggregate([
- {$project: {_id: 0}},
- {$addFields: {newField: 1}},
- {
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "name"
- }
+ }
+]));
+assert.eq(1, outColl.find().itcount());
+
+// Test that we will not attempt to modify the _id of an existing document if the _id is
+// projected away but the "on" field does not involve _id.
+coll.drop();
+assert.commandWorked(coll.insert({name: "kyle"}));
+assert.commandWorked(coll.insert({name: "nick"}));
+outColl.drop();
+assert.commandWorked(outColl.createIndex({name: 1}, {unique: true}));
+assert.commandWorked(outColl.insert({_id: "must be unchanged", name: "kyle"}));
+assert.doesNotThrow(() => coll.aggregate([
+ {$project: {_id: 0}},
+ {$addFields: {newField: 1}},
+ {
+ $merge:
+ {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "name"}
+ }
+]));
+const outResult = outColl.find().sort({name: 1}).toArray();
+const errmsgFn = () => tojson(outResult);
+assert.eq(2, outResult.length, errmsgFn);
+assert.docEq({_id: "must be unchanged", name: "kyle", newField: 1}, outResult[0], errmsgFn);
+assert.eq("nick", outResult[1].name, errmsgFn);
+assert.eq(1, outResult[1].newField, errmsgFn);
+assert.neq(null, outResult[1]._id, errmsgFn);
+
+// Test that $merge with a missing non-id "on" field fails.
+outColl.drop();
+assert.commandWorked(outColl.createIndex({missing: 1}, {unique: true}));
+assertErrorCode(
+ coll,
+ [{
+ $merge: {
+ into: outColl.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "missing"
}
- ]));
- const outResult = outColl.find().sort({name: 1}).toArray();
- const errmsgFn = () => tojson(outResult);
- assert.eq(2, outResult.length, errmsgFn);
- assert.docEq({_id: "must be unchanged", name: "kyle", newField: 1}, outResult[0], errmsgFn);
- assert.eq("nick", outResult[1].name, errmsgFn);
- assert.eq(1, outResult[1].newField, errmsgFn);
- assert.neq(null, outResult[1]._id, errmsgFn);
-
- // Test that $merge with a missing non-id "on" field fails.
- outColl.drop();
- assert.commandWorked(outColl.createIndex({missing: 1}, {unique: true}));
- assertErrorCode(
- coll,
- [{
- $merge: {
- into: outColl.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "missing"
- }
- }],
- 51132 // This attempt should fail because there's no field 'missing' in the document.
- );
-
- // Test that a replace fails to insert a document if it violates a unique index constraint. In
- // this example, $merge will attempt to insert multiple documents with {a: 0} which is not
- // allowed with the unique index on {a: 1}.
- coll.drop();
- assert.commandWorked(coll.insert([{_id: 0}, {_id: 1}]));
-
- outColl.drop();
- assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
- assertErrorCode(
- coll,
- [
- {$addFields: {a: 0}},
- {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
- ],
- ErrorCodes.DuplicateKey);
-
- // Test that $merge fails if the "on" fields contains an array.
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0, a: [1, 2]}));
- assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
- assertErrorCode(coll,
- [
- {$addFields: {_id: 0}},
- {
+ }],
+ 51132 // This attempt should fail because there's no field 'missing' in the document.
+);
+
+// Test that a replace fails to insert a document if it violates a unique index constraint. In
+// this example, $merge will attempt to insert multiple documents with {a: 0} which is not
+// allowed with the unique index on {a: 1}.
+coll.drop();
+assert.commandWorked(coll.insert([{_id: 0}, {_id: 1}]));
+
+outColl.drop();
+assert.commandWorked(outColl.createIndex({a: 1}, {unique: true}));
+assertErrorCode(
+ coll,
+ [
+ {$addFields: {a: 0}},
+ {$merge: {into: outColl.getName(), whenMatched: "replace", whenNotMatched: "insert"}}
+ ],
+ ErrorCodes.DuplicateKey);
+
+// Test that $merge fails if one of the "on" fields contains an array.
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0, a: [1, 2]}));
+assert.commandWorked(outColl.createIndex({"a.b": 1, _id: 1}, {unique: true}));
+assertErrorCode(coll,
+ [
+ {$addFields: {_id: 0}},
+ {
$merge: {
into: outColl.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
on: ["_id", "a.b"]
}
- }
- ],
- 51132);
-
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0, a: [{b: 1}]}));
- assertErrorCode(coll,
- [
- {$addFields: {_id: 0}},
- {
+ }
+ ],
+ 51132);
+
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0, a: [{b: 1}]}));
+assertErrorCode(coll,
+ [
+ {$addFields: {_id: 0}},
+ {
$merge: {
into: outColl.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
on: ["_id", "a.b"]
}
- }
- ],
- 51132);
-
- // Tests for $merge to a database that differs from the aggregation database.
- const foreignDb = db.getSiblingDB("merge_replace_insert_foreign");
- const foreignTargetColl = foreignDb.out;
- const pipelineDifferentOutputDb = [{
- $merge: {
- into: {
- db: foreignDb.getName(),
- coll: foreignTargetColl.getName(),
- },
- whenMatched: "replace",
- whenNotMatched: "insert",
- }
- }];
-
- coll.drop();
- assert.commandWorked(coll.insert({_id: 0}));
- foreignDb.dropDatabase();
-
- if (!FixtureHelpers.isMongos(db)) {
- // Test that $merge implicitly creates a new database when the output collection's database
- // doesn't exist.
- coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().itcount(), 1);
- } else {
- // Implicit database creation is prohibited in a cluster.
- let error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
- assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
-
- // Force a creation of the database and collection, then fall through the test below.
- assert.commandWorked(foreignTargetColl.insert({_id: 0}));
+ }
+ ],
+ 51132);
+
+// Tests for $merge to a database that differs from the aggregation database.
+const foreignDb = db.getSiblingDB("merge_replace_insert_foreign");
+const foreignTargetColl = foreignDb.out;
+const pipelineDifferentOutputDb = [{
+ $merge: {
+ into: {
+ db: foreignDb.getName(),
+ coll: foreignTargetColl.getName(),
+ },
+ whenMatched: "replace",
+ whenNotMatched: "insert",
}
+}];
+
+coll.drop();
+assert.commandWorked(coll.insert({_id: 0}));
+foreignDb.dropDatabase();
- // Insert a new document into the source collection, then test that running the same
- // aggregation will replace existing documents in the foreign output collection when
- // applicable.
- coll.drop();
- const newDocuments = [{_id: 0, newField: 1}, {_id: 1}];
- assert.commandWorked(coll.insert(newDocuments));
+if (!FixtureHelpers.isMongos(db)) {
+ // Test that $merge implicitly creates a new database when the output collection's database
+ // doesn't exist.
coll.aggregate(pipelineDifferentOutputDb);
- assert.eq(foreignTargetColl.find().sort({_id: 1}).toArray(), newDocuments);
+ assert.eq(foreignTargetColl.find().itcount(), 1);
+} else {
+ // Implicit database creation is prohibited in a cluster.
+ let error = assert.throws(() => coll.aggregate(pipelineDifferentOutputDb));
+ assert.commandFailedWithCode(error, ErrorCodes.NamespaceNotFound);
+
+ // Force a creation of the database and collection, then fall through the test below.
+ assert.commandWorked(foreignTargetColl.insert({_id: 0}));
+}
+
+// Insert a new document into the source collection, then test that running the same
+// aggregation will replace existing documents in the foreign output collection when
+// applicable.
+coll.drop();
+const newDocuments = [{_id: 0, newField: 1}, {_id: 1}];
+assert.commandWorked(coll.insert(newDocuments));
+coll.aggregate(pipelineDifferentOutputDb);
+assert.eq(foreignTargetColl.find().sort({_id: 1}).toArray(), newDocuments);
}());
diff --git a/jstests/aggregation/sources/merge/on_fields_validation.js b/jstests/aggregation/sources/merge/on_fields_validation.js
index 78c7dd4eb41..ae911689cdf 100644
--- a/jstests/aggregation/sources/merge/on_fields_validation.js
+++ b/jstests/aggregation/sources/merge/on_fields_validation.js
@@ -7,137 +7,133 @@
* @tags: [cannot_create_unique_index_when_using_hashed_shard_key]
*/
(function() {
- "use strict";
-
- load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
-
- const source = db.unique_key_validation_source;
- const target = db.unique_key_validation_target;
-
- [source, target].forEach(coll => coll.drop());
- assert.commandWorked(source.insert({_id: 0}));
-
- //
- // Tests for invalid "on" fields specifications.
- //
- function assertOnFieldsIsInvalid(onFields, expectedErrorCode) {
- const stage = {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: onFields
- }
- };
- assertErrorCode(source, stage, expectedErrorCode);
+"use strict";
+
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+
+const source = db.unique_key_validation_source;
+const target = db.unique_key_validation_target;
+
+[source, target].forEach(coll => coll.drop());
+assert.commandWorked(source.insert({_id: 0}));
+
+//
+// Tests for invalid "on" fields specifications.
+//
+function assertOnFieldsIsInvalid(onFields, expectedErrorCode) {
+ const stage = {
+ $merge:
+ {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: onFields}
+ };
+ assertErrorCode(source, stage, expectedErrorCode);
+}
+
+// An "on" fields value that is neither a string nor an array is prohibited.
+assertOnFieldsIsInvalid(3.14, 51186);
+assertOnFieldsIsInvalid({_id: 1}, 51186);
+
+// Explicitly specifying an empty array for the "on" fields is invalid.
+assertOnFieldsIsInvalid([], 51187);
+
+// The "on" fields array won't be accepted if any element is not a string.
+assertOnFieldsIsInvalid(["hashed", 1], 51134);
+assertOnFieldsIsInvalid([["_id"]], 51134);
+assertOnFieldsIsInvalid([null], 51134);
+assertOnFieldsIsInvalid([true, "a"], 51134);
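+
+// (Illustrative sketch, not part of the original test: a valid "on" specification is a single
+// string or a non-empty array of strings; when omitted it defaults to "_id" for an unsharded
+// target, which the implicit unique _id index satisfies. Either of the following would parse.)
+// assert.doesNotThrow(
+//     () => source.aggregate([{$merge: {into: target.getName()}}]));
+// assert.doesNotThrow(
+//     () => source.aggregate([{$merge: {into: target.getName(), on: ["_id"]}}]));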
+
+//
+// An error is raised if $merge encounters a document that is missing one or more of the
+// "on" fields.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({name: 1, team: -1}, {unique: true}));
+const pipelineNameTeam = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["name", "team"]
}
+}];
+
+// Missing both "name" and "team".
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// Missing "name".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, team: "query"}));
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// Missing "team".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas"}));
+assertErrorCode(source, pipelineNameTeam, 51132);
+
+// A document with both "name" and "team" will be accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas", team: "query"}));
+assert.doesNotThrow(() => source.aggregate(pipelineNameTeam));
+assert.eq(target.find().toArray(), [{_id: 0, name: "nicholas", team: "query"}]);
+
+//
+// An error is raised if $merge encounters a document where one of the "on" fields is a nullish
+// value.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({"song.artist": 1}, {unique: 1}));
+const pipelineSongDotArtist = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["song.artist"]
+ }
+}];
+
+// Explicit null "song" (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: null}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit undefined "song" (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: undefined}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit null "song.artist".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: null}}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// Explicit undefined "song.artist".
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: undefined}}));
+assertErrorCode(source, pipelineSongDotArtist, 51132);
+
+// A valid "artist" will be accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: "Illenium"}}));
+assert.doesNotThrow(() => source.aggregate(pipelineSongDotArtist));
+assert.eq(target.find().toArray(), [{_id: 0, song: {artist: "Illenium"}}]);
+
+//
+// An error is raised if $merge encounters a document where one of the "on" fields (or a prefix
+// of an "on" field) is an array.
+//
+assert.commandWorked(target.remove({}));
+assert.commandWorked(target.createIndex({"address.street": 1}, {unique: 1}));
+const pipelineAddressDotStreet = [{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["address.street"]
+ }
+}];
+
+// "address.street" is an array.
+assert.commandWorked(
+ source.update({_id: 0}, {_id: 0, address: {street: ["West 43rd St", "1633 Broadway"]}}));
+assertErrorCode(source, pipelineAddressDotStreet, 51185);
+
+// "address" is an array (a prefix of an "on" field).
+assert.commandWorked(source.update({_id: 0}, {_id: 0, address: [{street: "1633 Broadway"}]}));
+assertErrorCode(source, pipelineAddressDotStreet, 51132);
- // A non-array or string "on" fields is prohibited.
- assertOnFieldsIsInvalid(3.14, 51186);
- assertOnFieldsIsInvalid({_id: 1}, 51186);
-
- // Explicitly specifying an empty-array "on" fields is invalid.
- assertOnFieldsIsInvalid([], 51187);
-
- // The "on" fields array won't be accepted if any element is not a string.
- assertOnFieldsIsInvalid(["hashed", 1], 51134);
- assertOnFieldsIsInvalid([["_id"]], 51134);
- assertOnFieldsIsInvalid([null], 51134);
- assertOnFieldsIsInvalid([true, "a"], 51134);
-
- //
- // An error is raised if $merge encounters a document that is missing one or more of the
- // "on" fields.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({name: 1, team: -1}, {unique: true}));
- const pipelineNameTeam = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["name", "team"]
- }
- }];
-
- // Missing both "name" and "team".
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // Missing "name".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, team: "query"}));
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // Missing "team".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas"}));
- assertErrorCode(source, pipelineNameTeam, 51132);
-
- // A document with both "name" and "team" will be accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, name: "nicholas", team: "query"}));
- assert.doesNotThrow(() => source.aggregate(pipelineNameTeam));
- assert.eq(target.find().toArray(), [{_id: 0, name: "nicholas", team: "query"}]);
-
- //
- // An error is raised if $merge encounters a document where one of the "on" fields is a nullish
- // value.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({"song.artist": 1}, {unique: 1}));
- const pipelineSongDotArtist = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["song.artist"]
- }
- }];
-
- // Explicit null "song" (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: null}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit undefined "song" (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: undefined}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit null "song.artist".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: null}}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // Explicit undefined "song.artist".
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: undefined}}));
- assertErrorCode(source, pipelineSongDotArtist, 51132);
-
- // A valid "artist" will be accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, song: {artist: "Illenium"}}));
- assert.doesNotThrow(() => source.aggregate(pipelineSongDotArtist));
- assert.eq(target.find().toArray(), [{_id: 0, song: {artist: "Illenium"}}]);
-
- //
- // An error is raised if $merge encounters a document where one of the "on" fields (or a prefix
- // of an "on" field) is an array.
- //
- assert.commandWorked(target.remove({}));
- assert.commandWorked(target.createIndex({"address.street": 1}, {unique: 1}));
- const pipelineAddressDotStreet = [{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["address.street"]
- }
- }];
-
- // "address.street" is an array.
- assert.commandWorked(
- source.update({_id: 0}, {_id: 0, address: {street: ["West 43rd St", "1633 Broadway"]}}));
- assertErrorCode(source, pipelineAddressDotStreet, 51185);
-
- // "address" is an array (a prefix of an "on" field).
- assert.commandWorked(source.update({_id: 0}, {_id: 0, address: [{street: "1633 Broadway"}]}));
- assertErrorCode(source, pipelineAddressDotStreet, 51132);
-
- // A scalar "address.street" is accepted.
- assert.commandWorked(source.update({_id: 0}, {_id: 0, address: {street: "1633 Broadway"}}));
- assert.doesNotThrow(() => source.aggregate(pipelineAddressDotStreet));
- assert.eq(target.find().toArray(), [{_id: 0, address: {street: "1633 Broadway"}}]);
+// A scalar "address.street" is accepted.
+assert.commandWorked(source.update({_id: 0}, {_id: 0, address: {street: "1633 Broadway"}}));
+assert.doesNotThrow(() => source.aggregate(pipelineAddressDotStreet));
+assert.eq(target.find().toArray(), [{_id: 0, address: {street: "1633 Broadway"}}]);
}());
diff --git a/jstests/aggregation/sources/merge/requires_unique_index.js b/jstests/aggregation/sources/merge/requires_unique_index.js
index a316d239321..38f8aa27f64 100644
--- a/jstests/aggregation/sources/merge/requires_unique_index.js
+++ b/jstests/aggregation/sources/merge/requires_unique_index.js
@@ -6,407 +6,365 @@
// manually. This is to avoid implicit creation and sharding of the $merge target collections in the
// passthrough suites.
(function() {
- "use strict";
+"use strict";
- load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode,
- // assertMergeFailsWithoutUniqueIndex.
+load("jstests/aggregation/extras/merge_helpers.js"); // For withEachMergeMode,
+ // assertMergeFailsWithoutUniqueIndex.
- const testDB = db.getSiblingDB("merge_requires_unique_index");
- assert.commandWorked(testDB.dropDatabase());
+const testDB = db.getSiblingDB("merge_requires_unique_index");
+assert.commandWorked(testDB.dropDatabase());
- const source = testDB.source;
- assert.commandWorked(source.insert([{_id: 0, a: 0}, {_id: 1, a: 1}]));
+const source = testDB.source;
+assert.commandWorked(source.insert([{_id: 0, a: 0}, {_id: 1, a: 1}]));
- // Helper to drop a collection without using the shell helper, and thus avoiding the implicit
- // recreation in the passthrough suites.
- function dropWithoutImplicitRecreate(coll) {
- testDB.runCommand({drop: coll.getName()});
+// Helper to drop a collection without using the shell helper, and thus avoiding the implicit
+// recreation in the passthrough suites.
+function dropWithoutImplicitRecreate(coll) {
+ testDB.runCommand({drop: coll.getName()});
+}
+
+// Test that using "_id" or not specifying "on" fields does not require any special indexes.
+(function simpleIdOnFieldsOrDefaultShouldNotRequireIndexes() {
+ function assertDefaultOnFieldsSucceeds({setupCallback, collName}) {
+ withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+            // Skip combinations of merge modes that may fail depending on the contents of
+            // the source and target collections, as that would trip the assertions below.
+ if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
+ return;
+
+ setupCallback();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: collName,
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]));
+ setupCallback();
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: collName,
+ on: "_id",
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }]));
+ });
}
- // Test that using {_id: 1} or not providing a unique key does not require any special indexes.
- (function simpleIdOnFieldsOrDefaultShouldNotRequireIndexes() {
- function assertDefaultOnFieldsSucceeds({setupCallback, collName}) {
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- // Skip the combination of merge modes which will fail depending on the contents of
- // the source and target collection, as this will cause the assertion below to trip.
- if (whenMatchedMode == "fail" || whenNotMatchedMode == "fail")
- return;
-
- setupCallback();
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: collName,
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]));
- setupCallback();
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: collName,
- on: "_id",
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
- }]));
- });
+ // Test that using "_id" or not specifying "on" fields works for a collection which does
+ // not exist.
+ const non_existent = testDB.non_existent;
+ assertDefaultOnFieldsSucceeds({
+ setupCallback: () => dropWithoutImplicitRecreate(non_existent),
+ collName: non_existent.getName()
+ });
+
+ const unindexed = testDB.unindexed;
+ assertDefaultOnFieldsSucceeds({
+ setupCallback: () => {
+ dropWithoutImplicitRecreate(unindexed);
+ assert.commandWorked(testDB.runCommand({create: unindexed.getName()}));
+ },
+ collName: unindexed.getName()
+ });
+}());
+
+// Test that a unique index on the "on" fields can be used to satisfy the requirement.
+(function basicUniqueIndexWorks() {
+ const target = testDB.regular_unique;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+
+ assert.commandWorked(testDB.runCommand({create: target.getName()}));
+ assert.commandWorked(target.createIndex({a: 1, _id: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["_id", "a"]
}
+ }]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: ["a", "_id"]
+ }
+ }]));
+
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: ["_id", "a", "b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["b"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a"], target: target});
+
+ assert.commandWorked(target.dropIndex({a: 1, _id: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+
+ // Create a non-unique index and make sure that doesn't work.
+ assert.commandWorked(target.dropIndex({a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+}());
- // Test that using "_id" or not specifying "on" fields works for a collection which does
- // not exist.
- const non_existent = testDB.non_existent;
- assertDefaultOnFieldsSucceeds({
- setupCallback: () => dropWithoutImplicitRecreate(non_existent),
- collName: non_existent.getName()
- });
+// Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
+// is a partial index.
+(function uniqueButPartialShouldNotWork() {
+ const target = testDB.unique_but_partial_indexes;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+
+ assert.commandWorked(
+ target.createIndex({a: 1}, {unique: true, partialFilterExpression: {a: {$gte: 2}}}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["_id", "a"], target: target});
+}());
- const unindexed = testDB.unindexed;
- assertDefaultOnFieldsSucceeds({
- setupCallback: () => {
- dropWithoutImplicitRecreate(unindexed);
- assert.commandWorked(testDB.runCommand({create: unindexed.getName()}));
- },
- collName: unindexed.getName()
- });
- }());
-
- // Test that a unique index on the "on" fields can be used to satisfy the requirement.
- (function basicUniqueIndexWorks() {
- const target = testDB.regular_unique;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
-
- assert.commandWorked(testDB.runCommand({create: target.getName()}));
- assert.commandWorked(target.createIndex({a: 1, _id: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
+// Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
+// has a different collation.
+(function indexMustMatchCollationOfOperation() {
+ const target = testDB.collation_indexes;
+ dropWithoutImplicitRecreate(target);
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true, collation: {locale: "en_US"}}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en"}}});
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "simple"}}});
+ assertMergeFailsWithoutUniqueIndex({
+ source: source,
+ onFields: "a",
+ target: target,
+ options: {collation: {locale: "en_US", strength: 1}}
+ });
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "en_US"}}));
+
+ // Test that a non-unique index with the same collation cannot be used.
+ assert.commandWorked(target.dropIndex({a: 1}));
+ assert.commandWorked(target.createIndex({a: 1}, {collation: {locale: "en_US"}}));
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en_US"}}});
+
+ // Test that a collection-default collation will be applied to the index, but not the
+ // $merge's update or insert into that collection. The pipeline will inherit a
+ // collection-default collation, but from the source collection, not the $merge's target
+ // collection.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(
+ testDB.runCommand({create: target.getName(), collation: {locale: "en_US"}}));
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({
+ source: source,
+ onFields: "a",
+ target: target,
+ });
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "en_US"}}));
+
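+    // (Illustrative note, not part of the original test: the pipeline's collation is inherited
+    // from the plain 'source' collection (simple), not from the target's en_US default, so the
+    // explicit {collation: {locale: "en_US"}} above is what makes the en_US unique index usable.)
+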
+ // Test that when the source collection and foreign collection have the same default
+ // collation, a unique index on the foreign collection can be used.
+ const newSourceColl = testDB.new_source;
+ dropWithoutImplicitRecreate(newSourceColl);
+ assert.commandWorked(
+ testDB.runCommand({create: newSourceColl.getName(), collation: {locale: "en_US"}}));
+ assert.commandWorked(newSourceColl.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
+ // This aggregate does not specify a collation, but it should inherit the default collation
+ // from 'newSourceColl', and therefore the index on 'target' should be eligible for use
+ // since it has the same collation.
+ assert.doesNotThrow(() => newSourceColl.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+
+ // Test that an explicit "simple" collation can be used with an index without a collation.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {
+ into: target.getName(),
+ whenMatched: "replace",
+ whenNotMatched: "insert",
+ on: "a"
+ }
+ }],
+ {collation: {locale: "simple"}}));
+ assertMergeFailsWithoutUniqueIndex(
+ {source: source, onFields: "a", target: target, options: {collation: {locale: "en_US"}}});
+}());
+
+// Test that a unique index which is not simply ascending/descending fields cannot be used for
+// the "on" fields.
+(function testSpecialIndexTypes() {
+ const target = testDB.special_index_types;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1, text: "text"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "text"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "text", target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({a: 1, geo: "2dsphere"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["geo", "a"], target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({geo: "2d"}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "geo", target: target});
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(
+ target.createIndex({geo: "geoHaystack", a: 1}, {unique: true, bucketSize: 5}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "geo"], target: target});
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["geo", "a"], target: target});
+
+ dropWithoutImplicitRecreate(target);
+ // MongoDB does not support unique hashed indexes.
+ assert.commandFailedWithCode(target.createIndex({a: "hashed"}, {unique: true}), 16764);
+ assert.commandWorked(target.createIndex({a: "hashed"}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+}());
+
+// Test that a unique index with dotted field names can be used.
+(function testDottedFieldNames() {
+ const target = testDB.dotted_field_paths;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1, "b.c.d": -1}, {unique: true}));
+ assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 1, a: 1, b: {c: {d: "x"}}}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: ["_id", "a"]
+ on: ["a", "b.c.d"]
}
- }]));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({"id.x": 1, "id.y": -1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$group: {_id: {x: "$_id", y: "$a"}}},
+ {$project: {id: "$_id"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: ["a", "_id"]
+ on: ["id.x", "id.y"]
}
- }]));
-
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a", "b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a", "b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["b"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: ["a"], target: target});
-
- assert.commandWorked(target.dropIndex({a: 1, _id: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+ assert.doesNotThrow(() => source.aggregate([
+ {$group: {_id: {x: "$_id", y: "$a"}}},
+ {$project: {id: "$_id"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: ["id.y", "id.x"]
}
- }]));
-
- // Create a non-unique index and make sure that doesn't work.
- assert.commandWorked(target.dropIndex({a: 1}));
- assert.commandWorked(target.createIndex({a: 1}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
- }());
-
- // Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
- // is a partial index.
- (function uniqueButPartialShouldNotWork() {
- const target = testDB.unique_but_partial_indexes;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
-
- assert.commandWorked(
- target.createIndex({a: 1}, {unique: true, partialFilterExpression: {a: {$gte: 2}}}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["_id", "a"], target: target});
- }());
-
- // Test that a unique index on the "on" fields cannot be used to satisfy the requirement if it
- // has a different collation.
- (function indexMustMatchCollationOfOperation() {
- const target = testDB.collation_indexes;
- dropWithoutImplicitRecreate(target);
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
-
- assert.commandWorked(
- target.createIndex({a: 1}, {unique: true, collation: {locale: "en_US"}}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: "a", target: target, options: {collation: {locale: "en"}}});
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "simple"}}
- });
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US", strength: 1}}
- });
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "en_US"}}));
-
- // Test that a non-unique index with the same collation cannot be used.
- assert.commandWorked(target.dropIndex({a: 1}));
- assert.commandWorked(target.createIndex({a: 1}, {collation: {locale: "en_US"}}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US"}}
- });
+ }
+ ]));
+
+ // Test that we cannot use arrays with a dotted path within a $merge.
+ dropWithoutImplicitRecreate(target);
+ assert.commandWorked(target.createIndex({"b.c": 1}, {unique: true}));
+ withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
+ assert.commandFailedWithCode(testDB.runCommand({
+ aggregate: source.getName(),
+ pipeline: [
+ {$replaceRoot: {newRoot: {b: [{c: 1}, {c: 2}]}}},
+ {
+ $merge: {
+ into: target.getName(),
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode,
+ on: "b.c"
+ }
+ }
+ ],
+ cursor: {}
+ }),
+ [50905, 51132]);
+ });
+}());
- // Test that a collection-default collation will be applied to the index, but not the
- // $merge's update or insert into that collection. The pipeline will inherit a
- // collection-default collation, but from the source collection, not the $merge's target
- // collection.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(
- testDB.runCommand({create: target.getName(), collation: {locale: "en_US"}}));
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- });
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "en_US"}}));
-
- // Test that when the source collection and foreign collection have the same default
- // collation, a unique index on the foreign collection can be used.
- const newSourceColl = testDB.new_source;
- dropWithoutImplicitRecreate(newSourceColl);
- assert.commandWorked(
- testDB.runCommand({create: newSourceColl.getName(), collation: {locale: "en_US"}}));
- assert.commandWorked(newSourceColl.insert([{_id: 1, a: 1}, {_id: 2, a: 2}]));
- // This aggregate does not specify a collation, but it should inherit the default collation
- // from 'newSourceColl', and therefore the index on 'target' should be eligible for use
- // since it has the same collation.
- assert.doesNotThrow(() => newSourceColl.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }]));
-
- // Test that an explicit "simple" collation can be used with an index without a collation.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({a: 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([{
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a"
- }
- }],
- {collation: {locale: "simple"}}));
- assertMergeFailsWithoutUniqueIndex({
- source: source,
- onFields: "a",
- target: target,
- options: {collation: {locale: "en_US"}}
- });
- }());
-
- // Test that a unique index which is not simply ascending/descending fields cannot be used for
- // the "on" fields.
- (function testSpecialIndexTypes() {
- const target = testDB.special_index_types;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1, text: "text"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "text"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "text", target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({a: 1, geo: "2dsphere"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["geo", "a"], target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({geo: "2d"}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "geo", target: target});
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(
- target.createIndex({geo: "geoHaystack", a: 1}, {unique: true, bucketSize: 5}));
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["a", "geo"], target: target});
- assertMergeFailsWithoutUniqueIndex(
- {source: source, onFields: ["geo", "a"], target: target});
-
- dropWithoutImplicitRecreate(target);
- // MongoDB does not support unique hashed indexes.
- assert.commandFailedWithCode(target.createIndex({a: "hashed"}, {unique: true}), 16764);
- assert.commandWorked(target.createIndex({a: "hashed"}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- }());
-
- // Test that a unique index with dotted field names can be used.
- (function testDottedFieldNames() {
- const target = testDB.dotted_field_paths;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1, "b.c.d": -1}, {unique: true}));
- assertMergeFailsWithoutUniqueIndex({source: source, onFields: "a", target: target});
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 1, a: 1, b: {c: {d: "x"}}}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["a", "b.c.d"]
- }
- }
- ]));
-
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({"id.x": 1, "id.y": -1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$group: {_id: {x: "$_id", y: "$a"}}},
- {$project: {id: "$_id"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["id.x", "id.y"]
- }
- }
- ]));
- assert.doesNotThrow(() => source.aggregate([
- {$group: {_id: {x: "$_id", y: "$a"}}},
- {$project: {id: "$_id"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: ["id.y", "id.x"]
- }
- }
- ]));
+// Test that a unique index that is multikey can still be used.
+(function testMultikeyIndex() {
+ const target = testDB.multikey_index;
+ dropWithoutImplicitRecreate(target);
- // Test that we cannot use arrays with a dotted path within a $merge.
- dropWithoutImplicitRecreate(target);
- assert.commandWorked(target.createIndex({"b.c": 1}, {unique: true}));
- withEachMergeMode(({whenMatchedMode, whenNotMatchedMode}) => {
- assert.commandFailedWithCode(testDB.runCommand({
- aggregate: source.getName(),
- pipeline: [
- {$replaceRoot: {newRoot: {b: [{c: 1}, {c: 2}]}}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode,
- on: "b.c"
- }
- }
- ],
- cursor: {}
- }),
- [50905, 51132]);
- });
- }());
-
- // Test that a unique index that is multikey can still be used.
- (function testMultikeyIndex() {
- const target = testDB.multikey_index;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 1, "a.b": "$a"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a.b"
- }
- }
- ]));
- assert.commandWorked(target.insert({_id: "TARGET", a: [{b: "hi"}, {b: "hello"}]}));
- assert.commandWorked(source.insert({a: "hi", proofOfUpdate: "PROOF"}));
- assert.doesNotThrow(() => source.aggregate([
- {$project: {_id: 0, proofOfUpdate: "PROOF", "a.b": "$a"}},
- {
- $merge: {
- into: target.getName(),
- whenMatched: "replace",
- whenNotMatched: "insert",
- on: "a.b"
- }
- }
- ]));
- assert.docEq(target.findOne({"a.b": "hi", proofOfUpdate: "PROOF"}),
- {_id: "TARGET", a: {b: "hi"}, proofOfUpdate: "PROOF"});
- }());
-
- // Test that a unique index that is sparse can still be used.
- (function testSparseIndex() {
- const target = testDB.multikey_index;
- dropWithoutImplicitRecreate(target);
-
- assert.commandWorked(target.createIndex({a: 1}, {unique: true, sparse: true}));
- assert.doesNotThrow(() => source.aggregate([{
+ assert.commandWorked(target.createIndex({"a.b": 1}, {unique: true}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 1, "a.b": "$a"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: "a.b"
}
- }]));
- assert.commandWorked(target.insert([{b: 1, c: 1}, {a: null}, {d: 4}]));
- assert.doesNotThrow(() => source.aggregate([{
+ }
+ ]));
+ assert.commandWorked(target.insert({_id: "TARGET", a: [{b: "hi"}, {b: "hello"}]}));
+ assert.commandWorked(source.insert({a: "hi", proofOfUpdate: "PROOF"}));
+ assert.doesNotThrow(() => source.aggregate([
+ {$project: {_id: 0, proofOfUpdate: "PROOF", "a.b": "$a"}},
+ {
$merge: {
into: target.getName(),
whenMatched: "replace",
whenNotMatched: "insert",
- on: "a"
+ on: "a.b"
}
- }]));
- }());
+ }
+ ]));
+ assert.docEq(target.findOne({"a.b": "hi", proofOfUpdate: "PROOF"}),
+ {_id: "TARGET", a: {b: "hi"}, proofOfUpdate: "PROOF"});
+}());
+
+// Test that a unique index that is sparse can still be used.
+(function testSparseIndex() {
+ const target = testDB.multikey_index;
+ dropWithoutImplicitRecreate(target);
+
+ assert.commandWorked(target.createIndex({a: 1}, {unique: true, sparse: true}));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+ assert.commandWorked(target.insert([{b: 1, c: 1}, {a: null}, {d: 4}]));
+ assert.doesNotThrow(() => source.aggregate([{
+ $merge: {into: target.getName(), whenMatched: "replace", whenNotMatched: "insert", on: "a"}
+ }]));
+}());
}());
diff --git a/jstests/aggregation/sources/merge/use_cases.js b/jstests/aggregation/sources/merge/use_cases.js
index 5bce8006656..6c1c71b9419 100644
--- a/jstests/aggregation/sources/merge/use_cases.js
+++ b/jstests/aggregation/sources/merge/use_cases.js
@@ -5,112 +5,109 @@
* @tags: [requires_sharding]
*/
(function() {
- "use strict";
+"use strict";
- Random.setRandomSeed();
+Random.setRandomSeed();
- const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
+const st = new ShardingTest({shards: 2, rs: {nodes: 1}});
- const mongosDB = st.s.getDB("use_cases");
+const mongosDB = st.s.getDB("use_cases");
- const metricsColl = mongosDB["metrics"];
- const rollupColl = mongosDB["rollup"];
+const metricsColl = mongosDB["metrics"];
+const rollupColl = mongosDB["rollup"];
- function incDateByMinutes(date, mins) {
- return new Date(date.getTime() + (60 * 1000 * mins));
- }
-
- // Inserts 'nSamples' worth of random data starting at 'date'.
- function insertRandomData(coll, date, nSamples) {
- let ticksSum = 0, tempSum = 0;
- let bulk = coll.initializeUnorderedBulkOp();
- for (let i = 0; i < nSamples; i++) {
- const randTick = Random.randInt(100);
- const randTemp = Random.randInt(100);
- ticksSum += randTick;
- tempSum += randTemp;
- bulk.insert({
- _id: incDateByMinutes(date, i * (60 / nSamples)),
- ticks: randTick,
- temp: randTemp
- });
- }
- assert.commandWorked(bulk.execute());
+function incDateByMinutes(date, mins) {
+ return new Date(date.getTime() + (60 * 1000 * mins));
+}
- return [ticksSum, tempSum];
+// Inserts 'nSamples' worth of random data starting at 'date'.
+function insertRandomData(coll, date, nSamples) {
+ let ticksSum = 0, tempSum = 0;
+ let bulk = coll.initializeUnorderedBulkOp();
+ for (let i = 0; i < nSamples; i++) {
+ const randTick = Random.randInt(100);
+ const randTemp = Random.randInt(100);
+ ticksSum += randTick;
+ tempSum += randTemp;
+ bulk.insert(
+ {_id: incDateByMinutes(date, i * (60 / nSamples)), ticks: randTick, temp: randTemp});
}
-
- // Runs a $merge aggregate on the metrics collection to the rollup collection, grouping by hour,
- // summing the ticks, and averaging the temps.
- function runAggregate({startDate, whenMatchedMode, whenNotMatchedMode}) {
- metricsColl.aggregate([
- {$match: {_id: {$gte: startDate}}},
- {
- $group: {
- _id: {$dateToString: {format: "%Y-%m-%dT%H", date: "$_id"}},
- ticks: {$sum: "$ticks"},
- avgTemp: {$avg: "$temp"},
- }
- },
- {
- $merge: {
- into: {db: rollupColl.getDB().getName(), coll: rollupColl.getName()},
- whenMatched: whenMatchedMode,
- whenNotMatched: whenNotMatchedMode
- }
+ assert.commandWorked(bulk.execute());
+
+ return [ticksSum, tempSum];
+}
+
+// Runs a $merge aggregation from the metrics collection into the rollup collection, grouping by
+// hour, summing the ticks, and averaging the temps.
+function runAggregate({startDate, whenMatchedMode, whenNotMatchedMode}) {
+ metricsColl.aggregate([
+ {$match: {_id: {$gte: startDate}}},
+ {
+ $group: {
+ _id: {$dateToString: {format: "%Y-%m-%dT%H", date: "$_id"}},
+ ticks: {$sum: "$ticks"},
+ avgTemp: {$avg: "$temp"},
}
- ]);
- }
+ },
+ {
+ $merge: {
+ into: {db: rollupColl.getDB().getName(), coll: rollupColl.getName()},
+ whenMatched: whenMatchedMode,
+ whenNotMatched: whenNotMatchedMode
+ }
+ }
+ ]);
+}
- // Shard the metrics (source) collection on _id, which is the date of the sample.
- const hourZero = new ISODate("2018-08-15T00:00:00.000Z");
- const hourOne = incDateByMinutes(hourZero, 60);
- st.shardColl(metricsColl, {_id: 1}, {_id: hourOne}, {_id: hourOne}, mongosDB.getName());
+// Shard the metrics (source) collection on _id, which is the date of the sample.
+const hourZero = new ISODate("2018-08-15T00:00:00.000Z");
+const hourOne = incDateByMinutes(hourZero, 60);
+st.shardColl(metricsColl, {_id: 1}, {_id: hourOne}, {_id: hourOne}, mongosDB.getName());
- // Insert sample documents into the metrics collection.
- const samplesPerHour = 10;
- let [ticksSum, tempSum] = insertRandomData(metricsColl, hourZero, samplesPerHour);
+// Insert sample documents into the metrics collection.
+const samplesPerHour = 10;
+let [ticksSum, tempSum] = insertRandomData(metricsColl, hourZero, samplesPerHour);
- runAggregate({startDate: hourZero, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourZero, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- // Verify the results of the $merge in the rollup collection.
- let res = rollupColl.find().sort({_id: 1});
- assert.eq([{_id: "2018-08-15T00", ticks: ticksSum, avgTemp: tempSum / samplesPerHour}],
- res.toArray());
+// Verify the results of the $merge in the rollup collection.
+let res = rollupColl.find().sort({_id: 1});
+assert.eq([{_id: "2018-08-15T00", ticks: ticksSum, avgTemp: tempSum / samplesPerHour}],
+ res.toArray());
- // Insert another hour's worth of data, and verify that the $merge will append the result to the
- // output collection.
- [ticksSum, tempSum] = insertRandomData(metricsColl, hourOne, samplesPerHour);
+// Insert another hour's worth of data, and verify that the $merge will append the result to the
+// output collection.
+[ticksSum, tempSum] = insertRandomData(metricsColl, hourOne, samplesPerHour);
- runAggregate({startDate: hourOne, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourOne, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(2, res.length);
- assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(2, res.length);
+assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
- // Whoops, there was a mistake in the last hour of data. Let's re-run the aggregation and update
- // the rollup collection using the "replace".
- assert.commandWorked(metricsColl.update({_id: hourOne}, {$inc: {ticks: 10}}));
- ticksSum += 10;
+// Whoops, there was a mistake in the last hour of data. Let's re-run the aggregation and update
+// the rollup collection using the "replace" mode.
+assert.commandWorked(metricsColl.update({_id: hourOne}, {$inc: {ticks: 10}}));
+ticksSum += 10;
- runAggregate({startDate: hourOne, whenMatchedMode: "replace", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourOne, whenMatchedMode: "replace", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(2, res.length);
- assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(2, res.length);
+assert.eq(res[1], {_id: "2018-08-15T01", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
- // Shard the output collection into 2 chunks, and make the split hour 6.
- const hourSix = incDateByMinutes(hourZero, 60 * 6);
- st.shardColl(rollupColl, {_id: 1}, {_id: hourSix}, {_id: hourSix}, mongosDB.getName());
+// Shard the output collection into 2 chunks, with the split point at hour 6.
+const hourSix = incDateByMinutes(hourZero, 60 * 6);
+st.shardColl(rollupColl, {_id: 1}, {_id: hourSix}, {_id: hourSix}, mongosDB.getName());
- // Insert hour 7 data into the metrics collection and re-run the aggregation.
- [ticksSum, tempSum] = insertRandomData(metricsColl, hourSix, samplesPerHour);
+// Insert data for hour 6 into the metrics collection and re-run the aggregation.
+[ticksSum, tempSum] = insertRandomData(metricsColl, hourSix, samplesPerHour);
- runAggregate({startDate: hourSix, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
+runAggregate({startDate: hourSix, whenMatchedMode: "fail", whenNotMatchedMode: "insert"});
- res = rollupColl.find().sort({_id: 1}).toArray();
- assert.eq(3, res.length, tojson(res));
- assert.eq(res[2], {_id: "2018-08-15T06", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
+res = rollupColl.find().sort({_id: 1}).toArray();
+assert.eq(3, res.length, tojson(res));
+assert.eq(res[2], {_id: "2018-08-15T06", ticks: ticksSum, avgTemp: tempSum / samplesPerHour});
- st.stop();
+st.stop();
}());