author	Henrik Edin <henrik.edin@mongodb.com>	2019-05-10 13:08:16 -0400
committer	Henrik Edin <henrik.edin@mongodb.com>	2019-05-10 13:08:16 -0400
commit	c34bc93a15cbb6fe8b222f12afac5adbab6f0737 (patch)
tree	73dfb5e8a30e14a079abcb8b485ef8881a4db316
parent	5ec2e2508bec5cc64ea36f01f502c756c3e84597 (diff)
download	mongo-c34bc93a15cbb6fe8b222f12afac5adbab6f0737.tar.gz
Revert "SERVER-40238 New stage alias: $set => $addFields"
This reverts commit bf6c2696da7eb207c28e83f5bb7401c97b0f69ac.
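For context, the change being reverted (SERVER-40238) had registered $set as a pure alias of $addFields, so the two spellings were interchangeable in aggregation pipelines and pipeline-style updates; with this revert, only the $addFields spelling parses. A minimal illustrative mongo shell sketch (the collection name is hypothetical, not taken from this commit):

    // While the alias existed, these two pipeline-style updates behaved identically:
    db.example.update({_id: 1}, [{$set: {y: {$add: ["$x", 1]}}}]);
    db.example.update({_id: 1}, [{$addFields: {y: {$add: ["$x", 1]}}}]);
    // After this revert, $set is no longer a registered stage name, so the first
    // form fails to parse; hence the mechanical $set -> $addFields rewrites below.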
-rw-r--r--	jstests/change_streams/pipeline_cannot_modify_id_field.js	30
-rw-r--r--	jstests/change_streams/whitelist.js	3
-rw-r--r--	jstests/concurrency/fsm_workloads/update_inc_pipeline.js	4
-rw-r--r--	jstests/core/benchrun_pipeline_updates.js	2
-rw-r--r--	jstests/core/bypass_doc_validation.js	8
-rw-r--r--	jstests/core/collation_find_and_modify.js	6
-rw-r--r--	jstests/core/find_and_modify_pipeline_update.js	25
-rw-r--r--	jstests/core/update_pipeline_shell_helpers.js	22
-rw-r--r--	jstests/core/update_with_pipeline.js	10
-rw-r--r--	jstests/noPassthrough/pipeline_update_gated_by_enable_test_commands.js	9
-rw-r--r--	jstests/noPassthroughWithMongod/commands_that_write_accept_wc_standalone.js	4
-rw-r--r--	jstests/replsets/commands_that_write_accept_wc.js	4
-rw-r--r--	jstests/sharding/commands_that_write_accept_wc_shards.js	4
-rw-r--r--	src/mongo/db/pipeline/document_source_add_fields.cpp	14
-rw-r--r--	src/mongo/db/pipeline/document_source_add_fields.h	9
-rw-r--r--	src/mongo/db/pipeline/document_source_add_fields_test.cpp	29
16 files changed, 72 insertions, 111 deletions
diff --git a/jstests/change_streams/pipeline_cannot_modify_id_field.js b/jstests/change_streams/pipeline_cannot_modify_id_field.js
index 815dad648ae..3db6c0990eb 100644
--- a/jstests/change_streams/pipeline_cannot_modify_id_field.js
+++ b/jstests/change_streams/pipeline_cannot_modify_id_field.js
@@ -34,30 +34,30 @@
{$project: {_id: {data: "$_id._data", typeBits: "$_id._typeBits"}}}, // Fields renamed.
{$project: {_id: {_typeBits: "$_id._typeBits", _data: "$_id._data"}}}, // Fields reordered.
{$project: {_id: {_data: "$_id._typeBits", _typeBits: "$_id._data"}}}, // Fields swapped.
- {$set: {_id: "newValue"}},
- {$set: {_id: "$otherField"}},
- {$set: {"_id._data": "newValue"}},
- {$set: {"_id._data": "$otherField"}},
- {$set: {"_id.otherField": "newValue"}}, // New subfield added to _id.
+ {$addFields: {_id: "newValue"}},
+ {$addFields: {_id: "$otherField"}},
+ {$addFields: {"_id._data": "newValue"}},
+ {$addFields: {"_id._data": "$otherField"}},
+ {$addFields: {"_id.otherField": "newValue"}}, // New subfield added to _id.
[
{$addFields: {otherField: "$_id"}},
- {$set: {otherField: "newValue"}},
- {$set: {_id: "$otherField"}}
+ {$addFields: {otherField: "newValue"}},
+ {$addFields: {_id: "$otherField"}}
],
[
// Fields renamed.
{$addFields: {newId: {data: "$_id._data", typeBits: "$_id._typeBits"}}},
- {$set: {_id: "$newId"}}
+ {$addFields: {_id: "$newId"}}
],
[
// Fields reordered.
{$addFields: {newId: {_typeBits: "$_id._typeBits", _data: "$_id._data"}}},
- {$set: {_id: "$newId"}}
+ {$addFields: {_id: "$newId"}}
],
[
// Fields swapped.
{$addFields: {newId: {_data: "$_id._typeBits", _typeBits: "$_id._data"}}},
- {$set: {_id: "$newId"}}
+ {$addFields: {_id: "$newId"}}
],
{$replaceRoot: {newRoot: {otherField: "$_id"}}},
{$redact: {$cond: {if: {$gt: ["$_id", {}]}, then: "$$DESCEND", else: "$$PRUNE"}}} // _id:0
@@ -82,13 +82,13 @@
],
{$project: {"_id._data": 1, "_id._typeBits": 1}},
{$project: {_id: {_data: "$_id._data", _typeBits: "$_id._typeBits"}}},
- {$set: {_id: "$_id"}},
+ {$addFields: {_id: "$_id"}},
{$addFields: {otherField: "newValue"}},
- {$set: {_id: {_data: "$_id._data", _typeBits: "$_id._typeBits"}}},
- [{$addFields: {otherField: "$_id"}}, {$set: {_id: "$otherField"}}],
+ {$addFields: {_id: {_data: "$_id._data", _typeBits: "$_id._typeBits"}}},
+ [{$addFields: {otherField: "$_id"}}, {$addFields: {_id: "$otherField"}}],
[
{$addFields: {newId: {_data: "$_id._data", _typeBits: "$_id._typeBits"}}},
- {$set: {_id: "$newId"}}
+ {$addFields: {_id: "$newId"}}
],
{$replaceRoot: {newRoot: {_id: "$_id"}}},
{
@@ -141,4 +141,4 @@
getMoreRes, ErrorCodes.ChangeStreamFatalError, transform);
}, transform);
}
-}());
+}());
\ No newline at end of file
diff --git a/jstests/change_streams/whitelist.js b/jstests/change_streams/whitelist.js
index 16b6ca93145..12848e1e9e7 100644
--- a/jstests/change_streams/whitelist.js
+++ b/jstests/change_streams/whitelist.js
@@ -18,7 +18,6 @@
{$match: {_id: {$exists: true}}},
{$project: {_id: 1}},
{$addFields: {newField: 1}},
- {$set: {newField: 1}},
{$replaceRoot: {newRoot: {_id: "$_id"}}},
{$redact: "$$DESCEND"}
];
@@ -61,4 +60,4 @@
for (let bannedStage of blacklist) {
assertErrorCode(coll, changeStream.concat(bannedStage), ErrorCodes.IllegalOperation);
}
-}());
+}());
\ No newline at end of file
diff --git a/jstests/concurrency/fsm_workloads/update_inc_pipeline.js b/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
index bb4d78d7bcd..554b56c824d 100644
--- a/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
+++ b/jstests/concurrency/fsm_workloads/update_inc_pipeline.js
@@ -11,7 +11,9 @@ load('jstests/concurrency/fsm_workloads/update_inc.js'); // for $config
var $config = extendWorkload($config, function($config, $super) {
$config.data.getUpdateArgument = function getUpdateArgument(fieldName) {
- return [{$set: {[fieldName]: {$add: ["$" + fieldName, 1]}}}];
+ const updateDoc = [{$addFields: {}}];
+ updateDoc[0].$addFields[fieldName] = {$add: ["$" + fieldName, 1]};
+ return updateDoc;
};
$config.data.update_inc = "update_inc_pipeline";
diff --git a/jstests/core/benchrun_pipeline_updates.js b/jstests/core/benchrun_pipeline_updates.js
index a9b684c4d96..ef1ef719a0c 100644
--- a/jstests/core/benchrun_pipeline_updates.js
+++ b/jstests/core/benchrun_pipeline_updates.js
@@ -18,7 +18,7 @@
ns: coll.getFullName(),
query: {_id: 0},
writeCmd: true,
- update: [{$set: {x: {$add: ["$x", 1]}}}]
+ update: [{$addFields: {x: {$add: ["$x", 1]}}}]
},
],
parallel: 2,
diff --git a/jstests/core/bypass_doc_validation.js b/jstests/core/bypass_doc_validation.js
index d2b41c531c2..67394b30450 100644
--- a/jstests/core/bypass_doc_validation.js
+++ b/jstests/core/bypass_doc_validation.js
@@ -151,7 +151,7 @@
// differentiate between an update object and an array.
res = myDb.runCommand({
update: collName,
- updates: [{q: {}, u: [{$set: {pipeline: 1}}]}],
+ updates: [{q: {}, u: [{$addFields: {pipeline: 1}}]}],
bypassDocumentValidation: false
});
assertFailsValidation(BulkWriteResult(res));
@@ -159,21 +159,21 @@
assert.commandWorked(myDb.runCommand({
update: collName,
- updates: [{q: {}, u: [{$set: {pipeline: 1}}]}],
+ updates: [{q: {}, u: [{$addFields: {pipeline: 1}}]}],
bypassDocumentValidation: true
}));
assert.eq(1, coll.count({pipeline: 1}));
assert.commandFailed(myDb.runCommand({
findAndModify: collName,
- update: [{$set: {findAndModifyPipeline: 1}}],
+ update: [{$addFields: {findAndModifyPipeline: 1}}],
bypassDocumentValidation: false
}));
assert.eq(0, coll.count({findAndModifyPipeline: 1}));
assert.commandWorked(myDb.runCommand({
findAndModify: collName,
- update: [{$set: {findAndModifyPipeline: 1}}],
+ update: [{$addFields: {findAndModifyPipeline: 1}}],
bypassDocumentValidation: true
}));
assert.eq(1, coll.count({findAndModifyPipeline: 1}));
diff --git a/jstests/core/collation_find_and_modify.js b/jstests/core/collation_find_and_modify.js
index c66a8005870..5b014414580 100644
--- a/jstests/core/collation_find_and_modify.js
+++ b/jstests/core/collation_find_and_modify.js
@@ -21,7 +21,7 @@
assert.commandWorked(db.createCollection(coll.getName(), {collation: caseInsensitive}));
assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]}));
let doc = coll.findAndModify(
- {update: [{$set: {newField: {$indexOfArray: ["$x", "B"]}}}], new: true});
+ {update: [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}], new: true});
assert.eq(doc.newField, 3, doc);
//
@@ -32,7 +32,7 @@
assert(coll.drop());
assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]}));
doc = coll.findAndModify({
- update: [{$set: {newField: {$indexOfArray: ["$x", "B"]}}}],
+ update: [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}],
collation: caseSensitive,
new: true
});
@@ -60,7 +60,7 @@
assert(coll.drop());
assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]}));
doc = coll.findAndModify({
- update: [{$set: {newField: {$indexOfArray: ["$x", "B"]}}}],
+ update: [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}],
collation: caseInsensitive,
new: true
});
diff --git a/jstests/core/find_and_modify_pipeline_update.js b/jstests/core/find_and_modify_pipeline_update.js
index 1f7d229b7bc..d97ace94777 100644
--- a/jstests/core/find_and_modify_pipeline_update.js
+++ b/jstests/core/find_and_modify_pipeline_update.js
@@ -12,27 +12,27 @@
// Test that it generally works.
assert.commandWorked(coll.insert([{_id: 0}, {_id: 1}, {_id: 2}, {_id: 3}, {_id: 4}]));
- let found = coll.findAndModify({query: {_id: 0}, update: [{$set: {y: 1}}]});
+ let found = coll.findAndModify({query: {_id: 0}, update: [{$addFields: {y: 1}}]});
assert.eq(found, {_id: 0});
- found = coll.findAndModify({query: {_id: 0}, update: [{$set: {z: 2}}], new: true});
+ found = coll.findAndModify({query: {_id: 0}, update: [{$addFields: {z: 2}}], new: true});
assert.eq(found, {_id: 0, y: 1, z: 2});
// Test that pipeline-style update supports the 'fields' argument.
assert(coll.drop());
assert.commandWorked(
coll.insert([{_id: 0, x: 0}, {_id: 1, x: 1}, {_id: 2, x: 2}, {_id: 3, x: 3}]));
- found = coll.findAndModify({query: {_id: 0}, update: [{$set: {y: 0}}], fields: {x: 0}});
+ found = coll.findAndModify({query: {_id: 0}, update: [{$addFields: {y: 0}}], fields: {x: 0}});
assert.eq(found, {_id: 0});
- found = coll.findAndModify({query: {_id: 1}, update: [{$set: {y: 1}}], fields: {x: 1}});
+ found = coll.findAndModify({query: {_id: 1}, update: [{$addFields: {y: 1}}], fields: {x: 1}});
assert.eq(found, {_id: 1, x: 1});
- found =
- coll.findAndModify({query: {_id: 2}, update: [{$set: {y: 2}}], fields: {x: 0}, new: true});
+ found = coll.findAndModify(
+ {query: {_id: 2}, update: [{$addFields: {y: 2}}], fields: {x: 0}, new: true});
assert.eq(found, {_id: 2, y: 2});
- found =
- coll.findAndModify({query: {_id: 3}, update: [{$set: {y: 3}}], fields: {x: 1}, new: true});
+ found = coll.findAndModify(
+ {query: {_id: 3}, update: [{$addFields: {y: 3}}], fields: {x: 1}, new: true});
assert.eq(found, {_id: 3, x: 3});
// We skip the following test for sharded fixtures as it will fail as the query for
@@ -42,13 +42,14 @@
assert(coll.drop());
assert.commandWorked(
coll.insert([{_id: 0, x: 'b'}, {_id: 1, x: 'd'}, {_id: 2, x: 'a'}, {_id: 3, x: 'c'}]));
- found = coll.findAndModify({update: [{$set: {foo: "bar"}}], sort: {x: -1}, new: true});
+ found =
+ coll.findAndModify({update: [{$addFields: {foo: "bar"}}], sort: {x: -1}, new: true});
assert.eq(found, {_id: 1, x: 'd', foo: "bar"});
}
// Test that it rejects the combination of arrayFilters and a pipeline-style update.
- let err =
- assert.throws(() => coll.findAndModify(
- {query: {_id: 1}, update: [{$set: {y: 1}}], arrayFilters: [{"i.x": 4}]}));
+ let err = assert.throws(
+ () => coll.findAndModify(
+ {query: {_id: 1}, update: [{$addFields: {y: 1}}], arrayFilters: [{"i.x": 4}]}));
assert.eq(err.code, ErrorCodes.FailedToParse);
}());
diff --git a/jstests/core/update_pipeline_shell_helpers.js b/jstests/core/update_pipeline_shell_helpers.js
index 36f373f7811..c5369befde2 100644
--- a/jstests/core/update_pipeline_shell_helpers.js
+++ b/jstests/core/update_pipeline_shell_helpers.js
@@ -20,22 +20,22 @@
assert.commandWorked(testColl.insert({_id: 2, a: 2, b: 3}));
// Test that each of the update shell helpers permits pipeline-style updates.
- assert.commandWorked(testColl.update({_id: 1}, [{$set: {update: true}}]));
- assert.commandWorked(testColl.update({}, [{$set: {updateMulti: true}}], {multi: true}));
- assert.commandWorked(testColl.updateOne({_id: 1}, [{$set: {updateOne: true}}]));
+ assert.commandWorked(testColl.update({_id: 1}, [{$addFields: {update: true}}]));
+ assert.commandWorked(testColl.update({}, [{$addFields: {updateMulti: true}}], {multi: true}));
+ assert.commandWorked(testColl.updateOne({_id: 1}, [{$addFields: {updateOne: true}}]));
assert.commandWorked(testColl.bulkWrite([
- {updateOne: {filter: {_id: 1}, update: [{$set: {bulkWriteUpdateOne: true}}]}},
- {updateMany: {filter: {}, update: [{$set: {bulkWriteUpdateMany: true}}]}}
+ {updateOne: {filter: {_id: 1}, update: [{$addFields: {bulkWriteUpdateOne: true}}]}},
+ {updateMany: {filter: {}, update: [{$addFields: {bulkWriteUpdateMany: true}}]}}
]));
// Test that each of the Bulk API update functions correctly handle pipeline syntax.
const unorderedBulkOp = testColl.initializeUnorderedBulkOp();
const orderedBulkOp = testColl.initializeOrderedBulkOp();
- unorderedBulkOp.find({_id: 1}).updateOne([{$set: {unorderedBulkOpUpdateOne: true}}]);
- unorderedBulkOp.find({}).update([{$set: {unorderedBulkOpUpdateMulti: true}}]);
- orderedBulkOp.find({_id: 1}).updateOne([{$set: {orderedBulkOpUpdateOne: true}}]);
- orderedBulkOp.find({}).update([{$set: {orderedBulkOpUpdateMulti: true}}]);
+ unorderedBulkOp.find({_id: 1}).updateOne([{$addFields: {unorderedBulkOpUpdateOne: true}}]);
+ unorderedBulkOp.find({}).update([{$addFields: {unorderedBulkOpUpdateMulti: true}}]);
+ orderedBulkOp.find({_id: 1}).updateOne([{$addFields: {orderedBulkOpUpdateOne: true}}]);
+ orderedBulkOp.find({}).update([{$addFields: {orderedBulkOpUpdateMulti: true}}]);
assert.commandWorked(unorderedBulkOp.execute());
assert.commandWorked(orderedBulkOp.execute());
@@ -73,10 +73,10 @@
const expectedFindOneAndUpdatePostImage =
Object.merge(expectedFindAndModifyPostImage, {findOneAndUpdate: true});
const findAndModifyPostImage = testColl.findAndModify(
- {query: {_id: 1}, update: [{$set: {findAndModify: true}}], new: true});
+ {query: {_id: 1}, update: [{$addFields: {findAndModify: true}}], new: true});
assert.docEq(findAndModifyPostImage, expectedFindAndModifyPostImage);
const findOneAndUpdatePostImage = testColl.findOneAndUpdate(
- {_id: 1}, [{$set: {findOneAndUpdate: true}}], {returnNewDocument: true});
+ {_id: 1}, [{$addFields: {findOneAndUpdate: true}}], {returnNewDocument: true});
assert.docEq(findOneAndUpdatePostImage, expectedFindOneAndUpdatePostImage);
// Shell helpers for replacement updates should reject pipeline-style updates.
diff --git a/jstests/core/update_with_pipeline.js b/jstests/core/update_with_pipeline.js
index e0b6774a786..d9dae091835 100644
--- a/jstests/core/update_with_pipeline.js
+++ b/jstests/core/update_with_pipeline.js
@@ -50,7 +50,7 @@
testUpdate({
query: {_id: 1},
initialDocumentList: [{_id: 1, x: 1}],
- update: [{$set: {foo: 4}}],
+ update: [{$addFields: {foo: 4}}],
resultDocList: [{_id: 1, x: 1, foo: 4}],
nModified: 1
});
@@ -73,7 +73,7 @@
testUpdate({
query: {x: 1},
initialDocumentList: [{_id: 1, x: 1}, {_id: 2, x: 1}],
- update: [{$set: {bar: 4}}],
+ update: [{$addFields: {bar: 4}}],
resultDocList: [{_id: 1, x: 1, bar: 4}, {_id: 2, x: 1, bar: 4}],
nModified: 2,
options: {multi: true}
@@ -85,7 +85,7 @@
testUpdate({
query: {_id: {$in: [1, 2]}},
initialDocumentList: [{_id: 1, x: 1}, {_id: 2, x: 2}],
- update: [{$set: {bar: 4}}],
+ update: [{$addFields: {bar: 4}}],
resultDocList: [{_id: 1, x: 1, bar: 4}, {_id: 2, x: 2, bar: 4}],
nModified: 1,
options: {multi: false}
@@ -93,7 +93,7 @@
}
// Upsert performs insert.
- testUpsertDoesInsert({_id: 1, x: 1}, [{$set: {foo: 4}}], {_id: 1, x: 1, foo: 4});
+ testUpsertDoesInsert({_id: 1, x: 1}, [{$addFields: {foo: 4}}], {_id: 1, x: 1, foo: 4});
testUpsertDoesInsert({_id: 1, x: 1}, [{$project: {x: 1}}], {_id: 1, x: 1});
testUpsertDoesInsert({_id: 1, x: 1}, [{$project: {x: "foo"}}], {_id: 1, x: "foo"});
@@ -136,6 +136,6 @@
// The 'arrayFilters' option is not valid for pipeline updates.
assert.commandFailedWithCode(
- coll.update({_id: 1}, [{$set: {x: 1}}], {arrayFilters: [{x: {$eq: 1}}]}),
+ coll.update({_id: 1}, [{$addFields: {x: 1}}], {arrayFilters: [{x: {$eq: 1}}]}),
ErrorCodes.FailedToParse);
})();
diff --git a/jstests/noPassthrough/pipeline_update_gated_by_enable_test_commands.js b/jstests/noPassthrough/pipeline_update_gated_by_enable_test_commands.js
index 210a1ed9b16..ae97830267e 100644
--- a/jstests/noPassthrough/pipeline_update_gated_by_enable_test_commands.js
+++ b/jstests/noPassthrough/pipeline_update_gated_by_enable_test_commands.js
@@ -10,10 +10,10 @@
const conn = MongoRunner.runMongod();
const db = conn.getDB("test");
- assert.commandFailedWithCode(db.coll.update({}, [{$set: {x: 1}}]),
+ assert.commandFailedWithCode(db.coll.update({}, [{$addFields: {x: 1}}]),
ErrorCodes.FailedToParse);
const error =
- assert.throws(() => db.coll.findAndModify({query: {}, update: [{$set: {x: 1}}]}));
+ assert.throws(() => db.coll.findAndModify({query: {}, update: [{$addFields: {x: 1}}]}));
assert.eq(error.code, ErrorCodes.FailedToParse);
MongoRunner.stopMongod(conn);
@@ -24,8 +24,9 @@
const conn = MongoRunner.runMongod();
const db = conn.getDB("test");
- assert.commandWorked(db.coll.update({}, [{$set: {x: 1}}]));
- assert.doesNotThrow(() => db.coll.findAndModify({query: {}, update: [{$set: {x: 1}}]}));
+ assert.commandWorked(db.coll.update({}, [{$addFields: {x: 1}}]));
+ assert.doesNotThrow(() =>
+ db.coll.findAndModify({query: {}, update: [{$addFields: {x: 1}}]}));
MongoRunner.stopMongod(conn);
}());
diff --git a/jstests/noPassthroughWithMongod/commands_that_write_accept_wc_standalone.js b/jstests/noPassthroughWithMongod/commands_that_write_accept_wc_standalone.js
index 0e12eb05a97..aab4ba9b30b 100644
--- a/jstests/noPassthroughWithMongod/commands_that_write_accept_wc_standalone.js
+++ b/jstests/noPassthroughWithMongod/commands_that_write_accept_wc_standalone.js
@@ -36,7 +36,7 @@
update: collName,
updates: [{
q: {type: 'oak'},
- u: [{$set: {type: 'ginkgo'}}],
+ u: [{$addFields: {type: 'ginkgo'}}],
}],
writeConcern: {w: 'majority'}
},
@@ -73,7 +73,7 @@
req: {
findAndModify: collName,
query: {type: 'oak'},
- update: [{$set: {type: 'ginkgo'}}],
+ update: [{$addFields: {type: 'ginkgo'}}],
writeConcern: {w: 'majority'}
},
setupFunc: function() {
diff --git a/jstests/replsets/commands_that_write_accept_wc.js b/jstests/replsets/commands_that_write_accept_wc.js
index e99ef63ddba..e18d43c50df 100644
--- a/jstests/replsets/commands_that_write_accept_wc.js
+++ b/jstests/replsets/commands_that_write_accept_wc.js
@@ -57,7 +57,7 @@
update: collName,
updates: [{
q: {type: 'oak'},
- u: [{$set: {type: 'ginkgo'}}],
+ u: [{$addFields: {type: 'ginkgo'}}],
}],
writeConcern: {w: 'majority'}
},
@@ -94,7 +94,7 @@
req: {
findAndModify: collName,
query: {type: 'oak'},
- update: [{$set: {type: 'ginkgo'}}],
+ update: [{$addFields: {type: 'ginkgo'}}],
writeConcern: {w: 'majority'}
},
setupFunc: function() {
diff --git a/jstests/sharding/commands_that_write_accept_wc_shards.js b/jstests/sharding/commands_that_write_accept_wc_shards.js
index 80ac26b36e7..2e3659eb6d1 100644
--- a/jstests/sharding/commands_that_write_accept_wc_shards.js
+++ b/jstests/sharding/commands_that_write_accept_wc_shards.js
@@ -113,7 +113,7 @@ load('jstests/libs/write_concern_util.js');
update: collName,
updates: [{
q: {type: 'oak'},
- u: [{$set: {type: 'ginkgo'}}],
+ u: [{$addFields: {type: 'ginkgo'}}],
}],
writeConcern: {w: 'majority'}
},
@@ -152,7 +152,7 @@ load('jstests/libs/write_concern_util.js');
req: {
findAndModify: collName,
query: {type: 'oak'},
- update: [{$set: {type: 'ginkgo'}}],
+ update: [{$addFields: {type: 'ginkgo'}}],
writeConcern: {w: 'majority'}
},
setupFunc: function() {
diff --git a/src/mongo/db/pipeline/document_source_add_fields.cpp b/src/mongo/db/pipeline/document_source_add_fields.cpp
index 319ef9776c6..e95d420ac4d 100644
--- a/src/mongo/db/pipeline/document_source_add_fields.cpp
+++ b/src/mongo/db/pipeline/document_source_add_fields.cpp
@@ -45,33 +45,27 @@ using parsed_aggregation_projection::ParsedAddFields;
REGISTER_DOCUMENT_SOURCE(addFields,
LiteParsedDocumentSourceDefault::parse,
DocumentSourceAddFields::createFromBson);
-REGISTER_DOCUMENT_SOURCE(set,
- LiteParsedDocumentSourceDefault::parse,
- DocumentSourceAddFields::createFromBson);
intrusive_ptr<DocumentSource> DocumentSourceAddFields::create(
- BSONObj addFieldsSpec, const intrusive_ptr<ExpressionContext>& expCtx, StringData stageName) {
+ BSONObj addFieldsSpec, const intrusive_ptr<ExpressionContext>& expCtx) {
const bool isIndependentOfAnyCollection = false;
intrusive_ptr<DocumentSourceSingleDocumentTransformation> addFields(
new DocumentSourceSingleDocumentTransformation(
expCtx,
ParsedAddFields::create(expCtx, addFieldsSpec),
- stageName.toString(),
+ "$addFields",
isIndependentOfAnyCollection));
return addFields;
}
intrusive_ptr<DocumentSource> DocumentSourceAddFields::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& expCtx) {
- const auto specifiedName = elem.fieldNameStringData();
- invariant(specifiedName == kStageName || specifiedName == kAliasNameSet);
-
uassert(40272,
- str::stream() << specifiedName << " specification stage must be an object, got "
+ str::stream() << "$addFields specification stage must be an object, got "
<< typeName(elem.type()),
elem.type() == Object);
- return DocumentSourceAddFields::create(elem.Obj(), expCtx, specifiedName);
+ return DocumentSourceAddFields::create(elem.Obj(), expCtx);
}
}
diff --git a/src/mongo/db/pipeline/document_source_add_fields.h b/src/mongo/db/pipeline/document_source_add_fields.h
index 5c99a3790bb..cf24246676e 100644
--- a/src/mongo/db/pipeline/document_source_add_fields.h
+++ b/src/mongo/db/pipeline/document_source_add_fields.h
@@ -36,21 +36,14 @@ namespace mongo {
/**
* $addFields adds or replaces the specified fields to/in the document while preserving the original
* document. It is modeled on and throws the same errors as $project.
- *
- * This stage is also aliased as $set and functions the same way.
*/
class DocumentSourceAddFields final {
public:
- static constexpr StringData kStageName = "$addFields"_sd;
- static constexpr StringData kAliasNameSet = "$set"_sd; // An alternate name for this stage.
-
/**
* Convenience method for creating a $addFields stage from 'addFieldsSpec'.
*/
static boost::intrusive_ptr<DocumentSource> create(
- BSONObj addFieldsSpec,
- const boost::intrusive_ptr<ExpressionContext>& expCtx,
- StringData stageName = kStageName);
+ BSONObj addFieldsSpec, const boost::intrusive_ptr<ExpressionContext>& expCtx);
/**
* Parses a $addFields stage from the user-supplied BSON.
diff --git a/src/mongo/db/pipeline/document_source_add_fields_test.cpp b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
index bd8c9483ea7..c0b896d973a 100644
--- a/src/mongo/db/pipeline/document_source_add_fields_test.cpp
+++ b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
@@ -73,35 +73,6 @@ TEST_F(AddFieldsTest, ShouldKeepUnspecifiedFieldsReplaceExistingFieldsAndAddNewF
ASSERT_TRUE(addFields->getNext().isEOF());
}
-TEST_F(AddFieldsTest, ShouldSerializeAndParse) {
- auto addFields = DocumentSourceAddFields::create(BSON("a" << BSON("$const"
- << "new")),
- getExpCtx());
- ASSERT(addFields->getSourceName() == DocumentSourceAddFields::kStageName);
- vector<Value> serializedArray;
- addFields->serializeToArray(serializedArray);
- auto serializedBson = serializedArray[0].getDocument().toBson();
- ASSERT_BSONOBJ_EQ(serializedBson, fromjson("{$addFields: {a: {$const: 'new'}}}"));
- addFields = DocumentSourceAddFields::createFromBson(serializedBson.firstElement(), getExpCtx());
- ASSERT(addFields != nullptr);
- ASSERT(addFields->getSourceName() == DocumentSourceAddFields::kStageName);
-}
-
-TEST_F(AddFieldsTest, SetAliasShouldSerializeAndParse) {
- auto setStage = DocumentSourceAddFields::create(BSON("a" << BSON("$const"
- << "new")),
- getExpCtx(),
- DocumentSourceAddFields::kAliasNameSet);
- ASSERT(setStage->getSourceName() == DocumentSourceAddFields::kAliasNameSet);
- vector<Value> serializedArray;
- setStage->serializeToArray(serializedArray);
- auto serializedBson = serializedArray[0].getDocument().toBson();
- ASSERT_BSONOBJ_EQ(serializedBson, fromjson("{$set: {a: {$const: 'new'}}}"));
- setStage = DocumentSourceAddFields::createFromBson(serializedBson.firstElement(), getExpCtx());
- ASSERT(setStage != nullptr);
- ASSERT(setStage->getSourceName() == DocumentSourceAddFields::kAliasNameSet);
-}
-
TEST_F(AddFieldsTest, ShouldOptimizeInnerExpressions) {
auto addFields = DocumentSourceAddFields::create(
BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))), getExpCtx());