diff options
Diffstat (limited to 'jstests/core/write/update')
60 files changed, 4367 insertions, 0 deletions
diff --git a/jstests/core/write/update/batch_write_command_update.js b/jstests/core/write/update/batch_write_command_update.js new file mode 100644 index 00000000000..d9e0fea6668 --- /dev/null +++ b/jstests/core/write/update/batch_write_command_update.js @@ -0,0 +1,299 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [ +// assumes_unsharded_collection, +// assumes_write_concern_unchanged, +// requires_multi_updates, +// requires_non_retryable_writes, +// requires_fastcount, +// ] + +// +// Ensures that mongod respects the batch write protocols for updates +// + +var coll = db.getCollection("batch_write_update"); +coll.drop(); + +var request; +var result; +var batch; + +var maxWriteBatchSize = db.hello().maxWriteBatchSize; + +function resultOK(result) { + return result.ok && !('code' in result) && !('errmsg' in result) && !('errInfo' in result) && + !('writeErrors' in result); +} + +function resultNOK(result) { + return !result.ok && typeof (result.code) == 'number' && typeof (result.errmsg) == 'string'; +} + +function countEventually(collection, n) { + assert.soon( + function() { + return collection.count() === n; + }, + function() { + return "unacknowledged write timed out"; + }); +} + +// EACH TEST BELOW SHOULD BE SELF-CONTAINED, FOR EASIER DEBUGGING + +// +// NO DOCS, illegal command +coll.remove({}); +request = { + update: coll.getName() +}; +result = coll.runCommand(request); +assert(resultNOK(result), tojson(result)); + +// +// Single document upsert, no write concern specified +coll.remove({}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}] +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(1, result.n); +assert('upserted' in result); +assert.eq(1, result.upserted.length); +assert.eq(0, 
result.upserted[0].index); + +// Count the upserted doc +var upsertedId = result.upserted[0]._id; +assert.eq(1, coll.count({_id: upsertedId})); +assert.eq(0, result.nModified, "missing/wrong nModified"); + +// +// Single document upsert, write concern specified, no ordered specified +coll.remove({}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}], + writeConcern: {w: 1} +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(1, result.n); +assert('upserted' in result); +assert.eq(1, result.upserted.length); +assert.eq(0, result.upserted[0].index); + +// Count the upserted doc +upsertedId = result.upserted[0]._id; +assert.eq(1, coll.count({_id: upsertedId})); +assert.eq(0, result.nModified, "missing/wrong nModified"); + +// +// Single document upsert, write concern specified, ordered = true +coll.remove({}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {a: 1}}, upsert: true}], + writeConcern: {w: 1}, + ordered: true +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(1, result.n); +assert('upserted' in result); +assert.eq(1, result.upserted.length); +assert.eq(0, result.upserted[0].index); + +// Count the upserted doc +upsertedId = result.upserted[0]._id; +assert.eq(1, coll.count({_id: upsertedId})); +assert.eq(0, result.nModified, "missing/wrong nModified"); + +// +// Single document update +coll.remove({}); +coll.insert({a: 1}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {c: 1}}}], + writeConcern: {w: 1} +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(1, result.n); +assert(!('upserted' in result)); +assert.eq(1, coll.count()); +assert.eq(1, result.nModified, "missing/wrong nModified"); + +// +// Multi document update/upsert +coll.remove({}); +coll.insert({b: 1}); +request = { + update: coll.getName(), + updates: [ + {q: {b: 1}, u: 
{$set: {b: 1, a: 1}}, upsert: true}, + {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true} + ], + writeConcern: {w: 1}, + ordered: false +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(2, result.n); +assert.eq(1, result.nModified, "missing/wrong nModified"); + +assert.eq(1, result.upserted.length); +assert.eq(1, result.upserted[0].index); +assert.eq(1, coll.count({_id: result.upserted[0]._id})); +assert.eq(2, coll.count()); + +// +// Multiple document update +coll.remove({}); +coll.insert({a: 1}); +coll.insert({a: 1}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {c: 2}}, multi: true}], + writeConcern: {w: 1}, + ordered: false +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(2, result.n); +assert.eq(2, result.nModified, "missing/wrong nModified"); +assert.eq(2, coll.find({a: 1, c: 2}).count()); +assert.eq(2, coll.count()); + +// +// Multiple document update, some no-ops +coll.remove({}); +coll.insert({a: 1, c: 2}); +coll.insert({a: 1}); +request = { + update: coll.getName(), + updates: [{q: {a: 1}, u: {$set: {c: 2}}, multi: true}], + writeConcern: {w: 1}, + ordered: false +}; +printjson(result = coll.runCommand(request)); +assert(resultOK(result), tojson(result)); +assert.eq(2, result.n); +assert.eq(1, result.nModified, "missing/wrong nModified"); +assert.eq(2, coll.find({a: 1, c: 2}).count()); +assert.eq(2, coll.count()); + +// +// Large batch under the size threshold should update successfully +coll.remove({}); +coll.insert({a: 0}); +batch = []; +for (var i = 0; i < maxWriteBatchSize; ++i) { + batch.push({q: {}, u: {$inc: {a: 1}}}); +} +request = { + update: coll.getName(), + updates: batch, + writeConcern: {w: 1}, + ordered: false +}; +result = coll.runCommand(request); +assert(resultOK(result), tojson(result)); +assert.eq(batch.length, result.n); +assert.eq(batch.length, result.nModified, "missing/wrong nModified"); +assert.eq(1, coll.find({a: 
batch.length}).count()); +assert.eq(1, coll.count()); + +// +// Large batch above the size threshold should fail to update +coll.remove({}); +coll.insert({a: 0}); +batch = []; +for (var i = 0; i < maxWriteBatchSize + 1; ++i) { + batch.push({q: {}, u: {$inc: {a: 1}}}); +} +request = { + update: coll.getName(), + updates: batch, + writeConcern: {w: 1}, + ordered: false +}; +result = coll.runCommand(request); +assert(resultNOK(result), tojson(result)); +assert.eq(1, coll.find({a: 0}).count()); +assert.eq(1, coll.count()); + +// +// +// Unique index tests + +// +// Upsert fail due to duplicate key index, w:1, ordered:true +coll.drop(); +coll.createIndex({a: 1}, {unique: true}); +request = { + update: coll.getName(), + updates: [ + {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true}, + {q: {b: 3}, u: {$set: {b: 3, a: 2}}, upsert: true}, + {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true} + ], + writeConcern: {w: 1}, + ordered: true +}; +result = coll.runCommand(request); +assert(result.ok, tojson(result)); +assert.eq(2, result.n); +assert.eq(0, result.nModified, "wrong nModified"); +assert.eq(1, result.writeErrors.length); + +assert.eq(2, result.writeErrors[0].index); +assert.eq('number', typeof result.writeErrors[0].code); +assert.eq('string', typeof result.writeErrors[0].errmsg); + +assert.eq(2, result.upserted.length); +assert.eq(0, result.upserted[0].index); +assert.eq(1, coll.count({_id: result.upserted[0]._id})); + +assert.eq(1, result.upserted[1].index); +assert.eq(1, coll.count({_id: result.upserted[1]._id})); + +// +// Upsert fail due to duplicate key index, w:1, ordered:false +coll.drop(); +coll.createIndex({a: 1}, {unique: true}); +request = { + update: coll.getName(), + updates: [ + {q: {b: 1}, u: {$set: {b: 1, a: 1}}, upsert: true}, + {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true}, + {q: {b: 2}, u: {$set: {b: 2, a: 1}}, upsert: true}, + {q: {b: 3}, u: {$set: {b: 3, a: 3}}, upsert: true} + ], + writeConcern: {w: 1}, + ordered: false +}; +result = 
coll.runCommand(request); +assert(result.ok, tojson(result)); +assert.eq(2, result.n); +assert.eq(0, result.nModified, "wrong nModified"); +assert.eq(2, result.writeErrors.length); + +assert.eq(1, result.writeErrors[0].index); +assert.eq('number', typeof result.writeErrors[0].code); +assert.eq('string', typeof result.writeErrors[0].errmsg); + +assert.eq(2, result.writeErrors[1].index); +assert.eq('number', typeof result.writeErrors[1].code); +assert.eq('string', typeof result.writeErrors[1].errmsg); + +assert.eq(2, result.upserted.length); +assert.eq(0, result.upserted[0].index); +assert.eq(1, coll.count({_id: result.upserted[0]._id})); + +assert.eq(3, result.upserted[1].index); +assert.eq(1, coll.count({_id: result.upserted[1]._id})); diff --git a/jstests/core/write/update/collation_update.js b/jstests/core/write/update/collation_update.js new file mode 100644 index 00000000000..72f51de48d1 --- /dev/null +++ b/jstests/core/write/update/collation_update.js @@ -0,0 +1,318 @@ +// Cannot implicitly shard accessed collections because of collection existing when none +// expected. +// @tags: [assumes_no_implicit_collection_creation_after_drop] + +// Integration tests for collation-aware updates. +(function() { +'use strict'; +var coll = db.getCollection("collation_update_test"); + +const caseInsensitive = { + collation: {locale: "en_US", strength: 2} +}; +const caseSensitive = { + collation: {locale: "en_US", strength: 3} +}; +const numericOrdering = { + collation: {locale: "en_US", numericOrdering: true} +}; + +// Update modifiers respect collection default collation on simple _id query. +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), numericOrdering)); +assert.commandWorked(coll.insert({_id: 1, a: "124"})); +assert.commandWorked(coll.update({_id: 1}, {$min: {a: "1234"}})); +assert.eq(coll.find({a: "124"}).count(), 1); + +// Simple _id query with hint on different index should work. 
+assert.commandWorked(coll.createIndex({foobar: 1})); +function makeUpdateCmdWithHint(update, hint) { + return {update: coll.getName(), updates: [{q: {_id: 1}, u: update, hint: hint}]}; +} +assert.commandWorked(coll.runCommand(makeUpdateCmdWithHint({$min: {a: "49"}}, {foobar: 1}))); +assert.eq(coll.find({a: "49"}).count(), 1); +assert.commandWorked(coll.runCommand(makeUpdateCmdWithHint({$min: {a: "5"}}, {_id: 1}))); +assert.eq(coll.find({a: "5"}).count(), 1); + +// $min respects query collation. +coll.drop(); + +// 1234 > 124, so no change should occur. +assert.commandWorked(coll.insert({a: "124"})); +assert.commandWorked(coll.update({a: "124"}, {$min: {a: "1234"}}, numericOrdering)); +assert.eq(coll.find({a: "124"}).count(), 1); + +// "1234" < "124" (non-numeric ordering), so an update should occur. +assert.commandWorked(coll.update({a: "124"}, {$min: {a: "1234"}}, caseSensitive)); +assert.eq(coll.find({a: "1234"}).count(), 1); + +// $min respects collection default collation. +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), numericOrdering)); +assert.commandWorked(coll.insert({a: "124"})); +assert.commandWorked(coll.update({a: "124"}, {$min: {a: "1234"}})); +assert.eq(coll.find({a: "124"}).count(), 1); + +// $max respects query collation. +coll.drop(); + +// "1234" < "124", so an update should not occur. +assert.commandWorked(coll.insert({a: "124"})); +assert.commandWorked(coll.update({a: "124"}, {$max: {a: "1234"}}, caseSensitive)); +assert.eq(coll.find({a: "124"}).count(), 1); + +// 1234 > 124, so an update should occur. +assert.commandWorked(coll.update({a: "124"}, {$max: {a: "1234"}}, numericOrdering)); +assert.eq(coll.find({a: "1234"}).count(), 1); + +// $max respects collection default collation. 
+coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), numericOrdering)); +assert.commandWorked(coll.insert({a: "124"})); +assert.commandWorked(coll.update({a: "124"}, {$max: {a: "1234"}})); +assert.eq(coll.find({a: "1234"}).count(), 1); + +// $addToSet respects query collation. +coll.drop(); + +// "foo" == "FOO" (case-insensitive), so set isn't extended. +assert.commandWorked(coll.insert({a: ["foo"]})); +assert.commandWorked(coll.update({}, {$addToSet: {a: "FOO"}}, caseInsensitive)); +var set = coll.findOne().a; +assert.eq(set.length, 1); + +// "foo" != "FOO" (case-sensitive), so set is extended. +assert.commandWorked(coll.update({}, {$addToSet: {a: "FOO"}}, caseSensitive)); +set = coll.findOne().a; +assert.eq(set.length, 2); + +coll.drop(); + +// $each and $addToSet respect collation +assert.commandWorked(coll.insert({a: ["foo", "bar", "FOO"]})); +assert.commandWorked( + coll.update({}, {$addToSet: {a: {$each: ["FOO", "BAR", "str"]}}}, caseInsensitive)); +set = coll.findOne().a; +assert.eq(set.length, 4); +assert(set.includes("foo")); +assert(set.includes("FOO")); +assert(set.includes("bar")); +assert(set.includes("str")); + +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive)); +// "foo" == "FOO" (case-insensitive), so set isn't extended. +assert.commandWorked(coll.insert({a: ["foo"]})); +assert.commandWorked(coll.update({}, {$addToSet: {a: "FOO"}})); +var set = coll.findOne().a; +assert.eq(set.length, 1); + +// $pull respects query collation. +coll.drop(); + +// "foo" != "FOO" (case-sensitive), so it is not pulled. +assert.commandWorked(coll.insert({a: ["foo", "FOO"]})); +assert.commandWorked(coll.update({}, {$pull: {a: "foo"}}, caseSensitive)); +var arr = coll.findOne().a; +assert.eq(arr.length, 1); +assert(arr.includes("FOO")); + +// "foo" == "FOO" (case-insensitive), so "FOO" is pulled. 
+assert.commandWorked(coll.update({}, {$pull: {a: "foo"}}, caseInsensitive)); +arr = coll.findOne().a; +assert.eq(arr.length, 0); + +// collation-aware $pull removes all instances that match. +coll.drop(); +assert.commandWorked(coll.insert({a: ["foo", "FOO"]})); +assert.commandWorked(coll.update({}, {$pull: {a: "foo"}}, caseInsensitive)); +arr = coll.findOne().a; +assert.eq(arr.length, 0); + +// collation-aware $pull with comparisons removes matching instances. +coll.drop(); + +// "124" > "1234" (case-sensitive), so it is not removed. +assert.commandWorked(coll.insert({a: ["124", "1234"]})); +assert.commandWorked(coll.update({}, {$pull: {a: {$lt: "1234"}}}, caseSensitive)); +arr = coll.findOne().a; +assert.eq(arr.length, 2); + +// 124 < 1234 (numeric ordering), so it is removed. +assert.commandWorked(coll.update({}, {$pull: {a: {$lt: "1234"}}}, numericOrdering)); +arr = coll.findOne().a; +assert.eq(arr.length, 1); +assert(arr.includes("1234")); + +// $pull respects collection default collation. +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive)); +assert.commandWorked(coll.insert({a: ["foo", "FOO"]})); +assert.commandWorked(coll.update({}, {$pull: {a: "foo"}})); +var arr = coll.findOne().a; +assert.eq(arr.length, 0); + +// $pullAll respects query collation. +coll.drop(); + +// "foo" != "FOO" (case-sensitive), so no changes are made. +assert.commandWorked(coll.insert({a: ["foo", "bar"]})); +assert.commandWorked(coll.update({}, {$pullAll: {a: ["FOO", "BAR"]}}, caseSensitive)); +var arr = coll.findOne().a; +assert.eq(arr.length, 2); + +// "foo" == "FOO", "bar" == "BAR" (case-insensitive), so both are removed. +assert.commandWorked(coll.update({}, {$pullAll: {a: ["FOO", "BAR"]}}, caseInsensitive)); +arr = coll.findOne().a; +assert.eq(arr.length, 0); + +// $pullAll respects collection default collation. 
+coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive)); +assert.commandWorked(coll.insert({a: ["foo", "bar"]})); +assert.commandWorked(coll.update({}, {$pullAll: {a: ["FOO", "BAR"]}})); +var arr = coll.findOne().a; +assert.eq(arr.length, 0); + +// $push with $sort respects query collation. +coll.drop(); + +// "1230" < "1234" < "124" (case-sensitive) +assert.commandWorked(coll.insert({a: ["1234", "124"]})); +assert.commandWorked(coll.update({}, {$push: {a: {$each: ["1230"], $sort: 1}}}, caseSensitive)); +var arr = coll.findOne().a; +assert.eq(arr.length, 3); +assert.eq(arr[0], "1230"); +assert.eq(arr[1], "1234"); +assert.eq(arr[2], "124"); + +// "124" < "1230" < "1234" (numeric ordering) +coll.drop(); +assert.commandWorked(coll.insert({a: ["1234", "124"]})); +assert.commandWorked(coll.update({}, {$push: {a: {$each: ["1230"], $sort: 1}}}, numericOrdering)); +arr = coll.findOne().a; +assert.eq(arr.length, 3); +assert.eq(arr[0], "124"); +assert.eq(arr[1], "1230"); +assert.eq(arr[2], "1234"); + +// $push with $sort respects collection default collation. +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), numericOrdering)); +assert.commandWorked(coll.insert({a: ["1234", "124"]})); +assert.commandWorked(coll.update({}, {$push: {a: {$each: ["1230"], $sort: 1}}})); +var arr = coll.findOne().a; +assert.eq(arr.length, 3); +assert.eq(arr[0], "124"); +assert.eq(arr[1], "1230"); +assert.eq(arr[2], "1234"); + +// $ positional operator respects query collation on $set. +coll.drop(); + +// "foo" != "FOO" (case-sensitive) so no update occurs. +assert.commandWorked(coll.insert({a: ["foo", "FOO"]})); +assert.commandWorked(coll.update({a: "FOO"}, {$set: {"a.$": "FOO"}}, caseSensitive)); +var arr = coll.findOne().a; +assert.eq(arr.length, 2); +assert.eq(arr[0], "foo"); +assert.eq(arr[1], "FOO"); + +// "foo" == "FOO" (case-insensitive) so an update occurs. 
+assert.commandWorked(coll.update({a: "FOO"}, {$set: {"a.$": "FOO"}}, caseInsensitive)); +var arr = coll.findOne().a; +assert.eq(arr.length, 2); +assert.eq(arr[0], "FOO"); +assert.eq(arr[1], "FOO"); + +// $ positional operator respects collection default collation on $set. +coll.drop(); +assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive)); +assert.commandWorked(coll.insert({a: ["foo", "FOO"]})); +assert.commandWorked(coll.update({a: "FOO"}, {$set: {"a.$": "FOO"}})); +var arr = coll.findOne().a; +assert.eq(arr.length, 2); +assert.eq(arr[0], "FOO"); +assert.eq(arr[1], "FOO"); + +// Pipeline-style update respects collection default collation. +assert(coll.drop()); +assert.commandWorked(db.createCollection(coll.getName(), caseInsensitive)); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked(coll.update({}, [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}])); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +// Pipeline-style update respects query collation. +// +// Case sensitive $indexOfArray on "B" matches "B". 
+assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}], caseSensitive)); +assert.eq(coll.findOne().newField, 5, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$project: {newField: {$indexOfArray: ["$x", "B"]}}}], caseSensitive)); +assert.eq(coll.findOne().newField, 5, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$replaceWith: {newField: {$indexOfArray: ["$x", "B"]}}}], caseSensitive)); +assert.eq(coll.findOne().newField, 5, `actual=${coll.findOne()}`); + +// Case insensitive $indexOfArray on "B" matches "b". +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}], caseInsensitive)); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$project: {newField: {$indexOfArray: ["$x", "B"]}}}], caseInsensitive)); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked( + coll.update({}, [{$replaceWith: {newField: {$indexOfArray: ["$x", "B"]}}}], caseInsensitive)); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +// Collation is respected for pipeline-style bulk update. 
+assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked(coll.bulkWrite([{ + updateOne: { + filter: {}, + update: [{$addFields: {newField: {$indexOfArray: ["$x", "B"]}}}], + collation: caseInsensitive.collation + } +}])); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked(coll.bulkWrite([{ + updateOne: { + filter: {}, + update: [{$project: {newField: {$indexOfArray: ["$x", "B"]}}}], + collation: caseInsensitive.collation + } +}])); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); + +assert(coll.drop()); +assert.commandWorked(coll.insert({x: [1, 2, "a", "b", "c", "B"]})); +assert.commandWorked(coll.bulkWrite([{ + updateOne: { + filter: {}, + update: [{$replaceWith: {newField: {$indexOfArray: ["$x", "B"]}}}], + collation: caseInsensitive.collation + } +}])); +assert.eq(coll.findOne().newField, 3, `actual=${coll.findOne()}`); +})(); diff --git a/jstests/core/write/update/update2.js b/jstests/core/write/update/update2.js new file mode 100644 index 00000000000..080875b50bd --- /dev/null +++ b/jstests/core/write/update/update2.js @@ -0,0 +1,23 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +f = db.ed_db_update2; + +f.drop(); +f.save({a: 4}); +f.update({a: 4}, {$inc: {a: 2}}); +assert.eq(6, f.findOne().a); + +f.drop(); +f.save({a: 4}); +f.createIndex({a: 1}); +f.update({a: 4}, {$inc: {a: 2}}); +assert.eq(6, f.findOne().a); + +// Verify that drop clears the index +f.drop(); +f.save({a: 4}); +f.update({a: 4}, {$inc: {a: 2}}); +assert.eq(6, f.findOne().a); diff --git a/jstests/core/write/update/update3.js b/jstests/core/write/update/update3.js new file mode 100644 index 00000000000..5a61b8bcfc9 --- /dev/null +++ b/jstests/core/write/update/update3.js @@ -0,0 +1,33 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +// Update with mods corner cases. + +f = db.jstests_update3; + +f.drop(); +f.save({a: 1}); +f.update({}, {$inc: {a: 1}}); +assert.eq(2, f.findOne().a, "A"); + +f.drop(); +f.save({a: {b: 1}}); +f.update({}, {$inc: {"a.b": 1}}); +assert.eq(2, f.findOne().a.b, "B"); + +f.drop(); +f.save({a: {b: 1}}); +f.update({}, {$set: {"a.b": 5}}); +assert.eq(5, f.findOne().a.b, "C"); + +f.drop(); +f.save({'_id': 0}); +f.update({}, {$set: {'_id': 5}}); +assert.eq(0, f.findOne()._id, "D"); + +f.drop(); +f.save({_id: 1, a: 1}); +f.update({}, {$unset: {"a": 1, "b.c": 1}}); +assert.docEq({_id: 1}, f.findOne(), "E");
\ No newline at end of file diff --git a/jstests/core/write/update/update5.js b/jstests/core/write/update/update5.js new file mode 100644 index 00000000000..fafc0d72ce0 --- /dev/null +++ b/jstests/core/write/update/update5.js @@ -0,0 +1,44 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// +// @tags: [assumes_unsharded_collection, requires_fastcount] + +t = db.update5; + +function go(key) { + t.drop(); + + function check(num, name) { + assert.eq(1, t.find().count(), tojson(key) + " count " + name); + assert.eq(num, t.findOne().n, tojson(key) + " value " + name); + } + + t.update(key, {$inc: {n: 1}}, true); + check(1, "A"); + + t.update(key, {$inc: {n: 1}}, true); + check(2, "B"); + + t.update(key, {$inc: {n: 1}}, true); + check(3, "C"); + + var ik = {}; + for (k in key) + ik[k] = 1; + t.createIndex(ik); + + t.update(key, {$inc: {n: 1}}, true); + check(4, "D"); +} + +go({a: 5}); +go({a: 5}); + +go({a: 5, b: 7}); +go({a: null, b: 7}); + +go({referer: 'blah'}); +go({referer: 'blah', lame: 'bar'}); +go({referer: 'blah', name: 'bar'}); +go({date: null, referer: 'blah', name: 'bar'}); diff --git a/jstests/core/write/update/update6.js b/jstests/core/write/update/update6.js new file mode 100644 index 00000000000..8a1950b8d02 --- /dev/null +++ b/jstests/core/write/update/update6.js @@ -0,0 +1,49 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [ +// assumes_unsharded_collection, +// ] + +t = db.update6; +t.drop(); + +t.save({a: 1, b: {c: 1, d: 1}}); + +t.update({a: 1}, {$inc: {"b.c": 1}}); +assert.eq(2, t.findOne().b.c, "A"); +assert.eq("c,d", Object.keySet(t.findOne().b).toString(), "B"); + +t.update({a: 1}, {$inc: {"b.0e": 1}}); +assert.eq(1, t.findOne().b["0e"], "C"); +assert.docEq({"c": 2, "d": 1, "0e": 1}, t.findOne().b, "D"); + +// ----- + +t.drop(); + +t.save({ + "_id": 2, + "b3": {"0720": 5, "0721": 12, "0722": 11, "0723": 3}, + //"b323" : {"0720" : 1} , +}); + +assert.eq(4, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 1 : ks before"); +t.update({_id: 2}, {$inc: {'b3.0719': 1}}, true); +assert.eq(5, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 1 : ks after"); + +// ----- + +t.drop(); + +t.save({ + "_id": 2, + "b3": {"0720": 5, "0721": 12, "0722": 11, "0723": 3}, + "b324": {"0720": 1}, +}); + +assert.eq(4, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 2 : ks before"); +printjson(t.find({_id: 2}, {b3: 1})[0].b3); +t.update({_id: 2}, {$inc: {'b3.0719': 1}}); +printjson(t.find({_id: 2}, {b3: 1})[0].b3); +assert.eq(5, Object.keySet(t.find({_id: 2}, {b3: 1})[0].b3).length, "test 2 : ks after"); diff --git a/jstests/core/write/update/update7.js b/jstests/core/write/update/update7.js new file mode 100644 index 00000000000..d3a7a5d1deb --- /dev/null +++ b/jstests/core/write/update/update7.js @@ -0,0 +1,141 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection, requires_multi_updates, requires_non_retryable_writes] + +t = db.update7; +t.drop(); + +function s() { + return t.find().sort({_id: 1}).map(function(z) { + return z.x; + }); +} + +t.save({_id: 1, x: 1}); +t.save({_id: 2, x: 5}); + +assert.eq("1,5", s(), "A"); + +t.update({}, {$inc: {x: 1}}); +assert.eq("2,5", s(), "B"); + +t.update({_id: 1}, {$inc: {x: 1}}); +assert.eq("3,5", s(), "C"); + +t.update({_id: 2}, {$inc: {x: 1}}); +assert.eq("3,6", s(), "D"); + +t.update({}, {$inc: {x: 1}}, false, true); +assert.eq("4,7", s(), "E"); + +t.update({}, {$set: {x: 2}}, false, true); +assert.eq("2,2", s(), "F"); + +// non-matching in cursor + +t.drop(); + +t.save({_id: 1, x: 1, a: 1, b: 1}); +t.save({_id: 2, x: 5, a: 1, b: 2}); +assert.eq("1,5", s(), "B1"); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("2,6", s(), "B2"); + +t.update({b: 1}, {$inc: {x: 1}}, false, true); +assert.eq("3,6", s(), "B3"); + +t.update({b: 3}, {$inc: {x: 1}}, false, true); +assert.eq("3,6", s(), "B4"); + +t.createIndex({a: 1}); +t.createIndex({b: 1}); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("4,7", s(), "B5"); + +t.update({b: 1}, {$inc: {x: 1}}, false, true); +assert.eq("5,7", s(), "B6"); + +t.update({b: 3}, {$inc: {x: 1}}, false, true); +assert.eq("5,7", s(), "B7"); + +t.update({b: 2}, {$inc: {x: 1}}, false, true); +assert.eq("5,8", s(), "B7"); + +// multi-key + +t.drop(); + +t.save({_id: 1, x: 1, a: [1, 2]}); +t.save({_id: 2, x: 5, a: [2, 3]}); +assert.eq("1,5", s(), "C1"); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("2,5", s(), "C2"); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("3,5", s(), "C3"); + +t.update({a: 3}, {$inc: {x: 1}}, false, true); +assert.eq("3,6", s(), "C4"); + +t.update({a: 2}, {$inc: {x: 1}}, false, true); +assert.eq("4,7", s(), "C5"); + +t.update({a: {$gt: 0}}, {$inc: {x: 1}}, false, true); +assert.eq("5,8", s(), "C6"); + +t.drop(); + +t.save({_id: 1, x: 1, a: [1, 2]}); 
+t.save({_id: 2, x: 5, a: [2, 3]}); +t.createIndex({a: 1}); +assert.eq("1,5", s(), "D1"); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("2,5", s(), "D2"); + +t.update({a: 1}, {$inc: {x: 1}}, false, true); +assert.eq("3,5", s(), "D3"); + +t.update({a: 3}, {$inc: {x: 1}}, false, true); +assert.eq("3,6", s(), "D4"); + +t.update({a: 2}, {$inc: {x: 1}}, false, true); +assert.eq("4,7", s(), "D5"); + +t.update({a: {$gt: 0}}, {$inc: {x: 1}}, false, true); +assert.eq("5,8", s(), "D6"); + +t.update({a: {$lt: 10}}, {$inc: {x: -1}}, false, true); +assert.eq("4,7", s(), "D7"); + +// --- + +t.save({_id: 3}); +assert.eq("4,7,", s(), "E1"); +t.update({}, {$inc: {x: 1}}, false, true); +assert.eq("5,8,1", s(), "E2"); + +for (i = 4; i < 8; i++) + t.save({_id: i}); +t.save({_id: i, x: 1}); +assert.eq("5,8,1,,,,,1", s(), "E4"); +t.update({}, {$inc: {x: 1}}, false, true); +assert.eq("6,9,2,1,1,1,1,2", s(), "E5"); + +// --- $inc indexed field + +t.drop(); + +t.save({_id: 1, x: 1}); +t.save({_id: 2, x: 2}); +t.save({_id: 3, x: 3}); + +t.createIndex({x: 1}); + +assert.eq("1,2,3", s(), "F1"); +t.update({x: {$gt: 0}}, {$inc: {x: 5}}, false, true); +assert.eq("6,7,8", s(), "F1"); diff --git a/jstests/core/write/update/update8.js b/jstests/core/write/update/update8.js new file mode 100644 index 00000000000..596bc8695dd --- /dev/null +++ b/jstests/core/write/update/update8.js @@ -0,0 +1,11 @@ + +t = db.update8; +t.drop(); + +t.update({_id: 1, tags: {"$ne": "a"}}, {"$push": {tags: "a"}}, true); +assert.eq({_id: 1, tags: ["a"]}, t.findOne(), "A"); + +t.drop(); +// SERVER-390 +// t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true ); +// printjson( t.findOne() ); diff --git a/jstests/core/write/update/update9.js b/jstests/core/write/update/update9.js new file mode 100644 index 00000000000..d119681a09e --- /dev/null +++ b/jstests/core/write/update/update9.js @@ -0,0 +1,18 @@ + +t = db.update9; +t.drop(); + +orig = { + "_id": 1, + "question": "a", + "choices": {"1": {"choice": "b"}, 
"0": {"choice": "c"}}, + +}; + +t.save(orig); +assert.eq(orig, t.findOne(), "A"); + +t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}}); + +orig.choices["0"].votes = [1]; +assert.eq(orig.choices["0"], t.findOne().choices["0"], "B"); diff --git a/jstests/core/write/update/update_addToSet.js b/jstests/core/write/update/update_addToSet.js new file mode 100644 index 00000000000..0713170d228 --- /dev/null +++ b/jstests/core/write/update/update_addToSet.js @@ -0,0 +1,144 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// +// @tags: [ +// assumes_unsharded_collection, +// ] + +(function() { +"use strict"; + +const collNamePrefix = 'update_addToSet_'; +let collCount = 0; +let coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); + +let doc = {_id: 1, a: [2, 1]}; +assert.commandWorked(coll.insert(doc)); + +assert.eq(doc, coll.findOne()); + +assert.commandWorked(coll.update({}, {$addToSet: {a: 3}})); +doc.a.push(3); +assert.eq(doc, coll.findOne()); + +coll.update({}, {$addToSet: {a: 3}}); +assert.eq(doc, coll.findOne()); + +// SERVER-628 +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [3, 5, 6]}}})); +doc.a.push(5); +doc.a.push(6); +assert.eq(doc, coll.findOne()); + +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +doc = { + _id: 1, + a: [3, 5, 6] +}; +assert.commandWorked(coll.insert(doc)); +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [3, 5, 6]}}})); +assert.eq(doc, coll.findOne()); + +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [3, 5, 6]}}}, true)); +assert.eq(doc, coll.findOne()); +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [3, 5, 6]}}}, true)); +assert.eq(doc, coll.findOne()); + +// SERVER-630 +coll = 
db.getCollection(collNamePrefix + collCount++); +coll.drop(); +assert.commandWorked(coll.update({_id: 2}, {$addToSet: {a: 3}}, true)); +assert.eq(1, coll.find({}).itcount()); +assert.eq({_id: 2, a: [3]}, coll.findOne()); + +// SERVER-3245 +doc = { + _id: 1, + a: [1, 2] +}; +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [1, 2]}}}, true)); +assert.eq(doc, coll.findOne()); + +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [1, 2, 1, 2]}}}, true)); +assert.eq(doc, coll.findOne()); + +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +assert.commandWorked(coll.insert({_id: 1})); +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [1, 2, 2, 1]}}})); +assert.eq(doc, coll.findOne()); + +assert.commandWorked(coll.update({_id: 1}, {$addToSet: {a: {$each: [3, 2, 2, 3, 3]}}})); +doc.a.push(3); +assert.eq(doc, coll.findOne()); + +// Test that dotted and '$' prefixed field names work when nested. 
+coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +doc = { + _id: 1, + a: [1, 2] +}; +assert.commandWorked(coll.insert(doc)); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {'x.$.y': 'bad'}}})); +assert.commandWorked(coll.update({}, {$addToSet: {a: {b: {'x.$.y': 'bad'}}}})); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {"$bad": "bad"}}})); +assert.commandWorked(coll.update({}, {$addToSet: {a: {b: {"$bad": "bad"}}}})); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {_id: {"x.y": 2}}}})); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [{'x.$.y': 'bad'}]}}})); +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [{b: {'x.$.y': 'bad'}}]}}})); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [{'$bad': 'bad'}]}}})); +assert.commandWorked(coll.update({}, {$addToSet: {a: {$each: [{b: {'$bad': 'bad'}}]}}})); + +// Test that nested _id fields are allowed. +coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +doc = { + _id: 1, + a: [1, 2] +}; +assert.commandWorked(coll.insert(doc)); + +assert.commandWorked(coll.update({}, {$addToSet: {a: {_id: ["foo", "bar", "baz"]}}})); +assert.commandWorked(coll.update({}, {$addToSet: {a: {_id: /acme.*corp/}}})); + +// Test that DBRefs are allowed. 
+coll = db.getCollection(collNamePrefix + collCount++); +coll.drop(); +doc = { + _id: 1, + a: [1, 2] +}; +assert.commandWorked(coll.insert(doc)); + +const foo = { + "foo": "bar" +}; +assert.commandWorked(coll.insert(foo)); +const fooDoc = coll.findOne(foo); +assert.eq(fooDoc.foo, foo.foo); + +const fooDocRef = { + reference: new DBRef(coll.getName(), fooDoc._id, coll.getDB().getName()) +}; + +assert.commandWorked(coll.update({_id: doc._id}, {$addToSet: {a: fooDocRef}})); +assert.eq(coll.findOne({_id: doc._id}).a[2], fooDocRef); + +assert.commandWorked(coll.update({_id: doc._id}, {$addToSet: {a: {b: fooDocRef}}})); +assert.eq(coll.findOne({_id: doc._id}).a[3].b, fooDocRef); +}()); diff --git a/jstests/core/write/update/update_addToSet2.js b/jstests/core/write/update/update_addToSet2.js new file mode 100644 index 00000000000..44ba8bce671 --- /dev/null +++ b/jstests/core/write/update/update_addToSet2.js @@ -0,0 +1,13 @@ + +t = db.update_addToSet2; +t.drop(); + +o = { + _id: 1 +}; +t.insert({_id: 1}); + +t.update({}, {$addToSet: {'kids': {'name': 'Bob', 'age': '4'}}}); +t.update({}, {$addToSet: {'kids': {'name': 'Dan', 'age': '2'}}}); + +printjson(t.findOne()); diff --git a/jstests/core/write/update/update_addToSet3.js b/jstests/core/write/update/update_addToSet3.js new file mode 100644 index 00000000000..efd682cef4c --- /dev/null +++ b/jstests/core/write/update/update_addToSet3.js @@ -0,0 +1,21 @@ +// Test the use of $each in $addToSet + +t = db.update_addToSet3; +t.drop(); + +t.insert({_id: 1}); + +t.update({_id: 1}, {$addToSet: {a: {$each: [6, 5, 4]}}}); +assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4]}, "A1"); + +t.update({_id: 1}, {$addToSet: {a: {$each: [3, 2, 1]}}}); +assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4, 3, 2, 1]}, "A2"); + +t.update({_id: 1}, {$addToSet: {a: {$each: [4, 7, 9, 2]}}}); +assert.eq(t.findOne(), {_id: 1, a: [6, 5, 4, 3, 2, 1, 7, 9]}, "A3"); + +t.update({_id: 1}, {$addToSet: {a: {$each: [12, 13, 12]}}}); +assert.eq(t.findOne(), {_id: 1, a: [6, 
5, 4, 3, 2, 1, 7, 9, 12, 13]}, "A4"); + +assert.writeError(t.update({_id: 1}, {$addToSet: {a: {$each: 0}}})); +assert.writeError(t.update({_id: 1}, {$addToSet: {a: {$each: {a: 1}}}})); diff --git a/jstests/core/write/update/update_affects_indexes.js b/jstests/core/write/update/update_affects_indexes.js new file mode 100644 index 00000000000..41b0cb4a016 --- /dev/null +++ b/jstests/core/write/update/update_affects_indexes.js @@ -0,0 +1,94 @@ +// This is a regression test for SERVER-32048. It checks that index keys are correctly updated when +// an update modifier implicitly creates a new array element. +(function() { +"use strict"; + +let coll = db.update_affects_indexes; +coll.drop(); +let indexKeyPattern = {"a.b": 1}; +assert.commandWorked(coll.createIndex(indexKeyPattern)); + +// Tests that the document 'docId' has all the index keys in 'expectedKeys' and none of the +// index keys in 'unexpectedKeys'. +function assertExpectedIndexKeys(docId, expectedKeys, unexpectedKeys) { + for (let key of expectedKeys) { + let res = coll.find(docId).hint(indexKeyPattern).min(key).returnKey().toArray(); + assert.eq(1, res.length, tojson(res)); + assert.eq(key, res[0]); + } + + for (let key of unexpectedKeys) { + let res = coll.find(docId).hint(indexKeyPattern).min(key).returnKey().toArray(); + if (res.length > 0) { + assert.eq(1, res.length, tojson(res)); + assert.neq(0, bsonWoCompare(key, res[0]), tojson(res[0])); + } + } +} + +// $set implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 0}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 0}, [{"a.b": 0}, {"a.b": null}], []); + +// $set implicitly creates array element beyond end of array. 
+assert.commandWorked(coll.insert({_id: 1, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 1}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 1}, {$set: {"a.3.c": 0}})); +assertExpectedIndexKeys({_id: 1}, [{"a.b": 0}, {"a.b": null}], []); + +// $set implicitly creates array element in empty array (no index key changes needed). +assert.commandWorked(coll.insert({_id: 2, a: []})); +assertExpectedIndexKeys({_id: 2}, [{"a.b": null}], []); +assert.commandWorked(coll.update({_id: 2}, {$set: {"a.0.c": 0}})); +assertExpectedIndexKeys({_id: 2}, [{"a.b": null}], []); + +// $inc implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 3, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 3}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 3}, {$inc: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 3}, [{"a.b": 0}, {"a.b": null}], []); + +// $mul implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 4, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 4}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 4}, {$mul: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 4}, [{"a.b": 0}, {"a.b": null}], []); + +// $addToSet implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 5, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 5}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 5}, {$addToSet: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 5}, [{"a.b": 0}, {"a.b": null}], []); + +// $bit implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 6, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 6}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 6}, {$bit: {"a.1.c": {and: NumberInt(1)}}})); +assertExpectedIndexKeys({_id: 6}, [{"a.b": 0}, {"a.b": null}], []); + +// $min implicitly creates array element at end of array. 
+assert.commandWorked(coll.insert({_id: 7, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 7}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 7}, {$min: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 7}, [{"a.b": 0}, {"a.b": null}], []); + +// $max implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 8, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 8}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 8}, {$max: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 8}, [{"a.b": 0}, {"a.b": null}], []); + +// $currentDate implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 9, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 9}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 9}, {$currentDate: {"a.1.c": true}})); +assertExpectedIndexKeys({_id: 9}, [{"a.b": 0}, {"a.b": null}], []); + +// $push implicitly creates array element at end of array. +assert.commandWorked(coll.insert({_id: 10, a: [{b: 0}]})); +assertExpectedIndexKeys({_id: 10}, [{"a.b": 0}], [{"a.b": null}]); +assert.commandWorked(coll.update({_id: 10}, {$push: {"a.1.c": 0}})); +assertExpectedIndexKeys({_id: 10}, [{"a.b": 0}, {"a.b": null}], []); +}()); diff --git a/jstests/core/write/update/update_arrayFilters.js b/jstests/core/write/update/update_arrayFilters.js new file mode 100644 index 00000000000..9618fed1c5c --- /dev/null +++ b/jstests/core/write/update/update_arrayFilters.js @@ -0,0 +1,603 @@ +// Cannot implicitly shard accessed collections because of collection existing when none +// expected. +// @tags: [ +// assumes_no_implicit_collection_creation_after_drop, +// requires_multi_updates, +// requires_non_retryable_writes, +// # TODO SERVER-30466 +// does_not_support_causal_consistency, +// ] + +// Tests for the arrayFilters option to update and findAndModify. 
+(function() { +"use strict"; + +const collName = "update_arrayFilters"; +let coll = db[collName]; +coll.drop(); +assert.commandWorked(db.createCollection(collName)); +let res; + +// +// Tests for update. +// + +// Non-array arrayFilters fails to parse. +assert.writeError(coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: {i: 0}}), + ErrorCodes.TypeMismatch); + +// Non-object array filter fails to parse. +assert.writeError(coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: ["bad"]}), + ErrorCodes.TypeMismatch); + +// Bad array filter fails to parse. +res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0, j: 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.FailedToParse); +assert.neq(-1, + res.getWriteError().errmsg.indexOf("Expected a single top-level field name"), + "update failed for a reason other than failing to parse array filters"); + +// Multiple array filters with the same id fails to parse. +res = coll.update( + {_id: 0}, {$set: {"a.$[i]": 5, "a.$[j]": 6}}, {arrayFilters: [{i: 0}, {j: 0}, {i: 1}]}); +assert.writeErrorWithCode(res, ErrorCodes.FailedToParse); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf( + "Found multiple array filters with the same top-level field name"), + "update failed for a reason other than multiple array filters with the same top-level field name"); + +// Unused array filter fails to parse. +res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0}, {j: 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.FailedToParse); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf( + "The array filter for identifier 'j' was not used in the update { $set: { a.$[i]: 5.0 } }"), + "update failed for a reason other than unused array filter"); + +// Array filter without a top-level field name fails to parse. 
+res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{$alwaysTrue: 1}]}); +assert.writeErrorWithCode(res, ErrorCodes.FailedToParse); +assert.neq(-1, + res.getWriteError().errmsg.indexOf( + "Cannot use an expression without a top-level field name in arrayFilters"), + "update failed for a reason other than missing a top-level field name in arrayFilter"); + +// Array filter with $text inside fails to parse. +res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{$text: {$search: "foo"}}]}); +assert.writeErrorWithCode(res, ErrorCodes.BadValue); + +// Array filter with $where inside fails to parse. +res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{$where: "this.a == 2"}]}); +assert.writeErrorWithCode(res, ErrorCodes.BadValue); + +// Array filter with $geoNear inside fails to parse. +res = coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{loc: {$geoNear: [50, 50]}}]}); +assert.writeErrorWithCode(res, [ErrorCodes.BadValue, 5626500]); + +// Array filter with $expr inside fails to parse. +res = coll.update( + {_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{$expr: {$eq: ["$foo", "$bar"]}}]}); +assert.writeErrorWithCode(res, ErrorCodes.QueryFeatureNotAllowed); + +// Good value for arrayFilters succeeds. +assert.commandWorked( + coll.update({_id: 0}, {$set: {"a.$[i]": 5, "a.$[j]": 6}}, {arrayFilters: [{i: 0}, {j: 0}]})); +assert.commandWorked( + coll.update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{$or: [{i: 0}, {$and: [{}]}]}]})); + +// +// Tests for findAndModify. +// + +// Non-array arrayFilters fails to parse. +assert.throws(function() { + coll.findAndModify({query: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: {i: 0}}); +}); + +// Non-object array filter fails to parse. +assert.throws(function() { + coll.findAndModify({query: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: ["bad"]}); +}); + +// arrayFilters option not allowed with remove=true. 
+assert.throws(function() { + coll.findAndModify({query: {_id: 0}, remove: true, arrayFilters: [{i: 0}]}); +}); + +// Bad array filter fails to parse. +assert.throws(function() { + coll.findAndModify( + {query: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: [{i: 0, j: 0}]}); +}); + +// Multiple array filters with the same id fails to parse. +assert.throws(function() { + coll.findAndModify({ + query: {_id: 0}, + update: {$set: {"a.$[i]": 5, "a.$[j]": 6}}, + arrayFilters: [{i: 0}, {j: 0}, {i: 1}] + }); +}); + +// Unused array filter fails to parse. +assert.throws(function() { + coll.findAndModify( + {query: {_id: 0}, update: {$set: {"a.$[i]": 5}, arrayFilters: [{i: 0}, {j: 0}]}}); +}); + +// Good value for arrayFilters succeeds. +assert.eq(null, coll.findAndModify({ + query: {_id: 0}, + update: {$set: {"a.$[i]": 5, "a.$[j]": 6}}, + arrayFilters: [{i: 0}, {j: 0}] +})); +assert.eq(null, coll.findAndModify({ + query: {_id: 0}, + update: {$set: {"a.$[i]": 5}}, + arrayFilters: [{$or: [{i: 0}, {$and: [{}]}]}] +})); + +// +// Tests for the bulk API. +// + +// update(). +let bulk = coll.initializeUnorderedBulkOp(); +bulk.find({}).arrayFilters("bad").update({$set: {"a.$[i]": 5}}); +assert.throws(function() { + bulk.execute(); +}); +bulk = coll.initializeUnorderedBulkOp(); +bulk.find({}).arrayFilters([{i: 0}]).update({$set: {"a.$[i]": 5}}); +assert.commandWorked(bulk.execute()); + +// updateOne(). +bulk = coll.initializeUnorderedBulkOp(); +bulk.find({_id: 0}).arrayFilters("bad").updateOne({$set: {"a.$[i]": 5}}); +assert.throws(function() { + bulk.execute(); +}); +bulk = coll.initializeUnorderedBulkOp(); +bulk.find({_id: 0}).arrayFilters([{i: 0}]).updateOne({$set: {"a.$[i]": 5}}); +assert.commandWorked(bulk.execute()); + +// +// Tests for the CRUD API. +// + +// findOneAndUpdate(). 
+assert.throws(function() { + coll.findOneAndUpdate({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: "bad"}); +}); +assert.eq(null, coll.findOneAndUpdate({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0}]})); + +// updateOne(). +assert.throws(function() { + coll.updateOne({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: "bad"}); +}); +res = coll.updateOne({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0}]}); +assert.eq(0, res.modifiedCount); + +// updateMany(). +assert.throws(function() { + coll.updateMany({}, {$set: {"a.$[i]": 5}}, {arrayFilters: "bad"}); +}); +res = coll.updateMany({}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0}]}); +assert.eq(0, res.modifiedCount); + +// updateOne with bulkWrite(). +assert.throws(function() { + coll.bulkWrite( + [{updateOne: {filter: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: "bad"}}]); +}); +res = coll.bulkWrite( + [{updateOne: {filter: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: [{i: 0}]}}]); +assert.eq(0, res.matchedCount); + +// updateMany with bulkWrite(). +assert.throws(function() { + coll.bulkWrite( + [{updateMany: {filter: {}, update: {$set: {"a.$[i]": 5}}, arrayFilters: "bad"}}]); +}); +res = coll.bulkWrite( + [{updateMany: {filter: {}, update: {$set: {"a.$[i]": 5}}, arrayFilters: [{i: 0}]}}]); +assert.eq(0, res.matchedCount); + +// +// Tests for explain(). +// + +// update(). +assert.throws(function() { + coll.explain().update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: "bad"}); +}); +assert.commandWorked( + coll.explain().update({_id: 0}, {$set: {"a.$[i]": 5}}, {arrayFilters: [{i: 0}]})); + +// findAndModify(). +assert.throws(function() { + coll.explain().findAndModify( + {query: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: "bad"}); +}); +assert.commandWorked(coll.explain().findAndModify( + {query: {_id: 0}, update: {$set: {"a.$[i]": 5}}, arrayFilters: [{i: 0}]})); + +// +// Tests for individual update modifiers. +// + +// $set. 
+coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 0, 1]})); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[i]": 2}}, {arrayFilters: [{i: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [2, 1, 2, 1]}); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[]": 3}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [3, 3, 3, 3]}); + +// $unset. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 0, 1]})); +assert.commandWorked(coll.update({_id: 0}, {$unset: {"a.$[i]": true}}, {arrayFilters: [{i: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [null, 1, null, 1]}); +assert.commandWorked(coll.update({_id: 0}, {$unset: {"a.$[]": true}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [null, null, null, null]}); + +// $inc. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 0, 1]})); +assert.commandWorked(coll.update({_id: 0}, {$inc: {"a.$[i]": 1}}, {arrayFilters: [{i: 1}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 2, 0, 2]}); +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 0, 1]})); +assert.commandWorked(coll.update({_id: 0}, {$inc: {"a.$[]": 1}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [1, 2, 1, 2]}); + +// $mul. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 2, 0, 2]})); +assert.commandWorked(coll.update({_id: 0}, {$mul: {"a.$[i]": 3}}, {arrayFilters: [{i: 2}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 6, 0, 6]}); +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [1, 2, 1, 2]})); +assert.commandWorked(coll.update({_id: 0}, {$mul: {"a.$[]": 3}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [3, 6, 3, 6]}); + +// $rename. 
// Dynamic ($[] / $[<id>]) paths are rejected on both the source and destination of $rename.
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [1, 2, 3, 4]}));
res = coll.update({_id: 0}, {$rename: {"a.$[i]": "b"}}, {arrayFilters: [{i: 0}]});
assert.writeErrorWithCode(res, ErrorCodes.BadValue);
assert.neq(
    -1,
    res.getWriteError().errmsg.indexOf("The source field for $rename may not be dynamic: a.$[i]"),
    "update failed for a reason other than using $[] syntax in $rename path");
// Fix: query on '_id' — the original queried {id: 0}. The unused-array-filter parse error is
// raised before the query is evaluated, so behavior is unchanged, but the intent was _id.
res = coll.update({_id: 0}, {$rename: {"a": "b"}}, {arrayFilters: [{i: 0}]});
assert.writeErrorWithCode(res, ErrorCodes.FailedToParse);
assert.neq(
    -1,
    res.getWriteError().errmsg.indexOf(
        "The array filter for identifier 'i' was not used in the update { $rename: { a: \"b\" } }"),
    "updated failed for reason other than unused array filter");
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [0], b: [1]}));
res = coll.update({_id: 0}, {$rename: {"a.$[]": "b"}});
assert.writeErrorWithCode(res, ErrorCodes.BadValue);
assert.neq(
    -1,
    res.getWriteError().errmsg.indexOf("The source field for $rename may not be dynamic: a.$[]"),
    "update failed for a reason other than using array updates with $rename");
res = coll.update({_id: 0}, {$rename: {"a": "b.$[]"}});
assert.writeErrorWithCode(res, ErrorCodes.BadValue);
assert.neq(-1,
           res.getWriteError().errmsg.indexOf(
               "The destination field for $rename may not be dynamic: b.$[]"),
           "update failed for a reason other than using array updates with $rename");
// A plain (non-dynamic) $rename still works.
assert.commandWorked(coll.update({_id: 0}, {$rename: {"a": "b"}}));
assert.eq(coll.findOne({_id: 0}), {_id: 0, b: [0]});

// $setOnInsert applies array-filter updates to the document being inserted by the upsert.
coll.drop();
assert.commandWorked(coll.update(
    {_id: 0, a: [0]}, {$setOnInsert: {"a.$[i]": 1}}, {arrayFilters: [{i: 0}], upsert: true}));
assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [1]});
coll.drop();
assert.commandWorked(coll.update({_id: 0, a: [0]}, {$setOnInsert: {"a.$[]": 1}}, {upsert: true}));
assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [1]});

// $min.
+coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0, c: 1}, {b: 0, c: -1}, {b: 1, c: 1}]})); +assert.commandWorked(coll.update({_id: 0}, {$min: {"a.$[i].c": 0}}, {arrayFilters: [{"i.b": 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 0, c: 0}, {b: 0, c: -1}, {b: 1, c: 1}]}); +assert.commandWorked(coll.update({_id: 0}, {$min: {"a.$[].c": 0}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 0, c: 0}, {b: 0, c: -1}, {b: 1, c: 0}]}); + +// $max. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0, c: 1}, {b: 0, c: -1}, {b: 1, c: -1}]})); +assert.commandWorked(coll.update({_id: 0}, {$max: {"a.$[i].c": 0}}, {arrayFilters: [{"i.b": 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 0, c: 1}, {b: 0, c: 0}, {b: 1, c: -1}]}); +assert.commandWorked(coll.update({_id: 0}, {$max: {"a.$[].c": 0}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 0, c: 1}, {b: 0, c: 0}, {b: 1, c: 0}]}); + +// $currentDate. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1]})); +assert.commandWorked( + coll.update({_id: 0}, {$currentDate: {"a.$[i]": true}}, {arrayFilters: [{i: 0}]})); +let doc = coll.findOne({_id: 0}); +assert(doc.a[0].constructor == Date, tojson(doc)); +assert.eq(doc.a[1], 1, printjson(doc)); +assert.commandWorked(coll.update({_id: 0}, {$currentDate: {"a.$[]": true}})); +doc = coll.findOne({_id: 0}); +assert(doc.a[0].constructor == Date, tojson(doc)); +assert(doc.a[1].constructor == Date, tojson(doc)); + +// $addToSet. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [[0], [1]]})); +assert.commandWorked(coll.update({_id: 0}, {$addToSet: {"a.$[i]": 2}}, {arrayFilters: [{i: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [[0, 2], [1]]}); +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [[0], [1]]})); +assert.commandWorked(coll.update({_id: 0}, {$addToSet: {"a.$[]": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [[0, 2], [1, 2]]}); + +// $pop. 
// $pop on the filtered element vs. on every element.
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [1, 2]]}));
assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.$[i]": 1}}, {arrayFilters: [{i: 0}]}));
assert.eq({_id: 0, a: [[0], [1, 2]]}, coll.findOne());
assert.commandWorked(coll.remove({}));
assert.commandWorked(coll.insert({_id: 0, a: [[0]]}));
assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.$[]": 1}}));
assert.eq({_id: 0, a: [[]]}, coll.findOne());

// $pullAll.
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1, 2, 3], [1, 2, 3, 4]]}));
assert.commandWorked(
    coll.update({_id: 0}, {$pullAll: {"a.$[i]": [0, 2]}}, {arrayFilters: [{i: 0}]}));
assert.eq({_id: 0, a: [[1, 3], [1, 2, 3, 4]]}, coll.findOne());
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1, 2, 3], [1, 2, 3, 4]]}));
// Fix: the original assigned this write result to 'res' but never checked it; assert success
// like every sibling case does.
assert.commandWorked(coll.update({_id: 0}, {$pullAll: {"a.$[]": [0, 2]}}));
assert.eq({_id: 0, a: [[1, 3], [1, 3, 4]]}, coll.findOne());

// $pull. Note the first filter {i: 2} matches the second nested array (it contains 2).
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [1, 2]]}));
assert.commandWorked(coll.update({_id: 0}, {$pull: {"a.$[i]": 1}}, {arrayFilters: [{i: 2}]}));
assert.eq({_id: 0, a: [[0, 1], [2]]}, coll.findOne());
assert.commandWorked(coll.remove({}));
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [1, 2]]}));
assert.commandWorked(coll.update({_id: 0}, {$pull: {"a.$[]": 1}}));
assert.eq({_id: 0, a: [[0], [2]]}, coll.findOne());

// $push.
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [2, 3]]}));
assert.commandWorked(coll.update({_id: 0}, {$push: {"a.$[i]": 4}}, {arrayFilters: [{i: 0}]}));
assert.eq({_id: 0, a: [[0, 1, 4], [2, 3]]}, coll.findOne());
coll.drop();
assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [2, 3]]}));
assert.commandWorked(coll.update({_id: 0}, {$push: {"a.$[]": 4}}));
assert.eq({_id: 0, a: [[0, 1, 4], [2, 3, 4]]}, coll.findOne());

// $bit.
+coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [NumberInt(0), NumberInt(2)]})); +assert.commandWorked( + coll.update({_id: 0}, {$bit: {"a.$[i]": {or: NumberInt(10)}}}, {arrayFilters: [{i: 0}]})); +assert.eq({_id: 0, a: [NumberInt(10), NumberInt(2)]}, coll.findOne()); +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: [NumberInt(0), NumberInt(2)]})); +assert.commandWorked(coll.update({_id: 0}, {$bit: {"a.$[]": {or: NumberInt(10)}}})); +assert.eq({_id: 0, a: [NumberInt(10), NumberInt(10)]}, coll.findOne()); + +// +// Multi update tests. +// + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 0, 1]})); +assert.commandWorked(coll.insert({_id: 1, a: [0, 2, 0, 2]})); +assert.commandWorked(coll.update({}, {$set: {"a.$[i]": 3}}, {multi: true, arrayFilters: [{i: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [3, 1, 3, 1]}); +assert.eq(coll.findOne({_id: 1}), {_id: 1, a: [3, 2, 3, 2]}); +assert.commandWorked(coll.update({}, {$set: {"a.$[]": 3}}, {multi: true})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [3, 3, 3, 3]}); +assert.eq(coll.findOne({_id: 1}), {_id: 1, a: [3, 3, 3, 3]}); + +// +// Collation tests. +// + +// arrayFilters respect operation collation. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: ["foo", "FOO"]})); +assert.commandWorked( + coll.update({_id: 0}, + {$set: {"a.$[i]": "bar"}}, + {arrayFilters: [{i: "foo"}], collation: {locale: "en_US", strength: 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: ["bar", "bar"]}); + +// arrayFilters respect the collection default collation. +coll.drop(); +assert.commandWorked(db.createCollection(collName, {collation: {locale: "en_US", strength: 2}})); +coll = db[collName]; +assert.commandWorked(coll.insert({_id: 0, a: ["foo", "FOO"]})); +assert.commandWorked( + coll.update({_id: 0}, {$set: {"a.$[i]": "bar"}}, {arrayFilters: [{i: "foo"}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: ["bar", "bar"]}); + +// +// Examples. 
+// + +// Update all documents in array. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}, {b: 1}]})); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[].b": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 2}, {b: 2}]}); + +// Update all matching documents in array. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}, {b: 1}]})); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[i].b": 2}}, {arrayFilters: [{"i.b": 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: 2}, {b: 1}]}); + +// Update all matching scalars in array. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1]})); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[i]": 2}}, {arrayFilters: [{i: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [2, 1]}); + +// Update all matching scalars in array of arrays. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [[0, 1], [0, 1]]})); +assert.commandWorked(coll.update({_id: 0}, {$set: {"a.$[].$[j]": 2}}, {arrayFilters: [{j: 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [[2, 1], [2, 1]]}); + +// Update all matching documents in nested array. +coll.drop(); +assert.commandWorked( + coll.insert({_id: 0, a: [{b: 0, c: [{d: 0}, {d: 1}]}, {b: 1, c: [{d: 0}, {d: 1}]}]})); +assert.commandWorked(coll.update( + {_id: 0}, {$set: {"a.$[i].c.$[j].d": 2}}, {arrayFilters: [{"i.b": 0}, {"j.d": 0}]})); +assert.eq(coll.findOne({_id: 0}), + {_id: 0, a: [{b: 0, c: [{d: 2}, {d: 1}]}, {b: 1, c: [{d: 0}, {d: 1}]}]}); + +// Update all scalars in array matching a logical predicate. +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1, 3]})); +assert.commandWorked( + coll.update({_id: 0}, {$set: {"a.$[i]": 2}}, {arrayFilters: [{$or: [{i: 0}, {i: 3}]}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [2, 1, 2]}); + +// +// Error cases. +// + +// Provide an <id> with no array filter. 
+coll.drop(); +res = coll.update({_id: 0}, {$set: {"a.$[i]": 0}}); +assert.writeErrorWithCode(res, ErrorCodes.BadValue); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf("No array filter found for identifier 'i' in path 'a.$[i]'"), + "update failed for a reason other than missing array filter"); + +// Use an <id> at the same position as a $, integer, or field name. +coll.drop(); + +res = coll.update({_id: 0}, {$set: {"a.$[i]": 0, "a.$": 0}}, {arrayFilters: [{i: 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.ConflictingUpdateOperators); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf("Updating the path 'a.$' would create a conflict at 'a'"), + "update failed for a reason other than conflicting array update and positional operator"); + +res = coll.update({_id: 0}, {$set: {"a.$[i]": 0, "a.0": 0}}, {arrayFilters: [{i: 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.ConflictingUpdateOperators); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf("Updating the path 'a.0' would create a conflict at 'a'"), + "update failed for a reason other than conflicting array update and integer field name"); + +res = coll.update({_id: 0}, {$set: {"a.$[i]": 0, "a.b": 0}}, {arrayFilters: [{i: 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.ConflictingUpdateOperators); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf("Updating the path 'a.b' would create a conflict at 'a'"), + "update failed for a reason other than conflicting array update and field name"); + +// Include an implicit array traversal in a path in an update modifier. 
+coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}]})); +res = coll.update({_id: 0}, {$set: {"a.b": 1}}); +assert.writeErrorWithCode(res, ErrorCodes.PathNotViable); +assert.neq( + -1, + res.getWriteError().errmsg.indexOf("Cannot create field 'b' in element {a: [ { b: 0.0 } ]}"), + "update failed for a reason other than implicit array traversal"); + +// <id> contains special characters or does not begin with a lowercase letter. +coll.drop(); + +res = coll.update({_id: 0}, {$set: {"a.$[$i]": 1}}, {arrayFilters: [{"$i": 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.BadValue); +assert.neq(-1, + res.getWriteError().errmsg.indexOf("unknown top level operator: $i"), + "update failed for a reason other than bad array filter identifier"); + +res = coll.update({_id: 0}, {$set: {"a.$[I]": 1}}, {arrayFilters: [{"I": 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.BadValue); +assert( + res.getWriteError().errmsg.startsWith("Error parsing array filter") && + res.getWriteError().errmsg.endsWith("The top-level field name must be an alphanumeric " + + "string beginning with a lowercase letter, found 'I'"), + "update failed for a reason other than bad array filter identifier: " + + tojson(res.getWriteError())); + +assert.commandWorked(coll.insert({_id: 0, a: [0], b: [{j: 0}]})); +res = coll.update({_id: 0}, {$set: {"a.$[i.j]": 1, "b.$[i]": 1}}, {arrayFilters: [{"i.j": 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.PathNotViable); +assert.neq(-1, + res.getWriteError().errmsg.indexOf("Cannot create field '$[i' in element {a: [ 0.0 ]}"), + "update failed for a reason other than bad array filter identifier"); + +// +// Nested array update conflict detection. +// + +// "a.$[i].b.$[k].c" and "a.$[j].b.$[k].d" are not a conflict, even if i and j are not +// disjoint. 
+coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{x: 0, b: [{y: 0, c: 0, d: 0}]}]})); +assert.commandWorked(coll.update({_id: 0}, + {$set: {"a.$[i].b.$[k].c": 1, "a.$[j].b.$[k].d": 1}}, + {arrayFilters: [{"i.x": 0}, {"j.x": 0}, {"k.y": 0}]})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{x: 0, b: [{y: 0, c: 1, d: 1}]}]}); + +// "a.$[i].b.$[k].c" and "a.$[j].b.$[k].c" are a conflict iff i and j are not disjoint. +coll.drop(); +assert.commandWorked( + coll.insert({_id: 0, a: [{x: 0, b: [{y: 0, c: 0}]}, {x: 1, b: [{y: 0, c: 0}]}]})); + +res = coll.update({_id: 0}, + {$set: {"a.$[i].b.$[k].c": 1, "a.$[j].b.$[k].c": 2}}, + {arrayFilters: [{"i.x": 0}, {"j.x": 0}, {"k.y": 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.ConflictingUpdateOperators); +assert.neq(-1, + res.getWriteError().errmsg.indexOf("Update created a conflict at 'a.0.b.$[k].c'"), + "update failed for a reason other than conflicting array updates"); + +assert.commandWorked(coll.update({_id: 0}, + {$set: {"a.$[i].b.$[k].c": 1, "a.$[j].b.$[k].c": 2}}, + {arrayFilters: [{"i.x": 0}, {"j.x": 1}, {"k.y": 0}]})); +assert.eq(coll.findOne({_id: 0}), + {_id: 0, a: [{x: 0, b: [{y: 0, c: 1}]}, {x: 1, b: [{y: 0, c: 2}]}]}); + +// "a.$[i].b.$[k].c" and "a.$[j].b.$[m].c" are a conflict iff k and m intersect for some +// element of a matching i and j. 
+coll.drop(); +assert.commandWorked( + coll.insert({_id: 0, a: [{x: 0, b: [{y: 0, c: 0}]}, {x: 1, b: [{y: 0, c: 0}, {y: 1, c: 0}]}]})); + +res = coll.update({_id: 0}, + {$set: {"a.$[i].b.$[k].c": 1, "a.$[j].b.$[m].c": 2}}, + {arrayFilters: [{"i.x": 0}, {"j.x": 0}, {"k.y": 0}, {"m.y": 0}]}); +assert.writeErrorWithCode(res, ErrorCodes.ConflictingUpdateOperators); +assert.neq(-1, + res.getWriteError().errmsg.indexOf("Update created a conflict at 'a.0.b.0.c'"), + "update failed for a reason other than conflicting array updates"); + +assert.commandWorked(coll.update({_id: 0}, + {$set: {"a.$[i].b.$[k].c": 1, "a.$[j].b.$[m].c": 2}}, + {arrayFilters: [{"i.x": 1}, {"j.x": 1}, {"k.y": 0}, {"m.y": 1}]})); +assert.eq(coll.findOne({_id: 0}), + {_id: 0, a: [{x: 0, b: [{y: 0, c: 0}]}, {x: 1, b: [{y: 0, c: 1}, {y: 1, c: 2}]}]}); +})(); diff --git a/jstests/core/write/update/update_array_offset_positional.js b/jstests/core/write/update/update_array_offset_positional.js new file mode 100644 index 00000000000..210e4d65bb7 --- /dev/null +++ b/jstests/core/write/update/update_array_offset_positional.js @@ -0,0 +1,69 @@ +/** + * Tests that array offset matches are not used to provide values for the positional operator. + */ +(function() { +"use strict"; + +let coll = db.jstest_update_array_offset_positional; +coll.drop(); + +// +// If there is no implicit array traversal, the positional operator cannot be used. 
+// + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0]})); +assert.writeError(coll.update({_id: 0, "a.0": 0}, {$set: {"a.$": 1}})); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}]})); +assert.writeError(coll.update({_id: 0, "a.0.b": 0}, {$set: {"a.$.b": 1}})); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [[0]]})); +assert.writeError(coll.update({_id: 0, "a.0.0": 0}, {$set: {"a.$.0": 1}})); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: [0]}]})); +assert.writeError(coll.update({_id: 0, "a.0.b.0": 0}, {$set: {"a.$.b.0": 1}})); + +// +// Array offset matches are not used to provide values for the positional operator on the same +// path. +// + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: [0, 1]}]})); +assert.commandWorked(coll.update({_id: 0, "a.0.b": 1}, {$set: {"a.0.b.$": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: [0, 2]}]}); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [{b: [0, 1]}]})); +assert.commandWorked(coll.update({_id: 0, "a.b.1": 1}, {$set: {"a.$.b.1": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [{b: [0, 2]}]}); + +// +// Array offset matches are not used to provide values for the positional operator on a +// different path. 
+// + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1], b: [0]})); +assert.commandWorked(coll.update({_id: 0, a: 1, "b.0": 0}, {$set: {"a.$": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 2], b: [0]}); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1], b: [{c: 0}]})); +assert.commandWorked(coll.update({_id: 0, a: 1, "b.0.c": 0}, {$set: {"a.$": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 2], b: [{c: 0}]}); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1], b: [[0]]})); +assert.commandWorked(coll.update({_id: 0, a: 1, "b.0.0": 0}, {$set: {"a.$": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 2], b: [[0]]}); + +coll.drop(); +assert.commandWorked(coll.insert({_id: 0, a: [0, 1], b: [{c: [0]}]})); +assert.commandWorked(coll.update({_id: 0, a: 1, "b.0.c.0": 0}, {$set: {"a.$": 2}})); +assert.eq(coll.findOne({_id: 0}), {_id: 0, a: [0, 2], b: [{c: [0]}]}); +}()); diff --git a/jstests/core/write/update/update_arraymatch1.js b/jstests/core/write/update/update_arraymatch1.js new file mode 100644 index 00000000000..10b7e37e451 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch1.js @@ -0,0 +1,25 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +t = db.update_arraymatch1; +t.drop(); + +o = { + _id: 1, + a: [{x: 1, y: 1}, {x: 2, y: 2}, {x: 3, y: 3}] +}; +t.insert(o); +assert.eq(o, t.findOne(), "A1"); + +q = { + "a.x": 2 +}; +t.update(q, {$set: {b: 5}}); +o.b = 5; +assert.eq(o, t.findOne(), "A2"); + +t.update({"a.x": 2}, {$inc: {"a.$.y": 1}}); +o.a[1].y++; +assert.eq(o, t.findOne(), "A3"); diff --git a/jstests/core/write/update/update_arraymatch2.js b/jstests/core/write/update/update_arraymatch2.js new file mode 100644 index 00000000000..7610de7c962 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch2.js @@ -0,0 +1,18 @@ +// @tags: [requires_multi_updates, requires_non_retryable_writes] + +t = db.update_arraymatch2; +t.drop(); + +t.insert({}); +t.insert({x: [1, 2, 3]}); +t.insert({x: 99}); +t.update({x: 2}, {$inc: {"x.$": 1}}, false, true); +assert(t.findOne({x: 1}).x[1] == 3, "A1"); + +t.insert({x: {y: [8, 7, 6]}}); +t.update({'x.y': 7}, {$inc: {"x.y.$": 1}}, false, true); +assert.eq(8, t.findOne({"x.y": 8}).x.y[1], "B1"); + +t.insert({x: [90, 91, 92], y: ['a', 'b', 'c']}); +t.update({x: 92}, {$set: {'y.$': 'z'}}, false, true); +assert.eq('z', t.findOne({x: 92}).y[2], "B2"); diff --git a/jstests/core/write/update/update_arraymatch3.js b/jstests/core/write/update/update_arraymatch3.js new file mode 100644 index 00000000000..36f7ab22430 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch3.js @@ -0,0 +1,17 @@ +// @tags: [requires_multi_updates, requires_non_retryable_writes] + +t = db.update_arraymatch3; +t.drop(); + +o = { + _id: 1, + title: "ABC", + comments: [{"by": "joe", "votes": 3}, {"by": "jane", "votes": 7}] +}; + +t.save(o); +assert.eq(o, t.findOne(), "A1"); + +t.update({'comments.by': 'joe'}, {$inc: {'comments.$.votes': 1}}, false, true); +o.comments[0].votes++; +assert.eq(o, t.findOne(), "A2"); diff --git a/jstests/core/write/update/update_arraymatch4.js b/jstests/core/write/update/update_arraymatch4.js new file mode 100644 index 
00000000000..3c087e53ca5 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch4.js @@ -0,0 +1,23 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +t = db.update_arraymatch4; +t.drop(); + +x = { + _id: 1, + arr: ["A1", "B1", "C1"] +}; +t.insert(x); +assert.eq(x, t.findOne(), "A1"); + +x.arr[0] = "A2"; +t.update({arr: "A1"}, {$set: {"arr.$": "A2"}}); +assert.eq(x, t.findOne(), "A2"); + +t.createIndex({arr: 1}); +x.arr[0] = "A3"; +t.update({arr: "A2"}, {$set: {"arr.$": "A3"}}); +assert.eq(x, t.findOne(), "A3"); // SERVER-1055 diff --git a/jstests/core/write/update/update_arraymatch5.js b/jstests/core/write/update/update_arraymatch5.js new file mode 100644 index 00000000000..1b4c967b38b --- /dev/null +++ b/jstests/core/write/update/update_arraymatch5.js @@ -0,0 +1,23 @@ +// @tags: [ +// requires_fastcount, +// requires_multi_updates, +// requires_non_retryable_writes, +// ] + +t = db.update_arraymatch5; +t.drop(); + +t.insert({abc: {visible: true}, testarray: [{foobar_id: 316, visible: true, xxx: 1}]}); +t.createIndex({'abc.visible': 1, 'testarray.visible': 1, 'testarray.xxx': 1}); +assert(t.findOne({'abc.visible': true, testarray: {'$elemMatch': {visible: true, xxx: 1}}}), "A1"); +assert(t.findOne({testarray: {'$elemMatch': {visible: true, xxx: 1}}}), "A2"); + +t.update({'testarray.foobar_id': 316}, + {'$set': {'testarray.$.visible': true, 'testarray.$.xxx': 2}}, + false, + true); + +assert(t.findOne(), "B1"); +assert(t.findOne({testarray: {'$elemMatch': {visible: true, xxx: 2}}}), "B2"); +assert(t.findOne({'abc.visible': true, testarray: {'$elemMatch': {visible: true, xxx: 2}}}), "B3"); +assert.eq(1, t.find().count(), "B4"); diff --git a/jstests/core/write/update/update_arraymatch6.js b/jstests/core/write/update/update_arraymatch6.js new file mode 100644 index 
00000000000..1241753b866 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch6.js @@ -0,0 +1,20 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +var res; +t = db.jstests_update_arraymatch6; +t.drop(); + +function doTest() { + t.save({a: [{id: 1, x: [5, 6, 7]}, {id: 2, x: [8, 9, 10]}]}); + res = t.update({'a.id': 1}, {$set: {'a.$.x': [1, 1, 1]}}); + assert.commandWorked(res); + assert.eq.automsg("1", "t.findOne().a[ 0 ].x[ 0 ]"); +} + +doTest(); +t.drop(); +t.createIndex({'a.id': 1}); +doTest();
\ No newline at end of file diff --git a/jstests/core/write/update/update_arraymatch7.js b/jstests/core/write/update/update_arraymatch7.js new file mode 100644 index 00000000000..cded4ba56f4 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch7.js @@ -0,0 +1,24 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_non_retryable_writes] + +// Check that the positional operator works properly when an index only match is used for the update +// query spec. SERVER-5067 + +t = db.jstests_update_arraymatch7; +t.drop(); + +function testPositionalInc() { + t.remove({}); + t.save({a: [{b: 'match', count: 0}]}); + t.update({'a.b': 'match'}, {$inc: {'a.$.count': 1}}); + // Check that the positional $inc succeeded. + assert(t.findOne({'a.count': 1})); +} + +testPositionalInc(); + +// Now check with a non multikey index. +t.createIndex({'a.b': 1}); +testPositionalInc(); diff --git a/jstests/core/write/update/update_arraymatch8.js b/jstests/core/write/update/update_arraymatch8.js new file mode 100644 index 00000000000..e3aa91d6422 --- /dev/null +++ b/jstests/core/write/update/update_arraymatch8.js @@ -0,0 +1,165 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [ +// assumes_unsharded_collection, +// ] + +// Checking for positional array updates with either .$ or .0 at the end +// SERVER-7511 + +// array.$.name +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'name': 'old'}]}); +assert(t.findOne({'array.name': 'old'})); +t.update({'array.name': 'old'}, {$set: {'array.$.name': 'new'}}); +assert(t.findOne({'array.name': 'new'})); +assert(!t.findOne({'array.name': 'old'})); + +// array.$ (failed in 2.2.2) +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'name': 'old'}]}); +assert(t.findOne({'array.name': 'old'})); +t.update({'array.name': 'old'}, {$set: {'array.$': {'name': 'new'}}}); +assert(t.findOne({'array.name': 'new'})); +assert(!t.findOne({'array.name': 'old'})); + +// array.0.name +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'name': 'old'}]}); +assert(t.findOne({'array.name': 'old'})); +t.update({'array.name': 'old'}, {$set: {'array.0.name': 'new'}}); +assert(t.findOne({'array.name': 'new'})); +assert(!t.findOne({'array.name': 'old'})); + +// array.0 (failed in 2.2.2) +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'name': 'old'}]}); +assert(t.findOne({'array.name': 'old'})); +t.update({'array.name': 'old'}, {$set: {'array.0': {'name': 'new'}}}); +assert(t.findOne({'array.name': 'new'})); +assert(!t.findOne({'array.name': 'old'})); + +// // array.12.name +t = db.jstests_update_arraymatch8; +t.drop(); +arr = new Array(); +for (var i = 0; i < 20; i++) { + arr.push({'name': 'old'}); +} +t.createIndex({'array.name': 1}); +t.insert({_id: 0, 'array': arr}); +assert(t.findOne({'array.name': 'old'})); +t.update({_id: 0}, {$set: {'array.12.name': 'new'}}); +// note: both documents now have to be in the array +assert(t.findOne({'array.name': 'new'})); +assert(t.findOne({'array.name': 'old'})); + 
+// array.12 (failed in 2.2.2) +t = db.jstests_update_arraymatch8; +t.drop(); +arr = new Array(); +for (var i = 0; i < 20; i++) { + arr.push({'name': 'old'}); +} +t.createIndex({'array.name': 1}); +t.insert({_id: 0, 'array': arr}); +assert(t.findOne({'array.name': 'old'})); +t.update({_id: 0}, {$set: {'array.12': {'name': 'new'}}}); +// note: both documents now have to be in the array +assert(t.findOne({'array.name': 'new'})); +assert(t.findOne({'array.name': 'old'})); + +// array.$.123a.name +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.123a.name': 1}); +t.insert({'array': [{'123a': {'name': 'old'}}]}); +assert(t.findOne({'array.123a.name': 'old'})); +t.update({'array.123a.name': 'old'}, {$set: {'array.$.123a.name': 'new'}}); +assert(t.findOne({'array.123a.name': 'new'})); +assert(!t.findOne({'array.123a.name': 'old'})); + +// array.$.123a +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'123a': {'name': 'old'}}]}); +assert(t.findOne({'array.123a.name': 'old'})); +t.update({'array.123a.name': 'old'}, {$set: {'array.$.123a': {'name': 'new'}}}); +assert(t.findOne({'array.123a.name': 'new'})); +assert(!t.findOne({'array.123a.name': 'old'})); + +// array.0.123a.name +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.123a.name': 1}); +t.insert({'array': [{'123a': {'name': 'old'}}]}); +assert(t.findOne({'array.123a.name': 'old'})); +t.update({'array.123a.name': 'old'}, {$set: {'array.0.123a.name': 'new'}}); +assert(t.findOne({'array.123a.name': 'new'})); +assert(!t.findOne({'array.123a.name': 'old'})); + +// array.0.123a +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'array.name': 1}); +t.insert({'array': [{'123a': {'name': 'old'}}]}); +assert(t.findOne({'array.123a.name': 'old'})); +t.update({'array.123a.name': 'old'}, {$set: {'array.0.123a': {'name': 'new'}}}); +assert(t.findOne({'array.123a.name': 'new'})); +assert(!t.findOne({'array.123a.name': 'old'})); 
+ +// a.0.b +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'a.0.b': 1}); +t.insert({'a': [[{b: 'old'}]]}); +assert(t.findOne({'a.0.0.b': 'old'})); +assert(t.findOne({'a.0.b': 'old'})); +t.update({}, {$set: {'a.0.0.b': 'new'}}); +assert(t.findOne({'a.0.b': 'new'})); +assert(!t.findOne({'a.0.b': 'old'})); + +// a.0.b.c +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'a.0.b.c': 1}); +t.insert({'a': [{b: [{c: 'old'}]}]}); +assert(t.findOne({'a.0.b.0.c': 'old'})); +assert(t.findOne({'a.b.0.c': 'old'})); +assert(t.findOne({'a.0.b.c': 'old'})); +assert(t.findOne({'a.b.c': 'old'})); +t.update({}, {$set: {'a.0.b.0.c': 'new'}}); +assert(t.findOne({'a.0.b.c': 'new'})); +assert(!t.findOne({'a.0.b.c': 'old'})); + +// a.b.$ref +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'a.b.$ref': 1}); +t.insert({'a': [{'b': {'$ref': 'old', '$id': 0}}]}); +assert(t.findOne({'a.b.$ref': 'old'})); +assert(t.findOne({'a.0.b.$ref': 'old'})); +t.update({}, {$set: {'a.0.b.$ref': 'new'}}); +assert(t.findOne({'a.b.$ref': 'new'})); +assert(!t.findOne({'a.b.$ref': 'old'})); + +// a.b and a-b +t = db.jstests_update_arraymatch8; +t.drop(); +t.createIndex({'a.b': 1}); +t.createIndex({'a-b': 1}); +t.insert({'a': {'b': 'old'}}); +assert(t.findOne({'a.b': 'old'})); +t.update({}, {$set: {'a': {'b': 'new'}}}); +assert(t.findOne({'a.b': 'new'})); +assert(!t.findOne({'a.b': 'old'})); diff --git a/jstests/core/write/update/update_bit_examples.js b/jstests/core/write/update/update_bit_examples.js new file mode 100644 index 00000000000..fd94a1b417d --- /dev/null +++ b/jstests/core/write/update/update_bit_examples.js @@ -0,0 +1,44 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [ +// assumes_unsharded_collection, +// requires_non_retryable_writes, +// ] + +// Basic examples for $bit +var res; +var coll = db.update_bit; +coll.drop(); + +// $bit and +coll.remove({}); +coll.save({_id: 1, a: NumberInt(2)}); +res = coll.update({}, {$bit: {a: {and: NumberInt(4)}}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 0); + +// $bit or +coll.remove({}); +coll.save({_id: 1, a: NumberInt(2)}); +res = coll.update({}, {$bit: {a: {or: NumberInt(4)}}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 6); + +// $bit xor +coll.remove({}); +coll.save({_id: 1, a: NumberInt(0)}); +res = coll.update({}, {$bit: {a: {xor: NumberInt(4)}}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 4); + +// SERVER-19706 Empty bit operation. +res = coll.update({}, {$bit: {a: {}}}); +assert.writeError(res); + +// Make sure $bit on index arrays 9 and 10 when padding is needed works. +assert.commandWorked(coll.insert({_id: 2, a: [0]})); +assert.commandWorked( + coll.update({_id: 2}, {$bit: {"a.9": {or: NumberInt(0)}, "a.10": {or: NumberInt(0)}}})); +res = coll.find({_id: 2}).toArray(); +assert.eq(res[0]["a"], [0, null, null, null, null, null, null, null, null, 0, 0]); diff --git a/jstests/core/write/update/update_blank1.js b/jstests/core/write/update/update_blank1.js new file mode 100644 index 00000000000..cd8f7433ebe --- /dev/null +++ b/jstests/core/write/update/update_blank1.js @@ -0,0 +1,19 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +t = db.update_blank1; +t.drop(); + +orig = { + "": 1, + _id: 2, + "a": 3, + "b": 4 +}; +t.insert(orig); +var res = t.update({}, {$set: {"c": 5}}); +print(res); +orig["c"] = 5; +assert.docEq(orig, t.findOne(), "after $set"); // SERVER-2651 diff --git a/jstests/core/write/update/update_currentdate_examples.js b/jstests/core/write/update/update_currentdate_examples.js new file mode 100644 index 00000000000..e8a3da2ea09 --- /dev/null +++ b/jstests/core/write/update/update_currentdate_examples.js @@ -0,0 +1,30 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_non_retryable_writes] + +// Basic examples for $currentDate +var res; +var coll = db.update_currentdate; +coll.drop(); + +// $currentDate default +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$currentDate: {a: true}}); +assert.commandWorked(res); +assert(coll.findOne().a.constructor == Date); + +// $currentDate type = date +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$currentDate: {a: {$type: "date"}}}); +assert.commandWorked(res); +assert(coll.findOne().a.constructor == Date); + +// $currentDate type = timestamp +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$currentDate: {a: {$type: "timestamp"}}}); +assert.commandWorked(res); +assert(coll.findOne().a.constructor == Timestamp); diff --git a/jstests/core/write/update/update_dbref.js b/jstests/core/write/update/update_dbref.js new file mode 100644 index 00000000000..f3e461c7379 --- /dev/null +++ b/jstests/core/write/update/update_dbref.js @@ -0,0 +1,41 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [ +// assumes_unsharded_collection, +// ] + +// Test that we can update DBRefs, but not dbref fields outside a DBRef + +var res; +t = db.jstests_update_dbref; +t.drop(); + +res = t.save({_id: 1, a: new DBRef("a", "b")}); +assert(!res.hasWriteError(), "failed to save dbref"); +assert.docEq({_id: 1, a: new DBRef("a", "b")}, t.findOne()); + +res = t.update({}, {$set: {"a.$id": 2}}); +assert(!res.hasWriteError(), "a.$id update"); +assert.docEq({_id: 1, a: new DBRef("a", 2)}, t.findOne()); + +res = t.update({}, {$set: {"a.$ref": "b"}}); +assert(!res.hasWriteError(), "a.$ref update"); + +assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne()); + +// Bad updates +res = t.update({}, {$set: {"$id": 3}}); +assert.writeError(res); +assert(/\$id/.test(res.getWriteError()), "expected bad update because of $id"); +assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne()); + +res = t.update({}, {$set: {"$ref": "foo"}}); +assert.writeError(res); +assert(/\$ref/.test(res.getWriteError()), "expected bad update because of $ref"); +assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne()); + +res = t.update({}, {$set: {"$db": "aDB"}}); +assert.writeError(res); +assert(/\$db/.test(res.getWriteError()), "expected bad update because of $db"); +assert.docEq({_id: 1, a: new DBRef("b", 2)}, t.findOne()); diff --git a/jstests/core/write/update/update_find_and_modify_id.js b/jstests/core/write/update/update_find_and_modify_id.js new file mode 100644 index 00000000000..f6fd646d087 --- /dev/null +++ b/jstests/core/write/update/update_find_and_modify_id.js @@ -0,0 +1,46 @@ +// SERVER-4516 and SERVER-6913: test that update and findAndModify tolerate +// an _id in the update document, as long as the _id will not be modified +// +// @tags: [requires_fastcount] + +var t = db.jstests_server4516; +var startingDoc = {_id: 1, a: 1}; + +function prepare() { + t.drop(); + t.save(startingDoc); +} + +function update_succeeds(updateDoc, qid, resultDoc) { + prepare(); + t.update({_id: qid}, 
updateDoc, true); + assert.eq(t.findOne({_id: qid}), resultDoc); + + prepare(); + t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true}); + assert.eq(t.findOne({_id: qid}), resultDoc); +} + +update_succeeds({_id: 1, a: 2}, 1, {_id: 1, a: 2}); +update_succeeds({$set: {_id: 1}}, 1, {_id: 1, a: 1}); +update_succeeds({_id: 1, b: "a"}, 1, {_id: 1, b: "a"}); +update_succeeds({_id: 2, a: 3}, 2, {_id: 2, a: 3}); + +function update_fails(updateDoc, qid) { + prepare(); + var res = t.update({_id: qid}, updateDoc, true); + assert.writeError(res); + assert.eq(t.count(), 1); + assert.eq(t.findOne(), startingDoc); + + prepare(); + assert.throws(function() { + t.findAndModify({query: {_id: qid}, update: updateDoc, upsert: true}); + }); + assert.eq(t.count(), 1); + assert.eq(t.findOne(), startingDoc); +} + +update_fails({$set: {_id: 2}}, 1); +update_fails({_id: 2, a: 3}, 1); +update_fails({_id: 2, a: 3}, 3); diff --git a/jstests/core/write/update/update_hint.js b/jstests/core/write/update/update_hint.js new file mode 100644 index 00000000000..f0869073de0 --- /dev/null +++ b/jstests/core/write/update/update_hint.js @@ -0,0 +1,166 @@ +/** + * Tests passing hint to the update command: + * - A bad argument to the hint option should raise an error. + * - The hint option should support both the name of the index, and the object spec of the + * index. + * + * @tags: [assumes_unsharded_collection, requires_multi_updates, requires_non_retryable_writes] + */ + +(function() { +"use strict"; + +load("jstests/libs/analyze_plan.js"); + +function assertCommandUsesIndex(command, expectedHintKeyPattern) { + const out = assert.commandWorked(coll.runCommand({explain: command})); + const planStage = getPlanStage(out, "IXSCAN"); + assert.neq(null, planStage); + assert.eq(planStage.keyPattern, expectedHintKeyPattern, tojson(planStage)); +} + +const coll = db.jstests_update_hint; + +function normalIndexTest() { + // Hint using a key pattern. 
+ coll.drop(); + assert.commandWorked(coll.insert({x: 1, y: 1})); + assert.commandWorked(coll.createIndex({x: 1})); + assert.commandWorked(coll.createIndex({y: -1})); + + // Hint using index key pattern. + let updateCmd = { + update: coll.getName(), + updates: [{q: {x: 1}, u: {$set: {y: 1}}, hint: {x: 1}}] + }; + assertCommandUsesIndex(updateCmd, {x: 1}); + + // Hint using an index name. + updateCmd = {update: coll.getName(), updates: [{q: {x: 1}, u: {$set: {y: 1}}, hint: 'y_-1'}]}; + assertCommandUsesIndex(updateCmd, {y: -1}); + + // Passing a hint should not use the idhack fast-path. + updateCmd = {update: coll.getName(), updates: [{q: {_id: 1}, u: {$set: {y: 1}}, hint: 'y_-1'}]}; + assertCommandUsesIndex(updateCmd, {y: -1}); +} + +function sparseIndexTest() { + // Create a sparse index with 2 documents. + coll.drop(); + assert.commandWorked(coll.insert([{x: 1}, {x: 1}, {x: 1, s: 0}, {x: 1, s: 0}])); + assert.commandWorked(coll.createIndex({x: 1})); + assert.commandWorked(coll.createIndex({s: 1}, {sparse: true})); + + // Hint should be respected, even on incomplete indexes. + let updateCmd = { + update: coll.getName(), + updates: [{q: {_id: 1}, u: {$set: {y: 1}}, hint: {s: 1}}] + }; + assertCommandUsesIndex(updateCmd, {s: 1}); + + // Update hinting a sparse index updates only the document in the sparse index. + updateCmd = { + update: coll.getName(), + updates: [{q: {}, u: {$set: {s: 1}}, hint: {s: 1}, multi: true}] + }; + assert.commandWorked(coll.runCommand(updateCmd)); + assert.eq(2, coll.count({s: 1})); + + // Update hinting a sparse index with upsert option can result in an insert even if the + // correct behaviour would be to update an existing document. 
+ assert.commandWorked(coll.insert({x: 2})); + updateCmd = { + update: coll.getName(), + updates: [{q: {x: 2}, u: {$set: {x: 1}}, hint: {s: 1}, upsert: true}] + }; + let res = assert.commandWorked(coll.runCommand(updateCmd)); + assert.eq(res.upserted.length, 1); +} + +function shellHelpersTest() { + coll.drop(); + assert.commandWorked(coll.insert([{x: 1}, {x: 1, s: 0}, {x: 1, s: 0}])); + assert.commandWorked(coll.createIndex({x: 1})); + assert.commandWorked(coll.createIndex({s: 1}, {sparse: true})); + + // Test shell helpers using a hinted sparse index should only update documents that exist in + // the sparse index. + let res = coll.update({x: 1}, {$set: {y: 2}}, {hint: {s: 1}, multi: true}); + assert.eq(res.nMatched, 2); + + // Insert document that will not be in the sparse index. Update hinting sparse index should + // result in upsert. + assert.commandWorked(coll.insert({x: 2})); + res = coll.updateOne({x: 2}, {$set: {y: 2}}, {hint: {s: 1}, upsert: true}); + assert(res.upsertedId); + res = coll.updateMany({x: 1}, {$set: {y: 2}}, {hint: {s: 1}}); + assert.eq(res.matchedCount, 2); + + // Test bulk writes. 
+ let bulk = coll.initializeUnorderedBulkOp(); + bulk.find({x: 1}).hint({s: 1}).update({$set: {y: 1}}); + res = bulk.execute(); + assert.eq(res.nMatched, 2); + bulk = coll.initializeUnorderedBulkOp(); + bulk.find({x: 2}).hint({s: 1}).upsert().updateOne({$set: {y: 1}}); + res = bulk.execute(); + assert.eq(res.nUpserted, 1); + bulk = coll.initializeUnorderedBulkOp(); + bulk.find({x: 2}).hint({s: 1}).upsert().replaceOne({$set: {y: 1}}); + res = bulk.execute(); + assert.eq(res.nUpserted, 1); + + res = coll.bulkWrite([{ + updateOne: { + filter: {x: 2}, + update: {$set: {y: 2}}, + hint: {s: 1}, + upsert: true, + } + }]); + assert.eq(res.upsertedCount, 1); + + res = coll.bulkWrite([{ + updateMany: { + filter: {x: 1}, + update: {$set: {y: 2}}, + hint: {s: 1}, + } + }]); + assert.eq(res.matchedCount, 2); + + res = coll.bulkWrite([{ + replaceOne: { + filter: {x: 2}, + replacement: {x: 2, y: 3}, + hint: {s: 1}, + upsert: true, + } + }]); + assert.eq(res.upsertedCount, 1); +} + +function failedHintTest() { + coll.drop(); + assert.commandWorked(coll.insert({x: 1})); + assert.commandWorked(coll.createIndex({x: 1})); + + // Command should fail with incorrectly formatted hints. + let updateCmd = {update: coll.getName(), updates: [{q: {_id: 1}, u: {$set: {y: 1}}, hint: 1}]}; + assert.commandFailedWithCode(coll.runCommand(updateCmd), ErrorCodes.FailedToParse); + updateCmd = {update: coll.getName(), updates: [{q: {_id: 1}, u: {$set: {y: 1}}, hint: true}]}; + assert.commandFailedWithCode(coll.runCommand(updateCmd), ErrorCodes.FailedToParse); + + // Command should fail with hints to non-existent indexes. 
+ updateCmd = { + update: coll.getName(), + updates: [{q: {_id: 1}, u: {$set: {y: 1}}, hint: {badHint: 1}}] + }; + assert.commandFailedWithCode(coll.runCommand(updateCmd), ErrorCodes.BadValue); +} + +normalIndexTest(); +sparseIndexTest(); +shellHelpersTest(); +failedHintTest(); +})(); diff --git a/jstests/core/write/update/update_invalid1.js b/jstests/core/write/update/update_invalid1.js new file mode 100644 index 00000000000..bbda4cee53e --- /dev/null +++ b/jstests/core/write/update/update_invalid1.js @@ -0,0 +1,13 @@ +// @tags: [ +// requires_fastcount, +// ] + +t = db.update_invalid1; +t.drop(); + +t.update({_id: 5}, {$set: {$inc: {x: 5}}}, true); + +// From version 5.0 on field names with dots and dollars are enabled and only top-level $-prefixed +// fields are validated. The field '$inc' appears at a lower level than the operator $set, so it is +// accepted by the update validation. +assert.eq(1, t.count(), "A1"); diff --git a/jstests/core/write/update/update_metrics.js b/jstests/core/write/update/update_metrics.js new file mode 100644 index 00000000000..8bf93e5009d --- /dev/null +++ b/jstests/core/write/update/update_metrics.js @@ -0,0 +1,79 @@ +/** + * Tests "metrics.commands.update.pipeline" and "metrics.commands.update.arrayFilters" counters of + * the update command. + * + * @tags: [ + * # The test is designed to work with an unsharded collection. + * assumes_unsharded_collection, + * # The test relies on the precise number of executions of commands. + * requires_non_retryable_writes, + * # This test contains assertions on the number of executed operations, and tenant migrations + * # passthrough suites automatically retry operations on TenantMigrationAborted errors. 
+ * tenant_migration_incompatible, + * ] + */ +(function() { +"use strict"; + +const testDB = db.getSiblingDB(jsTestName()); +assert.commandWorked(testDB.dropDatabase()); +const coll = testDB.update_metrics; +assert.commandWorked(testDB.createCollection(coll.getName())); + +assert.commandWorked(coll.insert([{key: 1, value: 1, array: [5, 10]}])); + +// "Initialize" the counters for the update command. +assert.commandWorked(coll.update({key: 1}, {$set: {value: 0}})); + +let serverStatusBeforeTest = testDB.serverStatus(); + +// Verify that the metrics.commands.update.pipeline counter is present. +assert.gte( + serverStatusBeforeTest.metrics.commands.update.pipeline, 0, tojson(serverStatusBeforeTest)); + +// Verify that the update command without aggregation pipeline-style update does not increment the +// counter. +assert.commandWorked(coll.update({key: 1}, {$set: {value: 5}})); +let serverStatusAfterTest = testDB.serverStatus(); +assert.eq(serverStatusBeforeTest.metrics.commands.update.pipeline, + serverStatusAfterTest.metrics.commands.update.pipeline, + `Before: ${tojson(serverStatusBeforeTest)}, after: ${tojson(serverStatusAfterTest)}`); + +// Verify that the update command with aggregation pipeline-style update increments the counter. +assert.commandWorked(coll.update({key: 1}, [{$set: {value: 10}}])); +serverStatusAfterTest = testDB.serverStatus(); +assert.eq(serverStatusBeforeTest.metrics.commands.update.pipeline + 1, + serverStatusAfterTest.metrics.commands.update.pipeline, + `Before: ${tojson(serverStatusBeforeTest)}, after: ${tojson(serverStatusAfterTest)}`); + +serverStatusBeforeTest = testDB.serverStatus(); + +// Verify that the metrics.commands.update.arrayFilters counter is present. +assert.gte( + serverStatusBeforeTest.metrics.commands.update.arrayFilters, 0, tojson(serverStatusBeforeTest)); + +// Verify that the update command without arrayFilters does not increment the counter. 
+assert.commandWorked(coll.update({key: 1}, {$set: {value: 5}}));
+serverStatusAfterTest = testDB.serverStatus();
+assert.eq(serverStatusBeforeTest.metrics.commands.update.arrayFilters,
+          serverStatusAfterTest.metrics.commands.update.arrayFilters,
+          `Before: ${tojson(serverStatusBeforeTest)}, after: ${tojson(serverStatusAfterTest)}`);
+
+// Verify that an update command with arrayFilters increments the counter.
+assert.commandWorked(coll.update(
+    {key: 1}, {$set: {"array.$[element]": 20}}, {arrayFilters: [{"element": {$gt: 6}}]}));
+serverStatusAfterTest = testDB.serverStatus();
+assert.eq(serverStatusBeforeTest.metrics.commands.update.arrayFilters + 1,
+          serverStatusAfterTest.metrics.commands.update.arrayFilters,
+          `Before: ${tojson(serverStatusBeforeTest)}, after: ${tojson(serverStatusAfterTest)}`);
+
+// Verify that a multi-document update command with arrayFilters increments the counter.
+assert.commandWorked(
+    coll.insert([{key: 2, value: 1, array: [7, 0]}, {key: 3, value: 1, array: [7, 0]}]));
+assert.commandWorked(coll.update(
+    {}, {$set: {"array.$[element]": 20}}, {multi: true, arrayFilters: [{"element": {$gt: 6}}]}));
+serverStatusAfterTest = testDB.serverStatus();
+assert.eq(serverStatusBeforeTest.metrics.commands.update.arrayFilters + 2,
+          serverStatusAfterTest.metrics.commands.update.arrayFilters,
+          `Before: ${tojson(serverStatusBeforeTest)}, after: ${tojson(serverStatusAfterTest)}`);
+})();
diff --git a/jstests/core/write/update/update_min_max_examples.js b/jstests/core/write/update/update_min_max_examples.js
new file mode 100644
index 00000000000..bc5a7302e79
--- /dev/null
+++ b/jstests/core/write/update/update_min_max_examples.js
@@ -0,0 +1,74 @@
+// Basic examples for $min/$max
+(function() {
+"use strict";
+
+let res;
+const coll = db.update_min_max;
+coll.drop();
+
+// $min for number
+coll.insert({_id: 1, a: 2});
+res = coll.update({_id: 1}, {$min: {a: 1}});
+assert.commandWorked(res);
+assert.eq(coll.findOne({_id: 1}).a, 1);
+
+// $max
for number +coll.insert({_id: 2, a: 2}); +res = coll.update({_id: 2}, {$max: {a: 1}}); +assert.commandWorked(res); +assert.eq(coll.findOne({_id: 2}).a, 2); + +// $min for Date +let date = new Date(2021, 19, 11, 12, 29, 31, 757); +coll.insert({_id: 3, a: date}); +// setMilliseconds() will roll over to change seconds if necessary. +date.setMilliseconds(date.getMilliseconds() + 2); +// Test that we have advanced the date and it's no longer the same as the one we inserted. +assert.eq(null, coll.findOne({_id: 3, a: date})); +const origDoc = coll.findOne({_id: 3}); +assert.commandWorked(coll.update({_id: 3}, {$min: {a: date}})); +assert.eq(coll.findOne({_id: 3}).a, origDoc.a); + +// $max for Date +coll.insert({_id: 4, a: date}); +// setMilliseconds() will roll over to change seconds if necessary. +date.setMilliseconds(date.getMilliseconds() + 2); +// Test that we have advanced the date and it's no longer the same as the one we inserted. +assert.eq(null, coll.findOne({_id: 4, a: date})); +res = coll.update({_id: 4}, {$max: {a: date}}); +assert.commandWorked(res); +assert.eq(coll.findOne({_id: 4}).a, date); + +// $max for small number +coll.insert({_id: 5, a: 1e-15}); +// Slightly bigger than 1e-15. 
+const biggerval = 0.000000000000001000000000000001; +res = coll.update({_id: 5}, {$max: {a: biggerval}}); +assert.commandWorked(res); +assert.eq(coll.findOne({_id: 5}).a, biggerval); + +// $min for a small number +coll.insert({_id: 6, a: biggerval}); +res = coll.update({_id: 6}, {$min: {a: 1e-15}}); +assert.commandWorked(res); +assert.eq(coll.findOne({_id: 6}).a, 1e-15); + +// $max with positional operator +let insertdoc = {_id: 7, y: [{a: 2}, {a: 6}, {a: [9, 1, 1]}]}; +coll.insert(insertdoc); +res = coll.update({_id: 7, "y.a": 6}, {$max: {"y.$.a": 7}}); +assert.commandWorked(res); +insertdoc.y[1].a = 7; +assert.docEq(insertdoc, coll.findOne({_id: 7})); + +// $min with positional operator +insertdoc = { + _id: 8, + y: [{a: 2}, {a: 6}, {a: [9, 1, 1]}] +}; +coll.insert(insertdoc); +res = coll.update({_id: 8, "y.a": 6}, {$min: {"y.$.a": 5}}); +assert.commandWorked(res); +insertdoc.y[1].a = 5; +assert.docEq(insertdoc, coll.findOne({_id: 8})); +}()); diff --git a/jstests/core/write/update/update_modifier_pop.js b/jstests/core/write/update/update_modifier_pop.js new file mode 100644 index 00000000000..e5a61c8ea8f --- /dev/null +++ b/jstests/core/write/update/update_modifier_pop.js @@ -0,0 +1,98 @@ +// @tags: [requires_non_retryable_writes] + +(function() { +"use strict"; + +let coll = db.update_modifier_pop; +coll.drop(); + +assert.commandWorked(coll.insert({_id: 0})); + +// $pop with value of 0 fails to parse. +assert.writeErrorWithCode(coll.update({_id: 0}, {$pop: {"a.b": 0}}), ErrorCodes.FailedToParse); + +// $pop with value of -2 fails to parse. +assert.writeErrorWithCode(coll.update({_id: 0}, {$pop: {"a.b": -2}}), ErrorCodes.FailedToParse); + +// $pop with value of 2.5 fails to parse. +assert.writeErrorWithCode(coll.update({_id: 0}, {$pop: {"a.b": 2.5}}), ErrorCodes.FailedToParse); + +// $pop with value of 1.1 fails to parse. 
+assert.writeErrorWithCode(coll.update({_id: 0}, {$pop: {"a.b": 1.1}}), ErrorCodes.FailedToParse); + +// $pop with a nested object fails to parse. +assert.writeErrorWithCode(coll.update({_id: 0}, {$pop: {a: {b: 1}}}), ErrorCodes.FailedToParse); + +// $pop is a no-op when the path does not exist. +let writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.b": 1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 0); + +// $pop is a no-op when the path partially exists. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {c: 1}})); +writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.b": 1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 0); + +// $pop fails when the path is blocked by a scalar element. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {b: 1}})); +assert.writeError(coll.update({_id: 0}, {$pop: {"a.b.c": 1}})); + +// $pop fails when the path is blocked by an array element. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {b: [1, 2]}})); +assert.writeError(coll.update({_id: 0}, {$pop: {"a.b.c": 1}})); + +// $pop fails when the path exists but is not an array. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {b: {c: 1}}})); +assert.writeError(coll.update({_id: 0}, {$pop: {"a.b": 1}})); + +// $pop is a no-op when the path contains an empty array. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {b: []}})); +writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.b": 1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 0); + +// Successfully pop from the end of an array. 
+assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: {b: [1, 2, 3]}})); +writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.b": 1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 1); +assert.eq({_id: 0, a: {b: [1, 2]}}, coll.findOne()); + +// Successfully pop from the beginning of an array. +writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.b": -1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 1); +assert.eq({_id: 0, a: {b: [2]}}, coll.findOne()); + +// $pop with the positional ($) operator. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: [{b: [1, 2, 3]}, {b: [4, 5, 6]}]})); +assert.commandWorked(coll.update({_id: 0, "a.b": 5}, {$pop: {"a.$.b": 1}})); +assert.eq({_id: 0, a: [{b: [1, 2, 3]}, {b: [4, 5]}]}, coll.findOne()); + +// $pop with arrayFilters. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: [{b: [1, 2]}, {b: [4, 5]}, {b: [2, 3]}]})); +assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.$[i].b": -1}}, {arrayFilters: [{"i.b": 2}]})); +assert.eq({_id: 0, a: [{b: [2]}, {b: [4, 5]}, {b: [3]}]}, coll.findOne()); + +// $pop from a nested array. +assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: [1, [2, 3, 4]]})); +assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.1": 1}})); +assert.eq({_id: 0, a: [1, [2, 3]]}, coll.findOne()); + +// $pop is a no-op when array element in path does not exist. 
+assert.commandWorked(coll.remove({})); +assert.commandWorked(coll.insert({_id: 0, a: [{b: 0}, {b: 1}]})); +writeRes = assert.commandWorked(coll.update({_id: 0}, {$pop: {"a.2.b": 1}})); +assert.eq(writeRes.nMatched, 1); +assert.eq(writeRes.nModified, 0); +}()); diff --git a/jstests/core/write/update/update_mul_examples.js b/jstests/core/write/update/update_mul_examples.js new file mode 100644 index 00000000000..89a0ac70532 --- /dev/null +++ b/jstests/core/write/update/update_mul_examples.js @@ -0,0 +1,44 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_non_retryable_writes] + +// Basic examples for $mul (multiply) +var res; +var coll = db.update_mul; +coll.drop(); + +// $mul positive +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$mul: {a: 10}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 20); + +// $mul negative +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$mul: {a: -10}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, -20); + +// $mul zero +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$mul: {a: 0}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 0); + +// $mul decimal +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$mul: {a: 1.1}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, 2.2); + +// $mul negative decimal +coll.remove({}); +coll.save({_id: 1, a: 2}); +res = coll.update({}, {$mul: {a: -0.1}}); +assert.commandWorked(res); +assert.eq(coll.findOne().a, -0.2); diff --git a/jstests/core/write/update/update_multi3.js b/jstests/core/write/update/update_multi3.js new file mode 100644 index 00000000000..4c6769bc65a --- /dev/null +++ b/jstests/core/write/update/update_multi3.js @@ -0,0 +1,23 @@ +// @tags: [requires_multi_updates, 
requires_non_retryable_writes] + +t = db.update_multi3; + +function test(useIndex) { + t.drop(); + + if (useIndex) + t.createIndex({k: 1}); + + for (i = 0; i < 10; i++) { + t.save({_id: i, k: 'x', a: []}); + } + + t.update({k: 'x'}, {$push: {a: 'y'}}, false, true); + + t.find({k: "x"}).forEach(function(z) { + assert.eq(["y"], z.a, "useIndex: " + useIndex); + }); +} + +test(false); +test(true); diff --git a/jstests/core/write/update/update_multi4.js b/jstests/core/write/update/update_multi4.js new file mode 100644 index 00000000000..cfe11616efc --- /dev/null +++ b/jstests/core/write/update/update_multi4.js @@ -0,0 +1,17 @@ +// @tags: [requires_multi_updates, requires_non_retryable_writes] + +t = db.update_mulit4; +t.drop(); + +for (i = 0; i < 1000; i++) { + t.insert({_id: i, k: i % 12, v: "v" + i % 12}); +} + +t.createIndex({k: 1}); + +assert.eq(84, t.count({k: 2, v: "v2"}), "A0"); + +t.update({k: 2}, {$set: {v: "two v2"}}, false, true); + +assert.eq(0, t.count({k: 2, v: "v2"}), "A1"); +assert.eq(84, t.count({k: 2, v: "two v2"}), "A2"); diff --git a/jstests/core/write/update/update_multi5.js b/jstests/core/write/update/update_multi5.js new file mode 100644 index 00000000000..683b4426874 --- /dev/null +++ b/jstests/core/write/update/update_multi5.js @@ -0,0 +1,20 @@ +// @tags: [assumes_balancer_off, requires_multi_updates, requires_non_retryable_writes] + +// tests that $addToSet works in a multi-update. 
+(function() { +"use strict"; +var t = db.update_multi5; +t.drop(); + +assert.commandWorked(t.insert({path: 'r1', subscribers: [1, 2]})); +assert.commandWorked(t.insert({path: 'r2', subscribers: [3, 4]})); + +var res = + assert.commandWorked(t.update({}, {$addToSet: {subscribers: 5}}, {upsert: false, multi: true})); + +assert.eq(res.nMatched, 2, tojson(res)); + +t.find().forEach(function(z) { + assert.eq(3, z.subscribers.length, tojson(z)); +}); +})(); diff --git a/jstests/core/write/update/update_multi6.js b/jstests/core/write/update/update_multi6.js new file mode 100644 index 00000000000..023398534cd --- /dev/null +++ b/jstests/core/write/update/update_multi6.js @@ -0,0 +1,12 @@ +// @tags: [requires_non_retryable_writes] + +var res; + +t = db.update_multi6; +t.drop(); + +t.update({_id: 1}, {_id: 1, x: 1, y: 2}, true, false); +assert(t.findOne({_id: 1}), "A"); + +res = t.update({_id: 2}, {_id: 2, x: 1, y: 2}, true, true); +assert.writeError(res); diff --git a/jstests/core/write/update/update_numeric_field_name.js b/jstests/core/write/update/update_numeric_field_name.js new file mode 100644 index 00000000000..fbc89da7fda --- /dev/null +++ b/jstests/core/write/update/update_numeric_field_name.js @@ -0,0 +1,32 @@ +// Test that update operations correctly fail if they violate the "ambiguous field name in array" +// constraint for indexes. This is designed to reproduce SERVER-37058. +// @tags: [ +// uses_full_validation, +// ] +(function() { +"use strict"; + +const coll = db.update_numeric_field_name; +coll.drop(); + +assert.commandWorked(coll.insert({_id: 0, 'a': [{}]})); +assert.commandWorked(coll.createIndex({'a.0.c': 1})); + +// Attempt to insert a field name '0'. The first '0' refers to the first element of the array +// 'a'. +assert.commandFailedWithCode(coll.update({_id: 0}, {$set: {'a.0.0': 1}}), 16746); + +// Verify that the indexes were not affected. 
+let res = assert.commandWorked(coll.validate({full: true})); +assert(res.valid, tojson(res)); + +assert.commandFailedWithCode(coll.update({_id: 0}, {$set: {'a.0.0.b': 1}}), 16746); +res = assert.commandWorked(coll.validate({full: true})); +assert(res.valid, tojson(res)); + +// An update which does not violate the ambiguous field name in array constraint should succeed. +assert.commandWorked(coll.update({_id: 0}, {$set: {'a.1.b.0.0': 1}})); + +res = assert.commandWorked(coll.validate({full: true})); +assert(res.valid, tojson(res)); +})(); diff --git a/jstests/core/write/update/update_pipeline_shell_helpers.js b/jstests/core/write/update/update_pipeline_shell_helpers.js new file mode 100644 index 00000000000..0ac1b35f4ce --- /dev/null +++ b/jstests/core/write/update/update_pipeline_shell_helpers.js @@ -0,0 +1,133 @@ +/** + * Tests that each of the update shell helpers correctly validates pipeline-style update operations. + * + * @tags: [ + * assumes_write_concern_unchanged, + * requires_multi_updates, + * requires_non_retryable_writes, + * ] + */ +(function() { +"use strict"; + +load("jstests/aggregation/extras/utils.js"); // For 'arrayEq'. +load("jstests/libs/analyze_plan.js"); // For planHasStage(). +load("jstests/libs/fixture_helpers.js"); // For isMongos(). + +// Make sure that the test collection is empty before starting the test. +const testColl = db.update_pipeline_shell_helpers_test; +testColl.drop(); + +// Insert some test documents. +assert.commandWorked(testColl.insert({_id: 1, a: 1, b: 2})); +assert.commandWorked(testColl.insert({_id: 2, a: 2, b: 3})); + +// Test that each of the update shell helpers permits pipeline-style updates. 
+assert.commandWorked(testColl.update({_id: 1}, [{$set: {update: true}}])); +assert.commandWorked(testColl.update({}, [{$set: {updateMulti: true}}], {multi: true})); +assert.commandWorked(testColl.updateOne({_id: 1}, [{$set: {updateOne: true}}])); +assert.commandWorked(testColl.updateMany({}, [{$set: {updateMany: true}}])); +assert.commandWorked(testColl.bulkWrite([ + {updateOne: {filter: {_id: 1}, update: [{$set: {bulkWriteUpdateOne: true}}]}}, + {updateMany: {filter: {}, update: [{$set: {bulkWriteUpdateMany: true}}]}} +])); + +// Test that each of the Bulk API update functions correctly handle pipeline syntax. +const unorderedBulkOp = testColl.initializeUnorderedBulkOp(); +const orderedBulkOp = testColl.initializeOrderedBulkOp(); + +unorderedBulkOp.find({_id: 1}).updateOne([{$set: {unorderedBulkOpUpdateOne: true}}]); +unorderedBulkOp.find({}).update([{$set: {unorderedBulkOpUpdateMulti: true}}]); +orderedBulkOp.find({_id: 1}).updateOne([{$set: {orderedBulkOpUpdateOne: true}}]); +orderedBulkOp.find({}).update([{$set: {orderedBulkOpUpdateMulti: true}}]); +assert.commandWorked(unorderedBulkOp.execute()); +assert.commandWorked(orderedBulkOp.execute()); + +// Verify that the results of the various update operations are as expected. +const observedResults = testColl.find().toArray(); +const expectedResults = [ + { + _id: 1, + a: 1, + b: 2, + update: true, + updateMulti: true, + updateOne: true, + updateMany: true, + bulkWriteUpdateOne: true, + bulkWriteUpdateMany: true, + unorderedBulkOpUpdateOne: true, + unorderedBulkOpUpdateMulti: true, + orderedBulkOpUpdateOne: true, + orderedBulkOpUpdateMulti: true + }, + { + _id: 2, + a: 2, + b: 3, + updateMulti: true, + updateMany: true, + bulkWriteUpdateMany: true, + unorderedBulkOpUpdateMulti: true, + orderedBulkOpUpdateMulti: true + } +]; +assert(arrayEq(observedResults, expectedResults)); + +// Test that findAndModify and associated helpers correctly handle pipeline syntax. 
+const expectedFindAndModifyPostImage = Object.merge(expectedResults[0], {findAndModify: true}); +const expectedFindOneAndUpdatePostImage = + Object.merge(expectedFindAndModifyPostImage, {findOneAndUpdate: true}); +const findAndModifyPostImage = + testColl.findAndModify({query: {_id: 1}, update: [{$set: {findAndModify: true}}], new: true}); +assert.docEq(expectedFindAndModifyPostImage, findAndModifyPostImage); +const findOneAndUpdatePostImage = testColl.findOneAndUpdate( + {_id: 1}, [{$set: {findOneAndUpdate: true}}], {returnNewDocument: true}); +assert.docEq(expectedFindOneAndUpdatePostImage, findOneAndUpdatePostImage); + +// +// Explain for updates that use an _id lookup query. +// +{ + let explain = testColl.explain("queryPlanner").update({_id: 2}, [{$set: {y: 999}}]); + assert(planHasStage(db, explain.queryPlanner.winningPlan, "IDHACK")); + assert(planHasStage(db, explain.queryPlanner.winningPlan, "UPDATE")); + + // Run explain with execution-level verbosity. + explain = testColl.explain("executionStats").update({_id: 2}, [{$set: {y: 999}}]); + assert.eq(explain.executionStats.totalDocsExamined, 1, explain); + // UPDATE stage would modify one document. + const updateStage = getPlanStage(explain.executionStats.executionStages, "UPDATE"); + assert.eq(updateStage.nWouldModify, 1); + + // Check that no write was performed. + assert.eq(testColl.find({y: 999}).count(), 0); +} + +// +// Explain for updates that use a query which requires a COLLSCAN. +// + +// We skip these tests under sharded fixtures, since sharded passthroughs require that FAM queries +// contain the shard key. +if (!FixtureHelpers.isMongos(db)) { + let explain = testColl.explain("queryPlanner").update({a: 2}, [{$set: {y: 999}}]); + assert(planHasStage(db, explain.queryPlanner.winningPlan, "COLLSCAN")); + assert(planHasStage(db, explain.queryPlanner.winningPlan, "UPDATE")); + + // Run explain with execution-level verbosity. 
+ explain = testColl.explain("executionStats").update({a: 2}, [{$set: {y: 999}}]); + // UPDATE stage would modify one document. + const updateStage = getPlanStage(explain.executionStats.executionStages, "UPDATE"); + assert.eq(updateStage.nWouldModify, 1); + + // Check that no write was performed. + assert.eq(testColl.find({y: 999}).count(), 0); +} + +// Shell helpers for replacement updates should reject pipeline-style updates. +assert.throws(() => testColl.replaceOne({_id: 1}, [{$replaceWith: {}}])); +assert.throws(() => testColl.findOneAndReplace({_id: 1}, [{$replaceWith: {}}])); +assert.throws(() => testColl.bulkWrite( + [{replaceOne: {filter: {_id: 1}, replacement: [{$replaceWith: {}}]}}])); +})(); diff --git a/jstests/core/write/update/update_server-12848.js b/jstests/core/write/update/update_server-12848.js new file mode 100644 index 00000000000..9f14feb09cd --- /dev/null +++ b/jstests/core/write/update/update_server-12848.js @@ -0,0 +1,20 @@ +// In SERVER-12848, it was shown that validation fails for certain types of updates +// because the validate code was attempting to validate field names in array. Mutable +// doesn't offer guarantees about the examined field name of array elements, only of the +// field name of array elements when serialized. This is a regression test to +// check that the new validation logic doesn't attempt to validate field names. 
+ +var res; +var t = db.update_server_12848; +t.drop(); + +var orig = {"_id": 1, "a": [1, []]}; +res = t.insert(orig); +assert.commandWorked(res, "insert"); +assert.eq(orig, t.findOne()); + +res = t.update({"_id": 1}, {$addToSet: {"a.1": 1}}); +assert.commandWorked(res, "update"); + +var updated = {"_id": 1, "a": [1, [1]]}; +assert.eq(updated, t.findOne()); diff --git a/jstests/core/write/update/update_setOnInsert.js b/jstests/core/write/update/update_setOnInsert.js new file mode 100644 index 00000000000..9457c69f325 --- /dev/null +++ b/jstests/core/write/update/update_setOnInsert.js @@ -0,0 +1,33 @@ +// This tests that $setOnInsert works and allow setting the _id +t = db.update_setOnInsert; +var res; + +function dotest(useIndex) { + t.drop(); + if (useIndex) { + t.createIndex({a: 1}); + } + + t.update({_id: 5}, {$inc: {x: 2}, $setOnInsert: {a: 3}}, true); + assert.docEq({_id: 5, a: 3, x: 2}, t.findOne()); + + t.update({_id: 5}, {$set: {a: 4}}, true); + + t.update({_id: 5}, {$inc: {x: 2}, $setOnInsert: {a: 3}}, true); + assert.docEq({_id: 5, a: 4, x: 4}, t.findOne()); +} + +dotest(false); +dotest(true); + +// Cases for SERVER-9958 -- Allow _id $setOnInsert during insert (if upsert:true, and not doc found) +t.drop(); + +res = t.update({_id: 1}, {$setOnInsert: {"_id.a": new Date()}}, true); +assert.writeError(res, "$setOnInsert _id.a worked"); + +res = t.update({"_id.a": 4}, {$setOnInsert: {"_id.b": 1}}, true); +assert.writeError(res, "$setOnInsert _id.a/b worked"); + +res = t.update({"_id.a": 4}, {$setOnInsert: {"_id": {a: 4, b: 1}}}, true); +assert.writeError(res, "$setOnInsert _id.a/a+b worked"); diff --git a/jstests/core/write/update/update_with_large_hint.js b/jstests/core/write/update/update_with_large_hint.js new file mode 100644 index 00000000000..0b2521337ce --- /dev/null +++ b/jstests/core/write/update/update_with_large_hint.js @@ -0,0 +1,36 @@ +// Test that write size estimation in mongos respects 'hint' field. 
+// @tags: [ +// requires_sharding, +// ] +(function() { +"use strict"; + +const coll = db.update_with_large_hint; +coll.drop(); + +const longHint = "x".repeat(1000); +assert.commandWorked(coll.createIndex({[longHint]: 1})); +assert.commandWorked(coll.insert({_id: 0})); + +assert.commandWorked(coll.runCommand("update", { + updates: [{q: {_id: 0}, u: {$set: {x: 1}}, hint: {[longHint]: 1}}], +})); + +assert.commandWorked(coll.runCommand("delete", { + deletes: [{q: {_id: 0}, limit: 1, hint: {[longHint]: 1}}], +})); + +assert.commandWorked(coll.dropIndexes()); +assert.commandWorked(coll.insert({_id: 0})); + +// Both commands should fail because hint does not correspond to the existing index. +assert.commandFailedWithCode(coll.runCommand("update", { + updates: [{q: {_id: 0}, u: {$set: {x: 1}}, hint: {[longHint]: 1}}], +}), + ErrorCodes.BadValue); + +assert.commandFailedWithCode(coll.runCommand("delete", { + deletes: [{q: {_id: 0}, limit: 1, hint: {[longHint]: 1}}], +}), + ErrorCodes.BadValue); +}());
\ No newline at end of file
diff --git a/jstests/core/write/update/update_with_pipeline.js b/jstests/core/write/update/update_with_pipeline.js
new file mode 100644
index 00000000000..b5366bfbc5a
--- /dev/null
+++ b/jstests/core/write/update/update_with_pipeline.js
@@ -0,0 +1,296 @@
+/**
+ * Tests execution of pipeline-style update.
+ * @tags: [
+ * requires_multi_updates,
+ * requires_non_retryable_writes,
+ * ]
+ */
+(function() {
+"use strict";
+
+load("jstests/libs/fixture_helpers.js");  // For FixtureHelpers.
+
+const collName = "update_with_pipeline";
+const coll = db[collName];
+
+assert.commandWorked(coll.createIndex({x: 1}));
+assert.commandWorked(coll.createIndex({"y.$**": 1}));
+
+/**
+ * Confirms that an update returns the expected set of documents. 'nModified' documents from
+ * 'resultDocList' must match. 'nModified' may be smaller than the number of elements in
+ * 'resultDocList'. This allows for the case where there are multiple documents that could be
+ * updated, but only one is actually updated due to a 'multi: false' argument. Constant values
+ * to the update command are passed in the 'constants' argument.
+ */ +function testUpdate({ + query, + initialDocumentList, + update, + resultDocList, + nModified, + options = {}, + constants = undefined +}) { + assert.eq(initialDocumentList.length, resultDocList.length); + assert.commandWorked(coll.remove({})); + assert.commandWorked(coll.insert(initialDocumentList)); + const upd = Object.assign({q: query, u: update}, options); + if (constants !== undefined) { + upd.c = constants; + } + const res = assert.commandWorked(db.runCommand({update: collName, updates: [upd]})); + assert.eq(nModified, res.nModified); + + let nMatched = 0; + for (let i = 0; i < resultDocList.length; ++i) { + if (0 === bsonWoCompare(coll.findOne(resultDocList[i]), resultDocList[i])) { + ++nMatched; + } + } + assert.eq(nModified, nMatched, `actual=${coll.find().toArray()}, expected=${resultDocList}`); +} + +function testUpsertDoesInsert(query, update, resultDoc) { + assert.commandWorked(coll.remove({})); + assert.commandWorked(coll.update(query, update, {upsert: true})); + assert.eq(coll.findOne({}), resultDoc, coll.find({}).toArray()); +} + +// This can be used to make sure pipeline-based updates generate delta oplog entries. +const largeStr = "x".repeat(1000); + +// Update with existing document. 
+testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1, largeStr: largeStr}], + update: [{$set: {foo: 4}}], + resultDocList: [{_id: 1, x: 1, largeStr: largeStr, foo: 4}], + nModified: 1 +}); +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1, y: 1}], + update: [{$project: {x: 1}}], + resultDocList: [{_id: 1, x: 1}], + nModified: 1 +}); +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1, y: [{z: 1, foo: 1}], largeStr: largeStr}], + update: [{$unset: ["x", "y.z"]}], + resultDocList: [{_id: 1, y: [{foo: 1}], largeStr: largeStr}], + nModified: 1 +}); +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1, t: {u: {v: 1}, largeStr: largeStr}}], + update: [{$replaceWith: "$t"}], + resultDocList: [{_id: 1, u: {v: 1}, largeStr: largeStr}], + nModified: 1 +}); + +// Multi-update. +testUpdate({ + query: {x: 1}, + initialDocumentList: [{_id: 1, x: 1, largeStr: largeStr}, {_id: 2, x: 1, largeStr: largeStr}], + update: [{$set: {bar: 4}}], + resultDocList: + [{_id: 1, x: 1, largeStr: largeStr, bar: 4}, {_id: 2, x: 1, largeStr: largeStr, bar: 4}], + nModified: 2, + options: {multi: true} +}); + +// This test will fail in a sharded cluster when the 2 initial documents live on different +// shards. +if (!FixtureHelpers.isMongos(db)) { + testUpdate({ + query: {_id: {$in: [1, 2]}}, + initialDocumentList: [{_id: 1, x: 1}, {_id: 2, x: 2}], + update: [{$set: {bar: 4}}], + resultDocList: [{_id: 1, x: 1, bar: 4}, {_id: 2, x: 2, bar: 4}], + nModified: 1, + options: {multi: false} + }); +} + +// Upsert performs insert. 
+testUpsertDoesInsert({_id: 1, x: 1}, [{$set: {foo: 4}}], {_id: 1, x: 1, foo: 4}); +testUpsertDoesInsert({_id: 1, x: 1}, [{$project: {x: 1}}], {_id: 1, x: 1}); +testUpsertDoesInsert({_id: 1, x: 1}, [{$project: {x: "foo"}}], {_id: 1, x: "foo"}); +testUpsertDoesInsert({_id: 1, x: 1, y: 1}, [{$unset: ["x"]}], {_id: 1, y: 1}); + +// Upsert with 'upsertSupplied' inserts the given document and populates _id from the query. +assert.commandWorked(db.runCommand({ + update: coll.getName(), + updates: [{ + q: {_id: "supplied_doc"}, + u: [{$set: {x: 1}}], + upsert: true, + upsertSupplied: true, + c: {new: {suppliedDoc: true}} + }] +})); +assert(coll.findOne({_id: "supplied_doc", suppliedDoc: true})); + +// Update with 'upsertSupplied:true' fails if 'upsert' is false. +assert.commandFailedWithCode(db.runCommand({ + update: coll.getName(), + updates: [{ + q: {_id: "supplied_doc"}, + u: [{$set: {x: 1}}], + upsert: false, + upsertSupplied: true, + c: {new: {suppliedDoc: true}} + }] +}), + ErrorCodes.FailedToParse); + +// Upsert with 'upsertSupplied' fails if no constants are provided. +assert.commandFailedWithCode(db.runCommand({ + update: coll.getName(), + updates: [{q: {_id: "supplied_doc"}, u: [{$set: {x: 1}}], upsert: true, upsertSupplied: true}] +}), + ErrorCodes.FailedToParse); + +// Upsert with 'upsertSupplied' fails if constants do not include a field called 'new'. +assert.commandFailedWithCode(db.runCommand({ + update: coll.getName(), + updates: + [{q: {_id: "supplied_doc"}, u: [{$set: {x: 1}}], upsert: true, upsertSupplied: true, c: {}}] +}), + ErrorCodes.FailedToParse); + +// Upsert with 'upsertSupplied' fails if c.new is not an object. +assert.commandFailedWithCode(db.runCommand({ + update: coll.getName(), + updates: [{ + q: {_id: "supplied_doc"}, + u: [{$set: {x: 1}}], + upsert: true, + upsertSupplied: true, + c: {new: "string"} + }] +}), + ErrorCodes.FailedToParse); + +// Update fails when invalid stage is specified. 
This is a sanity check rather than an exhaustive +// test of all stages. + +// Prime the collection with a matching document for each of the following test cases in the case of +// an updateOne without shard key since mongos does not validate the specified stage upfront, and no +// matching documents would just return a no-op OK status instead of an error code. +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode(coll.update({x: 1}, [{$match: {x: 1}}]), ErrorCodes.InvalidOptions); + +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode(coll.update({x: 1}, [{$sort: {x: 1}}]), ErrorCodes.InvalidOptions); + +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode(coll.update({x: 1}, [{$facet: {a: [{$match: {x: 1}}]}}]), + ErrorCodes.InvalidOptions); + +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode( + coll.update( + {x: 1}, [{ + $bucket: {groupBy: "$a", boundaries: [0, 1], default: "foo", output: {count: {$sum: 1}}} + }]), + ErrorCodes.InvalidOptions); + +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode( + coll.update({x: 1}, [{$lookup: {from: "foo", as: "as", localField: "a", foreignField: "b"}}]), + ErrorCodes.InvalidOptions); + +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode( + coll.update( + {x: 1}, [{ + $graphLookup: + {from: "foo", startWith: "$a", connectFromField: "a", connectToField: "b", as: "as"} + }]), + ErrorCodes.InvalidOptions); + +// $indexStats is not supported in a transaction passthrough and will fail with a different error. +assert.commandWorked(coll.insert({x: 1, z: 1})); +assert.commandFailedWithCode( + coll.update({x: 1}, [{$indexStats: {}}]), + [ErrorCodes.InvalidOptions, ErrorCodes.OperationNotSupportedInTransaction]); + +// Update fails when supported agg stage is specified outside of pipeline. 
+assert.commandFailedWithCode(coll.update({_id: 1}, {$addFields: {x: 1}}), ErrorCodes.FailedToParse); + +// The 'arrayFilters' option is not valid for pipeline updates. +assert.commandFailedWithCode( + coll.update({_id: 1}, [{$set: {x: 1}}], {arrayFilters: [{x: {$eq: 1}}]}), + ErrorCodes.FailedToParse); + +// Constants can be specified with pipeline-style updates. +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1}], + useUpdateCommand: true, + constants: {foo: "bar"}, + update: [{$set: {foo: "$$foo"}}], + resultDocList: [{_id: 1, x: 1, foo: "bar"}], + nModified: 1 +}); +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1}], + useUpdateCommand: true, + constants: {foo: {a: {b: {c: "bar"}}}}, + update: [{$set: {foo: "$$foo"}}], + resultDocList: [{_id: 1, x: 1, foo: {a: {b: {c: "bar"}}}}], + nModified: 1 +}); +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1}], + useUpdateCommand: true, + constants: {foo: [1, 2, 3]}, + update: [{$set: {foo: {$arrayElemAt: ["$$foo", 2]}}}], + resultDocList: [{_id: 1, x: 1, foo: 3}], + nModified: 1 +}); + +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1}], + useUpdateCommand: true, + constants: {largeStr: largeStr}, + update: [{$set: {foo: "$$largeStr"}}], + resultDocList: [{_id: 1, x: 1, foo: largeStr}], + nModified: 1 +}); + +// References to document fields are not resolved in constants. +testUpdate({ + query: {_id: 1}, + initialDocumentList: [{_id: 1, x: 1}], + useUpdateCommand: true, + constants: {foo: "$x"}, + update: [{$set: {foo: "$$foo"}}], + resultDocList: [{_id: 1, x: 1, foo: "$x"}], + nModified: 1 +}); + +// Test that expressions within constants are treated as field names instead of expressions. 
+db.runCommand({ + update: collName, + updates: [{q: {_id: 1}, u: [{$set: {x: "$$foo"}}], c: {foo: {$add: [1, 2]}}}] +}); +assert.eq([{_id: 1, x: {$add: [1, 2]}, foo: "$x"}], coll.find({_id: 1}).toArray()); + +// Cannot use constants with regular updates. +assert.commandFailedWithCode( + db.runCommand({update: collName, updates: [{q: {_id: 1}, u: {x: "$$foo"}, c: {foo: "bar"}}]}), + 51198); +assert.commandFailedWithCode( + db.runCommand( + {update: collName, updates: [{q: {_id: 1}, u: {$set: {x: "$$foo"}}, c: {foo: "bar"}}]}), + 51198); +assert.commandFailedWithCode( + db.runCommand({update: collName, updates: [{q: {_id: 1}, u: {$set: {x: "1"}}, c: {}}]}), 51198); +})(); diff --git a/jstests/core/write/update/updatea.js b/jstests/core/write/update/updatea.js new file mode 100644 index 00000000000..99938c433fa --- /dev/null +++ b/jstests/core/write/update/updatea.js @@ -0,0 +1,77 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +var res; +t = db.updatea; +t.drop(); + +orig = { + _id: 1, + a: [{x: 1, y: 2}, {x: 10, y: 11}] +}; + +res = t.save(orig); +assert.commandWorked(res); + +// SERVER-181 +res = t.update({}, {$set: {"a.0.x": 3}}); +assert.commandWorked(res); +orig.a[0].x = 3; +assert.eq(orig, t.findOne(), "A1"); + +res = t.update({}, {$set: {"a.1.z": 17}}); +assert.commandWorked(res); +orig.a[1].z = 17; +assert.eq(orig, t.findOne(), "A2"); + +// SERVER-273 +res = t.update({}, {$unset: {"a.1.y": 1}}); +assert.commandWorked(res); +delete orig.a[1].y; +assert.eq(orig, t.findOne(), "A3"); + +// SERVER-333 +t.drop(); +orig = { + _id: 1, + comments: [{name: "blah", rate_up: 0, rate_ups: []}] +}; +res = t.save(orig); +assert.commandWorked(res); + +res = t.update({}, {$inc: {"comments.0.rate_up": 1}, $push: {"comments.0.rate_ups": 99}}); +assert.commandWorked(res); +orig.comments[0].rate_up++; +orig.comments[0].rate_ups.push(99); +assert.eq(orig, t.findOne(), "B1"); + +t.drop(); +orig = { + _id: 1, + a: [] +}; +for (i = 0; i < 12; i++) + orig.a.push(i); + +res = t.save(orig); +assert.commandWorked(res); +assert.eq(orig, t.findOne(), "C1"); + +res = t.update({}, {$inc: {"a.0": 1}}); +assert.commandWorked(res); +orig.a[0]++; +assert.eq(orig, t.findOne(), "C2"); + +res = t.update({}, {$inc: {"a.10": 1}}); +assert.commandWorked(res); +orig.a[10]++; + +// SERVER-3218 +t.drop(); +t.insert({"a": {"c00": 1}, 'c': 2}); +res = t.update({"c": 2}, {'$inc': {'a.c000': 1}}); +assert.commandWorked(res); + +assert.eq({"c00": 1, "c000": 1}, t.findOne().a, "D1"); diff --git a/jstests/core/write/update/updateb.js b/jstests/core/write/update/updateb.js new file mode 100644 index 00000000000..1518e7f3546 --- /dev/null +++ b/jstests/core/write/update/updateb.js @@ -0,0 +1,17 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +t = db.updateb; +t.drop(); + +t.update({"x.y": 2}, {$inc: {a: 7}}, true); + +correct = { + a: 7, + x: {y: 2} +}; +got = t.findOne(); +delete got._id; +assert.docEq(correct, got, "A"); diff --git a/jstests/core/write/update/updatec.js b/jstests/core/write/update/updatec.js new file mode 100644 index 00000000000..8ce8cf4ecdd --- /dev/null +++ b/jstests/core/write/update/updatec.js @@ -0,0 +1,8 @@ + +t = db.updatec; +t.drop(); + +t.update({"_id": 123}, {$set: {"v": {"i": 123, "a": 456}}, $push: {"f": 234}}, 1, 0); +t.update({"_id": 123}, {$set: {"v": {"i": 123, "a": 456}}, $push: {"f": 234}}, 1, 0); + +assert.docEq({"_id": 123, "f": [234, 234], "v": {"i": 123, "a": 456}}, t.findOne()); diff --git a/jstests/core/write/update/updated.js b/jstests/core/write/update/updated.js new file mode 100644 index 00000000000..919d02610c7 --- /dev/null +++ b/jstests/core/write/update/updated.js @@ -0,0 +1,30 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. 
+// @tags: [assumes_unsharded_collection] + +t = db.updated; +t.drop(); + +o = { + _id: Math.random(), + items: [null, null, null, null] +}; + +t.insert(o); +assert.docEq(o, t.findOne(), "A1"); + +o.items[0] = { + amount: 9000, + itemId: 1 +}; +t.update({}, {$set: {"items.0": o.items[0]}}); +assert.docEq(o, t.findOne(), "A2"); + +o.items[0].amount += 1000; +o.items[1] = { + amount: 1, + itemId: 2 +}; +t.update({}, {$inc: {"items.0.amount": 1000}, $set: {"items.1": o.items[1]}}); +assert.docEq(o, t.findOne(), "A3"); diff --git a/jstests/core/write/update/updatee.js b/jstests/core/write/update/updatee.js new file mode 100644 index 00000000000..fbbcac01c9a --- /dev/null +++ b/jstests/core/write/update/updatee.js @@ -0,0 +1,77 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +// big numeric updates (used to overflow) + +t = db.updatee; +t.drop(); + +var o = { + "_id": 1, + "actual": { + "key1": "val1", + "key2": "val2", + "001": "val3", + "002": "val4", + "0020000000000000000000": "val5" + }, + "profile-id": "test" +}; + +t.insert(o); +assert.eq(o, t.findOne(), "A1"); + +t.update({"profile-id": "test"}, {$set: {"actual.0030000000000000000000": "val6"}}); + +var q = t.findOne(); + +// server-1347 +assert.eq(q.actual["0020000000000000000000"], "val5", "A2"); +assert.eq(q.actual["0030000000000000000000"], "val6", "A3"); + +t.update({"profile-id": "test"}, {$set: {"actual.02": "v4"}}); + +q = t.findOne(); +assert.eq(q.actual["02"], "v4", "A4"); +assert.eq(q.actual["002"], "val4", "A5"); + +t.update({"_id": 1}, {$set: {"actual.2139043290148390248219423941.b": 4}}); +q = t.findOne(); +assert.eq(q.actual["2139043290148390248219423941"].b, 4, "A6"); + +// non-nested +t.update({"_id": 1}, {$set: {"7213647182934612837492342341": 1}}); +t.update({"_id": 1}, {$set: 
{"7213647182934612837492342342": 2}}); + +q = t.findOne(); +assert.eq(q["7213647182934612837492342341"], 1, "A7 1"); +assert.eq(q["7213647182934612837492342342"], 2, "A7 2"); + +// 0s +t.update({"_id": 1}, {$set: {"actual.000": "val000"}}); +q = t.findOne(); +assert.eq(q.actual["000"], "val000", "A8 zeros"); + +t.update({"_id": 1}, {$set: {"actual.00": "val00"}}); +q = t.findOne(); +assert.eq(q.actual["00"], "val00", "A8 00"); +assert.eq(q.actual["000"], "val000", "A9"); + +t.update({"_id": 1}, {$set: {"actual.000": "val000"}}); +q = t.findOne(); +assert.eq(q.actual["000"], "val000", "A9"); +assert.eq(q.actual["00"], "val00", "A10"); + +t.update({"_id": 1}, {$set: {"actual.01": "val01"}}); +q = t.findOne(); +assert.eq(q.actual["000"], "val000", "A11"); +assert.eq(q.actual["01"], "val01", "A12"); + +// shouldn't work, but shouldn't do anything too heinous, either +t.update({"_id": 1}, {$set: {"0..": "val01"}}); +t.update({"_id": 1}, {$set: {"0..0": "val01"}}); +t.update({"_id": 1}, {$set: {".0": "val01"}}); +t.update({"_id": 1}, {$set: {"..0": "val01"}}); +t.update({"_id": 1}, {$set: {"0.0..0": "val01"}}); diff --git a/jstests/core/write/update/updatef.js b/jstests/core/write/update/updatef.js new file mode 100644 index 00000000000..6597484f78a --- /dev/null +++ b/jstests/core/write/update/updatef.js @@ -0,0 +1,31 @@ +// @tags: [ +// requires_non_retryable_commands, +// requires_non_retryable_writes, +// uses_multiple_connections, +// uses_parallel_shell, +// ] + +// Test unsafe management of nsdt on update command yield SERVER-3208 + +prefixNS = db.jstests_updatef; +prefixNS.save({}); + +t = db.jstests_updatef_actual; +t.drop(); + +t.save({a: 0, b: []}); +for (i = 0; i < 1000; ++i) { + t.save({a: 100}); +} +t.save({a: 0, b: []}); + +// Repeatedly rename jstests_updatef to jstests_updatef_ and back. This will +// invalidate the jstests_updatef_actual NamespaceDetailsTransient object. 
+s = startParallelShell( + "for( i=0; i < 100; ++i ) { db.jstests_updatef.renameCollection( 'jstests_updatef_' ); db.jstests_updatef_.renameCollection( 'jstests_updatef' ); }"); + +for (i = 0; i < 20; ++i) { + t.update({a: 0}, {$push: {b: i}}, false, true); +} + +s(); diff --git a/jstests/core/write/update/updateg.js b/jstests/core/write/update/updateg.js new file mode 100644 index 00000000000..8a849a0ce59 --- /dev/null +++ b/jstests/core/write/update/updateg.js @@ -0,0 +1,22 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +// SERVER-3370 check modifiers with field name characters comparing less than '.' character. + +t = db.jstests_updateg; + +t.drop(); +t.update({}, {'$inc': {'all.t': 1, 'all-copy.t': 1}}, true); +assert.eq(1, t.count({all: {t: 1}, 'all-copy': {t: 1}})); + +t.drop(); +t.save({'all': {}, 'all-copy': {}}); +t.update({}, {'$inc': {'all.t': 1, 'all-copy.t': 1}}); +assert.eq(1, t.count({all: {t: 1}, 'all-copy': {t: 1}})); + +t.drop(); +t.save({'all11': {}, 'all2': {}}); +t.update({}, {'$inc': {'all11.t': 1, 'all2.t': 1}}); +assert.eq(1, t.count({all11: {t: 1}, 'all2': {t: 1}})); diff --git a/jstests/core/write/update/updateh.js b/jstests/core/write/update/updateh.js new file mode 100644 index 00000000000..72d0d22c616 --- /dev/null +++ b/jstests/core/write/update/updateh.js @@ -0,0 +1,95 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [ +// assumes_unsharded_collection, +// ] + +var res; + +t = db.jstest_updateh; +t.drop(); + +t.insert({x: 1}); + +res = t.update({x: 1}, {$set: {y: 1}}); // ok +assert.commandWorked(res); + +// Disallow $ in field names at the top-level. 
+res = t.update({x: 1}, {$set: {$z: 1}}); // not ok +assert.writeError(res); + +res = t.update({x: 1}, {$inc: {$z: 1}}); // not ok +assert.writeError(res); + +// Allow $ in nested field names. +res = t.update({x: 1}, {$set: {'a.$b': 1}}); +assert.commandWorked(res); + +res = t.update({x: 1}, {$set: {a: {$z: 1}}}); +assert.commandWorked(res); + +// Second section +t.drop(); + +t.save({_id: 0, n: 0}); + +// Test that '$' cannot be the first character in a top-level field. +// SERVER-7150 +res = t.update({n: 0}, {$set: {$x: 1}}); +assert.writeError(res); + +res = t.update({n: 0}, {$set: {$$$: 1}}); +assert.writeError(res); + +res = t.update({n: 0}, {$set: {"$secret.agent.x": 1}}); +assert.writeError(res); + +// Fields that are not at the top-level are allowed to have $-prefixes. +res = t.update({n: 0}, {$set: {"sneaky.$x": 1}}); +assert.commandWorked(res); + +res = t.update({n: 0}, {$set: {"secret.agent$.$x": 1}}); +assert.commandWorked(res); + +res = t.update({n: 0}, {$set: {"secret.agent$": 1}}); +assert.commandWorked(res); + +t.save({_id: 0, n: 0}); + +// Test that you cannot update database references into top level fields +// Enable after SERVER-14252 fixed: currently validation does not catch DBRef +// fields at the top level for update and will not cause an error here +// res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1, $db: "1"}}); +// assert.writeError(res); + +// res = t.update({ n: 0 }, { $set: {$ref: "1", $id: 1}}); +// assert.writeError(res); + +// SERVER-11241: Validation used to allow any DBRef field name as a prefix +// thus allowing things like $idXXX +res = t.update({n: 0}, {$set: {$reffoo: 1}}); +assert.writeError(res); + +res = t.update({n: 0}, {$set: {$idbar: 1}}); +assert.writeError(res); + +res = t.update({n: 0}, {$set: {$dbbaz: 1}}); +assert.writeError(res); + +// Test that '$id', '$db', and '$ref' are acceptable field names in +// the correct case ( subdoc) +// SERVER-3231 +res = t.update({n: 0}, {$set: {x: {$ref: '1', $id: 1, $db: 
'1'}}}); +assert.commandWorked(res); +t.save({_id: 0, n: 0}); + +// Test that '$' can occur elsewhere in a field name. +// SERVER-7557 +res = t.update({n: 0}, {$set: {ke$sha: 1}}); +assert.commandWorked(res); +t.save({_id: 0, n: 0}); + +res = t.update({n: 0}, {$set: {more$$moreproblem$: 1}}); +assert.commandWorked(res); +t.save({_id: 0, n: 0}); diff --git a/jstests/core/write/update/updatei.js b/jstests/core/write/update/updatei.js new file mode 100644 index 00000000000..599c9538a80 --- /dev/null +++ b/jstests/core/write/update/updatei.js @@ -0,0 +1,91 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_multi_updates, requires_non_retryable_writes] + +// Test new (optional) update syntax +// SERVER-4176 +t = db.updatei; + +// Using a multi update + +t.drop(); + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "x"}, {$push: {a: "y"}}, {multi: true}); +t.find({k: "x"}).forEach(function(z) { + assert.eq(["y"], z.a, "multi update using object arg"); +}); + +t.drop(); + +// Using a single update + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "x"}, {$push: {a: "y"}}, {multi: false}); +assert.eq(1, t.find({"a": "y"}).count(), "update using object arg"); + +t.drop(); + +// Using upsert, found + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "x"}, {$push: {a: "y"}}, {upsert: true}); +assert.eq(1, t.find({"k": "x", "a": "y"}).count(), "upsert (found) using object arg"); + +t.drop(); + +// Using upsert + multi, found + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "x"}, {$push: {a: "y"}}, {upsert: true, multi: true}); +t.find({k: "x"}).forEach(function(z) { + assert.eq(["y"], z.a, "multi + upsert (found) using object arg"); +}); + +t.drop(); + 
+// Using upsert, not found + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "y"}, {$push: {a: "y"}}, {upsert: true}); +assert.eq(1, t.find({"k": "y", "a": "y"}).count(), "upsert (not found) using object arg"); + +t.drop(); + +// Without upsert, found + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "x"}, {$push: {a: "y"}}, {upsert: false}); +assert.eq(1, t.find({"a": "y"}).count(), "no upsert (found) using object arg"); + +t.drop(); + +// Without upsert, not found + +for (i = 0; i < 10; i++) { + t.save({_id: i, k: "x", a: []}); +} + +t.update({k: "y"}, {$push: {a: "y"}}, {upsert: false}); +assert.eq(0, t.find({"a": "y"}).count(), "no upsert (not found) using object arg"); + +t.drop(); diff --git a/jstests/core/write/update/updatej.js b/jstests/core/write/update/updatej.js new file mode 100644 index 00000000000..bab2a32f45f --- /dev/null +++ b/jstests/core/write/update/updatej.js @@ -0,0 +1,16 @@ +// Test that update validation failure terminates the update without modifying subsequent +// documents. SERVER-4779 +// This test uses a multi-update, which is not retryable. The behavior it is testing is also not +// true of sharded clusters, since one shard may continue applying updates while the other +// encounters an error. 
+// @tags: [requires_multi_updates, requires_non_retryable_writes, assumes_unsharded_collection] + +t = db.jstests_updatej; +t.drop(); + +t.save({a: []}); +t.save({a: 1}); +t.save({a: []}); + +assert.writeError(t.update({}, {$push: {a: 2}}, false, true)); +assert.eq(1, t.count({a: 2})); diff --git a/jstests/core/write/update/updatek.js b/jstests/core/write/update/updatek.js new file mode 100644 index 00000000000..923b4145d1d --- /dev/null +++ b/jstests/core/write/update/updatek.js @@ -0,0 +1,18 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +// Test modifier operations on numerically equivalent string field names. SERVER-4776 + +t = db.jstests_updatek; + +t.drop(); +t.save({_id: 0, '1': {}, '01': {}}); +t.update({}, {$set: {'1.b': 1, '1.c': 2}}); +assert.docEq({"01": {}, "1": {"b": 1, "c": 2}, "_id": 0}, t.findOne()); + +t.drop(); +t.save({_id: 0, '1': {}, '01': {}}); +t.update({}, {$set: {'1.b': 1, '01.c': 2}}); +assert.docEq({"01": {"c": 2}, "1": {"b": 1}, "_id": 0}, t.findOne()); diff --git a/jstests/core/write/update/updatel.js b/jstests/core/write/update/updatel.js new file mode 100644 index 00000000000..a663f306722 --- /dev/null +++ b/jstests/core/write/update/updatel.js @@ -0,0 +1,57 @@ +// @tags: [requires_multi_updates, requires_non_retryable_writes, requires_fastcount] + +// The positional operator allows an update modifier field path to contain a sentinel ('$') path +// part that is replaced with the numeric position of an array element matched by the update's query +// spec. <http://dochub.mongodb.org/core/positionaloperator> + +// If no array element position from a query is available to substitute for the positional operator +// sentinel ('$'), the update fails with an error.
SERVER-6669 SERVER-4713 + +var res; +t = db.jstests_updatel; +t.drop(); + +// The collection is empty, forcing an upsert. In this case the query has no array position match +// to substitute for the positional operator. SERVER-4713 +res = t.update({}, {$set: {'a.$.b': 1}}, true); +assert(res.hasWriteError(), "An error is reported."); +assert.eq(0, t.count(), "No upsert occurred."); + +// Save a document to the collection so it is no longer empty. +t.save({_id: 0}); + +// Now, with an existing document, trigger an update rather than an upsert. The query has no array +// position match to substitute for the positional operator. SERVER-6669 +res = t.update({}, {$set: {'a.$.b': 1}}); +assert(res.hasWriteError(), "An error is reported."); +assert.eq([{_id: 0}], t.find().toArray(), "No update occurred."); + +// Now, try with an update by _id (without a query array match). +res = t.update({_id: 0}, {$set: {'a.$.b': 1}}); +assert(res.hasWriteError(), "An error is reported."); +assert.eq([{_id: 0}], t.find().toArray(), "No update occurred."); + +// Seed the collection with a document suitable for the following check. +t.remove({}); +t.save({_id: 0, a: [{b: {c: 1}}]}); + +// Now, attempt to apply an update with two nested positional operators. There is a positional +// query match for the first positional operator but not the second. Note that dollar sign +// substitution for multiple positional operators is not implemented (SERVER-831).
+res = t.update({'a.b.c': 1}, {$set: {'a.$.b.$.c': 2}}); +assert(res.hasWriteError(), "An error is reported"); +assert.eq([{_id: 0, a: [{b: {c: 1}}]}], t.find().toArray(), "No update occurred."); + +// SERVER-1155 test an update with the positional operator +// that has a regex in the query field +t.drop(); +t.insert({_id: 1, arr: [{a: "z", b: 1}]}); +res = t.update({"arr.a": /^z$/}, {$set: {"arr.$.b": 2}}, false, true); +assert.commandWorked(res); +assert.eq(t.findOne().arr[0], {a: "z", b: 2}); + +t.drop(); +t.insert({_id: 1, arr: [{a: "z", b: 1}, {a: "abc", b: 2}, {a: "lmn", b: 3}]}); +res = t.update({"arr.a": /l/}, {$inc: {"arr.$.b": 2}}, false, true); +assert.commandWorked(res); +assert.eq(t.findOne().arr[2], {a: "lmn", b: 5}); diff --git a/jstests/core/write/update/updatem.js b/jstests/core/write/update/updatem.js new file mode 100644 index 00000000000..8e4af7e56c7 --- /dev/null +++ b/jstests/core/write/update/updatem.js @@ -0,0 +1,27 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// The test runs commands that are not allowed with security token: godinsert. +// @tags: [ +// not_allowed_with_security_token,assumes_unsharded_collection, requires_non_retryable_commands] + +// Tests that _id will exist in all updated docs. 
+ +t = db.jstests_updatem; +t.drop(); + +// new _id from insert (upsert:true) +t.update({a: 1}, {$inc: {b: 1}}, true); +var doc = t.findOne({a: 1}); +assert(doc["_id"], "missing _id"); + +// new _id from insert (upsert:true) +t.update({a: 1}, {$inc: {b: 1}}, true); +var doc = t.findOne({a: 1}); +assert(doc["_id"], "missing _id"); + +// no _id on existing doc +t.getDB().runCommand({godinsert: t.getName(), obj: {a: 2}}); +t.update({a: 2}, {$inc: {b: 1}}, true); +var doc = t.findOne({a: 2}); +assert(doc["_id"], "missing _id after update"); diff --git a/jstests/core/write/update/upsert_and.js b/jstests/core/write/update/upsert_and.js new file mode 100644 index 00000000000..1e45cbe8dc2 --- /dev/null +++ b/jstests/core/write/update/upsert_and.js @@ -0,0 +1,44 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_non_retryable_writes] + +// tests to ensure fields in $and conditions are created when using the query to do upsert +var res; +coll = db.upsert4; +coll.drop(); + +res = coll.update({_id: 1, $and: [{c: 1}, {d: 1}], a: 12}, {$inc: {y: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1, a: 12, y: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update({$and: [{c: 1}, {d: 1}]}, {$setOnInsert: {_id: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update({$and: [{c: 1}, {d: 1}, {$or: [{x: 1}]}]}, {$setOnInsert: {_id: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1, x: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update({$and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update( + {r: {$gt: 3}, $and: 
[{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update( + {r: /s/, $and: [{c: 1}, {d: 1}], $or: [{x: 1}, {x: 2}]}, {$setOnInsert: {_id: 1}}, true); +assert.commandWorked(res); +assert.docEq({_id: 1, c: 1, d: 1}, coll.findOne()); + +coll.remove({}); +res = coll.update({c: 2, $and: [{c: 1}, {d: 1}]}, {$setOnInsert: {_id: 1}}, true); +assert.writeError(res); diff --git a/jstests/core/write/update/upsert_fields.js b/jstests/core/write/update/upsert_fields.js new file mode 100644 index 00000000000..310bace4907 --- /dev/null +++ b/jstests/core/write/update/upsert_fields.js @@ -0,0 +1,229 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection] + +// +// Upsert behavior tests for field extraction +// + +var coll = db.upsert_field_gen; +coll.drop(); + +var upsertedResult = function(query, expr) { + coll.drop(); + result = coll.update(query, expr, {upsert: true}); + return result; +}; + +var upsertedField = function(query, expr, fieldName) { + var res = assert.commandWorked(upsertedResult(query, expr)); + var doc = coll.findOne(); + assert.neq(doc, null, "findOne query returned no results! 
UpdateRes: " + tojson(res)); + return doc[fieldName]; +}; + +var upsertedId = function(query, expr) { + return upsertedField(query, expr, "_id"); +}; + +var upsertedXVal = function(query, expr) { + return upsertedField(query, expr, "x"); +}; + +// +// _id field has special rules +// + +// _id autogenerated +assert.neq(null, upsertedId({}, {})); + +// _id autogenerated with add'l fields +assert.neq(null, upsertedId({}, {a: 1})); +assert.eq(1, upsertedField({}, {a: 1}, "a")); +assert.neq(null, upsertedId({}, {$set: {a: 1}}, "a")); +assert.eq(1, upsertedField({}, {$set: {a: 1}}, "a")); +assert.neq(null, upsertedId({}, {$setOnInsert: {a: 1}}, "a")); +assert.eq(1, upsertedField({}, {$setOnInsert: {a: 1}}, "a")); + +// _id not autogenerated +assert.eq(1, upsertedId({}, {_id: 1})); +assert.eq(1, upsertedId({}, {$set: {_id: 1}})); +assert.eq(1, upsertedId({}, {$setOnInsert: {_id: 1}})); + +// _id type error +assert.writeError(upsertedResult({}, {_id: [1, 2]})); +assert.writeError(upsertedResult({}, {_id: undefined})); +assert.writeError(upsertedResult({}, {$set: {_id: [1, 2]}})); +// Fails in v2.6, no validation +assert.writeError(upsertedResult({}, {$setOnInsert: {_id: undefined}})); + +// Check things that are pretty much the same for replacement and $op style upserts +for (var i = 0; i < 3; i++) { + // replacement style + var expr = {}; + + // $op style + if (i == 1) + expr = {$set: {a: 1}}; + if (i == 2) + expr = {$setOnInsert: {a: 1}}; + + var isReplStyle = i == 0; + + // _id extracted + assert.eq(1, upsertedId({_id: 1}, expr)); + // All below fail in v2.6, no $ops for _id and $and/$or not explored + assert.eq(1, upsertedId({_id: {$eq: 1}}, expr)); + assert.eq(1, upsertedId({_id: {$all: [1]}}, expr)); + assert.eq(1, upsertedId({_id: {$in: [1]}}, expr)); + assert.eq(1, upsertedId({$and: [{_id: 1}]}, expr)); + assert.eq(1, upsertedId({$and: [{_id: {$eq: 1}}]}, expr)); + assert.eq(1, upsertedId({$or: [{_id: 1}]}, expr)); + assert.eq(1, upsertedId({$or: [{_id: {$eq: 
1}}]}, expr)); + + // _id not extracted, autogenerated + assert.neq(1, upsertedId({_id: {$gt: 1}}, expr)); + assert.neq(1, upsertedId({_id: {$ne: 1}}, expr)); + assert.neq(1, upsertedId({_id: {$in: [1, 2]}}, expr)); + assert.neq(1, upsertedId({_id: {$elemMatch: {$eq: 1}}}, expr)); + assert.neq(1, upsertedId({_id: {$exists: true}}, expr)); + assert.neq(1, upsertedId({_id: {$not: {$eq: 1}}}, expr)); + assert.neq(1, upsertedId({$or: [{_id: 1}, {_id: 1}]}, expr)); + assert.neq(1, upsertedId({$or: [{_id: {$eq: 1}}, {_id: 2}]}, expr)); + assert.neq(1, upsertedId({$nor: [{_id: 1}]}, expr)); + assert.neq(1, upsertedId({$nor: [{_id: {$eq: 1}}]}, expr)); + assert.neq(1, upsertedId({$nor: [{_id: {$eq: 1}}, {_id: 1}]}, expr)); + + // _id extraction errors + assert.writeError(upsertedResult({_id: [1, 2]}, expr)); + assert.writeError(upsertedResult({_id: undefined}, expr)); + assert.writeError(upsertedResult({_id: {$eq: [1, 2]}}, expr)); + assert.writeError(upsertedResult({_id: {$eq: undefined}}, expr)); + assert.writeError(upsertedResult({_id: {$all: [1, 2]}}, expr)); + // All below fail in v2.6, non-_id fields completely ignored + assert.writeError(upsertedResult({$and: [{_id: 1}, {_id: 1}]}, expr)); + assert.writeError(upsertedResult({$and: [{_id: {$eq: 1}}, {_id: 2}]}, expr)); + assert.writeError(upsertedResult({_id: 1, "_id.x": 1}, expr)); + assert.writeError(upsertedResult({_id: {x: 1}, "_id.x": 1}, expr)); + + // Special case - nested _id fields only used on $op-style updates + if (isReplStyle) { + // Fails in v2.6 + assert.writeError(upsertedResult({"_id.x": 1, "_id.y": 2}, expr)); + } else { + // Fails in v2.6 + assert.docEq({x: 1, y: 2}, upsertedId({"_id.x": 1, "_id.y": 2}, expr)); + } +} + +// regex _id in expression is an error, no regex ids allowed +assert.writeError(upsertedResult({}, {_id: /abc/})); +// Fails in v2.6, no validation +assert.writeError(upsertedResult({}, {$set: {_id: /abc/}})); + +// no regex _id extraction from query +assert.neq(/abc/, 
upsertedId({_id: /abc/}, {})); + +// +// Regular field extraction +// + +// Check things that are pretty much the same for replacement and $op style upserts +for (var i = 0; i < 3; i++) { + // replacement style + var expr = {}; + + // $op style + if (i == 1) { + expr = {$set: {a: 1}}; + } + if (i == 2) { + expr = {$setOnInsert: {a: 1}}; + } + + var isReplStyle = i == 0; + + // field extracted when replacement style + var value = isReplStyle ? undefined : 1; + assert.eq(value, upsertedXVal({x: 1}, expr)); + assert.eq(value, upsertedXVal({x: {$eq: 1}}, expr)); + assert.eq(value, upsertedXVal({x: {$in: [1]}}, expr)); + assert.eq(value, upsertedXVal({x: {$all: [1]}}, expr)); + assert.eq(value, upsertedXVal({$and: [{x: 1}]}, expr)); + assert.eq(value, upsertedXVal({$and: [{x: {$eq: 1}}]}, expr)); + assert.eq(value, upsertedXVal({$or: [{x: 1}]}, expr)); + assert.eq(value, upsertedXVal({$or: [{x: {$eq: 1}}]}, expr)); + // Special types extracted + assert.eq(isReplStyle ? undefined : [1, 2], upsertedXVal({x: [1, 2]}, expr)); + assert.eq(isReplStyle ? 
undefined : {'x.x': 1}, upsertedXVal({x: {'x.x': 1}}, expr)); + + // field not extracted + assert.eq(undefined, upsertedXVal({x: {$gt: 1}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$ne: 1}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$in: [1, 2]}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$elemMatch: {$eq: 1}}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$exists: true}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$not: {$eq: 1}}}, expr)); + assert.eq(undefined, upsertedXVal({$or: [{x: 1}, {x: 1}]}, expr)); + assert.eq(undefined, upsertedXVal({$or: [{x: {$eq: 1}}, {x: 2}]}, expr)); + assert.eq(undefined, upsertedXVal({$nor: [{x: 1}]}, expr)); + assert.eq(undefined, upsertedXVal({$nor: [{x: {$eq: 1}}]}, expr)); + assert.eq(undefined, upsertedXVal({$nor: [{x: {$eq: 1}}, {x: 1}]}, expr)); + + // field extraction errors + assert.writeError(upsertedResult({x: undefined}, expr)); + + if (!isReplStyle) { + assert.writeError(upsertedResult({x: {$all: [1, 2]}}, expr)); + assert.writeError(upsertedResult({$and: [{x: 1}, {x: 1}]}, expr)); + assert.writeError(upsertedResult({$and: [{x: {$eq: 1}}, {x: 2}]}, expr)); + } else { + assert.eq(undefined, upsertedXVal({x: {'x.x': 1}}, expr)); + assert.eq(undefined, upsertedXVal({x: {$all: [1, 2]}}, expr)); + assert.eq(undefined, upsertedXVal({$and: [{x: 1}, {x: 1}]}, expr)); + assert.eq(undefined, upsertedXVal({$and: [{x: {$eq: 1}}, {x: 2}]}, expr)); + } + + // nested field extraction + var docValue = isReplStyle ? 
undefined : {x: 1}; + assert.docEq(docValue, upsertedXVal({"x.x": 1}, expr)); + assert.docEq(docValue, upsertedXVal({"x.x": {$eq: 1}}, expr)); + assert.docEq(docValue, upsertedXVal({"x.x": {$all: [1]}}, expr)); + assert.docEq(docValue, upsertedXVal({$and: [{"x.x": 1}]}, expr)); + assert.docEq(docValue, upsertedXVal({$and: [{"x.x": {$eq: 1}}]}, expr)); + assert.docEq(docValue, upsertedXVal({$or: [{"x.x": 1}]}, expr)); + assert.docEq(docValue, upsertedXVal({$or: [{"x.x": {$eq: 1}}]}, expr)); + + // nested field conflicts + if (!isReplStyle) { + assert.writeError(upsertedResult({x: 1, "x.x": 1}, expr)); + assert.writeError(upsertedResult({x: {}, "x.x": 1}, expr)); + assert.writeError(upsertedResult({x: {x: 1}, "x.x": 1}, expr)); + assert.writeError(upsertedResult({x: {x: 1}, "x.y": 1}, expr)); + assert.writeError(upsertedResult({x: [1, {x: 1}], "x.x": 1}, expr)); + } else { + assert.eq(undefined, upsertedXVal({x: 1, "x.x": 1}, expr)); + assert.eq(undefined, upsertedXVal({x: {}, "x.x": 1}, expr)); + assert.eq(undefined, upsertedXVal({x: {x: 1}, "x.x": 1}, expr)); + assert.eq(undefined, upsertedXVal({x: {x: 1}, "x.y": 1}, expr)); + assert.eq(undefined, upsertedXVal({x: [1, {x: 1}], "x.x": 1}, expr)); + } +} + +// regex field in expression is a value +assert.eq(/abc/, upsertedXVal({}, {x: /abc/})); +assert.eq(/abc/, upsertedXVal({}, {$set: {x: /abc/}})); + +// no regex field extraction from query unless $eq'd +assert.eq(/abc/, upsertedXVal({x: {$eq: /abc/}}, {$set: {a: 1}})); +assert.eq(undefined, upsertedXVal({x: /abc/}, {$set: {a: 1}})); + +// replacement-style updates ignore conflicts *except* on _id field +assert.eq(1, upsertedId({_id: 1, x: [1, {x: 1}], "x.x": 1}, {})); + +// DBRef special cases +// make sure query doesn't error when creating doc for insert, since it's missing the rest of the +// dbref fields. 
SERVER-14024 +// Fails in 2.6.1->3 +assert.docEq(DBRef("a", 1), upsertedXVal({"x.$id": 1}, {$set: {x: DBRef("a", 1)}})); diff --git a/jstests/core/write/update/upsert_shell.js b/jstests/core/write/update/upsert_shell.js new file mode 100644 index 00000000000..3ab07b50c21 --- /dev/null +++ b/jstests/core/write/update/upsert_shell.js @@ -0,0 +1,57 @@ +// Cannot implicitly shard accessed collections because of following errmsg: A single +// update/delete on a sharded collection must contain an exact match on _id or contain the shard +// key. +// @tags: [assumes_unsharded_collection, requires_fastcount] + +// tests to make sure that the new _id is returned after the insert in the shell +var l; +t = db.upsert1; +t.drop(); + +// make sure the new _id is returned when $mods are used +l = t.update({x: 1}, {$inc: {y: 1}}, true); +assert(l.getUpsertedId(), "A1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id.str, t.findOne()._id.str, "A2"); +assert.eq(l._id.str, t.findOne()._id.str, "A2"); + +// make sure the new _id is returned on a replacement (no $mod in update) +l = t.update({x: 2}, {x: 2, y: 3}, true); +assert(l.getUpsertedId(), "B1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id.str, t.findOne({x: 2})._id.str, "B2"); +assert.eq(l._id.str, t.findOne({x: 2})._id.str, "B2"); +assert.eq(2, t.find().count(), "B3"); + +// make sure that an upsert update that only updates doesn't return and _id +l = t.update({x: 2}, {x: 2, y: 4}, true); +assert(l.getUpsertedId() === null); +assert(l._id === undefined); + +// use the _id from the query for the insert +l = t.update({_id: 3}, {$set: {a: '123'}}, true); +assert(l.getUpsertedId(), "C1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id, 3, "C2 - " + tojson(l)); +assert.eq(l._id, 3, "C2 - " + tojson(l)); + +// test with an embedded doc for the _id field +l = t.update({_id: {a: 1}}, {$set: {a: 123}}, true); +assert(l.getUpsertedId(), "D1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id, {a: 1}, "D2 - " + tojson(l)); 
+assert.eq(l._id, {a: 1}, "D2 - " + tojson(l)); + +// test with a range query +l = t.update({_id: {$gt: 100}}, {$set: {a: 123}}, true); +assert(l.getUpsertedId(), "E1 - " + tojson(l)); +assert.neq(l.getUpsertedId()._id, 100, "E2 - " + tojson(l)); +assert.neq(l._id, 100, "E2 - " + tojson(l)); + +// test with an _id query +l = t.update({_id: 1233}, {$set: {a: 123}}, true); +assert(l.getUpsertedId(), "F1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id, 1233, "F2 - " + tojson(l)); +assert.eq(l._id, 1233, "F2 - " + tojson(l)); + +// test with an embedded _id query +l = t.update({_id: {a: 1, b: 2}}, {$set: {a: 123}}, true); +assert(l.getUpsertedId(), "G1 - " + tojson(l)); +assert.eq(l.getUpsertedId()._id, {a: 1, b: 2}, "G2 - " + tojson(l)); +assert.eq(l._id, {a: 1, b: 2}, "G2 - " + tojson(l)); |