Diffstat (limited to 'src/mongo')
-rw-r--r--  src/mongo/db/commands/fle2_compact.cpp | 2
-rw-r--r--  src/mongo/db/commands/write_commands.cpp | 3
-rw-r--r--  src/mongo/db/fle_crud.cpp | 12
-rw-r--r--  src/mongo/db/fle_crud_test.cpp | 11
-rw-r--r--  src/mongo/db/ops/write_ops.cpp | 46
-rw-r--r--  src/mongo/db/ops/write_ops_parsers.h | 24
-rw-r--r--  src/mongo/db/query/query_knobs.idl | 7
-rw-r--r--  src/mongo/db/repl/SConscript | 2
-rw-r--r--  src/mongo/db/repl/idempotency_test.cpp | 127
-rw-r--r--  src/mongo/db/repl/idempotency_update_sequence.cpp | 284
-rw-r--r--  src/mongo/db/repl/idempotency_update_sequence.h | 128
-rw-r--r--  src/mongo/db/repl/idempotency_update_sequence_test.cpp | 318
-rw-r--r--  src/mongo/db/repl/oplog_applier_impl_test.cpp | 96
-rw-r--r--  src/mongo/db/repl/replication_recovery_test.cpp | 21
-rw-r--r--  src/mongo/db/repl/session_update_tracker.cpp | 8
-rw-r--r--  src/mongo/db/repl/storage_timestamp_test.cpp | 42
-rw-r--r--  src/mongo/db/repl/tenant_oplog_applier_test.cpp | 19
-rw-r--r--  src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp | 14
-rw-r--r--  src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp | 26
-rw-r--r--  src/mongo/db/update/SConscript | 2
-rw-r--r--  src/mongo/db/update/addtoset_node_test.cpp | 37
-rw-r--r--  src/mongo/db/update/arithmetic_node_test.cpp | 37
-rw-r--r--  src/mongo/db/update/bit_node_test.cpp | 25
-rw-r--r--  src/mongo/db/update/compare_node_test.cpp | 23
-rw-r--r--  src/mongo/db/update/current_date_node_test.cpp | 40
-rw-r--r--  src/mongo/db/update/object_replace_executor_test.cpp | 2
-rw-r--r--  src/mongo/db/update/object_transform_executor_test.cpp | 2
-rw-r--r--  src/mongo/db/update/pipeline_executor_test.cpp | 123
-rw-r--r--  src/mongo/db/update/pop_node_test.cpp | 20
-rw-r--r--  src/mongo/db/update/pull_node_test.cpp | 58
-rw-r--r--  src/mongo/db/update/pullall_node_test.cpp | 17
-rw-r--r--  src/mongo/db/update/push_node_test.cpp | 73
-rw-r--r--  src/mongo/db/update/rename_node_test.cpp | 41
-rw-r--r--  src/mongo/db/update/set_node_test.cpp | 47
-rw-r--r--  src/mongo/db/update/unset_node_test.cpp | 37
-rw-r--r--  src/mongo/db/update/update_array_node_test.cpp | 32
-rw-r--r--  src/mongo/db/update/update_driver.cpp | 42
-rw-r--r--  src/mongo/db/update/update_executor.h | 4
-rw-r--r--  src/mongo/db/update/update_node_test_fixture.h | 48
-rw-r--r--  src/mongo/db/update/update_object_node_test.cpp | 61
-rw-r--r--  src/mongo/db/update/update_oplog_entry_version.h | 8
-rw-r--r--  src/mongo/db/update/update_tree_executor.h | 19
-rw-r--r--  src/mongo/db/update/v1_log_builder.cpp | 143
-rw-r--r--  src/mongo/db/update/v1_log_builder.h | 130
-rw-r--r--  src/mongo/db/update/v1_log_builder_test.cpp | 158
45 files changed, 488 insertions(+), 1931 deletions(-)
diff --git a/src/mongo/db/commands/fle2_compact.cpp b/src/mongo/db/commands/fle2_compact.cpp
index 886d0bdfc9a..7a5361d60e7 100644
--- a/src/mongo/db/commands/fle2_compact.cpp
+++ b/src/mongo/db/commands/fle2_compact.cpp
@@ -160,7 +160,7 @@ void upsertNullDocument(FLEQueryInterface* queryImpl,
updateEntry.setUpsert(false);
updateEntry.setQ(newNullDoc.getField("_id").wrap());
updateEntry.setU(mongo::write_ops::UpdateModification(
- newNullDoc, write_ops::UpdateModification::ClassicTag(), true));
+ newNullDoc, write_ops::UpdateModification::ReplacementTag{}));
write_ops::UpdateCommandRequest updateRequest(nss, {std::move(updateEntry)});
auto [reply, originalDoc] =
queryImpl->updateWithPreimage(nss, EncryptionInformation(BSONObj()), updateRequest);
diff --git a/src/mongo/db/commands/write_commands.cpp b/src/mongo/db/commands/write_commands.cpp
index b360c7b1a2c..a2c3e378d38 100644
--- a/src/mongo/db/commands/write_commands.cpp
+++ b/src/mongo/db/commands/write_commands.cpp
@@ -212,7 +212,8 @@ write_ops::UpdateOpEntry makeTimeseriesUpdateOpEntry(
write_ops::UpdateModification::DiffOptions options;
options.mustCheckExistenceForInsertOperations =
static_cast<bool>(repl::tenantMigrationRecipientInfo(opCtx));
- write_ops::UpdateModification u(updateBuilder.obj(), options);
+ write_ops::UpdateModification u(
+ updateBuilder.obj(), write_ops::UpdateModification::DeltaTag{}, options);
write_ops::UpdateOpEntry update(BSON("_id" << batch->bucket().id), std::move(u));
invariant(!update.getMulti(), batch->bucket().id.toString());
invariant(!update.getUpsert(), batch->bucket().id.toString());
diff --git a/src/mongo/db/fle_crud.cpp b/src/mongo/db/fle_crud.cpp
index f1f9cf96f2d..761efbeb843 100644
--- a/src/mongo/db/fle_crud.cpp
+++ b/src/mongo/db/fle_crud.cpp
@@ -842,7 +842,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
auto pushUpdate = EDCServerCollection::finalizeForUpdate(updateModifier, serverPayload);
newUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pushUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pushUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
} else {
auto replacementDocument = updateModification.getUpdateReplacement();
EDCServerCollection::validateEncryptedFieldInfo(
@@ -857,7 +857,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
EDCServerCollection::finalizeForInsert(replacementDocument, serverPayload);
newUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- safeContentReplace, write_ops::UpdateModification::ClassicTag(), true));
+ safeContentReplace, write_ops::UpdateModification::ReplacementTag{}));
}
// Step 3 ----
@@ -908,7 +908,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
pullUpdateOpEntry.setMulti(false);
pullUpdateOpEntry.setQ(BSON("_id"_sd << idElement));
pullUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pullUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pullUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
newUpdateRequest.setUpdates({pullUpdateOpEntry});
newUpdateRequest.getWriteCommandRequestBase().setStmtId(boost::none);
newUpdateRequest.setLegacyRuntimeConstants(boost::none);
@@ -1089,7 +1089,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
// Step 2 ----
newUpdateModification = write_ops::UpdateModification(
- pushUpdate, write_ops::UpdateModification::ClassicTag(), false);
+ pushUpdate, write_ops::UpdateModification::ModifierUpdateTag{});
} else {
auto replacementDocument = updateModification.getUpdateReplacement();
EDCServerCollection::validateEncryptedFieldInfo(
@@ -1104,7 +1104,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
EDCServerCollection::finalizeForInsert(replacementDocument, serverPayload);
newUpdateModification = write_ops::UpdateModification(
- safeContentReplace, write_ops::UpdateModification::ClassicTag(), true);
+ safeContentReplace, write_ops::UpdateModification::ReplacementTag{});
}
// Step 3 ----
@@ -1165,7 +1165,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
pullUpdateOpEntry.setMulti(false);
pullUpdateOpEntry.setQ(BSON("_id"_sd << idElement));
pullUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pullUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pullUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
newUpdateRequest.setUpdates({pullUpdateOpEntry});
newUpdateRequest.setLegacyRuntimeConstants(boost::none);
newUpdateRequest.getWriteCommandRequestBase().setStmtId(boost::none);
diff --git a/src/mongo/db/fle_crud_test.cpp b/src/mongo/db/fle_crud_test.cpp
index df3400b72b1..0e6489981e8 100644
--- a/src/mongo/db/fle_crud_test.cpp
+++ b/src/mongo/db/fle_crud_test.cpp
@@ -538,7 +538,7 @@ void FleCrudTest::doSingleUpdate(int id, BSONElement element) {
void FleCrudTest::doSingleUpdateWithUpdateDoc(int id, BSONObj update) {
doSingleUpdateWithUpdateDoc(
id,
- write_ops::UpdateModification(update, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(update, write_ops::UpdateModification::ModifierUpdateTag{}));
}
void FleCrudTest::doSingleUpdateWithUpdateDoc(int id,
@@ -944,8 +944,7 @@ TEST_F(FleCrudTest, UpdateOneReplace) {
auto result = FLEClientCrypto::transformPlaceholders(replaceEP, &_keyVault);
doSingleUpdateWithUpdateDoc(
- 1,
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, true));
+ 1, write_ops::UpdateModification(result, write_ops::UpdateModification::ReplacementTag{}));
assertDocumentCounts(1, 2, 1, 3);
@@ -1024,7 +1023,7 @@ TEST_F(FleCrudTest, FindAndModify_UpdateOne) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
doFindAndModify(req);
assertDocumentCounts(1, 2, 1, 3);
@@ -1075,7 +1074,7 @@ TEST_F(FleCrudTest, FindAndModify_RenameSafeContent) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
ASSERT_THROWS_CODE(doFindAndModify(req), DBException, 6371506);
}
@@ -1101,7 +1100,7 @@ TEST_F(FleCrudTest, FindAndModify_SetSafeContent) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
ASSERT_THROWS_CODE(doFindAndModify(req), DBException, 6666200);
}
diff --git a/src/mongo/db/ops/write_ops.cpp b/src/mongo/db/ops/write_ops.cpp
index 411ecada1de..41c7fa4f5d2 100644
--- a/src/mongo/db/ops/write_ops.cpp
+++ b/src/mongo/db/ops/write_ops.cpp
@@ -158,17 +158,16 @@ UpdateModification UpdateModification::parseFromOplogEntry(const BSONObj& oField
BSONElement idField = oField["_id"];
// If _id field is present, we're getting a replacement style update in which $v can be a user
- // field. Otherwise, $v field has to be either missing or be one of the version flag $v:1 /
- // $v:2.
+ // field. Otherwise, $v field has to be $v:2.
uassert(4772600,
- str::stream() << "Expected _id field or $v field missing or $v:1/$v:2, but got: "
- << vField,
- idField.ok() || !vField.ok() ||
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1) ||
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2));
-
- if (!idField.ok() && vField.ok() &&
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2)) {
+ str::stream() << "Expected _id field or $v:2, but got: " << vField,
+ idField.ok() ||
+ (vField.ok() &&
+ vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2)));
+
+ // It is important to check for '_id' field first, because a replacement style update can still
+ // have a '$v' field in the object.
+ if (!idField.ok()) {
// Make sure there's a diff field.
BSONElement diff = oField[update_oplog_entry::kDiffObjectFieldName];
uassert(4772601,
@@ -176,15 +175,14 @@ UpdateModification UpdateModification::parseFromOplogEntry(const BSONObj& oField
<< diff.type(),
diff.type() == BSONType::Object);
- return UpdateModification(doc_diff::Diff{diff.embeddedObject()}, options);
+ return UpdateModification(doc_diff::Diff{diff.embeddedObject()}, DeltaTag{}, options);
} else {
- // Treat it as a "classic" update which can either be a full replacement or a
- // modifier-style update. Use "_id" field to determine whether which style it is.
- return UpdateModification(oField, ClassicTag{}, idField.ok());
+ // Treat it as a full replacement update.
+ return UpdateModification(oField, ReplacementTag{});
}
}
-UpdateModification::UpdateModification(doc_diff::Diff diff, DiffOptions options)
+UpdateModification::UpdateModification(doc_diff::Diff diff, DeltaTag, DiffOptions options)
: _update(DeltaUpdate{std::move(diff), options}) {}
UpdateModification::UpdateModification(TransformFunc transform)
@@ -193,7 +191,7 @@ UpdateModification::UpdateModification(TransformFunc transform)
UpdateModification::UpdateModification(BSONElement update) {
const auto type = update.type();
if (type == BSONType::Object) {
- _update = UpdateModification(update.Obj(), ClassicTag{})._update;
+ _update = UpdateModification(update.Obj())._update;
return;
}
@@ -204,21 +202,19 @@ UpdateModification::UpdateModification(BSONElement update) {
_update = PipelineUpdate{parsePipelineFromBSON(update)};
}
-// If we know whether the update is a replacement, use that value. For example, when we're parsing
-// the oplog entry, we know if the update is a replacement by checking whether there's an _id field.
-UpdateModification::UpdateModification(const BSONObj& update, ClassicTag, bool isReplacement) {
- if (isReplacement) {
+UpdateModification::UpdateModification(const BSONObj& update) {
+ if (isClassicalUpdateReplacement(update)) {
_update = ReplacementUpdate{update};
} else {
_update = ModifierUpdate{update};
}
}
-// If we don't know whether the update is a replacement, for example while we are parsing a user
-// request, we infer this by checking whether the first element is a $-field to distinguish modifier
-// style updates.
-UpdateModification::UpdateModification(const BSONObj& update, ClassicTag)
- : UpdateModification(update, ClassicTag{}, isClassicalUpdateReplacement(update)) {}
+UpdateModification::UpdateModification(const BSONObj& update, ModifierUpdateTag)
+ : _update{ModifierUpdate{update}} {}
+UpdateModification::UpdateModification(const BSONObj& update, ReplacementTag)
+ : _update{ReplacementUpdate{update}} {}
+
UpdateModification::UpdateModification(std::vector<BSONObj> pipeline)
: _update{PipelineUpdate{std::move(pipeline)}} {}
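The change above tightens parseFromOplogEntry(): the 'o' field of an update oplog entry must now be either a $v:2 delta (no '_id') or a full replacement (has '_id'); legacy $v:1 modifier entries are rejected. Below is a minimal illustrative sketch, not part of this commit, assuming the in-tree headers named in the diff and the existing UpdateModification::type() accessor; the function name is hypothetical.

// Illustrative sketch (not part of this commit): how parseFromOplogEntry() now
// classifies the 'o' field of an update oplog entry.
#include "mongo/bson/json.h"
#include "mongo/db/ops/write_ops_parsers.h"
#include "mongo/util/assert_util.h"

namespace mongo {
void classifyOplogUpdateEntries() {
    write_ops::UpdateModification::DiffOptions options;

    // A $v:2 delta entry: no '_id', '$v' must be 2, and 'diff' must be an object.
    auto delta = write_ops::UpdateModification::parseFromOplogEntry(
        fromjson("{$v: 2, diff: {u: {a: 1}}}"), options);
    invariant(delta.type() == write_ops::UpdateModification::Type::kDelta);

    // A replacement entry: identified by the presence of '_id'. A user field named
    // '$v' is still tolerated here because '_id' is checked first.
    auto replacement = write_ops::UpdateModification::parseFromOplogEntry(
        fromjson("{_id: 1, a: 2}"), options);
    invariant(replacement.type() == write_ops::UpdateModification::Type::kReplacement);

    // A legacy $v:1 modifier entry such as {$v: 1, $set: {a: 1}} now fails the
    // uassert(4772600) check instead of being parsed as a "classic" update.
}
}  // namespace mongo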
diff --git a/src/mongo/db/ops/write_ops_parsers.h b/src/mongo/db/ops/write_ops_parsers.h
index 4fd38f90b68..ea898a153b0 100644
--- a/src/mongo/db/ops/write_ops_parsers.h
+++ b/src/mongo/db/ops/write_ops_parsers.h
@@ -89,29 +89,39 @@ public:
struct DiffOptions {
bool mustCheckExistenceForInsertOperations = true;
};
- struct ClassicTag {};
+
+ /**
+ * Tags used to disambiguate between the constructors for different update types.
+ */
+ struct ModifierUpdateTag {};
+ struct ReplacementTag {};
+ struct DeltaTag {};
// Given the 'o' field of an update oplog entry, will return an UpdateModification that can be
// applied. The `options` parameter will be applied only in the case a Delta update is parsed.
static UpdateModification parseFromOplogEntry(const BSONObj& oField,
const DiffOptions& options);
static UpdateModification parseFromClassicUpdate(const BSONObj& modifiers) {
- return UpdateModification(modifiers, ClassicTag{});
+ return UpdateModification(modifiers);
}
static UpdateModification parseFromV2Delta(const doc_diff::Diff& diff,
DiffOptions const& options) {
- return UpdateModification(diff, options);
+ return UpdateModification(diff, DeltaTag{}, options);
}
UpdateModification() = default;
UpdateModification(BSONElement update);
UpdateModification(std::vector<BSONObj> pipeline);
- UpdateModification(doc_diff::Diff, DiffOptions);
+ UpdateModification(doc_diff::Diff, DeltaTag, DiffOptions);
// Creates a transform-style update. The transform function MUST preserve the _id element.
UpdateModification(TransformFunc transform);
- // This constructor exists only to provide a fast-path for constructing classic-style updates.
- UpdateModification(const BSONObj& update, ClassicTag, bool isReplacement);
- UpdateModification(const BSONObj& update, ClassicTag);
+ // These constructors exist only to provide a fast-path for callers that already know the update type.
+ UpdateModification(const BSONObj& update, ModifierUpdateTag);
+ UpdateModification(const BSONObj& update, ReplacementTag);
+ // If we don't know whether the update is a replacement or a modifier style update, for example
+ // while we are parsing a user request, we infer this by checking whether the first element is a
+ // $-field to distinguish modifier style updates.
+ UpdateModification(const BSONObj& update);
/**
* These methods support IDL parsing of the "u" field from the update command and OP_UPDATE.
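For reference, here is a minimal sketch, not part of this commit, of how call sites pick the new tagged constructors, mirroring the fle_crud.cpp and write_commands.cpp changes above. The function name is hypothetical, doc_diff::Diff is assumed to be the in-tree BSONObj alias, and the Type enum values are assumed to match this header.

// Illustrative sketch (not part of this commit): choosing a tagged
// UpdateModification constructor at a call site.
#include "mongo/bson/json.h"
#include "mongo/db/ops/write_ops_parsers.h"
#include "mongo/util/assert_util.h"

namespace mongo {
void buildTaggedUpdates() {
    using UM = write_ops::UpdateModification;

    // The caller already knows this is a modifier-style ($-operator) document.
    UM modifier(fromjson("{$set: {a: 1}}"), UM::ModifierUpdateTag{});
    invariant(modifier.type() == UM::Type::kModifier);

    // The caller already knows this is a full replacement document.
    UM replacement(fromjson("{_id: 1, a: 1}"), UM::ReplacementTag{});
    invariant(replacement.type() == UM::Type::kReplacement);

    // A $v:2 diff plus options, e.g. on the tenant-migration path in write_commands.cpp.
    UM::DiffOptions options;
    options.mustCheckExistenceForInsertOperations = false;
    UM delta(fromjson("{u: {a: 1}}"), UM::DeltaTag{}, options);
    invariant(delta.type() == UM::Type::kDelta);

    // Untagged constructor: replacement vs. modifier is inferred from whether the
    // first element is a $-field, as when parsing a user request.
    UM inferred(fromjson("{a: 1}"));
    invariant(inferred.type() == UM::Type::kReplacement);
}
}  // namespace mongo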
diff --git a/src/mongo/db/query/query_knobs.idl b/src/mongo/db/query/query_knobs.idl
index 9fa82639f51..4b7198314f2 100644
--- a/src/mongo/db/query/query_knobs.idl
+++ b/src/mongo/db/query/query_knobs.idl
@@ -535,13 +535,6 @@ server_parameters:
gt: 0
on_update: plan_cache_util::clearSbeCacheOnParameterChange
- internalQueryEnableLoggingV2OplogEntries:
- description: "If true, this node may log $v:2 delta-style oplog entries."
- set_at: [ startup, runtime ]
- cpp_varname: "internalQueryEnableLoggingV2OplogEntries"
- cpp_vartype: AtomicWord<bool>
- default: true
-
internalQuerySlotBasedExecutionMaxStaticIndexScanIntervals:
description: "Limits the number of statically known intervals that SBE can decompose index
bounds into when possible."
diff --git a/src/mongo/db/repl/SConscript b/src/mongo/db/repl/SConscript
index ba09749f22e..43520c0dfaf 100644
--- a/src/mongo/db/repl/SConscript
+++ b/src/mongo/db/repl/SConscript
@@ -653,7 +653,6 @@ env.Library(
source=[
'idempotency_document_structure.cpp',
'idempotency_scalar_generator.cpp',
- 'idempotency_update_sequence.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -1647,7 +1646,6 @@ if wiredtiger:
'delayable_timeout_callback_test.cpp',
'drop_pending_collection_reaper_test.cpp',
'idempotency_document_structure_test.cpp',
- 'idempotency_update_sequence_test.cpp',
'initial_syncer_test.cpp',
'isself_test.cpp',
'member_config_test.cpp',
diff --git a/src/mongo/db/repl/idempotency_test.cpp b/src/mongo/db/repl/idempotency_test.cpp
index 69777fdbc55..53acd427f38 100644
--- a/src/mongo/db/repl/idempotency_test.cpp
+++ b/src/mongo/db/repl/idempotency_test.cpp
@@ -38,7 +38,6 @@
#include "mongo/db/query/plan_executor.h"
#include "mongo/db/repl/idempotency_document_structure.h"
#include "mongo/db/repl/idempotency_test_fixture.h"
-#include "mongo/db/repl/idempotency_update_sequence.h"
#include "mongo/db/repl/replication_coordinator.h"
#include "mongo/db/server_options.h"
#include "mongo/db/update/document_diff_calculator.h"
@@ -58,10 +57,6 @@ class RandomizedIdempotencyTest : public IdempotencyTest {
protected:
const int kDocId = 1;
const BSONObj kDocIdQuery = BSON("_id" << kDocId);
-
- std::vector<OplogEntry> createUpdateSequence(const UpdateSequenceGenerator& generator,
- size_t length);
-
BSONObj canonicalizeDocumentForDataHash(const BSONObj& obj) override;
BSONObj getDoc();
@@ -73,15 +68,10 @@ protected:
Status resetState() override;
- void runIdempotencyTestCase();
- void runUpdateV2IdempotencyTestCase(double v2Probability);
+ void runUpdateV2IdempotencyTestCase();
std::vector<OplogEntry> initOps;
int64_t seed;
-
-private:
- // Op-style updates cannot guarantee field order for certain cases.
- bool _ignoreFieldOrder = true;
};
BSONObj canonicalizeBSONObjForDataHash(const BSONObj& obj);
@@ -123,9 +113,6 @@ BSONObj canonicalizeBSONObjForDataHash(const BSONObj& obj) {
}
BSONObj RandomizedIdempotencyTest::canonicalizeDocumentForDataHash(const BSONObj& obj) {
- if (!_ignoreFieldOrder) {
- return obj;
- }
return canonicalizeBSONObjForDataHash(obj);
}
BSONObj RandomizedIdempotencyTest::getDoc() {
@@ -135,18 +122,6 @@ BSONObj RandomizedIdempotencyTest::getDoc() {
return doc.getOwned();
}
-std::vector<OplogEntry> RandomizedIdempotencyTest::createUpdateSequence(
- const UpdateSequenceGenerator& generator, const size_t length) {
- // for each document enumerated & inserted generate a sequence of updates to apply to it.
- std::vector<OplogEntry> updateSequence;
- updateSequence.reserve(length);
- for (size_t i = 0; i < length; i++) {
- updateSequence.push_back(update(kDocId, generator.generateUpdate()));
- }
-
- return updateSequence;
-}
-
std::string RandomizedIdempotencyTest::getStatesString(const std::vector<CollectionState>& state1,
const std::vector<CollectionState>& state2,
const std::vector<OplogEntry>& state1Ops,
@@ -195,75 +170,13 @@ Status RandomizedIdempotencyTest::resetState() {
return runOpsInitialSync(initOps);
}
-void RandomizedIdempotencyTest::runIdempotencyTestCase() {
- _ignoreFieldOrder = true;
+void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase() {
ASSERT_OK(
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- const size_t lengthOfNumericComponent = 1;
-
- // Eliminate modification of array elements, because they cause theoretically valid sequences
- // that cause idempotency issues.
- const double kScalarProbability = 0.375;
- const double kDocProbability = 0.375;
- const double kArrProbability = 0;
-
this->seed = SecureRandom().nextInt64();
PseudoRandom seedGenerator(this->seed);
- RandomizedScalarGenerator scalarGenerator{PseudoRandom(seedGenerator.nextInt64())};
- UpdateSequenceGenerator updateGenerator({fields,
- depth,
- lengthOfNumericComponent,
- kScalarProbability,
- kDocProbability,
- kArrProbability},
- PseudoRandom{seedGenerator.nextInt64()},
- &scalarGenerator);
-
- const bool kSkipDocs = kDocProbability == 0.0;
- const bool kSkipArrs = kArrProbability == 0.0;
- DocumentStructureEnumerator enumerator(
- {fields, depth, lengthOfNumericComponent, kSkipDocs, kSkipArrs}, &scalarGenerator);
-
- const size_t kUpdateSequenceLength = 5;
- // For the sake of keeping the speed of iteration sane and feasible.
- const size_t kNumUpdateSequencesPerDoc = 2;
-
- for (auto doc : enumerator) {
- BSONObj docWithId = (BSONObjBuilder(doc) << "_id" << kDocId).obj();
- for (size_t i = 0; i < kNumUpdateSequencesPerDoc; i++) {
- this->initOps = std::vector<OplogEntry>{createCollection(), insert(docWithId)};
- std::vector<OplogEntry> updateSequence =
- createUpdateSequence(updateGenerator, kUpdateSequenceLength);
- testOpsAreIdempotent(updateSequence, SequenceType::kAnyPrefixOrSuffix);
- }
- }
-}
-
-void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probability) {
- _ignoreFieldOrder = (v2Probability < 1.0);
- ASSERT_OK(
- ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
-
- this->seed = SecureRandom().nextInt64();
- PseudoRandom seedGenerator(this->seed);
- RandomizedScalarGenerator scalarGenerator{PseudoRandom(seedGenerator.nextInt64())};
-
- // Eliminate modification of array elements when generating $v:1 oplog udpates, because they
- // cause theoretically valid sequences that cause idempotency issues.
- //
- // For example oplog entries '{$unset: {a.1: null}}' and '{$set: {a.1.1: null}}' can break
- // idempotency if the entries are applied on an input document '{a: []}'. These entries should
- // not have been generated in practice if the starting document is '{a: []}', but the current
- // 'UpdateSequenceGenerator' is not smart enough to figure that out.
- const size_t lengthOfNumericComponent = 0;
-
std::set<StringData> fields{"f00", "f10", "f01", "f11", "f02", "f20"};
- UpdateSequenceGenerator updateV1Generator({fields, 2 /* depth */, lengthOfNumericComponent},
- PseudoRandom(seedGenerator.nextInt64()),
- &scalarGenerator);
auto generateDocWithId = [&seedGenerator](int id) {
MutableDocument doc;
@@ -286,22 +199,15 @@ void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probabil
for (size_t i = 0; i < kUpdateSequenceLength; i++) {
BSONObj oplogDiff;
boost::optional<BSONObj> generatedDoc;
- if (rng.nextCanonicalDouble() <= v2Probability) {
- // With delta based updates, we cannot just generate any random diff since certains
- // diff when applied to an unrelated object (which would never have produced by
- // computing the input objects) would break idempotency. So we do a dry run of what
- // the collection state would look like and compute diffs based on that.
- generatedDoc = generateDocWithId(kDocId);
- auto diffOutput =
- doc_diff::computeDiff(oldDoc,
- *generatedDoc,
- update_oplog_entry::kSizeOfDeltaOplogEntryMetadata,
- nullptr);
- ASSERT(diffOutput);
- oplogDiff = BSON("$v" << 2 << "diff" << diffOutput->diff);
- } else {
- oplogDiff = updateV1Generator.generateUpdate();
- }
+ // With delta-based updates, we cannot just generate any random diff, since certain diffs,
+ // when applied to an unrelated object (one that would never have been produced by diffing the
+ // input objects), would break idempotency. So we do a dry run of what the collection
+ // state would look like and compute diffs based on that.
+ generatedDoc = generateDocWithId(kDocId);
+ auto diffOutput = doc_diff::computeDiff(
+ oldDoc, *generatedDoc, update_oplog_entry::kSizeOfDeltaOplogEntryMetadata, nullptr);
+ ASSERT(diffOutput);
+ oplogDiff = BSON("$v" << 2 << "diff" << diffOutput->diff);
auto op = update(kDocId, oplogDiff);
ASSERT_OK(runOpInitialSync(op));
if (generatedDoc) {
@@ -314,14 +220,11 @@ void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probabil
}
}
-TEST_F(RandomizedIdempotencyTest, CheckUpdateSequencesAreIdempotent) {
- runIdempotencyTestCase();
-}
TEST_F(RandomizedIdempotencyTest, CheckUpdateSequencesAreIdempotentV2) {
- runUpdateV2IdempotencyTestCase(1.0);
- runUpdateV2IdempotencyTestCase(0.4);
- runUpdateV2IdempotencyTestCase(0.5);
- runUpdateV2IdempotencyTestCase(0.6);
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
}
} // namespace
diff --git a/src/mongo/db/repl/idempotency_update_sequence.cpp b/src/mongo/db/repl/idempotency_update_sequence.cpp
deleted file mode 100644
index 3963e940276..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence.cpp
+++ /dev/null
@@ -1,284 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include "mongo/db/repl/idempotency_update_sequence.h"
-
-#include <algorithm>
-#include <memory>
-
-#include "mongo/db/exec/document_value/value.h"
-#include "mongo/db/field_ref.h"
-#include "mongo/db/jsobj.h"
-#include "mongo/db/repl/idempotency_document_structure.h"
-
-namespace mongo {
-
-UpdateSequenceGeneratorConfig::UpdateSequenceGeneratorConfig(std::set<StringData> fields_,
- std::size_t depth_,
- std::size_t lengthOfNumericComponent_,
- double scalarProbability_,
- double docProbability_,
- double arrProbability_)
- : fields(std::move(fields_)),
- depth(depth_),
- lengthOfNumericComponent(lengthOfNumericComponent_),
- scalarProbability(scalarProbability_),
- docProbability(docProbability_),
- arrProbability(arrProbability_) {}
-
-std::size_t UpdateSequenceGenerator::_getPathDepth(const std::string& path) {
- // Our depth is -1 because we count at 0, but numParts just counts the number of fields.
- return path == "" ? 0 : FieldRef(path).numParts() - 1;
-}
-
-std::vector<std::string> UpdateSequenceGenerator::_eliminatePrefixPaths(
- const std::string& path, const std::vector<std::string>& paths) {
- std::vector<std::string> remainingPaths;
- for (auto oldPath : paths) {
- if (!FieldRef(oldPath).isPrefixOf(FieldRef(path)) &&
- !FieldRef(path).isPrefixOf(FieldRef(oldPath)) && path != path) {
- remainingPaths.push_back(oldPath);
- }
- }
-
- return remainingPaths;
-}
-
-void UpdateSequenceGenerator::_generatePaths(const UpdateSequenceGeneratorConfig& config,
- const std::string& path) {
- if (UpdateSequenceGenerator::_getPathDepth(path) == config.depth) {
- return;
- }
-
- if (!path.empty()) {
- for (std::size_t i = 0; i < config.lengthOfNumericComponent; i++) {
- FieldRef arrPathRef(path);
- arrPathRef.appendPart(std::to_string(i));
- auto arrPath = arrPathRef.dottedField().toString();
- _paths.push_back(arrPath);
- _generatePaths(config, arrPath);
- }
- }
-
- if (config.fields.empty()) {
- return;
- }
-
- std::set<StringData> remainingFields(config.fields);
- for (auto field : config.fields) {
- remainingFields.erase(remainingFields.begin());
- FieldRef docPathRef(path);
- docPathRef.appendPart(field);
- auto docPath = docPathRef.dottedField().toString();
- _paths.push_back(docPath);
- UpdateSequenceGeneratorConfig remainingConfig = {remainingFields,
- config.depth,
- config.lengthOfNumericComponent,
- config.scalarProbability,
- config.docProbability,
- config.arrProbability};
- _generatePaths(remainingConfig, docPath);
- }
-}
-
-std::vector<std::string> UpdateSequenceGenerator::_getRandomPaths() const {
- std::size_t randomAmountOfArgs = this->_random.nextInt32(this->_paths.size()) + 1;
- std::vector<std::string> randomPaths;
- std::vector<std::string> validPaths(this->_paths);
-
- for (std::size_t i = 0; i < randomAmountOfArgs; i++) {
- int randomIndex = UpdateSequenceGenerator::_random.nextInt32(validPaths.size());
- std::string randomPath = validPaths[randomIndex];
- randomPaths.push_back(randomPath);
- validPaths = UpdateSequenceGenerator::_eliminatePrefixPaths(randomPath, validPaths);
- if (validPaths.empty()) {
- break;
- }
- }
-
- return randomPaths;
-}
-
-BSONObj UpdateSequenceGenerator::generateUpdate() const {
- double setSum = this->_config.scalarProbability + this->_config.arrProbability +
- this->_config.docProbability;
- double generateSetUpdate = this->_random.nextCanonicalDouble();
- if (generateSetUpdate <= setSum) {
- return _generateSet();
- } else {
- return _generateUnset();
- }
-}
-
-BSONObj UpdateSequenceGenerator::_generateSet() const {
- BSONObjBuilder setBuilder;
- {
- BSONObjBuilder setArgBuilder(setBuilder.subobjStart("$set"));
-
- for (auto randomPath : _getRandomPaths()) {
- _appendSetArgToBuilder(randomPath, &setArgBuilder);
- }
- }
- return setBuilder.obj();
-}
-
-UpdateSequenceGenerator::SetChoice UpdateSequenceGenerator::_determineWhatToSet(
- const std::string& setPath) const {
- if (UpdateSequenceGenerator::_getPathDepth(setPath) == this->_config.depth) {
- // If we have hit the max depth, we don't have a choice anyways.
- return SetChoice::kSetScalar;
- } else {
- double setSum = this->_config.scalarProbability + this->_config.arrProbability +
- this->_config.docProbability;
- double choice = this->_random.nextCanonicalDouble() * setSum;
- if (choice <= this->_config.scalarProbability) {
- return SetChoice::kSetScalar;
- } else if (choice <= setSum - this->_config.docProbability) {
- return SetChoice::kSetArr;
- } else {
- return SetChoice::kSetDoc;
- }
- }
-}
-
-void UpdateSequenceGenerator::_appendSetArgToBuilder(const std::string& setPath,
- BSONObjBuilder* setArgBuilder) const {
- auto setChoice = _determineWhatToSet(setPath);
- switch (setChoice) {
- case SetChoice::kSetScalar:
- this->_scalarGenerator->generateScalar().addToBsonObj(setArgBuilder, setPath);
- return;
- case SetChoice::kSetArr:
- setArgBuilder->append(setPath, _generateArrToSet(setPath));
- return;
- case SetChoice::kSetDoc:
- setArgBuilder->append(setPath, _generateDocToSet(setPath));
- return;
- case SetChoice::kNumTotalSetChoices:
- MONGO_UNREACHABLE;
- }
- MONGO_UNREACHABLE;
-}
-
-BSONObj UpdateSequenceGenerator::_generateUnset() const {
- BSONObjBuilder unsetBuilder;
- {
- BSONObjBuilder unsetArgBuilder(unsetBuilder.subobjStart("$unset"));
-
- for (auto randomPath : _getRandomPaths()) {
- unsetArgBuilder.appendNull(randomPath);
- }
- }
-
- return unsetBuilder.obj();
-}
-
-double UpdateSequenceGenerator::_generateNumericToSet() const {
- return UpdateSequenceGenerator::_random.nextCanonicalDouble() * INT_MAX;
-}
-
-bool UpdateSequenceGenerator::_generateBoolToSet() const {
- return this->_random.nextInt32(2) == 1;
-}
-
-BSONArray UpdateSequenceGenerator::_generateArrToSet(const std::string& setPath) const {
- auto enumerator = _getValidEnumeratorForPath(setPath);
-
- auto possibleArrs = enumerator.enumerateArrs();
- std::size_t randomIndex = this->_random.nextInt32(possibleArrs.size());
- auto chosenArr = possibleArrs[randomIndex];
-
- return chosenArr;
-}
-
-BSONObj UpdateSequenceGenerator::_generateDocToSet(const std::string& setPath) const {
- auto enumerator = _getValidEnumeratorForPath(setPath);
- std::size_t randomIndex = this->_random.nextInt32(enumerator.getDocs().size());
- return enumerator.getDocs()[randomIndex];
-}
-
-std::set<StringData> UpdateSequenceGenerator::_getRemainingFields(const std::string& path) const {
- std::set<StringData> remainingFields(this->_config.fields);
-
- FieldRef pathRef(path);
- StringData lastField;
- // This is guaranteed to terminate with a value for lastField, since no valid path contains only
- // array positions (numbers).
- for (int i = pathRef.numParts() - 1; i >= 0; i--) {
- auto field = pathRef.getPart(i);
- if (this->_config.fields.find(field) != this->_config.fields.end()) {
- lastField = field;
- break;
- }
- }
-
- // The last alphabetic field used must be after all other alphabetic fields that could ever be
- // used, since the fields that are used are selected in the order that they pop off from a
- // std::set.
- for (auto field : this->_config.fields) {
- remainingFields.erase(field);
- if (field == lastField) {
- break;
- }
- }
-
- return remainingFields;
-}
-
-DocumentStructureEnumerator UpdateSequenceGenerator::_getValidEnumeratorForPath(
- const std::string& path) const {
- auto remainingFields = _getRemainingFields(path);
- std::size_t remainingDepth = this->_config.depth - UpdateSequenceGenerator::_getPathDepth(path);
- if (remainingDepth > 0) {
- remainingDepth -= 1;
- }
-
- DocumentStructureEnumerator enumerator(
- {remainingFields, remainingDepth, this->_config.lengthOfNumericComponent},
- this->_scalarGenerator);
- return enumerator;
-}
-
-std::vector<std::string> UpdateSequenceGenerator::getPaths() const {
- return this->_paths;
-}
-
-UpdateSequenceGenerator::UpdateSequenceGenerator(UpdateSequenceGeneratorConfig config,
- PseudoRandom random,
- ScalarGenerator* scalarGenerator)
- : _config(std::move(config)), _random(random), _scalarGenerator(scalarGenerator) {
- auto path = "";
- _generatePaths(config, path);
- // Creates the same shuffle each time, but we don't care. We want to mess up the DFS ordering.
- std::shuffle(this->_paths.begin(), this->_paths.end(), this->_random.urbg());
-}
-
-} // namespace mongo
diff --git a/src/mongo/db/repl/idempotency_update_sequence.h b/src/mongo/db/repl/idempotency_update_sequence.h
deleted file mode 100644
index 2e98d5221f8..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include <cstddef>
-#include <set>
-#include <string>
-#include <vector>
-
-#include "mongo/base/string_data.h"
-#include "mongo/db/repl/idempotency_scalar_generator.h"
-#include "mongo/platform/random.h"
-
-namespace mongo {
-
-class DocumentStructureEnumerator;
-class BSONObj;
-struct BSONArray;
-class BSONObjBuilder;
-
-struct UpdateSequenceGeneratorConfig {
- UpdateSequenceGeneratorConfig(std::set<StringData> fields_,
- std::size_t depth_,
- std::size_t lengthOfNumericComponent_,
- double scalarProbability_ = 0.250,
- double docProbability_ = 0.250,
- double arrProbability_ = 0.250);
-
- const std::set<StringData> fields = {};
- const std::size_t depth = 0;
- const std::size_t lengthOfNumericComponent = 0;
- const double scalarProbability = 0.250;
- const double docProbability = 0.250;
- const double arrProbability = 0.250;
-};
-
-class UpdateSequenceGenerator {
-
-public:
- UpdateSequenceGenerator(UpdateSequenceGeneratorConfig config,
- PseudoRandom random,
- ScalarGenerator* scalarGenerator);
-
- BSONObj generateUpdate() const;
-
- std::vector<std::string> getPaths() const;
-
- friend std::vector<std::string> eliminatePrefixPaths_forTest(
- const std::string& path, const std::vector<std::string>& paths);
-
- friend std::size_t getPathDepth_forTest(const std::string& path);
-
-private:
- enum class SetChoice : int { kSetScalar, kSetArr, kSetDoc, kNumTotalSetChoices = 3 };
-
- static std::size_t _getPathDepth(const std::string& path);
-
- /**
- * Given a path parameter, removes all paths from a copy of the given path vector that are:
- * 1) A prefix of the given path
- * 2) Prefixable by the given path.
- *
- * This function also removes the given path itself from the given path vector, if it exists
- * inside, since a path can prefix itself and therefore qualifies for both #1 and #2 above.
- *
- * A copy of the given path vector is returned after this pruning finishes.
- */
- static std::vector<std::string> _eliminatePrefixPaths(const std::string& path,
- const std::vector<std::string>& paths);
-
- void _generatePaths(const UpdateSequenceGeneratorConfig& config, const std::string& path);
-
- std::set<StringData> _getRemainingFields(const std::string& path) const;
-
- DocumentStructureEnumerator _getValidEnumeratorForPath(const std::string& path) const;
-
- std::vector<std::string> _getRandomPaths() const;
-
- BSONObj _generateSet() const;
-
- SetChoice _determineWhatToSet(const std::string& setPath) const;
-
- void _appendSetArgToBuilder(const std::string& setPath, BSONObjBuilder* setArgBuilder) const;
-
- BSONObj _generateUnset() const;
-
- double _generateNumericToSet() const;
-
- bool _generateBoolToSet() const;
-
- BSONArray _generateArrToSet(const std::string& setPath) const;
-
- BSONObj _generateDocToSet(const std::string& setPath) const;
-
- std::vector<std::string> _paths;
- const UpdateSequenceGeneratorConfig _config;
- mutable PseudoRandom _random;
- const ScalarGenerator* _scalarGenerator;
-};
-
-} // namespace mongo
diff --git a/src/mongo/db/repl/idempotency_update_sequence_test.cpp b/src/mongo/db/repl/idempotency_update_sequence_test.cpp
deleted file mode 100644
index d46607f194b..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence_test.cpp
+++ /dev/null
@@ -1,318 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include <algorithm>
-#include <memory>
-
-#include "mongo/db/field_ref.h"
-#include "mongo/db/field_ref_set.h"
-#include "mongo/db/repl/idempotency_document_structure.h"
-#include "mongo/db/repl/idempotency_update_sequence.h"
-#include "mongo/unittest/unittest.h"
-
-namespace mongo {
-
-std::vector<std::string> eliminatePrefixPaths_forTest(const std::string& path,
- const std::vector<std::string>& paths) {
- return UpdateSequenceGenerator::_eliminatePrefixPaths(path, paths);
-}
-
-size_t getPathDepth_forTest(const std::string& path) {
- return UpdateSequenceGenerator::_getPathDepth(path);
-}
-
-namespace {
-
-PseudoRandom random(SecureRandom().nextInt64());
-
-TEST(UpdateGenTest, FindsAllPaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
-
- ASSERT_EQ(generator.getPaths().size(), 5U);
-
- std::vector<std::string> expectedPaths{"a", "a.0", "a.b", "b", "b.0"};
- std::vector<std::string> foundPaths(generator.getPaths());
- std::sort(expectedPaths.begin(), expectedPaths.end());
- std::sort(foundPaths.begin(), foundPaths.end());
- if (foundPaths != expectedPaths) {
- StringBuilder sb;
- sb << "Did not find all paths. Instead, we found: [ ";
- bool firstIter = true;
- for (auto path : foundPaths) {
- if (!firstIter) {
- sb << ", ";
- } else {
- firstIter = false;
- }
- sb << path;
- }
- sb << " ]; ";
- FAIL(sb.str());
- }
-}
-
-TEST(UpdateGenTest, NoDuplicatePaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 2;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
-
- auto paths = generator.getPaths();
- for (size_t i = 0; i < paths.size(); i++) {
- for (size_t j = i + 1; j < paths.size(); j++) {
- if (paths[i] == paths[j]) {
- StringBuilder sb;
- sb << "Outer path matched with inner path.";
- sb << generator.getPaths()[i] << " was duplicated.";
- FAIL(sb.str());
- }
- }
- }
-}
-
-TEST(UpdateGenTest, UpdatesHaveValidPaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
- auto update = generator.generateUpdate();
-
- BSONObj updateArg;
- if (auto setElem = update["$set"]) {
- updateArg = setElem.Obj();
- } else if (auto unsetElem = update["$unset"]) {
- updateArg = unsetElem.Obj();
- } else {
- StringBuilder sb;
- sb << "The generated update is not a $set or $unset BSONObj: " << update;
- FAIL(sb.str());
- }
-
- auto argPaths = updateArg.getFieldNames<std::set<std::string>>();
- std::set<std::string> correctPaths{"a", "b", "a.0", "a.b", "b.0"};
- for (auto path : argPaths) {
- FieldRef pathRef(path);
- StringBuilder sb;
- if (path[0] == '0' || path[0] == '1') {
- sb << "Some path (" << path << "), found in the (un)set arguments from the update "
- << update << " contains a leading array position. ";
- FAIL(sb.str());
- }
- if (correctPaths.find(path) == correctPaths.end()) {
- sb << "Some path (" << path << "), found in the (un)set arguments from the update "
- << update << " contains an invalid fieldname(s). ";
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, UpdatesAreNotAmbiguous) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
- auto update = generator.generateUpdate();
-
- BSONObj updateArg;
- if (auto setElem = update["$set"]) {
- updateArg = setElem.Obj();
- } else if (auto unsetElem = update["$unset"]) {
- updateArg = unsetElem.Obj();
- } else {
- StringBuilder sb;
- sb << "The generated update is not a $set or $unset BSONObj: " << update;
- FAIL(sb.str());
- }
- auto argPathsSet = updateArg.getFieldNames<std::set<std::string>>();
-
- std::vector<std::unique_ptr<FieldRef>> argPathsRefVec;
- FieldRefSet pathRefSet;
- for (auto path : argPathsSet) {
- argPathsRefVec.push_back(std::make_unique<FieldRef>(path));
- const FieldRef* conflict;
- if (!pathRefSet.insert(argPathsRefVec.back().get(), &conflict)) {
- StringBuilder sb;
- sb << "Some path in the (un)set arguments of " << update
- << " causes ambiguity due to a conflict between "
- << argPathsRefVec.back()->dottedField() << " and " << conflict->dottedField();
- FAIL(sb.str());
- }
- }
-}
-
-std::size_t getMaxDepth(BSONObj obj) {
- size_t curMaxDepth = 0;
- for (auto elem : obj) {
- if (elem.type() == BSONType::Object || elem.type() == BSONType::Array) {
- curMaxDepth = std::max(curMaxDepth, 1 + getMaxDepth(elem.Obj()));
- }
- }
-
- return curMaxDepth;
-}
-
-TEST(UpdateGenTest, UpdatesPreserveDepthConstraint) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator(
- {fields, depth, length, 0.333, 0.333, 0.334}, random, &trivialScalarGenerator);
-
- BSONElement setElem;
- BSONObj update;
- // Because our probabilities sum to 1, we are guaranteed to always get a $set.
- update = generator.generateUpdate();
- setElem = update["$set"];
- BSONObj updateArg = setElem.Obj();
-
- auto argPaths = updateArg.getFieldNames<std::set<std::string>>();
- for (auto path : argPaths) {
- auto pathDepth = getPathDepth_forTest(path);
- auto particularSetArgument = updateArg[path];
- auto embeddedArgDepth = 0;
- if (particularSetArgument.type() == BSONType::Object ||
- particularSetArgument.type() == BSONType::Array) {
- embeddedArgDepth = getMaxDepth(particularSetArgument.Obj()) + 1;
- }
-
- auto argDepth = pathDepth + embeddedArgDepth;
- if (argDepth > depth) {
- StringBuilder sb;
- sb << "The path " << path << " and its argument " << particularSetArgument
- << " exceeds the maximum depth.";
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlyGenerateUnset) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNoSet(
- {fields, depth, length, 0.0, 0.0, 0.0}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNoSet.generateUpdate();
- if (!update["$unset"]) {
- StringBuilder sb;
- sb << "Generator created an update that was not an $unset, even though the probability "
- "of doing so is zero: "
- << update;
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlySetUpdatesWithScalarValue) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNoUnsetAndOnlyScalar(
- {fields, depth, length, 1.0, 0.0, 0.0}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNoUnsetAndOnlyScalar.generateUpdate();
- if (!update["$set"]) {
- StringBuilder sb;
- sb << "Generator created an update that was not an $set, even though the probability "
- "of doing so is zero: "
- << update;
- FAIL(sb.str());
- } else if (getMaxDepth(update["$set"].Obj()) != 0) {
- StringBuilder sb;
- sb << "Generator created an update that had a nonscalar value, because it's maximum "
- "depth was nonzero: "
- << update;
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlySetUpdatesWithScalarsAtMaxDepth) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNeverScalar(
- {fields, depth, length, 0.0, 0.5, 0.5}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNeverScalar.generateUpdate();
- for (auto elem : update["$set"].Obj()) {
- StringData fieldName = elem.fieldNameStringData();
- FieldRef fieldRef(fieldName);
- size_t pathDepth = getPathDepth_forTest(fieldName.toString());
- bool isDocOrArr = elem.type() == BSONType::Object || elem.type() == BSONType::Array;
- if (pathDepth != depth) {
- // If the path is not equal to the max depth we provided above, then there
- // should
- // only be an array or doc at this point.
- if (!isDocOrArr) {
- StringBuilder sb;
- sb << "The set argument: " << elem
- << " is a scalar, but the probability of a scalar occuring for a path that "
- "does not meet the maximum depth is zero.";
- FAIL(sb.str());
- }
- } else {
- if (isDocOrArr) {
- StringBuilder sb;
- sb << "The set argument: " << elem
- << " is not scalar, however, this path reaches the maximum depth so a "
- "scalar should be the only choice.";
- FAIL(sb.str());
- }
- }
- }
- }
-}
-
-} // namespace
-} // namespace mongo
diff --git a/src/mongo/db/repl/oplog_applier_impl_test.cpp b/src/mongo/db/repl/oplog_applier_impl_test.cpp
index b734004bb28..34e5996f887 100644
--- a/src/mongo/db/repl/oplog_applier_impl_test.cpp
+++ b/src/mongo/db/repl/oplog_applier_impl_test.cpp
@@ -67,6 +67,7 @@
#include "mongo/db/session_txn_record_gen.h"
#include "mongo/db/stats/counters.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/idl/server_parameter_test_util.h"
#include "mongo/platform/mutex.h"
#include "mongo/unittest/death_test.h"
@@ -308,8 +309,12 @@ TEST_F(OplogApplierImplTestDisableSteadyStateConstraints,
applyOplogEntryOrGroupedInsertsUpdateMissingDocument) {
const NamespaceString nss("test.t");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto op = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto op = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
int prevUpdateOnMissingDoc = replOpCounters.getUpdateOnMissingDoc()->load();
_testApplyOplogEntryOrGroupedInsertsCrudOperation(ErrorCodes::OK, op, true);
auto postUpdateOnMissingDoc = replOpCounters.getUpdateOnMissingDoc()->load();
@@ -326,8 +331,12 @@ TEST_F(OplogApplierImplTestEnableSteadyStateConstraints,
applyOplogEntryOrGroupedInsertsUpdateMissingDocument) {
const NamespaceString nss("test.t");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto op = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto op = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
_testApplyOplogEntryOrGroupedInsertsCrudOperation(ErrorCodes::UpdateOperationFailed, op, false);
}
@@ -446,8 +455,10 @@ TEST_F(OplogApplierImplTest, applyOplogEntryToRecordChangeStreamPreImages) {
testCase.opType,
nss,
options.uuid,
- testCase.opType == repl::OpTypeEnum::kUpdate ? BSON("$set" << BSON("a" << 1))
- : documentId,
+ testCase.opType == repl::OpTypeEnum::kUpdate
+ ? update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}")))
+ : documentId,
{documentId},
testCase.fromMigrate);
@@ -2221,7 +2232,9 @@ TEST_F(IdempotencyTest, Geo2dsphereIndexFailedOnUpdate) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, loc: 'hi'}"));
- auto updateOp = update(1, fromjson("{$set: {loc: [1, 2]}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{loc: [1, 2]}"))));
auto indexOp =
buildIndex(fromjson("{loc: '2dsphere'}"), BSON("2dsphereIndexVersion" << 3), kUuid);
@@ -2251,7 +2264,9 @@ TEST_F(IdempotencyTest, Geo2dIndex) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, loc: [1]}"));
- auto updateOp = update(1, fromjson("{$set: {loc: [1, 2]}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{loc: [1, 2]}"))));
auto indexOp = buildIndex(fromjson("{loc: '2d'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2267,7 +2282,9 @@ TEST_F(IdempotencyTest, UniqueKeyIndex) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 5}"));
- auto updateOp = update(1, fromjson("{$set: {x: 6}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 6}"))));
auto insertOp2 = insert(fromjson("{_id: 2, x: 5}"));
auto indexOp = buildIndex(fromjson("{x: 1}"), fromjson("{unique: true}"), kUuid);
@@ -2286,9 +2303,16 @@ TEST_F(IdempotencyTest, ParallelArrayError) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
ASSERT_OK(runOpInitialSync(insert(fromjson("{_id: 1}"))));
- auto updateOp1 = update(1, fromjson("{$set: {x: [1, 2]}}"));
- auto updateOp2 = update(1, fromjson("{$set: {x: 1}}"));
- auto updateOp3 = update(1, fromjson("{$set: {y: [3, 4]}}"));
+ auto updateOp1 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{x: [1, 2]}"))));
+ auto updateOp2 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 1}"))));
+ auto updateOp3 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{y: [3, 4]}"))));
+
auto indexOp = buildIndex(fromjson("{x: 1, y: 1}"), BSONObj(), kUuid);
auto ops = {updateOp1, updateOp2, updateOp3, indexOp};
@@ -2322,7 +2346,10 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasNonStringLanguageField) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', language: 1}"));
- auto updateOp = update(1, fromjson("{$unset: {language: 1}}"));
+ auto updateOp =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{language: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2352,7 +2379,9 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasNonStringLanguageOverrideField) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', y: 1}"));
- auto updateOp = update(1, fromjson("{$unset: {y: 1}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{y: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), fromjson("{language_override: 'y'}"), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2382,7 +2411,10 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasUnknownLanguage) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', language: 'bad'}"));
- auto updateOp = update(1, fromjson("{$unset: {language: 1}}"));
+ auto updateOp =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{language: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2441,7 +2473,9 @@ TEST_F(IdempotencyTest, CreateCollectionWithCollation) {
auto createColl = makeCreateCollectionOplogEntry(nextOpTime(), nss, options);
auto insertOp1 = insert(fromjson("{ _id: 'foo' }"));
auto insertOp2 = insert(fromjson("{ _id: 'Foo', x: 1 }"));
- auto updateOp = update("foo", BSON("$set" << BSON("x" << 2)));
+ auto updateOp = update("foo",
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 2}"))));
// We don't drop and re-create the collection since we don't have a way to wait
// for the second-phase drop to finish completely.
@@ -2888,13 +2922,15 @@ TEST_F(OplogApplierImplTxnTableTest, InterleavedWriteWithTxnMixedWithDirectUpdat
date);
repl::OpTime newWriteOpTime(Timestamp(2, 0), 1);
- auto updateOp = makeOplogEntry(NamespaceString::kSessionTransactionsTableNamespace,
- {Timestamp(4, 0), 1},
- repl::OpTypeEnum::kUpdate,
- BSON("$set" << BSON("lastWriteOpTime" << newWriteOpTime)),
- BSON("_id" << sessionInfo.getSessionId()->toBSON()),
- {},
- Date_t::now());
+ auto updateOp = makeOplogEntry(
+ NamespaceString::kSessionTransactionsTableNamespace,
+ {Timestamp(4, 0), 1},
+ repl::OpTypeEnum::kUpdate,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("lastWriteOpTime" << newWriteOpTime))),
+ BSON("_id" << sessionInfo.getSessionId()->toBSON()),
+ {},
+ Date_t::now());
auto writerPool = makeReplWriterPool();
NoopOplogApplierObserver observer;
@@ -3292,10 +3328,16 @@ TEST_F(IdempotencyTest, UpdateTwoFields) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
ASSERT_OK(runOpInitialSync(insert(fromjson("{_id: 1, y: [0]}"))));
-
- auto updateOp1 = update(1, fromjson("{$set: {x: 1}}"));
- auto updateOp2 = update(1, fromjson("{$set: {x: 2, 'y.0': 2}}"));
- auto updateOp3 = update(1, fromjson("{$set: {y: 3}}"));
+ auto updateOp1 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 1}"))));
+ auto updateOp2 =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 2, 'y.0': 2}"))));
+ auto updateOp3 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{y: 3}"))));
auto ops = {updateOp1, updateOp2, updateOp3};
testOpsAreIdempotent(ops);
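
[Reviewer note, not part of this patch: every conversion above follows the same pattern — a classic modifier-style oplog body is replaced by a $v:2 delta whose update ("u") section carries the new field values, and the $unset cases use the delete ("d") section with {field: false}. A minimal sketch of the $set case, assuming the mongo source tree and an indicative include set:]

    #include "mongo/bson/bsonmisc.h"
    #include "mongo/bson/json.h"
    #include "mongo/db/update/update_oplog_entry_serialization.h"

    namespace mongo {
    // Builds the delta equivalent of {$set: {x: 6}}; serializes as {$v: 2, diff: {u: {x: 6}}}.
    BSONObj makeSetXDelta() {
        return update_oplog_entry::makeDeltaOplogEntry(
            BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 6}")));
    }
    }  // namespace mongo
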
diff --git a/src/mongo/db/repl/replication_recovery_test.cpp b/src/mongo/db/repl/replication_recovery_test.cpp
index 40faf40552a..5d7bc3264df 100644
--- a/src/mongo/db/repl/replication_recovery_test.cpp
+++ b/src/mongo/db/repl/replication_recovery_test.cpp
@@ -51,6 +51,7 @@
#include "mongo/db/storage/storage_options.h"
#include "mongo/db/storage/storage_parameters_gen.h"
#include "mongo/db/transaction_participant.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/unittest/death_test.h"
#include "mongo/unittest/log_test.h"
#include "mongo/unittest/unittest.h"
@@ -829,7 +830,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 1), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -845,7 +849,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 2), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -861,7 +868,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 3), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 3),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -886,7 +896,10 @@ DEATH_TEST_F(ReplicationRecoveryTest, RecoveryFailsWithBadOp, "terminate() calle
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(2, BSON("bad_op" << 1), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(2,
+ BSON("bad_op" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(2, 2)},
diff --git a/src/mongo/db/repl/session_update_tracker.cpp b/src/mongo/db/repl/session_update_tracker.cpp
index 4f695ae7630..175c8ffc07e 100644
--- a/src/mongo/db/repl/session_update_tracker.cpp
+++ b/src/mongo/db/repl/session_update_tracker.cpp
@@ -38,6 +38,7 @@
#include "mongo/db/session.h"
#include "mongo/db/session_txn_record_gen.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/logv2/log.h"
#include "mongo/util/assert_util.h"
@@ -343,9 +344,10 @@ boost::optional<OplogEntry> SessionUpdateTracker::_createTransactionTableUpdateF
// The prepare oplog entry is the first operation of the transaction.
newTxnRecord.setStartOpTime(entry.getOpTime());
} else {
- // Update the transaction record using $set to avoid overwriting the
- // startOpTime.
- return BSON("$set" << newTxnRecord.toBSON());
+ // Update the transaction record using a delta oplog entry to avoid
+ // overwriting the startOpTime.
+ return update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << newTxnRecord.toBSON()));
}
} else {
newTxnRecord.setState(DurableTxnStateEnum::kCommitted);
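
[Reviewer note, not part of this patch: the object returned here now has the shape {$v: 2, diff: {u: <txn record fields>}}, so applying it only touches the fields named in the record; startOpTime, which the comment is protecting, is not named in the diff and therefore survives, matching the old $set semantics. A hypothetical helper mirroring the return statement above, assuming the surrounding headers:]

    // Sketch only: builds e.g. {$v: 2, diff: {u: {_id: ..., txnNum: ..., lastWriteOpTime: ..., state: ...}}}.
    BSONObj exampleTxnTableDelta(const SessionTxnRecord& newTxnRecord) {
        return update_oplog_entry::makeDeltaOplogEntry(
            BSON(doc_diff::kUpdateSectionFieldName << newTxnRecord.toBSON()));
    }
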
diff --git a/src/mongo/db/repl/storage_timestamp_test.cpp b/src/mongo/db/repl/storage_timestamp_test.cpp
index fb9325c1978..2ec52d2ffec 100644
--- a/src/mongo/db/repl/storage_timestamp_test.cpp
+++ b/src/mongo/db/repl/storage_timestamp_test.cpp
@@ -87,6 +87,7 @@
#include "mongo/db/storage/storage_engine_impl.h"
#include "mongo/db/transaction_participant.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/db/vector_clock_mutable.h"
#include "mongo/dbtests/dbtests.h"
#include "mongo/idl/server_parameter_test_util.h"
@@ -987,17 +988,34 @@ TEST_F(StorageTimestampTest, SecondaryUpdateTimes) {
// clock. `pair.first` is the update to perform and `pair.second` is the full value of the
// document after the transformation.
const std::vector<std::pair<BSONObj, BSONObj>> updates = {
- {BSON("$set" << BSON("val" << 1)), BSON("_id" << 0 << "val" << 1)},
- {BSON("$unset" << BSON("val" << 1)), BSON("_id" << 0)},
- {BSON("$addToSet" << BSON("theSet" << 1)), BSON("_id" << 0 << "theSet" << BSON_ARRAY(1))},
- {BSON("$addToSet" << BSON("theSet" << 2)),
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{val: 1}"))),
+ BSON("_id" << 0 << "val" << 1)},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{val: false}"))),
+ BSON("_id" << 0)},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{theSet: [1]}"))),
+ BSON("_id" << 0 << "theSet" << BSON_ARRAY(1))},
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(
+ "stheSet" << BSON(doc_diff::kArrayHeader << true << doc_diff::kResizeSectionFieldName
+ << 2 << "u1" << 2))),
BSON("_id" << 0 << "theSet" << BSON_ARRAY(1 << 2))},
- {BSON("$pull" << BSON("theSet" << 1)), BSON("_id" << 0 << "theSet" << BSON_ARRAY(2))},
- {BSON("$pull" << BSON("theSet" << 2)), BSON("_id" << 0 << "theSet" << BSONArray())},
- {BSON("$set" << BSON("theMap.val" << 1)),
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(
+ "stheSet" << BSON(doc_diff::kArrayHeader << true << doc_diff::kResizeSectionFieldName
+ << 1 << "u0" << 2))),
+ BSON("_id" << 0 << "theSet" << BSON_ARRAY(2))},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON("stheSet" << BSON(doc_diff::kArrayHeader
+ << true << doc_diff::kResizeSectionFieldName << 0))),
+ BSON("_id" << 0 << "theSet" << BSONArray())},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{theMap: {val: 1}}"))),
BSON("_id" << 0 << "theSet" << BSONArray() << "theMap" << BSON("val" << 1))},
- {BSON("$rename" << BSON("theSet"
- << "theOtherSet")),
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(doc_diff::kDeleteSectionFieldName
+ << fromjson("{theSet: false}")
+ << doc_diff::kUpdateSectionFieldName
+ << fromjson("{theOtherSet: []}"))),
BSON("_id" << 0 << "theMap" << BSON("val" << 1) << "theOtherSet" << BSONArray())}};
const LogicalTime firstUpdateTime = _clock->tickClusterTime(updates.size());
@@ -1544,9 +1562,9 @@ TEST_F(StorageTimestampTest, SecondarySetWildcardIndexMultikeyOnUpdate) {
const LogicalTime updateTime1 = _clock->tickClusterTime(1);
const LogicalTime updateTime2 = _clock->tickClusterTime(1);
- BSONObj doc0 = BSON("_id" << 0 << "a" << 3);
- BSONObj doc1 = BSON("$v" << 1 << "$set" << BSON("a" << BSON_ARRAY(1 << 2)));
- BSONObj doc2 = BSON("$v" << 1 << "$set" << BSON("a" << BSON_ARRAY(1 << 2)));
+ BSONObj doc0 = fromjson("{_id: 0, a: 3}");
+ BSONObj doc1 = fromjson("{$v: 2, diff: {u: {a: [1,2]}}}");
+ BSONObj doc2 = fromjson("{$v: 2, diff: {u: {a: [1,2]}}}");
auto op0 = repl::OplogEntry(
BSON("ts" << insertTime0.asTimestamp() << "t" << 1LL << "v" << 2 << "op"
<< "i"
diff --git a/src/mongo/db/repl/tenant_oplog_applier_test.cpp b/src/mongo/db/repl/tenant_oplog_applier_test.cpp
index 864960d84d7..bdafadb1fb1 100644
--- a/src/mongo/db/repl/tenant_oplog_applier_test.cpp
+++ b/src/mongo/db/repl/tenant_oplog_applier_test.cpp
@@ -27,7 +27,6 @@
* it in the license file.
*/
-
#include "mongo/platform/basic.h"
#include <algorithm>
@@ -52,13 +51,13 @@
#include "mongo/db/service_context_test_fixture.h"
#include "mongo/db/session_catalog_mongod.h"
#include "mongo/db/tenant_id.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/executor/thread_pool_task_executor_test_fixture.h"
#include "mongo/logv2/log.h"
#include "mongo/unittest/log_test.h"
#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kTest
-
namespace mongo {
using executor::TaskExecutor;
@@ -662,8 +661,12 @@ TEST_F(TenantOplogApplierTest, ApplyInserts_Grouped) {
TEST_F(TenantOplogApplierTest, ApplyUpdate_MissingDocument) {
NamespaceString nss(_dbName.toStringWithTenantId(), "bar");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto entry = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto entry = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
bool onInsertsCalled = false;
bool onUpdateCalled = false;
_opObserver->onInsertsFn = [&](OperationContext* opCtx,
@@ -698,8 +701,12 @@ TEST_F(TenantOplogApplierTest, ApplyUpdate_Success) {
NamespaceString nss(_dbName.toStringWithTenantId(), "bar");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
ASSERT_OK(getStorageInterface()->insertDocument(_opCtx.get(), nss, {BSON("_id" << 0)}, 0));
- auto entry = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto entry = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
bool onUpdateCalled = false;
_opObserver->onUpdateFn = [&](OperationContext* opCtx, const OplogUpdateEntryArgs& args) {
onUpdateCalled = true;
diff --git a/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp b/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
index d2313684ff9..4e6c32ac1e2 100644
--- a/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
+++ b/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
@@ -48,6 +48,7 @@
#include "mongo/db/s/sharding_state.h"
#include "mongo/db/session_catalog_mongod.h"
#include "mongo/db/transaction_participant.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/db/vector_clock_metadata_hook.h"
#include "mongo/executor/network_interface_factory.h"
#include "mongo/executor/thread_pool_task_executor_test_fixture.h"
@@ -398,7 +399,8 @@ TEST_F(ReshardingOplogApplierTest, ApplyBasicCrud) {
boost::none));
crudOps.push_back(makeOplog(repl::OpTime(Timestamp(7, 3), 1),
repl::OpTypeEnum::kUpdate,
- BSON("$set" << BSON("x" << 1)),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))),
BSON("_id" << 2)));
crudOps.push_back(makeOplog(repl::OpTime(Timestamp(8, 3), 1),
repl::OpTypeEnum::kDelete,
@@ -536,7 +538,7 @@ TEST_F(ReshardingOplogApplierTest, ErrorDuringFirstBatchApply) {
auto cancelToken = operationContext()->getCancellationToken();
CancelableOperationContextFactory factory(cancelToken, getCancelableOpCtxExecutor());
auto future = applier->run(getExecutor(), getExecutor(), cancelToken, factory);
- ASSERT_EQ(future.getNoThrow(), ErrorCodes::FailedToParse);
+ ASSERT_EQ(future.getNoThrow(), ErrorCodes::duplicateCodeForTest(4772600));
DBDirectClient client(operationContext());
auto doc = client.findOne(appliedToNs(), BSON("_id" << 1));
@@ -579,7 +581,7 @@ TEST_F(ReshardingOplogApplierTest, ErrorDuringSecondBatchApply) {
auto cancelToken = operationContext()->getCancellationToken();
CancelableOperationContextFactory factory(cancelToken, getCancelableOpCtxExecutor());
auto future = applier->run(getExecutor(), getExecutor(), cancelToken, factory);
- ASSERT_EQ(future.getNoThrow(), ErrorCodes::FailedToParse);
+ ASSERT_EQ(future.getNoThrow(), ErrorCodes::duplicateCodeForTest(4772600));
DBDirectClient client(operationContext());
auto doc = client.findOne(appliedToNs(), BSON("_id" << 1));
@@ -834,7 +836,11 @@ TEST_F(ReshardingOplogApplierTest, MetricsAreReported) {
std::deque<repl::OplogEntry>{
easyOp(5, OpT::kDelete, BSON("_id" << 1)),
easyOp(6, OpT::kInsert, BSON("_id" << 2)),
- easyOp(7, OpT::kUpdate, BSON("$set" << BSON("x" << 1)), BSON("_id" << 2)),
+ easyOp(7,
+ OpT::kUpdate,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))),
+ BSON("_id" << 2)),
easyOp(8, OpT::kDelete, BSON("_id" << 1)),
easyOp(9, OpT::kInsert, BSON("_id" << 3))},
2);
diff --git a/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp b/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
index 9c09f5ebcf0..19fb3864915 100644
--- a/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
+++ b/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
@@ -47,6 +47,7 @@
#include "mongo/db/s/sharding_state.h"
#include "mongo/db/service_context_d_test_fixture.h"
#include "mongo/db/session_catalog_mongod.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/s/catalog/type_chunk.h"
#include "mongo/s/chunk_manager.h"
#include "mongo/unittest/unittest.h"
@@ -456,7 +457,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpModifiesStashCollectionAfterI
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 0), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 0),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// We should have applied rule #1 and updated the document with {_id: 0} in the stash collection
@@ -488,7 +492,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpIsNoopWhenDifferentOwningDono
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 0), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 0),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// The document {_id: 0, sk: -1} that exists in the output collection does not belong to this
@@ -504,7 +511,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpIsNoopWhenDifferentOwningDono
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 2), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// There does not exist a document with {_id: 2} in the output collection, so we should have
@@ -535,9 +545,15 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpModifiesOutputCollection) {
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 1), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 2), BSON("$set" << BSON("x" << 2)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 2))))));
}
// We should have updated both documents in the output collection to include the new field "x".
diff --git a/src/mongo/db/update/SConscript b/src/mongo/db/update/SConscript
index f08b7cd6553..0fed04974ab 100644
--- a/src/mongo/db/update/SConscript
+++ b/src/mongo/db/update/SConscript
@@ -26,7 +26,6 @@ env.Library(
'field_checker.cpp',
'path_support.cpp',
'storage_validation.cpp',
- 'v1_log_builder.cpp',
'v2_log_builder.cpp',
'update_oplog_entry_serialization.cpp',
],
@@ -158,7 +157,6 @@ env.CppUnitTest(
'update_driver_test.cpp',
'update_object_node_test.cpp',
'update_serialization_test.cpp',
- 'v1_log_builder_test.cpp',
'v2_log_builder_test.cpp',
],
LIBDEPS=[
diff --git a/src/mongo/db/update/addtoset_node_test.cpp b/src/mongo/db/update/addtoset_node_test.cpp
index d2b7f90be65..c09d1eb71e5 100644
--- a/src/mongo/db/update/addtoset_node_test.cpp
+++ b/src/mongo/db/update/addtoset_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using AddToSetNodeTest = UpdateNodeTest;
+using AddToSetNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -136,7 +136,7 @@ TEST_F(AddToSetNodeTest, ApplyNonEach) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -156,8 +156,7 @@ TEST_F(AddToSetNodeTest, ApplyNonEachArray) {
ASSERT_EQUALS(fromjson("{a: [0, [1]]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, [1]]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, [1]]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, [1]]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -176,8 +175,7 @@ TEST_F(AddToSetNodeTest, ApplyEach) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -196,7 +194,7 @@ TEST_F(AddToSetNodeTest, ApplyToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -215,7 +213,7 @@ TEST_F(AddToSetNodeTest, ApplyDeduplicateElementsToAdd) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -235,7 +233,7 @@ TEST_F(AddToSetNodeTest, ApplyDoNotAddExistingElements) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -255,8 +253,7 @@ TEST_F(AddToSetNodeTest, ApplyDoNotDeduplicateExistingElements) {
ASSERT_EQUALS(fromjson("{a: [0, 0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 0, 1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -313,7 +310,7 @@ TEST_F(AddToSetNodeTest, ApplyCreateArray) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {i: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -332,7 +329,7 @@ TEST_F(AddToSetNodeTest, ApplyCreateEmptyArrayIsNotNoop) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {i: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: []}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -354,8 +351,7 @@ TEST_F(AddToSetNodeTest, ApplyDeduplicationOfElementsToAddRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: ['abc', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['abc', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -377,8 +373,7 @@ TEST_F(AddToSetNodeTest, ApplyComparisonToExistingElementsRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: ['ABC', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['ABC', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['ABC', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['ABC', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -401,8 +396,7 @@ TEST_F(AddToSetNodeTest, ApplyRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: ['abc', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['abc', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -447,8 +441,7 @@ TEST_F(AddToSetNodeTest, ApplyNestedArray) {
ASSERT_EQUALS(fromjson("{ _id : 1, a : [1, [1]] }"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{ $set : { 'a.1' : [1] } }"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: [1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -467,7 +460,7 @@ TEST_F(AddToSetNodeTest, ApplyIndexesNotAffected) {
ASSERT_FALSE(result.indexesAffected);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
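
[Reviewer note, not part of this patch: the expected entries above distinguish "u" (an update to a field that already existed in the pre-image, e.g. ApplyNonEach) from "i" (a field the update creates, e.g. ApplyCreateArray). Assuming the parallel doc_diff constant for the insert section (kInsertSectionFieldName), the two shapes come from:]

    // {$v: 2, diff: {u: {a: [0, 1]}}} -- field "a" existed before the update.
    BSONObj updatedExisting = update_oplog_entry::makeDeltaOplogEntry(
        BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: [0, 1]}")));
    // {$v: 2, diff: {i: {a: [0, 1]}}} -- field "a" was newly created.
    BSONObj insertedNew = update_oplog_entry::makeDeltaOplogEntry(
        BSON(doc_diff::kInsertSectionFieldName << fromjson("{a: [0, 1]}")));
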
diff --git a/src/mongo/db/update/arithmetic_node_test.cpp b/src/mongo/db/update/arithmetic_node_test.cpp
index 3cb73d8f8e7..9aeb06c5b88 100644
--- a/src/mongo/db/update/arithmetic_node_test.cpp
+++ b/src/mongo/db/update/arithmetic_node_test.cpp
@@ -42,9 +42,7 @@
namespace mongo {
namespace {
-using ArithmeticNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using ArithmeticNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(ArithmeticNodeTest,
InitFailsForEmptyElement,
@@ -185,7 +183,7 @@ TEST_F(ArithmeticNodeTest, ApplyEmptyPathToCreate) {
ASSERT_EQUALS(fromjson("{a: 11}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 11}}"), fromjson("{$v: 2, diff: {u: {a: 11}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 11}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -205,8 +203,7 @@ TEST_F(ArithmeticNodeTest, ApplyCreatePath) {
ASSERT_EQUALS(fromjson("{a: {d: 5, b: {c: 6}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c}");
}
@@ -243,7 +240,7 @@ TEST_F(ArithmeticNodeTest, ApplyCreatePathFromRoot) {
ASSERT_EQUALS(fromjson("{c: 5, a: {b: 6}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6}}"), fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -263,8 +260,7 @@ TEST_F(ArithmeticNodeTest, ApplyPositional) {
ASSERT_EQUALS(fromjson("{a: [0, 7, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 7}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 7}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 7}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -389,8 +385,7 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromIntToDecimalIsNotANoOp) {
ASSERT_EQUALS(fromjson("{a: NumberDecimal(\"5.0\")}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: NumberDecimal('5.0')}}"),
- fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -409,8 +404,7 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromLongToDecimalIsNotANoOp) {
ASSERT_EQUALS(fromjson("{a: NumberDecimal(\"5.0\")}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: NumberDecimal('5.0')}}"),
- fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -430,7 +424,6 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromDoubleToDecimalIsNotANoOp) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: NumberDecimal('5.25')}}"),
fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.25')}}}"),
false // Not checking binary equality because the NumberDecimal in the expected output may
// not be bitwise identical to the result produced by the update system.
@@ -471,7 +464,6 @@ TEST_F(ArithmeticNodeTest, IncrementedDecimalStaysDecimal) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: NumberDecimal('11.5')}}"),
fromjson("{$v: 2, diff: {u: {a: NumberDecimal('11.5')}}}"),
false // Not checking binary equality because the NumberDecimal in the expected output may
// not be bitwise identical to the result produced by the update system.
@@ -696,7 +688,7 @@ TEST_F(ArithmeticNodeTest, ApplyEmptyIndexData) {
ASSERT_EQUALS(fromjson("{a: 3}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 3}}"), fromjson("{$v: 2, diff: {u: {a: 3}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 3}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1029,8 +1021,7 @@ TEST_F(ArithmeticNodeTest, ApplyLogDottedPath) {
ASSERT_EQUALS(fromjson("{a: [{b:0}, {b:1}, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1047,8 +1038,7 @@ TEST_F(ArithmeticNodeTest, LogEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [null, null, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1065,8 +1055,7 @@ TEST_F(ArithmeticNodeTest, LogEmptyObject) {
ASSERT_EQUALS(fromjson("{a: {'2': {b: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.2.b}");
}
@@ -1087,7 +1076,7 @@ TEST_F(ArithmeticNodeTest, ApplyDeserializedDocNotNoOp) {
ASSERT_EQUALS(fromjson("{a: 1, b: NumberInt(0)}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: NumberInt(0)}}"), fromjson("{$v: 2, diff: {i: {b: 0}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 0}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{b}");
}
@@ -1150,7 +1139,7 @@ TEST_F(ArithmeticNodeTest, ApplyToDeserializedDocNestedNotNoop) {
ASSERT_EQUALS(fromjson("{a: {b: 3}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 3}}"), fromjson("{$v: 2, diff: {sa: {u: {b: 3}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 3}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
diff --git a/src/mongo/db/update/bit_node_test.cpp b/src/mongo/db/update/bit_node_test.cpp
index e3903c6a301..0e218ebc29e 100644
--- a/src/mongo/db/update/bit_node_test.cpp
+++ b/src/mongo/db/update/bit_node_test.cpp
@@ -42,9 +42,7 @@
namespace mongo {
namespace {
-using BitNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using BitNodeTest = UpdateTestFixture;
TEST(BitNodeTest, InitWithDoubleFails) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
@@ -164,7 +162,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentAnd) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v: 2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 0}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentOr) {
@@ -180,7 +178,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentOr) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v: 2, diff: {i: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 1}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentXor) {
@@ -196,7 +194,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentXor) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v: 2, diff: {i: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 1}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentAnd) {
@@ -212,8 +210,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentAnd) {
ASSERT_EQUALS(BSON("a" << 0b0100), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0100)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0100))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0100))));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentOr) {
@@ -229,8 +226,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentOr) {
ASSERT_EQUALS(BSON("a" << 0b0111), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0111)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0111))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0111))));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentXor) {
@@ -246,8 +242,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentXor) {
ASSERT_EQUALS(BSON("a" << 0b0011), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0011)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0011))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0011))));
}
TEST_F(BitNodeTest, ApplyShouldReportNoOp) {
@@ -284,8 +279,7 @@ TEST_F(BitNodeTest, ApplyMultipleBitOps) {
ASSERT_EQUALS(BSON("a" << 0b0101011001100110), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0101011001100110)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0101011001100110))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0101011001100110))));
}
TEST_F(BitNodeTest, ApplyRepeatedBitOps) {
@@ -301,8 +295,7 @@ TEST_F(BitNodeTest, ApplyRepeatedBitOps) {
ASSERT_EQUALS(BSON("a" << 0b10010110), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b10010110)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b10010110))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b10010110))));
}
} // namespace
diff --git a/src/mongo/db/update/compare_node_test.cpp b/src/mongo/db/update/compare_node_test.cpp
index e3a608ea535..367525b1356 100644
--- a/src/mongo/db/update/compare_node_test.cpp
+++ b/src/mongo/db/update/compare_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using CompareNodeTest = UpdateNodeTest;
+using CompareNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -197,7 +197,7 @@ TEST_F(CompareNodeTest, ApplyMissingFieldMinNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {i: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingNumberMinNumber) {
@@ -215,7 +215,7 @@ TEST_F(CompareNodeTest, ApplyExistingNumberMinNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {u: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyMissingFieldMaxNumber) {
@@ -233,7 +233,7 @@ TEST_F(CompareNodeTest, ApplyMissingFieldMaxNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {i: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingNumberMaxNumber) {
@@ -251,7 +251,7 @@ TEST_F(CompareNodeTest, ApplyExistingNumberMaxNumber) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v:2, diff: {u: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 2}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingDateMaxDate) {
@@ -269,8 +269,7 @@ TEST_F(CompareNodeTest, ApplyExistingDateMaxDate) {
ASSERT_EQUALS(fromjson("{a: {$date: 123123123}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {$date: 123123123}}}"),
- fromjson("{$v:2, diff: {u: {a: {$date: 123123123}}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: {$date: 123123123}}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxDoc) {
@@ -288,7 +287,7 @@ TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxDoc) {
ASSERT_EQUALS(fromjson("{a: {b: 3}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 3}}}"), fromjson("{$v:2, diff: {u: {a: {b: 3}}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: {b: 3}}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxNumber) {
@@ -327,7 +326,7 @@ TEST_F(CompareNodeTest, ApplyMinRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: 'dba'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'dba'}}"), fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
}
TEST_F(CompareNodeTest, ApplyMinRespectsCollationFromSetCollator) {
@@ -349,7 +348,7 @@ TEST_F(CompareNodeTest, ApplyMinRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: 'dba'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'dba'}}"), fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
}
TEST_F(CompareNodeTest, ApplyMaxRespectsCollationFromSetCollator) {
@@ -371,7 +370,7 @@ TEST_F(CompareNodeTest, ApplyMaxRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: 'abd'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'abd'}}"), fromjson("{$v:2, diff: {u: {a: 'abd'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'abd'}}}"));
}
DEATH_TEST_REGEX(CompareNodeTest,
@@ -415,7 +414,7 @@ TEST_F(CompareNodeTest, ApplyIndexesNotAffected) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v:2, diff: {u: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 1}}}"));
}
TEST_F(CompareNodeTest, ApplyNoIndexDataOrLogBuilder) {
diff --git a/src/mongo/db/update/current_date_node_test.cpp b/src/mongo/db/update/current_date_node_test.cpp
index 95a1fcb2b9e..540a4b26f3b 100644
--- a/src/mongo/db/update/current_date_node_test.cpp
+++ b/src/mongo/db/update/current_date_node_test.cpp
@@ -43,24 +43,14 @@ namespace mongo {
namespace {
void assertOplogEntryIsUpdateOfExpectedType(const BSONObj& obj,
- bool v2LogBuilderUsed,
StringData fieldName,
BSONType expectedType = BSONType::Date) {
- if (v2LogBuilderUsed) {
- ASSERT_EQUALS(obj.nFields(), 2);
- ASSERT_EQUALS(obj["$v"].numberInt(), 2);
- ASSERT_EQUALS(obj["diff"]["u"][fieldName].type(), expectedType);
- } else {
- ASSERT_EQUALS(obj.nFields(), 1);
- ASSERT_TRUE(obj["$set"].type() == BSONType::Object);
- ASSERT_EQUALS(obj["$set"].embeddedObject().nFields(), 1U);
- ASSERT_EQUALS(obj["$set"][fieldName].type(), expectedType);
- }
+ ASSERT_EQUALS(obj.nFields(), 2);
+ ASSERT_EQUALS(obj["$v"].numberInt(), 2);
+ ASSERT_EQUALS(obj["diff"]["u"][fieldName].type(), expectedType);
}
-using CurrentDateNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using CurrentDateNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(CurrentDateNodeTest,
InitFailsForEmptyElement,
@@ -158,7 +148,7 @@ TEST_F(CurrentDateNodeTest, ApplyTrue) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyFalse) {
@@ -178,7 +168,7 @@ TEST_F(CurrentDateNodeTest, ApplyFalse) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyDate) {
@@ -198,7 +188,7 @@ TEST_F(CurrentDateNodeTest, ApplyDate) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyTimestamp) {
@@ -218,8 +208,7 @@ TEST_F(CurrentDateNodeTest, ApplyTimestamp) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::bsonTimestamp);
- assertOplogEntryIsUpdateOfExpectedType(
- getOplogEntry(), v2LogBuilderUsed(), "a", BSONType::bsonTimestamp);
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a", BSONType::bsonTimestamp);
}
TEST_F(CurrentDateNodeTest, ApplyFieldDoesNotExist) {
@@ -239,14 +228,9 @@ TEST_F(CurrentDateNodeTest, ApplyFieldDoesNotExist) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- if (v2LogBuilderUsed()) {
- ASSERT_EQUALS(getOplogEntry().nFields(), 2);
- ASSERT_EQUALS(getOplogEntry()["$v"].numberInt(), 2);
- ASSERT_EQUALS(getOplogEntry()["diff"]["i"]["a"].type(), BSONType::Date);
- } else {
- ASSERT_EQUALS(getOplogEntry().nFields(), 1);
- ASSERT_EQUALS(getOplogEntry()["$set"]["a"].type(), BSONType::Date);
- }
+ ASSERT_EQUALS(getOplogEntry().nFields(), 2);
+ ASSERT_EQUALS(getOplogEntry()["$v"].numberInt(), 2);
+ ASSERT_EQUALS(getOplogEntry()["diff"]["i"]["a"].type(), BSONType::Date);
}
TEST_F(CurrentDateNodeTest, ApplyIndexesNotAffected) {
@@ -262,7 +246,7 @@ TEST_F(CurrentDateNodeTest, ApplyIndexesNotAffected) {
ASSERT_FALSE(result.noop);
ASSERT_FALSE(result.indexesAffected);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyNoIndexDataOrLogBuilder) {
diff --git a/src/mongo/db/update/object_replace_executor_test.cpp b/src/mongo/db/update/object_replace_executor_test.cpp
index 68955ac3ba9..3785aa94d5b 100644
--- a/src/mongo/db/update/object_replace_executor_test.cpp
+++ b/src/mongo/db/update/object_replace_executor_test.cpp
@@ -41,7 +41,7 @@
namespace mongo {
namespace {
-using ObjectReplaceExecutorTest = UpdateNodeTest;
+using ObjectReplaceExecutorTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
diff --git a/src/mongo/db/update/object_transform_executor_test.cpp b/src/mongo/db/update/object_transform_executor_test.cpp
index 06cca4da7cd..23b9d52ff6d 100644
--- a/src/mongo/db/update/object_transform_executor_test.cpp
+++ b/src/mongo/db/update/object_transform_executor_test.cpp
@@ -40,7 +40,7 @@
namespace mongo {
namespace {
-using ObjectTransformExecutorTest = UpdateNodeTest;
+using ObjectTransformExecutorTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
diff --git a/src/mongo/db/update/pipeline_executor_test.cpp b/src/mongo/db/update/pipeline_executor_test.cpp
index c650e06749d..02fde0b0865 100644
--- a/src/mongo/db/update/pipeline_executor_test.cpp
+++ b/src/mongo/db/update/pipeline_executor_test.cpp
@@ -43,50 +43,7 @@
namespace mongo {
namespace {
-/**
- * Harness for running the tests with both $v:2 oplog entries enabled and disabled.
- */
-class PipelineExecutorTest : public UpdateNodeTest {
-public:
- void resetApplyParams() override {
- UpdateNodeTest::resetApplyParams();
- }
-
- UpdateExecutor::ApplyParams getApplyParams(mutablebson::Element element) override {
- auto applyParams = UpdateNodeTest::getApplyParams(element);
-
- // Use the same parameters as the parent test fixture, but make sure a v2 log builder
- // is provided and a normal log builder is not.
- applyParams.logMode = _allowDeltaOplogEntries
- ? ApplyParams::LogMode::kGenerateOplogEntry
- : ApplyParams::LogMode::kGenerateOnlyV1OplogEntry;
- return applyParams;
- }
-
- void run() {
- _allowDeltaOplogEntries = false;
- UpdateNodeTest::run();
- _allowDeltaOplogEntries = true;
- UpdateNodeTest::run();
- }
-
- bool deltaOplogEntryAllowed() const {
- return _allowDeltaOplogEntries;
- }
-
-protected:
- bool _allowDeltaOplogEntries = false;
-};
-
-class PipelineExecutorV2ModeTest : public PipelineExecutorTest {
-public:
- void run() {
- _allowDeltaOplogEntries = true;
- UpdateNodeTest::run();
- }
-};
-
-TEST_F(PipelineExecutorTest, Noop) {
+TEST_F(UpdateTestFixture, Noop) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}")};
@@ -101,7 +58,7 @@ TEST_F(PipelineExecutorTest, Noop) {
ASSERT_TRUE(result.oplogEntry.isEmpty());
}
-TEST_F(PipelineExecutorTest, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
+TEST_F(UpdateTestFixture, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}")};
@@ -112,17 +69,11 @@ TEST_F(PipelineExecutorTest, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
ASSERT_FALSE(result.noop);
ASSERT_EQUALS(fromjson("{c: 1, d: 'largeStringValue', a: 1, b: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2}}}"), result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, d: 'largeStringValue', a: 1, b: 2}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2}}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced) {
+TEST_F(UpdateTestFixture, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}"),
@@ -138,7 +89,7 @@ TEST_F(PipelineExecutorTest, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced)
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0, a: 1, b: 2}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ShouldSucceedWhenImmutableIdIsNotModified) {
+TEST_F(UpdateTestFixture, ShouldSucceedWhenImmutableIdIsNotModified) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: 1, b: 2}}")};
@@ -151,17 +102,11 @@ TEST_F(PipelineExecutorTest, ShouldSucceedWhenImmutableIdIsNotModified) {
ASSERT_EQUALS(fromjson("{_id: 0, c: 1, d: 'largeStringValue', a: 1, b: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2 }}}"), result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0, c: 1, d: 'largeStringValue', a: 1, b: 2}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2 }}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ComplexDoc) {
+TEST_F(UpdateTestFixture, ComplexDoc) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: [0, 1, 2], c: {d: 1}}}")};
@@ -173,18 +118,12 @@ TEST_F(PipelineExecutorTest, ComplexDoc) {
ASSERT_EQUALS(fromjson("{a: 1, b: [0, 1, 2], e: ['val1', 'val2'], c: {d: 1}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {c: {d: 1}}, sb: {a: true, u1: 1} }}"),
- result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 1, b: [0, 1, 2], e: ['val1', 'val2'], c: {d: 1}}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {c: {d: 1}}, sb: {a: true, u1: 1} }}"),
+ result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotRemoveImmutablePath) {
+TEST_F(UpdateTestFixture, CannotRemoveImmutablePath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$project: {c: 1}}")};
@@ -200,7 +139,7 @@ TEST_F(PipelineExecutorTest, CannotRemoveImmutablePath) {
}
-TEST_F(PipelineExecutorTest, IdFieldIsNotRemoved) {
+TEST_F(UpdateTestFixture, IdFieldIsNotRemoved) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$project: {a: 1, _id: 0}}")};
@@ -216,7 +155,7 @@ TEST_F(PipelineExecutorTest, IdFieldIsNotRemoved) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotReplaceImmutablePathWithArrayField) {
+TEST_F(UpdateTestFixture, CannotReplaceImmutablePathWithArrayField) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [{b: 1}]}}")};
@@ -231,7 +170,7 @@ TEST_F(PipelineExecutorTest, CannotReplaceImmutablePathWithArrayField) {
"'a.b' was found to be an array or array descendant.");
}
-TEST_F(PipelineExecutorTest, CannotMakeImmutablePathArrayDescendant) {
+TEST_F(UpdateTestFixture, CannotMakeImmutablePathArrayDescendant) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [1]}}")};
@@ -246,7 +185,7 @@ TEST_F(PipelineExecutorTest, CannotMakeImmutablePathArrayDescendant) {
"'a.0' was found to be an array or array descendant.");
}
-TEST_F(PipelineExecutorTest, CannotModifyImmutablePath) {
+TEST_F(UpdateTestFixture, CannotModifyImmutablePath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: {b: 2}}}")};
@@ -261,7 +200,7 @@ TEST_F(PipelineExecutorTest, CannotModifyImmutablePath) {
"to have been altered to b: 2");
}
-TEST_F(PipelineExecutorTest, CannotModifyImmutableId) {
+TEST_F(UpdateTestFixture, CannotModifyImmutableId) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 1}}")};
@@ -276,7 +215,7 @@ TEST_F(PipelineExecutorTest, CannotModifyImmutableId) {
"to have been altered to _id: 1");
}
-TEST_F(PipelineExecutorTest, CanAddImmutableField) {
+TEST_F(UpdateTestFixture, CanAddImmutableField) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: {b: 1}}}")};
@@ -292,7 +231,7 @@ TEST_F(PipelineExecutorTest, CanAddImmutableField) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, a: {b: 1}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CanAddImmutableId) {
+TEST_F(UpdateTestFixture, CanAddImmutableId) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0}}")};
@@ -308,14 +247,14 @@ TEST_F(PipelineExecutorTest, CanAddImmutableId) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, _id: 0}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotCreateDollarPrefixedName) {
+TEST_F(UpdateTestFixture, CannotCreateDollarPrefixedName) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {'a.$bad': 1}}")};
ASSERT_THROWS_CODE(PipelineExecutor(expCtx, pipeline), AssertionException, 16410);
}
-TEST_F(PipelineExecutorTest, NoLogBuilder) {
+TEST_F(UpdateTestFixture, NoLogBuilder) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1}}")};
@@ -330,7 +269,7 @@ TEST_F(PipelineExecutorTest, NoLogBuilder) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
}
-TEST_F(PipelineExecutorTest, SerializeTest) {
+TEST_F(UpdateTestFixture, SerializeTest) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [{b: 1}]}}"),
@@ -345,7 +284,7 @@ TEST_F(PipelineExecutorTest, SerializeTest) {
ASSERT_VALUE_EQ(serialized, Value(BSONArray(doc)));
}
-TEST_F(PipelineExecutorTest, RejectsInvalidConstantNames) {
+TEST_F(UpdateTestFixture, RejectsInvalidConstantNames) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline;
@@ -368,7 +307,7 @@ TEST_F(PipelineExecutorTest, RejectsInvalidConstantNames) {
ErrorCodes::FailedToParse);
}
-TEST_F(PipelineExecutorTest, CanUseConstants) {
+TEST_F(UpdateTestFixture, CanUseConstants) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {b: '$$var1', c: '$$var2'}}")};
@@ -384,7 +323,7 @@ TEST_F(PipelineExecutorTest, CanUseConstants) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 1, b: 10, c : {x: 1, y: 2}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CanUseConstantsAcrossMultipleUpdates) {
+TEST_F(UpdateTestFixture, CanUseConstantsAcrossMultipleUpdates) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {b: '$$var1'}}")};
@@ -412,7 +351,7 @@ TEST_F(PipelineExecutorTest, CanUseConstantsAcrossMultipleUpdates) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 2, b: 'foo'}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, NoopWithConstants) {
+TEST_F(UpdateTestFixture, NoopWithConstants) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {a: '$$var1', b: '$$var2'}}")};
@@ -428,7 +367,7 @@ TEST_F(PipelineExecutorTest, NoopWithConstants) {
ASSERT_TRUE(result.oplogEntry.isEmpty());
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithDeletes) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithDeletes) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: 1, paddingField: 'largeValueString'}, c: 1, paddingField: "
@@ -497,7 +436,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithDeletes) {
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithUpdatesAndInserts) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithUpdatesAndInserts) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: 1, paddingField: 'largeValueString'}, c: 1, paddingField: "
@@ -565,7 +504,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithUpdatesAndInserts) {
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAlongIndexPath) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithArraysAlongIndexPath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: [0, {a: {b: ['someStringValue', {c: 1, paddingField: 'largeValueString'}], "
@@ -656,7 +595,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAlongIndexPath)
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAfterIndexPath) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithArraysAfterIndexPath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: [{paddingField: 'largeValueString'}, 1]}, c: 1, paddingField: "
diff --git a/src/mongo/db/update/pop_node_test.cpp b/src/mongo/db/update/pop_node_test.cpp
index b17fe6b189b..4159be9a475 100644
--- a/src/mongo/db/update/pop_node_test.cpp
+++ b/src/mongo/db/update/pop_node_test.cpp
@@ -42,7 +42,7 @@ namespace mongo {
namespace {
namespace mmb = mongo::mutablebson;
-using PopNodeTest = UpdateNodeTest;
+using PopNodeTest = UpdateTestFixture;
TEST(PopNodeTest, InitSucceedsPositiveOne) {
auto update = fromjson("{$pop: {a: 1}}");
@@ -237,8 +237,7 @@ TEST_F(PopNodeTest, PopsSingleElementFromTheBack) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -257,8 +256,7 @@ TEST_F(PopNodeTest, PopsSingleElementFromTheFront) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -277,8 +275,7 @@ TEST_F(PopNodeTest, PopsFromTheBackOfMultiElementArray) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [1, 2]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [1, 2]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -297,8 +294,7 @@ TEST_F(PopNodeTest, PopsFromTheFrontOfMultiElementArray) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [2, 3]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [2, 3]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -317,8 +313,7 @@ TEST_F(PopNodeTest, PopsFromTheFrontOfMultiElementArrayWithoutAffectingIndexes)
ASSERT_FALSE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [2, 3]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [2, 3]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -336,8 +331,7 @@ TEST_F(PopNodeTest, SucceedsWithNullUpdateIndexData) {
ASSERT_FALSE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [1, 2]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [1, 2]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/pull_node_test.cpp b/src/mongo/db/update/pull_node_test.cpp
index 34f3c31f34a..85141fa3841 100644
--- a/src/mongo/db/update/pull_node_test.cpp
+++ b/src/mongo/db/update/pull_node_test.cpp
@@ -43,9 +43,7 @@
namespace mongo {
namespace {
-using PullNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using PullNodeTest = UpdateTestFixture;
TEST(PullNodeTest, InitWithBadMatchExpressionFails) {
auto update = fromjson("{$pull: {a: {b: {$foo: 1}}}}");
@@ -267,8 +265,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingOne) {
ASSERT_EQUALS(fromjson("{a: [1, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2, 3]}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayMatchingSeveral) {
@@ -286,8 +283,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingSeveral) {
ASSERT_EQUALS(fromjson("{a: [1, 2, 3, 4, 5]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2, 3, 4, 5]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 2, 3, 4, 5]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2, 3, 4, 5]}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayMatchingAll) {
@@ -305,7 +301,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingAll) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayWithEq) {
@@ -323,8 +319,7 @@ TEST_F(PullNodeTest, ApplyToArrayWithEq) {
ASSERT_EQUALS(fromjson("{a: [0, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 2, 3]}}}"));
}
TEST_F(PullNodeTest, ApplyNoIndexDataNoLogBuilder) {
@@ -363,8 +358,7 @@ TEST_F(PullNodeTest, ApplyWithCollation) {
ASSERT_EQUALS(fromjson("{a: ['zaa', 'zbb']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['zaa', 'zbb']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['zaa', 'zbb']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['zaa', 'zbb']}}}"));
}
TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectNonStringMatches) {
@@ -385,7 +379,7 @@ TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectNonStringMatches) {
ASSERT_EQUALS(fromjson("{a: [2, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [2, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 1]}}}"));
}
TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectRegexMatches) {
@@ -406,8 +400,7 @@ TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectRegexMatches) {
ASSERT_EQUALS(fromjson("{a: ['b', 'cb']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['b', 'cb']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['b', 'cb']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['b', 'cb']}}}"));
}
TEST_F(PullNodeTest, ApplyStringLiteralMatchWithCollation) {
@@ -428,7 +421,7 @@ TEST_F(PullNodeTest, ApplyStringLiteralMatchWithCollation) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullNodeTest, ApplyCollationDoesNotAffectNumberLiteralMatches) {
@@ -449,8 +442,7 @@ TEST_F(PullNodeTest, ApplyCollationDoesNotAffectNumberLiteralMatches) {
ASSERT_EQUALS(fromjson("{a: ['a', 'b', 2, 'c', 'd']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['a', 'b', 2, 'c', 'd']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['a', 'b', 2, 'c', 'd']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['a', 'b', 2, 'c', 'd']}}}"));
}
TEST_F(PullNodeTest, ApplyStringMatchAfterSetCollator) {
@@ -587,8 +579,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching1) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 1}, {x: 2}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 1}, {x: 2}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}]}}}}"));
}
TEST_F(PullNodeTest, ApplyComplexDocAndMatching2) {
@@ -606,8 +597,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching2) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 1}, {x: 2}, {z: 'z'}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}}}"));
}
TEST_F(PullNodeTest, ApplyComplexDocAndMatching3) {
@@ -625,8 +615,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching3) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 2}, {z: 'z'}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 2}, {z: 'z'}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 2}, {z: 'z'}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 2}, {z: 'z'}]}}}}"));
}
TEST_F(PullNodeTest, ApplyFullPredicateWithCollation) {
@@ -648,8 +637,7 @@ TEST_F(PullNodeTest, ApplyFullPredicateWithCollation) {
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
}
TEST_F(PullNodeTest, ApplyScalarValueMod) {
@@ -667,8 +655,7 @@ TEST_F(PullNodeTest, ApplyScalarValueMod) {
ASSERT_EQUALS(fromjson("{a: [2, 2, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 2, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [2, 2, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 2, 2]}}}"));
}
TEST_F(PullNodeTest, ApplyObjectValueMod) {
@@ -686,8 +673,7 @@ TEST_F(PullNodeTest, ApplyObjectValueMod) {
ASSERT_EQUALS(fromjson("{a: [{x: 1}, {x: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{x: 1}, {x: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{x: 1}, {x: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{x: 1}, {x: 1}]}}}"));
}
TEST_F(PullNodeTest, DocumentationExample1) {
@@ -707,7 +693,6 @@ TEST_F(PullNodeTest, DocumentationExample1) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {flags: ['vme', 'de', 'pse', 'tsc', 'pae', 'mce']}}"),
fromjson("{$v: 2, diff: {u: {flags: ['vme', 'de', 'pse', 'tsc', 'pae', 'mce']}}}"));
}
@@ -726,8 +711,7 @@ TEST_F(PullNodeTest, DocumentationExample2a) {
ASSERT_EQUALS(fromjson("{votes: [3, 5, 6, 8]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {votes: [3, 5, 6, 8]}}"),
- fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6, 8]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6, 8]}}}"));
}
TEST_F(PullNodeTest, DocumentationExample2b) {
@@ -745,8 +729,7 @@ TEST_F(PullNodeTest, DocumentationExample2b) {
ASSERT_EQUALS(fromjson("{votes: [3, 5, 6]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {votes: [3, 5, 6]}}"),
- fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6]}}}"));
}
TEST_F(PullNodeTest, ApplyPullWithObjectValueToArrayWithNonObjectValue) {
@@ -764,7 +747,7 @@ TEST_F(PullNodeTest, ApplyPullWithObjectValueToArrayWithNonObjectValue) {
ASSERT_EQUALS(fromjson("{a: [2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2]}}"), fromjson("{$v: 2, diff: {u: {a: [2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2]}}}"));
}
TEST_F(PullNodeTest, CannotModifyImmutableField) {
@@ -798,8 +781,7 @@ TEST_F(PullNodeTest, SERVER_3988) {
ASSERT_EQUALS(fromjson("{x: 1, y: [2, 3, 4, 'abc']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {y: [2, 3, 4, 'abc']}}"),
- fromjson("{$v: 2, diff: {u: {y: [2, 3, 4, 'abc']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {y: [2, 3, 4, 'abc']}}}"));
}
} // namespace
diff --git a/src/mongo/db/update/pullall_node_test.cpp b/src/mongo/db/update/pullall_node_test.cpp
index f0879fff9cb..5286fc4d653 100644
--- a/src/mongo/db/update/pullall_node_test.cpp
+++ b/src/mongo/db/update/pullall_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using PullAllNodeTest = UpdateNodeTest;
+using PullAllNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -151,8 +151,7 @@ TEST_F(PullAllNodeTest, ApplyWithSingleNumber) {
ASSERT_EQUALS(fromjson("{a: ['a', {r: 1, b: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['a', {r: 1, b: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [\"a\", {r: 1, b: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [\"a\", {r: 1, b: 2}]}}}"));
}
TEST_F(PullAllNodeTest, ApplyNoIndexDataNoLogBuilder) {
@@ -204,8 +203,7 @@ TEST_F(PullAllNodeTest, ApplyWithWithTwoElements) {
ASSERT_EQUALS(fromjson("{a: [{r: 1, b: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{r: 1, b: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{r: 1, b: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{r: 1, b: 2}]}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElements) {
@@ -223,7 +221,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElements) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsButOutOfOrder) {
@@ -241,7 +239,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsButOutOfOrder) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsAndThenSome) {
@@ -259,7 +257,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsAndThenSome) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithCollator) {
@@ -280,8 +278,7 @@ TEST_F(PullAllNodeTest, ApplyWithCollator) {
ASSERT_EQUALS(fromjson("{a: ['baz']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['baz']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['baz']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['baz']}}}"));
}
TEST_F(PullAllNodeTest, ApplyAfterSetCollator) {
diff --git a/src/mongo/db/update/push_node_test.cpp b/src/mongo/db/update/push_node_test.cpp
index 1810610c302..8999a2707a1 100644
--- a/src/mongo/db/update/push_node_test.cpp
+++ b/src/mongo/db/update/push_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using PushNodeTest = UpdateNodeTest;
+using PushNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -288,7 +288,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -307,7 +307,7 @@ TEST_F(PushNodeTest, ApplyToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -326,8 +326,7 @@ TEST_F(PushNodeTest, ApplyToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -358,8 +357,7 @@ TEST_F(PushNodeTest, ApplyToDottedPathElement) {
doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'choices.first.votes': [1]}}"),
- fromjson("{$v: 2, diff: {schoices: {sfirst: {i: {votes: [1]}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {schoices: {sfirst: {i: {votes: [1]}}}}}"));
ASSERT_EQUALS("{choices.first.votes}", getModifiedPaths());
}
@@ -378,7 +376,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -397,7 +395,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -416,7 +414,7 @@ TEST_F(PushNodeTest, ApplyMultipleEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2]}}"), fromjson("{$v: 2, diff: {i: {a: [1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1, 2]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -435,8 +433,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -455,8 +452,7 @@ TEST_F(PushNodeTest, ApplyMultipleEachToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1, 'a.2': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1, u2: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1, u2: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -494,7 +490,7 @@ TEST_F(PushNodeTest, ApplyEmptyEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {i: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: []}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -532,7 +528,7 @@ TEST_F(PushNodeTest, ApplyToArrayWithSlice) {
ASSERT_EQUALS(fromjson("{a: [3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [3]}}"), fromjson("{$v: 2, diff: {u: {a: [3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [3]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -551,8 +547,7 @@ TEST_F(PushNodeTest, ApplyWithNumericSort) {
ASSERT_EQUALS(fromjson("{a: [-1, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [-1, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [-1, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [-1, 2, 3]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -571,8 +566,7 @@ TEST_F(PushNodeTest, ApplyWithReverseNumericSort) {
ASSERT_EQUALS(fromjson("{a: [4, 3, -1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [4, 3, -1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [4, 3, -1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [4, 3, -1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -591,8 +585,7 @@ TEST_F(PushNodeTest, ApplyWithMixedSort) {
ASSERT_EQUALS(fromjson("{a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -611,8 +604,7 @@ TEST_F(PushNodeTest, ApplyWithReverseMixedSort) {
ASSERT_EQUALS(fromjson("{a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -631,8 +623,7 @@ TEST_F(PushNodeTest, ApplyWithEmbeddedFieldSort) {
ASSERT_EQUALS(fromjson("{a: [3, 't', {b: 1}, 4, -1, {a: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -654,8 +645,7 @@ TEST_F(PushNodeTest, ApplySortWithCollator) {
ASSERT_EQUALS(fromjson("{a: ['ha', 'gb', 'fc', 'dd']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['ha', 'gb', 'fc', 'dd']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['ha', 'gb', 'fc', 'dd']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['ha', 'gb', 'fc', 'dd']}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -885,7 +875,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithPositionZero) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -904,7 +894,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithPositionOne) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -923,7 +913,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithLargePosition) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -942,7 +932,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithPositionZero) {
ASSERT_EQUALS(fromjson("{a: [1, 0]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 0]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -961,8 +951,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithLargePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson(" {$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson(" {$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -981,7 +970,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1000,7 +989,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [1, 0]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 0]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1019,8 +1008,7 @@ TEST_F(PushNodeTest, ApplyToPopulatedArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2, 5, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2, 5, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1039,8 +1027,7 @@ TEST_F(PushNodeTest, ApplyToPopulatedArrayWithOutOfBoundsNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [5, 0, 1, 2, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [5, 0, 1, 2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1059,8 +1046,7 @@ TEST_F(PushNodeTest, ApplyMultipleElementsPushWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2, 5, 6, 7, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1081,8 +1067,7 @@ TEST_F(PushNodeTest, PushWithMinIntAsPosition) {
ASSERT_EQUALS(fromjson("{a: [5, 0, 1, 2, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [5, 0, 1, 2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/rename_node_test.cpp b/src/mongo/db/update/rename_node_test.cpp
index 39c037a75a3..f8b7e010ca7 100644
--- a/src/mongo/db/update/rename_node_test.cpp
+++ b/src/mongo/db/update/rename_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using RenameNodeTest = UpdateNodeTest;
+using RenameNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -125,8 +125,7 @@ TEST_F(RenameNodeTest, SimpleNumberAtRoot) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -144,8 +143,7 @@ TEST_F(RenameNodeTest, ToExistsAtSameLevel) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -163,8 +161,7 @@ TEST_F(RenameNodeTest, ToAndFromHaveSameValue) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -182,8 +179,7 @@ TEST_F(RenameNodeTest, RenameToFieldWithSameValueButDifferentType) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 1}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 1}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 1}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -201,8 +197,7 @@ TEST_F(RenameNodeTest, FromDottedElement) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {}, b: {d: 6}}"), doc);
- assertOplogEntry(fromjson("{$set: {b: {d: 6}}, $unset: {'a.c': true}}"),
- fromjson("{$v: 2, diff: {u: {b: {d: 6}}, sa: {d: {c: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {b: {d: 6}}, sa: {d: {c: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.c, b}");
}
@@ -220,8 +215,7 @@ TEST_F(RenameNodeTest, RenameToExistingNestedFieldDoesNotReorderFields) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: {c: 4, d: 2}}, b: 3, c: {}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b.c': 4}, $unset: {'c.d': true}}"),
- fromjson("{$v: 2, diff: {sa: {sb: {u: {c: 4}}}, sc: {d: {d: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {sb: {u: {c: 4}}}, sc: {d: {d: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c, c.d}");
}
@@ -240,8 +234,7 @@ TEST_F(RenameNodeTest, MissingCompleteTo) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 1, c: {r: {d: 2}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'c.r.d': 2}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, sc: {i: {r: {d: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, sc: {i: {r: {d: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, c.r.d}");
}
@@ -259,8 +252,7 @@ TEST_F(RenameNodeTest, ToIsCompletelyMissing) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: {c: {d: 2}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'b.c.d': 2}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b.c.d}");
}
@@ -278,8 +270,7 @@ TEST_F(RenameNodeTest, ToMissingDottedField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: {c: {d: [{a:2, b:1}]}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'b.c.d': [{a:2, b:1}]}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: [{a: 2, b: 1}]}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: [{a: 2, b: 1}]}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b.c.d}");
}
@@ -398,8 +389,7 @@ TEST_F(RenameNodeTest, ReplaceArrayField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -417,8 +407,7 @@ TEST_F(RenameNodeTest, ReplaceWithArrayField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: []}"), doc);
- assertOplogEntry(fromjson("{$set: {b: []}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: []}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -436,8 +425,7 @@ TEST_F(RenameNodeTest, CanRenameFromInvalidFieldName) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {a: 2}, $unset: {'$a': true}}"),
- fromjson("{$v: 2, diff: {d: {$a: false}, i: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {$a: false}, i: {a: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{$a, a}");
}
@@ -492,8 +480,7 @@ TEST_F(RenameNodeTest, ApplyCanRemoveRequiredPartOfDBRefIfValidateForStorageIsFa
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'b': 0}, $unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {i: {b: 0}, sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 0}, sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id, b}");
}
diff --git a/src/mongo/db/update/set_node_test.cpp b/src/mongo/db/update/set_node_test.cpp
index 2667dac6d52..9c9b7733aeb 100644
--- a/src/mongo/db/update/set_node_test.cpp
+++ b/src/mongo/db/update/set_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using SetNodeTest = UpdateNodeTest;
+using SetNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -95,7 +95,7 @@ TEST_F(SetNodeTest, ApplyEmptyPathToCreate) {
ASSERT_EQUALS(fromjson("{a: 6}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 6}}"), fromjson("{$v: 2, diff: {u: {a: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 6}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -115,8 +115,7 @@ TEST_F(SetNodeTest, ApplyCreatePath) {
ASSERT_EQUALS(fromjson("{a: {d: 5, b: {c: 6}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
ASSERT_EQUALS("{a.b.c}", getModifiedPaths());
}
@@ -135,7 +134,7 @@ TEST_F(SetNodeTest, ApplyCreatePathFromRoot) {
ASSERT_EQUALS(fromjson("{c: 5, a: {b: 6}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6}}"), fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -155,8 +154,7 @@ TEST_F(SetNodeTest, ApplyPositional) {
ASSERT_EQUALS(fromjson("{a: [0, 6, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 6}}}"));
ASSERT_EQUALS("{a.1}", getModifiedPaths());
}
@@ -367,7 +365,7 @@ TEST_F(SetNodeTest, ApplyLog) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v: 2, diff: {u: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -767,8 +765,7 @@ TEST_F(SetNodeTest, ApplyLogDottedPath) {
ASSERT_EQUALS(fromjson("{a: [{b:0}, {b:1}, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -785,8 +782,7 @@ TEST_F(SetNodeTest, LogEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [null, null, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -803,8 +799,7 @@ TEST_F(SetNodeTest, LogEmptyObject) {
ASSERT_EQUALS(fromjson("{a: {'2': {b: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
ASSERT_EQUALS("{a.2.b}", getModifiedPaths());
}
@@ -987,7 +982,7 @@ TEST_F(SetNodeTest, Set6) {
ASSERT_EQUALS(fromjson("{_id: 1, r: {a:2, b:2}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'r.a': 2}}"), fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
ASSERT_EQUALS("{r.a}", getModifiedPaths());
}
@@ -1007,7 +1002,7 @@ TEST_F(SetNodeTest, Set6FromRepl) {
ASSERT_EQUALS(fromjson("{_id: 1, r: {a:2, b:2} }"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'r.a': 2}}"), fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
ASSERT_EQUALS("{r.a}", getModifiedPaths());
}
@@ -1051,7 +1046,7 @@ TEST_F(SetNodeTest, ApplyCanCreateDollarPrefixedFieldNameWhenValidateForStorageI
ASSERT_EQUALS(fromjson("{$bad: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {$bad: 1}}"), fromjson("{$v: 2, diff: {i: {$bad: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {$bad: 1}}}"));
ASSERT_EQUALS("{$bad}", getModifiedPaths());
}
@@ -1160,7 +1155,7 @@ TEST_F(SetNodeTest, ApplyCanOverwritePrefixToCreateImmutablePath) {
ASSERT_EQUALS(fromjson("{a: {b: 2}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 2}}}"), fromjson("{$v: 2, diff: {u: {a: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1180,8 +1175,7 @@ TEST_F(SetNodeTest, ApplyCanOverwritePrefixOfImmutablePathIfNoopOnImmutablePath)
ASSERT_EQUALS(fromjson("{a: {b: 2, c: 3}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 2, c: 3}}}"),
- fromjson("{$v: 2, diff: {u: {a: {b: 2, c: 3}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: {b: 2, c: 3}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1272,7 +1266,7 @@ TEST_F(SetNodeTest, ApplyCanCreateImmutablePath) {
ASSERT_EQUALS(fromjson("{a: {b: 2}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 2}}"), fromjson("{$v: 2, diff: {sa: {i: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: 2}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -1292,7 +1286,7 @@ TEST_F(SetNodeTest, ApplyCanCreatePrefixOfImmutablePath) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v: 2, diff: {i: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1312,8 +1306,7 @@ TEST_F(SetNodeTest, ApplySetFieldInNonExistentArrayElementAffectsIndexOnSiblingF
ASSERT_EQUALS(fromjson("{a: [{b: 0}, {c: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1333,8 +1326,7 @@ TEST_F(SetNodeTest, ApplySetFieldInExistingArrayElementDoesNotAffectIndexOnSibli
ASSERT_EQUALS(fromjson("{a: [{b: 0, c: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {i: {c: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {i: {c: 2}}}}}"));
ASSERT_EQUALS("{a.0.c}", getModifiedPaths());
}
@@ -1355,8 +1347,7 @@ TEST_F(SetNodeTest, ApplySetFieldInNonExistentNumericFieldDoesNotAffectIndexOnSi
ASSERT_EQUALS(fromjson("{a: {'0': {b: 0}, '1': {c: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'1': {c: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'1': {c: 2}}}}}"));
ASSERT_EQUALS("{a.1.c}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/unset_node_test.cpp b/src/mongo/db/update/unset_node_test.cpp
index 0dd55f165e4..91cb9b9ab63 100644
--- a/src/mongo/db/update/unset_node_test.cpp
+++ b/src/mongo/db/update/unset_node_test.cpp
@@ -43,9 +43,7 @@
namespace mongo {
namespace {
-using UnsetNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using UnsetNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(UnsetNodeTest,
InitFailsForEmptyElement,
@@ -169,7 +167,7 @@ TEST_F(UnsetNodeTest, UnsetTopLevelPath) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -188,8 +186,7 @@ TEST_F(UnsetNodeTest, UnsetNestedPath) {
ASSERT_EQUALS(fromjson("{a: {b: {}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b.c': true}}"),
- fromjson("{$v: 2, diff: {sa: {sb: {d: {c: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {sb: {d: {c: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c}");
}
@@ -208,8 +205,7 @@ TEST_F(UnsetNodeTest, UnsetObject) {
ASSERT_EQUALS(fromjson("{a: {}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -228,8 +224,7 @@ TEST_F(UnsetNodeTest, UnsetArrayElement) {
ASSERT_EQUALS(fromjson("{a:[null], b:1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': null}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u0: null}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u0: null}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0}");
}
@@ -249,8 +244,7 @@ TEST_F(UnsetNodeTest, UnsetPositional) {
ASSERT_EQUALS(fromjson("{a: [0, null, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': null}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: null}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: null}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -269,7 +263,7 @@ TEST_F(UnsetNodeTest, UnsetEntireArray) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -288,8 +282,7 @@ TEST_F(UnsetNodeTest, UnsetFromObjectInArray) {
ASSERT_EQUALS(fromjson("{a:[{}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.0.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {b: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {b: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b}");
}
@@ -308,8 +301,7 @@ TEST_F(UnsetNodeTest, CanUnsetInvalidField) {
ASSERT_EQUALS(fromjson("{b: 1, a: [{}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.0.$b': true}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {$b: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {$b: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.$b}");
}
@@ -345,7 +337,7 @@ TEST_F(UnsetNodeTest, ApplyDoesNotAffectIndexes) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -364,8 +356,7 @@ TEST_F(UnsetNodeTest, ApplyFieldWithDot) {
ASSERT_EQUALS(fromjson("{'a.b':4, a: {}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -385,8 +376,7 @@ TEST_F(UnsetNodeTest, ApplyCannotRemoveRequiredPartOfDBRef) {
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id}");
}
@@ -408,8 +398,7 @@ TEST_F(UnsetNodeTest, ApplyCanRemoveRequiredPartOfDBRefIfValidateForStorageIsFal
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id}");
}
diff --git a/src/mongo/db/update/update_array_node_test.cpp b/src/mongo/db/update/update_array_node_test.cpp
index 95dbf2356a7..8999a4c6c6b 100644
--- a/src/mongo/db/update/update_array_node_test.cpp
+++ b/src/mongo/db/update/update_array_node_test.cpp
@@ -45,8 +45,7 @@
namespace mongo {
namespace {
-using UpdateArrayNodeTest = UpdateNodeTest;
-using mongo::mutablebson::Element;
+using UpdateArrayNodeTest = UpdateTestFixture;
using unittest::assertGet;
TEST_F(UpdateArrayNodeTest, ApplyCreatePathFails) {
@@ -122,8 +121,7 @@ TEST_F(UpdateArrayNodeTest, UpdateIsAppliedToAllMatchingElements) {
ASSERT_EQUALS(fromjson("{a: [2, 1, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 1, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [2, 1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 1, 2]}}}"));
ASSERT_EQUALS("{a.0, a.2}", getModifiedPaths());
}
@@ -173,8 +171,7 @@ TEST_F(UpdateArrayNodeTest, UpdateForEmptyIdentifierIsAppliedToAllArrayElements)
ASSERT_EQUALS(fromjson("{a: [1, 1, 1]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 1, 1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 1, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 1, 1]}}}"));
ASSERT_EQUALS("{a.0, a.1, a.2}", getModifiedPaths());
}
@@ -223,8 +220,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElement) {
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1, d: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 1, 'a.0.c': 1, 'a.0.d': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1, d: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1, d: 1}}}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.0.d}", getModifiedPaths());
}
@@ -264,8 +260,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementsUsingMergedChildr
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1}, {b: 1, c: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.1.b, a.1.c}", getModifiedPaths());
}
@@ -314,8 +309,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementsWithoutMergedChil
ASSERT_EQUALS(fromjson("{a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.1.c, a.1.d}", getModifiedPaths());
}
@@ -346,8 +340,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementWithEmptyIdentifie
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 1, 'a.0.c': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1}}}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c}", getModifiedPaths());
}
@@ -394,7 +387,6 @@ TEST_F(UpdateArrayNodeTest, ApplyNestedArrayUpdates) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b.0.c': 1, 'a.0.b.0.d': 1}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, s0: {u: {c: 1, d: 1}}}}}}}"));
ASSERT_EQUALS("{a.0.b.0.c, a.0.b.0.d}", getModifiedPaths());
}
@@ -594,7 +586,6 @@ TEST_F(UpdateArrayNodeTest, NoArrayElementAffectsIndexes) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 0, b: 0}]}}"),
fromjson("{$v: 2, diff: {u: {a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 0, b: 0}]}}}"));
ASSERT_EQUALS("{a.0.b, a.1.b, a.2.b}", getModifiedPaths());
}
@@ -623,8 +614,7 @@ TEST_F(UpdateArrayNodeTest, WhenOneElementIsMatchedLogElementUpdateDirectly) {
ASSERT_EQUALS(fromjson("{a: [{c: 1}, {c: 0, b: 0}, {c: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.b': 0}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s1: {i: {b: 0}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s1: {i: {b: 0}}}}}"));
ASSERT_EQUALS("{a.1.b}", getModifiedPaths());
}
@@ -652,8 +642,7 @@ TEST_F(UpdateArrayNodeTest, WhenOneElementIsModifiedLogElement) {
ASSERT_EQUALS(fromjson("{a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': {c: 0, b: 0}}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 0, b: 0}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 0, b: 0}}}}"));
ASSERT_EQUALS("{a.0.b, a.1.b}", getModifiedPaths());
}
@@ -707,8 +696,7 @@ TEST_F(UpdateArrayNodeTest, ApplyPositionalInsideArrayUpdate) {
ASSERT_EQUALS(fromjson("{a: [{b: [0, 1], c: 0}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, u1: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, u1: 1}}}}}"));
ASSERT_EQUALS("{a.0.b.1}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/update_driver.cpp b/src/mongo/db/update/update_driver.cpp
index 5ce9dc8aa84..dcb164f0ace 100644
--- a/src/mongo/db/update/update_driver.cpp
+++ b/src/mongo/db/update/update_driver.cpp
@@ -86,19 +86,7 @@ bool parseUpdateExpression(
const std::map<StringData, std::unique_ptr<ExpressionWithPlaceholder>>& arrayFilters) {
bool positional = false;
std::set<std::string> foundIdentifiers;
- bool foundVersionField = false;
for (auto&& mod : updateExpr) {
- // If there is a "$v" field among the modifiers, it should have already been used by the
- // caller to determine that this is the correct parsing function.
- if (mod.fieldNameStringData() == kUpdateOplogEntryVersionFieldName) {
- uassert(
- ErrorCodes::BadValue, "Duplicate $v in oplog update document", !foundVersionField);
- foundVersionField = true;
- invariant(mod.numberLong() ==
- static_cast<long long>(UpdateOplogEntryVersion::kUpdateNodeV1));
- continue;
- }
-
auto modType = validateMod(mod);
for (auto&& field : mod.Obj()) {
auto statusWithPositional = UpdateObjectNode::parseAndMerge(
@@ -180,25 +168,15 @@ void UpdateDriver::parse(
invariant(_updateType == UpdateType::kOperator);
- // By this point we are expecting a "classic" update. This version of mongod only supports $v:
- // 1 (modifier language) and $v: 2 (delta) (older versions support $v: 0). We've already
- // checked whether this is a delta update so we check that the $v field isn't present, or has a
- // value of 1.
-
- auto updateExpr = updateMod.getUpdateModifier();
- BSONElement versionElement = updateExpr[kUpdateOplogEntryVersionFieldName];
- if (versionElement) {
- uassert(ErrorCodes::FailedToParse,
- "The $v update field is only recognized internally",
- _fromOplogApplication);
-
- // The UpdateModification should have verified that the value of $v is valid.
- invariant(versionElement.numberInt() ==
- static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1));
- }
-
+ // By this point we are expecting a "kModifier" update. This version of mongod only supports
+ // $v: 2 (delta) (older versions support $v: 0 and $v: 1). We've already checked whether
+ // this is a delta update, so we verify that we're not on the oplog application path.
+ tassert(5030100,
+ "An oplog update can only be of type 'kReplacement' or 'kDelta'",
+ !_fromOplogApplication);
auto root = std::make_unique<UpdateObjectNode>();
- _positional = parseUpdateExpression(updateExpr, root.get(), _expCtx, arrayFilters);
+ _positional =
+ parseUpdateExpression(updateMod.getUpdateModifier(), root.get(), _expCtx, arrayFilters);
_updateExecutor = std::make_unique<UpdateTreeExecutor>(std::move(root));
}
@@ -284,9 +262,7 @@ Status UpdateDriver::update(OperationContext* opCtx,
}
if (_logOp && logOpRec) {
- applyParams.logMode = internalQueryEnableLoggingV2OplogEntries.load()
- ? ApplyParams::LogMode::kGenerateOplogEntry
- : ApplyParams::LogMode::kGenerateOnlyV1OplogEntry;
+ applyParams.logMode = ApplyParams::LogMode::kGenerateOplogEntry;
if (MONGO_unlikely(hangAfterPipelineUpdateFCVCheck.shouldFail()) &&
type() == UpdateType::kPipeline) {
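The parsing change above removes the last place that accepted $v: 1 from the oplog: an operator-style ("kModifier") update can now only come from a user request, never from oplog application. As a rough, self-contained sketch of the resulting dispatch (illustrative names only: UpdateOplogEntryVersion::kUpdateNodeV1 is taken from the removed code above, while kDeltaV2 and the helper function are assumptions, not the real UpdateDriver API):

#include <optional>
#include <stdexcept>

// Values mirrored from the update code in the diff above; kDeltaV2 is an assumption.
enum class UpdateOplogEntryVersion { kUpdateNodeV1 = 1, kDeltaV2 = 2 };

enum class OplogUpdateType { kReplacement, kDelta };

// Hypothetical helper: classify an oplog update entry by its optional $v field.
OplogUpdateType classifyOplogUpdate(std::optional<int> dollarV) {
    if (!dollarV) {
        return OplogUpdateType::kReplacement;  // no $v: a full-document replacement entry
    }
    if (*dollarV == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2)) {
        return OplogUpdateType::kDelta;        // $v: 2 delta diff
    }
    // $v: 0 and $v: 1 entries can only originate from older versions and are no longer applied.
    throw std::invalid_argument("unsupported $v value in oplog update entry");
}

With that routing in place, the tassert in the new code simply records that the kModifier branch is unreachable during oplog application.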
diff --git a/src/mongo/db/update/update_executor.h b/src/mongo/db/update/update_executor.h
index 539f044432b..8f674ff25e0 100644
--- a/src/mongo/db/update/update_executor.h
+++ b/src/mongo/db/update/update_executor.h
@@ -57,10 +57,6 @@ public:
// Indicates that no oplog entry should be produced.
kDoNotGenerateOplogEntry,
- // Indicates that the update executor should produce an oplog entry. Only the $v: 1
- // format or replacement-style format may be used, however.
- kGenerateOnlyV1OplogEntry,
-
// Indicates that the update executor should produce an oplog entry, and may use any
// format.
kGenerateOplogEntry
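For callers, the enum change above is mechanical: with the V1-only mode gone, logging is a two-way choice. A minimal sketch under that assumption (the struct below is a simplified stand-in, not the real ApplyParams):

// Simplified stand-in for the real ApplyParams, reflecting the two remaining modes.
struct ApplyParams {
    enum class LogMode { kDoNotGenerateOplogEntry, kGenerateOplogEntry };
    LogMode logMode = LogMode::kDoNotGenerateOplogEntry;
};

void configureOplogLogging(ApplyParams& applyParams, bool shouldLogOp) {
    // Mirrors the driver change above: there is no V1-only mode left to consider.
    applyParams.logMode = shouldLogOp ? ApplyParams::LogMode::kGenerateOplogEntry
                                      : ApplyParams::LogMode::kDoNotGenerateOplogEntry;
}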
diff --git a/src/mongo/db/update/update_node_test_fixture.h b/src/mongo/db/update/update_node_test_fixture.h
index 22065cc7088..5125c77cfce 100644
--- a/src/mongo/db/update/update_node_test_fixture.h
+++ b/src/mongo/db/update/update_node_test_fixture.h
@@ -32,22 +32,14 @@
#include "mongo/db/concurrency/locker_noop_service_context_test_fixture.h"
#include "mongo/db/service_context.h"
#include "mongo/db/update/update_node.h"
-#include "mongo/db/update/v1_log_builder.h"
#include "mongo/db/update/v2_log_builder.h"
#include "mongo/unittest/unittest.h"
namespace mongo {
-class UpdateNodeTest : public LockerNoopServiceContextTest {
+class UpdateTestFixture : public LockerNoopServiceContextTest {
public:
- ~UpdateNodeTest() override = default;
-
- void run() {
- _useV2LogBuilder = false;
- ServiceContextTest::run();
- _useV2LogBuilder = true;
- ServiceContextTest::run();
- }
+ ~UpdateTestFixture() override = default;
protected:
// Creates a RuntimeUpdatePath from a string, assuming that all numeric path components are
@@ -81,11 +73,7 @@ protected:
_validateForStorage = true;
_indexData.reset();
_logDoc.reset();
- if (_useV2LogBuilder) {
- _logBuilder = std::make_unique<v2_log_builder::V2LogBuilder>();
- } else {
- _logBuilder = std::make_unique<V1LogBuilder>(_logDoc.root());
- }
+ _logBuilder = std::make_unique<v2_log_builder::V2LogBuilder>();
_modifiedPaths.clear();
}
@@ -155,37 +143,19 @@ protected:
return _modifiedPaths.toString();
}
- bool v2LogBuilderUsed() const {
- return _useV2LogBuilder;
- }
-
BSONObj getOplogEntry() const {
return _logBuilder->serialize();
}
void assertOplogEntryIsNoop() const {
- if (v2LogBuilderUsed()) {
- ASSERT_BSONOBJ_BINARY_EQ(getOplogEntry(), fromjson("{$v:2, diff: {}}"));
- } else {
- ASSERT_TRUE(getOplogEntry().isEmpty());
- }
+ ASSERT_BSONOBJ_BINARY_EQ(getOplogEntry(), fromjson("{$v:2, diff: {}}"));
}
- void assertOplogEntry(const BSONObj& expectedV1Entry,
- const BSONObj& expectedV2Entry,
- bool checkBinaryEquality = true) {
- auto assertFn = [checkBinaryEquality](auto expected, auto given) {
- if (checkBinaryEquality) {
- ASSERT_BSONOBJ_BINARY_EQ(expected, given);
- } else {
- ASSERT_BSONOBJ_EQ(expected, given);
- }
- };
-
- if (v2LogBuilderUsed()) {
- assertFn(expectedV2Entry, getOplogEntry());
+ void assertOplogEntry(const BSONObj& expectedV2Entry, bool checkBinaryEquality = true) {
+ if (checkBinaryEquality) {
+ ASSERT_BSONOBJ_BINARY_EQ(expectedV2Entry, getOplogEntry());
} else {
- assertFn(expectedV1Entry, getOplogEntry());
+ ASSERT_BSONOBJ_EQ(expectedV2Entry, getOplogEntry());
}
}
@@ -202,8 +172,6 @@ private:
mutablebson::Document _logDoc;
std::unique_ptr<LogBuilderInterface> _logBuilder;
FieldRefSetWithStorage _modifiedPaths;
-
- bool _useV2LogBuilder = false;
};
} // namespace mongo
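Every expected entry in the rewritten tests uses the $v: 2 delta format, so it is worth decoding once. Read from the assertions above: u, i, and d sections update, insert, and delete top-level fields (d maps each deleted field to false), s<field> recurses into a subdocument, and a: true marks an array sub-diff whose u<N>/s<N> members address index N. The lines below are a hand-derived example under those conventions, showing how the assertion would read inside a TEST_F against the updated fixture; it is not output captured from the real V2LogBuilder, and the section ordering could differ.

// pre-image:      {a: {b: [1, 2]}, c: 3}
// update:         {$set: {'a.b.1': 5}, $unset: {c: true}}
// post-image:     {a: {b: [1, 5]}}
// expected entry, per the grammar observed in the tests above:
assertOplogEntry(fromjson("{$v: 2, diff: {d: {c: false}, sa: {sb: {a: true, u1: 5}}}}"));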
diff --git a/src/mongo/db/update/update_object_node_test.cpp b/src/mongo/db/update/update_object_node_test.cpp
index 2e5906e8f30..ee77ffaee1b 100644
--- a/src/mongo/db/update/update_object_node_test.cpp
+++ b/src/mongo/db/update/update_object_node_test.cpp
@@ -47,8 +47,7 @@
namespace mongo {
namespace {
-using UpdateObjectNodeTest = UpdateNodeTest;
-using mongo::mutablebson::Element;
+using UpdateObjectNodeTest = UpdateTestFixture;
using unittest::assertGet;
TEST(UpdateObjectNodeTest, InvalidPathFailsToParse) {
@@ -1775,7 +1774,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateField) {
ASSERT_EQUALS(fromjson("{a: 5, b: 6}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {i: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{b}");
}
@@ -1800,7 +1799,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingField) {
ASSERT_EQUALS(fromjson("{a: 6}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 6}}"), fromjson("{$v: 2, diff: {u: {a: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1843,8 +1842,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingAndNonexistingFields) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 5, c: 7, b: 6, d: 8}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 5, b: 6, c: 7, d: 8}}"),
- fromjson("{$v: 2, diff: {u: {a: 5, c: 7}, i: {b: 6, d: 8}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 5, c: 7}, i: {b: 6, d: 8}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c, d}");
}
@@ -1887,8 +1885,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {b: 6, c: 7}, b: {d: 8, e: 9}}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6, 'a.c': 7, 'b.d': 8, 'b.e': 9}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: 6, c: 7}}, sb: {u: {d: 8, e: 9}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 6, c: 7}}, sb: {u: {d: 8, e: 9}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c, b.d, b.e}");
}
@@ -1931,8 +1928,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 0, a: {b: 6, c: 7}, b: {d: 8, e: 9}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6, 'a.c': 7, 'b.d': 8, 'b.e': 9}}"),
- fromjson("{$v: 2, diff: {i: {a: {b: 6, c: 7}, b: {d: 8, e: 9}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6, c: 7}, b: {d: 8, e: 9}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c, b.d, b.e}");
}
@@ -1969,8 +1965,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateDeeplyNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 0, a: {b: {c: {d: 6, e: 7}}, f: 8}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c.d': 6, 'a.b.c.e': 7, 'a.f': 8}}"),
- fromjson("{$v: 2, diff: {i: {a: {b: {c: {d: 6, e: 7}}, f: 8}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: {c: {d: 6, e: 7}}, f: 8}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c.d, a.b.c.e, a.f}");
}
@@ -2019,8 +2014,7 @@ TEST_F(UpdateObjectNodeTest, ChildrenShouldBeAppliedInAlphabeticalOrder) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 9, a: 5, b: 8, c: 7, d: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 5, b: 8, c: 7, d: 6, z: 9}}"),
- fromjson("{$v: 2, diff: {u: {a: 5, z: 9}, i: {b: 8, c: 7, d: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 5, z: 9}, i: {b: 8, c: 7, d: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c, d, z}");
}
@@ -2054,8 +2048,7 @@ TEST_F(UpdateObjectNodeTest, CollatorShouldNotAffectUpdateOrder) {
ASSERT_BSONOBJ_EQ(fromjson("{abc: 5, cba: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {abc: 5, cba: 6}}"),
- fromjson("{$v: 2, diff: {i: {abc: 5, cba: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {abc: 5, cba: 6}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyNoop) {
@@ -2132,7 +2125,7 @@ TEST_F(UpdateObjectNodeTest, ApplySomeChildrenNoops) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 5, b: 6, c: 7}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {u: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {b: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c}");
}
@@ -2186,7 +2179,7 @@ TEST_F(UpdateObjectNodeTest, ApplyBlockingElementFromReplication) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 0, b: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {i: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 6}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyPositionalMissingMatchedField) {
@@ -2240,8 +2233,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMergePositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
}
@@ -2285,8 +2277,7 @@ TEST_F(UpdateObjectNodeTest, ApplyOrderMergedPositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 7, 'a.1.b': 6, 'a.1.c': 8, 'a.2': 5}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1.b, a.1.c, a.2}");
}
@@ -2353,8 +2344,7 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotMergePositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 5, '1': 7, '2': 6}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 5, 'a.1': 7, 'a.2': 6}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 5, '1': 7, '2': 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 5, '1': 7, '2': 6}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1, a.2}");
}
@@ -2392,8 +2382,7 @@ TEST_F(UpdateObjectNodeTest, ApplyPositionalChildLast) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 6, '1': 7, '2': 5}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 6, 'a.1': 7, 'a.2': 5}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 6, '1': 7, '2': 5}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 6, '1': 7, '2': 5}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1, a.2}");
}
@@ -2425,8 +2414,7 @@ TEST_F(UpdateObjectNodeTest, ApplyUseStoredMergedPositional) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
mutablebson::Document doc2(fromjson("{a: [{b: 0, c: 0}]}"));
@@ -2439,8 +2427,7 @@ TEST_F(UpdateObjectNodeTest, ApplyUseStoredMergedPositional) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc2.getObject());
ASSERT_TRUE(doc2.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
}
@@ -2479,7 +2466,6 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotUseStoredMergedPositional) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6, 'a.1.d': 7}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}, s1: {u: {d: 7}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c, a.1.d}");
@@ -2494,7 +2480,6 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotUseStoredMergedPositional) {
ASSERT_TRUE(doc2.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b': 5, 'a.1.c': 6, 'a.1.d': 7}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5}}, s1: {u: {c: 6, d: 7}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.1.c, a.1.d}");
}
@@ -2525,8 +2510,7 @@ TEST_F(UpdateObjectNodeTest, ApplyToArrayByIndexWithLeadingZero) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [0, 0, 2, 0, 0]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.02': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.02}");
}
@@ -2565,8 +2549,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMultipleArrayUpdates) {
doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2': 2, 'a.10': 10}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: 2, u10: 10}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: 2, u10: 10}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyMultipleUpdatesToDocumentInArray) {
@@ -2596,8 +2579,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMultipleUpdatesToDocumentInArray) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [null, null, {b: 1, c: 1}]}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 1, 'a.2.c': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 1, c: 1}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 1, c: 1}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -2648,8 +2630,7 @@ TEST_F(UpdateObjectNodeTest, SetAndPopModifiersWithCommonPrefixApplySuccessfully
ASSERT_BSONOBJ_EQ(fromjson("{a: {b: 5, c: [2, 3, 4]}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 5, 'a.c': [2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: 5, c: [ 2, 3, 4 ]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 5, c: [ 2, 3, 4 ]}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c}");
}
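The expectations above exercise most of the $v:2 diff grammar. An informal reading of the operators as they appear in these tests (a summary for review purposes, not a normative description of the format):

    // {$v: 2, diff: {u: {a: 6}}}                            — update existing top-level field 'a'
    // {$v: 2, diff: {i: {b: 6}}}                            — insert new top-level field 'b'
    // {$v: 2, diff: {sa: {u: {b: 6, c: 7}}}}                — descend into subdocument 'a'; update 'b' and 'c'
    // {$v: 2, diff: {sa: {a: true, u2: 2}}}                 — 'a' is an array; update the element at index 2
    // {$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}} — element 0 of array 'a' is a document; update its fields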
diff --git a/src/mongo/db/update/update_oplog_entry_version.h b/src/mongo/db/update/update_oplog_entry_version.h
index 5338ddcc76e..cbd18e51834 100644
--- a/src/mongo/db/update/update_oplog_entry_version.h
+++ b/src/mongo/db/update/update_oplog_entry_version.h
@@ -45,9 +45,11 @@ enum class UpdateOplogEntryVersion {
// Ancient update system which was deleted in 4.0. We still reserve its version number.
kRemovedV0 = 0,
- // The update system introduced in v3.6. When a single update adds multiple fields, those
- // fields are added in lexicographic order by field name. This system introduces support for
- // arrayFilters and $[] syntax.
+ // The update system introduced in v3.6 which, until 5.1, also defined the format in which
+ // updates were recorded in oplog entries. Oplog entries of this form are no longer supported,
+ // but the user-facing modifier-style update system remains. When a single update adds multiple
+ // fields, those fields are added in lexicographic order by field name. This system introduces
+ // support for arrayFilters and $[] syntax.
kUpdateNodeV1 = 1,
// Delta style update, introduced in 4.7. When a pipeline based update is executed, the pre and
diff --git a/src/mongo/db/update/update_tree_executor.h b/src/mongo/db/update/update_tree_executor.h
index e9f7dc93f99..520e1cfe177 100644
--- a/src/mongo/db/update/update_tree_executor.h
+++ b/src/mongo/db/update/update_tree_executor.h
@@ -33,7 +33,6 @@
#include "mongo/db/update/update_node.h"
#include "mongo/db/update/update_object_node.h"
-#include "mongo/db/update/v1_log_builder.h"
#include "mongo/db/update/v2_log_builder.h"
namespace mongo {
@@ -45,27 +44,11 @@ public:
ApplyResult applyUpdate(ApplyParams applyParams) const final {
mutablebson::Document logDocument;
- boost::optional<V1LogBuilder> optV1LogBuilder;
boost::optional<v2_log_builder::V2LogBuilder> optV2LogBuilder;
UpdateNode::UpdateNodeApplyParams updateNodeApplyParams;
- if (applyParams.logMode == ApplyParams::LogMode::kGenerateOnlyV1OplogEntry) {
- // In versions since 3.6, the absence of a $v field indicates either a
- // replacement-style update or a "classic" modifier-style update.
- //
- // Since 3.6, the presence of a $v field with value 1 may also indicate that the oplog
- // entry is a "classic" modifier-style update.
- //
- // While we could elide this $v field when providing a value of 1, we continue to log
- // it because:
- // (a) It avoids an unnecessary oplog format change.
- // (b) It is easy to distinguish from $v: 2 delta-style oplog entries.
- const bool includeVersionField = true;
-
- optV1LogBuilder.emplace(logDocument.root(), includeVersionField);
- updateNodeApplyParams.logBuilder = optV1LogBuilder.get_ptr();
- } else if (applyParams.logMode == ApplyParams::LogMode::kGenerateOplogEntry) {
+ if (applyParams.logMode == ApplyParams::LogMode::kGenerateOplogEntry) {
optV2LogBuilder.emplace();
updateNodeApplyParams.logBuilder = optV2LogBuilder.get_ptr();
}
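Only one oplog-generating mode remains in applyUpdate. A hedged sketch of the logging setup that survives this change, factored into a hypothetical helper purely for illustration (all names other than the helper's are taken from the hunk above):

    // Hypothetical helper; the real logic stays inline in applyUpdate.
    UpdateNode::UpdateNodeApplyParams makeLoggingParams(
        const ApplyParams& applyParams,
        boost::optional<v2_log_builder::V2LogBuilder>& optV2LogBuilder) {
        UpdateNode::UpdateNodeApplyParams updateNodeApplyParams;
        // The v1-only mode and its V1LogBuilder branch are gone; any request for an
        // oplog entry now routes through the v2 (delta) builder.
        if (applyParams.logMode == ApplyParams::LogMode::kGenerateOplogEntry) {
            optV2LogBuilder.emplace();
            updateNodeApplyParams.logBuilder = optV2LogBuilder.get_ptr();
        }
        return updateNodeApplyParams;
    }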
diff --git a/src/mongo/db/update/v1_log_builder.cpp b/src/mongo/db/update/v1_log_builder.cpp
deleted file mode 100644
index ef3ff88abff..00000000000
--- a/src/mongo/db/update/v1_log_builder.cpp
+++ /dev/null
@@ -1,143 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/update/v1_log_builder.h"
-
-#include "mongo/db/update/runtime_update_path.h"
-#include "mongo/db/update/update_oplog_entry_serialization.h"
-#include "mongo/util/str.h"
-
-namespace mongo {
-
-using mutablebson::Element;
-
-namespace {
-const char kSet[] = "$set";
-const char kUnset[] = "$unset";
-} // namespace
-
-V1LogBuilder::V1LogBuilder(mutablebson::Element logRoot, bool includeVersionField)
- : _logRoot(logRoot),
- _setAccumulator(_logRoot.getDocument().end()),
- _unsetAccumulator(_setAccumulator) {
- invariant(logRoot.isType(mongo::Object));
- invariant(!logRoot.hasChildren());
-
- if (includeVersionField) {
- auto version = logRoot.getDocument().makeElementInt(
- kUpdateOplogEntryVersionFieldName,
- static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1));
- invariant(_logRoot.pushFront(version).isOK());
- }
-}
-
-Status V1LogBuilder::addToSection(Element newElt, Element* section, const char* sectionName) {
- // If we don't already have this section, try to create it now.
- if (!section->ok()) {
- mutablebson::Document& doc = _logRoot.getDocument();
-
- // We should not already have an element with the section name under the root.
- dassert(_logRoot[sectionName] == doc.end());
-
- // Construct a new object element to represent this section in the log.
- const Element newElement = doc.makeElementObject(sectionName);
- if (!newElement.ok())
- return Status(ErrorCodes::InternalError,
- "V1LogBuilder: failed to construct Object Element for $set/$unset");
-
- // Enqueue the new section under the root, and record it as our out parameter.
- Status result = _logRoot.pushBack(newElement);
- if (!result.isOK())
- return result;
- *section = newElement;
- }
-
- // Whatever transpired, we should now have an ok accumulator for the section, and not
- // have a replacement accumulator.
- dassert(section->ok());
-
- // Enqueue the provided element to the section and propagate the result.
- return section->pushBack(newElt);
-}
-
-Status V1LogBuilder::addToSets(Element elt) {
- return addToSection(elt, &_setAccumulator, kSet);
-}
-
-Status V1LogBuilder::addToSetsWithNewFieldName(StringData name, const mutablebson::Element val) {
- mutablebson::Element elemToSet = _logRoot.getDocument().makeElementWithNewFieldName(name, val);
- if (!elemToSet.ok())
- return Status(ErrorCodes::InternalError,
- str::stream()
- << "Could not create new '" << name << "' element from existing element '"
- << val.getFieldName() << "' of type " << typeName(val.getType()));
-
- return addToSets(elemToSet);
-}
-
-Status V1LogBuilder::addToSetsWithNewFieldName(StringData name, const BSONElement& val) {
- mutablebson::Element elemToSet = _logRoot.getDocument().makeElementWithNewFieldName(name, val);
- if (!elemToSet.ok())
- return Status(ErrorCodes::InternalError,
- str::stream()
- << "Could not create new '" << name << "' element from existing element '"
- << val.fieldName() << "' of type " << typeName(val.type()));
-
- return addToSets(elemToSet);
-}
-
-Status V1LogBuilder::addToUnsets(StringData path) {
- mutablebson::Element logElement = _logRoot.getDocument().makeElementBool(path, true);
- if (!logElement.ok())
- return Status(ErrorCodes::InternalError,
- str::stream() << "Cannot create $unset oplog entry for path" << path);
-
- return addToSection(logElement, &_unsetAccumulator, kUnset);
-}
-
-Status V1LogBuilder::logUpdatedField(const RuntimeUpdatePath& path, mutablebson::Element elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- mutablebson::Element elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- BSONElement elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logDeletedField(const RuntimeUpdatePath& path) {
- return addToUnsets(path.fieldRef().dottedField());
-}
-} // namespace mongo
diff --git a/src/mongo/db/update/v1_log_builder.h b/src/mongo/db/update/v1_log_builder.h
deleted file mode 100644
index 7be6abc57a0..00000000000
--- a/src/mongo/db/update/v1_log_builder.h
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include "mongo/base/status.h"
-#include "mongo/bson/mutable/document.h"
-#include "mongo/db/update/log_builder_interface.h"
-
-namespace mongo {
-class RuntimeUpdatePath;
-
-/**
- * LogBuilder abstracts away some of the details of producing a properly constructed oplog $v:1
- * modifier-style update entry. It manages separate regions into which it accumulates $set and
- * $unset operations.
- */
-class V1LogBuilder : public LogBuilderInterface {
-public:
- /**
- * Construct a new LogBuilder. Log entries will be recorded as new children under the
- * 'logRoot' Element, which must be of type mongo::Object and have no children.
- *
- * The 'includeVersionField' indicates whether the generated log entry should include a $v
- * (version) field.
- */
- V1LogBuilder(mutablebson::Element logRoot, bool includeVersionField = false);
-
- /**
- * Overloads from LogBuilderInterface. Each of these methods logs a modification to the document
- * in _logRoot. The field name given in the mutablebson element or BSONElement is ignored
- * and the 'path' argument is used instead.
- */
- Status logUpdatedField(const RuntimeUpdatePath& path, mutablebson::Element elt) override;
-
- /**
- * Logs the creation of a new field. The 'idxOfFirstNewComponent' parameter is unused in this
- * implementation.
- */
- Status logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- mutablebson::Element elt) override;
- Status logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- BSONElement elt) override;
-
- Status logDeletedField(const RuntimeUpdatePath& path) override;
-
- /**
- * Return the Document to which the logging root belongs.
- */
- inline mutablebson::Document& getDocument() {
- return _logRoot.getDocument();
- }
-
- /**
- * Produces a BSON object representing this update using the modifier syntax which can be
- * stored in the oplog.
- */
- BSONObj serialize() const override {
- return _logRoot.getDocument().getObject();
- }
-
-private:
- /**
- * Add the given Element as a new entry in the '$set' section of the log. If a $set section
- * does not yet exist, it will be created. If this LogBuilder is currently configured to
- * contain an object replacement, the request to add to the $set section will return an Error.
- */
- Status addToSets(mutablebson::Element elt);
-
- /**
- * Convenience method which calls addToSets after
- * creating a new Element to wrap the old one.
- *
- * If any problem occurs then the operation will stop and return that error Status.
- */
- Status addToSetsWithNewFieldName(StringData name, mutablebson::Element val);
-
- /**
- * Convenience method which calls addToSets after
- * creating a new Element to wrap the old one.
- *
- * If any problem occurs then the operation will stop and return that error Status.
- */
- Status addToSetsWithNewFieldName(StringData name, const BSONElement& val);
-
- /**
- * Add the given path as a new entry in the '$unset' section of the log. If an '$unset' section
- * does not yet exist, it will be created. If this LogBuilder is currently configured to
- * contain an object replacement, the request to add to the $unset section will return an
- * Error.
- */
- Status addToUnsets(StringData path);
-
- Status addToSection(mutablebson::Element newElt,
- mutablebson::Element* section,
- const char* sectionName);
-
- mutablebson::Element _logRoot;
- mutablebson::Element _setAccumulator;
- mutablebson::Element _unsetAccumulator;
-};
-} // namespace mongo
diff --git a/src/mongo/db/update/v1_log_builder_test.cpp b/src/mongo/db/update/v1_log_builder_test.cpp
deleted file mode 100644
index 1e599181ef3..00000000000
--- a/src/mongo/db/update/v1_log_builder_test.cpp
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/update/v1_log_builder.h"
-
-#include "mongo/base/status.h"
-#include "mongo/bson/bsonobj.h"
-#include "mongo/bson/mutable/mutable_bson_test_utils.h"
-#include "mongo/db/json.h"
-#include "mongo/db/update/runtime_update_path.h"
-#include "mongo/unittest/unittest.h"
-#include "mongo/util/safe_num.h"
-
-namespace mongo {
-namespace {
-namespace mmb = mongo::mutablebson;
-
-/**
- * Given a FieldRef, creates a RuntimeUpdatePath based on it, assuming that every component is a
- * field name. This is safe to do while testing the V1 log builder, since it ignores the types of
- * the path given entirely.
- */
-RuntimeUpdatePath makeRuntimeUpdatePathAssumeAllComponentsFieldNames(StringData path) {
- FieldRef fieldRef(path);
- RuntimeUpdatePath::ComponentTypeVector types(fieldRef.numParts(),
- RuntimeUpdatePath::ComponentType::kFieldName);
- return RuntimeUpdatePath(std::move(fieldRef), std::move(types));
-}
-
-TEST(V1LogBuilder, UpdateFieldMutableBson) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, CreateField) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"),
- 0, // idxOfFirstNewComponent (unused)
- elt_ab));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, CreateFieldBSONElt) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- BSONObj storage = BSON("a" << 1);
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"),
- 0, // idxOfFirstNewComponent (unused)
- storage.firstElement()));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, AddOneToUnset) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
- ASSERT_EQUALS(mongo::fromjson("{ $unset : { 'x.y' : true } }"), doc);
-}
-TEST(V1LogBuilder, AddOneToEach) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- const mmb::Element elt_cd = doc.makeElementInt("", 2);
- ASSERT_TRUE(elt_cd.ok());
-
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("c.d"),
- 0, // idxOfCreatedComponent (unused)
- elt_cd));
-
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
-
- ASSERT_EQUALS(mongo::fromjson("{ "
- " $set : { 'a.b' : 1, 'c.d': 2 }, "
- " $unset : { 'x.y' : true } "
- "}"),
- doc);
-}
-TEST(V1LogBuilder, VerifySetsAreGrouped) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- const mmb::Element elt_xy = doc.makeElementInt("x.y", 1);
- ASSERT_TRUE(elt_xy.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y"), elt_xy));
-
- ASSERT_EQUALS(mongo::fromjson("{ $set : {"
- " 'a.b' : 1, "
- " 'x.y' : 1 "
- "} }"),
- doc);
-}
-
-TEST(V1LogBuilder, VerifyUnsetsAreGrouped) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b")));
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
-
- ASSERT_EQUALS(mongo::fromjson("{ $unset : {"
- " 'a.b' : true, "
- " 'x.y' : true "
- "} }"),
- doc);
-}
-} // namespace
-} // namespace mongo
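The deleted tests above are the last in-tree record of the $v:1 modifier-style output. For comparison, the same logical change (updating the existing dotted path a.b) serializes as follows; the first line is verbatim from the removed UpdateFieldMutableBson test, while the second follows the sub-diff pattern of the update_object_node tests earlier in this patch, so treat the pairing as illustrative rather than captured output:

    // $v:1 (V1LogBuilder, removed): dotted paths under a top-level $set section.
    const BSONObj v1Entry = fromjson("{ $set : { 'a.b' : 1 } }");
    // $v:2 (V2LogBuilder, now the only writer): a sub-diff for 'a' that updates its field 'b'.
    const BSONObj v2Entry = fromjson("{$v: 2, diff: {sa: {u: {b: 1}}}}");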