summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMihai Andrei <mihai.andrei@10gen.com>2022-06-30 20:38:44 +0000
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2022-06-30 21:39:25 +0000
commit17f49ab8b16a6f45371771d8c152cfbf4ed186af (patch)
tree882643f79f42b6b0267cbc870eb03b135f231e1c
parent69ce17c6b02c009fc2d0502c6761f8a3e2e541ee (diff)
downloadmongo-17f49ab8b16a6f45371771d8c152cfbf4ed186af.tar.gz
SERVER-50301 Delete code which handles $v:1 update oplog entries
-rw-r--r--buildscripts/resmokeconfig/suites/change_streams_update_v1_oplog.yml62
-rw-r--r--buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog.yml19
-rw-r--r--buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog_jscore_passthrough.yml50
-rw-r--r--buildscripts/resmokeconfig/suites/sharding_update_v1_oplog.yml20
-rw-r--r--buildscripts/resmokeconfig/suites/sharding_update_v1_oplog_jscore_passthrough.yml70
-rw-r--r--etc/evergreen_timeouts.yml6
-rw-r--r--etc/evergreen_yml_components/definitions.yml36
-rw-r--r--jstests/auth/user_defined_roles_on_secondaries.js2
-rw-r--r--jstests/change_streams/lookup_pit_pre_and_post_image_in_transaction.js2
-rw-r--r--jstests/change_streams/oplog_rewrite/change_stream_match_pushdown_updateDescription_rewrite.js447
-rw-r--r--jstests/change_streams/pipeline_style_updates.js173
-rw-r--r--jstests/change_streams/show_raw_update_description.js7
-rw-r--r--jstests/change_streams/show_raw_update_description_v1_oplog.js207
-rw-r--r--jstests/core/apply_ops1.js49
-rw-r--r--jstests/core/apply_ops2.js46
-rw-r--r--jstests/core/apply_ops_missing_field.js20
-rw-r--r--jstests/core/collation.js18
-rw-r--r--jstests/core/internal_apply_oplog_update.js21
-rw-r--r--jstests/core/json_schema/misc_validation.js2
-rw-r--r--jstests/core/txns/commands_not_allowed_in_txn.js12
-rw-r--r--jstests/libs/change_stream_util.js16
-rw-r--r--jstests/noPassthrough/apply_ops_atomic.js11
-rw-r--r--jstests/noPassthrough/apply_ops_mode.js2
-rw-r--r--jstests/noPassthrough/oplog_writes_only_permitted_on_standalone.js2
-rw-r--r--jstests/noPassthrough/server_write_concern_metrics.js12
-rw-r--r--jstests/noPassthrough/write_change_stream_pit_preimage_in_transaction.js4
-rw-r--r--jstests/replsets/apply_ops_inserts_do_not_include_fromMigrate_field.js4
-rw-r--r--jstests/replsets/change_stream_pit_pre_images.js2
-rw-r--r--jstests/replsets/initial_sync_update_missing_doc_upsert.js4
-rw-r--r--jstests/replsets/initial_sync_update_missing_field.js32
-rw-r--r--jstests/replsets/oplog_format.js95
-rw-r--r--src/mongo/db/commands/fle2_compact.cpp2
-rw-r--r--src/mongo/db/commands/write_commands.cpp3
-rw-r--r--src/mongo/db/fle_crud.cpp12
-rw-r--r--src/mongo/db/fle_crud_test.cpp11
-rw-r--r--src/mongo/db/ops/write_ops.cpp46
-rw-r--r--src/mongo/db/ops/write_ops_parsers.h24
-rw-r--r--src/mongo/db/query/query_knobs.idl7
-rw-r--r--src/mongo/db/repl/SConscript2
-rw-r--r--src/mongo/db/repl/idempotency_test.cpp127
-rw-r--r--src/mongo/db/repl/idempotency_update_sequence.cpp284
-rw-r--r--src/mongo/db/repl/idempotency_update_sequence.h128
-rw-r--r--src/mongo/db/repl/idempotency_update_sequence_test.cpp318
-rw-r--r--src/mongo/db/repl/oplog_applier_impl_test.cpp96
-rw-r--r--src/mongo/db/repl/replication_recovery_test.cpp21
-rw-r--r--src/mongo/db/repl/session_update_tracker.cpp8
-rw-r--r--src/mongo/db/repl/storage_timestamp_test.cpp42
-rw-r--r--src/mongo/db/repl/tenant_oplog_applier_test.cpp19
-rw-r--r--src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp14
-rw-r--r--src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp26
-rw-r--r--src/mongo/db/update/SConscript2
-rw-r--r--src/mongo/db/update/addtoset_node_test.cpp37
-rw-r--r--src/mongo/db/update/arithmetic_node_test.cpp37
-rw-r--r--src/mongo/db/update/bit_node_test.cpp25
-rw-r--r--src/mongo/db/update/compare_node_test.cpp23
-rw-r--r--src/mongo/db/update/current_date_node_test.cpp40
-rw-r--r--src/mongo/db/update/object_replace_executor_test.cpp2
-rw-r--r--src/mongo/db/update/object_transform_executor_test.cpp2
-rw-r--r--src/mongo/db/update/pipeline_executor_test.cpp123
-rw-r--r--src/mongo/db/update/pop_node_test.cpp20
-rw-r--r--src/mongo/db/update/pull_node_test.cpp58
-rw-r--r--src/mongo/db/update/pullall_node_test.cpp17
-rw-r--r--src/mongo/db/update/push_node_test.cpp73
-rw-r--r--src/mongo/db/update/rename_node_test.cpp41
-rw-r--r--src/mongo/db/update/set_node_test.cpp47
-rw-r--r--src/mongo/db/update/unset_node_test.cpp37
-rw-r--r--src/mongo/db/update/update_array_node_test.cpp32
-rw-r--r--src/mongo/db/update/update_driver.cpp42
-rw-r--r--src/mongo/db/update/update_executor.h4
-rw-r--r--src/mongo/db/update/update_node_test_fixture.h48
-rw-r--r--src/mongo/db/update/update_object_node_test.cpp61
-rw-r--r--src/mongo/db/update/update_oplog_entry_version.h8
-rw-r--r--src/mongo/db/update/update_tree_executor.h19
-rw-r--r--src/mongo/db/update/v1_log_builder.cpp143
-rw-r--r--src/mongo/db/update/v1_log_builder.h130
-rw-r--r--src/mongo/db/update/v1_log_builder_test.cpp158
76 files changed, 937 insertions, 2935 deletions
diff --git a/buildscripts/resmokeconfig/suites/change_streams_update_v1_oplog.yml b/buildscripts/resmokeconfig/suites/change_streams_update_v1_oplog.yml
deleted file mode 100644
index 6be4f68ba9f..00000000000
--- a/buildscripts/resmokeconfig/suites/change_streams_update_v1_oplog.yml
+++ /dev/null
@@ -1,62 +0,0 @@
-test_kind: js_test
-
-selector:
- roots:
- - jstests/change_streams/**/*.js
- exclude_files:
- # Expects oplog entries to be in $v:2 format.
- - jstests/change_streams/expanded_update_description.js
- - jstests/change_streams/pipeline_style_updates_v2_oplog_entries.js
- - jstests/change_streams/show_expanded_events.js
-
- exclude_with_any_tags:
- ##
- # The next tags correspond to the special errors thrown by the
- # set_read_and_write_concerns.js override when it refuses to replace the readConcern or
- # writeConcern of a particular command. Above each tag are the message(s) that cause the tag to be
- # warranted.
- ##
- # "Cowardly refusing to override write concern of command: ..."
- - assumes_write_concern_unchanged
-
-executor:
- archive:
- hooks:
- - CheckReplDBHash
- - CheckReplOplogs
- - ValidateCollections
- config:
- shell_options:
- global_vars:
- TestData:
- defaultReadConcernLevel: null
- enableMajorityReadConcern: ''
- # Enable causal consistency for change streams suites. Some tests rely on the assumption that
- # a w:majority write will be visible immediately in a subsequently opened change stream. An
- # operation that majority commits at timestamp T will force the majority snapshot to advance
- # to T, but the oplog visibility point may not have advanced to T yet. Subsequent majority
- # snapshot reads will see this write in the oplog, but speculative majority reads may not,
- # since they read from a local snapshot and are bound to the oplog visibility rules. Using
- # causal consistency forces the visibility point to advance to the timestamp of the last write
- # before doing a new read.
- eval: >-
- var testingReplication = true;
- load('jstests/libs/override_methods/set_read_and_write_concerns.js');
- load('jstests/libs/override_methods/enable_causal_consistency_without_read_pref.js');
- hooks:
- # The CheckReplDBHash hook waits until all operations have replicated to and have been applied
- # on the secondaries, so we run the ValidateCollections hook after it to ensure we're
- # validating the entire contents of the collection.
- - class: CheckReplOplogs
- - class: CheckReplDBHash
- - class: ValidateCollections
- - class: CleanEveryN
- n: 20
- fixture:
- class: ReplicaSetFixture
- mongod_options:
- bind_ip_all: ''
- set_parameters:
- enableTestCommands: 1
- internalQueryEnableLoggingV2OplogEntries: false
- num_nodes: 2
diff --git a/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog.yml b/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog.yml
deleted file mode 100644
index 0d1dd1a5a57..00000000000
--- a/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-test_kind: js_test
-
-selector:
- roots:
- - jstests/replsets/*.js
- exclude_files:
- # Expects oplog entries to be in $v:2 format.
- - jstests/replsets/v2_delta_oplog_entries.js
- - jstests/replsets/rollback_with_coalesced_txn_table_updates_during_oplog_application.js
- - jstests/replsets/tenant_migration_recipient_fetches_retryable_writes_entry_after_committed_snapshot.js
-
-executor:
- config:
- shell_options:
- nodb: ''
- global_vars:
- TestData:
- setParameters:
- internalQueryEnableLoggingV2OplogEntries: false
diff --git a/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog_jscore_passthrough.yml b/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog_jscore_passthrough.yml
deleted file mode 100644
index db98749b965..00000000000
--- a/buildscripts/resmokeconfig/suites/replica_sets_update_v1_oplog_jscore_passthrough.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-test_kind: js_test
-
-selector:
- roots:
- - jstests/core/**/*.js
- - jstests/fle2/**/*.js
- - src/mongo/db/modules/*/jstests/fle2/*.js
- exclude_files:
- # These tests change the transactionLifetimeLimitSeconds server parameter which conflicts with how
- # the CheckReplDBHashInBackground hook doesn't want transactions to be reaped while it is running.
- - jstests/core/txns/abort_expired_transaction.js
- - jstests/core/txns/abort_transaction_thread_does_not_block_on_locks.js
- - jstests/core/txns/kill_op_on_txn_expiry.js
- # The set_param1.js test attempts to compare the response from running the {getParameter: "*"}
- # command multiple times, which may observe the change to the "transactionLifetimeLimitSeconds"
- # server parameter.
- - jstests/core/set_param1.js
-
- exclude_with_any_tags:
- - assumes_standalone_mongod
-
-executor:
- archive:
- hooks:
- - CheckReplDBHashInBackground
- - ValidateCollectionsInBackground
- - CheckReplDBHash
- - CheckReplOplogs
- - ValidateCollections
- config:
- shell_options:
- eval: "testingReplication = true;"
- hooks:
- # The CheckReplDBHash hook waits until all operations have replicated to and have been applied
- # on the secondaries, so we run the ValidateCollections hook after it to ensure we're
- # validating the entire contents of the collection.
- - class: CheckReplDBHashInBackground
- - class: ValidateCollectionsInBackground
- - class: CheckReplOplogs
- - class: CheckReplDBHash
- - class: ValidateCollections
- - class: CleanEveryN
- n: 20
- fixture:
- class: ReplicaSetFixture
- mongod_options:
- set_parameters:
- enableTestCommands: 1
- internalQueryEnableLoggingV2OplogEntries: false
- num_nodes: 2
diff --git a/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog.yml b/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog.yml
deleted file mode 100644
index 5299e7cdadd..00000000000
--- a/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-test_kind: js_test
-
-selector:
- roots:
- - jstests/sharding/**/*.js
- exclude_files:
- - jstests/sharding/**/libs/**/*.js
-
-executor:
- archive:
- tests:
- - jstests/sharding/*reshard*.js
- config:
- shell_options:
- nodb: ''
- global_vars:
- TestData:
- setParameters:
- internalQueryEnableLoggingV2OplogEntries: false
- receiveChunkWaitForRangeDeleterTimeoutMS: 90000
diff --git a/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog_jscore_passthrough.yml b/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog_jscore_passthrough.yml
deleted file mode 100644
index 2278a3585bf..00000000000
--- a/buildscripts/resmokeconfig/suites/sharding_update_v1_oplog_jscore_passthrough.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-test_kind: js_test
-
-selector:
- roots:
- - jstests/core/**/*.js
- - jstests/fle2/**/*.js
- - src/mongo/db/modules/*/jstests/fle2/*.js
- exclude_files:
- # These tests are run in sharded_jscore_txns.
- - jstests/core/txns/**/*.js
- # The following tests fail because a certain command or functionality is not supported on
- # mongos. This command or functionality is placed in a comment next to the failing test.
- - jstests/core/apitest_db.js # serverStatus output doesn't have storageEngine.
- - jstests/core/check_shard_index.js # checkShardingIndex.
- - jstests/core/collection_truncate.js # emptycapped.
- - jstests/core/compact_keeps_indexes.js # compact.
- - jstests/core/currentop.js # uses fsync.
- - jstests/core/dbhash.js # dbhash.
- - jstests/core/dbhash2.js # dbhash.
- - jstests/core/fsync.js # uses fsync.
- - jstests/core/geo_s2cursorlimitskip.js # profiling.
- - jstests/core/geo_update_btree2.js # notablescan.
- - jstests/core/index9.js # "local" database.
- - jstests/core/queryoptimizera.js # "local" database.
- - jstests/core/stages*.js # stageDebug.
- - jstests/core/startup_log.js # "local" database.
- - jstests/core/top.js # top.
- # The following tests fail because mongos behaves differently from mongod when testing certain
- # functionality. The differences are in a comment next to the failing test.
- - jstests/core/explain_missing_database.js # Behavior with no db different on mongos.
- - jstests/core/geo_2d_explain.js # executionSuccess in different spot in explain().
- - jstests/core/geo_s2explain.js # inputStage in different spot in explain().
- - jstests/core/geo_s2sparse.js # keysPerIndex in different spot in validate().
- - jstests/core/operation_latency_histogram.js # Stats are counted differently on mongos, SERVER-24880.
- - jstests/core/killop_drop_collection.js # Uses fsyncLock.
- - jstests/core/or_to_in.js # queryPlanner in different spot in explain()
- # The following tests fail because of divergent dropCollection behavior between standalones and
- # sharded clusters. These tests expect a second drop command to error, whereas in sharded clusters
- # we expect a second drop to return status OK.
- - jstests/core/explain_upsert.js
-
- exclude_with_any_tags:
- - assumes_standalone_mongod
- - assumes_against_mongod_not_mongos
- # system.profile collection doesn't exist on mongos.
- - requires_profiling
-
-executor:
- archive:
- hooks:
- - CheckReplDBHash
- - ValidateCollections
- config: {}
- hooks:
- - class: CheckReplDBHash
- - class: ValidateCollections
- - class: CleanEveryN
- n: 20
- fixture:
- class: ShardedClusterFixture
- mongos_options:
- set_parameters:
- enableTestCommands: 1
- mongod_options:
- set_parameters:
- enableTestCommands: 1
- internalQueryEnableLoggingV2OplogEntries: false
- num_rs_nodes_per_shard: 1
- enable_sharding:
- - test
diff --git a/etc/evergreen_timeouts.yml b/etc/evergreen_timeouts.yml
index 33b1a6784a0..b1fa2f2f205 100644
--- a/etc/evergreen_timeouts.yml
+++ b/etc/evergreen_timeouts.yml
@@ -47,8 +47,6 @@ overrides:
enterprise-windows-all-feature-flags-suggested:
- task: replica_sets_jscore_passthrough
exec_timeout: 180 # 3 hours.
- - task: replica_sets_update_v1_oplog_jscore_passthrough
- exec_timeout: 150 # 2.5 hours.
enterprise-windows-inmem:
- task: replica_sets_jscore_passthrough
@@ -57,8 +55,6 @@ overrides:
enterprise-windows-required:
- task: replica_sets_jscore_passthrough
exec_timeout: 180 # 3 hours.
- - task: replica_sets_update_v1_oplog_jscore_passthrough
- exec_timeout: 150 # 2.5 hours.
linux-64-debug:
- task: auth
@@ -119,5 +115,3 @@ overrides:
exec_timeout: 150 # 2.5 hours.
- task: replica_sets_jscore_passthrough
exec_timeout: 180 # 3 hours.
- - task: replica_sets_update_v1_oplog_jscore_passthrough
- exec_timeout: 150 # 2.5 hours.
diff --git a/etc/evergreen_yml_components/definitions.yml b/etc/evergreen_yml_components/definitions.yml
index be2536f71af..e0815b99ee5 100644
--- a/etc/evergreen_yml_components/definitions.yml
+++ b/etc/evergreen_yml_components/definitions.yml
@@ -4372,13 +4372,6 @@ tasks:
- func: "generate resmoke tasks"
- <<: *task_template
- name: change_streams_update_v1_oplog
- tags: ["change_streams"]
- commands:
- - func: "do setup"
- - func: "run tests"
-
-- <<: *task_template
name: change_streams_mongos_sessions_passthrough
tags: ["change_streams"]
depends_on:
@@ -5011,12 +5004,6 @@ tasks:
- func: "run tests"
- <<: *gen_task_template
- name: replica_sets_update_v1_oplog_jscore_passthrough_gen
- tags: ["replica_sets", "non_maj_read"]
- commands:
- - func: "generate resmoke tasks"
-
-- <<: *gen_task_template
name: replica_sets_initsync_jscore_passthrough_gen
tags: ["replica_sets", "san", "large"]
commands:
@@ -5181,15 +5168,6 @@ tasks:
fallback_num_sub_suites: 5
- <<: *task_template
- name: sharding_update_v1_oplog_jscore_passthrough
- tags: ["sharding", "jscore"]
- commands:
- - func: "do setup"
- - func: "run tests"
- vars:
- suite: sharding_jscore_passthrough
-
-- <<: *task_template
name: sharded_multi_stmt_txn_jscore_passthrough
tags: ["sharding", "jscore", "multi_stmt"]
commands:
@@ -5781,12 +5759,6 @@ tasks:
- func: "generate resmoke tasks"
- <<: *gen_task_template
- name: replica_sets_update_v1_oplog_gen
- tags: ["replica_sets", "san"]
- commands:
- - func: "generate resmoke tasks"
-
-- <<: *gen_task_template
name: replica_sets_multiversion_gen
tags: ["random_multiversion_ds", "multiversion"]
commands:
@@ -5892,14 +5864,6 @@ tasks:
use_large_distro: "true"
- <<: *gen_task_template
- name: sharding_update_v1_oplog_gen
- tags: ["sharding", "common"]
- commands:
- - func: "generate resmoke tasks"
- vars:
- use_large_distro: "true"
-
-- <<: *gen_task_template
name: sharding_opportunistic_secondary_targeting_gen
tags: ["sharding", "common"]
commands:
diff --git a/jstests/auth/user_defined_roles_on_secondaries.js b/jstests/auth/user_defined_roles_on_secondaries.js
index 4d44a9bbfd0..d27fbb7d522 100644
--- a/jstests/auth/user_defined_roles_on_secondaries.js
+++ b/jstests/auth/user_defined_roles_on_secondaries.js
@@ -208,7 +208,7 @@ assert.commandWorked(rstest.getPrimary().getDB("admin").runCommand({
{
op: "u",
ns: "admin.system.roles",
- o: {$set: {roles: [{role: "readWrite", db: "db1"}]}},
+ o: {$v: 2, diff: {u: {roles: [{role: "readWrite", db: "db1"}]}}},
o2: {_id: "db1.t2"}
}
]
diff --git a/jstests/change_streams/lookup_pit_pre_and_post_image_in_transaction.js b/jstests/change_streams/lookup_pit_pre_and_post_image_in_transaction.js
index 7aa3c707b97..6f868c377c2 100644
--- a/jstests/change_streams/lookup_pit_pre_and_post_image_in_transaction.js
+++ b/jstests/change_streams/lookup_pit_pre_and_post_image_in_transaction.js
@@ -136,7 +136,7 @@ if (!FixtureHelpers.isMongos(testDB)) {
assert.commandWorked(coll.insert([{_id: 5, a: 1}, {_id: 6, a: 1}]));
assert.commandWorked(testDB.runCommand({
applyOps: [
- {op: "u", ns: coll.getFullName(), o2: {_id: 5}, o: {$set: {a: 2}}},
+ {op: "u", ns: coll.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {a: 2}}}},
{op: "d", ns: coll.getFullName(), o: {_id: 6}}
],
allowAtomic: false,
diff --git a/jstests/change_streams/oplog_rewrite/change_stream_match_pushdown_updateDescription_rewrite.js b/jstests/change_streams/oplog_rewrite/change_stream_match_pushdown_updateDescription_rewrite.js
index fcbd2f6b9c6..4092926f89c 100644
--- a/jstests/change_streams/oplog_rewrite/change_stream_match_pushdown_updateDescription_rewrite.js
+++ b/jstests/change_streams/oplog_rewrite/change_stream_match_pushdown_updateDescription_rewrite.js
@@ -17,9 +17,9 @@
load("jstests/libs/change_stream_rewrite_util.js"); // For rewrite helpers.
const dbName = "change_stream_match_pushdown_updateDescription_rewrite";
-const collNameBase = "change_stream_match_pushdown_updateDescription_rewrite";
+const collName = "change_stream_match_pushdown_updateDescription_rewrite";
-// Start a new 2-shard cluster. One shard will always write v1 update oplog entries, the other v2.
+// Start a new 2-shard cluster.
const st = new ShardingTest({
shards: [
{nodes: 1, setParameter: {writePeriodicNoops: true, periodicNoopIntervalSecs: 1}},
@@ -28,242 +28,225 @@ const st = new ShardingTest({
setParameter: {
writePeriodicNoops: true,
periodicNoopIntervalSecs: 1,
- internalQueryEnableLoggingV2OplogEntries: false
}
}
]
});
-const mongosConn = st.s;
-const db = mongosConn.getDB(dbName);
-
-// To maximize coverage, we perform these tests twice and reverse the order of shards on which each
-// update is performed for each run. This ensures that all updates are tested for both v1 and v2
-// oplog formats.
-for (const reverseShards of [false, true]) {
- const s0 = reverseShards ? 1 : 0;
- const s1 = reverseShards ? 0 : 1;
- const collName = collNameBase + s0 + s1;
-
- // Returns a newly created sharded collection, where shard key is 'shard'.
- const coll =
- createShardedCollection(st, "shard" /* shardKey */, dbName, collName, 1 /* splitAt */);
-
- // Open a change stream and store the resume token. This resume token will be used to replay the
- // stream after this point.
- const resumeAfterToken = coll.watch([]).getResumeToken();
-
- // A helper that opens a change stream with the user supplied match expression 'userMatchExpr'
- // and validates that: (1) for each shard, the events are seen in that order as specified in
- // 'expectedOps'; and (2) each shard returns the expected number of events; and (3) the number
- // of docs returned by the oplog cursor on each shard matches what we expect
- // as specified in 'expectedOplogCursorReturnedDocs'.
- const verifyOps = function(userMatchExpr, expectedOps, expectedOplogCursorReturnedDocs) {
- const cursor =
- coll.aggregate([{$changeStream: {resumeAfter: resumeAfterToken}}, userMatchExpr]);
-
- let expectedChangeStreamDocsReturned = [0, 0];
- for (const [op, id, s] of expectedOps) {
- const shardId = s == 0 ? s0 : s == 1 ? s1 : s;
-
- assert.soon(() => cursor.hasNext());
- const event = cursor.next();
-
- assert.eq(event.operationType, op, event);
- assert.eq(event.documentKey._id, id, event);
- assert.eq(event.documentKey.shard, shardId, event);
- if (shardId == 0 || shardId == 1) {
- ++expectedChangeStreamDocsReturned[shardId];
- }
+const s0 = 0;
+const s1 = 1;
+
+// Returns a newly created sharded collection, where shard key is 'shard'.
+const coll = createShardedCollection(st, "shard" /* shardKey */, dbName, collName, 1 /* splitAt */);
+
+// Open a change stream and store the resume token. This resume token will be used to replay the
+// stream after this point.
+const resumeAfterToken = coll.watch([]).getResumeToken();
+
+// A helper that opens a change stream with the user supplied match expression 'userMatchExpr'
+// and validates that: (1) for each shard, the events are seen in that order as specified in
+// 'expectedOps'; and (2) each shard returns the expected number of events; and (3) the number
+// of docs returned by the oplog cursor on each shard matches what we expect
+// as specified in 'expectedOplogCursorReturnedDocs'.
+const verifyOps = function(userMatchExpr, expectedOps, expectedOplogCursorReturnedDocs) {
+ const cursor =
+ coll.aggregate([{$changeStream: {resumeAfter: resumeAfterToken}}, userMatchExpr]);
+
+ let expectedChangeStreamDocsReturned = [0, 0];
+ for (const [op, id, s] of expectedOps) {
+ const shardId = s == 0 ? s0 : s == 1 ? s1 : s;
+
+ assert.soon(() => cursor.hasNext());
+ const event = cursor.next();
+
+ assert.eq(event.operationType, op, event);
+ assert.eq(event.documentKey._id, id, event);
+ assert.eq(event.documentKey.shard, shardId, event);
+ if (shardId == 0 || shardId == 1) {
+ ++expectedChangeStreamDocsReturned[shardId];
}
-
- assert(!cursor.hasNext());
-
- // An 'executionStats' could only be captured for a non-invalidating stream.
- const stats = coll.explain("executionStats").aggregate([
- {$changeStream: {resumeAfter: resumeAfterToken}},
- userMatchExpr
- ]);
-
- assertNumChangeStreamDocsReturnedFromShard(
- stats, st.rs0.name, expectedChangeStreamDocsReturned[0]);
- assertNumChangeStreamDocsReturnedFromShard(
- stats, st.rs1.name, expectedChangeStreamDocsReturned[1]);
-
- assertNumMatchingOplogEventsForShard(
- stats, st.rs0.name, expectedOplogCursorReturnedDocs[s0]);
- assertNumMatchingOplogEventsForShard(
- stats, st.rs1.name, expectedOplogCursorReturnedDocs[s1]);
- };
-
- // These operations will create oplog events. The change stream will apply several filters
- // on these series of events and ensure that the '$match' expressions are rewritten
- // correctly.
- assert.commandWorked(coll.insert({_id: 2, shard: s0}));
- assert.commandWorked(coll.insert({_id: 2, shard: s1}));
- assert.commandWorked(coll.insert({_id: 3, shard: s0}));
- assert.commandWorked(coll.insert({_id: 3, shard: s1}));
-
- assert.commandWorked(coll.replaceOne({_id: 2, shard: s0},
- {_id: 2, shard: s0, z: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
- assert.commandWorked(coll.replaceOne({_id: 2, shard: s1},
- {_id: 2, shard: s1, z: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
- assert.commandWorked(coll.replaceOne({_id: 3, shard: s0},
- {_id: 3, shard: s0, 1: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
- assert.commandWorked(coll.replaceOne({_id: 3, shard: s1},
- {_id: 3, shard: s1, y: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
-
- assert.commandWorked(reverseShards ? coll.update({_id: 2, shard: s0}, {$unset: {z: 0}})
- : coll.update({_id: 2, shard: s0}, [{$unset: ["z"]}]));
- assert.commandWorked(reverseShards
- ? coll.update({_id: 2, shard: s1}, [{$set: {g: "c"}}, {$unset: "z"}])
- : coll.update({_id: 2, shard: s1}, {$set: {g: "c"}, $unset: {z: 0}}));
- assert.commandWorked(
- coll.update({_id: 3, shard: s0}, {$set: {f: "b", x: {j: 7}}, $unset: {"w.h": 0, 1: 0}}));
- assert.commandWorked(
- coll.update({_id: 3, shard: s1}, {$set: {"0": "d", x: {j: 7}}, $unset: {y: 0, "w.h": 0}}));
-
- assert.commandWorked(coll.deleteOne({_id: 2, shard: s0}));
- assert.commandWorked(coll.deleteOne({_id: 2, shard: s1}));
- assert.commandWorked(coll.deleteOne({_id: 3, shard: s0}));
- assert.commandWorked(coll.deleteOne({_id: 3, shard: s1}));
-
- // Ensure that the '$match' on the 'update' operation type with various predicates are rewritten
- // correctly.
- const op = "update";
-
- const v1UpdateDesc = {updatedFields: {}, removedFields: ["z"]};
- const v2UpdateDesc = {updatedFields: {}, removedFields: ["z"], truncatedArrays: []};
- const updateDesc = reverseShards ? v1UpdateDesc : v2UpdateDesc;
-
- // Test out a predicate on the full 'updateDescription' field.
- verifyOps({$match: {operationType: op, updateDescription: updateDesc}},
- [[op, 2, 0]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq:null predicate on the full 'updateDescription' field.
- verifyOps({$match: {operationType: op, updateDescription: {$eq: null}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $exists predicate on the full 'updateDescription' field.
- verifyOps({$match: {operationType: op, updateDescription: {$exists: false}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq:null predicate on 'updateDescription.updatedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields": {$eq: null}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $exists predicate on 'updateDescription.updatedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields": {$exists: false}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq predicate on 'updateDescription.updatedFields.f'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.f": "b"}},
- [[op, 3, 0]],
- [1, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $lte predicate on 'updateDescription.updatedFields.f'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.f": {$lte: "b"}}},
- [[op, 3, 0]],
- [1, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq predicate on 'updateDescription.updatedFields.g'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": "c"}},
- [[op, 2, 1]],
- [0, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $exists predicate on 'updateDescription.updatedFields.g'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": {$exists: true}}},
- [[op, 2, 1]],
- [0, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq predicate on 'updateDescription.updatedFields.x.j'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.x.j": 7}},
- [[op, 3, 0], [op, 3, 1]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq predicate on 'updateDescription.updatedFields.0'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.0": "d"}},
- [[op, 3, 1]],
- [0, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $eq:null predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$eq: null}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $exists predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$exists: false}}},
- [],
- [0, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a non-dotted string $eq predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": "z"}},
- [[op, 2, 0], [op, 2, 1]],
- [1, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an array $eq predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": ["z"]}},
- [[op, 2, 0], [op, 2, 1]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a dotted string $eq predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": "w.h"}},
- [[op, 3, 0], [op, 3, 1]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a number-like string $eq predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": "1"}},
- [[op, 3, 0]],
- [1, 0] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a non-dotted string $eq predicate on 'updateDescription.removedFields.0'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields.0": "z"}},
- [[op, 2, 0], [op, 2, 1]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an $in predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$in: ["y", "z"]}}},
- [[op, 2, 0], [op, 2, 1], [op, 3, 1]],
- [1, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated predicate on the full 'updateDescription' field.
- verifyOps({$match: {operationType: op, updateDescription: {$not: {$eq: updateDesc}}}},
- [[op, 2, 1], [op, 3, 0], [op, 3, 1]],
- [2, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $eq predicate on 'updateDescription.updatedFields.f'.
- verifyOps(
- {$match: {operationType: op, "updateDescription.updatedFields.f": {$not: {$eq: "b"}}}},
- [[op, 2, 0], [op, 2, 1], [op, 3, 1]],
- [1, 2] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $exists predicate on 'updateDescription.updatedFields.g'.
- verifyOps(
- {$match: {operationType: op, "updateDescription.updatedFields.g": {$not: {$exists: true}}}},
- [[op, 2, 0], [op, 3, 0], [op, 3, 1]],
- [2, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out an {$eq:null} predicate on 'updateDescription.updatedFields.g'.
- verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": {$eq: null}}},
- [[op, 2, 0], [op, 3, 0], [op, 3, 1]],
- [2, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $eq predicate on 'updateDescription.removedFields'.
- verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$not: {$eq: "z"}}}},
- [[op, 3, 0], [op, 3, 1]],
- [1, 1] /* expectedOplogCursorReturnedDocs */);
-
- // Test out a negated $in predicate on 'updateDescription.removedFields'.
- verifyOps(
- {$match: {operationType: op, "updateDescription.removedFields": {$not: {$in: ["y", "z"]}}}},
- [[op, 3, 0]],
- [1, 0] /* expectedOplogCursorReturnedDocs */);
-}
+ }
+
+ assert(!cursor.hasNext());
+
+ // An 'executionStats' could only be captured for a non-invalidating stream.
+ const stats = coll.explain("executionStats")
+ .aggregate([{$changeStream: {resumeAfter: resumeAfterToken}}, userMatchExpr]);
+
+ assertNumChangeStreamDocsReturnedFromShard(
+ stats, st.rs0.name, expectedChangeStreamDocsReturned[0]);
+ assertNumChangeStreamDocsReturnedFromShard(
+ stats, st.rs1.name, expectedChangeStreamDocsReturned[1]);
+
+ assertNumMatchingOplogEventsForShard(stats, st.rs0.name, expectedOplogCursorReturnedDocs[s0]);
+ assertNumMatchingOplogEventsForShard(stats, st.rs1.name, expectedOplogCursorReturnedDocs[s1]);
+};
+
+// These operations will create oplog events. The change stream will apply several filters
+// on this series of events and ensure that the '$match' expressions are rewritten
+// correctly.
+assert.commandWorked(coll.insert({_id: 2, shard: s0}));
+assert.commandWorked(coll.insert({_id: 2, shard: s1}));
+assert.commandWorked(coll.insert({_id: 3, shard: s0}));
+assert.commandWorked(coll.insert({_id: 3, shard: s1}));
+
+assert.commandWorked(
+ coll.replaceOne({_id: 2, shard: s0}, {_id: 2, shard: s0, z: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
+assert.commandWorked(
+ coll.replaceOne({_id: 2, shard: s1}, {_id: 2, shard: s1, z: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
+assert.commandWorked(
+ coll.replaceOne({_id: 3, shard: s0}, {_id: 3, shard: s0, 1: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
+assert.commandWorked(
+ coll.replaceOne({_id: 3, shard: s1}, {_id: 3, shard: s1, y: 4, f: "a", w: {h: 5, k: 5, l: 5}}));
+
+assert.commandWorked(coll.update({_id: 2, shard: s0}, {$unset: {z: 0}}));
+assert.commandWorked(coll.update({_id: 2, shard: s1}, [{$set: {g: "c"}}, {$unset: "z"}]));
+
+assert.commandWorked(
+ coll.update({_id: 3, shard: s0}, {$set: {f: "b", x: {j: 7}}, $unset: {"w.h": 0, 1: 0}}));
+assert.commandWorked(
+ coll.update({_id: 3, shard: s1}, {$set: {"0": "d", x: {j: 7}}, $unset: {y: 0, "w.h": 0}}));
+
+assert.commandWorked(coll.deleteOne({_id: 2, shard: s0}));
+assert.commandWorked(coll.deleteOne({_id: 2, shard: s1}));
+assert.commandWorked(coll.deleteOne({_id: 3, shard: s0}));
+assert.commandWorked(coll.deleteOne({_id: 3, shard: s1}));
+
+// Ensure that the '$match' on the 'update' operation type with various predicates are rewritten
+// correctly.
+const op = "update";
+const updateDesc = {
+ updatedFields: {},
+ removedFields: ["z"],
+ truncatedArrays: []
+};
+
+// Test out a predicate on the full 'updateDescription' field.
+verifyOps({$match: {operationType: op, updateDescription: updateDesc}},
+ [[op, 2, 0]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq:null predicate on the full 'updateDescription' field.
+verifyOps({$match: {operationType: op, updateDescription: {$eq: null}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $exists predicate on the full 'updateDescription' field.
+verifyOps({$match: {operationType: op, updateDescription: {$exists: false}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq:null predicate on 'updateDescription.updatedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields": {$eq: null}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $exists predicate on 'updateDescription.updatedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields": {$exists: false}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq predicate on 'updateDescription.updatedFields.f'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.f": "b"}},
+ [[op, 3, 0]],
+ [1, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $lte predicate on 'updateDescription.updatedFields.f'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.f": {$lte: "b"}}},
+ [[op, 3, 0]],
+ [1, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq predicate on 'updateDescription.updatedFields.g'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": "c"}},
+ [[op, 2, 1]],
+ [0, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $exists predicate on 'updateDescription.updatedFields.g'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": {$exists: true}}},
+ [[op, 2, 1]],
+ [0, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq predicate on 'updateDescription.updatedFields.x.j'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.x.j": 7}},
+ [[op, 3, 0], [op, 3, 1]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq predicate on 'updateDescription.updatedFields.0'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.0": "d"}},
+ [[op, 3, 1]],
+ [0, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $eq:null predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$eq: null}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $exists predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$exists: false}}},
+ [],
+ [0, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a non-dotted string $eq predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": "z"}},
+ [[op, 2, 0], [op, 2, 1]],
+ [1, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an array $eq predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": ["z"]}},
+ [[op, 2, 0], [op, 2, 1]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a dotted string $eq predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": "w.h"}},
+ [[op, 3, 0], [op, 3, 1]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a number-like string $eq predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": "1"}},
+ [[op, 3, 0]],
+ [1, 0] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a non-dotted string $eq predicate on 'updateDescription.removedFields.0'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields.0": "z"}},
+ [[op, 2, 0], [op, 2, 1]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an $in predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$in: ["y", "z"]}}},
+ [[op, 2, 0], [op, 2, 1], [op, 3, 1]],
+ [1, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated predicate on the full 'updateDescription' field.
+verifyOps({$match: {operationType: op, updateDescription: {$not: {$eq: updateDesc}}}},
+ [[op, 2, 1], [op, 3, 0], [op, 3, 1]],
+ [2, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $eq predicate on 'updateDescription.updatedFields.f'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.f": {$not: {$eq: "b"}}}},
+ [[op, 2, 0], [op, 2, 1], [op, 3, 1]],
+ [1, 2] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $exists predicate on 'updateDescription.updatedFields.g'.
+verifyOps(
+ {$match: {operationType: op, "updateDescription.updatedFields.g": {$not: {$exists: true}}}},
+ [[op, 2, 0], [op, 3, 0], [op, 3, 1]],
+ [2, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out an {$eq:null} predicate on 'updateDescription.updatedFields.g'.
+verifyOps({$match: {operationType: op, "updateDescription.updatedFields.g": {$eq: null}}},
+ [[op, 2, 0], [op, 3, 0], [op, 3, 1]],
+ [2, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $eq predicate on 'updateDescription.removedFields'.
+verifyOps({$match: {operationType: op, "updateDescription.removedFields": {$not: {$eq: "z"}}}},
+ [[op, 3, 0], [op, 3, 1]],
+ [1, 1] /* expectedOplogCursorReturnedDocs */);
+
+// Test out a negated $in predicate on 'updateDescription.removedFields'.
+verifyOps(
+ {$match: {operationType: op, "updateDescription.removedFields": {$not: {$in: ["y", "z"]}}}},
+ [[op, 3, 0]],
+ [1, 0] /* expectedOplogCursorReturnedDocs */);
st.stop();
})();
diff --git a/jstests/change_streams/pipeline_style_updates.js b/jstests/change_streams/pipeline_style_updates.js
index 45aa3374cf1..4b9f56842df 100644
--- a/jstests/change_streams/pipeline_style_updates.js
+++ b/jstests/change_streams/pipeline_style_updates.js
@@ -1,6 +1,5 @@
/**
- * Test the change events generated by pipeline-based updates are expected with delta format oplog
- * enabled and disabled.
+ * Test the change events generated by pipeline-based updates are expected with delta format oplog.
*
* @tags: [
* ]
@@ -11,114 +10,66 @@
load("jstests/libs/change_stream_util.js"); // For ChangeStreamTest
load("jstests/libs/collection_drop_recreate.js"); // For assert[Drop|Create]Collection.
-load("jstests/libs/discover_topology.js"); // For findNonConfigNodes.
-load("jstests/noPassthrough/libs/server_parameter_helpers.js"); // For setParameterOnAllHosts.
-const v2OplogEntriesServerParameter = "internalQueryEnableLoggingV2OplogEntries";
-const defaultOpLogMode = getParameter(db.getMongo(), v2OplogEntriesServerParameter);
-try {
- jsTestLog("Testing when $v:2 oplog entry is enabled.");
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(db.getMongo()), v2OplogEntriesServerParameter, true);
-
- assertDropAndRecreateCollection(db, "t1");
-
- const kLargeStr = '*'.repeat(512);
-
- assert.commandWorked(db.t1.insert({
- _id: 100,
- "a": 1,
- "b": 2,
- "obj": {"a": 1, "b": 2, "str": kLargeStr},
- }));
-
- const cst = new ChangeStreamTest(db);
- const changeStreamCursor =
- cst.startWatchingChanges({pipeline: [{$changeStream: {}}], collection: db.t1});
-
- function testPipelineStyleUpdate(pipeline, expectedChange, operationType) {
- assert.commandWorked(db.t1.update({_id: 100}, pipeline));
- const expected = Object.assign({
- documentKey: {_id: 100},
- ns: {db: "test", coll: "t1"},
- operationType: operationType,
- },
- expectedChange);
- cst.assertNextChangesEqual({cursor: changeStreamCursor, expectedChanges: [expected]});
- }
-
- jsTestLog("Testing pipeline-based update with $set.");
- let updatePipeline = [{$set: {a: 2}}];
- let expected = {
- updateDescription: {
- updatedFields: {"a": 2},
- removedFields: [],
- truncatedArrays: [],
- },
- };
- testPipelineStyleUpdate(updatePipeline, expected, "update");
-
- jsTestLog("Testing pipeline-based update with $unset.");
- updatePipeline = [{$unset: ["a"]}];
- expected = {
- updateDescription: {
- updatedFields: {},
- removedFields: ["a"],
- truncatedArrays: [],
- },
- };
- testPipelineStyleUpdate(updatePipeline, expected, "update");
-
- jsTestLog("Testing pipeline-based update with $replaceRoot.");
- updatePipeline =
- [{$replaceRoot: {newRoot: {_id: 100, b: 2, "obj": {"a": 2, "b": 2, "str": kLargeStr}}}}];
- expected = {
- updateDescription: {
- updatedFields: {"obj.a": 2},
- removedFields: [],
- truncatedArrays: [],
- },
- };
- testPipelineStyleUpdate(updatePipeline, expected, "update");
-
- jsTestLog("Testing when $v:2 oplog entry is disabled.");
- setParameterOnAllHosts(
- DiscoverTopology.findNonConfigNodes(db.getMongo()), v2OplogEntriesServerParameter, false);
-
- jsTestLog("Testing pipeline-based update with $set.");
- updatePipeline = [{$set: {a: 2}}];
- expected = {
- fullDocument: {
- _id: 100,
- "a": 2,
- "b": 2,
- "obj": {"a": 2, "b": 2, "str": kLargeStr},
- },
- };
- testPipelineStyleUpdate(updatePipeline, expected, "replace");
-
- jsTestLog("Testing pipeline-based update with $unset.");
- updatePipeline = [{$unset: ["a"]}];
- delete expected.fullDocument.a;
- testPipelineStyleUpdate(updatePipeline, expected, "replace");
-
- jsTestLog("Testing pipeline-based update with $replaceRoot.");
- updatePipeline = [{$replaceRoot: {newRoot: {_id: 100, "a": 1, "b": 2}}}];
- expected = {
- fullDocument: {
- _id: 100,
- "a": 1,
- "b": 2,
- },
- };
- testPipelineStyleUpdate(updatePipeline, expected, "replace");
-
- cst.cleanUp();
-} finally {
- // Reset the server parameter to the original value, so that other tests running in the same
- // suite will not be impacted.
- setParameterOnAllHosts(DiscoverTopology.findNonConfigNodes(db.getMongo()),
- v2OplogEntriesServerParameter,
- defaultOpLogMode);
+assertDropAndRecreateCollection(db, "t1");
+
+const kLargeStr = '*'.repeat(512);
+
+assert.commandWorked(db.t1.insert({
+ _id: 100,
+ "a": 1,
+ "b": 2,
+ "obj": {"a": 1, "b": 2, "str": kLargeStr},
+}));
+
+const cst = new ChangeStreamTest(db);
+const changeStreamCursor =
+ cst.startWatchingChanges({pipeline: [{$changeStream: {}}], collection: db.t1});
+
+function testPipelineStyleUpdate(pipeline, expectedChange, operationType) {
+ assert.commandWorked(db.t1.update({_id: 100}, pipeline));
+ const expected = Object.assign({
+ documentKey: {_id: 100},
+ ns: {db: "test", coll: "t1"},
+ operationType: operationType,
+ },
+ expectedChange);
+ cst.assertNextChangesEqual({cursor: changeStreamCursor, expectedChanges: [expected]});
}
-}());
+
+jsTestLog("Testing pipeline-based update with $set.");
+let updatePipeline = [{$set: {a: 2}}];
+let expected = {
+ updateDescription: {
+ updatedFields: {"a": 2},
+ removedFields: [],
+ truncatedArrays: [],
+ },
+};
+testPipelineStyleUpdate(updatePipeline, expected, "update");
+
+jsTestLog("Testing pipeline-based update with $unset.");
+updatePipeline = [{$unset: ["a"]}];
+expected = {
+ updateDescription: {
+ updatedFields: {},
+ removedFields: ["a"],
+ truncatedArrays: [],
+ },
+};
+testPipelineStyleUpdate(updatePipeline, expected, "update");
+
+jsTestLog("Testing pipeline-based update with $replaceRoot.");
+updatePipeline =
+ [{$replaceRoot: {newRoot: {_id: 100, b: 2, "obj": {"a": 2, "b": 2, "str": kLargeStr}}}}];
+expected = {
+ updateDescription: {
+ updatedFields: {"obj.a": 2},
+ removedFields: [],
+ truncatedArrays: [],
+ },
+};
+testPipelineStyleUpdate(updatePipeline, expected, "update");
+
+cst.cleanUp();
+}()); \ No newline at end of file
diff --git a/jstests/change_streams/show_raw_update_description.js b/jstests/change_streams/show_raw_update_description.js
index 67e0fbbe121..af372dc6928 100644
--- a/jstests/change_streams/show_raw_update_description.js
+++ b/jstests/change_streams/show_raw_update_description.js
@@ -27,13 +27,6 @@ if (!isFeatureEnabled) {
return;
}
-const oplogV2FlagName = "internalQueryEnableLoggingV2OplogEntries";
-const oplogV2Enabled =
- assert.commandWorked(db.adminCommand({getParameter: 1, [oplogV2FlagName]: 1}))[oplogV2FlagName];
-if (!oplogV2Enabled) {
- return;
-}
-
// Drop and recreate the collections to be used in this set of tests.
assertDropAndRecreateCollection(db, "t1");
assertDropAndRecreateCollection(db, "t2");
diff --git a/jstests/change_streams/show_raw_update_description_v1_oplog.js b/jstests/change_streams/show_raw_update_description_v1_oplog.js
deleted file mode 100644
index bb08902d264..00000000000
--- a/jstests/change_streams/show_raw_update_description_v1_oplog.js
+++ /dev/null
@@ -1,207 +0,0 @@
-/**
- * Tests that change streams with the 'showRawUpdateDescription' option enabled will return update
- * events with the 'rawUpdateDescription' field instead of the 'updateDescription' field, and tests
- * that the 'showRawUpdateDescription' option has no effect on replacements or other types of
- * events.
- *
- * @tags: [
- * requires_fcv_60,
- * ]
- */
-(function() {
-"use strict";
-
-load("jstests/aggregation/extras/utils.js"); // For arrayEq.
-load("jstests/libs/change_stream_util.js"); // For ChangeStreamTest.
-load("jstests/libs/collection_drop_recreate.js"); // For assertDropAndRecreateCollection.
-
-const isFeatureEnabled =
- assert.commandWorked(db.adminCommand({getParameter: 1, featureFlagChangeStreamsVisibility: 1}))
- .featureFlagChangeStreamsVisibility.value;
-if (!isFeatureEnabled) {
- assert.commandFailedWithCode(db.runCommand({
- aggregate: 1,
- pipeline: [{$changeStream: {showRawUpdateDescription: true}}],
- cursor: {},
- }),
- 6189400);
- return;
-}
-
-const oplogV2FlagName = "internalQueryEnableLoggingV2OplogEntries";
-const oplogV2Enabled =
- assert.commandWorked(db.adminCommand({getParameter: 1, [oplogV2FlagName]: 1}))[oplogV2FlagName];
-if (oplogV2Enabled) {
- return;
-}
-
-// Drop and recreate the collections to be used in this set of tests.
-assertDropAndRecreateCollection(db, "t1");
-assertDropAndRecreateCollection(db, "t1Copy");
-
-assert.commandWorked(db.t1.insert([
- {_id: 3, a: 5, b: 1},
- {_id: 4, a: 0, b: 1},
- {_id: 5, a: 0, b: 1},
- {_id: 6, a: 1, b: 1},
- {_id: 7, a: 1, b: 1},
- {_id: 8, a: 2, b: {c: 1}}
-]));
-
-const cst = new ChangeStreamTest(db);
-let cursor = cst.startWatchingChanges(
- {pipeline: [{$changeStream: {showRawUpdateDescription: true}}], collection: db.t1});
-
-//
-// Test insert, replace, and delete operations and verify the corresponding change stream events
-// are unaffected by the 'showRawUpdateDescription' option.
-//
-jsTestLog("Testing insert");
-assert.commandWorked(db.t1.insert({_id: 1, a: 1}));
-let expected = {
- documentKey: {_id: 1},
- fullDocument: {_id: 1, a: 1},
- ns: {db: "test", coll: "t1"},
- operationType: "insert",
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-
-jsTestLog("Testing upsert");
-assert.commandWorked(db.t1.update({_id: 2}, {_id: 2, a: 4}, {upsert: true}));
-expected = {
- documentKey: {_id: 2},
- fullDocument: {_id: 2, a: 4},
- ns: {db: "test", coll: "t1"},
- operationType: "insert",
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-
-jsTestLog("Testing replacement");
-assert.commandWorked(db.t1.update({_id: 1}, {_id: 1, a: 3}));
-expected = {
- documentKey: {_id: 1},
- fullDocument: {_id: 1, a: 3},
- ns: {db: "test", coll: "t1"},
- operationType: "replace",
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-
-jsTestLog("Testing another replacement");
-assert.commandWorked(db.t1.update({_id: 1}, {_id: 1, b: 3}));
-expected = {
- documentKey: {_id: 1},
- fullDocument: {_id: 1, b: 3},
- ns: {db: "test", coll: "t1"},
- operationType: "replace",
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-
-jsTestLog("Testing delete");
-assert.commandWorked(db.t1.remove({_id: 1}));
-expected = {
- documentKey: {_id: 1},
- ns: {db: "test", coll: "t1"},
- operationType: "delete",
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-
-jsTestLog("Testing delete with justOne:false");
-assert.commandWorked(db.t1.remove({a: 1}, {justOne: false}));
-expected = [
- {
- documentKey: {_id: 6},
- ns: {db: "test", coll: "t1"},
- operationType: "delete",
- },
- {
- documentKey: {_id: 7},
- ns: {db: "test", coll: "t1"},
- operationType: "delete",
- }
-];
-cst.assertNextChangesEqualUnordered({cursor: cursor, expectedChanges: expected});
-
-//
-// The remainder of the test-cases below exercise various update scenarios that produce
-// 'rawUpdateDescription'.
-//
-
-function assertCollectionsAreIdentical(coll1, coll2) {
- const values1 = coll1.find().toArray();
- const values2 = coll2.find().toArray();
- assert(arrayEq(values1, values2),
- () => "actual: " + tojson(values1) + " expected: " + tojson(values2));
-}
-
-function assertCanApplyRawUpdate(origColl, copyColl, events) {
- if (!Array.isArray(events)) {
- events = [events];
- }
- for (let event of events) {
- assert.commandWorked(copyColl.update(
- event.documentKey,
- [{$_internalApplyOplogUpdate: {oplogUpdate: event.rawUpdateDescription}}]));
- }
- assertCollectionsAreIdentical(origColl, copyColl);
-}
-
-assert.commandWorked(db.t1Copy.insert(db.t1.find().toArray()));
-assertCollectionsAreIdentical(db.t1, db.t1Copy);
-
-//
-// Test op-style updates.
-//
-jsTestLog("Testing op-style update with $inc");
-assert.commandWorked(db.t1.update({_id: 3}, {$inc: {b: 2}}));
-expected = {
- documentKey: {_id: 3},
- ns: {db: "test", coll: "t1"},
- operationType: "update",
- rawUpdateDescription: {"$v": NumberInt(1), "$set": {b: 3}}
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-assertCanApplyRawUpdate(db.t1, db.t1Copy, expected);
-
-jsTestLog("Testing op-style update with $set and multi:true");
-assert.commandWorked(db.t1.update({a: 0}, {$set: {b: 2}}, {multi: true}));
-expected = [
- {
- documentKey: {_id: 4},
- ns: {db: "test", coll: "t1"},
- operationType: "update",
- rawUpdateDescription: {"$v": NumberInt(1), "$set": {b: 2}}
- },
- {
- documentKey: {_id: 5},
- ns: {db: "test", coll: "t1"},
- operationType: "update",
- rawUpdateDescription: {"$v": NumberInt(1), "$set": {b: 2}}
- }
-];
-cst.assertNextChangesEqualUnordered({cursor: cursor, expectedChanges: expected});
-assertCanApplyRawUpdate(db.t1, db.t1Copy, expected);
-
-jsTestLog("Testing op-style update with $unset");
-assert.commandWorked(db.t1.update({_id: 3}, {$unset: {b: ""}}));
-expected = {
- documentKey: {_id: 3},
- ns: {db: "test", coll: "t1"},
- operationType: "update",
- rawUpdateDescription: {"$v": NumberInt(1), "$unset": {b: true}}
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-assertCanApplyRawUpdate(db.t1, db.t1Copy, expected);
-
-jsTestLog("Testing op-style update with $set on nested field");
-assert.commandWorked(db.t1.update({_id: 8}, {$set: {"b.d": 2}}));
-expected = {
- documentKey: {_id: 8},
- ns: {db: "test", coll: "t1"},
- operationType: "update",
- rawUpdateDescription: {"$v": NumberInt(1), "$set": {"b.d": 2}}
-};
-cst.assertNextChangesEqual({cursor: cursor, expectedChanges: [expected]});
-assertCanApplyRawUpdate(db.t1, db.t1Copy, expected);
-
-cst.cleanUp();
-}());
diff --git a/jstests/core/apply_ops1.js b/jstests/core/apply_ops1.js
index 444b416e903..dde09b5bbcc 100644
--- a/jstests/core/apply_ops1.js
+++ b/jstests/core/apply_ops1.js
@@ -245,7 +245,8 @@ function testCrudOperationOnNonExistentNamespace(optype, o, o2, expectedErrorCod
// Insert and update operations on non-existent collections/databases should return
// NamespaceNotFound.
testCrudOperationOnNonExistentNamespace('i', {_id: 0}, {}, ErrorCodes.NamespaceNotFound);
-testCrudOperationOnNonExistentNamespace('u', {x: 0}, {_id: 0}, ErrorCodes.NamespaceNotFound);
+testCrudOperationOnNonExistentNamespace(
+ 'u', {$v: 2, diff: {x: 0}}, {_id: 0}, ErrorCodes.NamespaceNotFound);
// TODO(SERVER-46221): These oplog entries are inserted as given. After SERVER-21700 and with
// steady-state oplog constraint enforcement on, they will result in secondary crashes. We
@@ -280,8 +281,8 @@ assert.commandFailed(db.adminCommand({applyOps: [{op: 'i', ns: t.getFullName(),
var res = assert.commandWorked(db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 18}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 19}}}
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 18}}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 19}}}}
]
}));
@@ -296,8 +297,8 @@ assert.eq(true, res.results[1], "Bad result value for valid update");
// preCondition fully matches
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 20}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 21}}}
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 20}}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 21}}}}
],
preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
});
@@ -306,7 +307,7 @@ res = db.runCommand({
// with {allowAtomic: false}.
assert.commandFailedWithCode(
db.runCommand({
- applyOps: [{op: 'u', ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 22}}}],
+ applyOps: [{op: 'u', ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 22}}}}],
preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 21}}],
allowAtomic: false,
}),
@@ -333,8 +334,8 @@ assert.eq(true, res.results[1], "Bad result value for valid update");
// preCondition doesn't match ns
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 22}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 23}}}
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 22}}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 23}}}}
],
preCondition: [{ns: "foo.otherName", q: {_id: 5}, res: {x: 21}}]
});
@@ -344,8 +345,8 @@ assert.eq(o, t.findOne(), "preCondition didn't match, but ops were still applied
// preCondition doesn't match query
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 22}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 23}}}
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 22}}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 23}}}}
],
preCondition: [{ns: t.getFullName(), q: {_id: 5}, res: {x: 19}}]
});
@@ -354,8 +355,8 @@ assert.eq(o, t.findOne(), "preCondition didn't match, but ops were still applied
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$set: {x: 22}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 6}, o: {$set: {x: 23}}}
+ {op: "u", ns: t.getFullName(), o2: {_id: 5}, o: {$v: 2, diff: {u: {x: 22}}}},
+ {op: "u", ns: t.getFullName(), o2: {_id: 6}, o: {$v: 2, diff: {u: {x: 23}}}}
]
});
@@ -381,7 +382,7 @@ res = db.runCommand({
op: "u",
ns: t.getFullName(),
o2: {_id: 8},
- o: {$set: {x: 25}},
+ o: {$v: 2, diff: {u: {x: 25}}},
lsid: lsid,
txnNumber: NumberLong(1),
stmtId: NumberInt(1)
@@ -416,7 +417,7 @@ res = db.runCommand({
op: "u",
ns: t.getFullName(),
o2: {_id: 8},
- o: {$set: {x: 25}},
+ o: {$v: 2, diff: {u: {x: 25}}},
lsid: lsid,
txnNumber: NumberLong(3),
stmtId: [NumberInt(2), NumberInt(3)]
@@ -436,15 +437,14 @@ assert.eq(true, res.results[0], "Valid insert with multiple statement IDs failed
assert.eq(true, res.results[1], "Valid update with multiple statement IDs failed");
assert.eq(true, res.results[2], "Valid delete with multiple statement IDs failed");
-// When applying a "u" (update) op, we default to 'UpdateNode' update semantics, and $set
-// operations add new fields in lexicographic order.
+// When applying a "u" (update) op in the $v: 2 format, fields in the diff are applied in the
+// order specified rather than in lexicographic order.
res = assert.commandWorked(db.adminCommand({
applyOps: [
{"op": "i", "ns": t.getFullName(), "o": {_id: 9}},
- {"op": "u", "ns": t.getFullName(), "o2": {_id: 9}, "o": {$set: {z: 1, a: 2}}}
+ {"op": "u", "ns": t.getFullName(), "o2": {_id: 9}, "o": {$v: 2, diff: {u: {z: 1, a: 2}}}},
]
}));
-assert.eq(t.findOne({_id: 9}), {_id: 9, a: 2, z: 1}); // Note: 'a' and 'z' have been sorted.
+assert.eq(t.findOne({_id: 9}), {_id: 9, z: 1, a: 2});  // Note: 'z' and 'a' keep insertion order.
// 'ModifierInterface' semantics are not supported, so an update with {$v: 0} should fail.
res = assert.commandFailed(db.adminCommand({
@@ -460,9 +460,8 @@ res = assert.commandFailed(db.adminCommand({
}));
assert.eq(res.code, 4772600);
-// When we explicitly specify {$v: 1}, we should get 'UpdateNode' update semantics, and $set
-// operations get performed in lexicographic order.
-res = assert.commandWorked(db.adminCommand({
+// When we explicitly specify {$v: 1} it should fail because this version is no longer supported.
+assert.commandFailedWithCode(db.adminCommand({
applyOps: [
{"op": "i", "ns": t.getFullName(), "o": {_id: 10}},
{
@@ -472,8 +471,8 @@ res = assert.commandWorked(db.adminCommand({
"o": {$v: NumberInt(1), $set: {z: 1, a: 2}}
}
]
-}));
-assert.eq(t.findOne({_id: 10}), {_id: 10, a: 2, z: 1}); // Note: 'a' and 'z' have been sorted.
+}),
+ 4772600);
// {$v: 2} entries encode diffs differently, and operations are applied in the order specified
// rather than in lexicographic order.
@@ -544,13 +543,13 @@ assert.commandWorked(db.adminCommand({
op: 'u',
ns: t.getFullName(),
o2: {_id: 13},
- o: {$set: {x: 'nested apply op update1'}}
+ o: {$v: 2, diff: {u: {x: 'nested apply op update1'}}},
},
{
op: 'u',
ns: t.getFullName(),
o2: {_id: 14},
- o: {$set: {x: 'nested apply op update2'}}
+ o: {$v: 2, diff: {u: {x: 'nested apply op update2'}}},
}
]
}
diff --git a/jstests/core/apply_ops2.js b/jstests/core/apply_ops2.js
index 379c1d0744e..486d60aac1d 100644
--- a/jstests/core/apply_ops2.js
+++ b/jstests/core/apply_ops2.js
@@ -25,8 +25,18 @@ print("Testing applyOps with alwaysUpsert = true");
var res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=true existing"}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 2}, o: {$set: {x: "upsert=true non-existing"}}}
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 1},
+ o: {$v: 2, diff: {u: {x: "upsert=true existing"}}}
+ },
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 2},
+ o: {$v: 2, diff: {u: {x: "upsert=true non-existing"}}}
+ }
],
alwaysUpsert: true
});
@@ -40,8 +50,20 @@ print("Testing applyOps with alwaysUpsert = false");
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=false existing"}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 3}, o: {$set: {x: "upsert=false non-existing"}}}
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 1},
+ o: {$v: 2, diff: {u: {x: "upsert=false existing"}}}
+ },
+
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 3},
+ o: {$v: 2, diff: {u: {x: "upsert=false non-existing"}}}
+ }
+
],
alwaysUpsert: false
});
@@ -56,8 +78,20 @@ print("Testing applyOps with default alwaysUpsert");
res = db.runCommand({
applyOps: [
- {op: "u", ns: t.getFullName(), o2: {_id: 1}, o: {$set: {x: "upsert=default existing"}}},
- {op: "u", ns: t.getFullName(), o2: {_id: 4}, o: {$set: {x: "upsert=defaults non-existing"}}}
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 1},
+ o: {$v: 2, diff: {u: {x: "upsert=default existing"}}}
+ },
+
+ {
+ op: "u",
+ ns: t.getFullName(),
+ o2: {_id: 4},
+
+ o: {$v: 2, diff: {u: {x: "upsert=default non-existing"}}}
+ },
]
});
diff --git a/jstests/core/apply_ops_missing_field.js b/jstests/core/apply_ops_missing_field.js
index 7d397ef16d8..c392c473ec8 100644
--- a/jstests/core/apply_ops_missing_field.js
+++ b/jstests/core/apply_ops_missing_field.js
@@ -42,18 +42,30 @@ const originalDoc = {
coll.insert(originalDoc);
// Update field "a", which is null, but apply an operation intended for an array.
-applyOps([{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$set: {'a.0': 2}}}]);
+applyOps(
+ [{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$v: 2, diff: {sa: {a: true, u0: 1}}}}]);
assert.eq(originalDoc, coll.findOne());
// Same, but also set a new field 'b' to 1.
-applyOps([{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$set: {'a.0': 2, b: 1}}}]);
+applyOps([{
+ ns: coll.getFullName(),
+ op: "u",
+ o2: {_id: 1},
+ o: {$v: 2, diff: {u: {b: 1}, sa: {a: true, u0: 1}}}
+}]);
assert.eq({_id: 1, a: null, b: 1}, coll.findOne());
// An operation intended for a subdocument.
-applyOps([{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$set: {'a.field': 2}}}]);
+applyOps(
+ [{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$v: 2, diff: {sa: {u: {field: 2}}}}}]);
assert.eq({_id: 1, a: null, b: 1}, coll.findOne());
// Same, but set 'b' to 2.
-applyOps([{ns: coll.getFullName(), op: "u", o2: {_id: 1}, o: {$set: {'a.field': 2, b: 2}}}]);
+applyOps([{
+ ns: coll.getFullName(),
+ op: "u",
+ o2: {_id: 1},
+ o: {$v: 2, diff: {u: {b: 2}, sa: {u: {field: 2}}}}
+}]);
assert.eq({_id: 1, a: null, b: 2}, coll.findOne());
}());
diff --git a/jstests/core/collation.js b/jstests/core/collation.js
index fa6dddefe7e..0a1c87c5d65 100644
--- a/jstests/core/collation.js
+++ b/jstests/core/collation.js
@@ -1603,31 +1603,37 @@ if (!isMongos) {
// preCondition.q respects collection default collation.
assert.commandFailed(db.runCommand({
- applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$set: {x: 6}}}],
+ applyOps:
+ [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$v: 2, diff: {u: {x: 6}}}}],
preCondition: [{ns: coll.getFullName(), q: {_id: "not foo"}, res: {str: "bar"}}]
}));
assert.eq(5, coll.findOne({_id: "foo"}).x);
assert.commandWorked(db.runCommand({
- applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$set: {x: 6}}}],
+ applyOps:
+ [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$v: 2, diff: {u: {x: 6}}}}],
preCondition: [{ns: coll.getFullName(), q: {_id: "FOO"}, res: {str: "bar"}}]
}));
assert.eq(6, coll.findOne({_id: "foo"}).x);
// preCondition.res respects collection default collation.
assert.commandFailed(db.runCommand({
- applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$set: {x: 7}}}],
+ applyOps:
+ [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$v: 2, diff: {u: {x: 7}}}}],
preCondition: [{ns: coll.getFullName(), q: {_id: "foo"}, res: {str: "not bar"}}]
}));
assert.eq(6, coll.findOne({_id: "foo"}).x);
assert.commandWorked(db.runCommand({
- applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$set: {x: 7}}}],
+ applyOps:
+ [{op: "u", ns: coll.getFullName(), o2: {_id: "foo"}, o: {$v: 2, diff: {u: {x: 7}}}}],
preCondition: [{ns: coll.getFullName(), q: {_id: "foo"}, res: {str: "BAR"}}]
}));
assert.eq(7, coll.findOne({_id: "foo"}).x);
// <operation>.o2 respects collection default collation.
- assert.commandWorked(db.runCommand(
- {applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: "FOO"}, o: {$set: {x: 8}}}]}));
+ assert.commandWorked(db.runCommand({
+ applyOps:
+ [{op: "u", ns: coll.getFullName(), o2: {_id: "FOO"}, o: {$v: 2, diff: {u: {x: 8}}}}]
+ }));
assert.eq(8, coll.findOne({_id: "foo"}).x);
}
diff --git a/jstests/core/internal_apply_oplog_update.js b/jstests/core/internal_apply_oplog_update.js
index 0f306a08f20..66f24403145 100644
--- a/jstests/core/internal_apply_oplog_update.js
+++ b/jstests/core/internal_apply_oplog_update.js
@@ -42,8 +42,8 @@ let documents2 = [{
assert.commandWorked(db.t2.insert(documents2));
//
-// Test $_internalApplyOplogUpdate with v1 oplog and v2 oplog update descriptions. For each
-// update description, we execute $_internalApplyOplogUpdate twice to verify idempotency.
+// Test $_internalApplyOplogUpdate with v2 oplog update descriptions. For each update description,
+// we execute $_internalApplyOplogUpdate twice to verify idempotency.
//
function testUpdate(expected, coll, filter, oplogUpdate, opts = {}) {
@@ -62,26 +62,13 @@ documents1[1].b = 3;
testUpdate(documents1, db.t1, {_id: 3}, oplogUpdate);
oplogUpdate = {
- "$v": NumberInt(1),
- "$set": {b: 2}
+ "$v": NumberInt(2),
+ diff: {d: {b: false}}
};
-documents1[2].b = 2;
-documents1[3].b = 2;
-testUpdate(documents1, db.t1, {a: 0}, oplogUpdate, {multi: true});
-oplogUpdate = {
- "$unset": {b: true}
-};
delete documents1[1].b;
testUpdate(documents1, db.t1, {_id: 3}, oplogUpdate);
-oplogUpdate = {
- "$v": NumberInt(1),
- "$set": {"b.d": 2}
-};
-documents1[4].b.d = 2;
-testUpdate(documents1, db.t1, {_id: 8}, oplogUpdate);
-
// Test an update with upsert=true where no documents match the filter prior to the update.
oplogUpdate = {
"$v": NumberInt(2),
diff --git a/jstests/core/json_schema/misc_validation.js b/jstests/core/json_schema/misc_validation.js
index 58e76fc0d68..113eee4e897 100644
--- a/jstests/core/json_schema/misc_validation.js
+++ b/jstests/core/json_schema/misc_validation.js
@@ -308,7 +308,7 @@ if (!isMongos) {
// Test $jsonSchema in the precondition checking for applyOps.
res = testDB.adminCommand({
applyOps: [
- {op: "u", ns: coll.getFullName(), o2: {_id: 0}, o: {$set: {a: false}}},
+ {op: "u", ns: coll.getFullName(), o2: {_id: 0}, o: {$v: 2, diff: {u: {a: false}}}},
],
preCondition: [{
ns: coll.getFullName(),
diff --git a/jstests/core/txns/commands_not_allowed_in_txn.js b/jstests/core/txns/commands_not_allowed_in_txn.js
index 01486c5065d..0b6243a0f58 100644
--- a/jstests/core/txns/commands_not_allowed_in_txn.js
+++ b/jstests/core/txns/commands_not_allowed_in_txn.js
@@ -133,8 +133,16 @@ const commands = [
// There is no applyOps command on mongos.
if (!isMongos) {
- commands.push(
- {applyOps: [{op: "u", ns: testColl.getFullName(), o2: {_id: 0}, o: {$set: {a: 5}}}]});
+ commands.push({
+ applyOps: [{
+ op: "u",
+ ns: testColl.getFullName(),
+ o2: {_id: 0},
+ o:
+
+ {$v: 2, diff: {u: {a: 5}}}
+ }]
+ });
}
commands.forEach(testCommand);
diff --git a/jstests/libs/change_stream_util.js b/jstests/libs/change_stream_util.js
index e5a00bf92c9..be71d282d2c 100644
--- a/jstests/libs/change_stream_util.js
+++ b/jstests/libs/change_stream_util.js
@@ -125,22 +125,6 @@ function canonicalizeEventForTesting(event, expected) {
if (!expected.hasOwnProperty("updateDescription"))
delete event.updateDescription;
- // TODO SERVER-50301: The 'truncatedArrays' field may not appear in the updateDescription
- // depending on whether $v:2 update oplog entries are enabled. When the expected event has an
- // empty 'truncatedFields' we do not require that the actual event contain the field. This
- // logic can be removed when $v:2 update oplog entries are enabled on all configurations.
- if (
- // If the expected event has an empty 'truncatedArrays' field...
- expected.hasOwnProperty("updateDescription") &&
- Array.isArray(expected.updateDescription.truncatedArrays) &&
- expected.updateDescription.truncatedArrays.length == 0 &&
- // ...And the actual event has no truncated arrays field.
- event.hasOwnProperty("updateDescription") &&
- !event.updateDescription.hasOwnProperty("truncatedArrays")) {
- // Treat the actual event as if it had an empty 'truncatedArrays' field.
- event.updateDescription.truncatedArrays = [];
- }
-
return event;
}
diff --git a/jstests/noPassthrough/apply_ops_atomic.js b/jstests/noPassthrough/apply_ops_atomic.js
index 1d30fb016d9..9e62a1c74e3 100644
--- a/jstests/noPassthrough/apply_ops_atomic.js
+++ b/jstests/noPassthrough/apply_ops_atomic.js
@@ -112,9 +112,12 @@ function validateInsertApplyOpsInOplog(conn, ns, uuid, id) {
assert.eq(0, coll.find({"_id": idUnexisting}).itcount());
assert.eq(0, coll.find({"_id": idUpserted}).itcount());
const op = {
- applyOps: [
- {op: 'u', ns: coll.getFullName(), o2: {_id: idUnexisting}, o: {$set: {_id: idUpserted}}}
- ]
+ applyOps: [{
+ op: 'u',
+ ns: coll.getFullName(),
+ o2: {_id: idUnexisting},
+ o: {$v: 2, diff: {u: {_id: idUpserted}}}
+ }]
};
assert.commandWorked(db.runCommand(op));
assert.eq(0, coll.find({"_id": idUnexisting}).itcount());
@@ -137,7 +140,7 @@ function validateInsertApplyOpsInOplog(conn, ns, uuid, id) {
op: 'u',
ns: coll.getFullName(),
o2: {_id: 1},
- o: {$set: {a: "b"}},
+ o: {$v: 2, diff: {u: {a: "b"}}},
}]
}));
assert.eq(1, coll.find().itcount());
diff --git a/jstests/noPassthrough/apply_ops_mode.js b/jstests/noPassthrough/apply_ops_mode.js
index 9f069e2eca8..3bf4317de93 100644
--- a/jstests/noPassthrough/apply_ops_mode.js
+++ b/jstests/noPassthrough/apply_ops_mode.js
@@ -17,7 +17,7 @@ var coll = db.getCollection("apply_ops_mode1");
var id = ObjectId();
for (let updateOp of [
// An update with a modifier.
- {op: 'u', ns: coll.getFullName(), o: {$set: {x: 1}}, o2: {_id: id}},
+ {op: 'u', ns: coll.getFullName(), o: {$v: 2, diff: {u: {x: 1}}}, o2: {_id: id}},
// A full-document replace.
{op: 'u', ns: coll.getFullName(), o: {_id: id, x: 1}, o2: {_id: id}},
]) {
diff --git a/jstests/noPassthrough/oplog_writes_only_permitted_on_standalone.js b/jstests/noPassthrough/oplog_writes_only_permitted_on_standalone.js
index 23ca9da88f3..5081d3a3be1 100644
--- a/jstests/noPassthrough/oplog_writes_only_permitted_on_standalone.js
+++ b/jstests/noPassthrough/oplog_writes_only_permitted_on_standalone.js
@@ -29,7 +29,7 @@ function constructOplogEntry(oplog) {
const toInsertTS = Timestamp(highestTS.getTime(), highestTS.getInc() + 1);
return Object.extend(
testCollOplogEntry,
- {op: "u", ns: "test.coll", o: {$set: {a: 1}}, o2: {_id: 0}, ts: toInsertTS});
+ {op: "u", ns: "test.coll", o: {$v: 2, diff: {u: {a: 1}}}, o2: {_id: 0}, ts: toInsertTS});
}
let toInsert = constructOplogEntry(oplog);
diff --git a/jstests/noPassthrough/server_write_concern_metrics.js b/jstests/noPassthrough/server_write_concern_metrics.js
index 61b18ff1b1f..c0b646d7b0d 100644
--- a/jstests/noPassthrough/server_write_concern_metrics.js
+++ b/jstests/noPassthrough/server_write_concern_metrics.js
@@ -292,11 +292,13 @@ for (const isPSASet of [true, false]) {
// application, as testWriteConcernMetrics will run them multiple times.
testWriteConcernMetrics(
{applyOps: [{op: "i", ns: testColl.getFullName(), o: {_id: 0}}]}, "insert", 1, isPSASet);
- testWriteConcernMetrics(
- {applyOps: [{op: "u", ns: testColl.getFullName(), o2: {_id: 0}, o: {$set: {a: 1}}}]},
- "update",
- 1,
- isPSASet);
+ testWriteConcernMetrics({
+ applyOps:
+ [{op: "u", ns: testColl.getFullName(), o2: {_id: 0}, o: {$v: 2, diff: {u: {a: 1}}}}]
+ },
+ "update",
+ 1,
+ isPSASet);
testWriteConcernMetrics({applyOps: [{op: "d", ns: testColl.getFullName(), o: {_id: 0}}]},
"delete",
1,
diff --git a/jstests/noPassthrough/write_change_stream_pit_preimage_in_transaction.js b/jstests/noPassthrough/write_change_stream_pit_preimage_in_transaction.js
index ce69fe0a092..4000a318405 100644
--- a/jstests/noPassthrough/write_change_stream_pit_preimage_in_transaction.js
+++ b/jstests/noPassthrough/write_change_stream_pit_preimage_in_transaction.js
@@ -209,7 +209,7 @@ function getCollections(db) {
assertPreImagesWrittenForOps(testDB, function() {
assert.commandWorked(testDB.runCommand({
applyOps: [
- {op: "u", ns: coll.getFullName(), o2: {_id: 1}, o: {$set: {a: 2}}},
+ {op: "u", ns: coll.getFullName(), o2: {_id: 1}, o: {$v: 2, diff: {u: {a: 2}}}},
{op: "d", ns: coll.getFullName(), o: {_id: 2}}
],
allowAtomic: false,
@@ -226,7 +226,7 @@ function getCollections(db) {
assertPreImagesWrittenForOps(testDB, function() {
assert.commandWorked(testDB.runCommand({
applyOps: [
- {op: "u", ns: coll.getFullName(), o2: {_id: 1}, o: {$set: {a: 2}}},
+ {op: "u", ns: coll.getFullName(), o2: {_id: 1}, o: {$v: 2, diff: {u: {a: 2}}}},
{op: "d", ns: coll.getFullName(), o: {_id: 2}}
],
}));
diff --git a/jstests/replsets/apply_ops_inserts_do_not_include_fromMigrate_field.js b/jstests/replsets/apply_ops_inserts_do_not_include_fromMigrate_field.js
index 60ea0fbaeb0..2b7d8729b61 100644
--- a/jstests/replsets/apply_ops_inserts_do_not_include_fromMigrate_field.js
+++ b/jstests/replsets/apply_ops_inserts_do_not_include_fromMigrate_field.js
@@ -46,13 +46,13 @@ assert.commandWorked(primaryDB.runCommand({
// Test non-atomic applyOps upserts. These will be logged as insert oplog entries.
assert.commandWorked(primaryDB.runCommand({
- applyOps: [{op: "u", ns: nss(dbName, collName), o2: {_id: 2}, o: {$set: {x: 2}}}],
+ applyOps: [{op: "u", ns: nss(dbName, collName), o2: {_id: 2}, o: {$v: 2, diff: {u: {x: 2}}}}],
allowAtomic: false
}));
assert.commandWorked(primaryDB.runCommand({
applyOps: [
- {op: "u", ns: nss(dbName, collName), o2: {_id: 3}, o: {$set: {x: 3}}},
+ {op: "u", ns: nss(dbName, collName), o2: {_id: 3}, o: {$v: 2, diff: {u: {x: 3}}}},
{op: "c", ns: nss(dbName, "$cmd"), o: {create: "other2"}}
]
}));
diff --git a/jstests/replsets/change_stream_pit_pre_images.js b/jstests/replsets/change_stream_pit_pre_images.js
index 7c7c6667fb1..154ae1a6c04 100644
--- a/jstests/replsets/change_stream_pit_pre_images.js
+++ b/jstests/replsets/change_stream_pit_pre_images.js
@@ -116,7 +116,7 @@ for (const [collectionName, collectionOptions] of [
assert.commandWorked(coll.insert([{_id: 9, a: 1}, {_id: 10, a: 1}]));
assert.commandWorked(testDB.runCommand({
applyOps: [
- {op: "u", ns: coll.getFullName(), o2: {_id: 9}, o: {$set: {a: 2}}},
+ {op: "u", ns: coll.getFullName(), o2: {_id: 9}, o: {$v: 2, diff: {u: {a: 2}}}},
{op: "d", ns: coll.getFullName(), o: {_id: 10}}
],
allowAtomic: false,
diff --git a/jstests/replsets/initial_sync_update_missing_doc_upsert.js b/jstests/replsets/initial_sync_update_missing_doc_upsert.js
index d7637ac7ab7..0c5db031730 100644
--- a/jstests/replsets/initial_sync_update_missing_doc_upsert.js
+++ b/jstests/replsets/initial_sync_update_missing_doc_upsert.js
@@ -50,7 +50,9 @@ numDocuments++;
function applyOps({documentId, alwaysUpsert, allowAtomic}) {
let command = {
- applyOps: [{op: "u", ns: coll.getFullName(), o2: {_id: documentId}, o: {$set: {x: 1}}}]
+ applyOps: [
+ {op: "u", ns: coll.getFullName(), o2: {_id: documentId}, o: {$v: 2, diff: {u: {x: 1}}}}
+ ]
};
if (alwaysUpsert !== null) {
diff --git a/jstests/replsets/initial_sync_update_missing_field.js b/jstests/replsets/initial_sync_update_missing_field.js
index 9e7a9449d5f..fe8269b867d 100644
--- a/jstests/replsets/initial_sync_update_missing_field.js
+++ b/jstests/replsets/initial_sync_update_missing_field.js
@@ -54,20 +54,36 @@ assert.commandWorked(coll.updateOne({_id: 3}, {$set: {'array.0': 1, 'scalar': 1}
assert.commandWorked(coll.updateOne({_id: 2}, {$set: {'array.0': 1}}));
assert.commandWorked(primary.adminCommand({
- applyOps:
- [{op: 'u', ns: coll.getFullName(), o2: {_id: 5}, o: {$set: {'doc.field': 1, 'scalar': 1}}}]
+ applyOps: [{
+ op: 'u',
+ ns: coll.getFullName(),
+ o2: {_id: 5},
+ o: {$v: 2, diff: {u: {'scalar': 1}, sdoc: {u: {field: 1}}}}
+ }]
}));
-assert.commandWorked(primary.adminCommand(
- {applyOps: [{op: 'u', ns: coll.getFullName(), o2: {_id: 4}, o: {$set: {'doc.field': 1}}}]}));
-
assert.commandWorked(primary.adminCommand({
applyOps:
- [{op: 'u', ns: coll.getFullName(), o2: {_id: 7}, o: {$set: {'array.0': 1, 'scalar': 1}}}]
+ [{op: 'u', ns: coll.getFullName(), o2: {_id: 4}, o: {$v: 2, diff: {sdoc: {u: {field: 1}}}}}]
}));
-assert.commandWorked(primary.adminCommand(
- {applyOps: [{op: 'u', ns: coll.getFullName(), o2: {_id: 6}, o: {$set: {'array.0': 1}}}]}));
+assert.commandWorked(primary.adminCommand({
+ applyOps: [{
+ op: 'u',
+ ns: coll.getFullName(),
+ o2: {_id: 7},
+ o: {$v: 2, diff: {u: {'scalar': 1}, sarray: {a: true, u0: 1}}}
+ }]
+}));
+
+assert.commandWorked(primary.adminCommand({
+ applyOps: [{
+ op: 'u',
+ ns: coll.getFullName(),
+ o2: {_id: 6},
+ o: {$v: 2, diff: {sarray: {a: true, u0: 1}}}
+ }]
+}));
jsTestLog("Set array and subdoc fields to strings on primary");
diff --git a/jstests/replsets/oplog_format.js b/jstests/replsets/oplog_format.js
index d27f60ed2df..e37266fb9da 100644
--- a/jstests/replsets/oplog_format.js
+++ b/jstests/replsets/oplog_format.js
@@ -16,14 +16,6 @@ const primary = replTest.getPrimary();
const coll = primary.getDB("o").fake;
const cdb = coll.getDB();
-const v2FlagName = "internalQueryEnableLoggingV2OplogEntries";
-
-const getParamCommandRes =
- assert.commandWorked(cdb.adminCommand({getParameter: 1, [v2FlagName]: 1}));
-// The flag may be unrecognized (in which case $v:2 is not enabled), or we may be running in a
-// configuration where it is disabled.
-const v2Enabled = getParamCommandRes[v2FlagName];
-
function getLastOplogEntry() {
return primary.getDB("local").oplog.rs.find().limit(1).sort({$natural: -1}).next();
}
@@ -52,11 +44,7 @@ assertLastOplog({_id: 1, a: 2}, {_id: 1}, "save " + msg);
var res = assert.commandWorked(coll.update({}, {$inc: {a: 1}, $set: {b: 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: 3, b: 2}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({$v: 2, diff: {u: {a: 3}, i: {b: 2}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 3, b: 2}}, {_id: 1}, msg);
-}
+assertLastOplog({$v: 2, diff: {u: {a: 3}, i: {b: 2}}}, {_id: 1}, msg);
var msg = "IncRewriteNonExistingField: $inc $set";
coll.save({_id: 1, c: 0});
@@ -64,11 +52,7 @@ assertLastOplog({_id: 1, c: 0}, {_id: 1}, "save " + msg);
res = assert.commandWorked(coll.update({}, {$inc: {a: 1}, $set: {b: 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, c: 0, a: 1, b: 2}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"i": {"a": 1, "b": 2}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 1, b: 2}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"i": {"a": 1, "b": 2}}}, {_id: 1}, msg);
var msg = "TwoNestedPulls: two $pull";
coll.save({_id: 1, a: {b: [1, 2], c: [1, 2]}});
@@ -76,11 +60,7 @@ assertLastOplog({_id: 1, a: {b: [1, 2], c: [1, 2]}}, {_id: 1}, "save " + msg);
res = assert.commandWorked(coll.update({}, {$pull: {'a.b': 2, 'a.c': 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: {b: [1], c: [1]}}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [1], "c": [1]}}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {'a.b': [1], 'a.c': [1]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [1], "c": [1]}}}}, {_id: 1}, msg);
var msg = "MultiSets: two $set";
coll.save({_id: 1, a: 1, b: 1});
@@ -88,11 +68,7 @@ assertLastOplog({_id: 1, a: 1, b: 1}, {_id: 1}, "save " + msg);
res = assert.commandWorked(coll.update({}, {$set: {a: 2, b: 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: 2, b: 2}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"u": {"a": 2, "b": 2}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 2, b: 2}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"u": {"a": 2, "b": 2}}}, {_id: 1}, msg);
// More tests to validate the oplog format and correct excution
@@ -102,31 +78,19 @@ assertLastOplog({_id: 1, a: 1}, {_id: 1}, "save " + msg);
res = assert.commandWorked(coll.update({}, {$set: {a: 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: 2}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"u": {"a": 2}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 2}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"u": {"a": 2}}}, {_id: 1}, msg);
var msg = "bad single $inc";
res = assert.commandWorked(coll.update({}, {$inc: {a: 1}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: 3}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"u": {"a": 3}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 3}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"u": {"a": 3}}}, {_id: 1}, msg);
var msg = "bad double $set";
res = assert.commandWorked(coll.update({}, {$set: {a: 2, b: 2}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: 2, b: 2}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"u": {"a": 2}, "i": {"b": 2}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: 2, b: 2}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"u": {"a": 2}, "i": {"b": 2}}}, {_id: 1}, msg);
var msg = "bad save";
assert.commandWorked(coll.save({_id: 1, a: [2]}));
@@ -137,12 +101,7 @@ var msg = "bad array $inc";
res = assert.commandWorked(coll.update({}, {$inc: {"a.0": 1}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: [3]}, coll.findOne({}), msg);
-let lastTS;
-if (v2Enabled) {
- lastTS = assertLastOplog({"$v": 2, "diff": {"sa": {"a": true, "u0": 3}}}, {_id: 1}, msg);
-} else {
- lastTS = assertLastOplog({$v: 1, $set: {"a.0": 3}}, {_id: 1}, msg);
-}
+var lastTS = assertLastOplog({"$v": 2, "diff": {"sa": {"a": true, "u0": 3}}}, {_id: 1}, msg);
var msg = "bad $setOnInsert";
res = assert.commandWorked(coll.update({}, {$setOnInsert: {a: -1}}));
@@ -169,11 +128,7 @@ coll.save({_id: 1, a: "foo"});
res = assert.commandWorked(coll.update({}, {$push: {c: 18}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: "foo", c: [18]}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"i": {"c": [18]}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {"c": [18]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"i": {"c": [18]}}}, {_id: 1}, msg);
var msg = "bad array $push $slice";
coll.save({_id: 1, a: {b: [18]}});
@@ -181,11 +136,7 @@ res = assert.commandWorked(
coll.update({_id: {$gt: 0}}, {$push: {"a.b": {$each: [1, 2], $slice: -2}}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: {b: [1, 2]}}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [1, 2]}}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {"a.b": [1, 2]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [1, 2]}}}}, {_id: 1}, msg);
var msg = "bad array $push $sort ($slice -100)";
coll.save({_id: 1, a: {b: [{c: 2}, {c: 1}]}});
@@ -193,12 +144,8 @@ res = assert.commandWorked(
coll.update({}, {$push: {"a.b": {$each: [{c: -1}], $sort: {c: 1}, $slice: -100}}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: {b: [{c: -1}, {c: 1}, {c: 2}]}}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog(
- {"$v": 2, "diff": {"sa": {"u": {"b": [{"c": -1}, {"c": 1}, {"c": 2}]}}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {"a.b": [{c: -1}, {c: 1}, {c: 2}]}}, {_id: 1}, msg);
-}
+assertLastOplog(
+ {"$v": 2, "diff": {"sa": {"u": {"b": [{"c": -1}, {"c": 1}, {"c": 2}]}}}}, {_id: 1}, msg);
var msg = "bad array $push $slice $sort";
coll.save({_id: 1, a: [{b: 2}, {b: 1}]});
@@ -206,11 +153,7 @@ res = assert.commandWorked(
coll.update({_id: {$gt: 0}}, {$push: {a: {$each: [{b: -1}], $slice: -2, $sort: {b: 1}}}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: [{b: 1}, {b: 2}]}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"u": {"a": [{"b": 1}, {"b": 2}]}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {a: [{b: 1}, {b: 2}]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"u": {"a": [{"b": 1}, {"b": 2}]}}}, {_id: 1}, msg);
var msg = "bad array $push $slice $sort first two";
coll.save({_id: 1, a: {b: [{c: 2}, {c: 1}]}});
@@ -218,11 +161,7 @@ res = assert.commandWorked(
coll.update({_id: {$gt: 0}}, {$push: {"a.b": {$each: [{c: -1}], $slice: -2, $sort: {c: 1}}}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: {b: [{c: 1}, {c: 2}]}}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [{"c": 1}, {"c": 2}]}}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {"a.b": [{c: 1}, {c: 2}]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [{"c": 1}, {"c": 2}]}}}}, {_id: 1}, msg);
var msg = "bad array $push $slice $sort reversed first two";
coll.save({_id: 1, a: {b: [{c: 1}, {c: 2}]}});
@@ -230,11 +169,7 @@ res = assert.commandWorked(
coll.update({_id: {$gt: 0}}, {$push: {"a.b": {$each: [{c: -1}], $slice: -2, $sort: {c: -1}}}}));
assert.eq(res.nModified, 1, "update failed for '" + msg + "': " + res.toString());
assert.docEq({_id: 1, a: {b: [{c: 1}, {c: -1}]}}, coll.findOne({}), msg);
-if (v2Enabled) {
- assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [{"c": 1}, {"c": -1}]}}}}, {_id: 1}, msg);
-} else {
- assertLastOplog({$v: 1, $set: {"a.b": [{c: 1}, {c: -1}]}}, {_id: 1}, msg);
-}
+assertLastOplog({"$v": 2, "diff": {"sa": {"u": {"b": [{"c": 1}, {"c": -1}]}}}}, {_id: 1}, msg);
replTest.stopSet();
})();
diff --git a/src/mongo/db/commands/fle2_compact.cpp b/src/mongo/db/commands/fle2_compact.cpp
index 886d0bdfc9a..7a5361d60e7 100644
--- a/src/mongo/db/commands/fle2_compact.cpp
+++ b/src/mongo/db/commands/fle2_compact.cpp
@@ -160,7 +160,7 @@ void upsertNullDocument(FLEQueryInterface* queryImpl,
updateEntry.setUpsert(false);
updateEntry.setQ(newNullDoc.getField("_id").wrap());
updateEntry.setU(mongo::write_ops::UpdateModification(
- newNullDoc, write_ops::UpdateModification::ClassicTag(), true));
+ newNullDoc, write_ops::UpdateModification::ReplacementTag{}));
write_ops::UpdateCommandRequest updateRequest(nss, {std::move(updateEntry)});
auto [reply, originalDoc] =
queryImpl->updateWithPreimage(nss, EncryptionInformation(BSONObj()), updateRequest);
diff --git a/src/mongo/db/commands/write_commands.cpp b/src/mongo/db/commands/write_commands.cpp
index b360c7b1a2c..a2c3e378d38 100644
--- a/src/mongo/db/commands/write_commands.cpp
+++ b/src/mongo/db/commands/write_commands.cpp
@@ -212,7 +212,8 @@ write_ops::UpdateOpEntry makeTimeseriesUpdateOpEntry(
write_ops::UpdateModification::DiffOptions options;
options.mustCheckExistenceForInsertOperations =
static_cast<bool>(repl::tenantMigrationRecipientInfo(opCtx));
- write_ops::UpdateModification u(updateBuilder.obj(), options);
+ write_ops::UpdateModification u(
+ updateBuilder.obj(), write_ops::UpdateModification::DeltaTag{}, options);
write_ops::UpdateOpEntry update(BSON("_id" << batch->bucket().id), std::move(u));
invariant(!update.getMulti(), batch->bucket().id.toString());
invariant(!update.getUpsert(), batch->bucket().id.toString());
diff --git a/src/mongo/db/fle_crud.cpp b/src/mongo/db/fle_crud.cpp
index f1f9cf96f2d..761efbeb843 100644
--- a/src/mongo/db/fle_crud.cpp
+++ b/src/mongo/db/fle_crud.cpp
@@ -842,7 +842,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
auto pushUpdate = EDCServerCollection::finalizeForUpdate(updateModifier, serverPayload);
newUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pushUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pushUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
} else {
auto replacementDocument = updateModification.getUpdateReplacement();
EDCServerCollection::validateEncryptedFieldInfo(
@@ -857,7 +857,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
EDCServerCollection::finalizeForInsert(replacementDocument, serverPayload);
newUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- safeContentReplace, write_ops::UpdateModification::ClassicTag(), true));
+ safeContentReplace, write_ops::UpdateModification::ReplacementTag{}));
}
// Step 3 ----
@@ -908,7 +908,7 @@ write_ops::UpdateCommandReply processUpdate(FLEQueryInterface* queryImpl,
pullUpdateOpEntry.setMulti(false);
pullUpdateOpEntry.setQ(BSON("_id"_sd << idElement));
pullUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pullUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pullUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
newUpdateRequest.setUpdates({pullUpdateOpEntry});
newUpdateRequest.getWriteCommandRequestBase().setStmtId(boost::none);
newUpdateRequest.setLegacyRuntimeConstants(boost::none);
@@ -1089,7 +1089,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
// Step 2 ----
newUpdateModification = write_ops::UpdateModification(
- pushUpdate, write_ops::UpdateModification::ClassicTag(), false);
+ pushUpdate, write_ops::UpdateModification::ModifierUpdateTag{});
} else {
auto replacementDocument = updateModification.getUpdateReplacement();
EDCServerCollection::validateEncryptedFieldInfo(
@@ -1104,7 +1104,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
EDCServerCollection::finalizeForInsert(replacementDocument, serverPayload);
newUpdateModification = write_ops::UpdateModification(
- safeContentReplace, write_ops::UpdateModification::ClassicTag(), true);
+ safeContentReplace, write_ops::UpdateModification::ReplacementTag{});
}
// Step 3 ----
@@ -1165,7 +1165,7 @@ write_ops::FindAndModifyCommandReply processFindAndModify(
pullUpdateOpEntry.setMulti(false);
pullUpdateOpEntry.setQ(BSON("_id"_sd << idElement));
pullUpdateOpEntry.setU(mongo::write_ops::UpdateModification(
- pullUpdate, write_ops::UpdateModification::ClassicTag(), false));
+ pullUpdate, write_ops::UpdateModification::ModifierUpdateTag{}));
newUpdateRequest.setUpdates({pullUpdateOpEntry});
newUpdateRequest.setLegacyRuntimeConstants(boost::none);
newUpdateRequest.getWriteCommandRequestBase().setStmtId(boost::none);
diff --git a/src/mongo/db/fle_crud_test.cpp b/src/mongo/db/fle_crud_test.cpp
index df3400b72b1..0e6489981e8 100644
--- a/src/mongo/db/fle_crud_test.cpp
+++ b/src/mongo/db/fle_crud_test.cpp
@@ -538,7 +538,7 @@ void FleCrudTest::doSingleUpdate(int id, BSONElement element) {
void FleCrudTest::doSingleUpdateWithUpdateDoc(int id, BSONObj update) {
doSingleUpdateWithUpdateDoc(
id,
- write_ops::UpdateModification(update, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(update, write_ops::UpdateModification::ModifierUpdateTag{}));
}
void FleCrudTest::doSingleUpdateWithUpdateDoc(int id,
@@ -944,8 +944,7 @@ TEST_F(FleCrudTest, UpdateOneReplace) {
auto result = FLEClientCrypto::transformPlaceholders(replaceEP, &_keyVault);
doSingleUpdateWithUpdateDoc(
- 1,
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, true));
+ 1, write_ops::UpdateModification(result, write_ops::UpdateModification::ReplacementTag{}));
assertDocumentCounts(1, 2, 1, 3);
@@ -1024,7 +1023,7 @@ TEST_F(FleCrudTest, FindAndModify_UpdateOne) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
doFindAndModify(req);
assertDocumentCounts(1, 2, 1, 3);
@@ -1075,7 +1074,7 @@ TEST_F(FleCrudTest, FindAndModify_RenameSafeContent) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
ASSERT_THROWS_CODE(doFindAndModify(req), DBException, 6371506);
}
@@ -1101,7 +1100,7 @@ TEST_F(FleCrudTest, FindAndModify_SetSafeContent) {
write_ops::FindAndModifyCommandRequest req(_edcNs);
req.setQuery(BSON("_id" << 1));
req.setUpdate(
- write_ops::UpdateModification(result, write_ops::UpdateModification::ClassicTag{}, false));
+ write_ops::UpdateModification(result, write_ops::UpdateModification::ModifierUpdateTag{}));
ASSERT_THROWS_CODE(doFindAndModify(req), DBException, 6666200);
}
diff --git a/src/mongo/db/ops/write_ops.cpp b/src/mongo/db/ops/write_ops.cpp
index 411ecada1de..41c7fa4f5d2 100644
--- a/src/mongo/db/ops/write_ops.cpp
+++ b/src/mongo/db/ops/write_ops.cpp
@@ -158,17 +158,16 @@ UpdateModification UpdateModification::parseFromOplogEntry(const BSONObj& oField
BSONElement idField = oField["_id"];
// If _id field is present, we're getting a replacement style update in which $v can be a user
- // field. Otherwise, $v field has to be either missing or be one of the version flag $v:1 /
- // $v:2.
+ // field. Otherwise, $v field has to be $v:2.
uassert(4772600,
- str::stream() << "Expected _id field or $v field missing or $v:1/$v:2, but got: "
- << vField,
- idField.ok() || !vField.ok() ||
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1) ||
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2));
-
- if (!idField.ok() && vField.ok() &&
- vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2)) {
+ str::stream() << "Expected _id field or $v:2, but got: " << vField,
+ idField.ok() ||
+ (vField.ok() &&
+ vField.numberInt() == static_cast<int>(UpdateOplogEntryVersion::kDeltaV2)));
+
+ // It is important to check for '_id' field first, because a replacement style update can still
+ // have a '$v' field in the object.
+ if (!idField.ok()) {
// Make sure there's a diff field.
BSONElement diff = oField[update_oplog_entry::kDiffObjectFieldName];
uassert(4772601,
@@ -176,15 +175,14 @@ UpdateModification UpdateModification::parseFromOplogEntry(const BSONObj& oField
<< diff.type(),
diff.type() == BSONType::Object);
- return UpdateModification(doc_diff::Diff{diff.embeddedObject()}, options);
+ return UpdateModification(doc_diff::Diff{diff.embeddedObject()}, DeltaTag{}, options);
} else {
- // Treat it as a "classic" update which can either be a full replacement or a
- // modifier-style update. Use "_id" field to determine whether which style it is.
- return UpdateModification(oField, ClassicTag{}, idField.ok());
+        // Treat it as a full replacement update.
+ return UpdateModification(oField, ReplacementTag{});
}
}
-UpdateModification::UpdateModification(doc_diff::Diff diff, DiffOptions options)
+UpdateModification::UpdateModification(doc_diff::Diff diff, DeltaTag, DiffOptions options)
: _update(DeltaUpdate{std::move(diff), options}) {}
UpdateModification::UpdateModification(TransformFunc transform)
@@ -193,7 +191,7 @@ UpdateModification::UpdateModification(TransformFunc transform)
UpdateModification::UpdateModification(BSONElement update) {
const auto type = update.type();
if (type == BSONType::Object) {
- _update = UpdateModification(update.Obj(), ClassicTag{})._update;
+ _update = UpdateModification(update.Obj())._update;
return;
}
@@ -204,21 +202,19 @@ UpdateModification::UpdateModification(BSONElement update) {
_update = PipelineUpdate{parsePipelineFromBSON(update)};
}
-// If we know whether the update is a replacement, use that value. For example, when we're parsing
-// the oplog entry, we know if the update is a replacement by checking whether there's an _id field.
-UpdateModification::UpdateModification(const BSONObj& update, ClassicTag, bool isReplacement) {
- if (isReplacement) {
+UpdateModification::UpdateModification(const BSONObj& update) {
+ if (isClassicalUpdateReplacement(update)) {
_update = ReplacementUpdate{update};
} else {
_update = ModifierUpdate{update};
}
}
-// If we don't know whether the update is a replacement, for example while we are parsing a user
-// request, we infer this by checking whether the first element is a $-field to distinguish modifier
-// style updates.
-UpdateModification::UpdateModification(const BSONObj& update, ClassicTag)
- : UpdateModification(update, ClassicTag{}, isClassicalUpdateReplacement(update)) {}
+UpdateModification::UpdateModification(const BSONObj& update, ModifierUpdateTag)
+ : _update{ModifierUpdate{update}} {}
+UpdateModification::UpdateModification(const BSONObj& update, ReplacementTag)
+ : _update{ReplacementUpdate{update}} {}
+
UpdateModification::UpdateModification(std::vector<BSONObj> pipeline)
: _update{PipelineUpdate{std::move(pipeline)}} {}
diff --git a/src/mongo/db/ops/write_ops_parsers.h b/src/mongo/db/ops/write_ops_parsers.h
index 4fd38f90b68..ea898a153b0 100644
--- a/src/mongo/db/ops/write_ops_parsers.h
+++ b/src/mongo/db/ops/write_ops_parsers.h
@@ -89,29 +89,39 @@ public:
struct DiffOptions {
bool mustCheckExistenceForInsertOperations = true;
};
- struct ClassicTag {};
+
+ /**
+ * Tags used to disambiguate between the constructors for different update types.
+ */
+ struct ModifierUpdateTag {};
+ struct ReplacementTag {};
+ struct DeltaTag {};
// Given the 'o' field of an update oplog entry, will return an UpdateModification that can be
// applied. The `options` parameter will be applied only in the case a Delta update is parsed.
static UpdateModification parseFromOplogEntry(const BSONObj& oField,
const DiffOptions& options);
static UpdateModification parseFromClassicUpdate(const BSONObj& modifiers) {
- return UpdateModification(modifiers, ClassicTag{});
+ return UpdateModification(modifiers);
}
static UpdateModification parseFromV2Delta(const doc_diff::Diff& diff,
DiffOptions const& options) {
- return UpdateModification(diff, options);
+ return UpdateModification(diff, DeltaTag{}, options);
}
UpdateModification() = default;
UpdateModification(BSONElement update);
UpdateModification(std::vector<BSONObj> pipeline);
- UpdateModification(doc_diff::Diff, DiffOptions);
+ UpdateModification(doc_diff::Diff, DeltaTag, DiffOptions);
// Creates an transform-style update. The transform function MUST preserve the _id element.
UpdateModification(TransformFunc transform);
- // This constructor exists only to provide a fast-path for constructing classic-style updates.
- UpdateModification(const BSONObj& update, ClassicTag, bool isReplacement);
- UpdateModification(const BSONObj& update, ClassicTag);
+    // These constructors exist only to provide a fast-path.
+ UpdateModification(const BSONObj& update, ModifierUpdateTag);
+ UpdateModification(const BSONObj& update, ReplacementTag);
+ // If we don't know whether the update is a replacement or a modifier style update, for example
+ // while we are parsing a user request, we infer this by checking whether the first element is a
+ // $-field to distinguish modifier style updates.
+ UpdateModification(const BSONObj& update);
/**
* These methods support IDL parsing of the "u" field from the update command and OP_UPDATE.
diff --git a/src/mongo/db/query/query_knobs.idl b/src/mongo/db/query/query_knobs.idl
index 9fa82639f51..4b7198314f2 100644
--- a/src/mongo/db/query/query_knobs.idl
+++ b/src/mongo/db/query/query_knobs.idl
@@ -535,13 +535,6 @@ server_parameters:
gt: 0
on_update: plan_cache_util::clearSbeCacheOnParameterChange
- internalQueryEnableLoggingV2OplogEntries:
- description: "If true, this node may log $v:2 delta-style oplog entries."
- set_at: [ startup, runtime ]
- cpp_varname: "internalQueryEnableLoggingV2OplogEntries"
- cpp_vartype: AtomicWord<bool>
- default: true
-
internalQuerySlotBasedExecutionMaxStaticIndexScanIntervals:
description: "Limits the number of statically known intervals that SBE can decompose index
bounds into when possible."
diff --git a/src/mongo/db/repl/SConscript b/src/mongo/db/repl/SConscript
index ba09749f22e..43520c0dfaf 100644
--- a/src/mongo/db/repl/SConscript
+++ b/src/mongo/db/repl/SConscript
@@ -653,7 +653,6 @@ env.Library(
source=[
'idempotency_document_structure.cpp',
'idempotency_scalar_generator.cpp',
- 'idempotency_update_sequence.cpp',
],
LIBDEPS=[
'$BUILD_DIR/mongo/base',
@@ -1647,7 +1646,6 @@ if wiredtiger:
'delayable_timeout_callback_test.cpp',
'drop_pending_collection_reaper_test.cpp',
'idempotency_document_structure_test.cpp',
- 'idempotency_update_sequence_test.cpp',
'initial_syncer_test.cpp',
'isself_test.cpp',
'member_config_test.cpp',
diff --git a/src/mongo/db/repl/idempotency_test.cpp b/src/mongo/db/repl/idempotency_test.cpp
index 69777fdbc55..53acd427f38 100644
--- a/src/mongo/db/repl/idempotency_test.cpp
+++ b/src/mongo/db/repl/idempotency_test.cpp
@@ -38,7 +38,6 @@
#include "mongo/db/query/plan_executor.h"
#include "mongo/db/repl/idempotency_document_structure.h"
#include "mongo/db/repl/idempotency_test_fixture.h"
-#include "mongo/db/repl/idempotency_update_sequence.h"
#include "mongo/db/repl/replication_coordinator.h"
#include "mongo/db/server_options.h"
#include "mongo/db/update/document_diff_calculator.h"
@@ -58,10 +57,6 @@ class RandomizedIdempotencyTest : public IdempotencyTest {
protected:
const int kDocId = 1;
const BSONObj kDocIdQuery = BSON("_id" << kDocId);
-
- std::vector<OplogEntry> createUpdateSequence(const UpdateSequenceGenerator& generator,
- size_t length);
-
BSONObj canonicalizeDocumentForDataHash(const BSONObj& obj) override;
BSONObj getDoc();
@@ -73,15 +68,10 @@ protected:
Status resetState() override;
- void runIdempotencyTestCase();
- void runUpdateV2IdempotencyTestCase(double v2Probability);
+ void runUpdateV2IdempotencyTestCase();
std::vector<OplogEntry> initOps;
int64_t seed;
-
-private:
- // Op-style updates cannot guarantee field order for certain cases.
- bool _ignoreFieldOrder = true;
};
BSONObj canonicalizeBSONObjForDataHash(const BSONObj& obj);
@@ -123,9 +113,6 @@ BSONObj canonicalizeBSONObjForDataHash(const BSONObj& obj) {
}
BSONObj RandomizedIdempotencyTest::canonicalizeDocumentForDataHash(const BSONObj& obj) {
- if (!_ignoreFieldOrder) {
- return obj;
- }
return canonicalizeBSONObjForDataHash(obj);
}
BSONObj RandomizedIdempotencyTest::getDoc() {
@@ -135,18 +122,6 @@ BSONObj RandomizedIdempotencyTest::getDoc() {
return doc.getOwned();
}
-std::vector<OplogEntry> RandomizedIdempotencyTest::createUpdateSequence(
- const UpdateSequenceGenerator& generator, const size_t length) {
- // for each document enumerated & inserted generate a sequence of updates to apply to it.
- std::vector<OplogEntry> updateSequence;
- updateSequence.reserve(length);
- for (size_t i = 0; i < length; i++) {
- updateSequence.push_back(update(kDocId, generator.generateUpdate()));
- }
-
- return updateSequence;
-}
-
std::string RandomizedIdempotencyTest::getStatesString(const std::vector<CollectionState>& state1,
const std::vector<CollectionState>& state2,
const std::vector<OplogEntry>& state1Ops,
@@ -195,75 +170,13 @@ Status RandomizedIdempotencyTest::resetState() {
return runOpsInitialSync(initOps);
}
-void RandomizedIdempotencyTest::runIdempotencyTestCase() {
- _ignoreFieldOrder = true;
+void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase() {
ASSERT_OK(
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- const size_t lengthOfNumericComponent = 1;
-
- // Eliminate modification of array elements, because they cause theoretically valid sequences
- // that cause idempotency issues.
- const double kScalarProbability = 0.375;
- const double kDocProbability = 0.375;
- const double kArrProbability = 0;
-
this->seed = SecureRandom().nextInt64();
PseudoRandom seedGenerator(this->seed);
- RandomizedScalarGenerator scalarGenerator{PseudoRandom(seedGenerator.nextInt64())};
- UpdateSequenceGenerator updateGenerator({fields,
- depth,
- lengthOfNumericComponent,
- kScalarProbability,
- kDocProbability,
- kArrProbability},
- PseudoRandom{seedGenerator.nextInt64()},
- &scalarGenerator);
-
- const bool kSkipDocs = kDocProbability == 0.0;
- const bool kSkipArrs = kArrProbability == 0.0;
- DocumentStructureEnumerator enumerator(
- {fields, depth, lengthOfNumericComponent, kSkipDocs, kSkipArrs}, &scalarGenerator);
-
- const size_t kUpdateSequenceLength = 5;
- // For the sake of keeping the speed of iteration sane and feasible.
- const size_t kNumUpdateSequencesPerDoc = 2;
-
- for (auto doc : enumerator) {
- BSONObj docWithId = (BSONObjBuilder(doc) << "_id" << kDocId).obj();
- for (size_t i = 0; i < kNumUpdateSequencesPerDoc; i++) {
- this->initOps = std::vector<OplogEntry>{createCollection(), insert(docWithId)};
- std::vector<OplogEntry> updateSequence =
- createUpdateSequence(updateGenerator, kUpdateSequenceLength);
- testOpsAreIdempotent(updateSequence, SequenceType::kAnyPrefixOrSuffix);
- }
- }
-}
-
-void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probability) {
- _ignoreFieldOrder = (v2Probability < 1.0);
- ASSERT_OK(
- ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
-
- this->seed = SecureRandom().nextInt64();
- PseudoRandom seedGenerator(this->seed);
- RandomizedScalarGenerator scalarGenerator{PseudoRandom(seedGenerator.nextInt64())};
-
- // Eliminate modification of array elements when generating $v:1 oplog udpates, because they
- // cause theoretically valid sequences that cause idempotency issues.
- //
- // For example oplog entries '{$unset: {a.1: null}}' and '{$set: {a.1.1: null}}' can break
- // idempotency if the entries are applied on an input document '{a: []}'. These entries should
- // not have been generated in practice if the starting document is '{a: []}', but the current
- // 'UpdateSequenceGenerator' is not smart enough to figure that out.
- const size_t lengthOfNumericComponent = 0;
-
std::set<StringData> fields{"f00", "f10", "f01", "f11", "f02", "f20"};
- UpdateSequenceGenerator updateV1Generator({fields, 2 /* depth */, lengthOfNumericComponent},
- PseudoRandom(seedGenerator.nextInt64()),
- &scalarGenerator);
auto generateDocWithId = [&seedGenerator](int id) {
MutableDocument doc;
@@ -286,22 +199,15 @@ void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probabil
for (size_t i = 0; i < kUpdateSequenceLength; i++) {
BSONObj oplogDiff;
boost::optional<BSONObj> generatedDoc;
- if (rng.nextCanonicalDouble() <= v2Probability) {
- // With delta based updates, we cannot just generate any random diff since certains
- // diff when applied to an unrelated object (which would never have produced by
- // computing the input objects) would break idempotency. So we do a dry run of what
- // the collection state would look like and compute diffs based on that.
- generatedDoc = generateDocWithId(kDocId);
- auto diffOutput =
- doc_diff::computeDiff(oldDoc,
- *generatedDoc,
- update_oplog_entry::kSizeOfDeltaOplogEntryMetadata,
- nullptr);
- ASSERT(diffOutput);
- oplogDiff = BSON("$v" << 2 << "diff" << diffOutput->diff);
- } else {
- oplogDiff = updateV1Generator.generateUpdate();
- }
+ // With delta based updates, we cannot just generate any random diff since certain diffs
+            // when applied to an unrelated object (which would never have been produced by computing the
+ // input objects) would break idempotency. So we do a dry run of what the collection
+ // state would look like and compute diffs based on that.
+ generatedDoc = generateDocWithId(kDocId);
+ auto diffOutput = doc_diff::computeDiff(
+ oldDoc, *generatedDoc, update_oplog_entry::kSizeOfDeltaOplogEntryMetadata, nullptr);
+ ASSERT(diffOutput);
+ oplogDiff = BSON("$v" << 2 << "diff" << diffOutput->diff);
auto op = update(kDocId, oplogDiff);
ASSERT_OK(runOpInitialSync(op));
if (generatedDoc) {
@@ -314,14 +220,11 @@ void RandomizedIdempotencyTest::runUpdateV2IdempotencyTestCase(double v2Probabil
}
}
-TEST_F(RandomizedIdempotencyTest, CheckUpdateSequencesAreIdempotent) {
- runIdempotencyTestCase();
-}
TEST_F(RandomizedIdempotencyTest, CheckUpdateSequencesAreIdempotentV2) {
- runUpdateV2IdempotencyTestCase(1.0);
- runUpdateV2IdempotencyTestCase(0.4);
- runUpdateV2IdempotencyTestCase(0.5);
- runUpdateV2IdempotencyTestCase(0.6);
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
+ runUpdateV2IdempotencyTestCase();
}
} // namespace
diff --git a/src/mongo/db/repl/idempotency_update_sequence.cpp b/src/mongo/db/repl/idempotency_update_sequence.cpp
deleted file mode 100644
index 3963e940276..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence.cpp
+++ /dev/null
@@ -1,284 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include "mongo/db/repl/idempotency_update_sequence.h"
-
-#include <algorithm>
-#include <memory>
-
-#include "mongo/db/exec/document_value/value.h"
-#include "mongo/db/field_ref.h"
-#include "mongo/db/jsobj.h"
-#include "mongo/db/repl/idempotency_document_structure.h"
-
-namespace mongo {
-
-UpdateSequenceGeneratorConfig::UpdateSequenceGeneratorConfig(std::set<StringData> fields_,
- std::size_t depth_,
- std::size_t lengthOfNumericComponent_,
- double scalarProbability_,
- double docProbability_,
- double arrProbability_)
- : fields(std::move(fields_)),
- depth(depth_),
- lengthOfNumericComponent(lengthOfNumericComponent_),
- scalarProbability(scalarProbability_),
- docProbability(docProbability_),
- arrProbability(arrProbability_) {}
-
-std::size_t UpdateSequenceGenerator::_getPathDepth(const std::string& path) {
- // Our depth is -1 because we count at 0, but numParts just counts the number of fields.
- return path == "" ? 0 : FieldRef(path).numParts() - 1;
-}
-
-std::vector<std::string> UpdateSequenceGenerator::_eliminatePrefixPaths(
- const std::string& path, const std::vector<std::string>& paths) {
- std::vector<std::string> remainingPaths;
- for (auto oldPath : paths) {
- if (!FieldRef(oldPath).isPrefixOf(FieldRef(path)) &&
- !FieldRef(path).isPrefixOf(FieldRef(oldPath)) && path != path) {
- remainingPaths.push_back(oldPath);
- }
- }
-
- return remainingPaths;
-}
-
-void UpdateSequenceGenerator::_generatePaths(const UpdateSequenceGeneratorConfig& config,
- const std::string& path) {
- if (UpdateSequenceGenerator::_getPathDepth(path) == config.depth) {
- return;
- }
-
- if (!path.empty()) {
- for (std::size_t i = 0; i < config.lengthOfNumericComponent; i++) {
- FieldRef arrPathRef(path);
- arrPathRef.appendPart(std::to_string(i));
- auto arrPath = arrPathRef.dottedField().toString();
- _paths.push_back(arrPath);
- _generatePaths(config, arrPath);
- }
- }
-
- if (config.fields.empty()) {
- return;
- }
-
- std::set<StringData> remainingFields(config.fields);
- for (auto field : config.fields) {
- remainingFields.erase(remainingFields.begin());
- FieldRef docPathRef(path);
- docPathRef.appendPart(field);
- auto docPath = docPathRef.dottedField().toString();
- _paths.push_back(docPath);
- UpdateSequenceGeneratorConfig remainingConfig = {remainingFields,
- config.depth,
- config.lengthOfNumericComponent,
- config.scalarProbability,
- config.docProbability,
- config.arrProbability};
- _generatePaths(remainingConfig, docPath);
- }
-}
-
-std::vector<std::string> UpdateSequenceGenerator::_getRandomPaths() const {
- std::size_t randomAmountOfArgs = this->_random.nextInt32(this->_paths.size()) + 1;
- std::vector<std::string> randomPaths;
- std::vector<std::string> validPaths(this->_paths);
-
- for (std::size_t i = 0; i < randomAmountOfArgs; i++) {
- int randomIndex = UpdateSequenceGenerator::_random.nextInt32(validPaths.size());
- std::string randomPath = validPaths[randomIndex];
- randomPaths.push_back(randomPath);
- validPaths = UpdateSequenceGenerator::_eliminatePrefixPaths(randomPath, validPaths);
- if (validPaths.empty()) {
- break;
- }
- }
-
- return randomPaths;
-}
-
-BSONObj UpdateSequenceGenerator::generateUpdate() const {
- double setSum = this->_config.scalarProbability + this->_config.arrProbability +
- this->_config.docProbability;
- double generateSetUpdate = this->_random.nextCanonicalDouble();
- if (generateSetUpdate <= setSum) {
- return _generateSet();
- } else {
- return _generateUnset();
- }
-}
-
-BSONObj UpdateSequenceGenerator::_generateSet() const {
- BSONObjBuilder setBuilder;
- {
- BSONObjBuilder setArgBuilder(setBuilder.subobjStart("$set"));
-
- for (auto randomPath : _getRandomPaths()) {
- _appendSetArgToBuilder(randomPath, &setArgBuilder);
- }
- }
- return setBuilder.obj();
-}
-
-UpdateSequenceGenerator::SetChoice UpdateSequenceGenerator::_determineWhatToSet(
- const std::string& setPath) const {
- if (UpdateSequenceGenerator::_getPathDepth(setPath) == this->_config.depth) {
- // If we have hit the max depth, we don't have a choice anyways.
- return SetChoice::kSetScalar;
- } else {
- double setSum = this->_config.scalarProbability + this->_config.arrProbability +
- this->_config.docProbability;
- double choice = this->_random.nextCanonicalDouble() * setSum;
- if (choice <= this->_config.scalarProbability) {
- return SetChoice::kSetScalar;
- } else if (choice <= setSum - this->_config.docProbability) {
- return SetChoice::kSetArr;
- } else {
- return SetChoice::kSetDoc;
- }
- }
-}
-
-void UpdateSequenceGenerator::_appendSetArgToBuilder(const std::string& setPath,
- BSONObjBuilder* setArgBuilder) const {
- auto setChoice = _determineWhatToSet(setPath);
- switch (setChoice) {
- case SetChoice::kSetScalar:
- this->_scalarGenerator->generateScalar().addToBsonObj(setArgBuilder, setPath);
- return;
- case SetChoice::kSetArr:
- setArgBuilder->append(setPath, _generateArrToSet(setPath));
- return;
- case SetChoice::kSetDoc:
- setArgBuilder->append(setPath, _generateDocToSet(setPath));
- return;
- case SetChoice::kNumTotalSetChoices:
- MONGO_UNREACHABLE;
- }
- MONGO_UNREACHABLE;
-}
-
-BSONObj UpdateSequenceGenerator::_generateUnset() const {
- BSONObjBuilder unsetBuilder;
- {
- BSONObjBuilder unsetArgBuilder(unsetBuilder.subobjStart("$unset"));
-
- for (auto randomPath : _getRandomPaths()) {
- unsetArgBuilder.appendNull(randomPath);
- }
- }
-
- return unsetBuilder.obj();
-}
-
-double UpdateSequenceGenerator::_generateNumericToSet() const {
- return UpdateSequenceGenerator::_random.nextCanonicalDouble() * INT_MAX;
-}
-
-bool UpdateSequenceGenerator::_generateBoolToSet() const {
- return this->_random.nextInt32(2) == 1;
-}
-
-BSONArray UpdateSequenceGenerator::_generateArrToSet(const std::string& setPath) const {
- auto enumerator = _getValidEnumeratorForPath(setPath);
-
- auto possibleArrs = enumerator.enumerateArrs();
- std::size_t randomIndex = this->_random.nextInt32(possibleArrs.size());
- auto chosenArr = possibleArrs[randomIndex];
-
- return chosenArr;
-}
-
-BSONObj UpdateSequenceGenerator::_generateDocToSet(const std::string& setPath) const {
- auto enumerator = _getValidEnumeratorForPath(setPath);
- std::size_t randomIndex = this->_random.nextInt32(enumerator.getDocs().size());
- return enumerator.getDocs()[randomIndex];
-}
-
-std::set<StringData> UpdateSequenceGenerator::_getRemainingFields(const std::string& path) const {
- std::set<StringData> remainingFields(this->_config.fields);
-
- FieldRef pathRef(path);
- StringData lastField;
- // This is guaranteed to terminate with a value for lastField, since no valid path contains only
- // array positions (numbers).
- for (int i = pathRef.numParts() - 1; i >= 0; i--) {
- auto field = pathRef.getPart(i);
- if (this->_config.fields.find(field) != this->_config.fields.end()) {
- lastField = field;
- break;
- }
- }
-
- // The last alphabetic field used must be after all other alphabetic fields that could ever be
- // used, since the fields that are used are selected in the order that they pop off from a
- // std::set.
- for (auto field : this->_config.fields) {
- remainingFields.erase(field);
- if (field == lastField) {
- break;
- }
- }
-
- return remainingFields;
-}
-
-DocumentStructureEnumerator UpdateSequenceGenerator::_getValidEnumeratorForPath(
- const std::string& path) const {
- auto remainingFields = _getRemainingFields(path);
- std::size_t remainingDepth = this->_config.depth - UpdateSequenceGenerator::_getPathDepth(path);
- if (remainingDepth > 0) {
- remainingDepth -= 1;
- }
-
- DocumentStructureEnumerator enumerator(
- {remainingFields, remainingDepth, this->_config.lengthOfNumericComponent},
- this->_scalarGenerator);
- return enumerator;
-}
-
-std::vector<std::string> UpdateSequenceGenerator::getPaths() const {
- return this->_paths;
-}
-
-UpdateSequenceGenerator::UpdateSequenceGenerator(UpdateSequenceGeneratorConfig config,
- PseudoRandom random,
- ScalarGenerator* scalarGenerator)
- : _config(std::move(config)), _random(random), _scalarGenerator(scalarGenerator) {
- auto path = "";
- _generatePaths(config, path);
- // Creates the same shuffle each time, but we don't care. We want to mess up the DFS ordering.
- std::shuffle(this->_paths.begin(), this->_paths.end(), this->_random.urbg());
-}
-
-} // namespace mongo
diff --git a/src/mongo/db/repl/idempotency_update_sequence.h b/src/mongo/db/repl/idempotency_update_sequence.h
deleted file mode 100644
index 2e98d5221f8..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include <cstddef>
-#include <set>
-#include <string>
-#include <vector>
-
-#include "mongo/base/string_data.h"
-#include "mongo/db/repl/idempotency_scalar_generator.h"
-#include "mongo/platform/random.h"
-
-namespace mongo {
-
-class DocumentStructureEnumerator;
-class BSONObj;
-struct BSONArray;
-class BSONObjBuilder;
-
-struct UpdateSequenceGeneratorConfig {
- UpdateSequenceGeneratorConfig(std::set<StringData> fields_,
- std::size_t depth_,
- std::size_t lengthOfNumericComponent_,
- double scalarProbability_ = 0.250,
- double docProbability_ = 0.250,
- double arrProbability_ = 0.250);
-
- const std::set<StringData> fields = {};
- const std::size_t depth = 0;
- const std::size_t lengthOfNumericComponent = 0;
- const double scalarProbability = 0.250;
- const double docProbability = 0.250;
- const double arrProbability = 0.250;
-};
-
-class UpdateSequenceGenerator {
-
-public:
- UpdateSequenceGenerator(UpdateSequenceGeneratorConfig config,
- PseudoRandom random,
- ScalarGenerator* scalarGenerator);
-
- BSONObj generateUpdate() const;
-
- std::vector<std::string> getPaths() const;
-
- friend std::vector<std::string> eliminatePrefixPaths_forTest(
- const std::string& path, const std::vector<std::string>& paths);
-
- friend std::size_t getPathDepth_forTest(const std::string& path);
-
-private:
- enum class SetChoice : int { kSetScalar, kSetArr, kSetDoc, kNumTotalSetChoices = 3 };
-
- static std::size_t _getPathDepth(const std::string& path);
-
- /**
- * Given a path parameter, removes all paths from a copy of the given path vector that are:
- * 1) A prefix of the given path
- * 2) Prefixable by the given path.
- *
- * This function also removes the given path itself from the given path vector, if it exists
- * inside, since a path can prefix itself and therefore qualifies for both #1 and #2 above.
- *
- * A copy of the given path vector is returned after this pruning finishes.
- */
- static std::vector<std::string> _eliminatePrefixPaths(const std::string& path,
- const std::vector<std::string>& paths);
-
- void _generatePaths(const UpdateSequenceGeneratorConfig& config, const std::string& path);
-
- std::set<StringData> _getRemainingFields(const std::string& path) const;
-
- DocumentStructureEnumerator _getValidEnumeratorForPath(const std::string& path) const;
-
- std::vector<std::string> _getRandomPaths() const;
-
- BSONObj _generateSet() const;
-
- SetChoice _determineWhatToSet(const std::string& setPath) const;
-
- void _appendSetArgToBuilder(const std::string& setPath, BSONObjBuilder* setArgBuilder) const;
-
- BSONObj _generateUnset() const;
-
- double _generateNumericToSet() const;
-
- bool _generateBoolToSet() const;
-
- BSONArray _generateArrToSet(const std::string& setPath) const;
-
- BSONObj _generateDocToSet(const std::string& setPath) const;
-
- std::vector<std::string> _paths;
- const UpdateSequenceGeneratorConfig _config;
- mutable PseudoRandom _random;
- const ScalarGenerator* _scalarGenerator;
-};
-
-} // namespace mongo
diff --git a/src/mongo/db/repl/idempotency_update_sequence_test.cpp b/src/mongo/db/repl/idempotency_update_sequence_test.cpp
deleted file mode 100644
index d46607f194b..00000000000
--- a/src/mongo/db/repl/idempotency_update_sequence_test.cpp
+++ /dev/null
@@ -1,318 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include <algorithm>
-#include <memory>
-
-#include "mongo/db/field_ref.h"
-#include "mongo/db/field_ref_set.h"
-#include "mongo/db/repl/idempotency_document_structure.h"
-#include "mongo/db/repl/idempotency_update_sequence.h"
-#include "mongo/unittest/unittest.h"
-
-namespace mongo {
-
-std::vector<std::string> eliminatePrefixPaths_forTest(const std::string& path,
- const std::vector<std::string>& paths) {
- return UpdateSequenceGenerator::_eliminatePrefixPaths(path, paths);
-}
-
-size_t getPathDepth_forTest(const std::string& path) {
- return UpdateSequenceGenerator::_getPathDepth(path);
-}
-
-namespace {
-
-PseudoRandom random(SecureRandom().nextInt64());
-
-TEST(UpdateGenTest, FindsAllPaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
-
- ASSERT_EQ(generator.getPaths().size(), 5U);
-
- std::vector<std::string> expectedPaths{"a", "a.0", "a.b", "b", "b.0"};
- std::vector<std::string> foundPaths(generator.getPaths());
- std::sort(expectedPaths.begin(), expectedPaths.end());
- std::sort(foundPaths.begin(), foundPaths.end());
- if (foundPaths != expectedPaths) {
- StringBuilder sb;
- sb << "Did not find all paths. Instead, we found: [ ";
- bool firstIter = true;
- for (auto path : foundPaths) {
- if (!firstIter) {
- sb << ", ";
- } else {
- firstIter = false;
- }
- sb << path;
- }
- sb << " ]; ";
- FAIL(sb.str());
- }
-}
-
-TEST(UpdateGenTest, NoDuplicatePaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 2;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
-
- auto paths = generator.getPaths();
- for (size_t i = 0; i < paths.size(); i++) {
- for (size_t j = i + 1; j < paths.size(); j++) {
- if (paths[i] == paths[j]) {
- StringBuilder sb;
- sb << "Outer path matched with inner path.";
- sb << generator.getPaths()[i] << " was duplicated.";
- FAIL(sb.str());
- }
- }
- }
-}
-
-TEST(UpdateGenTest, UpdatesHaveValidPaths) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
- auto update = generator.generateUpdate();
-
- BSONObj updateArg;
- if (auto setElem = update["$set"]) {
- updateArg = setElem.Obj();
- } else if (auto unsetElem = update["$unset"]) {
- updateArg = unsetElem.Obj();
- } else {
- StringBuilder sb;
- sb << "The generated update is not a $set or $unset BSONObj: " << update;
- FAIL(sb.str());
- }
-
- auto argPaths = updateArg.getFieldNames<std::set<std::string>>();
- std::set<std::string> correctPaths{"a", "b", "a.0", "a.b", "b.0"};
- for (auto path : argPaths) {
- FieldRef pathRef(path);
- StringBuilder sb;
- if (path[0] == '0' || path[0] == '1') {
- sb << "Some path (" << path << "), found in the (un)set arguments from the update "
- << update << " contains a leading array position. ";
- FAIL(sb.str());
- }
- if (correctPaths.find(path) == correctPaths.end()) {
- sb << "Some path (" << path << "), found in the (un)set arguments from the update "
- << update << " contains an invalid fieldname(s). ";
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, UpdatesAreNotAmbiguous) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator({fields, depth, length}, random, &trivialScalarGenerator);
- auto update = generator.generateUpdate();
-
- BSONObj updateArg;
- if (auto setElem = update["$set"]) {
- updateArg = setElem.Obj();
- } else if (auto unsetElem = update["$unset"]) {
- updateArg = unsetElem.Obj();
- } else {
- StringBuilder sb;
- sb << "The generated update is not a $set or $unset BSONObj: " << update;
- FAIL(sb.str());
- }
- auto argPathsSet = updateArg.getFieldNames<std::set<std::string>>();
-
- std::vector<std::unique_ptr<FieldRef>> argPathsRefVec;
- FieldRefSet pathRefSet;
- for (auto path : argPathsSet) {
- argPathsRefVec.push_back(std::make_unique<FieldRef>(path));
- const FieldRef* conflict;
- if (!pathRefSet.insert(argPathsRefVec.back().get(), &conflict)) {
- StringBuilder sb;
- sb << "Some path in the (un)set arguments of " << update
- << " causes ambiguity due to a conflict between "
- << argPathsRefVec.back()->dottedField() << " and " << conflict->dottedField();
- FAIL(sb.str());
- }
- }
-}
-
-std::size_t getMaxDepth(BSONObj obj) {
- size_t curMaxDepth = 0;
- for (auto elem : obj) {
- if (elem.type() == BSONType::Object || elem.type() == BSONType::Array) {
- curMaxDepth = std::max(curMaxDepth, 1 + getMaxDepth(elem.Obj()));
- }
- }
-
- return curMaxDepth;
-}
-
-TEST(UpdateGenTest, UpdatesPreserveDepthConstraint) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generator(
- {fields, depth, length, 0.333, 0.333, 0.334}, random, &trivialScalarGenerator);
-
- BSONElement setElem;
- BSONObj update;
- // Because our probabilities sum to 1, we are guaranteed to always get a $set.
- update = generator.generateUpdate();
- setElem = update["$set"];
- BSONObj updateArg = setElem.Obj();
-
- auto argPaths = updateArg.getFieldNames<std::set<std::string>>();
- for (auto path : argPaths) {
- auto pathDepth = getPathDepth_forTest(path);
- auto particularSetArgument = updateArg[path];
- auto embeddedArgDepth = 0;
- if (particularSetArgument.type() == BSONType::Object ||
- particularSetArgument.type() == BSONType::Array) {
- embeddedArgDepth = getMaxDepth(particularSetArgument.Obj()) + 1;
- }
-
- auto argDepth = pathDepth + embeddedArgDepth;
- if (argDepth > depth) {
- StringBuilder sb;
- sb << "The path " << path << " and its argument " << particularSetArgument
- << " exceeds the maximum depth.";
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlyGenerateUnset) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNoSet(
- {fields, depth, length, 0.0, 0.0, 0.0}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNoSet.generateUpdate();
- if (!update["$unset"]) {
- StringBuilder sb;
- sb << "Generator created an update that was not an $unset, even though the probability "
- "of doing so is zero: "
- << update;
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlySetUpdatesWithScalarValue) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 1;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNoUnsetAndOnlyScalar(
- {fields, depth, length, 1.0, 0.0, 0.0}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNoUnsetAndOnlyScalar.generateUpdate();
- if (!update["$set"]) {
- StringBuilder sb;
- sb << "Generator created an update that was not an $set, even though the probability "
- "of doing so is zero: "
- << update;
- FAIL(sb.str());
- } else if (getMaxDepth(update["$set"].Obj()) != 0) {
- StringBuilder sb;
- sb << "Generator created an update that had a nonscalar value, because it's maximum "
- "depth was nonzero: "
- << update;
- FAIL(sb.str());
- }
- }
-}
-
-TEST(UpdateGenTest, OnlySetUpdatesWithScalarsAtMaxDepth) {
- std::set<StringData> fields{"a", "b"};
- size_t depth = 2;
- size_t length = 1;
-
- TrivialScalarGenerator trivialScalarGenerator;
- UpdateSequenceGenerator generatorNeverScalar(
- {fields, depth, length, 0.0, 0.5, 0.5}, random, &trivialScalarGenerator);
-
- for (size_t i = 0; i < 100; i++) {
- auto update = generatorNeverScalar.generateUpdate();
- for (auto elem : update["$set"].Obj()) {
- StringData fieldName = elem.fieldNameStringData();
- FieldRef fieldRef(fieldName);
- size_t pathDepth = getPathDepth_forTest(fieldName.toString());
- bool isDocOrArr = elem.type() == BSONType::Object || elem.type() == BSONType::Array;
- if (pathDepth != depth) {
- // If the path is not equal to the max depth we provided above, then there
- // should
- // only be an array or doc at this point.
- if (!isDocOrArr) {
- StringBuilder sb;
- sb << "The set argument: " << elem
- << " is a scalar, but the probability of a scalar occuring for a path that "
- "does not meet the maximum depth is zero.";
- FAIL(sb.str());
- }
- } else {
- if (isDocOrArr) {
- StringBuilder sb;
- sb << "The set argument: " << elem
- << " is not scalar, however, this path reaches the maximum depth so a "
- "scalar should be the only choice.";
- FAIL(sb.str());
- }
- }
- }
- }
-}
-
-} // namespace
-} // namespace mongo
diff --git a/src/mongo/db/repl/oplog_applier_impl_test.cpp b/src/mongo/db/repl/oplog_applier_impl_test.cpp
index b734004bb28..34e5996f887 100644
--- a/src/mongo/db/repl/oplog_applier_impl_test.cpp
+++ b/src/mongo/db/repl/oplog_applier_impl_test.cpp
@@ -67,6 +67,7 @@
#include "mongo/db/session_txn_record_gen.h"
#include "mongo/db/stats/counters.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/idl/server_parameter_test_util.h"
#include "mongo/platform/mutex.h"
#include "mongo/unittest/death_test.h"
@@ -308,8 +309,12 @@ TEST_F(OplogApplierImplTestDisableSteadyStateConstraints,
applyOplogEntryOrGroupedInsertsUpdateMissingDocument) {
const NamespaceString nss("test.t");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto op = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto op = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
int prevUpdateOnMissingDoc = replOpCounters.getUpdateOnMissingDoc()->load();
_testApplyOplogEntryOrGroupedInsertsCrudOperation(ErrorCodes::OK, op, true);
auto postUpdateOnMissingDoc = replOpCounters.getUpdateOnMissingDoc()->load();
@@ -326,8 +331,12 @@ TEST_F(OplogApplierImplTestEnableSteadyStateConstraints,
applyOplogEntryOrGroupedInsertsUpdateMissingDocument) {
const NamespaceString nss("test.t");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto op = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto op = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
_testApplyOplogEntryOrGroupedInsertsCrudOperation(ErrorCodes::UpdateOperationFailed, op, false);
}
@@ -446,8 +455,10 @@ TEST_F(OplogApplierImplTest, applyOplogEntryToRecordChangeStreamPreImages) {
testCase.opType,
nss,
options.uuid,
- testCase.opType == repl::OpTypeEnum::kUpdate ? BSON("$set" << BSON("a" << 1))
- : documentId,
+ testCase.opType == repl::OpTypeEnum::kUpdate
+ ? update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}")))
+ : documentId,
{documentId},
testCase.fromMigrate);
@@ -2221,7 +2232,9 @@ TEST_F(IdempotencyTest, Geo2dsphereIndexFailedOnUpdate) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, loc: 'hi'}"));
- auto updateOp = update(1, fromjson("{$set: {loc: [1, 2]}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{loc: [1, 2]}"))));
auto indexOp =
buildIndex(fromjson("{loc: '2dsphere'}"), BSON("2dsphereIndexVersion" << 3), kUuid);
@@ -2251,7 +2264,9 @@ TEST_F(IdempotencyTest, Geo2dIndex) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, loc: [1]}"));
- auto updateOp = update(1, fromjson("{$set: {loc: [1, 2]}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{loc: [1, 2]}"))));
auto indexOp = buildIndex(fromjson("{loc: '2d'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2267,7 +2282,9 @@ TEST_F(IdempotencyTest, UniqueKeyIndex) {
ReplicationCoordinator::get(_opCtx.get())->setFollowerMode(MemberState::RS_RECOVERING));
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 5}"));
- auto updateOp = update(1, fromjson("{$set: {x: 6}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 6}"))));
auto insertOp2 = insert(fromjson("{_id: 2, x: 5}"));
auto indexOp = buildIndex(fromjson("{x: 1}"), fromjson("{unique: true}"), kUuid);
@@ -2286,9 +2303,16 @@ TEST_F(IdempotencyTest, ParallelArrayError) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
ASSERT_OK(runOpInitialSync(insert(fromjson("{_id: 1}"))));
- auto updateOp1 = update(1, fromjson("{$set: {x: [1, 2]}}"));
- auto updateOp2 = update(1, fromjson("{$set: {x: 1}}"));
- auto updateOp3 = update(1, fromjson("{$set: {y: [3, 4]}}"));
+ auto updateOp1 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{x: [1, 2]}"))));
+ auto updateOp2 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 1}"))));
+ auto updateOp3 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(BSON(
+ doc_diff::kUpdateSectionFieldName << fromjson("{y: [3, 4]}"))));
+
auto indexOp = buildIndex(fromjson("{x: 1, y: 1}"), BSONObj(), kUuid);
auto ops = {updateOp1, updateOp2, updateOp3, indexOp};
@@ -2322,7 +2346,10 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasNonStringLanguageField) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', language: 1}"));
- auto updateOp = update(1, fromjson("{$unset: {language: 1}}"));
+ auto updateOp =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{language: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2352,7 +2379,9 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasNonStringLanguageOverrideField) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', y: 1}"));
- auto updateOp = update(1, fromjson("{$unset: {y: 1}}"));
+ auto updateOp = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{y: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), fromjson("{language_override: 'y'}"), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2382,7 +2411,10 @@ TEST_F(IdempotencyTest, TextIndexDocumentHasUnknownLanguage) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
auto insertOp = insert(fromjson("{_id: 1, x: 'words to index', language: 'bad'}"));
- auto updateOp = update(1, fromjson("{$unset: {language: 1}}"));
+ auto updateOp =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{language: false}"))));
auto indexOp = buildIndex(fromjson("{x: 'text'}"), BSONObj(), kUuid);
auto ops = {insertOp, updateOp, indexOp};
@@ -2441,7 +2473,9 @@ TEST_F(IdempotencyTest, CreateCollectionWithCollation) {
auto createColl = makeCreateCollectionOplogEntry(nextOpTime(), nss, options);
auto insertOp1 = insert(fromjson("{ _id: 'foo' }"));
auto insertOp2 = insert(fromjson("{ _id: 'Foo', x: 1 }"));
- auto updateOp = update("foo", BSON("$set" << BSON("x" << 2)));
+ auto updateOp = update("foo",
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 2}"))));
// We don't drop and re-create the collection since we don't have ways
// to wait until second-phase drop to completely finish.
@@ -2888,13 +2922,15 @@ TEST_F(OplogApplierImplTxnTableTest, InterleavedWriteWithTxnMixedWithDirectUpdat
date);
repl::OpTime newWriteOpTime(Timestamp(2, 0), 1);
- auto updateOp = makeOplogEntry(NamespaceString::kSessionTransactionsTableNamespace,
- {Timestamp(4, 0), 1},
- repl::OpTypeEnum::kUpdate,
- BSON("$set" << BSON("lastWriteOpTime" << newWriteOpTime)),
- BSON("_id" << sessionInfo.getSessionId()->toBSON()),
- {},
- Date_t::now());
+ auto updateOp = makeOplogEntry(
+ NamespaceString::kSessionTransactionsTableNamespace,
+ {Timestamp(4, 0), 1},
+ repl::OpTypeEnum::kUpdate,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("lastWriteOpTime" << newWriteOpTime))),
+ BSON("_id" << sessionInfo.getSessionId()->toBSON()),
+ {},
+ Date_t::now());
auto writerPool = makeReplWriterPool();
NoopOplogApplierObserver observer;
@@ -3292,10 +3328,16 @@ TEST_F(IdempotencyTest, UpdateTwoFields) {
ASSERT_OK(runOpInitialSync(createCollection(kUuid)));
ASSERT_OK(runOpInitialSync(insert(fromjson("{_id: 1, y: [0]}"))));
-
- auto updateOp1 = update(1, fromjson("{$set: {x: 1}}"));
- auto updateOp2 = update(1, fromjson("{$set: {x: 2, 'y.0': 2}}"));
- auto updateOp3 = update(1, fromjson("{$set: {y: 3}}"));
+ auto updateOp1 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 1}"))));
+ auto updateOp2 =
+ update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{x: 2, 'y.0': 2}"))));
+ auto updateOp3 = update(1,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{y: 3}"))));
auto ops = {updateOp1, updateOp2, updateOp3};
testOpsAreIdempotent(ops);
diff --git a/src/mongo/db/repl/replication_recovery_test.cpp b/src/mongo/db/repl/replication_recovery_test.cpp
index 40faf40552a..5d7bc3264df 100644
--- a/src/mongo/db/repl/replication_recovery_test.cpp
+++ b/src/mongo/db/repl/replication_recovery_test.cpp
@@ -51,6 +51,7 @@
#include "mongo/db/storage/storage_options.h"
#include "mongo/db/storage/storage_parameters_gen.h"
#include "mongo/db/transaction_participant.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/unittest/death_test.h"
#include "mongo/unittest/log_test.h"
#include "mongo/unittest/unittest.h"
@@ -829,7 +830,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 1), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -845,7 +849,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 2), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -861,7 +868,10 @@ TEST_F(ReplicationRecoveryTest, RecoveryAppliesUpdatesIdempotently) {
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(ts, BSON("_id" << 3), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(ts,
+ BSON("_id" << 3),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(ts, ts)},
@@ -886,7 +896,10 @@ DEATH_TEST_F(ReplicationRecoveryTest, RecoveryFailsWithBadOp, "terminate() calle
ASSERT_OK(getStorageInterface()->insertDocument(
opCtx,
oplogNs,
- {_makeUpdateOplogEntry(2, BSON("bad_op" << 1), BSON("$set" << BSON("a" << 7)))
+ {_makeUpdateOplogEntry(2,
+ BSON("bad_op" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 7}"))))
.getEntry()
.toBSON(),
Timestamp(2, 2)},
diff --git a/src/mongo/db/repl/session_update_tracker.cpp b/src/mongo/db/repl/session_update_tracker.cpp
index 4f695ae7630..175c8ffc07e 100644
--- a/src/mongo/db/repl/session_update_tracker.cpp
+++ b/src/mongo/db/repl/session_update_tracker.cpp
@@ -38,6 +38,7 @@
#include "mongo/db/session.h"
#include "mongo/db/session_txn_record_gen.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/logv2/log.h"
#include "mongo/util/assert_util.h"
@@ -343,9 +344,10 @@ boost::optional<OplogEntry> SessionUpdateTracker::_createTransactionTableUpdateF
// The prepare oplog entry is the first operation of the transaction.
newTxnRecord.setStartOpTime(entry.getOpTime());
} else {
- // Update the transaction record using $set to avoid overwriting the
- // startOpTime.
- return BSON("$set" << newTxnRecord.toBSON());
+ // Update the transaction record using a delta oplog entry to avoid
+ // overwriting the startOpTime.
+ return update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << newTxnRecord.toBSON()));
}
} else {
newTxnRecord.setState(DurableTxnStateEnum::kCommitted);
diff --git a/src/mongo/db/repl/storage_timestamp_test.cpp b/src/mongo/db/repl/storage_timestamp_test.cpp
index fb9325c1978..2ec52d2ffec 100644
--- a/src/mongo/db/repl/storage_timestamp_test.cpp
+++ b/src/mongo/db/repl/storage_timestamp_test.cpp
@@ -87,6 +87,7 @@
#include "mongo/db/storage/storage_engine_impl.h"
#include "mongo/db/transaction_participant.h"
#include "mongo/db/transaction_participant_gen.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/db/vector_clock_mutable.h"
#include "mongo/dbtests/dbtests.h"
#include "mongo/idl/server_parameter_test_util.h"
@@ -987,17 +988,34 @@ TEST_F(StorageTimestampTest, SecondaryUpdateTimes) {
// clock. `pair.first` is the update to perform and `pair.second` is the full value of the
// document after the transformation.
const std::vector<std::pair<BSONObj, BSONObj>> updates = {
- {BSON("$set" << BSON("val" << 1)), BSON("_id" << 0 << "val" << 1)},
- {BSON("$unset" << BSON("val" << 1)), BSON("_id" << 0)},
- {BSON("$addToSet" << BSON("theSet" << 1)), BSON("_id" << 0 << "theSet" << BSON_ARRAY(1))},
- {BSON("$addToSet" << BSON("theSet" << 2)),
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{val: 1}"))),
+ BSON("_id" << 0 << "val" << 1)},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kDeleteSectionFieldName << fromjson("{val: false}"))),
+ BSON("_id" << 0)},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{theSet: [1]}"))),
+ BSON("_id" << 0 << "theSet" << BSON_ARRAY(1))},
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(
+ "stheSet" << BSON(doc_diff::kArrayHeader << true << doc_diff::kResizeSectionFieldName
+ << 2 << "u1" << 2))),
BSON("_id" << 0 << "theSet" << BSON_ARRAY(1 << 2))},
- {BSON("$pull" << BSON("theSet" << 1)), BSON("_id" << 0 << "theSet" << BSON_ARRAY(2))},
- {BSON("$pull" << BSON("theSet" << 2)), BSON("_id" << 0 << "theSet" << BSONArray())},
- {BSON("$set" << BSON("theMap.val" << 1)),
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(
+ "stheSet" << BSON(doc_diff::kArrayHeader << true << doc_diff::kResizeSectionFieldName
+ << 1 << "u0" << 2))),
+ BSON("_id" << 0 << "theSet" << BSON_ARRAY(2))},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON("stheSet" << BSON(doc_diff::kArrayHeader
+ << true << doc_diff::kResizeSectionFieldName << 0))),
+ BSON("_id" << 0 << "theSet" << BSONArray())},
+ {update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{theMap: {val: 1}}"))),
BSON("_id" << 0 << "theSet" << BSONArray() << "theMap" << BSON("val" << 1))},
- {BSON("$rename" << BSON("theSet"
- << "theOtherSet")),
+ {update_oplog_entry::makeDeltaOplogEntry(BSON(doc_diff::kDeleteSectionFieldName
+ << fromjson("{theSet: false}")
+ << doc_diff::kUpdateSectionFieldName
+ << fromjson("{theOtherSet: []}"))),
BSON("_id" << 0 << "theMap" << BSON("val" << 1) << "theOtherSet" << BSONArray())}};
const LogicalTime firstUpdateTime = _clock->tickClusterTime(updates.size());
@@ -1544,9 +1562,9 @@ TEST_F(StorageTimestampTest, SecondarySetWildcardIndexMultikeyOnUpdate) {
const LogicalTime updateTime1 = _clock->tickClusterTime(1);
const LogicalTime updateTime2 = _clock->tickClusterTime(1);
- BSONObj doc0 = BSON("_id" << 0 << "a" << 3);
- BSONObj doc1 = BSON("$v" << 1 << "$set" << BSON("a" << BSON_ARRAY(1 << 2)));
- BSONObj doc2 = BSON("$v" << 1 << "$set" << BSON("a" << BSON_ARRAY(1 << 2)));
+ BSONObj doc0 = fromjson("{_id: 0, a: 3}");
+ BSONObj doc1 = fromjson("{$v: 2, diff: {u: {a: [1,2]}}}");
+ BSONObj doc2 = fromjson("{$v: 2, diff: {u: {a: [1,2]}}}");
auto op0 = repl::OplogEntry(
BSON("ts" << insertTime0.asTimestamp() << "t" << 1LL << "v" << 2 << "op"
<< "i"
diff --git a/src/mongo/db/repl/tenant_oplog_applier_test.cpp b/src/mongo/db/repl/tenant_oplog_applier_test.cpp
index 864960d84d7..bdafadb1fb1 100644
--- a/src/mongo/db/repl/tenant_oplog_applier_test.cpp
+++ b/src/mongo/db/repl/tenant_oplog_applier_test.cpp
@@ -27,7 +27,6 @@
* it in the license file.
*/
-
#include "mongo/platform/basic.h"
#include <algorithm>
@@ -52,13 +51,13 @@
#include "mongo/db/service_context_test_fixture.h"
#include "mongo/db/session_catalog_mongod.h"
#include "mongo/db/tenant_id.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/executor/thread_pool_task_executor_test_fixture.h"
#include "mongo/logv2/log.h"
#include "mongo/unittest/log_test.h"
#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kTest
-
namespace mongo {
using executor::TaskExecutor;
@@ -662,8 +661,12 @@ TEST_F(TenantOplogApplierTest, ApplyInserts_Grouped) {
TEST_F(TenantOplogApplierTest, ApplyUpdate_MissingDocument) {
NamespaceString nss(_dbName.toStringWithTenantId(), "bar");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
- auto entry = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto entry = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
bool onInsertsCalled = false;
bool onUpdateCalled = false;
_opObserver->onInsertsFn = [&](OperationContext* opCtx,
@@ -698,8 +701,12 @@ TEST_F(TenantOplogApplierTest, ApplyUpdate_Success) {
NamespaceString nss(_dbName.toStringWithTenantId(), "bar");
auto uuid = createCollectionWithUuid(_opCtx.get(), nss);
ASSERT_OK(getStorageInterface()->insertDocument(_opCtx.get(), nss, {BSON("_id" << 0)}, 0));
- auto entry = makeOplogEntry(
- repl::OpTypeEnum::kUpdate, nss, uuid, BSON("$set" << BSON("a" << 1)), BSON("_id" << 0));
+ auto entry = makeOplogEntry(repl::OpTypeEnum::kUpdate,
+ nss,
+ uuid,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << fromjson("{a: 1}"))),
+ BSON("_id" << 0));
bool onUpdateCalled = false;
_opObserver->onUpdateFn = [&](OperationContext* opCtx, const OplogUpdateEntryArgs& args) {
onUpdateCalled = true;
diff --git a/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp b/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
index d2313684ff9..4e6c32ac1e2 100644
--- a/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
+++ b/src/mongo/db/s/resharding/resharding_oplog_applier_test.cpp
@@ -48,6 +48,7 @@
#include "mongo/db/s/sharding_state.h"
#include "mongo/db/session_catalog_mongod.h"
#include "mongo/db/transaction_participant.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/db/vector_clock_metadata_hook.h"
#include "mongo/executor/network_interface_factory.h"
#include "mongo/executor/thread_pool_task_executor_test_fixture.h"
@@ -398,7 +399,8 @@ TEST_F(ReshardingOplogApplierTest, ApplyBasicCrud) {
boost::none));
crudOps.push_back(makeOplog(repl::OpTime(Timestamp(7, 3), 1),
repl::OpTypeEnum::kUpdate,
- BSON("$set" << BSON("x" << 1)),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))),
BSON("_id" << 2)));
crudOps.push_back(makeOplog(repl::OpTime(Timestamp(8, 3), 1),
repl::OpTypeEnum::kDelete,
@@ -536,7 +538,7 @@ TEST_F(ReshardingOplogApplierTest, ErrorDuringFirstBatchApply) {
auto cancelToken = operationContext()->getCancellationToken();
CancelableOperationContextFactory factory(cancelToken, getCancelableOpCtxExecutor());
auto future = applier->run(getExecutor(), getExecutor(), cancelToken, factory);
- ASSERT_EQ(future.getNoThrow(), ErrorCodes::FailedToParse);
+ ASSERT_EQ(future.getNoThrow(), ErrorCodes::duplicateCodeForTest(4772600));
DBDirectClient client(operationContext());
auto doc = client.findOne(appliedToNs(), BSON("_id" << 1));
@@ -579,7 +581,7 @@ TEST_F(ReshardingOplogApplierTest, ErrorDuringSecondBatchApply) {
auto cancelToken = operationContext()->getCancellationToken();
CancelableOperationContextFactory factory(cancelToken, getCancelableOpCtxExecutor());
auto future = applier->run(getExecutor(), getExecutor(), cancelToken, factory);
- ASSERT_EQ(future.getNoThrow(), ErrorCodes::FailedToParse);
+ ASSERT_EQ(future.getNoThrow(), ErrorCodes::duplicateCodeForTest(4772600));
DBDirectClient client(operationContext());
auto doc = client.findOne(appliedToNs(), BSON("_id" << 1));
@@ -834,7 +836,11 @@ TEST_F(ReshardingOplogApplierTest, MetricsAreReported) {
std::deque<repl::OplogEntry>{
easyOp(5, OpT::kDelete, BSON("_id" << 1)),
easyOp(6, OpT::kInsert, BSON("_id" << 2)),
- easyOp(7, OpT::kUpdate, BSON("$set" << BSON("x" << 1)), BSON("_id" << 2)),
+ easyOp(7,
+ OpT::kUpdate,
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))),
+ BSON("_id" << 2)),
easyOp(8, OpT::kDelete, BSON("_id" << 1)),
easyOp(9, OpT::kInsert, BSON("_id" << 3))},
2);
diff --git a/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp b/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
index 9c09f5ebcf0..19fb3864915 100644
--- a/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
+++ b/src/mongo/db/s/resharding/resharding_oplog_crud_application_test.cpp
@@ -47,6 +47,7 @@
#include "mongo/db/s/sharding_state.h"
#include "mongo/db/service_context_d_test_fixture.h"
#include "mongo/db/session_catalog_mongod.h"
+#include "mongo/db/update/update_oplog_entry_serialization.h"
#include "mongo/s/catalog/type_chunk.h"
#include "mongo/s/chunk_manager.h"
#include "mongo/unittest/unittest.h"
@@ -456,7 +457,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpModifiesStashCollectionAfterI
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 0), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 0),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// We should have applied rule #1 and updated the document with {_id: 0} in the stash collection
@@ -488,7 +492,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpIsNoopWhenDifferentOwningDono
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 0), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 0),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// The document {_id: 0, sk: -1} that exists in the output collection does not belong to this
@@ -504,7 +511,10 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpIsNoopWhenDifferentOwningDono
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 2), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
}
// There does not exist a document with {_id: 2} in the output collection, so we should have
@@ -535,9 +545,15 @@ TEST_F(ReshardingOplogCrudApplicationTest, UpdateOpModifiesOutputCollection) {
{
auto opCtx = makeOperationContext();
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 1), BSON("$set" << BSON("x" << 1)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 1),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 1))))));
ASSERT_OK(applier()->applyOperation(
- opCtx.get(), makeUpdateOp(BSON("_id" << 2), BSON("$set" << BSON("x" << 2)))));
+ opCtx.get(),
+ makeUpdateOp(BSON("_id" << 2),
+ update_oplog_entry::makeDeltaOplogEntry(
+ BSON(doc_diff::kUpdateSectionFieldName << BSON("x" << 2))))));
}
// We should have updated both documents in the output collection to include the new field "x".
diff --git a/src/mongo/db/update/SConscript b/src/mongo/db/update/SConscript
index f08b7cd6553..0fed04974ab 100644
--- a/src/mongo/db/update/SConscript
+++ b/src/mongo/db/update/SConscript
@@ -26,7 +26,6 @@ env.Library(
'field_checker.cpp',
'path_support.cpp',
'storage_validation.cpp',
- 'v1_log_builder.cpp',
'v2_log_builder.cpp',
'update_oplog_entry_serialization.cpp',
],
@@ -158,7 +157,6 @@ env.CppUnitTest(
'update_driver_test.cpp',
'update_object_node_test.cpp',
'update_serialization_test.cpp',
- 'v1_log_builder_test.cpp',
'v2_log_builder_test.cpp',
],
LIBDEPS=[
diff --git a/src/mongo/db/update/addtoset_node_test.cpp b/src/mongo/db/update/addtoset_node_test.cpp
index d2b7f90be65..c09d1eb71e5 100644
--- a/src/mongo/db/update/addtoset_node_test.cpp
+++ b/src/mongo/db/update/addtoset_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using AddToSetNodeTest = UpdateNodeTest;
+using AddToSetNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -136,7 +136,7 @@ TEST_F(AddToSetNodeTest, ApplyNonEach) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -156,8 +156,7 @@ TEST_F(AddToSetNodeTest, ApplyNonEachArray) {
ASSERT_EQUALS(fromjson("{a: [0, [1]]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, [1]]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, [1]]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, [1]]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -176,8 +175,7 @@ TEST_F(AddToSetNodeTest, ApplyEach) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -196,7 +194,7 @@ TEST_F(AddToSetNodeTest, ApplyToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -215,7 +213,7 @@ TEST_F(AddToSetNodeTest, ApplyDeduplicateElementsToAdd) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -235,7 +233,7 @@ TEST_F(AddToSetNodeTest, ApplyDoNotAddExistingElements) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -255,8 +253,7 @@ TEST_F(AddToSetNodeTest, ApplyDoNotDeduplicateExistingElements) {
ASSERT_EQUALS(fromjson("{a: [0, 0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 0, 1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -313,7 +310,7 @@ TEST_F(AddToSetNodeTest, ApplyCreateArray) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {i: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -332,7 +329,7 @@ TEST_F(AddToSetNodeTest, ApplyCreateEmptyArrayIsNotNoop) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {i: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: []}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -354,8 +351,7 @@ TEST_F(AddToSetNodeTest, ApplyDeduplicationOfElementsToAddRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: ['abc', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['abc', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -377,8 +373,7 @@ TEST_F(AddToSetNodeTest, ApplyComparisonToExistingElementsRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: ['ABC', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['ABC', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['ABC', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['ABC', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -401,8 +396,7 @@ TEST_F(AddToSetNodeTest, ApplyRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: ['abc', 'def']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['abc', 'def']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['abc', 'def']}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -447,8 +441,7 @@ TEST_F(AddToSetNodeTest, ApplyNestedArray) {
ASSERT_EQUALS(fromjson("{ _id : 1, a : [1, [1]] }"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{ $set : { 'a.1' : [1] } }"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: [1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -467,7 +460,7 @@ TEST_F(AddToSetNodeTest, ApplyIndexesNotAffected) {
ASSERT_FALSE(result.indexesAffected);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1]}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
diff --git a/src/mongo/db/update/arithmetic_node_test.cpp b/src/mongo/db/update/arithmetic_node_test.cpp
index 3cb73d8f8e7..9aeb06c5b88 100644
--- a/src/mongo/db/update/arithmetic_node_test.cpp
+++ b/src/mongo/db/update/arithmetic_node_test.cpp
@@ -42,9 +42,7 @@
namespace mongo {
namespace {
-using ArithmeticNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using ArithmeticNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(ArithmeticNodeTest,
InitFailsForEmptyElement,
@@ -185,7 +183,7 @@ TEST_F(ArithmeticNodeTest, ApplyEmptyPathToCreate) {
ASSERT_EQUALS(fromjson("{a: 11}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 11}}"), fromjson("{$v: 2, diff: {u: {a: 11}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 11}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -205,8 +203,7 @@ TEST_F(ArithmeticNodeTest, ApplyCreatePath) {
ASSERT_EQUALS(fromjson("{a: {d: 5, b: {c: 6}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c}");
}
@@ -243,7 +240,7 @@ TEST_F(ArithmeticNodeTest, ApplyCreatePathFromRoot) {
ASSERT_EQUALS(fromjson("{c: 5, a: {b: 6}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6}}"), fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -263,8 +260,7 @@ TEST_F(ArithmeticNodeTest, ApplyPositional) {
ASSERT_EQUALS(fromjson("{a: [0, 7, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 7}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 7}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 7}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -389,8 +385,7 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromIntToDecimalIsNotANoOp) {
ASSERT_EQUALS(fromjson("{a: NumberDecimal(\"5.0\")}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: NumberDecimal('5.0')}}"),
- fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -409,8 +404,7 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromLongToDecimalIsNotANoOp) {
ASSERT_EQUALS(fromjson("{a: NumberDecimal(\"5.0\")}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: NumberDecimal('5.0')}}"),
- fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.0')}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -430,7 +424,6 @@ TEST_F(ArithmeticNodeTest, TypePromotionFromDoubleToDecimalIsNotANoOp) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: NumberDecimal('5.25')}}"),
fromjson("{$v: 2, diff: {u: {a: NumberDecimal('5.25')}}}"),
false // Not checking binary equality because the NumberDecimal in the expected output may
// not be bitwise identical to the result produced by the update system.
@@ -471,7 +464,6 @@ TEST_F(ArithmeticNodeTest, IncrementedDecimalStaysDecimal) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: NumberDecimal('11.5')}}"),
fromjson("{$v: 2, diff: {u: {a: NumberDecimal('11.5')}}}"),
false // Not checking binary equality because the NumberDecimal in the expected output may
// not be bitwise identical to the result produced by the update system.
@@ -696,7 +688,7 @@ TEST_F(ArithmeticNodeTest, ApplyEmptyIndexData) {
ASSERT_EQUALS(fromjson("{a: 3}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 3}}"), fromjson("{$v: 2, diff: {u: {a: 3}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 3}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1029,8 +1021,7 @@ TEST_F(ArithmeticNodeTest, ApplyLogDottedPath) {
ASSERT_EQUALS(fromjson("{a: [{b:0}, {b:1}, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1047,8 +1038,7 @@ TEST_F(ArithmeticNodeTest, LogEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [null, null, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1065,8 +1055,7 @@ TEST_F(ArithmeticNodeTest, LogEmptyObject) {
ASSERT_EQUALS(fromjson("{a: {'2': {b: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.2.b}");
}
@@ -1087,7 +1076,7 @@ TEST_F(ArithmeticNodeTest, ApplyDeserializedDocNotNoOp) {
ASSERT_EQUALS(fromjson("{a: 1, b: NumberInt(0)}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: NumberInt(0)}}"), fromjson("{$v: 2, diff: {i: {b: 0}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 0}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{b}");
}
@@ -1150,7 +1139,7 @@ TEST_F(ArithmeticNodeTest, ApplyToDeserializedDocNestedNotNoop) {
ASSERT_EQUALS(fromjson("{a: {b: 3}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 3}}"), fromjson("{$v: 2, diff: {sa: {u: {b: 3}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 3}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
diff --git a/src/mongo/db/update/bit_node_test.cpp b/src/mongo/db/update/bit_node_test.cpp
index e3903c6a301..0e218ebc29e 100644
--- a/src/mongo/db/update/bit_node_test.cpp
+++ b/src/mongo/db/update/bit_node_test.cpp
@@ -42,9 +42,7 @@
namespace mongo {
namespace {
-using BitNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using BitNodeTest = UpdateTestFixture;
TEST(BitNodeTest, InitWithDoubleFails) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
@@ -164,7 +162,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentAnd) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v: 2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 0}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentOr) {
@@ -180,7 +178,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentOr) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v: 2, diff: {i: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 1}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentXor) {
@@ -196,7 +194,7 @@ TEST_F(BitNodeTest, ApplyAndLogEmptyDocumentXor) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v: 2, diff: {i: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 1}}}"));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentAnd) {
@@ -212,8 +210,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentAnd) {
ASSERT_EQUALS(BSON("a" << 0b0100), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0100)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0100))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0100))));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentOr) {
@@ -229,8 +226,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentOr) {
ASSERT_EQUALS(BSON("a" << 0b0111), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0111)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0111))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0111))));
}
TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentXor) {
@@ -246,8 +242,7 @@ TEST_F(BitNodeTest, ApplyAndLogSimpleDocumentXor) {
ASSERT_EQUALS(BSON("a" << 0b0011), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0011)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0011))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0011))));
}
TEST_F(BitNodeTest, ApplyShouldReportNoOp) {
@@ -284,8 +279,7 @@ TEST_F(BitNodeTest, ApplyMultipleBitOps) {
ASSERT_EQUALS(BSON("a" << 0b0101011001100110), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b0101011001100110)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0101011001100110))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b0101011001100110))));
}
TEST_F(BitNodeTest, ApplyRepeatedBitOps) {
@@ -301,8 +295,7 @@ TEST_F(BitNodeTest, ApplyRepeatedBitOps) {
ASSERT_EQUALS(BSON("a" << 0b10010110), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(BSON("$set" << BSON("a" << 0b10010110)),
- BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b10010110))));
+ assertOplogEntry(BSON("$v" << 2 << "diff" << BSON("u" << BSON("a" << 0b10010110))));
}
} // namespace
diff --git a/src/mongo/db/update/compare_node_test.cpp b/src/mongo/db/update/compare_node_test.cpp
index e3a608ea535..367525b1356 100644
--- a/src/mongo/db/update/compare_node_test.cpp
+++ b/src/mongo/db/update/compare_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using CompareNodeTest = UpdateNodeTest;
+using CompareNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -197,7 +197,7 @@ TEST_F(CompareNodeTest, ApplyMissingFieldMinNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {i: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingNumberMinNumber) {
@@ -215,7 +215,7 @@ TEST_F(CompareNodeTest, ApplyExistingNumberMinNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {u: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyMissingFieldMaxNumber) {
@@ -233,7 +233,7 @@ TEST_F(CompareNodeTest, ApplyMissingFieldMaxNumber) {
ASSERT_EQUALS(fromjson("{a: 0}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 0}}"), fromjson("{$v:2, diff: {i: {a: 0}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {i: {a: 0}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingNumberMaxNumber) {
@@ -251,7 +251,7 @@ TEST_F(CompareNodeTest, ApplyExistingNumberMaxNumber) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v:2, diff: {u: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 2}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingDateMaxDate) {
@@ -269,8 +269,7 @@ TEST_F(CompareNodeTest, ApplyExistingDateMaxDate) {
ASSERT_EQUALS(fromjson("{a: {$date: 123123123}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {$date: 123123123}}}"),
- fromjson("{$v:2, diff: {u: {a: {$date: 123123123}}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: {$date: 123123123}}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxDoc) {
@@ -288,7 +287,7 @@ TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxDoc) {
ASSERT_EQUALS(fromjson("{a: {b: 3}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 3}}}"), fromjson("{$v:2, diff: {u: {a: {b: 3}}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: {b: 3}}}}"));
}
TEST_F(CompareNodeTest, ApplyExistingEmbeddedDocMaxNumber) {
@@ -327,7 +326,7 @@ TEST_F(CompareNodeTest, ApplyMinRespectsCollation) {
ASSERT_EQUALS(fromjson("{a: 'dba'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'dba'}}"), fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
}
TEST_F(CompareNodeTest, ApplyMinRespectsCollationFromSetCollator) {
@@ -349,7 +348,7 @@ TEST_F(CompareNodeTest, ApplyMinRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: 'dba'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'dba'}}"), fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'dba'}}}"));
}
TEST_F(CompareNodeTest, ApplyMaxRespectsCollationFromSetCollator) {
@@ -371,7 +370,7 @@ TEST_F(CompareNodeTest, ApplyMaxRespectsCollationFromSetCollator) {
ASSERT_EQUALS(fromjson("{a: 'abd'}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 'abd'}}"), fromjson("{$v:2, diff: {u: {a: 'abd'}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 'abd'}}}"));
}
DEATH_TEST_REGEX(CompareNodeTest,
@@ -415,7 +414,7 @@ TEST_F(CompareNodeTest, ApplyIndexesNotAffected) {
ASSERT_EQUALS(fromjson("{a: 1}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 1}}"), fromjson("{$v:2, diff: {u: {a: 1}}}"));
+ assertOplogEntry(fromjson("{$v:2, diff: {u: {a: 1}}}"));
}
TEST_F(CompareNodeTest, ApplyNoIndexDataOrLogBuilder) {
diff --git a/src/mongo/db/update/current_date_node_test.cpp b/src/mongo/db/update/current_date_node_test.cpp
index 95a1fcb2b9e..540a4b26f3b 100644
--- a/src/mongo/db/update/current_date_node_test.cpp
+++ b/src/mongo/db/update/current_date_node_test.cpp
@@ -43,24 +43,14 @@ namespace mongo {
namespace {
void assertOplogEntryIsUpdateOfExpectedType(const BSONObj& obj,
- bool v2LogBuilderUsed,
StringData fieldName,
BSONType expectedType = BSONType::Date) {
- if (v2LogBuilderUsed) {
- ASSERT_EQUALS(obj.nFields(), 2);
- ASSERT_EQUALS(obj["$v"].numberInt(), 2);
- ASSERT_EQUALS(obj["diff"]["u"][fieldName].type(), expectedType);
- } else {
- ASSERT_EQUALS(obj.nFields(), 1);
- ASSERT_TRUE(obj["$set"].type() == BSONType::Object);
- ASSERT_EQUALS(obj["$set"].embeddedObject().nFields(), 1U);
- ASSERT_EQUALS(obj["$set"][fieldName].type(), expectedType);
- }
+ ASSERT_EQUALS(obj.nFields(), 2);
+ ASSERT_EQUALS(obj["$v"].numberInt(), 2);
+ ASSERT_EQUALS(obj["diff"]["u"][fieldName].type(), expectedType);
}
-using CurrentDateNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using CurrentDateNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(CurrentDateNodeTest,
InitFailsForEmptyElement,
@@ -158,7 +148,7 @@ TEST_F(CurrentDateNodeTest, ApplyTrue) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyFalse) {
@@ -178,7 +168,7 @@ TEST_F(CurrentDateNodeTest, ApplyFalse) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyDate) {
@@ -198,7 +188,7 @@ TEST_F(CurrentDateNodeTest, ApplyDate) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyTimestamp) {
@@ -218,8 +208,7 @@ TEST_F(CurrentDateNodeTest, ApplyTimestamp) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::bsonTimestamp);
- assertOplogEntryIsUpdateOfExpectedType(
- getOplogEntry(), v2LogBuilderUsed(), "a", BSONType::bsonTimestamp);
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a", BSONType::bsonTimestamp);
}
TEST_F(CurrentDateNodeTest, ApplyFieldDoesNotExist) {
@@ -239,14 +228,9 @@ TEST_F(CurrentDateNodeTest, ApplyFieldDoesNotExist) {
ASSERT_TRUE(doc.root()["a"].ok());
ASSERT_EQUALS(doc.root()["a"].getType(), BSONType::Date);
- if (v2LogBuilderUsed()) {
- ASSERT_EQUALS(getOplogEntry().nFields(), 2);
- ASSERT_EQUALS(getOplogEntry()["$v"].numberInt(), 2);
- ASSERT_EQUALS(getOplogEntry()["diff"]["i"]["a"].type(), BSONType::Date);
- } else {
- ASSERT_EQUALS(getOplogEntry().nFields(), 1);
- ASSERT_EQUALS(getOplogEntry()["$set"]["a"].type(), BSONType::Date);
- }
+ ASSERT_EQUALS(getOplogEntry().nFields(), 2);
+ ASSERT_EQUALS(getOplogEntry()["$v"].numberInt(), 2);
+ ASSERT_EQUALS(getOplogEntry()["diff"]["i"]["a"].type(), BSONType::Date);
}
TEST_F(CurrentDateNodeTest, ApplyIndexesNotAffected) {
@@ -262,7 +246,7 @@ TEST_F(CurrentDateNodeTest, ApplyIndexesNotAffected) {
ASSERT_FALSE(result.noop);
ASSERT_FALSE(result.indexesAffected);
- assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), v2LogBuilderUsed(), "a");
+ assertOplogEntryIsUpdateOfExpectedType(getOplogEntry(), "a");
}
TEST_F(CurrentDateNodeTest, ApplyNoIndexDataOrLogBuilder) {
diff --git a/src/mongo/db/update/object_replace_executor_test.cpp b/src/mongo/db/update/object_replace_executor_test.cpp
index 68955ac3ba9..3785aa94d5b 100644
--- a/src/mongo/db/update/object_replace_executor_test.cpp
+++ b/src/mongo/db/update/object_replace_executor_test.cpp
@@ -41,7 +41,7 @@
namespace mongo {
namespace {
-using ObjectReplaceExecutorTest = UpdateNodeTest;
+using ObjectReplaceExecutorTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
diff --git a/src/mongo/db/update/object_transform_executor_test.cpp b/src/mongo/db/update/object_transform_executor_test.cpp
index 06cca4da7cd..23b9d52ff6d 100644
--- a/src/mongo/db/update/object_transform_executor_test.cpp
+++ b/src/mongo/db/update/object_transform_executor_test.cpp
@@ -40,7 +40,7 @@
namespace mongo {
namespace {
-using ObjectTransformExecutorTest = UpdateNodeTest;
+using ObjectTransformExecutorTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
diff --git a/src/mongo/db/update/pipeline_executor_test.cpp b/src/mongo/db/update/pipeline_executor_test.cpp
index c650e06749d..02fde0b0865 100644
--- a/src/mongo/db/update/pipeline_executor_test.cpp
+++ b/src/mongo/db/update/pipeline_executor_test.cpp
@@ -43,50 +43,7 @@
namespace mongo {
namespace {
-/**
- * Harness for running the tests with both $v:2 oplog entries enabled and disabled.
- */
-class PipelineExecutorTest : public UpdateNodeTest {
-public:
- void resetApplyParams() override {
- UpdateNodeTest::resetApplyParams();
- }
-
- UpdateExecutor::ApplyParams getApplyParams(mutablebson::Element element) override {
- auto applyParams = UpdateNodeTest::getApplyParams(element);
-
- // Use the same parameters as the parent test fixture, but make sure a v2 log builder
- // is provided and a normal log builder is not.
- applyParams.logMode = _allowDeltaOplogEntries
- ? ApplyParams::LogMode::kGenerateOplogEntry
- : ApplyParams::LogMode::kGenerateOnlyV1OplogEntry;
- return applyParams;
- }
-
- void run() {
- _allowDeltaOplogEntries = false;
- UpdateNodeTest::run();
- _allowDeltaOplogEntries = true;
- UpdateNodeTest::run();
- }
-
- bool deltaOplogEntryAllowed() const {
- return _allowDeltaOplogEntries;
- }
-
-protected:
- bool _allowDeltaOplogEntries = false;
-};
-
-class PipelineExecutorV2ModeTest : public PipelineExecutorTest {
-public:
- void run() {
- _allowDeltaOplogEntries = true;
- UpdateNodeTest::run();
- }
-};
-
-TEST_F(PipelineExecutorTest, Noop) {
+TEST_F(UpdateTestFixture, Noop) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}")};
@@ -101,7 +58,7 @@ TEST_F(PipelineExecutorTest, Noop) {
ASSERT_TRUE(result.oplogEntry.isEmpty());
}
-TEST_F(PipelineExecutorTest, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
+TEST_F(UpdateTestFixture, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}")};
@@ -112,17 +69,11 @@ TEST_F(PipelineExecutorTest, ShouldNotCreateIdIfNoIdExistsAndNoneIsSpecified) {
ASSERT_FALSE(result.noop);
ASSERT_EQUALS(fromjson("{c: 1, d: 'largeStringValue', a: 1, b: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2}}}"), result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, d: 'largeStringValue', a: 1, b: 2}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2}}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced) {
+TEST_F(UpdateTestFixture, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: 2}}"),
@@ -138,7 +89,7 @@ TEST_F(PipelineExecutorTest, ShouldPreserveIdOfExistingDocumentIfIdNotReplaced)
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0, a: 1, b: 2}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ShouldSucceedWhenImmutableIdIsNotModified) {
+TEST_F(UpdateTestFixture, ShouldSucceedWhenImmutableIdIsNotModified) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: 1, b: 2}}")};
@@ -151,17 +102,11 @@ TEST_F(PipelineExecutorTest, ShouldSucceedWhenImmutableIdIsNotModified) {
ASSERT_EQUALS(fromjson("{_id: 0, c: 1, d: 'largeStringValue', a: 1, b: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2 }}}"), result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0, c: 1, d: 'largeStringValue', a: 1, b: 2}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {a: 1, b: 2 }}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, ComplexDoc) {
+TEST_F(UpdateTestFixture, ComplexDoc) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1, b: [0, 1, 2], c: {d: 1}}}")};
@@ -173,18 +118,12 @@ TEST_F(PipelineExecutorTest, ComplexDoc) {
ASSERT_EQUALS(fromjson("{a: 1, b: [0, 1, 2], e: ['val1', 'val2'], c: {d: 1}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- if (deltaOplogEntryAllowed()) {
- ASSERT_FALSE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {c: {d: 1}}, sb: {a: true, u1: 1} }}"),
- result.oplogEntry);
- } else {
- ASSERT_TRUE(result.indexesAffected);
- ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 1, b: [0, 1, 2], e: ['val1', 'val2'], c: {d: 1}}"),
- result.oplogEntry);
- }
+ ASSERT_FALSE(result.indexesAffected);
+ ASSERT_BSONOBJ_BINARY_EQ(fromjson("{$v: 2, diff: {i: {c: {d: 1}}, sb: {a: true, u1: 1} }}"),
+ result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotRemoveImmutablePath) {
+TEST_F(UpdateTestFixture, CannotRemoveImmutablePath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$project: {c: 1}}")};
@@ -200,7 +139,7 @@ TEST_F(PipelineExecutorTest, CannotRemoveImmutablePath) {
}
-TEST_F(PipelineExecutorTest, IdFieldIsNotRemoved) {
+TEST_F(UpdateTestFixture, IdFieldIsNotRemoved) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$project: {a: 1, _id: 0}}")};
@@ -216,7 +155,7 @@ TEST_F(PipelineExecutorTest, IdFieldIsNotRemoved) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{_id: 0}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotReplaceImmutablePathWithArrayField) {
+TEST_F(UpdateTestFixture, CannotReplaceImmutablePathWithArrayField) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [{b: 1}]}}")};
@@ -231,7 +170,7 @@ TEST_F(PipelineExecutorTest, CannotReplaceImmutablePathWithArrayField) {
"'a.b' was found to be an array or array descendant.");
}
-TEST_F(PipelineExecutorTest, CannotMakeImmutablePathArrayDescendant) {
+TEST_F(UpdateTestFixture, CannotMakeImmutablePathArrayDescendant) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [1]}}")};
@@ -246,7 +185,7 @@ TEST_F(PipelineExecutorTest, CannotMakeImmutablePathArrayDescendant) {
"'a.0' was found to be an array or array descendant.");
}
-TEST_F(PipelineExecutorTest, CannotModifyImmutablePath) {
+TEST_F(UpdateTestFixture, CannotModifyImmutablePath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: {b: 2}}}")};
@@ -261,7 +200,7 @@ TEST_F(PipelineExecutorTest, CannotModifyImmutablePath) {
"to have been altered to b: 2");
}
-TEST_F(PipelineExecutorTest, CannotModifyImmutableId) {
+TEST_F(UpdateTestFixture, CannotModifyImmutableId) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 1}}")};
@@ -276,7 +215,7 @@ TEST_F(PipelineExecutorTest, CannotModifyImmutableId) {
"to have been altered to _id: 1");
}
-TEST_F(PipelineExecutorTest, CanAddImmutableField) {
+TEST_F(UpdateTestFixture, CanAddImmutableField) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: {b: 1}}}")};
@@ -292,7 +231,7 @@ TEST_F(PipelineExecutorTest, CanAddImmutableField) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, a: {b: 1}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CanAddImmutableId) {
+TEST_F(UpdateTestFixture, CanAddImmutableId) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0}}")};
@@ -308,14 +247,14 @@ TEST_F(PipelineExecutorTest, CanAddImmutableId) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{c: 1, _id: 0}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CannotCreateDollarPrefixedName) {
+TEST_F(UpdateTestFixture, CannotCreateDollarPrefixedName) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {'a.$bad': 1}}")};
ASSERT_THROWS_CODE(PipelineExecutor(expCtx, pipeline), AssertionException, 16410);
}
-TEST_F(PipelineExecutorTest, NoLogBuilder) {
+TEST_F(UpdateTestFixture, NoLogBuilder) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {a: 1}}")};
@@ -330,7 +269,7 @@ TEST_F(PipelineExecutorTest, NoLogBuilder) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
}
-TEST_F(PipelineExecutorTest, SerializeTest) {
+TEST_F(UpdateTestFixture, SerializeTest) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
std::vector<BSONObj> pipeline{fromjson("{$addFields: {_id: 0, a: [{b: 1}]}}"),
@@ -345,7 +284,7 @@ TEST_F(PipelineExecutorTest, SerializeTest) {
ASSERT_VALUE_EQ(serialized, Value(BSONArray(doc)));
}
-TEST_F(PipelineExecutorTest, RejectsInvalidConstantNames) {
+TEST_F(UpdateTestFixture, RejectsInvalidConstantNames) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline;
@@ -368,7 +307,7 @@ TEST_F(PipelineExecutorTest, RejectsInvalidConstantNames) {
ErrorCodes::FailedToParse);
}
-TEST_F(PipelineExecutorTest, CanUseConstants) {
+TEST_F(UpdateTestFixture, CanUseConstants) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {b: '$$var1', c: '$$var2'}}")};
@@ -384,7 +323,7 @@ TEST_F(PipelineExecutorTest, CanUseConstants) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 1, b: 10, c : {x: 1, y: 2}}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, CanUseConstantsAcrossMultipleUpdates) {
+TEST_F(UpdateTestFixture, CanUseConstantsAcrossMultipleUpdates) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {b: '$$var1'}}")};
@@ -412,7 +351,7 @@ TEST_F(PipelineExecutorTest, CanUseConstantsAcrossMultipleUpdates) {
ASSERT_BSONOBJ_BINARY_EQ(fromjson("{a: 2, b: 'foo'}"), result.oplogEntry);
}
-TEST_F(PipelineExecutorTest, NoopWithConstants) {
+TEST_F(UpdateTestFixture, NoopWithConstants) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
const std::vector<BSONObj> pipeline{fromjson("{$set: {a: '$$var1', b: '$$var2'}}")};
@@ -428,7 +367,7 @@ TEST_F(PipelineExecutorTest, NoopWithConstants) {
ASSERT_TRUE(result.oplogEntry.isEmpty());
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithDeletes) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithDeletes) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: 1, paddingField: 'largeValueString'}, c: 1, paddingField: "
@@ -497,7 +436,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithDeletes) {
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithUpdatesAndInserts) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithUpdatesAndInserts) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: 1, paddingField: 'largeValueString'}, c: 1, paddingField: "
@@ -565,7 +504,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithUpdatesAndInserts) {
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAlongIndexPath) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithArraysAlongIndexPath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: [0, {a: {b: ['someStringValue', {c: 1, paddingField: 'largeValueString'}], "
@@ -656,7 +595,7 @@ TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAlongIndexPath)
}
}
-TEST_F(PipelineExecutorV2ModeTest, TestIndexesAffectedWithArraysAfterIndexPath) {
+TEST_F(UpdateTestFixture, TestIndexesAffectedWithArraysAfterIndexPath) {
boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
BSONObj preImage(
fromjson("{f1: {a: {b: {c: [{paddingField: 'largeValueString'}, 1]}, c: 1, paddingField: "
diff --git a/src/mongo/db/update/pop_node_test.cpp b/src/mongo/db/update/pop_node_test.cpp
index b17fe6b189b..4159be9a475 100644
--- a/src/mongo/db/update/pop_node_test.cpp
+++ b/src/mongo/db/update/pop_node_test.cpp
@@ -42,7 +42,7 @@ namespace mongo {
namespace {
namespace mmb = mongo::mutablebson;
-using PopNodeTest = UpdateNodeTest;
+using PopNodeTest = UpdateTestFixture;
TEST(PopNodeTest, InitSucceedsPositiveOne) {
auto update = fromjson("{$pop: {a: 1}}");
@@ -237,8 +237,7 @@ TEST_F(PopNodeTest, PopsSingleElementFromTheBack) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -257,8 +256,7 @@ TEST_F(PopNodeTest, PopsSingleElementFromTheFront) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -277,8 +275,7 @@ TEST_F(PopNodeTest, PopsFromTheBackOfMultiElementArray) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [1, 2]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [1, 2]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -297,8 +294,7 @@ TEST_F(PopNodeTest, PopsFromTheFrontOfMultiElementArray) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [2, 3]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [2, 3]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -317,8 +313,7 @@ TEST_F(PopNodeTest, PopsFromTheFrontOfMultiElementArrayWithoutAffectingIndexes)
ASSERT_FALSE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [2, 3]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [2, 3]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [2, 3]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -336,8 +331,7 @@ TEST_F(PopNodeTest, SucceedsWithNullUpdateIndexData) {
ASSERT_FALSE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: [1, 2]}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b': [1, 2]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [1, 2]}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/pull_node_test.cpp b/src/mongo/db/update/pull_node_test.cpp
index 34f3c31f34a..85141fa3841 100644
--- a/src/mongo/db/update/pull_node_test.cpp
+++ b/src/mongo/db/update/pull_node_test.cpp
@@ -43,9 +43,7 @@
namespace mongo {
namespace {
-using PullNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using PullNodeTest = UpdateTestFixture;
TEST(PullNodeTest, InitWithBadMatchExpressionFails) {
auto update = fromjson("{$pull: {a: {b: {$foo: 1}}}}");
@@ -267,8 +265,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingOne) {
ASSERT_EQUALS(fromjson("{a: [1, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2, 3]}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayMatchingSeveral) {
@@ -286,8 +283,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingSeveral) {
ASSERT_EQUALS(fromjson("{a: [1, 2, 3, 4, 5]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2, 3, 4, 5]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 2, 3, 4, 5]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 2, 3, 4, 5]}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayMatchingAll) {
@@ -305,7 +301,7 @@ TEST_F(PullNodeTest, ApplyToArrayMatchingAll) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullNodeTest, ApplyToArrayWithEq) {
@@ -323,8 +319,7 @@ TEST_F(PullNodeTest, ApplyToArrayWithEq) {
ASSERT_EQUALS(fromjson("{a: [0, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 2, 3]}}}"));
}
TEST_F(PullNodeTest, ApplyNoIndexDataNoLogBuilder) {
@@ -363,8 +358,7 @@ TEST_F(PullNodeTest, ApplyWithCollation) {
ASSERT_EQUALS(fromjson("{a: ['zaa', 'zbb']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['zaa', 'zbb']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['zaa', 'zbb']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['zaa', 'zbb']}}}"));
}
TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectNonStringMatches) {
@@ -385,7 +379,7 @@ TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectNonStringMatches) {
ASSERT_EQUALS(fromjson("{a: [2, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 1]}}"), fromjson("{$v: 2, diff: {u: {a: [2, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 1]}}}"));
}
TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectRegexMatches) {
@@ -406,8 +400,7 @@ TEST_F(PullNodeTest, ApplyWithCollationDoesNotAffectRegexMatches) {
ASSERT_EQUALS(fromjson("{a: ['b', 'cb']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['b', 'cb']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['b', 'cb']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['b', 'cb']}}}"));
}
TEST_F(PullNodeTest, ApplyStringLiteralMatchWithCollation) {
@@ -428,7 +421,7 @@ TEST_F(PullNodeTest, ApplyStringLiteralMatchWithCollation) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullNodeTest, ApplyCollationDoesNotAffectNumberLiteralMatches) {
@@ -449,8 +442,7 @@ TEST_F(PullNodeTest, ApplyCollationDoesNotAffectNumberLiteralMatches) {
ASSERT_EQUALS(fromjson("{a: ['a', 'b', 2, 'c', 'd']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['a', 'b', 2, 'c', 'd']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['a', 'b', 2, 'c', 'd']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['a', 'b', 2, 'c', 'd']}}}"));
}
TEST_F(PullNodeTest, ApplyStringMatchAfterSetCollator) {
@@ -587,8 +579,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching1) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 1}, {x: 2}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 1}, {x: 2}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}]}}}}"));
}
TEST_F(PullNodeTest, ApplyComplexDocAndMatching2) {
@@ -606,8 +597,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching2) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 1}, {x: 2}, {z: 'z'}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 1}, {x: 2}, {z: 'z'}]}}}}"));
}
TEST_F(PullNodeTest, ApplyComplexDocAndMatching3) {
@@ -625,8 +615,7 @@ TEST_F(PullNodeTest, ApplyComplexDocAndMatching3) {
ASSERT_EQUALS(fromjson("{a: {b: [{x: 2}, {z: 'z'}]}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': [{x: 2}, {z: 'z'}]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 2}, {z: 'z'}]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: [{x: 2}, {z: 'z'}]}}}}"));
}
TEST_F(PullNodeTest, ApplyFullPredicateWithCollation) {
@@ -648,8 +637,7 @@ TEST_F(PullNodeTest, ApplyFullPredicateWithCollation) {
ASSERT_EQUALS(fromjson("{a: {b: []}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': []}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: []}}}}"));
}
TEST_F(PullNodeTest, ApplyScalarValueMod) {
@@ -667,8 +655,7 @@ TEST_F(PullNodeTest, ApplyScalarValueMod) {
ASSERT_EQUALS(fromjson("{a: [2, 2, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 2, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [2, 2, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 2, 2]}}}"));
}
TEST_F(PullNodeTest, ApplyObjectValueMod) {
@@ -686,8 +673,7 @@ TEST_F(PullNodeTest, ApplyObjectValueMod) {
ASSERT_EQUALS(fromjson("{a: [{x: 1}, {x: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{x: 1}, {x: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{x: 1}, {x: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{x: 1}, {x: 1}]}}}"));
}
TEST_F(PullNodeTest, DocumentationExample1) {
@@ -707,7 +693,6 @@ TEST_F(PullNodeTest, DocumentationExample1) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {flags: ['vme', 'de', 'pse', 'tsc', 'pae', 'mce']}}"),
fromjson("{$v: 2, diff: {u: {flags: ['vme', 'de', 'pse', 'tsc', 'pae', 'mce']}}}"));
}
@@ -726,8 +711,7 @@ TEST_F(PullNodeTest, DocumentationExample2a) {
ASSERT_EQUALS(fromjson("{votes: [3, 5, 6, 8]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {votes: [3, 5, 6, 8]}}"),
- fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6, 8]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6, 8]}}}"));
}
TEST_F(PullNodeTest, DocumentationExample2b) {
@@ -745,8 +729,7 @@ TEST_F(PullNodeTest, DocumentationExample2b) {
ASSERT_EQUALS(fromjson("{votes: [3, 5, 6]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {votes: [3, 5, 6]}}"),
- fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {votes: [3, 5, 6]}}}"));
}
TEST_F(PullNodeTest, ApplyPullWithObjectValueToArrayWithNonObjectValue) {
@@ -764,7 +747,7 @@ TEST_F(PullNodeTest, ApplyPullWithObjectValueToArrayWithNonObjectValue) {
ASSERT_EQUALS(fromjson("{a: [2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2]}}"), fromjson("{$v: 2, diff: {u: {a: [2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2]}}}"));
}
TEST_F(PullNodeTest, CannotModifyImmutableField) {
@@ -798,8 +781,7 @@ TEST_F(PullNodeTest, SERVER_3988) {
ASSERT_EQUALS(fromjson("{x: 1, y: [2, 3, 4, 'abc']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {y: [2, 3, 4, 'abc']}}"),
- fromjson("{$v: 2, diff: {u: {y: [2, 3, 4, 'abc']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {y: [2, 3, 4, 'abc']}}}"));
}
} // namespace
diff --git a/src/mongo/db/update/pullall_node_test.cpp b/src/mongo/db/update/pullall_node_test.cpp
index f0879fff9cb..5286fc4d653 100644
--- a/src/mongo/db/update/pullall_node_test.cpp
+++ b/src/mongo/db/update/pullall_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using PullAllNodeTest = UpdateNodeTest;
+using PullAllNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -151,8 +151,7 @@ TEST_F(PullAllNodeTest, ApplyWithSingleNumber) {
ASSERT_EQUALS(fromjson("{a: ['a', {r: 1, b: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['a', {r: 1, b: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [\"a\", {r: 1, b: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [\"a\", {r: 1, b: 2}]}}}"));
}
TEST_F(PullAllNodeTest, ApplyNoIndexDataNoLogBuilder) {
@@ -204,8 +203,7 @@ TEST_F(PullAllNodeTest, ApplyWithWithTwoElements) {
ASSERT_EQUALS(fromjson("{a: [{r: 1, b: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{r: 1, b: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{r: 1, b: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{r: 1, b: 2}]}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElements) {
@@ -223,7 +221,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElements) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsButOutOfOrder) {
@@ -241,7 +239,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsButOutOfOrder) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsAndThenSome) {
@@ -259,7 +257,7 @@ TEST_F(PullAllNodeTest, ApplyWithAllArrayElementsAndThenSome) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {u: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: []}}}"));
}
TEST_F(PullAllNodeTest, ApplyWithCollator) {
@@ -280,8 +278,7 @@ TEST_F(PullAllNodeTest, ApplyWithCollator) {
ASSERT_EQUALS(fromjson("{a: ['baz']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['baz']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['baz']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['baz']}}}"));
}
TEST_F(PullAllNodeTest, ApplyAfterSetCollator) {
diff --git a/src/mongo/db/update/push_node_test.cpp b/src/mongo/db/update/push_node_test.cpp
index 1810610c302..8999a2707a1 100644
--- a/src/mongo/db/update/push_node_test.cpp
+++ b/src/mongo/db/update/push_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using PushNodeTest = UpdateNodeTest;
+using PushNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -288,7 +288,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -307,7 +307,7 @@ TEST_F(PushNodeTest, ApplyToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -326,8 +326,7 @@ TEST_F(PushNodeTest, ApplyToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -358,8 +357,7 @@ TEST_F(PushNodeTest, ApplyToDottedPathElement) {
doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'choices.first.votes': [1]}}"),
- fromjson("{$v: 2, diff: {schoices: {sfirst: {i: {votes: [1]}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {schoices: {sfirst: {i: {votes: [1]}}}}}"));
ASSERT_EQUALS("{choices.first.votes}", getModifiedPaths());
}
@@ -378,7 +376,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -397,7 +395,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -416,7 +414,7 @@ TEST_F(PushNodeTest, ApplyMultipleEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: [1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 2]}}"), fromjson("{$v: 2, diff: {i: {a: [1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: [1, 2]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -435,8 +433,7 @@ TEST_F(PushNodeTest, ApplySimpleEachToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -455,8 +452,7 @@ TEST_F(PushNodeTest, ApplyMultipleEachToArrayWithOneElement) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1, 'a.2': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 1, u2: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 1, u2: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -494,7 +490,7 @@ TEST_F(PushNodeTest, ApplyEmptyEachToEmptyDocument) {
ASSERT_EQUALS(fromjson("{a: []}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: []}}"), fromjson("{$v: 2, diff: {i: {a: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: []}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -532,7 +528,7 @@ TEST_F(PushNodeTest, ApplyToArrayWithSlice) {
ASSERT_EQUALS(fromjson("{a: [3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [3]}}"), fromjson("{$v: 2, diff: {u: {a: [3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [3]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -551,8 +547,7 @@ TEST_F(PushNodeTest, ApplyWithNumericSort) {
ASSERT_EQUALS(fromjson("{a: [-1, 2, 3]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [-1, 2, 3]}}"),
- fromjson("{$v: 2, diff: {u: {a: [-1, 2, 3]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [-1, 2, 3]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -571,8 +566,7 @@ TEST_F(PushNodeTest, ApplyWithReverseNumericSort) {
ASSERT_EQUALS(fromjson("{a: [4, 3, -1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [4, 3, -1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [4, 3, -1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [4, 3, -1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -591,8 +585,7 @@ TEST_F(PushNodeTest, ApplyWithMixedSort) {
ASSERT_EQUALS(fromjson("{a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [-1, 3, 4, 't', {a: 1}, {b: 1}]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -611,8 +604,7 @@ TEST_F(PushNodeTest, ApplyWithReverseMixedSort) {
ASSERT_EQUALS(fromjson("{a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 1}, {a: 1}, 't', 4, 3, -1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -631,8 +623,7 @@ TEST_F(PushNodeTest, ApplyWithEmbeddedFieldSort) {
ASSERT_EQUALS(fromjson("{a: [3, 't', {b: 1}, 4, -1, {a: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [3, 't', {b: 1}, 4, -1, {a: 1}]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -654,8 +645,7 @@ TEST_F(PushNodeTest, ApplySortWithCollator) {
ASSERT_EQUALS(fromjson("{a: ['ha', 'gb', 'fc', 'dd']}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: ['ha', 'gb', 'fc', 'dd']}}"),
- fromjson("{$v: 2, diff: {u: {a: ['ha', 'gb', 'fc', 'dd']}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: ['ha', 'gb', 'fc', 'dd']}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -885,7 +875,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithPositionZero) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -904,7 +894,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithPositionOne) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -923,7 +913,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithLargePosition) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -942,7 +932,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithPositionZero) {
ASSERT_EQUALS(fromjson("{a: [1, 0]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 0]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -961,8 +951,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithLargePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 1}}"),
- fromjson(" {$v: 2, diff: {sa: {a: true, u1: 1}}}"));
+ assertOplogEntry(fromjson(" {$v: 2, diff: {sa: {a: true, u1: 1}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -981,7 +970,7 @@ TEST_F(PushNodeTest, ApplyToEmptyArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [1]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1]}}"), fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1000,7 +989,7 @@ TEST_F(PushNodeTest, ApplyToSingletonArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [1, 0]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 0]}}"), fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 0]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1019,8 +1008,7 @@ TEST_F(PushNodeTest, ApplyToPopulatedArrayWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2, 5, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2, 5, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1039,8 +1027,7 @@ TEST_F(PushNodeTest, ApplyToPopulatedArrayWithOutOfBoundsNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [5, 0, 1, 2, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [5, 0, 1, 2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1059,8 +1046,7 @@ TEST_F(PushNodeTest, ApplyMultipleElementsPushWithNegativePosition) {
ASSERT_EQUALS(fromjson("{a: [0, 1, 2, 5, 6, 7, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [0, 1, 2, 5, 6, 7, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1081,8 +1067,7 @@ TEST_F(PushNodeTest, PushWithMinIntAsPosition) {
ASSERT_EQUALS(fromjson("{a: [5, 0, 1, 2, 3, 4]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [5, 0, 1, 2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [5, 0, 1, 2, 3, 4]}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/rename_node_test.cpp b/src/mongo/db/update/rename_node_test.cpp
index 39c037a75a3..f8b7e010ca7 100644
--- a/src/mongo/db/update/rename_node_test.cpp
+++ b/src/mongo/db/update/rename_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using RenameNodeTest = UpdateNodeTest;
+using RenameNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -125,8 +125,7 @@ TEST_F(RenameNodeTest, SimpleNumberAtRoot) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -144,8 +143,7 @@ TEST_F(RenameNodeTest, ToExistsAtSameLevel) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -163,8 +161,7 @@ TEST_F(RenameNodeTest, ToAndFromHaveSameValue) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -182,8 +179,7 @@ TEST_F(RenameNodeTest, RenameToFieldWithSameValueButDifferentType) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 1}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 1}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 1}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -201,8 +197,7 @@ TEST_F(RenameNodeTest, FromDottedElement) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {}, b: {d: 6}}"), doc);
- assertOplogEntry(fromjson("{$set: {b: {d: 6}}, $unset: {'a.c': true}}"),
- fromjson("{$v: 2, diff: {u: {b: {d: 6}}, sa: {d: {c: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {b: {d: 6}}, sa: {d: {c: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.c, b}");
}
@@ -220,8 +215,7 @@ TEST_F(RenameNodeTest, RenameToExistingNestedFieldDoesNotReorderFields) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: {b: {c: 4, d: 2}}, b: 3, c: {}}"), doc);
- assertOplogEntry(fromjson("{$set: {'a.b.c': 4}, $unset: {'c.d': true}}"),
- fromjson("{$v: 2, diff: {sa: {sb: {u: {c: 4}}}, sc: {d: {d: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {sb: {u: {c: 4}}}, sc: {d: {d: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c, c.d}");
}
@@ -240,8 +234,7 @@ TEST_F(RenameNodeTest, MissingCompleteTo) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 1, c: {r: {d: 2}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'c.r.d': 2}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, sc: {i: {r: {d: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, sc: {i: {r: {d: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, c.r.d}");
}
@@ -259,8 +252,7 @@ TEST_F(RenameNodeTest, ToIsCompletelyMissing) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: {c: {d: 2}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'b.c.d': 2}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: 2}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b.c.d}");
}
@@ -278,8 +270,7 @@ TEST_F(RenameNodeTest, ToMissingDottedField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: {c: {d: [{a:2, b:1}]}}}"), doc);
- assertOplogEntry(fromjson("{$set: {'b.c.d': [{a:2, b:1}]}, $unset: {'a': true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: [{a: 2, b: 1}]}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, i: {b: {c: {d: [{a: 2, b: 1}]}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b.c.d}");
}
@@ -398,8 +389,7 @@ TEST_F(RenameNodeTest, ReplaceArrayField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {b: 2}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -417,8 +407,7 @@ TEST_F(RenameNodeTest, ReplaceWithArrayField) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{b: []}"), doc);
- assertOplogEntry(fromjson("{$set: {b: []}, $unset: {a: true}}"),
- fromjson("{$v: 2, diff: {d: {a: false}, u: {b: []}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}, u: {b: []}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b}");
}
@@ -436,8 +425,7 @@ TEST_F(RenameNodeTest, CanRenameFromInvalidFieldName) {
ASSERT_TRUE(result.indexesAffected);
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
- assertOplogEntry(fromjson("{$set: {a: 2}, $unset: {'$a': true}}"),
- fromjson("{$v: 2, diff: {d: {$a: false}, i: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {$a: false}, i: {a: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{$a, a}");
}
@@ -492,8 +480,7 @@ TEST_F(RenameNodeTest, ApplyCanRemoveRequiredPartOfDBRefIfValidateForStorageIsFa
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'b': 0}, $unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {i: {b: 0}, sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 0}, sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id, b}");
}
diff --git a/src/mongo/db/update/set_node_test.cpp b/src/mongo/db/update/set_node_test.cpp
index 2667dac6d52..9c9b7733aeb 100644
--- a/src/mongo/db/update/set_node_test.cpp
+++ b/src/mongo/db/update/set_node_test.cpp
@@ -43,7 +43,7 @@
namespace mongo {
namespace {
-using SetNodeTest = UpdateNodeTest;
+using SetNodeTest = UpdateTestFixture;
using mongo::mutablebson::countChildren;
using mongo::mutablebson::Element;
@@ -95,7 +95,7 @@ TEST_F(SetNodeTest, ApplyEmptyPathToCreate) {
ASSERT_EQUALS(fromjson("{a: 6}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 6}}"), fromjson("{$v: 2, diff: {u: {a: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 6}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -115,8 +115,7 @@ TEST_F(SetNodeTest, ApplyCreatePath) {
ASSERT_EQUALS(fromjson("{a: {d: 5, b: {c: 6}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: {c: 6}}}}}"));
ASSERT_EQUALS("{a.b.c}", getModifiedPaths());
}
@@ -135,7 +134,7 @@ TEST_F(SetNodeTest, ApplyCreatePathFromRoot) {
ASSERT_EQUALS(fromjson("{c: 5, a: {b: 6}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6}}"), fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -155,8 +154,7 @@ TEST_F(SetNodeTest, ApplyPositional) {
ASSERT_EQUALS(fromjson("{a: [0, 6, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: 6}}}"));
ASSERT_EQUALS("{a.1}", getModifiedPaths());
}
@@ -367,7 +365,7 @@ TEST_F(SetNodeTest, ApplyLog) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v: 2, diff: {u: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -767,8 +765,7 @@ TEST_F(SetNodeTest, ApplyLogDottedPath) {
ASSERT_EQUALS(fromjson("{a: [{b:0}, {b:1}, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -785,8 +782,7 @@ TEST_F(SetNodeTest, LogEmptyArray) {
ASSERT_EQUALS(fromjson("{a: [null, null, {b:2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -803,8 +799,7 @@ TEST_F(SetNodeTest, LogEmptyObject) {
ASSERT_EQUALS(fromjson("{a: {'2': {b: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'2': {b: 2}}}}}"));
ASSERT_EQUALS("{a.2.b}", getModifiedPaths());
}
@@ -987,7 +982,7 @@ TEST_F(SetNodeTest, Set6) {
ASSERT_EQUALS(fromjson("{_id: 1, r: {a:2, b:2}}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'r.a': 2}}"), fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
ASSERT_EQUALS("{r.a}", getModifiedPaths());
}
@@ -1007,7 +1002,7 @@ TEST_F(SetNodeTest, Set6FromRepl) {
ASSERT_EQUALS(fromjson("{_id: 1, r: {a:2, b:2} }"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'r.a': 2}}"), fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sr: {u: {a: 2}}}}"));
ASSERT_EQUALS("{r.a}", getModifiedPaths());
}
@@ -1051,7 +1046,7 @@ TEST_F(SetNodeTest, ApplyCanCreateDollarPrefixedFieldNameWhenValidateForStorageI
ASSERT_EQUALS(fromjson("{$bad: 1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {$bad: 1}}"), fromjson("{$v: 2, diff: {i: {$bad: 1}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {$bad: 1}}}"));
ASSERT_EQUALS("{$bad}", getModifiedPaths());
}
@@ -1160,7 +1155,7 @@ TEST_F(SetNodeTest, ApplyCanOverwritePrefixToCreateImmutablePath) {
ASSERT_EQUALS(fromjson("{a: {b: 2}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 2}}}"), fromjson("{$v: 2, diff: {u: {a: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: {b: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1180,8 +1175,7 @@ TEST_F(SetNodeTest, ApplyCanOverwritePrefixOfImmutablePathIfNoopOnImmutablePath)
ASSERT_EQUALS(fromjson("{a: {b: 2, c: 3}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: {b: 2, c: 3}}}"),
- fromjson("{$v: 2, diff: {u: {a: {b: 2, c: 3}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: {b: 2, c: 3}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1272,7 +1266,7 @@ TEST_F(SetNodeTest, ApplyCanCreateImmutablePath) {
ASSERT_EQUALS(fromjson("{a: {b: 2}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 2}}"), fromjson("{$v: 2, diff: {sa: {i: {b: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {b: 2}}}}"));
ASSERT_EQUALS("{a.b}", getModifiedPaths());
}
@@ -1292,7 +1286,7 @@ TEST_F(SetNodeTest, ApplyCanCreatePrefixOfImmutablePath) {
ASSERT_EQUALS(fromjson("{a: 2}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 2}}"), fromjson("{$v: 2, diff: {i: {a: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: 2}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1312,8 +1306,7 @@ TEST_F(SetNodeTest, ApplySetFieldInNonExistentArrayElementAffectsIndexOnSiblingF
ASSERT_EQUALS(fromjson("{a: [{b: 0}, {c: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 2}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 2}}}}"));
ASSERT_EQUALS("{a}", getModifiedPaths());
}
@@ -1333,8 +1326,7 @@ TEST_F(SetNodeTest, ApplySetFieldInExistingArrayElementDoesNotAffectIndexOnSibli
ASSERT_EQUALS(fromjson("{a: [{b: 0, c: 2}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {i: {c: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {i: {c: 2}}}}}"));
ASSERT_EQUALS("{a.0.c}", getModifiedPaths());
}
@@ -1355,8 +1347,7 @@ TEST_F(SetNodeTest, ApplySetFieldInNonExistentNumericFieldDoesNotAffectIndexOnSi
ASSERT_EQUALS(fromjson("{a: {'0': {b: 0}, '1': {c: 2}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.c': 2}}"),
- fromjson("{$v: 2, diff: {sa: {i: {'1': {c: 2}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {i: {'1': {c: 2}}}}}"));
ASSERT_EQUALS("{a.1.c}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/unset_node_test.cpp b/src/mongo/db/update/unset_node_test.cpp
index 0dd55f165e4..91cb9b9ab63 100644
--- a/src/mongo/db/update/unset_node_test.cpp
+++ b/src/mongo/db/update/unset_node_test.cpp
@@ -43,9 +43,7 @@
namespace mongo {
namespace {
-using UnsetNodeTest = UpdateNodeTest;
-using mongo::mutablebson::countChildren;
-using mongo::mutablebson::Element;
+using UnsetNodeTest = UpdateTestFixture;
DEATH_TEST_REGEX(UnsetNodeTest,
InitFailsForEmptyElement,
@@ -169,7 +167,7 @@ TEST_F(UnsetNodeTest, UnsetTopLevelPath) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -188,8 +186,7 @@ TEST_F(UnsetNodeTest, UnsetNestedPath) {
ASSERT_EQUALS(fromjson("{a: {b: {}}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b.c': true}}"),
- fromjson("{$v: 2, diff: {sa: {sb: {d: {c: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {sb: {d: {c: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c}");
}
@@ -208,8 +205,7 @@ TEST_F(UnsetNodeTest, UnsetObject) {
ASSERT_EQUALS(fromjson("{a: {}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -228,8 +224,7 @@ TEST_F(UnsetNodeTest, UnsetArrayElement) {
ASSERT_EQUALS(fromjson("{a:[null], b:1}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': null}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u0: null}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u0: null}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0}");
}
@@ -249,8 +244,7 @@ TEST_F(UnsetNodeTest, UnsetPositional) {
ASSERT_EQUALS(fromjson("{a: [0, null, 2]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': null}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: null}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: null}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.1}");
}
@@ -269,7 +263,7 @@ TEST_F(UnsetNodeTest, UnsetEntireArray) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -288,8 +282,7 @@ TEST_F(UnsetNodeTest, UnsetFromObjectInArray) {
ASSERT_EQUALS(fromjson("{a:[{}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.0.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {b: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {b: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b}");
}
@@ -308,8 +301,7 @@ TEST_F(UnsetNodeTest, CanUnsetInvalidField) {
ASSERT_EQUALS(fromjson("{b: 1, a: [{}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.0.$b': true}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {$b: false}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {d: {$b: false}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.$b}");
}
@@ -345,7 +337,7 @@ TEST_F(UnsetNodeTest, ApplyDoesNotAffectIndexes) {
ASSERT_EQUALS(fromjson("{}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {a: true}}"), fromjson("{$v: 2, diff: {d: {a: false}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {d: {a: false}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -364,8 +356,7 @@ TEST_F(UnsetNodeTest, ApplyFieldWithDot) {
ASSERT_EQUALS(fromjson("{'a.b':4, a: {}}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.b': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {b: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b}");
}
@@ -385,8 +376,7 @@ TEST_F(UnsetNodeTest, ApplyCannotRemoveRequiredPartOfDBRef) {
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id}");
}
@@ -408,8 +398,7 @@ TEST_F(UnsetNodeTest, ApplyCanRemoveRequiredPartOfDBRefIfValidateForStorageIsFal
ASSERT_EQUALS(updated, doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$unset: {'a.$id': true}}"),
- fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {d: {$id: false}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.$id}");
}
diff --git a/src/mongo/db/update/update_array_node_test.cpp b/src/mongo/db/update/update_array_node_test.cpp
index 95dbf2356a7..8999a4c6c6b 100644
--- a/src/mongo/db/update/update_array_node_test.cpp
+++ b/src/mongo/db/update/update_array_node_test.cpp
@@ -45,8 +45,7 @@
namespace mongo {
namespace {
-using UpdateArrayNodeTest = UpdateNodeTest;
-using mongo::mutablebson::Element;
+using UpdateArrayNodeTest = UpdateTestFixture;
using unittest::assertGet;
TEST_F(UpdateArrayNodeTest, ApplyCreatePathFails) {
@@ -122,8 +121,7 @@ TEST_F(UpdateArrayNodeTest, UpdateIsAppliedToAllMatchingElements) {
ASSERT_EQUALS(fromjson("{a: [2, 1, 2]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [2, 1, 2]}}"),
- fromjson("{$v: 2, diff: {u: {a: [2, 1, 2]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [2, 1, 2]}}}"));
ASSERT_EQUALS("{a.0, a.2}", getModifiedPaths());
}
@@ -173,8 +171,7 @@ TEST_F(UpdateArrayNodeTest, UpdateForEmptyIdentifierIsAppliedToAllArrayElements)
ASSERT_EQUALS(fromjson("{a: [1, 1, 1]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [1, 1, 1]}}"),
- fromjson("{$v: 2, diff: {u: {a: [1, 1, 1]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [1, 1, 1]}}}"));
ASSERT_EQUALS("{a.0, a.1, a.2}", getModifiedPaths());
}
@@ -223,8 +220,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElement) {
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1, d: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 1, 'a.0.c': 1, 'a.0.d': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1, d: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1, d: 1}}}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.0.d}", getModifiedPaths());
}
@@ -264,8 +260,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementsUsingMergedChildr
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1}, {b: 1, c: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 1, c: 1}, {b: 1, c: 1}]}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.1.b, a.1.c}", getModifiedPaths());
}
@@ -314,8 +309,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementsWithoutMergedChil
ASSERT_EQUALS(fromjson("{a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}"),
- fromjson("{$v: 2, diff: {u: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: [{b: 2, c: 2, d: 1}, {b: 1, c: 2, d: 2}]}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c, a.1.c, a.1.d}", getModifiedPaths());
}
@@ -346,8 +340,7 @@ TEST_F(UpdateArrayNodeTest, ApplyMultipleUpdatesToArrayElementWithEmptyIdentifie
ASSERT_EQUALS(fromjson("{a: [{b: 1, c: 1}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 1, 'a.0.c': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 1, c: 1}}}}}"));
ASSERT_EQUALS("{a.0.b, a.0.c}", getModifiedPaths());
}
@@ -394,7 +387,6 @@ TEST_F(UpdateArrayNodeTest, ApplyNestedArrayUpdates) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b.0.c': 1, 'a.0.b.0.d': 1}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, s0: {u: {c: 1, d: 1}}}}}}}"));
ASSERT_EQUALS("{a.0.b.0.c, a.0.b.0.d}", getModifiedPaths());
}
@@ -594,7 +586,6 @@ TEST_F(UpdateArrayNodeTest, NoArrayElementAffectsIndexes) {
ASSERT_FALSE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 0, b: 0}]}}"),
fromjson("{$v: 2, diff: {u: {a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 0, b: 0}]}}}"));
ASSERT_EQUALS("{a.0.b, a.1.b, a.2.b}", getModifiedPaths());
}
@@ -623,8 +614,7 @@ TEST_F(UpdateArrayNodeTest, WhenOneElementIsMatchedLogElementUpdateDirectly) {
ASSERT_EQUALS(fromjson("{a: [{c: 1}, {c: 0, b: 0}, {c: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1.b': 0}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s1: {i: {b: 0}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s1: {i: {b: 0}}}}}"));
ASSERT_EQUALS("{a.1.b}", getModifiedPaths());
}
@@ -652,8 +642,7 @@ TEST_F(UpdateArrayNodeTest, WhenOneElementIsModifiedLogElement) {
ASSERT_EQUALS(fromjson("{a: [{c: 0, b: 0}, {c: 0, b: 0}, {c: 1}]}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.1': {c: 0, b: 0}}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 0, b: 0}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u1: {c: 0, b: 0}}}}"));
ASSERT_EQUALS("{a.0.b, a.1.b}", getModifiedPaths());
}
@@ -707,8 +696,7 @@ TEST_F(UpdateArrayNodeTest, ApplyPositionalInsideArrayUpdate) {
ASSERT_EQUALS(fromjson("{a: [{b: [0, 1], c: 0}]}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b.1': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, u1: 1}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {sb: {a: true, u1: 1}}}}}"));
ASSERT_EQUALS("{a.0.b.1}", getModifiedPaths());
}
diff --git a/src/mongo/db/update/update_driver.cpp b/src/mongo/db/update/update_driver.cpp
index 5ce9dc8aa84..dcb164f0ace 100644
--- a/src/mongo/db/update/update_driver.cpp
+++ b/src/mongo/db/update/update_driver.cpp
@@ -86,19 +86,7 @@ bool parseUpdateExpression(
const std::map<StringData, std::unique_ptr<ExpressionWithPlaceholder>>& arrayFilters) {
bool positional = false;
std::set<std::string> foundIdentifiers;
- bool foundVersionField = false;
for (auto&& mod : updateExpr) {
- // If there is a "$v" field among the modifiers, it should have already been used by the
- // caller to determine that this is the correct parsing function.
- if (mod.fieldNameStringData() == kUpdateOplogEntryVersionFieldName) {
- uassert(
- ErrorCodes::BadValue, "Duplicate $v in oplog update document", !foundVersionField);
- foundVersionField = true;
- invariant(mod.numberLong() ==
- static_cast<long long>(UpdateOplogEntryVersion::kUpdateNodeV1));
- continue;
- }
-
auto modType = validateMod(mod);
for (auto&& field : mod.Obj()) {
auto statusWithPositional = UpdateObjectNode::parseAndMerge(
@@ -180,25 +168,15 @@ void UpdateDriver::parse(
invariant(_updateType == UpdateType::kOperator);
- // By this point we are expecting a "classic" update. This version of mongod only supports $v:
- // 1 (modifier language) and $v: 2 (delta) (older versions support $v: 0). We've already
- // checked whether this is a delta update so we check that the $v field isn't present, or has a
- // value of 1.
-
- auto updateExpr = updateMod.getUpdateModifier();
- BSONElement versionElement = updateExpr[kUpdateOplogEntryVersionFieldName];
- if (versionElement) {
- uassert(ErrorCodes::FailedToParse,
- "The $v update field is only recognized internally",
- _fromOplogApplication);
-
- // The UpdateModification should have verified that the value of $v is valid.
- invariant(versionElement.numberInt() ==
- static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1));
- }
-
+ // By this point we are expecting a "kModifier" update. This version of mongod only supports
+ // $v: 2 (delta) (older versions support $v: 0 and $v: 1). We've already checked whether
+ // this is a delta update, so we verify that we're not on the oplog application path.
+ tassert(5030100,
+ "An oplog update can only be of type 'kReplacement' or 'kDelta'",
+ !_fromOplogApplication);
auto root = std::make_unique<UpdateObjectNode>();
- _positional = parseUpdateExpression(updateExpr, root.get(), _expCtx, arrayFilters);
+ _positional =
+ parseUpdateExpression(updateMod.getUpdateModifier(), root.get(), _expCtx, arrayFilters);
_updateExecutor = std::make_unique<UpdateTreeExecutor>(std::move(root));
}
@@ -284,9 +262,7 @@ Status UpdateDriver::update(OperationContext* opCtx,
}
if (_logOp && logOpRec) {
- applyParams.logMode = internalQueryEnableLoggingV2OplogEntries.load()
- ? ApplyParams::LogMode::kGenerateOplogEntry
- : ApplyParams::LogMode::kGenerateOnlyV1OplogEntry;
+ applyParams.logMode = ApplyParams::LogMode::kGenerateOplogEntry;
if (MONGO_unlikely(hangAfterPipelineUpdateFCVCheck.shouldFail()) &&
type() == UpdateType::kPipeline) {
diff --git a/src/mongo/db/update/update_executor.h b/src/mongo/db/update/update_executor.h
index 539f044432b..8f674ff25e0 100644
--- a/src/mongo/db/update/update_executor.h
+++ b/src/mongo/db/update/update_executor.h
@@ -57,10 +57,6 @@ public:
// Indicates that no oplog entry should be produced.
kDoNotGenerateOplogEntry,
- // Indicates that the update executor should produce an oplog entry. Only the $v: 1
- // format or replacement-style format may be used, however.
- kGenerateOnlyV1OplogEntry,
-
// Indicates that the update executor should produce an oplog entry, and may use any
// format.
kGenerateOplogEntry
diff --git a/src/mongo/db/update/update_node_test_fixture.h b/src/mongo/db/update/update_node_test_fixture.h
index 22065cc7088..5125c77cfce 100644
--- a/src/mongo/db/update/update_node_test_fixture.h
+++ b/src/mongo/db/update/update_node_test_fixture.h
@@ -32,22 +32,14 @@
#include "mongo/db/concurrency/locker_noop_service_context_test_fixture.h"
#include "mongo/db/service_context.h"
#include "mongo/db/update/update_node.h"
-#include "mongo/db/update/v1_log_builder.h"
#include "mongo/db/update/v2_log_builder.h"
#include "mongo/unittest/unittest.h"
namespace mongo {
-class UpdateNodeTest : public LockerNoopServiceContextTest {
+class UpdateTestFixture : public LockerNoopServiceContextTest {
public:
- ~UpdateNodeTest() override = default;
-
- void run() {
- _useV2LogBuilder = false;
- ServiceContextTest::run();
- _useV2LogBuilder = true;
- ServiceContextTest::run();
- }
+ ~UpdateTestFixture() override = default;
protected:
// Creates a RuntimeUpdatePath from a string, assuming that all numeric path components are
@@ -81,11 +73,7 @@ protected:
_validateForStorage = true;
_indexData.reset();
_logDoc.reset();
- if (_useV2LogBuilder) {
- _logBuilder = std::make_unique<v2_log_builder::V2LogBuilder>();
- } else {
- _logBuilder = std::make_unique<V1LogBuilder>(_logDoc.root());
- }
+ _logBuilder = std::make_unique<v2_log_builder::V2LogBuilder>();
_modifiedPaths.clear();
}
@@ -155,37 +143,19 @@ protected:
return _modifiedPaths.toString();
}
- bool v2LogBuilderUsed() const {
- return _useV2LogBuilder;
- }
-
BSONObj getOplogEntry() const {
return _logBuilder->serialize();
}
void assertOplogEntryIsNoop() const {
- if (v2LogBuilderUsed()) {
- ASSERT_BSONOBJ_BINARY_EQ(getOplogEntry(), fromjson("{$v:2, diff: {}}"));
- } else {
- ASSERT_TRUE(getOplogEntry().isEmpty());
- }
+ ASSERT_BSONOBJ_BINARY_EQ(getOplogEntry(), fromjson("{$v:2, diff: {}}"));
}
- void assertOplogEntry(const BSONObj& expectedV1Entry,
- const BSONObj& expectedV2Entry,
- bool checkBinaryEquality = true) {
- auto assertFn = [checkBinaryEquality](auto expected, auto given) {
- if (checkBinaryEquality) {
- ASSERT_BSONOBJ_BINARY_EQ(expected, given);
- } else {
- ASSERT_BSONOBJ_EQ(expected, given);
- }
- };
-
- if (v2LogBuilderUsed()) {
- assertFn(expectedV2Entry, getOplogEntry());
+ void assertOplogEntry(const BSONObj& expectedV2Entry, bool checkBinaryEquality = true) {
+ if (checkBinaryEquality) {
+ ASSERT_BSONOBJ_BINARY_EQ(expectedV2Entry, getOplogEntry());
} else {
- assertFn(expectedV1Entry, getOplogEntry());
+ ASSERT_BSONOBJ_EQ(expectedV2Entry, getOplogEntry());
}
}
@@ -202,8 +172,6 @@ private:
mutablebson::Document _logDoc;
std::unique_ptr<LogBuilderInterface> _logBuilder;
FieldRefSetWithStorage _modifiedPaths;
-
- bool _useV2LogBuilder = false;
};
} // namespace mongo
diff --git a/src/mongo/db/update/update_object_node_test.cpp b/src/mongo/db/update/update_object_node_test.cpp
index 2e5906e8f30..ee77ffaee1b 100644
--- a/src/mongo/db/update/update_object_node_test.cpp
+++ b/src/mongo/db/update/update_object_node_test.cpp
@@ -47,8 +47,7 @@
namespace mongo {
namespace {
-using UpdateObjectNodeTest = UpdateNodeTest;
-using mongo::mutablebson::Element;
+using UpdateObjectNodeTest = UpdateTestFixture;
using unittest::assertGet;
TEST(UpdateObjectNodeTest, InvalidPathFailsToParse) {
@@ -1775,7 +1774,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateField) {
ASSERT_EQUALS(fromjson("{a: 5, b: 6}"), doc);
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {i: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{b}");
}
@@ -1800,7 +1799,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingField) {
ASSERT_EQUALS(fromjson("{a: 6}"), doc);
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 6}}"), fromjson("{$v: 2, diff: {u: {a: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -1843,8 +1842,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingAndNonexistingFields) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 5, c: 7, b: 6, d: 8}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 5, b: 6, c: 7, d: 8}}"),
- fromjson("{$v: 2, diff: {u: {a: 5, c: 7}, i: {b: 6, d: 8}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 5, c: 7}, i: {b: 6, d: 8}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c, d}");
}
@@ -1887,8 +1885,7 @@ TEST_F(UpdateObjectNodeTest, ApplyExistingNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {b: 6, c: 7}, b: {d: 8, e: 9}}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6, 'a.c': 7, 'b.d': 8, 'b.e': 9}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: 6, c: 7}}, sb: {u: {d: 8, e: 9}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 6, c: 7}}, sb: {u: {d: 8, e: 9}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c, b.d, b.e}");
}
@@ -1931,8 +1928,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 0, a: {b: 6, c: 7}, b: {d: 8, e: 9}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 6, 'a.c': 7, 'b.d': 8, 'b.e': 9}}"),
- fromjson("{$v: 2, diff: {i: {a: {b: 6, c: 7}, b: {d: 8, e: 9}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: 6, c: 7}, b: {d: 8, e: 9}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c, b.d, b.e}");
}
@@ -1969,8 +1965,7 @@ TEST_F(UpdateObjectNodeTest, ApplyCreateDeeplyNestedPaths) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 0, a: {b: {c: {d: 6, e: 7}}, f: 8}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b.c.d': 6, 'a.b.c.e': 7, 'a.f': 8}}"),
- fromjson("{$v: 2, diff: {i: {a: {b: {c: {d: 6, e: 7}}, f: 8}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {b: {c: {d: 6, e: 7}}, f: 8}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b.c.d, a.b.c.e, a.f}");
}
@@ -2019,8 +2014,7 @@ TEST_F(UpdateObjectNodeTest, ChildrenShouldBeAppliedInAlphabeticalOrder) {
ASSERT_BSONOBJ_EQ(fromjson("{z: 9, a: 5, b: 8, c: 7, d: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {a: 5, b: 8, c: 7, d: 6, z: 9}}"),
- fromjson("{$v: 2, diff: {u: {a: 5, z: 9}, i: {b: 8, c: 7, d: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {a: 5, z: 9}, i: {b: 8, c: 7, d: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c, d, z}");
}
@@ -2054,8 +2048,7 @@ TEST_F(UpdateObjectNodeTest, CollatorShouldNotAffectUpdateOrder) {
ASSERT_BSONOBJ_EQ(fromjson("{abc: 5, cba: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {abc: 5, cba: 6}}"),
- fromjson("{$v: 2, diff: {i: {abc: 5, cba: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {abc: 5, cba: 6}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyNoop) {
@@ -2132,7 +2125,7 @@ TEST_F(UpdateObjectNodeTest, ApplySomeChildrenNoops) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 5, b: 6, c: 7}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {u: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {u: {b: 6}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a, b, c}");
}
@@ -2186,7 +2179,7 @@ TEST_F(UpdateObjectNodeTest, ApplyBlockingElementFromReplication) {
ASSERT_BSONOBJ_EQ(fromjson("{a: 0, b: 6}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {b: 6}}"), fromjson("{$v: 2, diff: {i: {b: 6}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {b: 6}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyPositionalMissingMatchedField) {
@@ -2240,8 +2233,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMergePositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
}
@@ -2285,8 +2277,7 @@ TEST_F(UpdateObjectNodeTest, ApplyOrderMergedPositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 7, 'a.1.b': 6, 'a.1.c': 8, 'a.2': 5}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 7, '1': {b: 6, c: 8}, '2': 5}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1.b, a.1.c, a.2}");
}
@@ -2353,8 +2344,7 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotMergePositionalChild) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 5, '1': 7, '2': 6}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 5, 'a.1': 7, 'a.2': 6}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 5, '1': 7, '2': 6}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 5, '1': 7, '2': 6}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1, a.2}");
}
@@ -2392,8 +2382,7 @@ TEST_F(UpdateObjectNodeTest, ApplyPositionalChildLast) {
ASSERT_BSONOBJ_EQ(fromjson("{a: {'0': 6, '1': 7, '2': 5}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0': 6, 'a.1': 7, 'a.2': 5}}"),
- fromjson("{$v: 2, diff: {i: {a: {'0': 6, '1': 7, '2': 5}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {i: {a: {'0': 6, '1': 7, '2': 5}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0, a.1, a.2}");
}
@@ -2425,8 +2414,7 @@ TEST_F(UpdateObjectNodeTest, ApplyUseStoredMergedPositional) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
mutablebson::Document doc2(fromjson("{a: [{b: 0, c: 0}]}"));
@@ -2439,8 +2427,7 @@ TEST_F(UpdateObjectNodeTest, ApplyUseStoredMergedPositional) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [{b: 5, c: 6}]}"), doc2.getObject());
ASSERT_TRUE(doc2.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c}");
}
@@ -2479,7 +2466,6 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotUseStoredMergedPositional) {
ASSERT_TRUE(doc.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b': 5, 'a.0.c': 6, 'a.1.d': 7}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5, c: 6}}, s1: {u: {d: 7}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.0.c, a.1.d}");
@@ -2494,7 +2480,6 @@ TEST_F(UpdateObjectNodeTest, ApplyDoNotUseStoredMergedPositional) {
ASSERT_TRUE(doc2.isInPlaceModeEnabled());
assertOplogEntry(
- fromjson("{$set: {'a.0.b': 5, 'a.1.c': 6, 'a.1.d': 7}}"),
fromjson("{$v: 2, diff: {sa: {a: true, s0: {u: {b: 5}}, s1: {u: {c: 6, d: 7}}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.0.b, a.1.c, a.1.d}");
}
@@ -2525,8 +2510,7 @@ TEST_F(UpdateObjectNodeTest, ApplyToArrayByIndexWithLeadingZero) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [0, 0, 2, 0, 0]}"), doc.getObject());
ASSERT_TRUE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.02': 2}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: 2}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: 2}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.02}");
}
@@ -2565,8 +2549,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMultipleArrayUpdates) {
doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2': 2, 'a.10': 10}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: 2, u10: 10}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: 2, u10: 10}}}"));
}
TEST_F(UpdateObjectNodeTest, ApplyMultipleUpdatesToDocumentInArray) {
@@ -2596,8 +2579,7 @@ TEST_F(UpdateObjectNodeTest, ApplyMultipleUpdatesToDocumentInArray) {
ASSERT_BSONOBJ_EQ(fromjson("{a: [null, null, {b: 1, c: 1}]}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.2.b': 1, 'a.2.c': 1}}"),
- fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 1, c: 1}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {a: true, u2: {b: 1, c: 1}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a}");
}
@@ -2648,8 +2630,7 @@ TEST_F(UpdateObjectNodeTest, SetAndPopModifiersWithCommonPrefixApplySuccessfully
ASSERT_BSONOBJ_EQ(fromjson("{a: {b: 5, c: [2, 3, 4]}}"), doc.getObject());
ASSERT_FALSE(doc.isInPlaceModeEnabled());
- assertOplogEntry(fromjson("{$set: {'a.b': 5, 'a.c': [2, 3, 4]}}"),
- fromjson("{$v: 2, diff: {sa: {u: {b: 5, c: [ 2, 3, 4 ]}}}}"));
+ assertOplogEntry(fromjson("{$v: 2, diff: {sa: {u: {b: 5, c: [ 2, 3, 4 ]}}}}"));
ASSERT_EQUALS(getModifiedPaths(), "{a.b, a.c}");
}
diff --git a/src/mongo/db/update/update_oplog_entry_version.h b/src/mongo/db/update/update_oplog_entry_version.h
index 5338ddcc76e..cbd18e51834 100644
--- a/src/mongo/db/update/update_oplog_entry_version.h
+++ b/src/mongo/db/update/update_oplog_entry_version.h
@@ -45,9 +45,11 @@ enum class UpdateOplogEntryVersion {
// Ancient update system which was deleted in 4.0. We still reserve its version number.
kRemovedV0 = 0,
- // The update system introduced in v3.6. When a single update adds multiple fields, those
- // fields are added in lexicographic order by field name. This system introduces support for
- // arrayFilters and $[] syntax.
+ // The update system introduced in v3.6, which, until 5.1, also served as the format in which
+ // updates were recorded in oplog entries. Oplog entries of this form are no longer supported, but the
+ // user facing modifier-style update system remains. When a single update adds
+ // multiple fields, those fields are added in lexicographic order by field name. This system
+ // introduces support for arrayFilters and $[] syntax.
kUpdateNodeV1 = 1,
// Delta style update, introduced in 4.7. When a pipeline based update is executed, the pre and
diff --git a/src/mongo/db/update/update_tree_executor.h b/src/mongo/db/update/update_tree_executor.h
index e9f7dc93f99..520e1cfe177 100644
--- a/src/mongo/db/update/update_tree_executor.h
+++ b/src/mongo/db/update/update_tree_executor.h
@@ -33,7 +33,6 @@
#include "mongo/db/update/update_node.h"
#include "mongo/db/update/update_object_node.h"
-#include "mongo/db/update/v1_log_builder.h"
#include "mongo/db/update/v2_log_builder.h"
namespace mongo {
@@ -45,27 +44,11 @@ public:
ApplyResult applyUpdate(ApplyParams applyParams) const final {
mutablebson::Document logDocument;
- boost::optional<V1LogBuilder> optV1LogBuilder;
boost::optional<v2_log_builder::V2LogBuilder> optV2LogBuilder;
UpdateNode::UpdateNodeApplyParams updateNodeApplyParams;
- if (applyParams.logMode == ApplyParams::LogMode::kGenerateOnlyV1OplogEntry) {
- // In versions since 3.6, the absence of a $v field indicates either a
- // replacement-style update or a "classic" modifier-style update.
- //
- // Since 3.6, the presence of a $v field with value 1 may also indicate that the oplog
- // entry is a "classic" modifier-style update.
- //
- // While we could elide this $v field when providing a value of 1, we continue to log
- // it because:
- // (a) It avoids an unnecessary oplog format change.
- // (b) It is easy to distinguish from $v: 2 delta-style oplog entries.
- const bool includeVersionField = true;
-
- optV1LogBuilder.emplace(logDocument.root(), includeVersionField);
- updateNodeApplyParams.logBuilder = optV1LogBuilder.get_ptr();
- } else if (applyParams.logMode == ApplyParams::LogMode::kGenerateOplogEntry) {
+ if (applyParams.logMode == ApplyParams::LogMode::kGenerateOplogEntry) {
optV2LogBuilder.emplace();
updateNodeApplyParams.logBuilder = optV2LogBuilder.get_ptr();
}
diff --git a/src/mongo/db/update/v1_log_builder.cpp b/src/mongo/db/update/v1_log_builder.cpp
deleted file mode 100644
index ef3ff88abff..00000000000
--- a/src/mongo/db/update/v1_log_builder.cpp
+++ /dev/null
@@ -1,143 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/update/v1_log_builder.h"
-
-#include "mongo/db/update/runtime_update_path.h"
-#include "mongo/db/update/update_oplog_entry_serialization.h"
-#include "mongo/util/str.h"
-
-namespace mongo {
-
-using mutablebson::Element;
-
-namespace {
-const char kSet[] = "$set";
-const char kUnset[] = "$unset";
-} // namespace
-
-V1LogBuilder::V1LogBuilder(mutablebson::Element logRoot, bool includeVersionField)
- : _logRoot(logRoot),
- _setAccumulator(_logRoot.getDocument().end()),
- _unsetAccumulator(_setAccumulator) {
- invariant(logRoot.isType(mongo::Object));
- invariant(!logRoot.hasChildren());
-
- if (includeVersionField) {
- auto version = logRoot.getDocument().makeElementInt(
- kUpdateOplogEntryVersionFieldName,
- static_cast<int>(UpdateOplogEntryVersion::kUpdateNodeV1));
- invariant(_logRoot.pushFront(version).isOK());
- }
-}
-
-Status V1LogBuilder::addToSection(Element newElt, Element* section, const char* sectionName) {
- // If we don't already have this section, try to create it now.
- if (!section->ok()) {
- mutablebson::Document& doc = _logRoot.getDocument();
-
- // We should not already have an element with the section name under the root.
- dassert(_logRoot[sectionName] == doc.end());
-
- // Construct a new object element to represent this section in the log.
- const Element newElement = doc.makeElementObject(sectionName);
- if (!newElement.ok())
- return Status(ErrorCodes::InternalError,
- "V1LogBuilder: failed to construct Object Element for $set/$unset");
-
- // Enqueue the new section under the root, and record it as our out parameter.
- Status result = _logRoot.pushBack(newElement);
- if (!result.isOK())
- return result;
- *section = newElement;
- }
-
- // Whatever transpired, we should now have an ok accumulator for the section, and not
- // have a replacement accumulator.
- dassert(section->ok());
-
- // Enqueue the provided element to the section and propagate the result.
- return section->pushBack(newElt);
-}
-
-Status V1LogBuilder::addToSets(Element elt) {
- return addToSection(elt, &_setAccumulator, kSet);
-}
-
-Status V1LogBuilder::addToSetsWithNewFieldName(StringData name, const mutablebson::Element val) {
- mutablebson::Element elemToSet = _logRoot.getDocument().makeElementWithNewFieldName(name, val);
- if (!elemToSet.ok())
- return Status(ErrorCodes::InternalError,
- str::stream()
- << "Could not create new '" << name << "' element from existing element '"
- << val.getFieldName() << "' of type " << typeName(val.getType()));
-
- return addToSets(elemToSet);
-}
-
-Status V1LogBuilder::addToSetsWithNewFieldName(StringData name, const BSONElement& val) {
- mutablebson::Element elemToSet = _logRoot.getDocument().makeElementWithNewFieldName(name, val);
- if (!elemToSet.ok())
- return Status(ErrorCodes::InternalError,
- str::stream()
- << "Could not create new '" << name << "' element from existing element '"
- << val.fieldName() << "' of type " << typeName(val.type()));
-
- return addToSets(elemToSet);
-}
-
-Status V1LogBuilder::addToUnsets(StringData path) {
- mutablebson::Element logElement = _logRoot.getDocument().makeElementBool(path, true);
- if (!logElement.ok())
- return Status(ErrorCodes::InternalError,
- str::stream() << "Cannot create $unset oplog entry for path" << path);
-
- return addToSection(logElement, &_unsetAccumulator, kUnset);
-}
-
-Status V1LogBuilder::logUpdatedField(const RuntimeUpdatePath& path, mutablebson::Element elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- mutablebson::Element elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- BSONElement elt) {
- return addToSetsWithNewFieldName(path.fieldRef().dottedField(), elt);
-}
-
-Status V1LogBuilder::logDeletedField(const RuntimeUpdatePath& path) {
- return addToUnsets(path.fieldRef().dottedField());
-}
-} // namespace mongo
diff --git a/src/mongo/db/update/v1_log_builder.h b/src/mongo/db/update/v1_log_builder.h
deleted file mode 100644
index 7be6abc57a0..00000000000
--- a/src/mongo/db/update/v1_log_builder.h
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include "mongo/base/status.h"
-#include "mongo/bson/mutable/document.h"
-#include "mongo/db/update/log_builder_interface.h"
-
-namespace mongo {
-class RuntimeUpdatePath;
-
-/**
- * LogBuilder abstracts away some of the details of producing a properly constructed oplog $v:1
- * modifier-style update entry. It manages separate regions into which it accumulates $set and
- * $unset operations.
- */
-class V1LogBuilder : public LogBuilderInterface {
-public:
- /**
- * Construct a new LogBuilder. Log entries will be recorded as new children under the
- * 'logRoot' Element, which must be of type mongo::Object and have no children.
- *
- * The 'includeVersionField' indicates whether the generated log entry should include a $v
- * (version) field.
- */
- V1LogBuilder(mutablebson::Element logRoot, bool includeVersionField = false);
-
- /**
- * Overloads from LogBuilderInterface. Each of these methods logs a modification to the document
- * in _logRoot. The field name given in the mutablebson element or BSONElement is ignored
- * and the 'path' argument is used instead.
- */
- Status logUpdatedField(const RuntimeUpdatePath& path, mutablebson::Element elt) override;
-
- /**
- * Logs the creation of a new field. The 'idxOfFirstNewComponent' parameter is unused in this
- * implementation.
- */
- Status logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- mutablebson::Element elt) override;
- Status logCreatedField(const RuntimeUpdatePath& path,
- int idxOfFirstNewComponent,
- BSONElement elt) override;
-
- Status logDeletedField(const RuntimeUpdatePath& path) override;
-
- /**
- * Return the Document to which the logging root belongs.
- */
- inline mutablebson::Document& getDocument() {
- return _logRoot.getDocument();
- }
-
- /**
- * Produces a BSON object representing this update using the modifier syntax which can be
- * stored in the oplog.
- */
- BSONObj serialize() const override {
- return _logRoot.getDocument().getObject();
- }
-
-private:
- /**
- * Add the given Element as a new entry in the '$set' section of the log. If a $set section
- * does not yet exist, it will be created. If this LogBuilder is currently configured to
- * contain an object replacement, the request to add to the $set section will return an Error.
- */
- Status addToSets(mutablebson::Element elt);
-
- /**
- * Convenience method which calls addToSets after
- * creating a new Element to wrap the old one.
- *
- * If any problem occurs then the operation will stop and return that error Status.
- */
- Status addToSetsWithNewFieldName(StringData name, mutablebson::Element val);
-
- /**
- * Convenience method which calls addToSets after
- * creating a new Element to wrap the old one.
- *
- * If any problem occurs then the operation will stop and return that error Status.
- */
- Status addToSetsWithNewFieldName(StringData name, const BSONElement& val);
-
- /**
- * Add the given path as a new entry in the '$unset' section of the log. If an '$unset' section
- * does not yet exist, it will be created. If this LogBuilder is currently configured to
- * contain an object replacement, the request to add to the $unset section will return an
- * Error.
- */
- Status addToUnsets(StringData path);
-
- Status addToSection(mutablebson::Element newElt,
- mutablebson::Element* section,
- const char* sectionName);
-
- mutablebson::Element _logRoot;
- mutablebson::Element _setAccumulator;
- mutablebson::Element _unsetAccumulator;
-};
-} // namespace mongo
diff --git a/src/mongo/db/update/v1_log_builder_test.cpp b/src/mongo/db/update/v1_log_builder_test.cpp
deleted file mode 100644
index 1e599181ef3..00000000000
--- a/src/mongo/db/update/v1_log_builder_test.cpp
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Copyright (C) 2018-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/update/v1_log_builder.h"
-
-#include "mongo/base/status.h"
-#include "mongo/bson/bsonobj.h"
-#include "mongo/bson/mutable/mutable_bson_test_utils.h"
-#include "mongo/db/json.h"
-#include "mongo/db/update/runtime_update_path.h"
-#include "mongo/unittest/unittest.h"
-#include "mongo/util/safe_num.h"
-
-namespace mongo {
-namespace {
-namespace mmb = mongo::mutablebson;
-
-/**
- * Given a FieldRef, creates a RuntimeUpdatePath based on it, assuming that every component is a
- * field name. This is safe to do while testing the V1 log builder, since it ignores the types of
- * the path given entirely.
- */
-RuntimeUpdatePath makeRuntimeUpdatePathAssumeAllComponentsFieldNames(StringData path) {
- FieldRef fieldRef(path);
- RuntimeUpdatePath::ComponentTypeVector types(fieldRef.numParts(),
- RuntimeUpdatePath::ComponentType::kFieldName);
- return RuntimeUpdatePath(std::move(fieldRef), std::move(types));
-}
-
-TEST(V1LogBuilder, UpdateFieldMutableBson) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, CreateField) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"),
- 0, // idxOfFirstNewComponent (unused)
- elt_ab));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, CreateFieldBSONElt) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- BSONObj storage = BSON("a" << 1);
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"),
- 0, // idxOfFirstNewComponent (unused)
- storage.firstElement()));
-
- ASSERT_BSONOBJ_BINARY_EQ(mongo::fromjson("{ $set : { 'a.b' : 1 } }"), lb.serialize());
-}
-
-TEST(V1LogBuilder, AddOneToUnset) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
- ASSERT_EQUALS(mongo::fromjson("{ $unset : { 'x.y' : true } }"), doc);
-}
-TEST(V1LogBuilder, AddOneToEach) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- const mmb::Element elt_cd = doc.makeElementInt("", 2);
- ASSERT_TRUE(elt_cd.ok());
-
- ASSERT_OK(lb.logCreatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("c.d"),
- 0, // idxOfCreatedComponent (unused)
- elt_cd));
-
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
-
- ASSERT_EQUALS(mongo::fromjson("{ "
- " $set : { 'a.b' : 1, 'c.d': 2 }, "
- " $unset : { 'x.y' : true } "
- "}"),
- doc);
-}
-TEST(V1LogBuilder, VerifySetsAreGrouped) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- const mmb::Element elt_ab = doc.makeElementInt("a.b", 1);
- ASSERT_TRUE(elt_ab.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b"), elt_ab));
-
- const mmb::Element elt_xy = doc.makeElementInt("x.y", 1);
- ASSERT_TRUE(elt_xy.ok());
- ASSERT_OK(
- lb.logUpdatedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y"), elt_xy));
-
- ASSERT_EQUALS(mongo::fromjson("{ $set : {"
- " 'a.b' : 1, "
- " 'x.y' : 1 "
- "} }"),
- doc);
-}
-
-TEST(V1LogBuilder, VerifyUnsetsAreGrouped) {
- mmb::Document doc;
- V1LogBuilder lb(doc.root());
-
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("a.b")));
- ASSERT_OK(lb.logDeletedField(makeRuntimeUpdatePathAssumeAllComponentsFieldNames("x.y")));
-
- ASSERT_EQUALS(mongo::fromjson("{ $unset : {"
- " 'a.b' : true, "
- " 'x.y' : true "
- "} }"),
- doc);
-}
-} // namespace
-} // namespace mongo