summary refs log tree commit diff
path: root/jstests/change_streams/change_stream.js
diff options
context:
space:
mode:
author Bernard Gorman <bernard.gorman@gmail.com> 2018-04-03 16:55:52 +0100
committer Bernard Gorman <bernard.gorman@gmail.com> 2018-04-06 02:47:55 +0100
commit 41084e8f0fa354a9efc28a354321200e94a2fcf6 (patch)
tree 309fed55bfb25682117ca7a4c170c1699cadfbd6 /jstests/change_streams/change_stream.js
parent 87be281e31034f80723d5299b70e7e956a48c494 (diff)
download mongo-41084e8f0fa354a9efc28a354321200e94a2fcf6.tar.gz
SERVER-34090 Allow resuming change stream when resume token's document key does not contain the shard key
Diffstat (limited to 'jstests/change_streams/change_stream.js')
-rw-r--r-- jstests/change_streams/change_stream.js | 11
1 file changed, 4 insertions(+), 7 deletions(-)
diff --git a/jstests/change_streams/change_stream.js b/jstests/change_streams/change_stream.js
index 791e8fba9ac..4401df139e4 100644
--- a/jstests/change_streams/change_stream.js
+++ b/jstests/change_streams/change_stream.js
@@ -171,8 +171,8 @@
if (!isMongos) {
jsTestLog("Ensuring attempt to read with legacy operations fails.");
db.getMongo().forceReadMode('legacy');
- const legacyCursor = db.tailable2.aggregate([{$changeStream: {}}, cst.oplogProjection],
- {cursor: {batchSize: 0}});
+ const legacyCursor =
+ db.tailable2.aggregate([{$changeStream: {}}], {cursor: {batchSize: 0}});
assert.throws(function() {
legacyCursor.next();
}, [], "Legacy getMore expected to fail on changeStream cursor.");
@@ -183,8 +183,8 @@
assertDropAndRecreateCollection(db, "resume1");
// Note we do not project away 'id.ts' as it is part of the resume token.
- let resumeCursor = cst.startWatchingChanges(
- {pipeline: [{$changeStream: {}}], collection: db.resume1, includeToken: true});
+ let resumeCursor =
+ cst.startWatchingChanges({pipeline: [{$changeStream: {}}], collection: db.resume1});
// Insert a document and save the resulting change stream.
assert.writeOK(db.resume1.insert({_id: 1}));
@@ -195,7 +195,6 @@
resumeCursor = cst.startWatchingChanges({
pipeline: [{$changeStream: {resumeAfter: firstInsertChangeDoc._id}}],
collection: db.resume1,
- includeToken: true,
aggregateOptions: {cursor: {batchSize: 0}},
});
@@ -211,7 +210,6 @@
resumeCursor = cst.startWatchingChanges({
pipeline: [{$changeStream: {resumeAfter: firstInsertChangeDoc._id}}],
collection: db.resume1,
- includeToken: true,
aggregateOptions: {cursor: {batchSize: 0}},
});
assert.docEq(cst.getOneChange(resumeCursor), secondInsertChangeDoc);
@@ -221,7 +219,6 @@
resumeCursor = cst.startWatchingChanges({
pipeline: [{$changeStream: {resumeAfter: secondInsertChangeDoc._id}}],
collection: db.resume1,
- includeToken: true,
aggregateOptions: {cursor: {batchSize: 0}},
});
assert.docEq(cst.getOneChange(resumeCursor), thirdInsertChangeDoc);