author     Eddie Louie <eddie.louie@mongodb.com>  2017-08-01 19:47:13 -0400
committer  Eddie Louie <eddie.louie@mongodb.com>  2017-08-01 19:47:13 -0400
commit     2a76bd75d75197d3604643ff2b11d0a8f23c14f9 (patch)
tree       10a26d5aec28d9d912c2020a9e4c87064382cf09 /src/mongo/db
parent     6fd97bdfa5f2d0d07993c538b0787ad788227ef2 (diff)
download   mongo-2a76bd75d75197d3604643ff2b11d0a8f23c14f9.tar.gz
Revert "SERVER-29135 Add post-image lookup to $changeNotification"
This reverts commit ad30a49a33b8773cbc07388bb257d605cbd6aa12.
Diffstat (limited to 'src/mongo/db')
-rw-r--r--   20  src/mongo/db/pipeline/SConscript
-rw-r--r--    3  src/mongo/db/pipeline/document_source.cpp
-rw-r--r--    6  src/mongo/db/pipeline/document_source.h
-rw-r--r--    3  src/mongo/db/pipeline/document_source_bucket.cpp
-rw-r--r--    2  src/mongo/db/pipeline/document_source_bucket.h
-rw-r--r--   17  src/mongo/db/pipeline/document_source_bucket_test.cpp
-rw-r--r--  197  src/mongo/db/pipeline/document_source_change_notification.cpp
-rw-r--r--   48  src/mongo/db/pipeline/document_source_change_notification.h
-rw-r--r--  166  src/mongo/db/pipeline/document_source_change_notification_test.cpp
-rw-r--r--    4  src/mongo/db/pipeline/document_source_count.cpp
-rw-r--r--    2  src/mongo/db/pipeline/document_source_count.h
-rw-r--r--   11  src/mongo/db/pipeline/document_source_count_test.cpp
-rw-r--r--  125  src/mongo/db/pipeline/document_source_lookup_change_post_image.cpp
-rw-r--r--  115  src/mongo/db/pipeline/document_source_lookup_change_post_image.h
-rw-r--r--  266  src/mongo/db/pipeline/document_source_lookup_change_post_image_test.cpp
-rw-r--r--    4  src/mongo/db/pipeline/document_source_sort_by_count.cpp
-rw-r--r--    2  src/mongo/db/pipeline/document_source_sort_by_count.h
-rw-r--r--   11  src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
-rw-r--r--    3  src/mongo/db/pipeline/expression_context.cpp
-rw-r--r--    4  src/mongo/db/pipeline/expression_context.h
-rw-r--r--    4  src/mongo/db/pipeline/expression_context_for_test.h
-rw-r--r--   58  src/mongo/db/pipeline/pipeline_test.cpp
-rw-r--r--    3  src/mongo/db/pipeline/value.h
23 files changed, 158 insertions(+), 916 deletions(-)
diff --git a/src/mongo/db/pipeline/SConscript b/src/mongo/db/pipeline/SConscript
index 093cbb5a5c9..0d67ccbb2df 100644
--- a/src/mongo/db/pipeline/SConscript
+++ b/src/mongo/db/pipeline/SConscript
@@ -135,9 +135,7 @@ env.CppUnitTest(
'document_source_geo_near_test.cpp',
'document_source_group_test.cpp',
'document_source_limit_test.cpp',
- 'document_source_lookup_change_post_image_test.cpp',
'document_source_lookup_test.cpp',
- 'document_source_graph_lookup_test.cpp',
'document_source_match_test.cpp',
'document_source_mock_test.cpp',
'document_source_project_test.cpp',
@@ -233,6 +231,7 @@ docSourceEnv.Library(
'document_source_add_fields.cpp',
'document_source_bucket.cpp',
'document_source_bucket_auto.cpp',
+ 'document_source_change_notification.cpp',
'document_source_coll_stats.cpp',
'document_source_count.cpp',
'document_source_current_op.cpp',
@@ -263,7 +262,6 @@ docSourceEnv.Library(
'$BUILD_DIR/mongo/db/matcher/expression_algo',
'$BUILD_DIR/mongo/db/matcher/expressions',
'$BUILD_DIR/mongo/db/pipeline/lite_parsed_document_source',
- '$BUILD_DIR/mongo/db/repl/oplog_entry',
'$BUILD_DIR/mongo/db/service_context',
'$BUILD_DIR/mongo/db/stats/top',
'$BUILD_DIR/mongo/db/storage/encryption_hooks',
@@ -311,10 +309,8 @@ env.Library(
env.Library(
target='document_source_lookup',
source=[
- 'document_source_change_notification.cpp',
'document_source_graph_lookup.cpp',
'document_source_lookup.cpp',
- 'document_source_lookup_change_post_image.cpp',
],
LIBDEPS=[
'document_source',
@@ -322,6 +318,20 @@ env.Library(
],
)
+env.CppUnitTest(
+ target='document_source_graph_lookup_test',
+ source='document_source_graph_lookup_test.cpp',
+ LIBDEPS=[
+ 'document_source',
+ 'document_source_lookup',
+ 'document_value_test_util',
+ '$BUILD_DIR/mongo/db/auth/authorization_manager_mock_init',
+ '$BUILD_DIR/mongo/db/query/query_test_service_context',
+ '$BUILD_DIR/mongo/db/service_context_noop_init',
+ '$BUILD_DIR/mongo/s/is_mongos',
+ ],
+)
+
env.Library(
target='document_source_facet',
source=[
diff --git a/src/mongo/db/pipeline/document_source.cpp b/src/mongo/db/pipeline/document_source.cpp
index a9d9f74181a..6e58adae2bf 100644
--- a/src/mongo/db/pipeline/document_source.cpp
+++ b/src/mongo/db/pipeline/document_source.cpp
@@ -41,7 +41,6 @@ namespace mongo {
using Parser = DocumentSource::Parser;
using boost::intrusive_ptr;
-using std::list;
using std::string;
using std::vector;
@@ -61,7 +60,7 @@ void DocumentSource::registerParser(string name, Parser parser) {
parserMap[name] = parser;
}
-list<intrusive_ptr<DocumentSource>> DocumentSource::parse(
+vector<intrusive_ptr<DocumentSource>> DocumentSource::parse(
const intrusive_ptr<ExpressionContext>& expCtx, BSONObj stageObj) {
uassert(16435,
"A pipeline stage specification object must contain exactly one field.",
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index 383cd56e35f..2545f02d8de 100644
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -85,7 +85,7 @@ class Document;
MONGO_INITIALIZER(addToDocSourceParserMap_##key)(InitializerContext*) { \
auto fullParserWrapper = [](BSONElement stageSpec, \
const boost::intrusive_ptr<ExpressionContext>& expCtx) { \
- return std::list<boost::intrusive_ptr<DocumentSource>>{ \
+ return std::vector<boost::intrusive_ptr<DocumentSource>>{ \
(fullParser)(stageSpec, expCtx)}; \
}; \
LiteParsedDocumentSource::registerParser("$" #key, liteParser); \
@@ -115,7 +115,7 @@ class Document;
class DocumentSource : public IntrusiveCounterUnsigned {
public:
- using Parser = stdx::function<std::list<boost::intrusive_ptr<DocumentSource>>(
+ using Parser = stdx::function<std::vector<boost::intrusive_ptr<DocumentSource>>(
BSONElement, const boost::intrusive_ptr<ExpressionContext>&)>;
/**
@@ -303,7 +303,7 @@ public:
/**
* Create a DocumentSource pipeline stage from 'stageObj'.
*/
- static std::list<boost::intrusive_ptr<DocumentSource>> parse(
+ static std::vector<boost::intrusive_ptr<DocumentSource>> parse(
const boost::intrusive_ptr<ExpressionContext>& expCtx, BSONObj stageObj);
/**
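
The std::list to std::vector switch above applies to DocumentSource::parse() and to every multi-stage alias touched by this revert ($bucket, $count, $changeNotification), which is why the tests below move from front()/back() to indexed access. A minimal, self-contained sketch of that call-site difference; Stage here is an illustrative stand-in, not a MongoDB type:

    #include <cassert>
    #include <list>
    #include <memory>
    #include <string>
    #include <vector>

    // Illustrative stand-in for boost::intrusive_ptr<DocumentSource>.
    using Stage = std::shared_ptr<std::string>;

    int main() {
        // Before the revert: aliases expanded into a std::list, so callers
        // inspected the expansion with front() and back().
        std::list<Stage> asList = {std::make_shared<std::string>("$match"),
                                   std::make_shared<std::string>("$changeNotification")};
        assert(*asList.front() == "$match");
        assert(*asList.back() == "$changeNotification");

        // After the revert: a std::vector allows random access, so the test
        // code indexes result[0] and result[1] instead.
        std::vector<Stage> asVector(asList.begin(), asList.end());
        assert(*asVector[0] == "$match");
        assert(*asVector[1] == "$changeNotification");
        return 0;
    }
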
diff --git a/src/mongo/db/pipeline/document_source_bucket.cpp b/src/mongo/db/pipeline/document_source_bucket.cpp
index e209834b14d..853ef0aaa42 100644
--- a/src/mongo/db/pipeline/document_source_bucket.cpp
+++ b/src/mongo/db/pipeline/document_source_bucket.cpp
@@ -37,7 +37,6 @@ namespace mongo {
using boost::intrusive_ptr;
using std::vector;
-using std::list;
REGISTER_MULTI_STAGE_ALIAS(bucket,
LiteParsedDocumentSourceDefault::parse,
@@ -53,7 +52,7 @@ intrusive_ptr<ExpressionConstant> getExpressionConstant(
}
} // namespace
-list<intrusive_ptr<DocumentSource>> DocumentSourceBucket::createFromBson(
+vector<intrusive_ptr<DocumentSource>> DocumentSourceBucket::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& pExpCtx) {
uassert(40201,
str::stream() << "Argument to $bucket stage must be an object, but found type: "
diff --git a/src/mongo/db/pipeline/document_source_bucket.h b/src/mongo/db/pipeline/document_source_bucket.h
index e059660fc99..aa83ee07cc2 100644
--- a/src/mongo/db/pipeline/document_source_bucket.h
+++ b/src/mongo/db/pipeline/document_source_bucket.h
@@ -40,7 +40,7 @@ public:
/**
* Returns a $group stage followed by a $sort stage.
*/
- static std::list<boost::intrusive_ptr<DocumentSource>> createFromBson(
+ static std::vector<boost::intrusive_ptr<DocumentSource>> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
private:
diff --git a/src/mongo/db/pipeline/document_source_bucket_test.cpp b/src/mongo/db/pipeline/document_source_bucket_test.cpp
index 26f18c4dca9..1a61934dcb5 100644
--- a/src/mongo/db/pipeline/document_source_bucket_test.cpp
+++ b/src/mongo/db/pipeline/document_source_bucket_test.cpp
@@ -47,22 +47,21 @@
namespace mongo {
namespace {
-using boost::intrusive_ptr;
-using std::list;
using std::vector;
+using boost::intrusive_ptr;
class BucketReturnsGroupAndSort : public AggregationContextFixture {
public:
void testCreateFromBsonResult(BSONObj bucketSpec, Value expectedGroupExplain) {
- list<intrusive_ptr<DocumentSource>> result =
+ vector<intrusive_ptr<DocumentSource>> result =
DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), getExpCtx());
ASSERT_EQUALS(result.size(), 2UL);
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result.front().get());
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
ASSERT(groupStage);
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result.back().get());
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
ASSERT(sortStage);
// Serialize the DocumentSourceGroup and DocumentSourceSort from $bucket so that we can
@@ -155,7 +154,7 @@ TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWithMultipleBoundaryValues) {
class InvalidBucketSpec : public AggregationContextFixture {
public:
- list<intrusive_ptr<DocumentSource>> createBucket(BSONObj bucketSpec) {
+ vector<intrusive_ptr<DocumentSource>> createBucket(BSONObj bucketSpec) {
auto sources = DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), getExpCtx());
return sources;
}
@@ -268,14 +267,14 @@ TEST_F(InvalidBucketSpec, GroupFailsForBucketWithInvalidOutputField) {
TEST_F(InvalidBucketSpec, SwitchFailsForBucketWhenNoDefaultSpecified) {
const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 3]}}");
- list<intrusive_ptr<DocumentSource>> bucketStages = createBucket(spec);
+ vector<intrusive_ptr<DocumentSource>> bucketStages = createBucket(spec);
ASSERT_EQUALS(bucketStages.size(), 2UL);
- auto* groupStage = dynamic_cast<DocumentSourceGroup*>(bucketStages.front().get());
+ auto* groupStage = dynamic_cast<DocumentSourceGroup*>(bucketStages[0].get());
ASSERT(groupStage);
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(bucketStages.back().get());
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(bucketStages[1].get());
ASSERT(sortStage);
auto doc = Document{{"x", 4}};
diff --git a/src/mongo/db/pipeline/document_source_change_notification.cpp b/src/mongo/db/pipeline/document_source_change_notification.cpp
index d173d1183d5..dff06f05802 100644
--- a/src/mongo/db/pipeline/document_source_change_notification.cpp
+++ b/src/mongo/db/pipeline/document_source_change_notification.cpp
@@ -30,46 +30,30 @@
#include "mongo/db/pipeline/document_source_change_notification.h"
-#include "mongo/bson/simple_bsonelement_comparator.h"
#include "mongo/db/pipeline/document_source_limit.h"
-#include "mongo/db/pipeline/document_source_lookup_change_post_image.h"
#include "mongo/db/pipeline/document_source_match.h"
#include "mongo/db/pipeline/document_source_sort.h"
#include "mongo/db/pipeline/expression.h"
#include "mongo/db/pipeline/lite_parsed_document_source.h"
-#include "mongo/db/repl/oplog_entry.h"
-#include "mongo/db/repl/oplog_entry_gen.h"
#include "mongo/util/log.h"
namespace mongo {
using boost::intrusive_ptr;
using boost::optional;
-using std::list;
-using std::string;
using std::vector;
+using std::string;
+
+constexpr StringData kNamespaceField = "ns"_sd;
+constexpr StringData kTimestmapField = "ts"_sd;
+constexpr StringData kOpTypeField = "op"_sd;
+constexpr StringData kOField = "o"_sd;
+constexpr StringData kIdField = "_id"_sd;
-// The $changeNotification stage is an alias for many stages, but we need to be able to serialize
-// and re-parse the pipeline. To make this work, the 'transformation' stage will serialize itself
-// with the original specification, and all other stages that are created during the alias expansion
-// will not serialize themselves.
REGISTER_MULTI_STAGE_ALIAS(changeNotification,
DocumentSourceChangeNotification::LiteParsed::parse,
DocumentSourceChangeNotification::createFromBson);
-constexpr StringData DocumentSourceChangeNotification::kDocumentKeyField;
-constexpr StringData DocumentSourceChangeNotification::kFullDocumentField;
-constexpr StringData DocumentSourceChangeNotification::kIdField;
-constexpr StringData DocumentSourceChangeNotification::kNamespaceField;
-constexpr StringData DocumentSourceChangeNotification::kOperationTypeField;
-constexpr StringData DocumentSourceChangeNotification::kStageName;
-constexpr StringData DocumentSourceChangeNotification::kTimestmapField;
-constexpr StringData DocumentSourceChangeNotification::kUpdateOpType;
-constexpr StringData DocumentSourceChangeNotification::kDeleteOpType;
-constexpr StringData DocumentSourceChangeNotification::kReplaceOpType;
-constexpr StringData DocumentSourceChangeNotification::kInsertOpType;
-constexpr StringData DocumentSourceChangeNotification::kInvalidateOpType;
-
namespace {
static constexpr StringData kOplogMatchExplainName = "$_internalOplogMatch"_sd;
@@ -143,7 +127,7 @@ BSONObj DocumentSourceChangeNotification::buildMatchFilter(const NamespaceString
return BSON("ts" << GT << Timestamp() << "$or" << BSON_ARRAY(opMatch << commandMatch));
}
-list<intrusive_ptr<DocumentSource>> DocumentSourceChangeNotification::createFromBson(
+vector<intrusive_ptr<DocumentSource>> DocumentSourceChangeNotification::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& expCtx) {
// TODO: Add sharding support here (SERVER-29141).
uassert(40470,
@@ -153,54 +137,17 @@ list<intrusive_ptr<DocumentSource>> DocumentSourceChangeNotification::createFrom
"Only default collation is allowed when using a $changeNotification stage.",
!expCtx->getCollator());
- uassert(40573,
- str::stream() << "the $changeNotification stage must be specified as an object, got "
- << typeName(elem.type()),
- elem.type() == BSONType::Object);
-
- bool shouldLookupPostImage = false;
- for (auto&& option : elem.embeddedObject()) {
- auto optionName = option.fieldNameStringData();
- if (optionName == "fullDocument"_sd) {
- uassert(40574,
- str::stream() << "the 'fullDocument' option to the $changeNotification stage "
- "must be a string, got "
- << typeName(option.type()),
- option.type() == BSONType::String);
- auto fullDocOption = option.valueStringData();
- uassert(40575,
- str::stream() << "unrecognized value for the 'fullDocument' option to the "
- "$changeNotification stage. Expected \"none\" or "
- "\"fullDocument\", got \""
- << option.String()
- << "\"",
- fullDocOption == "lookup"_sd || fullDocOption == "none"_sd);
- shouldLookupPostImage = (fullDocOption == "lookup"_sd);
- } else if (optionName == "resumeAfter"_sd) {
- uasserted(
- 40576,
- "the 'resumeAfter' option to the $changeNotification stage is not yet supported");
- } else {
- uasserted(40577,
- str::stream() << "unrecognized option to $changeNotification stage: \""
- << optionName
- << "\"");
- }
- }
+ BSONObj filter = buildMatchFilter(expCtx->ns);
- auto oplogMatch = DocumentSourceOplogMatch::create(buildMatchFilter(expCtx->ns), expCtx);
- auto transformation = createTransformationStage(elem.embeddedObject(), expCtx);
- list<intrusive_ptr<DocumentSource>> stages = {oplogMatch, transformation};
- if (shouldLookupPostImage) {
- stages.push_back(DocumentSourceLookupChangePostImage::create(expCtx));
- }
- return stages;
+ auto oplogMatch = DocumentSourceOplogMatch::create(filter, expCtx);
+ auto transformation = createTransformationStage(expCtx);
+ return {oplogMatch, transformation};
}
intrusive_ptr<DocumentSource> DocumentSourceChangeNotification::createTransformationStage(
- BSONObj changeNotificationSpec, const intrusive_ptr<ExpressionContext>& expCtx) {
+ const intrusive_ptr<ExpressionContext>& expCtx) {
return intrusive_ptr<DocumentSource>(new DocumentSourceSingleDocumentTransformation(
- expCtx, stdx::make_unique<Transformation>(changeNotificationSpec), kStageName.toString()));
+ expCtx, stdx::make_unique<Transformation>(), "$changeNotification"));
}
namespace {
@@ -220,103 +167,89 @@ Document DocumentSourceChangeNotification::Transformation::applyTransformation(
MutableDocument doc;
// Extract the fields we need.
- checkValueType(input[repl::OplogEntry::kOpTypeFieldName],
- repl::OplogEntry::kOpTypeFieldName,
- BSONType::String);
- string op = input[repl::OplogEntry::kOpTypeFieldName].getString();
- Value ts = input[repl::OplogEntry::kTimestampFieldName];
- Value ns = input[repl::OplogEntry::kNamespaceFieldName];
- checkValueType(ns, repl::OplogEntry::kNamespaceFieldName, BSONType::String);
+ checkValueType(input[kOpTypeField], kOpTypeField, BSONType::String);
+ string op = input[kOpTypeField].getString();
+ Value ts = input[kTimestmapField];
+ Value ns = input[kNamespaceField];
+ checkValueType(ns, kNamespaceField, BSONType::String);
NamespaceString nss(ns.getString());
Value id = input.getNestedField("o._id");
// Non-replace updates have the _id in field "o2".
Value documentId = id.missing() ? input.getNestedField("o2._id") : id;
- StringData operationType;
- Value fullDocument = Value(BSONNULL);
+ string operationType;
+ Value newDocument;
Value updateDescription;
// Deal with CRUD operations and commands.
- auto opType = repl::OpType_parse(IDLParserErrorContext("ChangeNotificationEntry.op"), op);
- switch (opType) {
- case repl::OpTypeEnum::kInsert: {
- operationType = kInsertOpType;
- fullDocument = input[repl::OplogEntry::kObjectFieldName];
- break;
- }
- case repl::OpTypeEnum::kDelete: {
- operationType = kDeleteOpType;
- break;
- }
- case repl::OpTypeEnum::kUpdate: {
- if (id.missing()) {
- operationType = kUpdateOpType;
- checkValueType(input[repl::OplogEntry::kObjectFieldName],
- repl::OplogEntry::kObjectFieldName,
- BSONType::Object);
- Document opObject = input[repl::OplogEntry::kObjectFieldName].getDocument();
- Value updatedFields = opObject["$set"];
- Value removedFields = opObject["$unset"];
-
- // Extract the field names of $unset document.
- vector<Value> removedFieldsVector;
- if (removedFields.getType() == BSONType::Object) {
- auto iter = removedFields.getDocument().fieldIterator();
- while (iter.more()) {
- removedFieldsVector.push_back(Value(iter.next().first));
- }
+ if (op == "i") {
+ operationType = "insert";
+ newDocument = input[kOField];
+ } else if (op == "d") {
+ operationType = "delete";
+ } else if (op == "u") {
+ if (id.missing()) {
+ operationType = "update";
+ checkValueType(input[kOField], kOField, BSONType::Object);
+ Document o = input[kOField].getDocument();
+ Value updatedFields = o["$set"];
+ Value removedFields = o["$unset"];
+
+ // Extract the field names of $unset document.
+ vector<Value> removedFieldsVector;
+ if (removedFields.getType() == BSONType::Object) {
+ auto iter = removedFields.getDocument().fieldIterator();
+ while (iter.more()) {
+ removedFieldsVector.push_back(Value(iter.next().first));
}
- updateDescription = Value(Document{
- {"updatedFields", updatedFields.missing() ? Value(Document()) : updatedFields},
- {"removedFields", removedFieldsVector}});
- } else {
- operationType = kReplaceOpType;
- fullDocument = input[repl::OplogEntry::kObjectFieldName];
}
- break;
- }
- case repl::OpTypeEnum::kCommand: {
- operationType = kInvalidateOpType;
- // Make sure the result doesn't have a document id.
- documentId = Value();
- break;
+ updateDescription = Value(Document{
+ {"updatedFields", updatedFields.missing() ? Value(Document()) : updatedFields},
+ {"removedFields", removedFieldsVector}});
+ } else {
+ operationType = "replace";
+ newDocument = input[kOField];
}
- default: { MONGO_UNREACHABLE; }
+ } else if (op == "c") {
+ operationType = "invalidate";
+ // Make sure the result doesn't have a document id.
+ documentId = Value();
}
- // Construct the result document. Note that 'documentId' might be the missing value, in which
- // case it will not appear in the output.
+ // Construct the result document. If document id is missing, it will not appear in the output.
doc.addField(
kIdField,
Value(Document{{kTimestmapField, ts}, {kNamespaceField, ns}, {kIdField, documentId}}));
- doc.addField(kOperationTypeField, Value(operationType));
- doc.addField(kFullDocumentField, fullDocument);
+ doc.addField("operationType", Value(operationType));
// "invalidate" entry has fewer fields.
- if (opType == repl::OpTypeEnum::kCommand) {
+ if (op == "c") {
return doc.freeze();
}
+ // Add fields for normal operations.
doc.addField(kNamespaceField, Value(Document{{"db", nss.db()}, {"coll", nss.coll()}}));
- doc.addField(kDocumentKeyField, Value(Document{{kIdField, documentId}}));
+ doc.addField("documentKey", Value(Document{{kIdField, documentId}}));
- // Note that 'updateDescription' might be the 'missing' value, in which case it will not be
- // serialized.
+ // If newDocument or updateDescription is missing, it will not be serialized.
+ doc.addField("newDocument", newDocument);
doc.addField("updateDescription", updateDescription);
return doc.freeze();
}
Document DocumentSourceChangeNotification::Transformation::serializeStageOptions(
boost::optional<ExplainOptions::Verbosity> explain) const {
- return Document(_changeNotificationSpec);
+ // TODO SERVER-29135 Be sure to re-serialize the 'postImage' argument.
+ // TODO SERVER-29131 Be sure to re-serialize the 'resumeAfter' argument.
+ return Document();
}
DocumentSource::GetDepsReturn DocumentSourceChangeNotification::Transformation::addDependencies(
DepsTracker* deps) const {
- deps->fields.insert(repl::OplogEntry::kOpTypeFieldName.toString());
- deps->fields.insert(repl::OplogEntry::kTimestampFieldName.toString());
- deps->fields.insert(repl::OplogEntry::kNamespaceFieldName.toString());
- deps->fields.insert(repl::OplogEntry::kObjectFieldName.toString());
- deps->fields.insert(repl::OplogEntry::kObject2FieldName.toString());
+ deps->fields.insert(kOpTypeField.toString());
+ deps->fields.insert(kTimestmapField.toString());
+ deps->fields.insert(kNamespaceField.toString());
+ deps->fields.insert(kOField.toString());
+ deps->fields.insert("o2");
return DocumentSource::GetDepsReturn::EXHAUSTIVE_ALL;
}
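
With the revert applied, applyTransformation() branches directly on the oplog op code instead of the parsed OpType enum, and no post image is looked up: inserts and replaces carry the document in "newDocument", non-replace updates carry an "updateDescription", and command entries become "invalidate" with a reduced output document. A self-contained sketch of that mapping, assuming plain standard C++ stand-ins for the real Document/Value fields and uassert checks:

    #include <iostream>
    #include <string>

    // Map an oplog op code to the notification's operationType, mirroring the
    // post-revert branching above. 'idMissingInO' stands in for the real check
    // that "o._id" is absent (a non-replace update keeps its _id in "o2").
    std::string operationTypeFor(const std::string& op, bool idMissingInO) {
        if (op == "i")
            return "insert";      // full document emitted as "newDocument"
        if (op == "d")
            return "delete";
        if (op == "u")
            return idMissingInO ? "update"    // $set/$unset summarized in "updateDescription"
                                : "replace";  // replacement emitted as "newDocument"
        if (op == "c")
            return "invalidate";  // command entries drop the document id and extra fields
        return "unknown";         // not present in the real code; sketch only
    }

    int main() {
        std::cout << operationTypeFor("u", /*idMissingInO=*/true) << "\n";   // update
        std::cout << operationTypeFor("u", /*idMissingInO=*/false) << "\n";  // replace
        return 0;
    }
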
diff --git a/src/mongo/db/pipeline/document_source_change_notification.h b/src/mongo/db/pipeline/document_source_change_notification.h
index ad10a5ad210..3b38a908626 100644
--- a/src/mongo/db/pipeline/document_source_change_notification.h
+++ b/src/mongo/db/pipeline/document_source_change_notification.h
@@ -51,8 +51,6 @@ public:
}
stdx::unordered_set<NamespaceString> getInvolvedNamespaces() const final {
- // TODO SERVER-29138: we need to communicate that this stage will need to look up
- // documents from different collections.
return stdx::unordered_set<NamespaceString>();
}
@@ -64,8 +62,6 @@ public:
class Transformation : public DocumentSourceSingleDocumentTransformation::TransformerInterface {
public:
- Transformation(BSONObj changeNotificationSpec)
- : _changeNotificationSpec(changeNotificationSpec.getOwned()) {}
~Transformation() = default;
Document applyTransformation(const Document& input) final;
TransformerType getType() const final {
@@ -76,48 +72,10 @@ public:
boost::optional<ExplainOptions::Verbosity> explain) const final;
DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const final;
DocumentSource::GetModPathsReturn getModifiedPaths() const final;
-
- private:
- BSONObj _changeNotificationSpec;
};
- // The name of the field where the document key (_id and shard key, if present) will be found
- // after the transformation.
- static constexpr StringData kDocumentKeyField = "documentKey"_sd;
-
- // The name of the field where the full document will be found after the transformation. The
- // full document is only present for certain types of operations, such as an insert.
- static constexpr StringData kFullDocumentField = "fullDocument"_sd;
-
- // The name of the field where the change identifier will be located after the transformation.
- static constexpr StringData kIdField = "_id"_sd;
-
- // The name of the field where the namespace of the change will be located after the
- // transformation.
- static constexpr StringData kNamespaceField = "ns"_sd;
-
- // The name of the field where the type of the operation will be located after the
- // transformation.
- static constexpr StringData kOperationTypeField = "operationType"_sd;
-
- // The name of this stage.
- static constexpr StringData kStageName = "$changeNotification"_sd;
-
- // The name of the field where the timestamp of the change will be located after the
- // transformation. The timestamp will be located inside the change identifier, so the full path
- // to the timestamp will be kIdField + "." + kTimestampField.
- static constexpr StringData kTimestmapField = "ts"_sd;
-
- // The different types of operations we can use for the operation type.
- static constexpr StringData kUpdateOpType = "update"_sd;
- static constexpr StringData kDeleteOpType = "delete"_sd;
- static constexpr StringData kReplaceOpType = "replace"_sd;
- static constexpr StringData kInsertOpType = "insert"_sd;
- static constexpr StringData kInvalidateOpType = "invalidate"_sd;
-
/**
- * Produce the BSON object representing the filter for the $match stage to filter oplog entries
- * to only those relevant for this $changeNotification stage.
+ * Produce the BSON for the $match stage based on a $changeNotification stage.
*/
static BSONObj buildMatchFilter(const NamespaceString& nss);
@@ -125,11 +83,11 @@ public:
* Parses a $changeNotification stage from 'elem' and produces the $match and transformation
* stages required.
*/
- static std::list<boost::intrusive_ptr<DocumentSource>> createFromBson(
+ static std::vector<boost::intrusive_ptr<DocumentSource>> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& expCtx);
static boost::intrusive_ptr<DocumentSource> createTransformationStage(
- BSONObj changeNotificationSpec, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
+ const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
private:
// It is illegal to construct a DocumentSourceChangeNotification directly, use createFromBson()
diff --git a/src/mongo/db/pipeline/document_source_change_notification_test.cpp b/src/mongo/db/pipeline/document_source_change_notification_test.cpp
index 9c08bd91637..b3595789577 100644
--- a/src/mongo/db/pipeline/document_source_change_notification_test.cpp
+++ b/src/mongo/db/pipeline/document_source_change_notification_test.cpp
@@ -49,18 +49,14 @@
namespace mongo {
namespace {
+using std::vector;
+using std::string;
using boost::intrusive_ptr;
-using repl::OpTypeEnum;
using repl::OplogEntry;
-using std::list;
-using std::string;
-using std::vector;
-
+using repl::OpTypeEnum;
using D = Document;
using V = Value;
-using DSChangeNotification = DocumentSourceChangeNotification;
-
static const Timestamp ts(100, 1);
static const repl::OpTime optime(ts, 1);
static const NamespaceString nss("unittests.change_notification");
@@ -71,10 +67,10 @@ public:
void checkTransformation(const OplogEntry& entry, const boost::optional<Document> expectedDoc) {
const auto spec = fromjson("{$changeNotification: {}}");
- list<intrusive_ptr<DocumentSource>> result =
- DSChangeNotification::createFromBson(spec.firstElement(), getExpCtx());
+ vector<intrusive_ptr<DocumentSource>> result =
+ DocumentSourceChangeNotification::createFromBson(spec.firstElement(), getExpCtx());
- auto match = dynamic_cast<DocumentSourceMatch*>(result.front().get());
+ auto match = dynamic_cast<DocumentSourceMatch*>(result[0].get());
ASSERT(match);
auto mock = DocumentSourceMock::create(D(entry.toBSON()));
match->setSource(mock.get());
@@ -82,7 +78,7 @@ public:
// Check the oplog entry is transformed correctly.
auto transform = result.back().get();
ASSERT(transform);
- ASSERT_EQ(string(transform->getSourceName()), DSChangeNotification::kStageName);
+ ASSERT_EQ(string(transform->getSourceName()), "$changeNotification");
transform->setSource(match);
auto next = transform->getNext();
@@ -98,62 +94,16 @@ public:
}
};
-TEST_F(ChangeNotificationStageTest, ShouldRejectUnrecognizedOption) {
- auto expCtx = getExpCtx();
-
- ASSERT_THROWS_CODE(
- DSChangeNotification::createFromBson(
- BSON(DSChangeNotification::kStageName << BSON("unexpected" << 4)).firstElement(),
- expCtx),
- UserException,
- 40577);
-}
-
-TEST_F(ChangeNotificationStageTest, ShouldRejectResumeAfterOption) {
- // TODO SERVER-29131 change this test to accept the option.
- auto expCtx = getExpCtx();
-
- ASSERT_THROWS_CODE(
- DSChangeNotification::createFromBson(
- BSON(DSChangeNotification::kStageName << BSON("resumeAfter" << ts)).firstElement(),
- expCtx),
- UserException,
- 40576);
-}
-
-TEST_F(ChangeNotificationStageTest, ShouldRejectNonStringFullDocumentOption) {
- auto expCtx = getExpCtx();
-
- ASSERT_THROWS_CODE(
- DSChangeNotification::createFromBson(
- BSON(DSChangeNotification::kStageName << BSON("fullDocument" << true)).firstElement(),
- expCtx),
- UserException,
- 40574);
-}
-
-TEST_F(ChangeNotificationStageTest, ShouldRejectUnrecognizedFullDocumentOption) {
- auto expCtx = getExpCtx();
-
- ASSERT_THROWS_CODE(DSChangeNotification::createFromBson(
- BSON(DSChangeNotification::kStageName << BSON("fullDocument"
- << "unrecognized"))
- .firstElement(),
- expCtx),
- UserException,
- 40575);
-}
-
TEST_F(ChangeNotificationStageTest, StagesGeneratedCorrectly) {
const auto spec = fromjson("{$changeNotification: {}}");
- list<intrusive_ptr<DocumentSource>> result =
- DSChangeNotification::createFromBson(spec.firstElement(), getExpCtx());
+ vector<intrusive_ptr<DocumentSource>> result =
+ DocumentSourceChangeNotification::createFromBson(spec.firstElement(), getExpCtx());
ASSERT_EQUALS(result.size(), 2UL);
- ASSERT_TRUE(dynamic_cast<DocumentSourceMatch*>(result.front().get()));
- ASSERT_EQUALS(string(result.front()->getSourceName()), DSChangeNotification::kStageName);
- ASSERT_EQUALS(string(result.back()->getSourceName()), DSChangeNotification::kStageName);
+ ASSERT_TRUE(dynamic_cast<DocumentSourceMatch*>(result[0].get()));
+ ASSERT_EQUALS(string(result[0]->getSourceName()), "$changeNotification");
+ ASSERT_EQUALS(string(result[1]->getSourceName()), "$changeNotification");
// TODO: Check explain result.
}
@@ -162,11 +112,11 @@ TEST_F(ChangeNotificationStageTest, TransformInsert) {
OplogEntry insert(optime, 1, OpTypeEnum::kInsert, nss, BSON("_id" << 1 << "x" << 1));
// Insert
Document expectedInsert{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kInsertOpType},
- {DSChangeNotification::kFullDocumentField, D{{"_id", 1}, {"x", 1}}},
- {DSChangeNotification::kNamespaceField, D{{"db", nss.db()}, {"coll", nss.coll()}}},
- {DSChangeNotification::kDocumentKeyField, D{{"_id", 1}}},
+ {"_id", D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
+ {"operationType", "insert"_sd},
+ {"ns", D{{"db", nss.db()}, {"coll", nss.coll()}}},
+ {"documentKey", D{{"_id", 1}}},
+ {"newDocument", D{{"_id", 1}, {"x", 1}}},
};
checkTransformation(insert, expectedInsert);
}
@@ -176,15 +126,13 @@ TEST_F(ChangeNotificationStageTest, TransformUpdateFields) {
optime, 1, OpTypeEnum::kUpdate, nss, BSON("$set" << BSON("y" << 1)), BSON("_id" << 1));
// Update fields
Document expectedUpdateField{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kUpdateOpType},
- {DSChangeNotification::kFullDocumentField, BSONNULL},
- {DSChangeNotification::kNamespaceField, D{{"db", nss.db()}, {"coll", nss.coll()}}},
- {DSChangeNotification::kDocumentKeyField, D{{"_id", 1}}},
+ {"_id", D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
+ {"operationType", "update"_sd},
+ {"ns", D{{"db", nss.db()}, {"coll", nss.coll()}}},
+ {"documentKey", D{{"_id", 1}}},
{
"updateDescription", D{{"updatedFields", D{{"y", 1}}}, {"removedFields", vector<V>()}},
- },
- };
+ }};
checkTransformation(updateField, expectedUpdateField);
}
@@ -193,11 +141,10 @@ TEST_F(ChangeNotificationStageTest, TransformRemoveFields) {
optime, 1, OpTypeEnum::kUpdate, nss, BSON("$unset" << BSON("y" << 1)), BSON("_id" << 1));
// Remove fields
Document expectedRemoveField{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kUpdateOpType},
- {DSChangeNotification::kFullDocumentField, BSONNULL},
- {DSChangeNotification::kNamespaceField, D{{"db", nss.db()}, {"coll", nss.coll()}}},
- {DSChangeNotification::kDocumentKeyField, D{{"_id", 1}}},
+ {"_id", D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
+ {"operationType", "update"_sd},
+ {"ns", D{{"db", nss.db()}, {"coll", nss.coll()}}},
+ {"documentKey", D{{"_id", 1}}},
{
"updateDescription", D{{"updatedFields", D{}}, {"removedFields", vector<V>{V("y"_sd)}}},
}};
@@ -209,11 +156,11 @@ TEST_F(ChangeNotificationStageTest, TransformReplace) {
optime, 1, OpTypeEnum::kUpdate, nss, BSON("_id" << 1 << "y" << 1), BSON("_id" << 1));
// Replace
Document expectedReplace{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kReplaceOpType},
- {DSChangeNotification::kFullDocumentField, D{{"_id", 1}, {"y", 1}}},
- {DSChangeNotification::kNamespaceField, D{{"db", nss.db()}, {"coll", nss.coll()}}},
- {DSChangeNotification::kDocumentKeyField, D{{"_id", 1}}},
+ {"_id", D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
+ {"operationType", "replace"_sd},
+ {"ns", D{{"db", nss.db()}, {"coll", nss.coll()}}},
+ {"documentKey", D{{"_id", 1}}},
+ {"newDocument", D{{"_id", 1}, {"y", 1}}},
};
checkTransformation(replace, expectedReplace);
}
@@ -222,11 +169,10 @@ TEST_F(ChangeNotificationStageTest, TransformDelete) {
OplogEntry deleteEntry(optime, 1, OpTypeEnum::kDelete, nss, BSON("_id" << 1));
// Delete
Document expectedDelete{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kDeleteOpType},
- {DSChangeNotification::kFullDocumentField, BSONNULL},
- {DSChangeNotification::kNamespaceField, D{{"db", nss.db()}, {"coll", nss.coll()}}},
- {DSChangeNotification::kDocumentKeyField, D{{"_id", 1}}},
+ {"_id", D{{"ts", ts}, {"ns", nss.ns()}, {"_id", 1}}},
+ {"operationType", "delete"_sd},
+ {"ns", D{{"db", nss.db()}, {"coll", nss.coll()}}},
+ {"documentKey", D{{"_id", 1}}},
};
checkTransformation(deleteEntry, expectedDelete);
}
@@ -241,9 +187,7 @@ TEST_F(ChangeNotificationStageTest, TransformInvalidate) {
// Invalidate entry includes $cmd namespace in _id and doesn't have a document id.
Document expectedInvalidate{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", nss.getCommandNS().ns()}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kInvalidateOpType},
- {DSChangeNotification::kFullDocumentField, BSONNULL},
+ {"_id", D{{"ts", ts}, {"ns", nss.getCommandNS().ns()}}}, {"operationType", "invalidate"_sd},
};
for (auto& entry : {dropColl, dropDB, rename}) {
checkTransformation(entry, expectedInvalidate);
@@ -259,9 +203,8 @@ TEST_F(ChangeNotificationStageTest, TransformInvalidateRenameDropTarget) {
otherColl.getCommandNS(),
BSON("renameCollection" << otherColl.ns() << "to" << nss.ns()));
Document expectedInvalidate{
- {DSChangeNotification::kIdField, D{{"ts", ts}, {"ns", otherColl.getCommandNS().ns()}}},
- {DSChangeNotification::kOperationTypeField, DSChangeNotification::kInvalidateOpType},
- {DSChangeNotification::kFullDocumentField, BSONNULL},
+ {"_id", D{{"ts", ts}, {"ns", otherColl.getCommandNS().ns()}}},
+ {"operationType", "invalidate"_sd},
};
checkTransformation(rename, expectedInvalidate);
}
@@ -287,38 +230,5 @@ TEST_F(ChangeNotificationStageTest, MatchFiltersCreateIndex) {
checkTransformation(createIndex, boost::none);
}
-TEST_F(ChangeNotificationStageTest, TransformationShouldBeAbleToReParseSerializedStage) {
- auto expCtx = getExpCtx();
-
- auto originalSpec = BSON(DSChangeNotification::kStageName << BSONObj());
- auto allStages = DSChangeNotification::createFromBson(originalSpec.firstElement(), expCtx);
- ASSERT_EQ(allStages.size(), 2UL);
- auto stage = allStages.back();
- ASSERT(dynamic_cast<DocumentSourceSingleDocumentTransformation*>(stage.get()));
-
- //
- // Serialize the stage and confirm contents.
- //
- vector<Value> serialization;
- stage->serializeToArray(serialization);
- ASSERT_EQ(serialization.size(), 1UL);
- ASSERT_EQ(serialization[0].getType(), BSONType::Object);
- auto serializedDoc = serialization[0].getDocument();
- ASSERT_BSONOBJ_EQ(serializedDoc.toBson(), originalSpec);
-
- //
- // Create a new stage from the serialization. Serialize the new stage and confirm that it is
- // equivalent to the original serialization.
- //
- auto serializedBson = serializedDoc.toBson();
- auto roundTripped = uassertStatusOK(Pipeline::create(
- DSChangeNotification::createFromBson(serializedBson.firstElement(), expCtx), expCtx));
-
- auto newSerialization = roundTripped->serialize();
-
- ASSERT_EQ(newSerialization.size(), 1UL);
- ASSERT_VALUE_EQ(newSerialization[0], serialization[0]);
-}
-
} // namespace
} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_count.cpp b/src/mongo/db/pipeline/document_source_count.cpp
index 0a2d299200b..01c4044fece 100644
--- a/src/mongo/db/pipeline/document_source_count.cpp
+++ b/src/mongo/db/pipeline/document_source_count.cpp
@@ -39,14 +39,14 @@
namespace mongo {
using boost::intrusive_ptr;
-using std::list;
+using std::vector;
using std::string;
REGISTER_MULTI_STAGE_ALIAS(count,
LiteParsedDocumentSourceDefault::parse,
DocumentSourceCount::createFromBson);
-list<intrusive_ptr<DocumentSource>> DocumentSourceCount::createFromBson(
+vector<intrusive_ptr<DocumentSource>> DocumentSourceCount::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& pExpCtx) {
uassert(40156,
str::stream() << "the count field must be a non-empty string",
diff --git a/src/mongo/db/pipeline/document_source_count.h b/src/mongo/db/pipeline/document_source_count.h
index e77ce3eb0ed..50fbccdf41b 100644
--- a/src/mongo/db/pipeline/document_source_count.h
+++ b/src/mongo/db/pipeline/document_source_count.h
@@ -40,7 +40,7 @@ public:
/**
* Returns a $group stage followed by a $project stage.
*/
- static std::list<boost::intrusive_ptr<DocumentSource>> createFromBson(
+ static std::vector<boost::intrusive_ptr<DocumentSource>> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
private:
diff --git a/src/mongo/db/pipeline/document_source_count_test.cpp b/src/mongo/db/pipeline/document_source_count_test.cpp
index 254f011ff1d..e083399b0fa 100644
--- a/src/mongo/db/pipeline/document_source_count_test.cpp
+++ b/src/mongo/db/pipeline/document_source_count_test.cpp
@@ -45,23 +45,22 @@
namespace mongo {
namespace {
-using boost::intrusive_ptr;
-using std::list;
using std::vector;
+using boost::intrusive_ptr;
class CountReturnsGroupAndProjectStages : public AggregationContextFixture {
public:
void testCreateFromBsonResult(BSONObj countSpec) {
- list<intrusive_ptr<DocumentSource>> result =
+ vector<intrusive_ptr<DocumentSource>> result =
DocumentSourceCount::createFromBson(countSpec.firstElement(), getExpCtx());
ASSERT_EQUALS(result.size(), 2UL);
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result.front().get());
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
ASSERT(groupStage);
const auto* projectStage =
- dynamic_cast<DocumentSourceSingleDocumentTransformation*>(result.back().get());
+ dynamic_cast<DocumentSourceSingleDocumentTransformation*>(result[1].get());
ASSERT(projectStage);
auto explain = ExplainOptions::Verbosity::kQueryPlanner;
@@ -95,7 +94,7 @@ TEST_F(CountReturnsGroupAndProjectStages, ValidStringSpec) {
class InvalidCountSpec : public AggregationContextFixture {
public:
- list<intrusive_ptr<DocumentSource>> createCount(BSONObj countSpec) {
+ vector<intrusive_ptr<DocumentSource>> createCount(BSONObj countSpec) {
auto specElem = countSpec.firstElement();
return DocumentSourceCount::createFromBson(specElem, getExpCtx());
}
diff --git a/src/mongo/db/pipeline/document_source_lookup_change_post_image.cpp b/src/mongo/db/pipeline/document_source_lookup_change_post_image.cpp
deleted file mode 100644
index ee605710a54..00000000000
--- a/src/mongo/db/pipeline/document_source_lookup_change_post_image.cpp
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Copyright (C) 2017 MongoDB Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the GNU Affero General Public License in all respects
- * for all of the code used other than as permitted herein. If you modify
- * file(s) with this exception, you may extend this exception to your
- * version of the file(s), but you are not obligated to do so. If you do not
- * wish to do so, delete this exception statement from your version. If you
- * delete this exception statement from all source files in the program,
- * then also delete it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include "mongo/db/pipeline/document_source_lookup_change_post_image.h"
-
-#include "mongo/bson/simple_bsonelement_comparator.h"
-
-namespace mongo {
-
-constexpr StringData DocumentSourceLookupChangePostImage::kStageName;
-constexpr StringData DocumentSourceLookupChangePostImage::kFullDocumentFieldName;
-
-namespace {
-Value assertFieldHasType(const Document& fullDoc, StringData fieldName, BSONType expectedType) {
- auto val = fullDoc[fieldName];
- uassert(40578,
- str::stream() << "failed to look up post image after change: expected \"" << fieldName
- << "\" field to have type "
- << typeName(expectedType)
- << ", instead found type "
- << typeName(val.getType())
- << ": "
- << val.toString()
- << ", full object: "
- << fullDoc.toString(),
- val.getType() == expectedType);
- return val;
-}
-} // namespace
-
-DocumentSource::GetNextResult DocumentSourceLookupChangePostImage::getNext() {
- pExpCtx->checkForInterrupt();
-
- auto input = pSource->getNext();
- if (!input.isAdvanced()) {
- return input;
- }
- auto opTypeVal = assertFieldHasType(input.getDocument(),
- DocumentSourceChangeNotification::kOperationTypeField,
- BSONType::String);
- if (opTypeVal.getString() != DocumentSourceChangeNotification::kUpdateOpType) {
- return input;
- }
-
- MutableDocument output(input.releaseDocument());
- output[kFullDocumentFieldName] = lookupPostImage(output.peek());
- return output.freeze();
-}
-
-NamespaceString DocumentSourceLookupChangePostImage::assertNamespaceMatches(
- const Document& inputDoc) const {
- auto namespaceObject = assertFieldHasType(inputDoc,
- DocumentSourceChangeNotification::kNamespaceField,
- BSONType::Object)
- .getDocument();
- auto dbName = assertFieldHasType(namespaceObject, "db"_sd, BSONType::String);
- auto collectionName = assertFieldHasType(namespaceObject, "coll"_sd, BSONType::String);
- NamespaceString nss(dbName.getString(), collectionName.getString());
- uassert(40579,
- str::stream() << "unexpected namespace during post image lookup: " << nss.ns()
- << ", expected "
- << pExpCtx->ns.ns(),
- nss == pExpCtx->ns);
- return nss;
-}
-
-Value DocumentSourceLookupChangePostImage::lookupPostImage(const Document& updateOp) const {
- // Make sure we have a well-formed input.
- auto nss = assertNamespaceMatches(updateOp);
-
- auto documentKey = assertFieldHasType(
- updateOp, DocumentSourceChangeNotification::kDocumentKeyField, BSONType::Object);
- auto matchSpec = BSON("$match" << documentKey);
-
- // TODO SERVER-29134 we need to extract the namespace from the document and set them on the new
- // ExpressionContext if we're getting notifications from an entire database.
- auto foreignExpCtx = pExpCtx->copyWith(nss);
- auto pipeline = uassertStatusOK(_mongod->makePipeline({matchSpec}, foreignExpCtx));
-
- if (auto first = pipeline->getNext()) {
- auto lookedUpDocument = Value(*first);
- if (auto next = pipeline->getNext()) {
- uasserted(40580,
- str::stream() << "found more than document with documentKey "
- << documentKey.toString()
- << " while looking up post image after change: ["
- << lookedUpDocument.toString()
- << ", "
- << next->toString()
- << "]");
- }
- return lookedUpDocument;
- }
- // We couldn't find it with the documentKey, it may have been deleted.
- return Value(BSONNULL);
-}
-
-} // namespace mongo
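
The file deleted above held the post-image lookup that this revert removes: for update notifications it re-queried the collection named in "ns" using "documentKey", returned BSONNULL when the document could no longer be found, and raised error 40580 when more than one document matched. A self-contained sketch of that contract, with a std::vector standing in for the foreign collection and an exception standing in for uasserted():

    #include <iostream>
    #include <optional>
    #include <stdexcept>
    #include <vector>

    // Hypothetical stand-in for the real Document/Value machinery.
    struct Doc {
        int id;
    };

    // Look up the single document matching 'key', mirroring lookupPostImage():
    // no match -> "null" post image (the document may have been deleted since
    // the update); more than one match -> error, the documentKey should be unique.
    std::optional<Doc> lookupPostImage(const std::vector<Doc>& collection, int key) {
        std::optional<Doc> found;
        for (const auto& doc : collection) {
            if (doc.id != key)
                continue;
            if (found)
                throw std::runtime_error("found more than one document with that documentKey");
            found = doc;
        }
        return found;  // std::nullopt plays the role of Value(BSONNULL)
    }

    int main() {
        std::vector<Doc> coll = {{0}, {1}};
        auto post = lookupPostImage(coll, 1);
        std::cout << (post ? "found _id=1" : "missing") << "\n";
        return 0;
    }
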
diff --git a/src/mongo/db/pipeline/document_source_lookup_change_post_image.h b/src/mongo/db/pipeline/document_source_lookup_change_post_image.h
deleted file mode 100644
index ca14656a63c..00000000000
--- a/src/mongo/db/pipeline/document_source_lookup_change_post_image.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Copyright (C) 2017 MongoDB Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the GNU Affero General Public License in all respects
- * for all of the code used other than as permitted herein. If you modify
- * file(s) with this exception, you may extend this exception to your
- * version of the file(s), but you are not obligated to do so. If you do not
- * wish to do so, delete this exception statement from your version. If you
- * delete this exception statement from all source files in the program,
- * then also delete it in the license file.
- */
-
-#pragma once
-
-#include "mongo/db/pipeline/document_source.h"
-#include "mongo/db/pipeline/document_source_change_notification.h"
-
-namespace mongo {
-
-/**
- * Part of the change notification API machinery used to look up the post-image of a document. Uses
- * the "documentKey" field of the input to look up the new version of the document.
- *
- * Uses the ExpressionContext to determine what collection to look up into.
- * TODO SERVER-29134 When we allow change streams on multiple collections, this will need to change.
- */
-class DocumentSourceLookupChangePostImage final : public DocumentSourceNeedsMongod {
-public:
- static constexpr StringData kStageName = "$_internalLookupChangePostImage"_sd;
- static constexpr StringData kFullDocumentFieldName =
- DocumentSourceChangeNotification::kFullDocumentField;
-
- /**
- * Creates a DocumentSourceLookupChangePostImage stage.
- */
- static boost::intrusive_ptr<DocumentSourceLookupChangePostImage> create(
- const boost::intrusive_ptr<ExpressionContext>& expCtx) {
- return new DocumentSourceLookupChangePostImage(expCtx);
- }
-
- /**
- * Only modifies a single path: "fullDocument".
- */
- GetModPathsReturn getModifiedPaths() const final {
- return {GetModPathsReturn::Type::kFiniteSet, {kFullDocumentFieldName.toString()}, {}};
- }
-
- StageConstraints constraints() const final {
- StageConstraints constraints;
- constraints.canSwapWithMatch = true;
- constraints.isAllowedInsideFacetStage = false;
- return constraints;
- }
-
- GetDepsReturn getDependencies(DepsTracker* deps) const {
- // The namespace is not technically needed yet, but we will if there is more than one
- // collection involved.
- deps->fields.insert(DocumentSourceChangeNotification::kNamespaceField.toString());
- deps->fields.insert(DocumentSourceChangeNotification::kDocumentKeyField.toString());
- deps->fields.insert(DocumentSourceChangeNotification::kOperationTypeField.toString());
- // This stage does not restrict the output fields to a finite set, and has no impact on
- // whether metadata is available or needed.
- return SEE_NEXT;
- }
-
- /**
- * Performs the lookup to retrieve the full document.
- */
- GetNextResult getNext() final;
-
- Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final {
- if (explain) {
- return Value{Document{{kStageName, Document()}}};
- }
- return Value(); // Do not serialize this stage unless we're explaining.
- }
-
- const char* getSourceName() const final {
- return kStageName.rawData();
- }
-
-private:
- DocumentSourceLookupChangePostImage(const boost::intrusive_ptr<ExpressionContext>& expCtx)
- : DocumentSourceNeedsMongod(expCtx) {}
-
- /**
- * Uses the "documentKey" field from 'updateOp' to look up the current version of the document.
- * Returns Value(BSONNULL) if the document couldn't be found.
- */
- Value lookupPostImage(const Document& updateOp) const;
-
- /**
- * Throws a UserException if the namespace found in 'inputDoc' doesn't match the one on the
- * ExpressionContext.
- */
- NamespaceString assertNamespaceMatches(const Document& inputDoc) const;
-};
-
-} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_lookup_change_post_image_test.cpp b/src/mongo/db/pipeline/document_source_lookup_change_post_image_test.cpp
deleted file mode 100644
index 728db433c55..00000000000
--- a/src/mongo/db/pipeline/document_source_lookup_change_post_image_test.cpp
+++ /dev/null
@@ -1,266 +0,0 @@
-/**
- * Copyright (C) 2017 MongoDB Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the GNU Affero General Public License in all respects
- * for all of the code used other than as permitted herein. If you modify
- * file(s) with this exception, you may extend this exception to your
- * version of the file(s), but you are not obligated to do so. If you do not
- * wish to do so, delete this exception statement from your version. If you
- * delete this exception statement from all source files in the program,
- * then also delete it in the license file.
- */
-
-#include "mongo/platform/basic.h"
-
-#include <boost/intrusive_ptr.hpp>
-#include <deque>
-#include <vector>
-
-#include "mongo/bson/bsonmisc.h"
-#include "mongo/bson/bsonobj.h"
-#include "mongo/bson/bsonobjbuilder.h"
-#include "mongo/db/pipeline/aggregation_context_fixture.h"
-#include "mongo/db/pipeline/document.h"
-#include "mongo/db/pipeline/document_source_change_notification.h"
-#include "mongo/db/pipeline/document_source_lookup_change_post_image.h"
-#include "mongo/db/pipeline/document_source_mock.h"
-#include "mongo/db/pipeline/document_value_test_util.h"
-#include "mongo/db/pipeline/field_path.h"
-#include "mongo/db/pipeline/stub_mongod_interface.h"
-#include "mongo/db/pipeline/value.h"
-
-namespace mongo {
-namespace {
-using boost::intrusive_ptr;
-using std::deque;
-using std::vector;
-
-// This provides access to getExpCtx(), but we'll use a different name for this test suite.
-using DocumentSourceLookupChangePostImageTest = AggregationContextFixture;
-
-/**
- * A mock MongodInterface which allows mocking a foreign pipeline.
- */
-class MockMongodInterface final : public StubMongodInterface {
-public:
- MockMongodInterface(deque<DocumentSource::GetNextResult> mockResults)
- : _mockResults(std::move(mockResults)) {}
-
- bool isSharded(const NamespaceString& ns) final {
- return false;
- }
-
- StatusWith<std::unique_ptr<Pipeline, Pipeline::Deleter>> makePipeline(
- const std::vector<BSONObj>& rawPipeline,
- const boost::intrusive_ptr<ExpressionContext>& expCtx) final {
- auto pipeline = Pipeline::parse(rawPipeline, expCtx);
- if (!pipeline.isOK()) {
- return pipeline.getStatus();
- }
-
- pipeline.getValue()->addInitialSource(DocumentSourceMock::create(_mockResults));
- pipeline.getValue()->optimizePipeline();
-
- return pipeline;
- }
-
-private:
- deque<DocumentSource::GetNextResult> _mockResults;
-};
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfMissingDocumentKeyOnUpdate) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with a document without a "documentKey" field.
- auto mockLocalSource = DocumentSourceMock::create(
- Document{{"operationType", "update"_sd},
- {"fullDocument", Document{{"_id", 0}}},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(deque<DocumentSource::GetNextResult>{}));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40578);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfMissingOperationType) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with a document without a "ns" field.
- auto mockLocalSource = DocumentSourceMock::create(
- Document{{"documentKey", Document{{"_id", 0}}},
- {"fullDocument", Document{{"_id", 0}}},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(deque<DocumentSource::GetNextResult>{}));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40578);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfMissingNamespace) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with a document without a "ns" field.
- auto mockLocalSource = DocumentSourceMock::create(Document{
- {"documentKey", Document{{"_id", 0}}}, {"operationType", "update"_sd},
- });
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(deque<DocumentSource::GetNextResult>{}));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40578);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfNsFieldHasWrongType) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with a document without a "ns" field.
- auto mockLocalSource = DocumentSourceMock::create(
- Document{{"documentKey", Document{{"_id", 0}}}, {"operationType", "update"_sd}, {"ns", 4}});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(deque<DocumentSource::GetNextResult>{}));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40578);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfNsFieldDoesNotMatchPipeline) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with a document whose "ns" field does not match the pipeline's namespace.
- auto mockLocalSource = DocumentSourceMock::create(
- Document{{"documentKey", Document{{"_id", 0}}},
- {"operationType", "update"_sd},
- {"ns", Document{{"db", "DIFFERENT"_sd}, {"coll", expCtx->ns.coll()}}}});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(deque<DocumentSource::GetNextResult>{}));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40579);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldErrorIfDocumentKeyIsNotUnique) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input with an update document.
- auto mockLocalSource = DocumentSourceMock::create(
- Document{{"documentKey", Document{{"_id", 0}}},
- {"operationType", "update"_sd},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection to have two documents with the same document key.
- deque<DocumentSource::GetNextResult> foreignCollection = {Document{{"_id", 0}},
- Document{{"_id", 0}}};
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(std::move(foreignCollection)));
-
- ASSERT_THROWS_CODE(lookupChangeStage->getNext(), UserException, 40580);
-}
-
-TEST_F(DocumentSourceLookupChangePostImageTest, ShouldPropagatePauses) {
- auto expCtx = getExpCtx();
-
- // Set up the $lookup stage.
- auto lookupChangeStage = DocumentSourceLookupChangePostImage::create(expCtx);
-
- // Mock its input, pausing every other result.
- auto mockLocalSource = DocumentSourceMock::create(
- {Document{{"documentKey", Document{{"_id", 0}}},
- {"operationType", "insert"_sd},
- {"fullDocument", Document{{"_id", 0}}},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}},
- DocumentSource::GetNextResult::makePauseExecution(),
- Document{{"documentKey", Document{{"_id", 1}}},
- {"operationType", "update"_sd},
- {"fullDocument", BSONNULL},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}},
- DocumentSource::GetNextResult::makePauseExecution()});
-
- lookupChangeStage->setSource(mockLocalSource.get());
-
- // Mock out the foreign collection.
- deque<DocumentSource::GetNextResult> mockForeignContents{Document{{"_id", 0}},
- Document{{"_id", 1}}};
- lookupChangeStage->injectMongodInterface(
- std::make_shared<MockMongodInterface>(std::move(mockForeignContents)));
-
- auto next = lookupChangeStage->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(
- next.releaseDocument(),
- (Document{{"documentKey", Document{{"_id", 0}}},
- {"operationType", "insert"_sd},
- {"fullDocument", Document{{"_id", 0}}},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}}));
-
- ASSERT_TRUE(lookupChangeStage->getNext().isPaused());
-
- next = lookupChangeStage->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(
- next.releaseDocument(),
- (Document{{"documentKey", Document{{"_id", 1}}},
- {"operationType", "update"_sd},
- {"fullDocument", Document{{"_id", 1}}},
- {"ns", Document{{"db", expCtx->ns.db()}, {"coll", expCtx->ns.coll()}}}}));
-
- ASSERT_TRUE(lookupChangeStage->getNext().isPaused());
-
- ASSERT_TRUE(lookupChangeStage->getNext().isEOF());
- ASSERT_TRUE(lookupChangeStage->getNext().isEOF());
-}
-
-} // namespace
-} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_sort_by_count.cpp b/src/mongo/db/pipeline/document_source_sort_by_count.cpp
index 97c5e0f0297..48cfaa9f559 100644
--- a/src/mongo/db/pipeline/document_source_sort_by_count.cpp
+++ b/src/mongo/db/pipeline/document_source_sort_by_count.cpp
@@ -39,13 +39,13 @@
namespace mongo {
using boost::intrusive_ptr;
-using std::list;
+using std::vector;
REGISTER_MULTI_STAGE_ALIAS(sortByCount,
LiteParsedDocumentSourceDefault::parse,
DocumentSourceSortByCount::createFromBson);
-list<intrusive_ptr<DocumentSource>> DocumentSourceSortByCount::createFromBson(
+vector<intrusive_ptr<DocumentSource>> DocumentSourceSortByCount::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& pExpCtx) {
if (elem.type() == Object) {
// Make sure that the sortByCount field is an expression inside an object
diff --git a/src/mongo/db/pipeline/document_source_sort_by_count.h b/src/mongo/db/pipeline/document_source_sort_by_count.h
index c8196c9b498..69bf8d2c5e0 100644
--- a/src/mongo/db/pipeline/document_source_sort_by_count.h
+++ b/src/mongo/db/pipeline/document_source_sort_by_count.h
@@ -40,7 +40,7 @@ public:
/**
* Returns a $group stage followed by a $sort stage.
*/
- static std::list<boost::intrusive_ptr<DocumentSource>> createFromBson(
+ static std::vector<boost::intrusive_ptr<DocumentSource>> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
private:
diff --git a/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp b/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
index 12975e96149..3e0a007c59b 100644
--- a/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
+++ b/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
@@ -46,9 +46,8 @@
namespace mongo {
namespace {
-using boost::intrusive_ptr;
-using std::list;
using std::vector;
+using boost::intrusive_ptr;
/**
* Fixture to test that $sortByCount returns a DocumentSourceGroup and DocumentSourceSort.
@@ -56,15 +55,15 @@ using std::vector;
class SortByCountReturnsGroupAndSort : public AggregationContextFixture {
public:
void testCreateFromBsonResult(BSONObj sortByCountSpec, Value expectedGroupExplain) {
- list<intrusive_ptr<DocumentSource>> result =
+ vector<intrusive_ptr<DocumentSource>> result =
DocumentSourceSortByCount::createFromBson(sortByCountSpec.firstElement(), getExpCtx());
ASSERT_EQUALS(result.size(), 2UL);
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result.front().get());
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
ASSERT(groupStage);
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result.back().get());
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
ASSERT(sortStage);
// Serialize the DocumentSourceGroup and DocumentSourceSort from $sortByCount so that we can
@@ -112,7 +111,7 @@ TEST_F(SortByCountReturnsGroupAndSort, ExpressionInObjectSpec) {
*/
class InvalidSortByCountSpec : public AggregationContextFixture {
public:
- list<intrusive_ptr<DocumentSource>> createSortByCount(BSONObj sortByCountSpec) {
+ vector<intrusive_ptr<DocumentSource>> createSortByCount(BSONObj sortByCountSpec) {
auto specElem = sortByCountSpec.firstElement();
return DocumentSourceSortByCount::createFromBson(specElem, getExpCtx());
}
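
For reference, the list-to-vector change restored above means callers of DocumentSourceSortByCount::createFromBson() index the desugared stages directly instead of walking front()/back(). A minimal sketch of the post-revert calling pattern, written as a fragment of the test fixture above (getExpCtx(), the ASSERT macros, and the includes come from document_source_sort_by_count_test.cpp; the "$category" field name is only an example):

// $sortByCount desugars to exactly two stages: a $group followed by a $sort.
std::vector<boost::intrusive_ptr<DocumentSource>> stages =
    DocumentSourceSortByCount::createFromBson(
        BSON("$sortByCount"
             << "$category")
            .firstElement(),
        getExpCtx());
ASSERT_EQUALS(stages.size(), 2UL);
ASSERT(dynamic_cast<DocumentSourceGroup*>(stages[0].get()));  // random access replaces front()
ASSERT(dynamic_cast<DocumentSourceSort*>(stages[1].get()));   // ...and back()
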
diff --git a/src/mongo/db/pipeline/expression_context.cpp b/src/mongo/db/pipeline/expression_context.cpp
index f5f42d197c0..d2a16a6ca87 100644
--- a/src/mongo/db/pipeline/expression_context.cpp
+++ b/src/mongo/db/pipeline/expression_context.cpp
@@ -73,7 +73,7 @@ void ExpressionContext::setCollator(std::unique_ptr<CollatorInterface> coll) {
}
intrusive_ptr<ExpressionContext> ExpressionContext::copyWith(NamespaceString ns) const {
- intrusive_ptr<ExpressionContext> expCtx = new ExpressionContext(std::move(ns));
+ intrusive_ptr<ExpressionContext> expCtx = new ExpressionContext();
expCtx->explain = explain;
expCtx->inShard = inShard;
@@ -81,6 +81,7 @@ intrusive_ptr<ExpressionContext> ExpressionContext::copyWith(NamespaceString ns)
expCtx->extSortAllowed = extSortAllowed;
expCtx->bypassDocumentValidation = bypassDocumentValidation;
+ expCtx->ns = std::move(ns);
expCtx->tempDir = tempDir;
expCtx->opCtx = opCtx;
diff --git a/src/mongo/db/pipeline/expression_context.h b/src/mongo/db/pipeline/expression_context.h
index ba2690796f6..d04b095aa4b 100644
--- a/src/mongo/db/pipeline/expression_context.h
+++ b/src/mongo/db/pipeline/expression_context.h
@@ -134,9 +134,7 @@ public:
protected:
static const int kInterruptCheckPeriod = 128;
-
- ExpressionContext(NamespaceString nss)
- : ns(std::move(nss)), variablesParseState(variables.useIdGenerator()) {}
+ ExpressionContext() : variablesParseState(variables.useIdGenerator()) {}
/**
* Sets '_collator' and resets '_documentComparator' and '_valueComparator'.
diff --git a/src/mongo/db/pipeline/expression_context_for_test.h b/src/mongo/db/pipeline/expression_context_for_test.h
index 542e5b91eea..234b54e1a10 100644
--- a/src/mongo/db/pipeline/expression_context_for_test.h
+++ b/src/mongo/db/pipeline/expression_context_for_test.h
@@ -40,9 +40,7 @@ namespace mongo {
*/
class ExpressionContextForTest : public ExpressionContext {
public:
- ExpressionContextForTest() : ExpressionContext(NamespaceString{"test"_sd, "namespace"_sd}) {}
-
- ExpressionContextForTest(NamespaceString nss) : ExpressionContext(std::move(nss)) {}
+ ExpressionContextForTest() = default;
ExpressionContextForTest(OperationContext* opCtx, const AggregationRequest& request)
: ExpressionContext(opCtx, request, nullptr, {}) {}
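
The two ExpressionContext hunks above go back to a default constructor, so test code (and copyWith()) now assigns the namespace through the public ns member after construction. A short sketch of the post-revert usage, with the namespace and opCtx values chosen only for illustration:

// Default-construct the test context, then fill in the namespace explicitly,
// matching what ExpressionContext::copyWith() does after this change.
boost::intrusive_ptr<ExpressionContextForTest> expCtx = new ExpressionContextForTest();
expCtx->ns = NamespaceString("a.collection");  // example namespace, as used in pipeline_test.cpp
expCtx->opCtx = opCtx.get();                   // assumes an OperationContext from the enclosing test
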
diff --git a/src/mongo/db/pipeline/pipeline_test.cpp b/src/mongo/db/pipeline/pipeline_test.cpp
index 1402f6f504d..1f37b30fc90 100644
--- a/src/mongo/db/pipeline/pipeline_test.cpp
+++ b/src/mongo/db/pipeline/pipeline_test.cpp
@@ -37,11 +37,7 @@
#include "mongo/db/pipeline/dependencies.h"
#include "mongo/db/pipeline/document.h"
#include "mongo/db/pipeline/document_source.h"
-#include "mongo/db/pipeline/document_source_change_notification.h"
-#include "mongo/db/pipeline/document_source_lookup_change_post_image.h"
-#include "mongo/db/pipeline/document_source_match.h"
#include "mongo/db/pipeline/document_source_mock.h"
-#include "mongo/db/pipeline/document_source_project.h"
#include "mongo/db/pipeline/document_value_test_util.h"
#include "mongo/db/pipeline/expression_context_for_test.h"
#include "mongo/db/pipeline/field_path.h"
@@ -56,8 +52,6 @@ using boost::intrusive_ptr;
using std::string;
using std::vector;
-const NamespaceString kTestNss = NamespaceString("a.collection");
-
namespace Optimizations {
using namespace mongo;
@@ -83,7 +77,7 @@ void assertPipelineOptimizesAndSerializesTo(std::string inputPipeJson,
ASSERT_EQUALS(stageElem.type(), BSONType::Object);
rawPipeline.push_back(stageElem.embeddedObject());
}
- AggregationRequest request(kTestNss, rawPipeline);
+ AggregationRequest request(NamespaceString("a.collection"), rawPipeline);
intrusive_ptr<ExpressionContextForTest> ctx =
new ExpressionContextForTest(opCtx.get(), request);
@@ -968,52 +962,6 @@ TEST(PipelineOptimizationTest, MatchOnMaxLengthShouldMoveAcrossRename) {
assertPipelineOptimizesTo(inputPipe, outputPipe);
}
-TEST(PipelineOptimizationTest, ChangeNotificationLookupSwapsWithIndependentMatch) {
- QueryTestServiceContext testServiceContext;
- auto opCtx = testServiceContext.makeOperationContext();
-
- intrusive_ptr<ExpressionContext> expCtx(new ExpressionContextForTest(kTestNss));
- expCtx->opCtx = opCtx.get();
-
- auto spec = BSON("$changeNotification" << BSON("fullDocument"
- << "lookup"));
- auto stages = DocumentSourceChangeNotification::createFromBson(spec.firstElement(), expCtx);
- ASSERT_EQ(stages.size(), 3UL);
- // Make sure the change lookup is at the end.
- ASSERT(dynamic_cast<DocumentSourceLookupChangePostImage*>(stages.back().get()));
-
- auto matchPredicate = BSON("extra"
- << "predicate");
- stages.push_back(DocumentSourceMatch::create(matchPredicate, expCtx));
- auto pipeline = uassertStatusOK(Pipeline::create(stages, expCtx));
- pipeline->optimizePipeline();
-
- // Make sure the $match stage has swapped ahead of the change lookup, leaving the lookup at the end.
- ASSERT(dynamic_cast<DocumentSourceLookupChangePostImage*>(pipeline->getSources().back().get()));
-}
-
-TEST(PipelineOptimizationTest, ChangeNotificationLookupDoesNotSwapWithMatchOnPostImage) {
- QueryTestServiceContext testServiceContext;
- auto opCtx = testServiceContext.makeOperationContext();
-
- intrusive_ptr<ExpressionContext> expCtx(new ExpressionContextForTest(kTestNss));
- expCtx->opCtx = opCtx.get();
-
- auto spec = BSON("$changeNotification" << BSON("fullDocument"
- << "lookup"));
- auto stages = DocumentSourceChangeNotification::createFromBson(spec.firstElement(), expCtx);
- ASSERT_EQ(stages.size(), 3UL);
- // Make sure the change lookup is at the end.
- ASSERT(dynamic_cast<DocumentSourceLookupChangePostImage*>(stages.back().get()));
-
- stages.push_back(DocumentSourceMatch::create(
- BSON(DocumentSourceLookupChangePostImage::kFullDocumentFieldName << BSONNULL), expCtx));
- auto pipeline = uassertStatusOK(Pipeline::create(stages, expCtx));
- pipeline->optimizePipeline();
-
- // Make sure the $match stage stays at the end.
- ASSERT(dynamic_cast<DocumentSourceMatch*>(pipeline->getSources().back().get()));
-}
} // namespace Local
namespace Sharded {
@@ -1038,7 +986,7 @@ public:
ASSERT_EQUALS(stageElem.type(), BSONType::Object);
rawPipeline.push_back(stageElem.embeddedObject());
}
- AggregationRequest request(kTestNss, rawPipeline);
+ AggregationRequest request(NamespaceString("a.collection"), rawPipeline);
intrusive_ptr<ExpressionContextForTest> ctx =
new ExpressionContextForTest(&_opCtx, request);
@@ -1443,7 +1391,7 @@ TEST_F(PipelineInitialSourceNSTest, CollectionNSNotValidIfInitialStageIsCollecti
auto collectionlessSource = DocumentSourceCollectionlessMock::create();
auto ctx = getExpCtx();
- ctx->ns = kTestNss;
+ ctx->ns = NamespaceString("a.collection");
ASSERT_NOT_OK(Pipeline::create({collectionlessSource}, ctx).getStatus());
}
diff --git a/src/mongo/db/pipeline/value.h b/src/mongo/db/pipeline/value.h
index 04e396dc58d..9f7a357f677 100644
--- a/src/mongo/db/pipeline/value.h
+++ b/src/mongo/db/pipeline/value.h
@@ -117,9 +117,6 @@ public:
explicit Value(const MaxKeyLabeler&) : _storage(MaxKey) {} // MAXKEY
explicit Value(const Date_t& date) : _storage(Date, date.toMillisSinceEpoch()) {}
- explicit Value(const char*) = delete; // Use StringData instead to prevent accidentally
- // terminating the string at the first null byte.
-
// TODO: add an unsafe version that can share storage with the BSONElement
/// Deep-convert from BSONElement to Value
explicit Value(const BSONElement& elem);
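
Dropping the deleted const char* overload means a string literal once again reaches Value through the implicit StringData conversion, which measures the string with strlen(). A hedged sketch of the pitfall that deleted overload was documenting, assuming Value's explicit StringData constructor (not shown in this hunk):

const char buf[] = "abc\0def";                   // seven characters, one embedded null byte
Value truncated{StringData(buf)};                // strlen() stops at the null: stores only "abc"
Value intact{StringData(buf, sizeof(buf) - 1)};  // passing the length keeps all seven bytes
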