author    Bernard Gorman <bernard.gorman@gmail.com>    2018-06-12 13:35:22 +0100
committer Bernard Gorman <bernard.gorman@gmail.com>    2018-06-22 04:03:41 +0100
commit    c798e34f5479c43da70d5f9f7d4919f16e6dbd50 (patch)
tree      fa2e47d9c74a26fa01874c9cc76a13f6dbc31dbe /src
parent    04d8939bd9c6776da8b7effe9c4bcaa20591d88f (diff)
download  mongo-c798e34f5479c43da70d5f9f7d4919f16e6dbd50.tar.gz
SERVER-35329 Package agg projection code for use outside of agg execution
Diffstat (limited to 'src')
-rw-r--r-- | src/mongo/db/SConscript | 10
-rw-r--r-- | src/mongo/db/exec/SConscript | 11
-rw-r--r-- | src/mongo/db/exec/projection_exec_agg.cpp | 88
-rw-r--r-- | src/mongo/db/exec/projection_exec_agg.h | 69
-rw-r--r-- | src/mongo/db/exec/projection_exec_agg_test.cpp | 148
-rw-r--r-- | src/mongo/db/pipeline/cluster_aggregation_planner.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/dependencies.h | 25
-rw-r--r-- | src/mongo/db/pipeline/document_source.h | 27
-rw-r--r-- | src/mongo/db/pipeline/document_source_add_fields_test.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_bucket_auto.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_bucket_auto.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_bucket_auto_test.cpp | 6
-rw-r--r-- | src/mongo/db/pipeline/document_source_change_stream_transform.cpp | 5
-rw-r--r-- | src/mongo/db/pipeline/document_source_change_stream_transform.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_facet.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_facet.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_facet_test.cpp | 22
-rw-r--r-- | src/mongo/db/pipeline/document_source_geo_near.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_geo_near.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_graph_lookup.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_group.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_group.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_group_test.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_limit.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_limit_test.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_lookup.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_lookup.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_lookup_change_post_image.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_match.cpp | 6
-rw-r--r-- | src/mongo/db/pipeline/document_source_match.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_match_test.cpp | 42
-rw-r--r-- | src/mongo/db/pipeline/document_source_out.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_out.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_project_test.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_replace_root.cpp | 10
-rw-r--r-- | src/mongo/db/pipeline/document_source_replace_root_test.cpp | 3
-rw-r--r-- | src/mongo/db/pipeline/document_source_sample.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_sample_from_random_cursor.cpp | 5
-rw-r--r-- | src/mongo/db/pipeline/document_source_sample_from_random_cursor.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_single_document_transformation.cpp | 6
-rw-r--r-- | src/mongo/db/pipeline/document_source_single_document_transformation.h | 53
-rw-r--r-- | src/mongo/db/pipeline/document_source_skip.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_sort.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_sort.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_sort_test.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_tee_consumer.h | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_unwind.cpp | 4
-rw-r--r-- | src/mongo/db/pipeline/document_source_unwind.h | 2
-rw-r--r-- | src/mongo/db/pipeline/document_source_unwind_test.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/parsed_add_fields.h | 7
-rw-r--r-- | src/mongo/db/pipeline/parsed_add_fields_test.cpp | 40
-rw-r--r-- | src/mongo/db/pipeline/parsed_aggregation_projection.cpp | 74
-rw-r--r-- | src/mongo/db/pipeline/parsed_aggregation_projection.h | 21
-rw-r--r-- | src/mongo/db/pipeline/parsed_aggregation_projection_test.cpp | 208
-rw-r--r-- | src/mongo/db/pipeline/parsed_exclusion_projection.cpp | 2
-rw-r--r-- | src/mongo/db/pipeline/parsed_exclusion_projection.h | 7
-rw-r--r-- | src/mongo/db/pipeline/parsed_exclusion_projection_test.cpp | 6
-rw-r--r-- | src/mongo/db/pipeline/parsed_inclusion_projection.h | 7
-rw-r--r-- | src/mongo/db/pipeline/parsed_inclusion_projection_test.cpp | 32
-rw-r--r-- | src/mongo/db/pipeline/pipeline.cpp | 8
-rw-r--r-- | src/mongo/db/pipeline/pipeline_test.cpp | 20
-rw-r--r-- | src/mongo/db/pipeline/transformer_interface.h | 83
-rw-r--r-- | src/mongo/db/query/parsed_projection.h | 2
63 files changed, 802 insertions, 345 deletions
diff --git a/src/mongo/db/SConscript b/src/mongo/db/SConscript
index 46a2c818d03..3fc627ea446 100644
--- a/src/mongo/db/SConscript
+++ b/src/mongo/db/SConscript
@@ -927,6 +927,16 @@ env.Library(
)
env.Library(
+ target='projection_exec_agg',
+ source=[
+ 'exec/projection_exec_agg.cpp',
+ ],
+ LIBDEPS_PRIVATE=[
+ 'pipeline/parsed_aggregation_projection'
+ ]
+)
+
+env.Library(
target='query_exec',
source=[
'clientcursor.cpp',
diff --git a/src/mongo/db/exec/SConscript b/src/mongo/db/exec/SConscript
index a5ffcde4cbb..70d48105e2c 100644
--- a/src/mongo/db/exec/SConscript
+++ b/src/mongo/db/exec/SConscript
@@ -97,3 +97,14 @@ env.CppUnitTest(
"$BUILD_DIR/mongo/db/service_context_d",
],
)
+
+env.CppUnitTest(
+ target='projection_exec_agg_test',
+ source=[
+ 'projection_exec_agg_test.cpp',
+ ],
+ LIBDEPS=[
+ '$BUILD_DIR/mongo/base',
+ '$BUILD_DIR/mongo/db/projection_exec_agg',
+ ],
+)
diff --git a/src/mongo/db/exec/projection_exec_agg.cpp b/src/mongo/db/exec/projection_exec_agg.cpp
new file mode 100644
index 00000000000..30aa62969b4
--- /dev/null
+++ b/src/mongo/db/exec/projection_exec_agg.cpp
@@ -0,0 +1,88 @@
+/**
+ * Copyright (C) 2018 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/db/exec/projection_exec_agg.h"
+
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/expression_context.h"
+#include "mongo/db/pipeline/parsed_aggregation_projection.h"
+
+namespace mongo {
+
+class ProjectionExecAgg::ProjectionExecutor {
+public:
+ using ParsedAggregationProjection = parsed_aggregation_projection::ParsedAggregationProjection;
+ using ProjectionParseMode = ParsedAggregationProjection::ProjectionParseMode;
+ using TransformerType = TransformerInterface::TransformerType;
+
+ ProjectionExecutor(BSONObj projSpec) {
+ // Construct a dummy ExpressionContext for ParsedAggregationProjection. It's OK to set the
+ // ExpressionContext's OperationContext and CollatorInterface to 'nullptr' here; since we
+ // ban computed fields from the projection, the ExpressionContext will never be used.
+ boost::intrusive_ptr<ExpressionContext> expCtx(new ExpressionContext(nullptr, nullptr));
+ _projection = ParsedAggregationProjection::create(
+ expCtx, projSpec, ProjectionParseMode::kBanComputedFields);
+ }
+
+ ProjectionType getType() const {
+ return (_projection->getType() == TransformerType::kInclusionProjection
+ ? ProjectionType::kInclusionProjection
+ : ProjectionType::kExclusionProjection);
+ }
+
+ BSONObj applyProjection(BSONObj inputDoc) const {
+ return _projection->applyTransformation(Document{inputDoc}).toBson();
+ }
+
+private:
+ std::unique_ptr<ParsedAggregationProjection> _projection;
+};
+
+// ProjectionExecAgg's constructor and destructor are defined here, at a point where the
+// implementation of ProjectionExecutor is known, so that std::unique_ptr can be used with the
+// forward-declared ProjectionExecutor class.
+ProjectionExecAgg::ProjectionExecAgg(BSONObj projSpec, std::unique_ptr<ProjectionExecutor> exec)
+ : _exec(std::move(exec)), _projSpec(std::move(projSpec)) {}
+
+ProjectionExecAgg::~ProjectionExecAgg() = default;
+
+std::unique_ptr<ProjectionExecAgg> ProjectionExecAgg::create(BSONObj projSpec) {
+ return std::unique_ptr<ProjectionExecAgg>(
+ new ProjectionExecAgg(projSpec, std::make_unique<ProjectionExecutor>(projSpec)));
+}
+
+ProjectionExecAgg::ProjectionType ProjectionExecAgg::getType() const {
+ return _exec->getType();
+}
+
+BSONObj ProjectionExecAgg::applyProjection(BSONObj inputDoc) const {
+ return _exec->applyProjection(inputDoc);
+}
+} // namespace mongo
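
Editor's note: the comment above describes the standard pimpl arrangement. std::unique_ptr<ProjectionExecutor> can be declared against a forward-declared class, provided the owning class's constructor and destructor are compiled in a translation unit where the class is complete; this is also why ~ProjectionExecAgg() cannot simply be defaulted in the header. A minimal standalone sketch of the same idiom, using illustrative Widget/Impl names rather than anything from this patch:

    // widget.h -- Impl is only forward-declared; no implementation details leak out.
    #include <memory>

    class Widget {
    public:
        Widget();
        ~Widget();  // Declared here, defined in widget.cpp where Impl is complete.
        int value() const;

    private:
        class Impl;
        std::unique_ptr<Impl> _impl;
    };

    // widget.cpp -- Impl's definition and the special members live together.
    class Widget::Impl {
    public:
        int value() const {
            return 42;
        }
    };

    Widget::Widget() : _impl(std::make_unique<Impl>()) {}
    Widget::~Widget() = default;  // unique_ptr's deleter is instantiated here, where Impl is complete.

    int Widget::value() const {
        return _impl->value();
    }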
diff --git a/src/mongo/db/exec/projection_exec_agg.h b/src/mongo/db/exec/projection_exec_agg.h
new file mode 100644
index 00000000000..a71f611beda
--- /dev/null
+++ b/src/mongo/db/exec/projection_exec_agg.h
@@ -0,0 +1,69 @@
+/**
+ * Copyright (C) 2018 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#pragma once
+
+#include <memory>
+
+#include "mongo/bson/bsonobj.h"
+
+namespace mongo {
+
+/**
+ * This class provides the query system with the ability to perform projections using the
+ * aggregation system's projection semantics.
+ */
+class ProjectionExecAgg {
+public:
+ enum class ProjectionType { kInclusionProjection, kExclusionProjection };
+
+ static std::unique_ptr<ProjectionExecAgg> create(BSONObj projSpec);
+
+ ~ProjectionExecAgg();
+
+ ProjectionType getType() const;
+
+ BSONObj getProjectionSpec() const {
+ return _projSpec;
+ }
+
+ BSONObj applyProjection(BSONObj inputDoc) const;
+
+private:
+ /**
+ * ProjectionExecAgg::ProjectionExecutor wraps all agg-specific calls, and is forward-declared
+ * here to avoid exposing any types from ParsedAggregationProjection to the query system.
+ */
+ class ProjectionExecutor;
+
+ ProjectionExecAgg(BSONObj projSpec, std::unique_ptr<ProjectionExecutor> exec);
+
+ std::unique_ptr<ProjectionExecutor> _exec;
+ const BSONObj _projSpec;
+};
+} // namespace mongo
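
Editor's note: for orientation, a hedged usage sketch of the interface declared above. Only create(), getType(), getProjectionSpec() and applyProjection() come from this header; the particular spec, input document, and expected-output comment are illustrative.

    #include "mongo/bson/bsonmisc.h"
    #include "mongo/bson/bsonobjbuilder.h"
    #include "mongo/db/exec/projection_exec_agg.h"

    namespace mongo {

    BSONObj projectExample() {
        // {_id: 0, a: 1, "b.c": 1} parses as an inclusion projection.
        BSONObj spec = BSON("_id" << false << "a" << true << "b.c" << true);
        auto exec = ProjectionExecAgg::create(spec);

        // exec->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection
        BSONObj input = BSON("a" << 1 << "b" << BSON("c" << 2 << "d" << 3) << "x" << 4);
        return exec->applyProjection(input);  // expected: {a: 1, b: {c: 2}}
    }

    }  // namespace mongo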
diff --git a/src/mongo/db/exec/projection_exec_agg_test.cpp b/src/mongo/db/exec/projection_exec_agg_test.cpp
new file mode 100644
index 00000000000..a8dcde138d3
--- /dev/null
+++ b/src/mongo/db/exec/projection_exec_agg_test.cpp
@@ -0,0 +1,148 @@
+/**
+ * Copyright (C) 2018 MongoDB, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/db/exec/projection_exec_agg.h"
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/unittest/unittest.h"
+#include "mongo/util/assert_util.h"
+
+namespace mongo {
+namespace {
+
+template <typename T>
+BSONObj wrapInLiteral(const T& arg) {
+ return BSON("$literal" << arg);
+}
+
+//
+// Error cases.
+//
+
+TEST(ProjectionExecAggErrors, ShouldRejectMixOfInclusionAndComputedFields) {
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a" << true << "b" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a" << wrapInLiteral(1) << "b" << true)),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a.b" << true << "a.c" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a.b" << wrapInLiteral(1) << "a.c" << true)),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a" << BSON("b" << true << "c" << wrapInLiteral(1)))),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a" << BSON("b" << wrapInLiteral(1) << "c" << true))),
+ AssertionException);
+}
+
+TEST(ProjectionExecAggErrors, ShouldRejectMixOfExclusionAndComputedFields) {
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a" << false << "b" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a" << wrapInLiteral(1) << "b" << false)),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a.b" << false << "a.c" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(BSON("a.b" << wrapInLiteral(1) << "a.c" << false)),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a" << BSON("b" << false << "c" << wrapInLiteral(1)))),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a" << BSON("b" << wrapInLiteral(1) << "c" << false))),
+ AssertionException);
+}
+
+TEST(ProjectionExecAggErrors, ShouldRejectOnlyComputedFields) {
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a" << wrapInLiteral(1) << "b" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ProjectionExecAgg::create(BSON("a.b" << wrapInLiteral(1) << "a.c" << wrapInLiteral(1))),
+ AssertionException);
+
+ ASSERT_THROWS(ProjectionExecAgg::create(
+ BSON("a" << BSON("b" << wrapInLiteral(1) << "c" << wrapInLiteral(1)))),
+ AssertionException);
+}
+
+// Valid projections.
+
+TEST(ProjectionExecAggType, ShouldAcceptInclusionProjection) {
+ auto parsedProject = ProjectionExecAgg::create(BSON("a" << true));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id" << false << "a" << true));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id" << false << "a.b.c" << true));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id.x" << true));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id" << BSON("x" << true)));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("x" << BSON("_id" << true)));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kInclusionProjection);
+}
+
+TEST(ProjectionExecAggType, ShouldAcceptExclusionProjection) {
+ auto parsedProject = ProjectionExecAgg::create(BSON("a" << false));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kExclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id.x" << false));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kExclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id" << BSON("x" << false)));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kExclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("x" << BSON("_id" << false)));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kExclusionProjection);
+
+ parsedProject = ProjectionExecAgg::create(BSON("_id" << false));
+ ASSERT(parsedProject->getType() == ProjectionExecAgg::ProjectionType::kExclusionProjection);
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/cluster_aggregation_planner.cpp b/src/mongo/db/pipeline/cluster_aggregation_planner.cpp
index 5148d56f888..8fa76c1db79 100644
--- a/src/mongo/db/pipeline/cluster_aggregation_planner.cpp
+++ b/src/mongo/db/pipeline/cluster_aggregation_planner.cpp
@@ -123,7 +123,7 @@ void limitFieldsSentFromShardsToMerger(Pipeline* shardPipe, Pipeline* mergePipe)
// add an unnecessary project (and therefore a deep-copy).
for (auto&& source : shardPipe->getSources()) {
DepsTracker dt(DepsTracker::kAllMetadataAvailable);
- if (source->getDependencies(&dt) & DocumentSource::EXHAUSTIVE_FIELDS)
+ if (source->getDependencies(&dt) & DepsTracker::State::EXHAUSTIVE_FIELDS)
return;
}
// if we get here, add the project.
diff --git a/src/mongo/db/pipeline/dependencies.h b/src/mongo/db/pipeline/dependencies.h
index dec5296060f..60bc4990f86 100644
--- a/src/mongo/db/pipeline/dependencies.h
+++ b/src/mongo/db/pipeline/dependencies.h
@@ -43,6 +43,31 @@ class ParsedDeps;
*/
struct DepsTracker {
/**
+ * Used by aggregation stages to report whether dependency resolution is complete, or must
+ * continue to the next stage.
+ */
+ enum State {
+ // The full object and all metadata may be required.
+ NOT_SUPPORTED = 0x0,
+
+ // Later stages could need either fields or metadata. For example, a $limit stage will pass
+ // through all fields, and they may or may not be needed by future stages.
+ SEE_NEXT = 0x1,
+
+ // Later stages won't need more fields from input. For example, an inclusion projection like
+ // {_id: 1, a: 1} will only output two fields, so future stages cannot possibly depend on
+ // any other fields.
+ EXHAUSTIVE_FIELDS = 0x2,
+
+ // Later stages won't need more metadata from input. For example, a $group stage will group
+ // documents together, discarding their text score and sort keys.
+ EXHAUSTIVE_META = 0x4,
+
+ // Later stages won't need either fields or metadata.
+ EXHAUSTIVE_ALL = EXHAUSTIVE_FIELDS | EXHAUSTIVE_META,
+ };
+
+ /**
* Represents the type of metadata a pipeline might request.
*/
enum class MetadataType {
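
Editor's note: because the State values relocated here are bit flags, a caller can test a stage's answer with bitwise AND, which is what the cluster_aggregation_planner.cpp change above relies on (getDependencies(&dt) & DepsTracker::State::EXHAUSTIVE_FIELDS). A small self-contained illustration of the flag arithmetic; the enum values mirror the patch, while main() and the printed strings are purely illustrative:

    #include <cstdio>

    // Stand-in for DepsTracker::State, reproducing the flag values above.
    enum State {
        NOT_SUPPORTED = 0x0,
        SEE_NEXT = 0x1,
        EXHAUSTIVE_FIELDS = 0x2,
        EXHAUSTIVE_META = 0x4,
        EXHAUSTIVE_ALL = EXHAUSTIVE_FIELDS | EXHAUSTIVE_META,
    };

    int main() {
        State reported = EXHAUSTIVE_ALL;
        // EXHAUSTIVE_ALL has both bits set, so either individual check passes.
        if (reported & EXHAUSTIVE_FIELDS) {
            std::puts("later stages need no additional fields");
        }
        if (reported & EXHAUSTIVE_META) {
            std::puts("later stages need no additional metadata");
        }
        // SEE_NEXT (0x1) shares no bits with the exhaustive flags, so the same
        // checks correctly fail for a stage that merely passes documents through.
        return 0;
    }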
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index 6e706876aff..ec1541d815f 100644
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -624,36 +624,15 @@ public:
return {GetModPathsReturn::Type::kNotSupported, std::set<std::string>{}, {}};
}
- enum GetDepsReturn {
- // The full object and all metadata may be required.
- NOT_SUPPORTED = 0x0,
-
- // Later stages could need either fields or metadata. For example, a $limit stage will pass
- // through all fields, and they may or may not be needed by future stages.
- SEE_NEXT = 0x1,
-
- // Later stages won't need more fields from input. For example, an inclusion projection like
- // {_id: 1, a: 1} will only output two fields, so future stages cannot possibly depend on
- // any other fields.
- EXHAUSTIVE_FIELDS = 0x2,
-
- // Later stages won't need more metadata from input. For example, a $group stage will group
- // documents together, discarding their text score and sort keys.
- EXHAUSTIVE_META = 0x4,
-
- // Later stages won't need either fields or metadata.
- EXHAUSTIVE_ALL = EXHAUSTIVE_FIELDS | EXHAUSTIVE_META,
- };
-
/**
* Get the dependencies this operation needs to do its job. If overridden, subclasses must add
* all paths needed to apply their transformation to 'deps->fields', and call
* 'deps->setNeedsMetadata()' to indicate what metadata (e.g. text score), if any, is required.
*
- * See GetDepsReturn above for the possible return values and what they mean.
+ * See DepsTracker::State for the possible return values and what they mean.
*/
- virtual GetDepsReturn getDependencies(DepsTracker* deps) const {
- return NOT_SUPPORTED;
+ virtual DepsTracker::State getDependencies(DepsTracker* deps) const {
+ return DepsTracker::State::NOT_SUPPORTED;
}
protected:
diff --git a/src/mongo/db/pipeline/document_source_add_fields_test.cpp b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
index 8fa400e731a..3c7d6c5a426 100644
--- a/src/mongo/db/pipeline/document_source_add_fields_test.cpp
+++ b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
@@ -117,7 +117,7 @@ TEST_F(AddFieldsTest, ShouldAddReferencedFieldsToDependencies) {
fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"),
getExpCtx());
DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, addFields->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, addFields->getDependencies(&dependencies));
ASSERT_EQUALS(3U, dependencies.fields.size());
// No implicit _id dependency.
diff --git a/src/mongo/db/pipeline/document_source_bucket_auto.cpp b/src/mongo/db/pipeline/document_source_bucket_auto.cpp
index a8123f5f2fc..1ee7230d496 100644
--- a/src/mongo/db/pipeline/document_source_bucket_auto.cpp
+++ b/src/mongo/db/pipeline/document_source_bucket_auto.cpp
@@ -72,7 +72,7 @@ DocumentSource::GetNextResult DocumentSourceBucketAuto::getNext() {
return makeDocument(*(_bucketsIterator++));
}
-DocumentSource::GetDepsReturn DocumentSourceBucketAuto::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceBucketAuto::getDependencies(DepsTracker* deps) const {
// Add the 'groupBy' expression.
_groupByExpression->addDependencies(deps);
@@ -84,7 +84,7 @@ DocumentSource::GetDepsReturn DocumentSourceBucketAuto::getDependencies(DepsTrac
// We know exactly which fields will be present in the output document. Future stages cannot
// depend on any further fields. The grouping process will remove any metadata from the
// documents, so there can be no further dependencies on metadata.
- return EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
DocumentSource::GetNextResult DocumentSourceBucketAuto::populateSorter() {
diff --git a/src/mongo/db/pipeline/document_source_bucket_auto.h b/src/mongo/db/pipeline/document_source_bucket_auto.h
index b2f46e2e7ae..9bd2c9f2877 100644
--- a/src/mongo/db/pipeline/document_source_bucket_auto.h
+++ b/src/mongo/db/pipeline/document_source_bucket_auto.h
@@ -44,7 +44,7 @@ namespace mongo {
class DocumentSourceBucketAuto final : public DocumentSource, public NeedsMergerDocumentSource {
public:
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
GetNextResult getNext() final;
const char* getSourceName() const final;
diff --git a/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
index 157c2aad4f1..bd74e43d6f2 100644
--- a/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
+++ b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
@@ -436,7 +436,7 @@ TEST_F(BucketAutoTests, ShouldAddDependenciesOfGroupByFieldAndComputedFields) {
"{$sum : '$a'}, field2 : {$avg : '$b'}}}}"));
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
ASSERT_EQUALS(3U, dependencies.fields.size());
// Dependency from 'groupBy'
@@ -455,7 +455,7 @@ TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromGroupByField) {
createBucketAuto(fromjson("{$bucketAuto : {groupBy : {$meta: 'textScore'}, buckets : 2}}"));
DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -468,7 +468,7 @@ TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromOutputField) {
"{$avg : {$meta : 'textScore'}}}}}"));
DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.size());
// Dependency from 'groupBy'
diff --git a/src/mongo/db/pipeline/document_source_change_stream_transform.cpp b/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
index d55b72aa2c4..efeb1f69bbb 100644
--- a/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
+++ b/src/mongo/db/pipeline/document_source_change_stream_transform.cpp
@@ -403,15 +403,14 @@ Value DocumentSourceChangeStreamTransform::serialize(
return Value(Document{{getSourceName(), changeStreamOptions}});
}
-DocumentSource::GetDepsReturn DocumentSourceChangeStreamTransform::getDependencies(
- DepsTracker* deps) const {
+DepsTracker::State DocumentSourceChangeStreamTransform::getDependencies(DepsTracker* deps) const {
deps->fields.insert(repl::OplogEntry::kOpTypeFieldName.toString());
deps->fields.insert(repl::OplogEntry::kTimestampFieldName.toString());
deps->fields.insert(repl::OplogEntry::kNamespaceFieldName.toString());
deps->fields.insert(repl::OplogEntry::kUuidFieldName.toString());
deps->fields.insert(repl::OplogEntry::kObjectFieldName.toString());
deps->fields.insert(repl::OplogEntry::kObject2FieldName.toString());
- return DocumentSource::GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
DocumentSource::GetModPathsReturn DocumentSourceChangeStreamTransform::getModifiedPaths() const {
diff --git a/src/mongo/db/pipeline/document_source_change_stream_transform.h b/src/mongo/db/pipeline/document_source_change_stream_transform.h
index c5f2121162c..1825152f887 100644
--- a/src/mongo/db/pipeline/document_source_change_stream_transform.h
+++ b/src/mongo/db/pipeline/document_source_change_stream_transform.h
@@ -45,7 +45,7 @@ public:
const boost::intrusive_ptr<ExpressionContext>&, BSONObj changeStreamSpec);
Document applyTransformation(const Document& input);
- DocumentSource::GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
DocumentSource::GetModPathsReturn getModifiedPaths() const final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain) const;
diff --git a/src/mongo/db/pipeline/document_source_facet.cpp b/src/mongo/db/pipeline/document_source_facet.cpp
index 8b1ce60f6e1..b163d82a3fa 100644
--- a/src/mongo/db/pipeline/document_source_facet.cpp
+++ b/src/mongo/db/pipeline/document_source_facet.cpp
@@ -267,7 +267,7 @@ DocumentSource::StageConstraints DocumentSourceFacet::constraints(
TransactionRequirement::kAllowed};
}
-DocumentSource::GetDepsReturn DocumentSourceFacet::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceFacet::getDependencies(DepsTracker* deps) const {
const bool scopeHasVariables = pExpCtx->variablesParseState.hasDefinedVariables();
for (auto&& facet : _facets) {
auto subDepsTracker = facet.pipeline->getDependencies(deps->getMetadataAvailable());
@@ -293,7 +293,7 @@ DocumentSource::GetDepsReturn DocumentSourceFacet::getDependencies(DepsTracker*
// We will combine multiple documents into one, and the output document will have new fields, so
// we will stop looking for dependencies at this point.
- return GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
intrusive_ptr<DocumentSource> DocumentSourceFacet::createFromBson(
diff --git a/src/mongo/db/pipeline/document_source_facet.h b/src/mongo/db/pipeline/document_source_facet.h
index cfb0580090d..61b6d660476 100644
--- a/src/mongo/db/pipeline/document_source_facet.h
+++ b/src/mongo/db/pipeline/document_source_facet.h
@@ -105,7 +105,7 @@ public:
/**
* Takes a union of all sub-pipelines, and adds them to 'deps'.
*/
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
const char* getSourceName() const final {
return "$facet";
diff --git a/src/mongo/db/pipeline/document_source_facet_test.cpp b/src/mongo/db/pipeline/document_source_facet_test.cpp
index bc88b491229..9a367f14b24 100644
--- a/src/mongo/db/pipeline/document_source_facet_test.cpp
+++ b/src/mongo/db/pipeline/document_source_facet_test.cpp
@@ -510,9 +510,9 @@ TEST_F(DocumentSourceFacetTest, ShouldPropagateDetachingAndReattachingOfOpCtx) {
*/
class DocumentSourceNeedsA : public DocumentSourcePassthrough {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
deps->fields.insert("a");
- return GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
static boost::intrusive_ptr<DocumentSource> create() {
@@ -525,9 +525,9 @@ public:
*/
class DocumentSourceNeedsB : public DocumentSourcePassthrough {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
deps->fields.insert("b");
- return GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
static boost::intrusive_ptr<DocumentSource> create() {
@@ -562,7 +562,7 @@ TEST_F(DocumentSourceFacetTest, ShouldUnionDependenciesOfInnerPipelines) {
auto facetStage = DocumentSourceFacet::create(std::move(facets), ctx);
DepsTracker deps(DepsTracker::MetadataAvailable::kNoMetadata);
- ASSERT_EQ(facetStage->getDependencies(&deps), DocumentSource::GetDepsReturn::EXHAUSTIVE_ALL);
+ ASSERT_EQ(facetStage->getDependencies(&deps), DepsTracker::State::EXHAUSTIVE_ALL);
ASSERT_FALSE(deps.needWholeDocument);
ASSERT_FALSE(deps.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
ASSERT_EQ(deps.fields.size(), 2UL);
@@ -575,9 +575,9 @@ TEST_F(DocumentSourceFacetTest, ShouldUnionDependenciesOfInnerPipelines) {
*/
class DocumentSourceNeedsWholeDocument : public DocumentSourcePassthrough {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const override {
+ DepsTracker::State getDependencies(DepsTracker* deps) const override {
deps->needWholeDocument = true;
- return GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
static boost::intrusive_ptr<DocumentSourceNeedsWholeDocument> create() {
return new DocumentSourceNeedsWholeDocument();
@@ -600,7 +600,7 @@ TEST_F(DocumentSourceFacetTest, ShouldRequireWholeDocumentIfAnyPipelineRequiresW
auto facetStage = DocumentSourceFacet::create(std::move(facets), ctx);
DepsTracker deps(DepsTracker::MetadataAvailable::kNoMetadata);
- ASSERT_EQ(facetStage->getDependencies(&deps), DocumentSource::GetDepsReturn::EXHAUSTIVE_ALL);
+ ASSERT_EQ(facetStage->getDependencies(&deps), DepsTracker::State::EXHAUSTIVE_ALL);
ASSERT_TRUE(deps.needWholeDocument);
ASSERT_FALSE(deps.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
}
@@ -610,9 +610,9 @@ TEST_F(DocumentSourceFacetTest, ShouldRequireWholeDocumentIfAnyPipelineRequiresW
*/
class DocumentSourceNeedsOnlyTextScore : public DocumentSourcePassthrough {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const override {
+ DepsTracker::State getDependencies(DepsTracker* deps) const override {
deps->setNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE, true);
- return GetDepsReturn::EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
static boost::intrusive_ptr<DocumentSourceNeedsOnlyTextScore> create() {
return new DocumentSourceNeedsOnlyTextScore();
@@ -639,7 +639,7 @@ TEST_F(DocumentSourceFacetTest, ShouldRequireTextScoreIfAnyPipelineRequiresTextS
auto facetStage = DocumentSourceFacet::create(std::move(facets), ctx);
DepsTracker deps(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQ(facetStage->getDependencies(&deps), DocumentSource::GetDepsReturn::EXHAUSTIVE_ALL);
+ ASSERT_EQ(facetStage->getDependencies(&deps), DepsTracker::State::EXHAUSTIVE_ALL);
ASSERT_TRUE(deps.needWholeDocument);
ASSERT_TRUE(deps.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
}
diff --git a/src/mongo/db/pipeline/document_source_geo_near.cpp b/src/mongo/db/pipeline/document_source_geo_near.cpp
index 7ba5e2a4f8e..1d8387309ae 100644
--- a/src/mongo/db/pipeline/document_source_geo_near.cpp
+++ b/src/mongo/db/pipeline/document_source_geo_near.cpp
@@ -220,7 +220,7 @@ bool DocumentSourceGeoNear::needsGeoNearPoint() const {
return static_cast<bool>(includeLocs);
}
-DocumentSource::GetDepsReturn DocumentSourceGeoNear::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceGeoNear::getDependencies(DepsTracker* deps) const {
// TODO (SERVER-35424): Implement better dependency tracking. For example, 'distanceField' is
// produced by this stage, and we could inform the query system that it need not include it in
// its response. For now, assume that we require the entire document as well as the appropriate
@@ -229,7 +229,7 @@ DocumentSource::GetDepsReturn DocumentSourceGeoNear::getDependencies(DepsTracker
deps->setNeedsMetadata(DepsTracker::MetadataType::GEO_NEAR_POINT, needsGeoNearPoint());
deps->needWholeDocument = true;
- return GetDepsReturn::EXHAUSTIVE_FIELDS;
+ return DepsTracker::State::EXHAUSTIVE_FIELDS;
}
DocumentSourceGeoNear::DocumentSourceGeoNear(const intrusive_ptr<ExpressionContext>& pExpCtx)
diff --git a/src/mongo/db/pipeline/document_source_geo_near.h b/src/mongo/db/pipeline/document_source_geo_near.h
index 7d77fa5a422..a0a6ea878ad 100644
--- a/src/mongo/db/pipeline/document_source_geo_near.h
+++ b/src/mongo/db/pipeline/document_source_geo_near.h
@@ -105,7 +105,7 @@ public:
return distanceMultiplier;
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
/**
* Returns true if the $geoNear specification requires the geoNear point metadata.
diff --git a/src/mongo/db/pipeline/document_source_graph_lookup.h b/src/mongo/db/pipeline/document_source_graph_lookup.h
index 527c71a21c2..992fdd0805f 100644
--- a/src/mongo/db/pipeline/document_source_graph_lookup.h
+++ b/src/mongo/db/pipeline/document_source_graph_lookup.h
@@ -65,9 +65,9 @@ public:
return constraints;
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
_startWith->addDependencies(deps);
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
};
void addInvolvedCollections(std::vector<NamespaceString>* collections) const final {
diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
index 457e8eaf60a..03a4b3a3e29 100644
--- a/src/mongo/db/pipeline/document_source_group.cpp
+++ b/src/mongo/db/pipeline/document_source_group.cpp
@@ -227,7 +227,7 @@ Value DocumentSourceGroup::serialize(boost::optional<ExplainOptions::Verbosity>
return Value(DOC(getSourceName() << insides.freeze()));
}
-DocumentSource::GetDepsReturn DocumentSourceGroup::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceGroup::getDependencies(DepsTracker* deps) const {
// add the _id
for (size_t i = 0; i < _idExpressions.size(); i++) {
_idExpressions[i]->addDependencies(deps);
@@ -238,7 +238,7 @@ DocumentSource::GetDepsReturn DocumentSourceGroup::getDependencies(DepsTracker*
accumulatedField.expression->addDependencies(deps);
}
- return EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
intrusive_ptr<DocumentSourceGroup> DocumentSourceGroup::create(
diff --git a/src/mongo/db/pipeline/document_source_group.h b/src/mongo/db/pipeline/document_source_group.h
index eeec779807f..f0fadbd2320 100644
--- a/src/mongo/db/pipeline/document_source_group.h
+++ b/src/mongo/db/pipeline/document_source_group.h
@@ -47,7 +47,7 @@ public:
// Virtuals from DocumentSource.
boost::intrusive_ptr<DocumentSource> optimize() final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
GetNextResult getNext() final;
const char* getSourceName() const final;
diff --git a/src/mongo/db/pipeline/document_source_group_test.cpp b/src/mongo/db/pipeline/document_source_group_test.cpp
index 8d0ce4a6679..6a6d3232248 100644
--- a/src/mongo/db/pipeline/document_source_group_test.cpp
+++ b/src/mongo/db/pipeline/document_source_group_test.cpp
@@ -758,7 +758,7 @@ public:
void run() {
createGroup(fromjson("{_id:'$x',a:{$sum:'$y.z'},b:{$avg:{$add:['$u','$v']}}}"));
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, group()->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_ALL, group()->getDependencies(&dependencies));
ASSERT_EQUALS(4U, dependencies.fields.size());
// Dependency from _id expression.
ASSERT_EQUALS(1U, dependencies.fields.count("x"));
diff --git a/src/mongo/db/pipeline/document_source_limit.h b/src/mongo/db/pipeline/document_source_limit.h
index cfdbbdec036..aeda5902054 100644
--- a/src/mongo/db/pipeline/document_source_limit.h
+++ b/src/mongo/db/pipeline/document_source_limit.h
@@ -74,8 +74,8 @@ public:
Pipeline::SourceContainer* container) final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return SEE_NEXT; // This doesn't affect needed fields
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::SEE_NEXT; // This doesn't affect needed fields
}
/**
diff --git a/src/mongo/db/pipeline/document_source_limit_test.cpp b/src/mongo/db/pipeline/document_source_limit_test.cpp
index b776de9789b..eb3ba447457 100644
--- a/src/mongo/db/pipeline/document_source_limit_test.cpp
+++ b/src/mongo/db/pipeline/document_source_limit_test.cpp
@@ -95,7 +95,7 @@ TEST_F(DocumentSourceLimitTest, DisposeShouldCascadeAllTheWayToSource) {
TEST_F(DocumentSourceLimitTest, ShouldNotIntroduceAnyDependencies) {
auto limit = DocumentSourceLimit::create(getExpCtx(), 1);
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, limit->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, limit->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
diff --git a/src/mongo/db/pipeline/document_source_lookup.cpp b/src/mongo/db/pipeline/document_source_lookup.cpp
index e4fd70920f6..26a4a56d843 100644
--- a/src/mongo/db/pipeline/document_source_lookup.cpp
+++ b/src/mongo/db/pipeline/document_source_lookup.cpp
@@ -679,7 +679,7 @@ void DocumentSourceLookUp::serializeToArray(
}
}
-DocumentSource::GetDepsReturn DocumentSourceLookUp::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceLookUp::getDependencies(DepsTracker* deps) const {
if (wasConstructedWithPipelineSyntax()) {
// We will use the introspection pipeline which we prebuilt during construction.
invariant(_parsedIntrospectionPipeline);
@@ -706,7 +706,7 @@ DocumentSource::GetDepsReturn DocumentSourceLookUp::getDependencies(DepsTracker*
} else {
deps->fields.insert(_localField->fullPath());
}
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
void DocumentSourceLookUp::detachFromOperationContext() {
diff --git a/src/mongo/db/pipeline/document_source_lookup.h b/src/mongo/db/pipeline/document_source_lookup.h
index db62fb9b9f4..b4a86255dba 100644
--- a/src/mongo/db/pipeline/document_source_lookup.h
+++ b/src/mongo/db/pipeline/document_source_lookup.h
@@ -117,7 +117,7 @@ public:
return constraints;
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
BSONObjSet getOutputSorts() final {
return DocumentSource::truncateSortSet(pSource->getOutputSorts(), {_as.fullPath()});
diff --git a/src/mongo/db/pipeline/document_source_lookup_change_post_image.h b/src/mongo/db/pipeline/document_source_lookup_change_post_image.h
index efd5886d6ee..2699675a4b2 100644
--- a/src/mongo/db/pipeline/document_source_lookup_change_post_image.h
+++ b/src/mongo/db/pipeline/document_source_lookup_change_post_image.h
@@ -77,7 +77,7 @@ public:
return constraints;
}
- GetDepsReturn getDependencies(DepsTracker* deps) const {
+ DepsTracker::State getDependencies(DepsTracker* deps) const {
// The namespace is not technically needed yet, but we will need it if there is more than
// one collection involved.
deps->fields.insert(DocumentSourceChangeStream::kNamespaceField.toString());
@@ -86,7 +86,7 @@ public:
deps->fields.insert(DocumentSourceChangeStream::kIdField.toString());
// This stage does not restrict the output fields to a finite set, and has no impact on
// whether metadata is available or needed.
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
/**
diff --git a/src/mongo/db/pipeline/document_source_match.cpp b/src/mongo/db/pipeline/document_source_match.cpp
index 45bc8c8629e..6d5d7471f09 100644
--- a/src/mongo/db/pipeline/document_source_match.cpp
+++ b/src/mongo/db/pipeline/document_source_match.cpp
@@ -477,7 +477,7 @@ BSONObj DocumentSourceMatch::getQuery() const {
return _predicate;
}
-DocumentSource::GetDepsReturn DocumentSourceMatch::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceMatch::getDependencies(DepsTracker* deps) const {
// Get all field or variable dependencies.
_expression->addDependencies(deps);
@@ -486,10 +486,10 @@ DocumentSource::GetDepsReturn DocumentSourceMatch::getDependencies(DepsTracker*
// know what field it will be searching without examining indices.
deps->needWholeDocument = true;
deps->setNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE, true);
- return EXHAUSTIVE_FIELDS;
+ return DepsTracker::State::EXHAUSTIVE_FIELDS;
}
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
DocumentSourceMatch::DocumentSourceMatch(const BSONObj& query,
diff --git a/src/mongo/db/pipeline/document_source_match.h b/src/mongo/db/pipeline/document_source_match.h
index 4deb71b7118..1b66fef5f28 100644
--- a/src/mongo/db/pipeline/document_source_match.h
+++ b/src/mongo/db/pipeline/document_source_match.h
@@ -70,7 +70,7 @@ public:
Pipeline::SourceContainer::iterator doOptimizeAt(Pipeline::SourceContainer::iterator itr,
Pipeline::SourceContainer* container) final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
/**
* Convenience method for creating a $match stage.
diff --git a/src/mongo/db/pipeline/document_source_match_test.cpp b/src/mongo/db/pipeline/document_source_match_test.cpp
index 5f81dc71986..392588f4408 100644
--- a/src/mongo/db/pipeline/document_source_match_test.cpp
+++ b/src/mongo/db/pipeline/document_source_match_test.cpp
@@ -214,7 +214,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfAllBranchesOfOrClause) {
auto match =
DocumentSourceMatch::create(fromjson("{$or: [{a: 1}, {'x.y': {$gt: 4}}]}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.count("x.y"));
ASSERT_EQUALS(2U, dependencies.fields.size());
@@ -225,7 +225,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfAllBranchesOfOrClause) {
TEST_F(DocumentSourceMatchTest, TextSearchShouldRequireWholeDocumentAndTextScore) {
auto match = DocumentSourceMatch::create(fromjson("{$text: {$search: 'hello'} }"), getExpCtx());
DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_FIELDS, match->getDependencies(&dependencies));
ASSERT_EQUALS(true, dependencies.needWholeDocument);
ASSERT_EQUALS(true, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
}
@@ -234,7 +234,7 @@ TEST_F(DocumentSourceMatchTest, ShouldOnlyAddOuterFieldAsDependencyOfImplicitEqu
// Parses to {a: {$eq: {notAField: {$gte: 4}}}}.
auto match = DocumentSourceMatch::create(fromjson("{a: {notAField: {$gte: 4}}}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -245,7 +245,7 @@ TEST_F(DocumentSourceMatchTest, ShouldOnlyAddOuterFieldAsDependencyOfClausesWith
auto match =
DocumentSourceMatch::create(fromjson("{a: {$elemMatch: {c: {$gte: 4}}}}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -262,7 +262,7 @@ TEST_F(DocumentSourceMatchTest,
" }}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -274,7 +274,7 @@ TEST_F(DocumentSourceMatchTest,
auto query = fromjson("{$_internalSchemaMinProperties: 1}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(true, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -285,7 +285,7 @@ TEST_F(DocumentSourceMatchTest,
auto query = fromjson("{$_internalSchemaMaxProperties: 1}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies1;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies1));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies1));
ASSERT_EQUALS(0U, dependencies1.fields.size());
ASSERT_EQUALS(true, dependencies1.needWholeDocument);
ASSERT_EQUALS(false, dependencies1.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -293,7 +293,7 @@ TEST_F(DocumentSourceMatchTest,
query = fromjson("{a: {$_internalSchemaObjectMatch: {$_internalSchemaMaxProperties: 1}}}");
match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies2;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies2));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies2));
ASSERT_EQUALS(1U, dependencies2.fields.size());
ASSERT_EQUALS(1U, dependencies2.fields.count("a"));
ASSERT_EQUALS(false, dependencies2.needWholeDocument);
@@ -307,7 +307,7 @@ TEST_F(DocumentSourceMatchTest,
"namePlaceholder: 'i', patternProperties: [], otherwise: {i: 0}}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(true, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -318,7 +318,7 @@ TEST_F(DocumentSourceMatchTest,
auto query = fromjson("{$_internalSchemaRootDocEq: {a: 1}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(true, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -328,7 +328,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForClausesWithIntern
auto query = fromjson("{a: {$_internalSchemaType: 1}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -339,7 +339,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForClausesWithIntern
auto query = fromjson("{$_internalSchemaCond: [{a: 1}, {b: 1}, {c: 1}]}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(3U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.count("b"));
@@ -352,7 +352,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForClausesWithIntern
auto query = fromjson("{$_internalSchemaXor: [{a: 1}, {b: 1}, {c: 1}]}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(3U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.count("b"));
@@ -365,7 +365,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForClausesWithEmptyJ
DepsTracker dependencies;
auto query = fromjson("{$jsonSchema: {}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -375,7 +375,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForClausesWithJSONSc
DepsTracker dependencies;
auto query = fromjson("{$jsonSchema: {properties: {a: {type: 'number'}}}}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -386,7 +386,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddCorrectDependenciesForMultiplePredicate
DepsTracker dependencies;
auto query = fromjson("{$jsonSchema: {properties: {a: {type: 'number'}}}, b: 1}");
auto match = DocumentSourceMatch::create(query, getExpCtx());
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(2U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.count("b"));
@@ -398,7 +398,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddOuterFieldToDependenciesIfElemMatchCont
auto match =
DocumentSourceMatch::create(fromjson("{a: {$elemMatch: {$gt: 1, $lt: 5}}}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -408,7 +408,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddOuterFieldToDependenciesIfElemMatchCont
TEST_F(DocumentSourceMatchTest, ShouldAddNotClausesFieldAsDependency) {
auto match = DocumentSourceMatch::create(fromjson("{b: {$not: {$gte: 4}}}}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("b"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
@@ -419,7 +419,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfEachNorClause) {
auto match = DocumentSourceMatch::create(
fromjson("{$nor: [{'a.b': {$gte: 4}}, {'b.c': {$in: [1, 2]}}]}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a.b"));
ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
ASSERT_EQUALS(2U, dependencies.fields.size());
@@ -430,7 +430,7 @@ TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfEachNorClause) {
TEST_F(DocumentSourceMatchTest, CommentShouldNotAddAnyDependencies) {
auto match = DocumentSourceMatch::create(fromjson("{$comment: 'misleading?'}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
@@ -440,7 +440,7 @@ TEST_F(DocumentSourceMatchTest, ClauseAndedWithCommentShouldAddDependencies) {
auto match =
DocumentSourceMatch::create(fromjson("{a: 4, $comment: 'irrelevant'}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
diff --git a/src/mongo/db/pipeline/document_source_out.cpp b/src/mongo/db/pipeline/document_source_out.cpp
index 71644ebc495..dff099c20a2 100644
--- a/src/mongo/db/pipeline/document_source_out.cpp
+++ b/src/mongo/db/pipeline/document_source_out.cpp
@@ -241,8 +241,8 @@ Value DocumentSourceOut::serialize(boost::optional<ExplainOptions::Verbosity> ex
return Value(DOC(getSourceName() << _outputNs.coll()));
}
-DocumentSource::GetDepsReturn DocumentSourceOut::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceOut::getDependencies(DepsTracker* deps) const {
deps->needWholeDocument = true;
- return EXHAUSTIVE_ALL;
+ return DepsTracker::State::EXHAUSTIVE_ALL;
}
}
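
The $out change above is the simplest illustration of the new return type: a stage that consumes whole documents records that fact on the tracker and reports DepsTracker::State::EXHAUSTIVE_ALL, so the analysis cannot prune any fields on its behalf. The sketch below shows the same pattern for a hypothetical stage; the class name DocumentSourceArchive is illustrative only and not part of this patch, and the usual DocumentSource/DepsTracker headers from the tree are assumed.

// Sketch only: 'DocumentSourceArchive' is a hypothetical stage, not part of this patch.
DepsTracker::State DocumentSourceArchive::getDependencies(DepsTracker* deps) const {
    // Every field of every input document is consumed, so no field-level pruning is
    // possible upstream of this stage.
    deps->needWholeDocument = true;
    // EXHAUSTIVE_ALL: both the field and the metadata requirements are fully known here.
    return DepsTracker::State::EXHAUSTIVE_ALL;
}
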
diff --git a/src/mongo/db/pipeline/document_source_out.h b/src/mongo/db/pipeline/document_source_out.h
index d6732e59b9d..6945c674267 100644
--- a/src/mongo/db/pipeline/document_source_out.h
+++ b/src/mongo/db/pipeline/document_source_out.h
@@ -42,7 +42,7 @@ public:
GetNextResult getNext() final;
const char* getSourceName() const final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
StageConstraints constraints(Pipeline::SplitState pipeState) const final {
return {StreamType::kStreaming,
diff --git a/src/mongo/db/pipeline/document_source_project_test.cpp b/src/mongo/db/pipeline/document_source_project_test.cpp
index c991eebbbc4..c944c5047ea 100644
--- a/src/mongo/db/pipeline/document_source_project_test.cpp
+++ b/src/mongo/db/pipeline/document_source_project_test.cpp
@@ -165,7 +165,7 @@ TEST_F(ProjectStageTest, InclusionShouldAddDependenciesOfIncludedAndComputedFiel
fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"),
getExpCtx());
DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, project->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_FIELDS, project->getDependencies(&dependencies));
ASSERT_EQUALS(5U, dependencies.fields.size());
// Implicit _id dependency.
@@ -188,7 +188,7 @@ TEST_F(ProjectStageTest, ExclusionShouldNotAddDependencies) {
auto project = DocumentSourceProject::create(fromjson("{a: false, 'b.c': false}"), getExpCtx());
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, project->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, project->getDependencies(&dependencies));
ASSERT_EQUALS(0U, dependencies.fields.size());
ASSERT_EQUALS(false, dependencies.needWholeDocument);
diff --git a/src/mongo/db/pipeline/document_source_replace_root.cpp b/src/mongo/db/pipeline/document_source_replace_root.cpp
index 6e7ce90cc73..7674a49446c 100644
--- a/src/mongo/db/pipeline/document_source_replace_root.cpp
+++ b/src/mongo/db/pipeline/document_source_replace_root.cpp
@@ -45,8 +45,7 @@ using boost::intrusive_ptr;
/**
* This class implements the transformation logic for the $replaceRoot stage.
*/
-class ReplaceRootTransformation final
- : public DocumentSourceSingleDocumentTransformation::TransformerInterface {
+class ReplaceRootTransformation final : public TransformerInterface {
public:
ReplaceRootTransformation(const boost::intrusive_ptr<ExpressionContext>& expCtx)
@@ -80,15 +79,16 @@ public:
_newRoot->optimize();
}
- Document serializeStageOptions(boost::optional<ExplainOptions::Verbosity> explain) const final {
+ Document serializeTransformation(
+ boost::optional<ExplainOptions::Verbosity> explain) const final {
return Document{{"newRoot", _newRoot->serialize(static_cast<bool>(explain))}};
}
- DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const final {
+ DepsTracker::State addDependencies(DepsTracker* deps) const final {
_newRoot->addDependencies(deps);
// This stage will replace the entire document with a new document, so any existing fields
// will be replaced and cannot be required as dependencies.
- return DocumentSource::EXHAUSTIVE_FIELDS;
+ return DepsTracker::State::EXHAUSTIVE_FIELDS;
}
DocumentSource::GetModPathsReturn getModifiedPaths() const final {
diff --git a/src/mongo/db/pipeline/document_source_replace_root_test.cpp b/src/mongo/db/pipeline/document_source_replace_root_test.cpp
index 0ac8a9f9db0..69cb5fb5d4f 100644
--- a/src/mongo/db/pipeline/document_source_replace_root_test.cpp
+++ b/src/mongo/db/pipeline/document_source_replace_root_test.cpp
@@ -258,7 +258,8 @@ TEST_F(ReplaceRootBasics, OnlyDependentFieldIsNewRoot) {
auto replaceRoot = createReplaceRoot(BSON("newRoot"
<< "$a.b"));
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, replaceRoot->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_FIELDS,
+ replaceRoot->getDependencies(&dependencies));
// Should only depend on field a.b
ASSERT_EQUALS(1U, dependencies.fields.size());
diff --git a/src/mongo/db/pipeline/document_source_sample.h b/src/mongo/db/pipeline/document_source_sample.h
index ddba846442d..6195328b90d 100644
--- a/src/mongo/db/pipeline/document_source_sample.h
+++ b/src/mongo/db/pipeline/document_source_sample.h
@@ -52,8 +52,8 @@ public:
TransactionRequirement::kAllowed};
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return SEE_NEXT;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::SEE_NEXT;
}
boost::intrusive_ptr<DocumentSource> getShardSource() final;
diff --git a/src/mongo/db/pipeline/document_source_sample_from_random_cursor.cpp b/src/mongo/db/pipeline/document_source_sample_from_random_cursor.cpp
index b429dbba6bf..ea9131fa5ef 100644
--- a/src/mongo/db/pipeline/document_source_sample_from_random_cursor.cpp
+++ b/src/mongo/db/pipeline/document_source_sample_from_random_cursor.cpp
@@ -148,10 +148,9 @@ Value DocumentSourceSampleFromRandomCursor::serialize(
return Value(DOC(getSourceName() << DOC("size" << _size)));
}
-DocumentSource::GetDepsReturn DocumentSourceSampleFromRandomCursor::getDependencies(
- DepsTracker* deps) const {
+DepsTracker::State DocumentSourceSampleFromRandomCursor::getDependencies(DepsTracker* deps) const {
deps->fields.insert(_idField);
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
intrusive_ptr<DocumentSourceSampleFromRandomCursor> DocumentSourceSampleFromRandomCursor::create(
diff --git a/src/mongo/db/pipeline/document_source_sample_from_random_cursor.h b/src/mongo/db/pipeline/document_source_sample_from_random_cursor.h
index bf7f75252ed..083ddf38afb 100644
--- a/src/mongo/db/pipeline/document_source_sample_from_random_cursor.h
+++ b/src/mongo/db/pipeline/document_source_sample_from_random_cursor.h
@@ -42,7 +42,7 @@ public:
GetNextResult getNext() final;
const char* getSourceName() const final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
StageConstraints constraints(Pipeline::SplitState pipeState) const final {
return {StreamType::kStreaming,
diff --git a/src/mongo/db/pipeline/document_source_single_document_transformation.cpp b/src/mongo/db/pipeline/document_source_single_document_transformation.cpp
index 41be7bbb096..9c9fc3e9249 100644
--- a/src/mongo/db/pipeline/document_source_single_document_transformation.cpp
+++ b/src/mongo/db/pipeline/document_source_single_document_transformation.cpp
@@ -77,7 +77,7 @@ intrusive_ptr<DocumentSource> DocumentSourceSingleDocumentTransformation::optimi
void DocumentSourceSingleDocumentTransformation::doDispose() {
if (_parsedTransform) {
// Cache the stage options document in case this stage is serialized after disposing.
- _cachedStageOptions = _parsedTransform->serializeStageOptions(pExpCtx->explain);
+ _cachedStageOptions = _parsedTransform->serializeTransformation(pExpCtx->explain);
_parsedTransform.reset();
}
}
@@ -85,7 +85,7 @@ void DocumentSourceSingleDocumentTransformation::doDispose() {
Value DocumentSourceSingleDocumentTransformation::serialize(
boost::optional<ExplainOptions::Verbosity> explain) const {
return Value(Document{{getSourceName(),
- _parsedTransform ? _parsedTransform->serializeStageOptions(explain)
+ _parsedTransform ? _parsedTransform->serializeTransformation(explain)
: _cachedStageOptions}});
}
@@ -101,7 +101,7 @@ Pipeline::SourceContainer::iterator DocumentSourceSingleDocumentTransformation::
return std::next(itr);
}
-DocumentSource::GetDepsReturn DocumentSourceSingleDocumentTransformation::getDependencies(
+DepsTracker::State DocumentSourceSingleDocumentTransformation::getDependencies(
DepsTracker* deps) const {
// Each parsed transformation is responsible for adding its own dependencies, and returning
// the correct dependency return type for that transformation.
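
The remainder of this method falls outside the hunk, but given the comment above and the _parsedTransform member used in doDispose(), it presumably reduces to a single forward to the parsed transformation. A hedged reconstruction of the missing tail:

    // Presumed continuation (not shown in this hunk): delegate to the transformation, which
    // records its own field paths and picks SEE_NEXT, EXHAUSTIVE_FIELDS, etc. itself.
    return _parsedTransform->addDependencies(deps);
}
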
diff --git a/src/mongo/db/pipeline/document_source_single_document_transformation.h b/src/mongo/db/pipeline/document_source_single_document_transformation.h
index 477972ffdde..2fb7c876cc3 100644
--- a/src/mongo/db/pipeline/document_source_single_document_transformation.h
+++ b/src/mongo/db/pipeline/document_source_single_document_transformation.h
@@ -29,6 +29,7 @@
#pragma once
#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/transformer_interface.h"
namespace mongo {
@@ -40,56 +41,6 @@ namespace mongo {
*/
class DocumentSourceSingleDocumentTransformation final : public DocumentSource {
public:
- /**
- * This class defines the minimal interface that every parser wishing to take advantage of
- * DocumentSourceSingleDocumentTransformation must implement.
- *
- * This interface ensures that DocumentSourceSingleDocumentTransformations are passed parsed
- * objects that can execute the transformation and provide additional features like
- * serialization and reporting and returning dependencies. The parser must also provide
- * implementations for optimizing and adding the expression context, even if those functions do
- * nothing.
- */
- class TransformerInterface {
- public:
- enum class TransformerType {
- kExclusionProjection,
- kInclusionProjection,
- kComputedProjection,
- kReplaceRoot,
- };
- virtual ~TransformerInterface() = default;
- virtual Document applyTransformation(const Document& input) = 0;
- virtual TransformerType getType() const = 0;
- virtual void optimize() = 0;
- virtual DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const = 0;
- virtual GetModPathsReturn getModifiedPaths() const = 0;
-
- /**
- * Returns the document describing this stage, not including the stage name. For example,
- * should return just {_id: 0, x: 1} for the stage parsed from {$project: {_id: 0, x: 1}}.
- */
- virtual Document serializeStageOptions(
- boost::optional<ExplainOptions::Verbosity> explain) const = 0;
-
- /**
- * Returns true if this transformer is an inclusion projection and is a subset of
- * 'proj', which must be a valid projection specification. For example, if this
- * TransformerInterface represents the inclusion projection
- *
- * {a: 1, b: 1, c: 1}
- *
- * then it is a subset of the projection {a: 1, c: 1}, and this function returns
- * true.
- */
- virtual bool isSubsetOfProjection(const BSONObj& proj) const {
- return false;
- }
-
- private:
- friend class DocumentSourceSingleDocumentTransformation;
- };
-
DocumentSourceSingleDocumentTransformation(
const boost::intrusive_ptr<ExpressionContext>& pExpCtx,
std::unique_ptr<TransformerInterface> parsedTransform,
@@ -101,7 +52,7 @@ public:
GetNextResult getNext() final;
boost::intrusive_ptr<DocumentSource> optimize() final;
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- DocumentSource::GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
GetModPathsReturn getModifiedPaths() const final;
StageConstraints constraints(Pipeline::SplitState pipeState) const final {
StageConstraints constraints(StreamType::kStreaming,
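
The nested interface deleted above now lives in its own header, mongo/db/pipeline/transformer_interface.h, which this file includes at the top of the hunk. That header is not shown in this diff; the sketch below reconstructs its likely shape from the deleted block and from the overrides updated elsewhere in the patch, with serializeStageOptions renamed to serializeTransformation and GetDepsReturn replaced by DepsTracker::State.

// Hedged reconstruction; the exact contents of transformer_interface.h are not shown here.
class TransformerInterface {
public:
    enum class TransformerType {
        kExclusionProjection,
        kInclusionProjection,
        kComputedProjection,
        kReplaceRoot,
    };
    virtual ~TransformerInterface() = default;
    virtual Document applyTransformation(const Document& input) = 0;
    virtual TransformerType getType() const = 0;
    virtual void optimize() = 0;
    virtual DepsTracker::State addDependencies(DepsTracker* deps) const = 0;
    virtual DocumentSource::GetModPathsReturn getModifiedPaths() const = 0;

    // Renamed from serializeStageOptions(): returns the stage's options document without the
    // stage name, e.g. {_id: 0, x: 1} for the stage parsed from {$project: {_id: 0, x: 1}}.
    virtual Document serializeTransformation(
        boost::optional<ExplainOptions::Verbosity> explain) const = 0;

    // Returns true if this transformer is an inclusion projection and a subset of 'proj'.
    virtual bool isSubsetOfProjection(const BSONObj& proj) const {
        return false;
    }
};
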
diff --git a/src/mongo/db/pipeline/document_source_skip.h b/src/mongo/db/pipeline/document_source_skip.h
index bb14fd3966e..927957d87a7 100644
--- a/src/mongo/db/pipeline/document_source_skip.h
+++ b/src/mongo/db/pipeline/document_source_skip.h
@@ -78,8 +78,8 @@ public:
: SimpleBSONObjComparator::kInstance.makeBSONObjSet();
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return SEE_NEXT; // This doesn't affect needed fields
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::SEE_NEXT; // This doesn't affect needed fields
}
// Virtuals for NeedsMergerDocumentSource
diff --git a/src/mongo/db/pipeline/document_source_sort.cpp b/src/mongo/db/pipeline/document_source_sort.cpp
index a3374521faf..3aaf0e4d511 100644
--- a/src/mongo/db/pipeline/document_source_sort.cpp
+++ b/src/mongo/db/pipeline/document_source_sort.cpp
@@ -216,7 +216,7 @@ Pipeline::SourceContainer::iterator DocumentSourceSort::doOptimizeAt(
return std::next(itr);
}
-DocumentSource::GetDepsReturn DocumentSourceSort::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceSort::getDependencies(DepsTracker* deps) const {
for (auto&& keyPart : _sortPattern) {
if (keyPart.expression) {
keyPart.expression->addDependencies(deps);
@@ -229,7 +229,7 @@ DocumentSource::GetDepsReturn DocumentSourceSort::getDependencies(DepsTracker* d
deps->setNeedsMetadata(DepsTracker::MetadataType::SORT_KEY, true);
}
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
diff --git a/src/mongo/db/pipeline/document_source_sort.h b/src/mongo/db/pipeline/document_source_sort.h
index 81871e7224c..8d558023324 100644
--- a/src/mongo/db/pipeline/document_source_sort.h
+++ b/src/mongo/db/pipeline/document_source_sort.h
@@ -82,7 +82,7 @@ public:
return allPrefixes(_rawSort);
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
boost::intrusive_ptr<DocumentSource> getShardSource() final;
std::list<boost::intrusive_ptr<DocumentSource>> getMergeSources() final;
diff --git a/src/mongo/db/pipeline/document_source_sort_test.cpp b/src/mongo/db/pipeline/document_source_sort_test.cpp
index 9f1f9606220..42a6590dca6 100644
--- a/src/mongo/db/pipeline/document_source_sort_test.cpp
+++ b/src/mongo/db/pipeline/document_source_sort_test.cpp
@@ -164,7 +164,7 @@ TEST_F(DocumentSourceSortTest, SortWithLimit) {
TEST_F(DocumentSourceSortTest, Dependencies) {
createSort(BSON("a" << 1 << "b.c" << -1));
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, sort()->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, sort()->getDependencies(&dependencies));
ASSERT_EQUALS(2U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("a"));
ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
diff --git a/src/mongo/db/pipeline/document_source_tee_consumer.h b/src/mongo/db/pipeline/document_source_tee_consumer.h
index 75b33ea8415..e1805701ce0 100644
--- a/src/mongo/db/pipeline/document_source_tee_consumer.h
+++ b/src/mongo/db/pipeline/document_source_tee_consumer.h
@@ -67,8 +67,8 @@ public:
/**
* Returns SEE_NEXT, since it requires no fields, and changes nothing about the documents.
*/
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return GetDepsReturn::SEE_NEXT;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::SEE_NEXT;
}
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
diff --git a/src/mongo/db/pipeline/document_source_unwind.cpp b/src/mongo/db/pipeline/document_source_unwind.cpp
index 1c172cb17be..810638526a6 100644
--- a/src/mongo/db/pipeline/document_source_unwind.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind.cpp
@@ -247,9 +247,9 @@ Value DocumentSourceUnwind::serialize(boost::optional<ExplainOptions::Verbosity>
<< (_indexPath ? Value((*_indexPath).fullPath()) : Value()))));
}
-DocumentSource::GetDepsReturn DocumentSourceUnwind::getDependencies(DepsTracker* deps) const {
+DepsTracker::State DocumentSourceUnwind::getDependencies(DepsTracker* deps) const {
deps->fields.insert(_unwindPath.fullPath());
- return SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
intrusive_ptr<DocumentSource> DocumentSourceUnwind::createFromBson(
diff --git a/src/mongo/db/pipeline/document_source_unwind.h b/src/mongo/db/pipeline/document_source_unwind.h
index 763d0523642..02cec716fcd 100644
--- a/src/mongo/db/pipeline/document_source_unwind.h
+++ b/src/mongo/db/pipeline/document_source_unwind.h
@@ -58,7 +58,7 @@ public:
return constraints;
}
- GetDepsReturn getDependencies(DepsTracker* deps) const final;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final;
/**
* Creates a new $unwind DocumentSource from a BSON specification.
diff --git a/src/mongo/db/pipeline/document_source_unwind_test.cpp b/src/mongo/db/pipeline/document_source_unwind_test.cpp
index af04d436083..f6b8e3e7a29 100644
--- a/src/mongo/db/pipeline/document_source_unwind_test.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind_test.cpp
@@ -676,7 +676,7 @@ TEST_F(UnwindStageTest, AddsUnwoundPathToDependencies) {
auto unwind =
DocumentSourceUnwind::create(getExpCtx(), "x.y.z", false, boost::optional<string>("index"));
DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, unwind->getDependencies(&dependencies));
+ ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, unwind->getDependencies(&dependencies));
ASSERT_EQUALS(1U, dependencies.fields.size());
ASSERT_EQUALS(1U, dependencies.fields.count("x.y.z"));
ASSERT_EQUALS(false, dependencies.needWholeDocument);
diff --git a/src/mongo/db/pipeline/parsed_add_fields.h b/src/mongo/db/pipeline/parsed_add_fields.h
index f5e53284357..a7fa9c3e0eb 100644
--- a/src/mongo/db/pipeline/parsed_add_fields.h
+++ b/src/mongo/db/pipeline/parsed_add_fields.h
@@ -71,7 +71,8 @@ public:
*/
void parse(const BSONObj& spec) final;
- Document serializeStageOptions(boost::optional<ExplainOptions::Verbosity> explain) const final {
+ Document serializeTransformation(
+ boost::optional<ExplainOptions::Verbosity> explain) const final {
MutableDocument output;
_root->serialize(&output, explain);
return output.freeze();
@@ -84,9 +85,9 @@ public:
_root->optimize();
}
- DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const final {
+ DepsTracker::State addDependencies(DepsTracker* deps) const final {
_root->addDependencies(deps);
- return DocumentSource::SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
DocumentSource::GetModPathsReturn getModifiedPaths() const final {
diff --git a/src/mongo/db/pipeline/parsed_add_fields_test.cpp b/src/mongo/db/pipeline/parsed_add_fields_test.cpp
index 1191ea21cb5..8f40b681693 100644
--- a/src/mongo/db/pipeline/parsed_add_fields_test.cpp
+++ b/src/mongo/db/pipeline/parsed_add_fields_test.cpp
@@ -180,13 +180,13 @@ TEST(ParsedAddFieldsSerialize, SerializesToCorrectForm) {
fromjson("{a: {$add: [\"$a\", {$const: 2}]}, b: {d: {$const: 3}}, x: {y: {$const: 4}}}"));
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
// Verify that serialize treats the _id field as any other field: including when explicitly included.
@@ -199,13 +199,13 @@ TEST(ParsedAddFieldsSerialize, AddsIdToSerializeWhenExplicitlyIncluded) {
auto expectedSerialization = Document(fromjson("{_id: {$const: false}}"));
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
// Verify that serialize treats the _id field as any other field: excluded when not explicitly
@@ -221,13 +221,13 @@ TEST(ParsedAddFieldsSerialize, OmitsIdFromSerializeWhenNotIncluded) {
auto expectedSerialization = Document(fromjson("{a: {$const: true}}"));
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
// Verify that the $addFields stage optimizes expressions into simpler forms when possible.
@@ -239,13 +239,13 @@ TEST(ParsedAddFieldsOptimize, OptimizesTopLevelExpressions) {
auto expectedSerialization = Document{{"a", Document{{"$const", 3}}}};
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
// Verify that the $addFields stage optimizes expressions even when they are nested.
@@ -257,13 +257,13 @@ TEST(ParsedAddFieldsOptimize, ShouldOptimizeNestedExpressions) {
auto expectedSerialization = Document{{"a", Document{{"b", Document{{"$const", 3}}}}}};
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, addition.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- addition.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ addition.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
//
diff --git a/src/mongo/db/pipeline/parsed_aggregation_projection.cpp b/src/mongo/db/pipeline/parsed_aggregation_projection.cpp
index 6c90053efc4..5e106efe1e4 100644
--- a/src/mongo/db/pipeline/parsed_aggregation_projection.cpp
+++ b/src/mongo/db/pipeline/parsed_aggregation_projection.cpp
@@ -46,8 +46,7 @@
namespace mongo {
namespace parsed_aggregation_projection {
-using TransformerType =
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType;
+using TransformerType = TransformerInterface::TransformerType;
using expression::isPathPrefixOf;
@@ -150,6 +149,14 @@ void ProjectionSpecValidator::parseNestedObject(const BSONObj& thisLevelSpec,
namespace {
+using ProjectionParseMode = ParsedAggregationProjection::ProjectionParseMode;
+
+std::string makeBannedComputedFieldsErrorMessage(BSONObj projSpec) {
+ return str::stream() << "Bad projection specification, cannot use computed fields when parsing "
+ "a spec in kBanComputedFields mode: "
+ << projSpec.toString();
+}
+
/**
* This class is responsible for determining what type of $project stage it specifies.
*/
@@ -158,17 +165,18 @@ public:
/**
* Parses 'spec' to determine whether it is an inclusion or exclusion projection. 'Computed'
* fields (ones which are defined by an expression or a literal) are treated as inclusion
- * projections for in this context of the$project stage.
+ * projections in the context of the $project stage.
*/
- static TransformerType parse(const BSONObj& spec) {
- ProjectTypeParser parser(spec);
+ static TransformerType parse(const BSONObj& spec, ProjectionParseMode parseMode) {
+ ProjectTypeParser parser(spec, parseMode);
parser.parse();
invariant(parser._parsedType);
return *(parser._parsedType);
}
private:
- ProjectTypeParser(const BSONObj& spec) : _rawObj(spec) {}
+ ProjectTypeParser(const BSONObj& spec, ProjectionParseMode parseMode)
+ : _rawObj(spec), _parseMode(parseMode) {}
/**
* Parses a single BSONElement, with 'fieldName' representing the path used for projection
@@ -202,8 +210,7 @@ private:
// Default to inclusion if nothing (except maybe '_id') is explicitly included or excluded.
if (!_parsedType) {
- _parsedType = DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kInclusionProjection;
+ _parsedType = TransformerInterface::TransformerType::kInclusionProjection;
}
}
@@ -213,13 +220,22 @@ private:
* Delegates to parseSubObject() if 'elem' is an object. Otherwise updates '_parsedType' if
* appropriate.
*
- * Throws a AssertionException if this element represents a mix of projection types.
+ * Throws an AssertionException if this element represents a mix of projection types. If we are
+ * parsing in ProjectionParseMode::kBanComputedFields mode, an inclusion projection which
+ * contains computed fields will also be rejected.
*/
void parseElement(const BSONElement& elem, const FieldPath& pathToElem) {
if (elem.type() == BSONType::Object) {
return parseNestedObject(elem.Obj(), pathToElem);
}
+ // If this element is not a boolean or numeric value, then it is a literal value. These are
+ // illegal if we are in kBanComputedFields parse mode.
+ uassert(ErrorCodes::FailedToParse,
+ makeBannedComputedFieldsErrorMessage(_rawObj),
+ elem.isBoolean() || elem.isNumber() ||
+ _parseMode != ProjectionParseMode::kBanComputedFields);
+
if ((elem.isBoolean() || elem.isNumber()) && !elem.trueValue()) {
// A top-level exclusion of "_id" is allowed in either an inclusion projection or an
// exclusion projection, so doesn't affect '_parsedType'.
@@ -230,10 +246,8 @@ private:
<< _rawObj.toString(),
!_parsedType ||
(*_parsedType ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kExclusionProjection));
- _parsedType = DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kExclusionProjection;
+ TransformerInterface::TransformerType::kExclusionProjection));
+ _parsedType = TransformerInterface::TransformerType::kExclusionProjection;
}
} else {
// A boolean true, a truthy numeric value, or any expression can only be used with an
@@ -243,19 +257,18 @@ private:
str::stream() << "Bad projection specification, cannot include fields or "
"add computed fields during an exclusion projection: "
<< _rawObj.toString(),
- !_parsedType ||
- (*_parsedType ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kInclusionProjection));
- _parsedType = DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kInclusionProjection;
+ !_parsedType || (*_parsedType ==
+ TransformerInterface::TransformerType::kInclusionProjection));
+ _parsedType = TransformerInterface::TransformerType::kInclusionProjection;
}
}
/**
* Traverses 'thisLevelSpec', parsing each element in turn.
*
- * Throws a AssertionException if 'thisLevelSpec' represents an invalid mix of projections.
+ * Throws an AssertionException if 'thisLevelSpec' represents an invalid mix of projections. If
+ * we are parsing in ProjectionParseMode::kBanComputedFields mode, an inclusion projection which
+ * contains computed fields will also be rejected.
*/
void parseNestedObject(const BSONObj& thisLevelSpec, const FieldPath& prefix) {
@@ -264,17 +277,18 @@ private:
if (fieldName[0] == '$') {
// This object is an expression specification like {$add: [...]}. It will be parsed
// into an Expression later, but for now, just track that the prefix has been
- // specified and skip it.
+ // specified, validate that computed projections are legal, and skip it.
+ uassert(ErrorCodes::FailedToParse,
+ makeBannedComputedFieldsErrorMessage(_rawObj),
+ _parseMode != ProjectionParseMode::kBanComputedFields);
uassert(40182,
str::stream() << "Bad projection specification, cannot include fields or "
"add computed fields during an exclusion projection: "
<< _rawObj.toString(),
!_parsedType ||
_parsedType ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kInclusionProjection);
- _parsedType = DocumentSourceSingleDocumentTransformation::TransformerInterface::
- TransformerType::kInclusionProjection;
+ TransformerInterface::TransformerType::kInclusionProjection);
+ _parsedType = TransformerInterface::TransformerType::kInclusionProjection;
continue;
}
parseElement(elem, FieldPath::getFullyQualifiedPath(prefix.fullPath(), fieldName));
@@ -286,19 +300,24 @@ private:
// This will be populated during parse().
boost::optional<TransformerType> _parsedType;
+
+ // Determines whether an inclusion projection is permitted to contain computed fields.
+ ProjectionParseMode _parseMode;
};
} // namespace
std::unique_ptr<ParsedAggregationProjection> ParsedAggregationProjection::create(
- const boost::intrusive_ptr<ExpressionContext>& expCtx, const BSONObj& spec) {
+ const boost::intrusive_ptr<ExpressionContext>& expCtx,
+ const BSONObj& spec,
+ ProjectionParseMode parseMode) {
// Check that the specification was valid. Status returned is unspecific because validate()
// is used by the $addFields stage as well as $project.
// If there was an error, uassert with a $project-specific message.
ProjectionSpecValidator::uassertValid(spec, "$project");
// Check for any conflicting specifications, and determine the type of the projection.
- auto projectionType = ProjectTypeParser::parse(spec);
+ auto projectionType = ProjectTypeParser::parse(spec, parseMode);
// kComputed is a projection type reserved for $addFields, and should never be detected by the
// ProjectTypeParser.
invariant(projectionType != TransformerType::kComputedProjection);
@@ -313,6 +332,5 @@ std::unique_ptr<ParsedAggregationProjection> ParsedAggregationProjection::create
parsedProject->parse(spec);
return parsedProject;
}
-
} // namespace parsed_aggregation_projection
} // namespace mongo
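
Both of the new uasserts surface to the caller as an AssertionException carrying ErrorCodes::FailedToParse, so strict-mode callers can tell a banned computed field apart from other parse failures. A sketch, assuming an ExpressionContext named expCtx and the unittest ASSERT_THROWS_CODE helper:

// The nested $add marks 'b' as a computed field, so strict-mode parsing refuses the spec.
ASSERT_THROWS_CODE(
    ParsedAggregationProjection::create(
        expCtx,
        BSON("a" << true << "b" << BSON("$add" << BSON_ARRAY("$c" << 1))),
        ParsedAggregationProjection::ProjectionParseMode::kBanComputedFields),
    AssertionException,
    ErrorCodes::FailedToParse);
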
diff --git a/src/mongo/db/pipeline/parsed_aggregation_projection.h b/src/mongo/db/pipeline/parsed_aggregation_projection.h
index b64ca7d88d7..f3ec7f23c85 100644
--- a/src/mongo/db/pipeline/parsed_aggregation_projection.h
+++ b/src/mongo/db/pipeline/parsed_aggregation_projection.h
@@ -34,8 +34,9 @@
#include <memory>
#include "mongo/bson/bsonelement.h"
-#include "mongo/db/pipeline/document_source_single_document_transformation.h"
+#include "mongo/db/pipeline/expression_context.h"
#include "mongo/db/pipeline/field_path.h"
+#include "mongo/db/pipeline/transformer_interface.h"
namespace mongo {
@@ -137,16 +138,24 @@ private:
* represents either an inclusion or exclusion projection. This is the common interface between the
* two types of projections.
*/
-class ParsedAggregationProjection
- : public DocumentSourceSingleDocumentTransformation::TransformerInterface {
+class ParsedAggregationProjection : public TransformerInterface {
public:
+ // Allows the caller to specify whether computed fields should be allowed within inclusion
+ // projections; they are implicitly prohibited within exclusion projections.
+ enum class ProjectionParseMode {
+ kBanComputedFields, // No computed fields are permitted in the projection spec.
+ kAllowComputedFields // Computed fields are permitted.
+ };
+
/**
* Main entry point for a ParsedAggregationProjection.
*
* Throws a AssertionException if 'spec' is an invalid projection specification.
*/
static std::unique_ptr<ParsedAggregationProjection> create(
- const boost::intrusive_ptr<ExpressionContext>& expCtx, const BSONObj& spec);
+ const boost::intrusive_ptr<ExpressionContext>& expCtx,
+ const BSONObj& spec,
+ ProjectionParseMode parseRules = ProjectionParseMode::kAllowComputedFields);
virtual ~ParsedAggregationProjection() = default;
@@ -166,8 +175,8 @@ public:
/**
* Add any dependencies needed by this projection or any sub-expressions to 'deps'.
*/
- virtual DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const {
- return DocumentSource::NOT_SUPPORTED;
+ virtual DepsTracker::State addDependencies(DepsTracker* deps) const {
+ return DepsTracker::State::NOT_SUPPORTED;
}
/**
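
Because the new parseRules argument defaults to kAllowComputedFields, existing $project and $addFields callers compile and behave exactly as before; only a caller that cannot evaluate expressions opts in to the strict mode. A sketch of the two call forms, assuming an ExpressionContext named expCtx as in the unit tests:

// Default mode: computed fields remain legal, exactly as before this patch.
auto permissive = ParsedAggregationProjection::create(
    expCtx, BSON("a" << true << "b" << BSON("$literal" << 1)));

// Strict mode: the same factory, but any computed field would now fail to parse.
auto strict = ParsedAggregationProjection::create(
    expCtx,
    BSON("a" << true << "b.c" << true),
    ParsedAggregationProjection::ProjectionParseMode::kBanComputedFields);
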
diff --git a/src/mongo/db/pipeline/parsed_aggregation_projection_test.cpp b/src/mongo/db/pipeline/parsed_aggregation_projection_test.cpp
index f6bfad0200e..d7d50ea7316 100644
--- a/src/mongo/db/pipeline/parsed_aggregation_projection_test.cpp
+++ b/src/mongo/db/pipeline/parsed_aggregation_projection_test.cpp
@@ -44,6 +44,8 @@ namespace mongo {
namespace parsed_aggregation_projection {
namespace {
+using ProjectionParseMode = ParsedAggregationProjection::ProjectionParseMode;
+
template <typename T>
BSONObj wrapInLiteral(const T& arg) {
return BSON("$literal" << arg);
@@ -433,140 +435,202 @@ TEST(ParsedAggregationProjectionErrors, ShouldNotErrorOnTwoNestedFields) {
TEST(ParsedAggregationProjectionType, ShouldDefaultToInclusionProjection) {
const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
auto parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id" << true));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("a" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
}
TEST(ParsedAggregationProjectionType, ShouldDetectExclusionProjection) {
const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
auto parsedProject = ParsedAggregationProjection::create(expCtx, BSON("a" << false));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id.x" << false));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id" << BSON("x" << false)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("x" << BSON("_id" << false)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id" << false));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
}
TEST(ParsedAggregationProjectionType, ShouldDetectInclusionProjection) {
const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
auto parsedProject = ParsedAggregationProjection::create(expCtx, BSON("a" << true));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject =
ParsedAggregationProjection::create(expCtx, BSON("_id" << false << "a" << true));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject =
ParsedAggregationProjection::create(expCtx, BSON("_id" << false << "a.b.c" << true));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id.x" << true));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id" << BSON("x" << true)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("x" << BSON("_id" << true)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
}
TEST(ParsedAggregationProjectionType, ShouldTreatOnlyComputedFieldsAsAnInclusionProjection) {
const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
auto parsedProject = ParsedAggregationProjection::create(expCtx, BSON("a" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(
expCtx, BSON("_id" << false << "a" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(
expCtx, BSON("_id" << false << "a.b.c" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(expCtx, BSON("_id.x" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject =
ParsedAggregationProjection::create(expCtx, BSON("_id" << BSON("x" << wrapInLiteral(1))));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject =
ParsedAggregationProjection::create(expCtx, BSON("x" << BSON("_id" << wrapInLiteral(1))));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
}
TEST(ParsedAggregationProjectionType, ShouldAllowMixOfInclusionAndComputedFields) {
const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
auto parsedProject =
ParsedAggregationProjection::create(expCtx, BSON("a" << true << "b" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(
expCtx, BSON("a.b" << true << "a.c" << wrapInLiteral(1)));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
parsedProject = ParsedAggregationProjection::create(
expCtx, BSON("a" << BSON("b" << true << "c" << wrapInLiteral(1))));
- ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(expCtx,
+ BSON("a" << BSON("b" << true << "c"
+ << "stringLiteral")));
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+}
+
+TEST(ParsedAggregationProjectionType, ShouldRejectMixOfInclusionAndComputedFieldsInStrictMode) {
+ const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
+ ASSERT_THROWS(ParsedAggregationProjection::create(expCtx,
+ BSON("a" << true << "b" << wrapInLiteral(1)),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(
+ ParsedAggregationProjection::create(expCtx,
+ BSON("a.b" << true << "a.c" << wrapInLiteral(1)),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(ParsedAggregationProjection::create(
+ expCtx,
+ BSON("a" << BSON("b" << true << "c" << wrapInLiteral(1))),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(ParsedAggregationProjection::create(expCtx,
+ BSON("a" << BSON("b" << true << "c"
+ << "stringLiteral")),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+}
+
+TEST(ParsedAggregationProjectionType, ShouldRejectOnlyComputedFieldsInStrictMode) {
+ const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
+ ASSERT_THROWS(ParsedAggregationProjection::create(
+ expCtx,
+ BSON("a" << wrapInLiteral(1) << "b" << wrapInLiteral(2)),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(ParsedAggregationProjection::create(
+ expCtx,
+ BSON("a.b" << wrapInLiteral(1) << "a.c" << wrapInLiteral(2)),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(ParsedAggregationProjection::create(
+ expCtx,
+ BSON("a" << BSON("b" << wrapInLiteral(1) << "c" << wrapInLiteral(2))),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+
+ ASSERT_THROWS(ParsedAggregationProjection::create(
+ expCtx,
+ BSON("a" << BSON("b" << wrapInLiteral(1) << "c" << wrapInLiteral(2))),
+ ProjectionParseMode::kBanComputedFields),
+ AssertionException);
+}
+
+TEST(ParsedAggregationProjectionType, ShouldAcceptInclusionProjectionInStrictMode) {
+ const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
+ auto parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("a" << true), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id" << false << "a" << true), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id" << false << "a.b.c" << true), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id.x" << true), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id" << BSON("x" << true)), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("x" << BSON("_id" << true)), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kInclusionProjection);
+}
+
+TEST(ParsedAggregationProjectionType, ShouldAcceptExclusionProjectionInStrictMode) {
+ const boost::intrusive_ptr<ExpressionContextForTest> expCtx(new ExpressionContextForTest());
+ auto parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("a" << false), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id.x" << false), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id" << BSON("x" << false)), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("x" << BSON("_id" << false)), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
+
+ parsedProject = ParsedAggregationProjection::create(
+ expCtx, BSON("_id" << false), ProjectionParseMode::kBanComputedFields);
+ ASSERT(parsedProject->getType() == TransformerInterface::TransformerType::kExclusionProjection);
}
TEST(ParsedAggregationProjectionType, ShouldCoerceNumericsToBools) {
@@ -576,8 +640,7 @@ TEST(ParsedAggregationProjectionType, ShouldCoerceNumericsToBools) {
auto parsedProject =
ParsedAggregationProjection::create(expCtx, Document{{"a", zero}}.toBson());
ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kExclusionProjection);
+ TransformerInterface::TransformerType::kExclusionProjection);
}
std::vector<Value> nonZeroes = {
@@ -586,8 +649,7 @@ TEST(ParsedAggregationProjectionType, ShouldCoerceNumericsToBools) {
auto parsedProject =
ParsedAggregationProjection::create(expCtx, Document{{"a", nonZero}}.toBson());
ASSERT(parsedProject->getType() ==
- DocumentSourceSingleDocumentTransformation::TransformerInterface::TransformerType::
- kInclusionProjection);
+ TransformerInterface::TransformerType::kInclusionProjection);
}
}
diff --git a/src/mongo/db/pipeline/parsed_exclusion_projection.cpp b/src/mongo/db/pipeline/parsed_exclusion_projection.cpp
index 7eca645c314..4dffd87f64f 100644
--- a/src/mongo/db/pipeline/parsed_exclusion_projection.cpp
+++ b/src/mongo/db/pipeline/parsed_exclusion_projection.cpp
@@ -135,7 +135,7 @@ void ExclusionNode::addModifiedPaths(std::set<std::string>* modifiedPaths) const
// ParsedExclusionProjection.
//
-Document ParsedExclusionProjection::serializeStageOptions(
+Document ParsedExclusionProjection::serializeTransformation(
boost::optional<ExplainOptions::Verbosity> explain) const {
return _root->serialize();
}
diff --git a/src/mongo/db/pipeline/parsed_exclusion_projection.h b/src/mongo/db/pipeline/parsed_exclusion_projection.h
index 3de597910bc..381143f2390 100644
--- a/src/mongo/db/pipeline/parsed_exclusion_projection.h
+++ b/src/mongo/db/pipeline/parsed_exclusion_projection.h
@@ -104,7 +104,8 @@ public:
return TransformerType::kExclusionProjection;
}
- Document serializeStageOptions(boost::optional<ExplainOptions::Verbosity> explain) const final;
+ Document serializeTransformation(
+ boost::optional<ExplainOptions::Verbosity> explain) const final;
/**
* Parses the projection specification given by 'spec', populating internal data structures.
@@ -118,8 +119,8 @@ public:
*/
Document applyProjection(const Document& inputDoc) const final;
- DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const final {
- return DocumentSource::SEE_NEXT;
+ DepsTracker::State addDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::SEE_NEXT;
}
DocumentSource::GetModPathsReturn getModifiedPaths() const final {
diff --git a/src/mongo/db/pipeline/parsed_exclusion_projection_test.cpp b/src/mongo/db/pipeline/parsed_exclusion_projection_test.cpp
index 33d915a47a9..c5f31b1ad67 100644
--- a/src/mongo/db/pipeline/parsed_exclusion_projection_test.cpp
+++ b/src/mongo/db/pipeline/parsed_exclusion_projection_test.cpp
@@ -87,7 +87,7 @@ TEST(ExclusionProjection, ShouldSerializeToEquivalentProjection) {
// Converts numbers to bools, converts dotted paths to nested documents. Note order of excluded
// fields is subject to change.
- auto serialization = exclusion.serializeStageOptions(boost::none);
+ auto serialization = exclusion.serializeTransformation(boost::none);
ASSERT_EQ(serialization.size(), 4UL);
ASSERT_VALUE_EQ(serialization["a"], Value(false));
ASSERT_VALUE_EQ(serialization["_id"], Value(false));
@@ -103,8 +103,8 @@ TEST(ExclusionProjection, ShouldSerializeToEquivalentProjection) {
}
TEST(ExclusionProjection, ShouldNotAddAnyDependencies) {
- // An exclusion projection will cause the $project stage to return GetDepsReturn::SEE_NEXT,
- // meaning it doesn't strictly require any fields.
+ // An exclusion projection will cause the stage to return DepsTracker::State::SEE_NEXT, meaning
+ // it doesn't strictly require any fields.
//
// For example, if our projection was {a: 0}, and a later stage requires the field "a", then "a"
// will be added to the dependencies correctly. If a later stage doesn't need "a", then we don't
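
The truncated comment above describes exactly the interaction the new return value preserves: because an exclusion answers SEE_NEXT, the stages that follow it still decide which fields are required. A small sketch of that interaction, assuming an ExpressionContext named expCtx and the fromjson/ASSERT_EQUALS helpers used in the surrounding unit tests:

// The exclusion itself asks for nothing...
DepsTracker deps;
auto exclusion = DocumentSourceProject::create(fromjson("{a: 0}"), expCtx);
ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, exclusion->getDependencies(&deps));
ASSERT_EQUALS(0U, deps.fields.size());

// ...but a downstream $match that needs "a" still gets it into the combined dependency set.
auto match = DocumentSourceMatch::create(fromjson("{a: 4}"), expCtx);
ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, match->getDependencies(&deps));
ASSERT_EQUALS(1U, deps.fields.count("a"));
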
diff --git a/src/mongo/db/pipeline/parsed_inclusion_projection.h b/src/mongo/db/pipeline/parsed_inclusion_projection.h
index e93aa0fe736..81fdd0d1db2 100644
--- a/src/mongo/db/pipeline/parsed_inclusion_projection.h
+++ b/src/mongo/db/pipeline/parsed_inclusion_projection.h
@@ -199,7 +199,8 @@ public:
/**
* Serialize the projection.
*/
- Document serializeStageOptions(boost::optional<ExplainOptions::Verbosity> explain) const final {
+ Document serializeTransformation(
+ boost::optional<ExplainOptions::Verbosity> explain) const final {
MutableDocument output;
if (_idExcluded) {
output.addField("_id", Value(false));
@@ -215,9 +216,9 @@ public:
_root->optimize();
}
- DocumentSource::GetDepsReturn addDependencies(DepsTracker* deps) const final {
+ DepsTracker::State addDependencies(DepsTracker* deps) const final {
_root->addDependencies(deps);
- return DocumentSource::EXHAUSTIVE_FIELDS;
+ return DepsTracker::State::EXHAUSTIVE_FIELDS;
}
DocumentSource::GetModPathsReturn getModifiedPaths() const final {
diff --git a/src/mongo/db/pipeline/parsed_inclusion_projection_test.cpp b/src/mongo/db/pipeline/parsed_inclusion_projection_test.cpp
index 799ac5f92b3..46d28083680 100644
--- a/src/mongo/db/pipeline/parsed_inclusion_projection_test.cpp
+++ b/src/mongo/db/pipeline/parsed_inclusion_projection_test.cpp
@@ -138,13 +138,13 @@ TEST(InclusionProjection, ShouldSerializeToEquivalentProjection) {
"{_id: true, a: {$add: [\"$a\", {$const: 2}]}, b: {d: true}, x: {y: {$const: 4}}}"));
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
TEST(InclusionProjection, ShouldSerializeExplicitExclusionOfId) {
@@ -156,13 +156,13 @@ TEST(InclusionProjection, ShouldSerializeExplicitExclusionOfId) {
auto expectedSerialization = Document{{"_id", false}, {"a", true}};
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
@@ -176,13 +176,13 @@ TEST(InclusionProjection, ShouldOptimizeTopLevelExpressions) {
auto expectedSerialization = Document{{"_id", true}, {"a", Document{{"$const", 3}}}};
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
TEST(InclusionProjection, ShouldOptimizeNestedExpressions) {
@@ -196,13 +196,13 @@ TEST(InclusionProjection, ShouldOptimizeNestedExpressions) {
Document{{"_id", true}, {"a", Document{{"b", Document{{"$const", 3}}}}}};
// Should be the same if we're serializing for explain or for internal use.
- ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeStageOptions(boost::none));
+ ASSERT_DOCUMENT_EQ(expectedSerialization, inclusion.serializeTransformation(boost::none));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kQueryPlanner));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kQueryPlanner));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecStats));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecStats));
ASSERT_DOCUMENT_EQ(expectedSerialization,
- inclusion.serializeStageOptions(ExplainOptions::Verbosity::kExecAllPlans));
+ inclusion.serializeTransformation(ExplainOptions::Verbosity::kExecAllPlans));
}
TEST(InclusionProjection, ShouldReportThatAllExceptIncludedFieldsAreModified) {
diff --git a/src/mongo/db/pipeline/pipeline.cpp b/src/mongo/db/pipeline/pipeline.cpp
index dcd8359324f..e97421096d6 100644
--- a/src/mongo/db/pipeline/pipeline.cpp
+++ b/src/mongo/db/pipeline/pipeline.cpp
@@ -487,11 +487,11 @@ DepsTracker Pipeline::getDependencies(DepsTracker::MetadataAvailable metadataAva
bool knowAllMeta = false;
for (auto&& source : _sources) {
DepsTracker localDeps(deps.getMetadataAvailable());
- DocumentSource::GetDepsReturn status = source->getDependencies(&localDeps);
+ DepsTracker::State status = source->getDependencies(&localDeps);
deps.vars.insert(localDeps.vars.begin(), localDeps.vars.end());
- if ((skipFieldsAndMetadataDeps |= (status == DocumentSource::NOT_SUPPORTED))) {
+ if ((skipFieldsAndMetadataDeps |= (status == DepsTracker::State::NOT_SUPPORTED))) {
// Assume this stage needs everything. We may still know something about our
// dependencies if an earlier stage returned EXHAUSTIVE_FIELDS or EXHAUSTIVE_META. If
// this scope has variables, we need to keep enumerating the remaining stages but will
@@ -507,14 +507,14 @@ DepsTracker Pipeline::getDependencies(DepsTracker::MetadataAvailable metadataAva
deps.fields.insert(localDeps.fields.begin(), localDeps.fields.end());
if (localDeps.needWholeDocument)
deps.needWholeDocument = true;
- knowAllFields = status & DocumentSource::EXHAUSTIVE_FIELDS;
+ knowAllFields = status & DepsTracker::State::EXHAUSTIVE_FIELDS;
}
if (!knowAllMeta) {
for (auto&& req : localDeps.getAllRequiredMetadataTypes()) {
deps.setNeedsMetadata(req, true);
}
- knowAllMeta = status & DocumentSource::EXHAUSTIVE_META;
+ knowAllMeta = status & DepsTracker::State::EXHAUSTIVE_META;
}
// If there are variables defined at this pipeline's scope, there may be dependencies upon
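
The '&' tests above rely on DepsTracker::State behaving as a bitmask rather than a plain enumeration. A minimal sketch of the assumed layout (the enumerator values are not shown in this patch and are assumed to carry over from the old DocumentSource::GetDepsReturn constants):

    // Assumed: EXHAUSTIVE_FIELDS and EXHAUSTIVE_META occupy distinct bits and EXHAUSTIVE_ALL is
    // their combination, so a stage like $group can exhaust both with a single return value while
    // each check in the loop above still isolates the guarantee it cares about.
    DepsTracker::State status = DepsTracker::State::EXHAUSTIVE_ALL;
    bool knowAllFields = status & DepsTracker::State::EXHAUSTIVE_FIELDS;  // true
    bool knowAllMeta = status & DepsTracker::State::EXHAUSTIVE_META;      // true
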
diff --git a/src/mongo/db/pipeline/pipeline_test.cpp b/src/mongo/db/pipeline/pipeline_test.cpp
index 537f7a23ab0..bae65136100 100644
--- a/src/mongo/db/pipeline/pipeline_test.cpp
+++ b/src/mongo/db/pipeline/pipeline_test.cpp
@@ -2515,8 +2515,8 @@ public:
class DocumentSourceDependenciesNotSupported : public DocumentSourceDependencyDummy {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return GetDepsReturn::NOT_SUPPORTED;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::NOT_SUPPORTED;
}
static boost::intrusive_ptr<DocumentSourceDependenciesNotSupported> create() {
@@ -2526,9 +2526,9 @@ public:
class DocumentSourceNeedsASeeNext : public DocumentSourceDependencyDummy {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
deps->fields.insert("a");
- return GetDepsReturn::SEE_NEXT;
+ return DepsTracker::State::SEE_NEXT;
}
static boost::intrusive_ptr<DocumentSourceNeedsASeeNext> create() {
@@ -2538,9 +2538,9 @@ public:
class DocumentSourceNeedsOnlyB : public DocumentSourceDependencyDummy {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
deps->fields.insert("b");
- return GetDepsReturn::EXHAUSTIVE_FIELDS;
+ return DepsTracker::State::EXHAUSTIVE_FIELDS;
}
static boost::intrusive_ptr<DocumentSourceNeedsOnlyB> create() {
@@ -2550,9 +2550,9 @@ public:
class DocumentSourceNeedsOnlyTextScore : public DocumentSourceDependencyDummy {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
deps->setNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE, true);
- return GetDepsReturn::EXHAUSTIVE_META;
+ return DepsTracker::State::EXHAUSTIVE_META;
}
static boost::intrusive_ptr<DocumentSourceNeedsOnlyTextScore> create() {
@@ -2562,8 +2562,8 @@ public:
class DocumentSourceStripsTextScore : public DocumentSourceDependencyDummy {
public:
- GetDepsReturn getDependencies(DepsTracker* deps) const final {
- return GetDepsReturn::EXHAUSTIVE_META;
+ DepsTracker::State getDependencies(DepsTracker* deps) const final {
+ return DepsTracker::State::EXHAUSTIVE_META;
}
static boost::intrusive_ptr<DocumentSourceStripsTextScore> create() {
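
The dummy sources above exist only to drive Pipeline::getDependencies in the tests; each one now reports its dependencies through the new DepsTracker::State return type. A minimal sketch of exercising one of them directly (a default-constructed DepsTracker is assumed to be sufficient here):

    // SEE_NEXT tells the pipeline to keep collecting field dependencies from later stages, while
    // the tracker records that this stage reads "a".
    DepsTracker deps;
    auto seeNext = DocumentSourceNeedsASeeNext::create();
    ASSERT(seeNext->getDependencies(&deps) == DepsTracker::State::SEE_NEXT);
    ASSERT(deps.fields.count("a") == 1U);
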
diff --git a/src/mongo/db/pipeline/transformer_interface.h b/src/mongo/db/pipeline/transformer_interface.h
new file mode 100644
index 00000000000..30d142b01ce
--- /dev/null
+++ b/src/mongo/db/pipeline/transformer_interface.h
@@ -0,0 +1,83 @@
+/**
+ * Copyright (C) 2018 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#pragma once
+
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/query/explain_options.h"
+
+namespace mongo {
+/**
+ * This class defines the minimal interface that every parser wishing to take advantage of
+ * DocumentSourceSingleDocumentTransformation must implement.
+ *
+ * This interface ensures that DocumentSourceSingleDocumentTransformations are passed parsed
+ * objects that can execute the transformation and provide additional features like
+ * serialization and dependency reporting. The parser must also provide
+ * implementations for optimizing and adding the expression context, even if those functions do
+ * nothing.
+ */
+class TransformerInterface {
+public:
+ enum class TransformerType {
+ kExclusionProjection,
+ kInclusionProjection,
+ kComputedProjection,
+ kReplaceRoot,
+ };
+ virtual ~TransformerInterface() = default;
+ virtual Document applyTransformation(const Document& input) = 0;
+ virtual TransformerType getType() const = 0;
+ virtual void optimize() = 0;
+ virtual DepsTracker::State addDependencies(DepsTracker* deps) const = 0;
+ virtual DocumentSource::GetModPathsReturn getModifiedPaths() const = 0;
+
+ /**
+ * Returns a document describing this transformation. For example, this function will return
+ * {_id: 0, x: 1} for the stage parsed from {$project: {_id: 0, x: 1}}.
+ */
+ virtual Document serializeTransformation(
+ boost::optional<ExplainOptions::Verbosity> explain) const = 0;
+
+ /**
+ * Returns true if this transformer is an inclusion projection and is a subset of
+ * 'proj', which must be a valid projection specification. For example, if this
+ * TransformerInterface represents the inclusion projection
+ *
+ * {a: 1, c: 1}
+ *
+ * then it is a subset of the projection {a: 1, b: 1, c: 1}, and this function
+ * returns true.
+ */
+ virtual bool isSubsetOfProjection(const BSONObj& proj) const {
+ return false;
+ }
+};
+} // namespace mongo
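
The new header is deliberately small: any parser can participate in DocumentSourceSingleDocumentTransformation by implementing the methods above. A minimal sketch of a hypothetical implementation, for illustration only (the class name, the constant it computes, and the exact shape of DocumentSource::GetModPathsReturn are assumptions, not part of this patch):

    #include "mongo/db/pipeline/transformer_interface.h"
    #include "mongo/db/pipeline/value.h"

    namespace mongo {
    // Hypothetical transformer that replaces each input with {_id: <original _id>, year: 2018}.
    class AddYearTransformer final : public TransformerInterface {
    public:
        Document applyTransformation(const Document& input) final {
            MutableDocument output;
            output.addField("_id", input["_id"]);
            output.addField("year", Value(2018));
            return output.freeze();
        }

        TransformerType getType() const final {
            return TransformerType::kComputedProjection;
        }

        void optimize() final {}  // Nothing to fold; the computed value is already constant.

        DepsTracker::State addDependencies(DepsTracker* deps) const final {
            // Only "_id" is read from the input, so the field dependencies are exhaustive.
            deps->fields.insert("_id");
            return DepsTracker::State::EXHAUSTIVE_FIELDS;
        }

        DocumentSource::GetModPathsReturn getModifiedPaths() const final {
            // Assumed constructor shape: every path except the preserved "_id" is modified, and
            // there are no renames.
            return DocumentSource::GetModPathsReturn(
                DocumentSource::GetModPathsReturn::Type::kAllExcept, {"_id"}, {});
        }

        Document serializeTransformation(
            boost::optional<ExplainOptions::Verbosity> explain) const final {
            return Document{{"_id", true}, {"year", Document{{"$const", 2018}}}};
        }
    };
    }  // namespace mongo
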
diff --git a/src/mongo/db/query/parsed_projection.h b/src/mongo/db/query/parsed_projection.h
index cbf7ad6903b..cb3d96631a2 100644
--- a/src/mongo/db/query/parsed_projection.h
+++ b/src/mongo/db/query/parsed_projection.h
@@ -26,6 +26,8 @@
* it in the license file.
*/
+#pragma once
+
#include "mongo/db/jsobj.h"
#include "mongo/db/matcher/expression_parser.h"
#include "mongo/util/mongoutils/str.h"