Diffstat (limited to 'src')

-rw-r--r--  src/mongo/db/pipeline/document_source.cpp                  |  48
-rw-r--r--  src/mongo/db/pipeline/document_source.h                    |  27
-rw-r--r--  src/mongo/db/pipeline/document_source_cursor.cpp           |   1
-rw-r--r--  src/mongo/db/pipeline/document_source_cursor.h             |   5
-rw-r--r--  src/mongo/db/pipeline/document_source_geo_near_cursor.cpp  |   5
-rw-r--r--  src/mongo/db/pipeline/document_source_geo_near_cursor.h    |   6
-rw-r--r--  src/mongo/db/pipeline/document_source_graph_lookup.cpp     |  12
-rw-r--r--  src/mongo/db/pipeline/document_source_graph_lookup.h       |   3
-rw-r--r--  src/mongo/db/pipeline/document_source_group.cpp            | 283
-rw-r--r--  src/mongo/db/pipeline/document_source_group.h              |  22
-rw-r--r--  src/mongo/db/pipeline/document_source_group_test.cpp       | 310
-rw-r--r--  src/mongo/db/pipeline/document_source_limit.h              |   5
-rw-r--r--  src/mongo/db/pipeline/document_source_lookup.h             |   4
-rw-r--r--  src/mongo/db/pipeline/document_source_lookup_test.cpp      |  46
-rw-r--r--  src/mongo/db/pipeline/document_source_match.h              |   5
-rw-r--r--  src/mongo/db/pipeline/document_source_mock.h               |   4
-rw-r--r--  src/mongo/db/pipeline/document_source_skip.h               |   6
-rw-r--r--  src/mongo/db/pipeline/document_source_sort.h               |   4
-rw-r--r--  src/mongo/db/pipeline/document_source_sort_test.cpp        |   8
-rw-r--r--  src/mongo/db/pipeline/document_source_test.cpp             |  39
-rw-r--r--  src/mongo/db/pipeline/document_source_unwind.cpp           |  25
-rw-r--r--  src/mongo/db/pipeline/document_source_unwind.h             |   3
-rw-r--r--  src/mongo/db/pipeline/document_source_unwind_test.cpp      |  12
-rw-r--r--  src/mongo/db/query/plan_executor.h                         |   6
-rw-r--r--  src/mongo/db/query/plan_executor_impl.cpp                  |  25
-rw-r--r--  src/mongo/db/query/plan_executor_impl.h                    |   1
-rw-r--r--  src/mongo/dbtests/documentsourcetests.cpp                  |  37

27 files changed, 10 insertions(+), 942 deletions(-)
diff --git a/src/mongo/db/pipeline/document_source.cpp b/src/mongo/db/pipeline/document_source.cpp
index 56cf91d4e1e..25cf5605a3b 100644
--- a/src/mongo/db/pipeline/document_source.cpp
+++ b/src/mongo/db/pipeline/document_source.cpp
@@ -327,52 +327,4 @@ void DocumentSource::serializeToArray(vector<Value>& array,
}
}
-BSONObjSet DocumentSource::allPrefixes(BSONObj obj) {
- BSONObjSet out = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
-
- BSONObj last = {};
- for (auto&& field : obj) {
- BSONObjBuilder builder(last.objsize() + field.size());
- builder.appendElements(last);
- builder.append(field);
- last = builder.obj();
- out.insert(last);
- }
-
- return out;
-}
-
-BSONObjSet DocumentSource::truncateSortSet(const BSONObjSet& sorts,
- const std::set<std::string>& fields) {
- BSONObjSet out = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
-
- for (auto&& sort : sorts) {
- BSONObjBuilder outputSort;
-
- for (auto&& key : sort) {
- auto keyName = key.fieldNameStringData();
-
- bool shouldAppend = true;
- for (auto&& field : fields) {
- if (keyName == field || keyName.startsWith(field + '.')) {
- shouldAppend = false;
- break;
- }
- }
-
- if (!shouldAppend) {
- break;
- }
-
- outputSort.append(key);
- }
-
- BSONObj outSortObj = outputSort.obj();
- if (!outSortObj.isEmpty()) {
- out.insert(outSortObj);
- }
- }
-
- return out;
-}
} // namespace mongo
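
The two helpers removed above, DocumentSource::allPrefixes() and DocumentSource::truncateSortSet(), lose their last callers in this change. Their behavior, as described by the header comments deleted in the next file, can be sketched with plain standard-library types; the SortPattern alias and the free functions below are simplified stand-ins for illustration, not the mongo API:

// Minimal sketch, assuming a sort pattern is modeled as an ordered list of
// (path, direction) pairs rather than a BSONObj.
#include <iostream>
#include <set>
#include <string>
#include <utility>
#include <vector>

using SortPattern = std::vector<std::pair<std::string, int>>;

// All non-empty prefixes of a sort pattern, e.g. {a: 1, b: 1, c: 1} ->
// {{a: 1}, {a: 1, b: 1}, {a: 1, b: 1, c: 1}}.
std::set<SortPattern> allPrefixes(const SortPattern& pattern) {
    std::set<SortPattern> out;
    SortPattern prefix;
    for (const auto& key : pattern) {
        prefix.push_back(key);
        out.insert(prefix);
    }
    return out;
}

// Truncate each sort pattern before the first key that is a member of 'fields'
// or a dotted child of one; empty results are dropped.
std::set<SortPattern> truncateSortSet(const std::set<SortPattern>& sorts,
                                      const std::set<std::string>& fields) {
    std::set<SortPattern> out;
    for (const auto& sort : sorts) {
        SortPattern truncated;
        for (const auto& key : sort) {
            bool clobbered = false;
            for (const auto& field : fields) {
                if (key.first == field || key.first.rfind(field + '.', 0) == 0) {
                    clobbered = true;  // this key is one of 'fields' or a child of one
                    break;
                }
            }
            if (clobbered)
                break;  // nothing past this key is still meaningful
            truncated.push_back(key);
        }
        if (!truncated.empty())
            out.insert(truncated);
    }
    return out;
}

int main() {
    SortPattern abc{{"a", 1}, {"b", 1}, {"c", 1}};
    std::cout << allPrefixes(abc).size() << '\n';               // 3
    std::cout << truncateSortSet({abc}, {"b"}).size() << '\n';  // 1, namely {a: 1}
}
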
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index 1e6efa6dc54..f3f67db8e0a 100644
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -328,26 +328,6 @@ public:
*/
static void registerParser(std::string name, Parser parser);
- /**
- * Given a BSONObj, construct a BSONObjSet consisting of all prefixes of that object. For
- * example, given {a: 1, b: 1, c: 1}, this will return a set: {{a: 1}, {a: 1, b: 1}, {a: 1, b:
- * 1, c: 1}}.
- */
- static BSONObjSet allPrefixes(BSONObj obj);
-
- /**
- * Given a BSONObjSet, where each BSONObj represents a sort key, return the BSONObjSet that
- * results from truncating each sort key before the first path that is a member of 'fields', or
- * is a child of a member of 'fields'.
- */
- static BSONObjSet truncateSortSet(const BSONObjSet& sorts, const std::set<std::string>& fields);
-
- //
- // Optimization API - These methods give each DocumentSource an opportunity to apply any local
- // optimizations, and to provide any rule-based optimizations to swap with or absorb subsequent
- // stages.
- //
-
private:
/**
* Attempt to push a match stage from directly ahead of the current stage given by itr to before
@@ -397,13 +377,6 @@ public:
// whether or not a blocking group can be upgraded to a streaming group).
//
- /**
- * Gets a BSONObjSet representing the sort order(s) of the output of the stage.
- */
- virtual BSONObjSet getOutputSorts() {
- return SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- }
-
struct GetModPathsReturn {
enum class Type {
// No information is available about which paths are modified.
diff --git a/src/mongo/db/pipeline/document_source_cursor.cpp b/src/mongo/db/pipeline/document_source_cursor.cpp
index d3f1ae34003..99e45abbfe9 100644
--- a/src/mongo/db/pipeline/document_source_cursor.cpp
+++ b/src/mongo/db/pipeline/document_source_cursor.cpp
@@ -303,7 +303,6 @@ DocumentSourceCursor::DocumentSourceCursor(
: DocumentSource(pCtx),
_docsAddedToBatches(0),
_exec(std::move(exec)),
- _outputSorts(_exec->getOutputSorts()),
_trackOplogTS(trackOplogTimestamp) {
// Later code in the DocumentSourceCursor lifecycle expects that '_exec' is in a saved state.
_exec->saveState();
diff --git a/src/mongo/db/pipeline/document_source_cursor.h b/src/mongo/db/pipeline/document_source_cursor.h
index ba76fc3598a..d1ca145b886 100644
--- a/src/mongo/db/pipeline/document_source_cursor.h
+++ b/src/mongo/db/pipeline/document_source_cursor.h
@@ -51,10 +51,6 @@ public:
const char* getSourceName() const override;
- BSONObjSet getOutputSorts() override {
- return _outputSorts;
- }
-
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
StageConstraints constraints(Pipeline::SplitState pipeState) const final {
@@ -225,7 +221,6 @@ private:
// the default.
Status _execStatus = Status::OK();
- BSONObjSet _outputSorts;
std::string _planSummary;
PlanSummaryStats _planSummaryStats;
diff --git a/src/mongo/db/pipeline/document_source_geo_near_cursor.cpp b/src/mongo/db/pipeline/document_source_geo_near_cursor.cpp
index ce5e0f204a8..0a2d4b71e45 100644
--- a/src/mongo/db/pipeline/document_source_geo_near_cursor.cpp
+++ b/src/mongo/db/pipeline/document_source_geo_near_cursor.cpp
@@ -84,11 +84,6 @@ const char* DocumentSourceGeoNearCursor::getSourceName() const {
return kStageName;
}
-BSONObjSet DocumentSourceGeoNearCursor::getOutputSorts() {
- return SimpleBSONObjComparator::kInstance.makeBSONObjSet(
- {BSON(_distanceField.fullPath() << 1)});
-}
-
Document DocumentSourceGeoNearCursor::transformBSONObjToDocument(const BSONObj& obj) const {
MutableDocument output(Document::fromBsonWithMetaData(obj));
diff --git a/src/mongo/db/pipeline/document_source_geo_near_cursor.h b/src/mongo/db/pipeline/document_source_geo_near_cursor.h
index 84df929fc79..c23e01dc146 100644
--- a/src/mongo/db/pipeline/document_source_geo_near_cursor.h
+++ b/src/mongo/db/pipeline/document_source_geo_near_cursor.h
@@ -69,12 +69,6 @@ public:
const char* getSourceName() const final;
- /**
- * $geoNear returns documents ordered from nearest to furthest, which is an ascending sort on
- * '_distanceField'.
- */
- BSONObjSet getOutputSorts() final;
-
private:
DocumentSourceGeoNearCursor(Collection*,
std::unique_ptr<PlanExecutor, PlanExecutor::Deleter>,
diff --git a/src/mongo/db/pipeline/document_source_graph_lookup.cpp b/src/mongo/db/pipeline/document_source_graph_lookup.cpp
index eea8b370cdf..007718f0e13 100644
--- a/src/mongo/db/pipeline/document_source_graph_lookup.cpp
+++ b/src/mongo/db/pipeline/document_source_graph_lookup.cpp
@@ -376,18 +376,6 @@ Pipeline::SourceContainer::iterator DocumentSourceGraphLookUp::doOptimizeAt(
return std::next(itr);
}
-BSONObjSet DocumentSourceGraphLookUp::getOutputSorts() {
- std::set<std::string> fields{_as.fullPath()};
- if (_depthField) {
- fields.insert(_depthField->fullPath());
- }
- if (_unwind && (*_unwind)->indexPath()) {
- fields.insert((*_unwind)->indexPath()->fullPath());
- }
-
- return DocumentSource::truncateSortSet(pSource->getOutputSorts(), fields);
-}
-
void DocumentSourceGraphLookUp::checkMemoryUsage() {
// TODO SERVER-23980: Implement spilling to disk if allowDiskUse is specified.
uassert(40099,
diff --git a/src/mongo/db/pipeline/document_source_graph_lookup.h b/src/mongo/db/pipeline/document_source_graph_lookup.h
index 4077c058770..6449560875b 100644
--- a/src/mongo/db/pipeline/document_source_graph_lookup.h
+++ b/src/mongo/db/pipeline/document_source_graph_lookup.h
@@ -43,8 +43,9 @@ public:
const AggregationRequest& request, const BSONElement& spec);
GetNextResult getNext() final;
+
const char* getSourceName() const final;
- BSONObjSet getOutputSorts() final;
+
void serializeToArray(
std::vector<Value>& array,
boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
index 839945ff86b..fa006b78b0c 100644
--- a/src/mongo/db/pipeline/document_source_group.cpp
+++ b/src/mongo/db/pipeline/document_source_group.cpp
@@ -149,8 +149,6 @@ DocumentSource::GetNextResult DocumentSourceGroup::getNext() {
if (_spilled) {
return getNextSpilled();
- } else if (_streaming) {
- return getNextStreaming();
} else {
return getNextStandard();
}
@@ -203,44 +201,6 @@ DocumentSource::GetNextResult DocumentSourceGroup::getNextStandard() {
return std::move(out);
}
-DocumentSource::GetNextResult DocumentSourceGroup::getNextStreaming() {
- // Streaming optimization is active.
- if (!_firstDocOfNextGroup) {
- auto nextInput = pSource->getNext();
- if (!nextInput.isAdvanced()) {
- return nextInput;
- }
- _firstDocOfNextGroup = nextInput.releaseDocument();
- }
-
- Value id;
- do {
- // Add to the current accumulator(s).
- for (size_t i = 0; i < _currentAccumulators.size(); i++) {
- _currentAccumulators[i]->process(
- _accumulatedFields[i].expression->evaluate(*_firstDocOfNextGroup), _doingMerge);
- }
-
- // Retrieve the next document.
- auto nextInput = pSource->getNext();
- if (!nextInput.isAdvanced()) {
- return nextInput;
- }
-
- _firstDocOfNextGroup = nextInput.releaseDocument();
-
-
- // Compute the id. If it does not match _currentId, we will exit the loop, leaving
- // _firstDocOfNextGroup set for the next time getNext() is called.
- id = computeId(*_firstDocOfNextGroup);
- } while (pExpCtx->getValueComparator().evaluate(_currentId == id));
-
- Document out = makeDocument(_currentId, _currentAccumulators, pExpCtx->needsMerge);
- _currentId = std::move(id);
-
- return std::move(out);
-}
-
void DocumentSourceGroup::doDispose() {
// Free our resources.
_groups = pExpCtx->getValueComparator().makeUnorderedValueMap<Accumulators>();
@@ -248,8 +208,6 @@ void DocumentSourceGroup::doDispose() {
// Make us look done.
groupsIterator = _groups->end();
-
- _firstDocOfNextGroup = boost::none;
}
intrusive_ptr<DocumentSource> DocumentSourceGroup::optimize() {
@@ -298,9 +256,6 @@ Value DocumentSourceGroup::serialize(boost::optional<ExplainOptions::Verbosity>
insides["$doingMerge"] = Value(true);
}
- if (explain && findRelevantInputSort()) {
- return Value(DOC("$streamingGroup" << insides.freeze()));
- }
return Value(DOC(getSourceName() << insides.freeze()));
}
@@ -360,8 +315,6 @@ DocumentSourceGroup::DocumentSourceGroup(const intrusive_ptr<ExpressionContext>&
_doingMerge(false),
_maxMemoryUsageBytes(maxMemoryUsageBytes ? *maxMemoryUsageBytes
: internalDocumentSourceGroupMaxMemoryBytes.load()),
- _inputSort(BSONObj()),
- _streaming(false),
_initialized(false),
_groups(pExpCtx->getValueComparator().makeUnorderedValueMap<Accumulators>()),
_spilled(false),
@@ -494,100 +447,11 @@ public:
private:
ValueComparator _valueComparator;
};
-
-bool containsOnlyFieldPathsAndConstants(ExpressionObject* expressionObj) {
- for (auto&& it : expressionObj->getChildExpressions()) {
- const intrusive_ptr<Expression>& childExp = it.second;
- if (dynamic_cast<ExpressionFieldPath*>(childExp.get())) {
- continue;
- } else if (dynamic_cast<ExpressionConstant*>(childExp.get())) {
- continue;
- } else if (auto expObj = dynamic_cast<ExpressionObject*>(childExp.get())) {
- if (!containsOnlyFieldPathsAndConstants(expObj)) {
- // A nested expression was not a FieldPath or a constant.
- return false;
- }
- } else {
- // expressionObj was something other than a FieldPath, a constant, or a nested object.
- return false;
- }
- }
- return true;
-}
-
-void getFieldPathMap(ExpressionObject* expressionObj,
- std::string prefix,
- StringMap<std::string>* fields) {
- // Given an expression with only constant and FieldPath leaf nodes, such as {x: {y: "$a.b"}},
- // attempt to compute a map from each FieldPath leaf to the path of that leaf. In the example,
- // this method would return: {"a.b" : "x.y"}.
-
- for (auto&& it : expressionObj->getChildExpressions()) {
- intrusive_ptr<Expression> childExp = it.second;
- ExpressionObject* expObj = dynamic_cast<ExpressionObject*>(childExp.get());
- ExpressionFieldPath* expPath = dynamic_cast<ExpressionFieldPath*>(childExp.get());
-
- std::string newPrefix = prefix.empty() ? it.first : prefix + "." + it.first;
-
- if (expObj) {
- getFieldPathMap(expObj, newPrefix, fields);
- } else if (expPath) {
- (*fields)[expPath->getFieldPath().tail().fullPath()] = newPrefix;
- }
- }
-}
-
-void getFieldPathListForSpilled(ExpressionObject* expressionObj,
- std::string prefix,
- std::vector<std::string>* fields) {
- // Given an expression, attempt to compute a vector of strings, each representing the path
- // through the object to a leaf. For example, for the expression represented by
- // {x: 2, y: {z: "$a.b"}}, the output would be ["x", "y.z"].
- for (auto&& it : expressionObj->getChildExpressions()) {
- intrusive_ptr<Expression> childExp = it.second;
- ExpressionObject* expObj = dynamic_cast<ExpressionObject*>(childExp.get());
-
- std::string newPrefix = prefix.empty() ? it.first : prefix + "." + it.first;
-
- if (expObj) {
- getFieldPathListForSpilled(expObj, newPrefix, fields);
- } else {
- fields->push_back(newPrefix);
- }
- }
-}
} // namespace
DocumentSource::GetNextResult DocumentSourceGroup::initialize() {
const size_t numAccumulators = _accumulatedFields.size();
- boost::optional<BSONObj> inputSort = findRelevantInputSort();
- if (inputSort) {
- // We can convert to streaming.
- _streaming = true;
- _inputSort = *inputSort;
-
- // Set up accumulators.
- _currentAccumulators.reserve(numAccumulators);
- for (auto&& accumulatedField : _accumulatedFields) {
- _currentAccumulators.push_back(accumulatedField.makeAccumulator(pExpCtx));
- }
-
- // We only need to load the first document.
- auto firstInput = pSource->getNext();
- if (!firstInput.isAdvanced()) {
- // Leave '_firstDocOfNextGroup' uninitialized and return.
- return firstInput;
- }
- _firstDocOfNextGroup = firstInput.releaseDocument();
-
- // Compute the _id value.
- _currentId = computeId(*_firstDocOfNextGroup);
- _initialized = true;
- return DocumentSource::GetNextResult::makeEOF();
- }
-
-
// Barring any pausing, this loop exhausts 'pSource' and populates '_groups'.
GetNextResult input = pSource->getNext();
for (; input.isAdvanced(); input = pSource->getNext()) {
@@ -744,153 +608,6 @@ shared_ptr<Sorter<Value, Value>::Iterator> DocumentSourceGroup::spill() {
return shared_ptr<Sorter<Value, Value>::Iterator>(iteratorPtr);
}
-boost::optional<BSONObj> DocumentSourceGroup::findRelevantInputSort() const {
- if (true) {
- // Until streaming $group correctly handles nullish values, the streaming behavior is
- // disabled. See SERVER-23318.
- return boost::none;
- }
-
- if (!pSource) {
- // Sometimes when performing an explain, or using $group as the merge point, 'pSource' will
- // not be set.
- return boost::none;
- }
-
- BSONObjSet sorts = pSource->getOutputSorts();
-
- // 'sorts' is a BSONObjSet. We need to check if our group pattern is compatible with one of the
- // input sort patterns.
-
- // We will only attempt to take advantage of a sorted input stream if the _id given to the
- // $group contained only FieldPaths or constants. Determine if this is the case, and extract
- // those FieldPaths if it is.
- DepsTracker deps(DepsTracker::MetadataAvailable::kNoMetadata); // We don't support streaming
- // based off a text score.
- for (auto&& exp : _idExpressions) {
- if (dynamic_cast<ExpressionConstant*>(exp.get())) {
- continue;
- }
- ExpressionObject* obj;
- if ((obj = dynamic_cast<ExpressionObject*>(exp.get()))) {
- // We can only perform an optimization if there are no operators in the _id expression.
- if (!containsOnlyFieldPathsAndConstants(obj)) {
- return boost::none;
- }
- } else if (!dynamic_cast<ExpressionFieldPath*>(exp.get())) {
- return boost::none;
- }
- exp->addDependencies(&deps);
- }
-
- if (deps.needWholeDocument) {
- // We don't swap to streaming if we need the entire document, which is likely because of
- // $$ROOT.
- return boost::none;
- }
-
- if (deps.fields.empty()) {
- // Our _id field is constant, so we should stream, but the input sort we choose is
- // irrelevant since we will output only one document.
- return BSONObj();
- }
-
- for (auto&& obj : sorts) {
- // Note that a sort order of, e.g., {a: 1, b: 1, c: 1} allows us to do a non-blocking group
- // for every permutation of group by (a, b, c), since we are guaranteed that documents with
- // the same value of (a, b, c) will be consecutive in the input stream, no matter what our
- // _id is.
- auto fieldNames = obj.getFieldNames<std::set<std::string>>();
- if (fieldNames == deps.fields) {
- return obj;
- }
- }
-
- return boost::none;
-}
-
-BSONObjSet DocumentSourceGroup::getOutputSorts() {
- if (!_initialized) {
- initialize(); // Note this might not finish initializing, but that's OK. We just want to
- // do some initialization to try to determine if we are streaming or spilled.
- // False negatives are OK.
- }
-
- if (!(_streaming || _spilled)) {
- return SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- }
-
- BSONObjBuilder sortOrder;
-
- if (_idFieldNames.empty()) {
- if (_spilled) {
- sortOrder.append("_id", 1);
- } else {
- // We have an expression like {_id: "$a"}. Check if this is a FieldPath, and if it is,
- // get the sort order out of it.
- if (auto obj = dynamic_cast<ExpressionFieldPath*>(_idExpressions[0].get())) {
- FieldPath _idSort = obj->getFieldPath();
-
- sortOrder.append(
- "_id",
- _inputSort.getIntField(_idSort.getFieldName(_idSort.getPathLength() - 1)));
- }
- }
- } else if (_streaming) {
- // At this point, we know that _streaming is true, so _id must have only contained
- // ExpressionObjects, ExpressionConstants or ExpressionFieldPaths. We now process each
- // '_idExpression'.
-
- // We populate 'fieldMap' such that each key is a field the input is sorted by, and the
- // value is where that input field is located within the _id document. For example, if our
- // _id object is {_id: {x: {y: "$a.b"}}}, 'fieldMap' would be: {'a.b': '_id.x.y'}.
- StringMap<std::string> fieldMap;
- for (size_t i = 0; i < _idFieldNames.size(); i++) {
- intrusive_ptr<Expression> exp = _idExpressions[i];
- if (auto obj = dynamic_cast<ExpressionObject*>(exp.get())) {
- // _id is an object containing a nested document, such as: {_id: {x: {y: "$b"}}}.
- getFieldPathMap(obj, "_id." + _idFieldNames[i], &fieldMap);
- } else if (auto fieldPath = dynamic_cast<ExpressionFieldPath*>(exp.get())) {
- FieldPath _idSort = fieldPath->getFieldPath();
- fieldMap[_idSort.getFieldName(_idSort.getPathLength() - 1)] =
- "_id." + _idFieldNames[i];
- }
- }
-
- // Because the order of '_inputSort' is important, we go through each field we are sorted on
- // and append it to the BSONObjBuilder in order.
- for (BSONElement sortField : _inputSort) {
- std::string sortString = sortField.fieldNameStringData().toString();
-
- auto itr = fieldMap.find(sortString);
-
- // If our sort order is (a, b, c), we could not have converted to a streaming $group if
- // our _id was predicated on (a, c) but not 'b'. Verify that this is true.
- invariant(itr != fieldMap.end());
-
- sortOrder.append(itr->second, _inputSort.getIntField(sortString));
- }
- } else {
- // We are blocking and have spilled to disk.
- std::vector<std::string> outputSort;
- for (size_t i = 0; i < _idFieldNames.size(); i++) {
- intrusive_ptr<Expression> exp = _idExpressions[i];
- if (auto obj = dynamic_cast<ExpressionObject*>(exp.get())) {
- // _id is an object containing a nested document, such as: {_id: {x: {y: "$b"}}}.
- getFieldPathListForSpilled(obj, "_id." + _idFieldNames[i], &outputSort);
- } else {
- outputSort.push_back("_id." + _idFieldNames[i]);
- }
- }
- for (auto&& field : outputSort) {
- sortOrder.append(field, 1);
- }
- }
-
- return allPrefixes(sortOrder.obj());
-}
-
-
Value DocumentSourceGroup::computeId(const Document& root) {
// If only one expression, return result directly
if (_idExpressions.size() == 1) {
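
The getNextStreaming()/findRelevantInputSort() pair deleted above implemented the non-blocking $group, which had already been disabled pending SERVER-23318: when the input was known to be sorted by the group key, documents were accumulated only until the computed _id changed, and each group was emitted immediately instead of hashing the entire input. A minimal, self-contained sketch of that control flow, using simplified types and a single hard-coded sum accumulator purely for illustration:

#include <iostream>
#include <optional>
#include <string>
#include <utility>
#include <vector>

struct Doc {
    std::string key;  // the value the group _id is computed from
    int value;        // the value fed to the accumulator
};

// Emits one (key, sum) pair per consecutive run of equal keys. This only
// produces correct groups if the input is already sorted by 'key'.
std::vector<std::pair<std::string, int>> streamingGroupSum(const std::vector<Doc>& sortedInput) {
    std::vector<std::pair<std::string, int>> out;
    std::optional<std::string> currentId;
    int accumulator = 0;
    for (const auto& doc : sortedInput) {
        if (currentId && doc.key != *currentId) {
            out.emplace_back(*currentId, accumulator);  // key changed: flush the group
            accumulator = 0;
        }
        currentId = doc.key;
        accumulator += doc.value;
    }
    if (currentId)
        out.emplace_back(*currentId, accumulator);  // flush the final group
    return out;
}

int main() {
    for (const auto& [id, sum] : streamingGroupSum({{"a", 1}, {"a", 2}, {"b", 5}}))
        std::cout << id << ": " << sum << '\n';  // prints "a: 3" then "b: 5"
}
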
diff --git a/src/mongo/db/pipeline/document_source_group.h b/src/mongo/db/pipeline/document_source_group.h
index 5b364cd2338..b166d8e8335 100644
--- a/src/mongo/db/pipeline/document_source_group.h
+++ b/src/mongo/db/pipeline/document_source_group.h
@@ -98,7 +98,6 @@ public:
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
GetNextResult getNext() final;
const char* getSourceName() const final;
- BSONObjSet getOutputSorts() final;
GetModPathsReturn getModifiedPaths() const final;
/**
@@ -152,10 +151,6 @@ public:
_doingMerge = doingMerge;
}
- bool isStreaming() const {
- return _streaming;
- }
-
/**
* Returns true if this $group stage used disk during execution and false otherwise.
*/
@@ -187,21 +182,14 @@ private:
~DocumentSourceGroup();
/**
- * getNext() dispatches to one of these three depending on what type of $group it is. All three
- * of these methods expect '_currentAccumulators' to have been reset before being called, and
- * also expect initialize() to have been called already.
+ * getNext() dispatches to one of these two depending on what type of $group it is. These
+ * methods expect '_currentAccumulators' to have been reset before being called, and also expect
+ * initialize() to have been called already.
*/
- GetNextResult getNextStreaming();
GetNextResult getNextSpilled();
GetNextResult getNextStandard();
/**
- * Attempt to identify an input sort order that allows us to turn into a streaming $group. If we
- * find one, return it. Otherwise, return boost::none.
- */
- boost::optional<BSONObj> findRelevantInputSort() const;
-
- /**
* Before returning anything, this source must prepare itself. In a streaming $group,
* initialize() requests the first document from the previous source, and uses it to prepare the
* accumulators. In an unsorted $group, initialize() exhausts the previous source before
@@ -251,8 +239,6 @@ private:
std::vector<std::string> _idFieldNames; // used when id is a document
std::vector<boost::intrusive_ptr<Expression>> _idExpressions;
- BSONObj _inputSort;
- bool _streaming;
bool _initialized;
Value _currentId;
@@ -274,8 +260,6 @@ private:
const bool _allowDiskUse;
std::pair<Value, Value> _firstPartOfNextGroup;
- // Only used when '_sorted' is true.
- boost::optional<Document> _firstDocOfNextGroup;
};
} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_group_test.cpp b/src/mongo/db/pipeline/document_source_group_test.cpp
index 22173f400c1..111a2db01f2 100644
--- a/src/mongo/db/pipeline/document_source_group_test.cpp
+++ b/src/mongo/db/pipeline/document_source_group_test.cpp
@@ -810,316 +810,6 @@ public:
}
};
-class StreamingOptimization : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 0}", "{a: 0}", "{a: 1}", "{a: 1}"});
- source->sorts = {BSON("a" << 1)};
-
- createGroup(BSON("_id"
- << "$a"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(0));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(1));
-
- assertEOF(source);
-
- res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(1));
-
- assertEOF(group());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 1U);
-
- ASSERT_EQUALS(outputSort.count(BSON("_id" << 1)), 1U);
- }
-};
-
-class StreamingWithMultipleIdFields : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create(
- {"{a: 1, b: 2}", "{a: 1, b: 2}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 1}"});
- source->sorts = {BSON("a" << 1 << "b" << -1)};
-
- createGroup(fromjson("{_id: {x: '$a', y: '$b'}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(2));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(1));
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
-
- assertEOF(source);
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = BSON("_id.x" << 1 << "_id.y" << -1);
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSort = BSON("_id.x" << 1);
- ASSERT_EQUALS(outputSort.count(prefixSort), 1U);
- }
-};
-
-class StreamingWithMultipleLevels : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create(
- {"{a: {b: {c: 3}}, d: 1}", "{a: {b: {c: 1}}, d: 2}", "{a: {b: {c: 1}}, d: 0}"});
- source->sorts = {BSON("a.b.c" << -1 << "a.b.d" << 1 << "d" << 1)};
-
- createGroup(fromjson("{_id: {x: {y: {z: '$a.b.c', q: '$a.b.d'}}, v: '$d'}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"]["y"]["z"], Value(3));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a")["b"]["c"], Value(1));
-
- assertEOF(source);
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 3U);
-
- BSONObj correctSort = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1, '_id.v': 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
-
- BSONObj prefixSortOne = fromjson("{'_id.x.y.z': -1}");
- ASSERT_EQUALS(outputSort.count(prefixSortOne), 1U);
- }
-};
-
-class StreamingWithFieldRepeated : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create(
- {"{a: 1, b: 1}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 3}"});
- source->sorts = {BSON("a" << 1 << "b" << 1)};
-
- createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: '$a'}}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value(1));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(3));
-
- BSONObjSet outputSort = group()->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = fromjson("{'_id.sub.z': 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.sub.z': 1, '_id.sub.y': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
- }
-};
-
-class StreamingWithConstantAndFieldPath : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create(
- {"{a: 5, b: 1}", "{a: 5, b: 2}", "{a: 3, b: 1}", "{a: 1, b: 1}", "{a: 1, b: 1}"});
- source->sorts = {BSON("a" << -1 << "b" << 1)};
-
- createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: {$literal: 'c'}}}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(5));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value("c"_sd));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(3));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = fromjson("{'_id.sub.x': -1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.sub.x': -1, '_id.sub.y': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
- }
-};
-
-class StreamingWithRootSubfield : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- createGroup(fromjson("{_id: '$$ROOT.a'}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 1U);
-
- BSONObj correctSort = fromjson("{_id: 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
- }
-};
-
-class StreamingWithConstant : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("$a" << 1)};
-
- createGroup(fromjson("{_id: 1}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class StreamingWithEmptyId : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("$a" << 1)};
-
- createGroup(fromjson("{_id: {}}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationIfMissingDoubleSort : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inMongos = true;
- const bool inShard = false;
-
- createGroup(BSON("_id" << BSON("x"
- << "$a"
- << "y"
- << "$b")),
- inShard,
- inMongos);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationWithRawRoot : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inMongos = true;
- const bool inShard = false;
-
- createGroup(BSON("_id" << BSON("a"
- << "$$ROOT"
- << "b"
- << "$a")),
- inShard,
- inMongos);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationIfUsingExpressions : public Base {
-public:
- void _doTest() final {
- auto source = DocumentSourceMock::create({"{a: 1, b: 1}", "{a: 2, b: 2}", "{a: 3, b: 1}"});
- source->sorts = {BSON("a" << 1 << "b" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inMongos = true;
- const bool inShard = false;
-
- createGroup(fromjson("{_id: {$sum: ['$a', '$b']}}"), inShard, inMongos);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
/**
* A string constant (not a field path) as an _id expression and passed to an accumulator.
* SERVER-6766
diff --git a/src/mongo/db/pipeline/document_source_limit.h b/src/mongo/db/pipeline/document_source_limit.h
index 947038111a1..0a5ed1428a5 100644
--- a/src/mongo/db/pipeline/document_source_limit.h
+++ b/src/mongo/db/pipeline/document_source_limit.h
@@ -63,11 +63,6 @@ public:
return kStageName.rawData();
}
- BSONObjSet getOutputSorts() final {
- return pSource ? pSource->getOutputSorts()
- : SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- }
-
/**
* Attempts to combine with a subsequent $limit stage, setting 'limit' appropriately.
*/
diff --git a/src/mongo/db/pipeline/document_source_lookup.h b/src/mongo/db/pipeline/document_source_lookup.h
index dc233632496..aeeba4dfa0a 100644
--- a/src/mongo/db/pipeline/document_source_lookup.h
+++ b/src/mongo/db/pipeline/document_source_lookup.h
@@ -117,10 +117,6 @@ public:
DepsTracker::State getDependencies(DepsTracker* deps) const final;
- BSONObjSet getOutputSorts() final {
- return DocumentSource::truncateSortSet(pSource->getOutputSorts(), {_as.fullPath()});
- }
-
boost::optional<MergingLogic> mergingLogic() final {
// {shardsStage, mergingStage, sortPattern}
return MergingLogic{nullptr, this, boost::none};
diff --git a/src/mongo/db/pipeline/document_source_lookup_test.cpp b/src/mongo/db/pipeline/document_source_lookup_test.cpp
index 52a86e0ab1c..5d18a271718 100644
--- a/src/mongo/db/pipeline/document_source_lookup_test.cpp
+++ b/src/mongo/db/pipeline/document_source_lookup_test.cpp
@@ -107,52 +107,6 @@ TEST_F(DocumentSourceLookUpTest, PreservesParentPipelineLetVariables) {
ASSERT_VALUE_EQ(Value(123), lookupStage->getVariables_forTest().getValue(varId, Document()));
}
-TEST_F(DocumentSourceLookUpTest, ShouldTruncateOutputSortOnAsField) {
- auto expCtx = getExpCtx();
- NamespaceString fromNs("test", "a");
- expCtx->setResolvedNamespace_forTest(fromNs, {fromNs, std::vector<BSONObj>{}});
-
- intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
- auto lookup = DocumentSourceLookUp::createFromBson(Document{{"$lookup",
- Document{{"from", "a"_sd},
- {"localField", "b"_sd},
- {"foreignField", "c"_sd},
- {"as", "d.e"_sd}}}}
- .toBson()
- .firstElement(),
- expCtx);
- lookup->setSource(source.get());
-
- BSONObjSet outputSort = lookup->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 1U);
-}
-
-TEST_F(DocumentSourceLookUpTest, ShouldTruncateOutputSortOnSuffixOfAsField) {
- auto expCtx = getExpCtx();
- NamespaceString fromNs("test", "a");
- expCtx->setResolvedNamespace_forTest(fromNs, {fromNs, std::vector<BSONObj>{}});
-
- intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
- auto lookup = DocumentSourceLookUp::createFromBson(Document{{"$lookup",
- Document{{"from", "a"_sd},
- {"localField", "b"_sd},
- {"foreignField", "c"_sd},
- {"as", "d"_sd}}}}
- .toBson()
- .firstElement(),
- expCtx);
- lookup->setSource(source.get());
-
- BSONObjSet outputSort = lookup->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 1U);
-}
-
TEST_F(DocumentSourceLookUpTest, AcceptsPipelineSyntax) {
auto expCtx = getExpCtx();
NamespaceString fromNs("test", "coll");
diff --git a/src/mongo/db/pipeline/document_source_match.h b/src/mongo/db/pipeline/document_source_match.h
index 1bbff006576..1552c5b1219 100644
--- a/src/mongo/db/pipeline/document_source_match.h
+++ b/src/mongo/db/pipeline/document_source_match.h
@@ -43,11 +43,8 @@ public:
virtual ~DocumentSourceMatch() = default;
GetNextResult getNext() override;
+
boost::intrusive_ptr<DocumentSource> optimize() final;
- BSONObjSet getOutputSorts() final {
- return pSource ? pSource->getOutputSorts()
- : SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- }
const char* getSourceName() const override;
diff --git a/src/mongo/db/pipeline/document_source_mock.h b/src/mongo/db/pipeline/document_source_mock.h
index 0c793324869..3fb65ba7c13 100644
--- a/src/mongo/db/pipeline/document_source_mock.h
+++ b/src/mongo/db/pipeline/document_source_mock.h
@@ -62,10 +62,6 @@ public:
return constraints;
}
- BSONObjSet getOutputSorts() override {
- return sorts;
- }
-
static boost::intrusive_ptr<DocumentSourceMock> create();
static boost::intrusive_ptr<DocumentSourceMock> create(Document doc);
diff --git a/src/mongo/db/pipeline/document_source_skip.h b/src/mongo/db/pipeline/document_source_skip.h
index 71a7a099701..d6bc4c88458 100644
--- a/src/mongo/db/pipeline/document_source_skip.h
+++ b/src/mongo/db/pipeline/document_source_skip.h
@@ -72,12 +72,10 @@ public:
*/
Pipeline::SourceContainer::iterator doOptimizeAt(Pipeline::SourceContainer::iterator itr,
Pipeline::SourceContainer* container) final;
+
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
+
boost::intrusive_ptr<DocumentSource> optimize() final;
- BSONObjSet getOutputSorts() final {
- return pSource ? pSource->getOutputSorts()
- : SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- }
DepsTracker::State getDependencies(DepsTracker* deps) const final {
return DepsTracker::State::SEE_NEXT; // This doesn't affect needed fields
diff --git a/src/mongo/db/pipeline/document_source_sort.h b/src/mongo/db/pipeline/document_source_sort.h
index b0053c781fa..eeca2371b6e 100644
--- a/src/mongo/db/pipeline/document_source_sort.h
+++ b/src/mongo/db/pipeline/document_source_sort.h
@@ -76,10 +76,6 @@ public:
return constraints;
}
- BSONObjSet getOutputSorts() final {
- return allPrefixes(_rawSort);
- }
-
DepsTracker::State getDependencies(DepsTracker* deps) const final;
boost::optional<MergingLogic> mergingLogic() final;
diff --git a/src/mongo/db/pipeline/document_source_sort_test.cpp b/src/mongo/db/pipeline/document_source_sort_test.cpp
index 40673061ac7..6937cee08b5 100644
--- a/src/mongo/db/pipeline/document_source_sort_test.cpp
+++ b/src/mongo/db/pipeline/document_source_sort_test.cpp
@@ -176,14 +176,6 @@ TEST_F(DocumentSourceSortTest, Dependencies) {
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
}
-TEST_F(DocumentSourceSortTest, OutputSort) {
- createSort(BSON("a" << 1 << "b.c" << -1));
- BSONObjSet outputSort = sort()->getOutputSorts();
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1 << "b.c" << -1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 2U);
-}
-
TEST_F(DocumentSourceSortTest, ReportsNoPathsModified) {
createSort(BSON("a" << 1 << "b.c" << -1));
auto modifiedPaths = sort()->getModifiedPaths();
diff --git a/src/mongo/db/pipeline/document_source_test.cpp b/src/mongo/db/pipeline/document_source_test.cpp
index 8d84d0e847c..e5bf55c4151 100644
--- a/src/mongo/db/pipeline/document_source_test.cpp
+++ b/src/mongo/db/pipeline/document_source_test.cpp
@@ -41,45 +41,6 @@ namespace mongo {
namespace {
-class DocumentSourceTruncateSort : public ServiceContextTest {};
-
-TEST_F(DocumentSourceTruncateSort, SortTruncatesNormalField) {
- SimpleBSONObjComparator bsonComparator{};
- BSONObj sortKey = BSON("a" << 1 << "b" << 1 << "c" << 1);
- auto truncated =
- DocumentSource::truncateSortSet(bsonComparator.makeBSONObjSet({sortKey}), {"b"});
- ASSERT_EQUALS(truncated.size(), 1U);
- ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
-}
-
-TEST_F(DocumentSourceTruncateSort, SortTruncatesOnSubfield) {
- SimpleBSONObjComparator bsonComparator{};
- BSONObj sortKey = BSON("a" << 1 << "b.c" << 1 << "d" << 1);
- auto truncated =
- DocumentSource::truncateSortSet(bsonComparator.makeBSONObjSet({sortKey}), {"b"});
- ASSERT_EQUALS(truncated.size(), 1U);
- ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
-}
-
-TEST_F(DocumentSourceTruncateSort, SortDoesNotTruncateOnParent) {
- SimpleBSONObjComparator bsonComparator{};
- BSONObj sortKey = BSON("a" << 1 << "b" << 1 << "d" << 1);
- auto truncated =
- DocumentSource::truncateSortSet(bsonComparator.makeBSONObjSet({sortKey}), {"b.c"});
- ASSERT_EQUALS(truncated.size(), 1U);
- ASSERT_EQUALS(truncated.count(BSON("a" << 1 << "b" << 1 << "d" << 1)), 1U);
-}
-
-TEST_F(DocumentSourceTruncateSort, TruncateSortDedupsSortCorrectly) {
- SimpleBSONObjComparator bsonComparator{};
- BSONObj sortKeyOne = BSON("a" << 1 << "b" << 1);
- BSONObj sortKeyTwo = BSON("a" << 1);
- auto truncated = DocumentSource::truncateSortSet(
- bsonComparator.makeBSONObjSet({sortKeyOne, sortKeyTwo}), {"b"});
- ASSERT_EQUALS(truncated.size(), 1U);
- ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
-}
-
class RenamesAToB : public DocumentSourceTestOptimizations {
public:
RenamesAToB() : DocumentSourceTestOptimizations() {}
diff --git a/src/mongo/db/pipeline/document_source_unwind.cpp b/src/mongo/db/pipeline/document_source_unwind.cpp
index 1bd447dae0e..576541c207b 100644
--- a/src/mongo/db/pipeline/document_source_unwind.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind.cpp
@@ -207,31 +207,6 @@ DocumentSource::GetNextResult DocumentSourceUnwind::getNext() {
return nextOut;
}
-BSONObjSet DocumentSourceUnwind::getOutputSorts() {
- BSONObjSet out = SimpleBSONObjComparator::kInstance.makeBSONObjSet();
- std::string unwoundPath = getUnwindPath();
- BSONObjSet inputSort = pSource->getOutputSorts();
-
- for (auto&& sortObj : inputSort) {
- // Truncate each sortObj at the unwindPath.
- BSONObjBuilder outputSort;
-
- for (BSONElement fieldSort : sortObj) {
- if (fieldSort.fieldNameStringData() == unwoundPath) {
- break;
- }
- outputSort.append(fieldSort);
- }
-
- BSONObj outSortObj = outputSort.obj();
- if (!outSortObj.isEmpty()) {
- out.insert(outSortObj);
- }
- }
-
- return out;
-}
-
DocumentSource::GetModPathsReturn DocumentSourceUnwind::getModifiedPaths() const {
std::set<std::string> modifiedFields{_unwindPath.fullPath()};
if (_indexPath) {
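
The DocumentSourceUnwind::getOutputSorts() override removed above cut each inherited sort pattern off at the unwound path, since fanning one document out into several per array element preserves ordering only on the keys that come before that path. A short sketch of that rule, again with simplified types rather than the mongo API:

#include <iostream>
#include <string>
#include <utility>
#include <vector>

using SortPattern = std::vector<std::pair<std::string, int>>;

// Keep only the sort keys that precede the unwound path; an exact match on the
// path ends the pattern, mirroring the removed loop over each input sortObj.
SortPattern truncateAtUnwoundPath(const SortPattern& sort, const std::string& unwoundPath) {
    SortPattern out;
    for (const auto& key : sort) {
        if (key.first == unwoundPath)
            break;  // order past the unwound array is no longer guaranteed
        out.push_back(key);
    }
    return out;
}

int main() {
    SortPattern in{{"a", 1}, {"x.y", 1}, {"b", 1}};
    for (const auto& key : truncateAtUnwoundPath(in, "x.y"))
        std::cout << key.first << ": " << key.second << '\n';  // prints only "a: 1"
}
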
diff --git a/src/mongo/db/pipeline/document_source_unwind.h b/src/mongo/db/pipeline/document_source_unwind.h
index 4b0afc046e7..7948f6fdcd6 100644
--- a/src/mongo/db/pipeline/document_source_unwind.h
+++ b/src/mongo/db/pipeline/document_source_unwind.h
@@ -38,9 +38,10 @@ class DocumentSourceUnwind final : public DocumentSource {
public:
// virtuals from DocumentSource
GetNextResult getNext() final;
+
const char* getSourceName() const final;
+
Value serialize(boost::optional<ExplainOptions::Verbosity> explain = boost::none) const final;
- BSONObjSet getOutputSorts() final;
/**
* Returns the unwound path, and the 'includeArrayIndex' path, if specified.
diff --git a/src/mongo/db/pipeline/document_source_unwind_test.cpp b/src/mongo/db/pipeline/document_source_unwind_test.cpp
index a24a971f13a..fb88d3fa28c 100644
--- a/src/mongo/db/pipeline/document_source_unwind_test.cpp
+++ b/src/mongo/db/pipeline/document_source_unwind_test.cpp
@@ -684,18 +684,6 @@ TEST_F(UnwindStageTest, AddsUnwoundPathToDependencies) {
ASSERT_EQUALS(false, dependencies.getNeedsMetadata(DepsTracker::MetadataType::TEXT_SCORE));
}
-TEST_F(UnwindStageTest, TruncatesOutputSortAtUnwoundPath) {
- auto unwind = DocumentSourceUnwind::create(getExpCtx(), "x.y", false, boost::none);
- auto source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "x.y" << 1 << "b" << 1)};
-
- unwind->setSource(source.get());
-
- BSONObjSet outputSort = unwind->getOutputSorts();
- ASSERT_EQUALS(1U, outputSort.size());
- ASSERT_EQUALS(1U, outputSort.count(BSON("a" << 1)));
-}
-
TEST_F(UnwindStageTest, ShouldPropagatePauses) {
const bool includeNullIfEmptyOrMissing = false;
const boost::optional<std::string> includeArrayIndex = boost::none;
diff --git a/src/mongo/db/query/plan_executor.h b/src/mongo/db/query/plan_executor.h
index 0e58359021e..7f112bd566c 100644
--- a/src/mongo/db/query/plan_executor.h
+++ b/src/mongo/db/query/plan_executor.h
@@ -421,12 +421,6 @@ public:
*/
virtual void enqueue(const BSONObj& obj) = 0;
- /**
- * Helper method which returns a set of BSONObj, where each represents a sort order of our
- * output.
- */
- virtual BSONObjSet getOutputSorts() const = 0;
-
virtual bool isMarkedAsKilled() const = 0;
virtual Status getKillStatus() = 0;
diff --git a/src/mongo/db/query/plan_executor_impl.cpp b/src/mongo/db/query/plan_executor_impl.cpp
index 5ce9ede3350..f0b6c6a0bb0 100644
--- a/src/mongo/db/query/plan_executor_impl.cpp
+++ b/src/mongo/db/query/plan_executor_impl.cpp
@@ -315,31 +315,6 @@ const NamespaceString& PlanExecutorImpl::nss() const {
return _nss;
}
-BSONObjSet PlanExecutorImpl::getOutputSorts() const {
- if (_qs && _qs->root) {
- _qs->root->computeProperties();
- return _qs->root->getSort();
- }
-
- if (_root->stageType() == STAGE_MULTI_PLAN) {
- // If we needed a MultiPlanStage, the PlanExecutor does not own the QuerySolution. We
- // must go through the MultiPlanStage to access the output sort.
- auto multiPlanStage = static_cast<MultiPlanStage*>(_root.get());
- if (multiPlanStage->bestSolution()) {
- multiPlanStage->bestSolution()->root->computeProperties();
- return multiPlanStage->bestSolution()->root->getSort();
- }
- } else if (_root->stageType() == STAGE_SUBPLAN) {
- auto subplanStage = static_cast<SubplanStage*>(_root.get());
- if (subplanStage->compositeSolution()) {
- subplanStage->compositeSolution()->root->computeProperties();
- return subplanStage->compositeSolution()->root->getSort();
- }
- }
-
- return SimpleBSONObjComparator::kInstance.makeBSONObjSet();
-}
-
OperationContext* PlanExecutorImpl::getOpCtx() const {
return _opCtx;
}
diff --git a/src/mongo/db/query/plan_executor_impl.h b/src/mongo/db/query/plan_executor_impl.h
index bf57ba839ab..673abf6dc55 100644
--- a/src/mongo/db/query/plan_executor_impl.h
+++ b/src/mongo/db/query/plan_executor_impl.h
@@ -72,7 +72,6 @@ public:
void markAsKilled(Status killStatus) final;
void dispose(OperationContext* opCtx) final;
void enqueue(const BSONObj& obj) final;
- BSONObjSet getOutputSorts() const final;
bool isMarkedAsKilled() const final;
Status getKillStatus() final;
bool isDisposed() const final;
diff --git a/src/mongo/dbtests/documentsourcetests.cpp b/src/mongo/dbtests/documentsourcetests.cpp
index bcc033257a0..b35bbda4d32 100644
--- a/src/mongo/dbtests/documentsourcetests.cpp
+++ b/src/mongo/dbtests/documentsourcetests.cpp
@@ -251,43 +251,6 @@ TEST_F(DocumentSourceCursorTest, LimitCoalesce) {
ASSERT(source()->getNext().isEOF());
}
-//
-// Test cursor output sort.
-//
-TEST_F(DocumentSourceCursorTest, CollectionScanProvidesNoSort) {
- createSource(BSON("$natural" << 1));
- ASSERT_EQ(source()->getOutputSorts().size(), 0U);
- source()->dispose();
-}
-
-TEST_F(DocumentSourceCursorTest, IndexScanProvidesSortOnKeys) {
- client.createIndex(nss.ns(), BSON("a" << 1));
- createSource(BSON("a" << 1));
-
- ASSERT_EQ(source()->getOutputSorts().size(), 1U);
- ASSERT_EQ(source()->getOutputSorts().count(BSON("a" << 1)), 1U);
- source()->dispose();
-}
-
-TEST_F(DocumentSourceCursorTest, ReverseIndexScanProvidesSort) {
- client.createIndex(nss.ns(), BSON("a" << -1));
- createSource(BSON("a" << -1));
-
- ASSERT_EQ(source()->getOutputSorts().size(), 1U);
- ASSERT_EQ(source()->getOutputSorts().count(BSON("a" << -1)), 1U);
- source()->dispose();
-}
-
-TEST_F(DocumentSourceCursorTest, CompoundIndexScanProvidesMultipleSorts) {
- client.createIndex(nss.ns(), BSON("a" << 1 << "b" << -1));
- createSource(BSON("a" << 1 << "b" << -1));
-
- ASSERT_EQ(source()->getOutputSorts().size(), 2U);
- ASSERT_EQ(source()->getOutputSorts().count(BSON("a" << 1)), 1U);
- ASSERT_EQ(source()->getOutputSorts().count(BSON("a" << 1 << "b" << -1)), 1U);
- source()->dispose();
-}
-
TEST_F(DocumentSourceCursorTest, SerializationNoExplainLevel) {
// Nothing serialized when no explain mode specified.
createSource();